code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
package jat.coreNOSA.gps; /* JAT: Java Astrodynamics Toolkit * * Copyright (c) 2003 National Aeronautics and Space Administration. All rights reserved. * * This file is part of JAT. JAT is free software; you can * redistribute it and/or modify it under the terms of the * NASA Open Source Agreement * * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * NASA Open Source Agreement for more details. * * You should have received a copy of the NASA Open Source Agreement * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. * * * File Created on Jun 20, 2003 */ //import jat.gps.*; import jat.coreNOSA.cm.Constants; import jat.coreNOSA.cm.TwoBody; import jat.coreNOSA.math.MatrixVector.data.Matrix; import jat.coreNOSA.math.MatrixVector.data.RandomNumber; import jat.coreNOSA.math.MatrixVector.data.VectorN; /** * <P> * The URE_Model Class provides a model of the errors in the GPS system * due to GPS SV ephemeris and clock errors. For this simulation, they * are modeled as random constants with sigmas from: * * Reference: J. F. Zumberge and W. I. Bertiger, "Ephemeris and Clock * Navigation Message Accuracy", Global Positioning System: Theory and * Applications, Volume 1, edited by Parkinson and Spilker. 
* * @author * @version 1.0 */ public class URE_Model { /** Radial ephemeris error sigma in meters */ private static final double sigma_r = 1.2; /** Cross-track ephemeris error sigma in meters */ private static final double sigma_c = 3.2; /** Along-track ephemeris error sigma in meters */ private static final double sigma_a = 4.5; /** SV Clock error sigma in meters */ private static final double sigma_t = 1.12E-08 * Constants.c; private static final double sigma_ure = Math.sqrt(sigma_r*sigma_r + sigma_c*sigma_c + sigma_a*sigma_a + sigma_t*sigma_t); public static final double correlationTime = 7200.0; private double qbias; private double q; /** Radial ephemeris error vector, one entry per GPS SV */ private VectorN dr; /** Crosstrack ephemeris error vector, one entry per GPS SV */ private VectorN dc; /** Alongtrack ephemeris error vector, one entry per GPS SV */ private VectorN da; /** SV Clock error vector, one entry per GPS SV */ private VectorN dtc; /** Size of the GPS Constellation */ private int size; /** Constructor * @param n size of the GPS Constellation */ public URE_Model(int n) { this.size = n; dr = new VectorN(n); dc = new VectorN(n); da = new VectorN(n); dtc = new VectorN(n); RandomNumber rn = new RandomNumber(); for (int i = 0; i < n; i++) { double radial = rn.normal(0.0, sigma_r); dr.set(i, radial); double crosstrack = rn.normal(0.0, sigma_c); dc.set(i, crosstrack); double alongtrack = rn.normal(0.0, sigma_a); da.set(i, alongtrack); double clock = rn.normal(0.0, sigma_t); dtc.set(i, clock); } double dt = 1.0; double exponent = -2.0*dt/correlationTime; this.qbias = sigma_ure*sigma_ure*(1.0 - Math.exp(exponent)); // in (rad/sec)^2/Hz this.q = 2.0 * sigma_ure*sigma_ure / correlationTime; } /** Constructor * @param n size of the GPS Constellation * @param seed long containing random number seed to be used */ public URE_Model(int n, long seed) { this.size = n; dr = new VectorN(n); dc = new VectorN(n); da = new VectorN(n); dtc = new VectorN(n); RandomNumber 
rn = new RandomNumber(seed); for (int i = 0; i < n; i++) { double radial = rn.normal(0.0, sigma_r); dr.set(i, radial); double crosstrack = rn.normal(0.0, sigma_c); dc.set(i, crosstrack); double alongtrack = rn.normal(0.0, sigma_a); da.set(i, alongtrack); double clock = rn.normal(0.0, sigma_t); dtc.set(i, clock); } double dt = 1.0; double exponent = -2.0*dt/correlationTime; this.qbias = sigma_ure*sigma_ure*(1.0 - Math.exp(exponent)); // in (rad/sec)^2/Hz this.q = 2.0 * sigma_ure*sigma_ure / correlationTime; } /** Compute the User range error due to SV clock and ephemeris errors. * @param i GPS SV index * @param los GPS line of sight vector * @param rGPS GPS SV position vector * @param vGPS GPS SV velocity vector * @return the user range error in meters */ public double ure (int i, VectorN los, VectorN rGPS, VectorN vGPS) { // get the transformation from RIC to ECI TwoBody orbit = new TwoBody(Constants.GM_Earth, rGPS, vGPS); Matrix rot = orbit.RSW2ECI(); // form the ephemeris error vector for the ith GPS SV VectorN error = new VectorN(this.dr.x[i], this.da.x[i], this.dc.x[i]); // rotate the ephemeris error to the ECI frame VectorN errECI = rot.times(error); // find the magnitude of the projection of the error vector onto the LOS vector double drho = errECI.projectionMag(los); // add the SV clock error double out = drho + this.dtc.x[i]; return out; } /** * Compute the derivatives for the URE state. * The URE is modeled as a first order Gauss-Markov process. * Used by GPS_INS Process Model. * @param ure URE state vector * @return the time derivative of the URE */ public VectorN ureProcess(VectorN ure) { double coef = -1.0/correlationTime; VectorN out = ure.times(coef); return out; } /** * Return the URE noise strength to be used in * the process noise matrix Q. * @return URE noise strength */ public double biasQ() { return this.qbias; } /** * Return the URE noise strength to be used in * the process noise matrix Q. 
* @return URE noise strength */ public double Q() { return this.q; } /** * Return the URE sigma * @return URE sigma */ public double sigma() { return sigma_ure; } }
atmelino/JATexperimental
src/jat/coreNOSA/gps/URE_Model.java
Java
apache-2.0
6,016
package statzall.codec; import static org.junit.Assert.*; import org.junit.Test; import statzall.Cast; import statzall.StreamCalc; import statzall.codec.StreamCalcKryo; public class StreamCalcKryoTest { @Test public void test() { StreamCalc calc = new StreamCalc(10, 10); StreamCalcKryo target = new StreamCalcKryo(); calc.add(1D, 2D, 3D, 4D, 5D, 6D, 7D, 8D, 9D); byte[] buff = target.write(calc); StreamCalc read = target.read(buff); assertEquals(9.0D, Cast.as(read.snapshot().get("q009"), Double.class), 0.0D); } }
LamdaFu/statzall
statzall-core/src/test/java/statzall/codec/StreamCalcKryoTest.java
Java
apache-2.0
539
<?php namespace WoowUpTest\WoowUp; use WoowUp\Client as WoowUp; /** * */ class PurchasesTest extends \PHPUnit_Framework_TestCase { public function testCreatePurchase() { $woowup = new WoowUp($_ENV['WOOWUP_API_KEY'], $_ENV['WOOWUP_API_HOST'], $_ENV['WOOWUP_API_VERSION']); $email = md5(microtime()) . '@email.com'; $r = $woowup->users->create([ 'service_uid' => $email, 'email' => $email, 'first_name' => 'John', 'last_name' => 'Doe', ]); $this->assertEquals($r, true); $invoiceNumber = rand(999, 99999); $r = $woowup->purchases->create([ "service_uid" => $email, "points" => 24, "invoice_number" => "$invoiceNumber", "purchase_detail" => [ [ "sku" => "2907362", "product_name" => "Heladera Patrick", "quantity" => 1, "unit_price" => 1999.00, "variations" => [ [ "name" => "Talle", "value" => "XL", ], ], ], ], "prices" => [ "cost" => 123.00, "shipping" => 123.00, "gross" => 123.00, "tax" => 123.00, "discount" => 123.00, "total" => 123.00, ], "branch_name" => "Palermo I", "createtime" => date('c'), // ISO8601 ]); $this->assertEquals($r, true); } }
woowup/woowup-php-client
tests/WoowUp/PurchasesTest.php
PHP
apache-2.0
1,726
/* * Copyright 2009-2013 by The Regents of the University of California * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * you may obtain a copy of the License from * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hyracks.algebricks.core.algebra.properties; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.hyracks.algebricks.common.utils.ListSet; import org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass; import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable; public abstract class AbstractGroupingProperty { protected Set<LogicalVariable> columnSet; public AbstractGroupingProperty(Set<LogicalVariable> columnSet) { this.columnSet = columnSet; } public Set<LogicalVariable> getColumnSet() { return columnSet; } public final void normalizeGroupingColumns(Map<LogicalVariable, EquivalenceClass> equivalenceClasses, List<FunctionalDependency> fds) { replaceGroupingColumnsByEqClasses(equivalenceClasses); applyFDsToGroupingColumns(fds); } private void replaceGroupingColumnsByEqClasses(Map<LogicalVariable, EquivalenceClass> equivalenceClasses) { if (equivalenceClasses == null || equivalenceClasses.isEmpty()) { return; } Set<LogicalVariable> norm = new ListSet<LogicalVariable>(); for (LogicalVariable v : columnSet) { EquivalenceClass ec = equivalenceClasses.get(v); if (ec == null) { norm.add(v); } else { if (ec.representativeIsConst()) { // trivially satisfied, so the var. 
can be removed } else { norm.add(ec.getVariableRepresentative()); } } } columnSet = norm; } private void applyFDsToGroupingColumns(List<FunctionalDependency> fds) { // the set of vars. is unordered // so we try all FDs on all variables (incomplete algo?) if (fds == null || fds.isEmpty()) { return; } Set<LogicalVariable> norm = new ListSet<LogicalVariable>(); for (LogicalVariable v : columnSet) { boolean isImpliedByAnFD = false; for (FunctionalDependency fdep : fds) { if (columnSet.containsAll(fdep.getHead()) && fdep.getTail().contains(v)) { isImpliedByAnFD = true; norm.addAll(fdep.getHead()); break; } } if (!isImpliedByAnFD) { norm.add(v); } } columnSet = norm; } }
tectronics/hyracks
algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/AbstractGroupingProperty.java
Java
apache-2.0
3,116
/** * Copyright 2015-2016 INESC-ID * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. **/ package domainregistry; public class HypertiesNotFoundException extends RuntimeException{ }
reTHINK-project/dev-registry-domain
server/src/main/java/domainregistry/exception/HypertiesNotFoundException.java
Java
apache-2.0
701
from symbol.builder import add_anchor_to_arg from models.FPN.builder import MSRAResNet50V1FPN as Backbone from models.FPN.builder import FPNNeck as Neck from models.FPN.builder import FPNRoiAlign as RoiExtractor from models.FPN.builder import FPNBbox2fcHead as BboxHead from mxnext.complicate import normalizer_factory from models.msrcnn.builder import MaskScoringFasterRcnn as Detector from models.msrcnn.builder import MaskFPNRpnHead as RpnHead from models.msrcnn.builder import MaskFasterRcnn4ConvHead as MaskHead from models.maskrcnn.builder import BboxPostProcessor from models.maskrcnn.process_output import process_output from models.msrcnn.builder import MaskIoUConvHead as MaskIoUHead def get_config(is_train): class General: log_frequency = 10 name = __name__.rsplit("/")[-1].rsplit(".")[-1] batch_image = 2 if is_train else 1 fp16 = False loader_worker = 8 class KvstoreParam: kvstore = "nccl" batch_image = General.batch_image gpus = [0, 1, 2, 3, 4, 5, 6, 7] fp16 = General.fp16 class NormalizeParam: normalizer = normalizer_factory(type="fixbn") class BackboneParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer class NeckParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer class RpnParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer batch_image = General.batch_image nnvm_proposal = True nnvm_rpn_target = False class anchor_generate: scale = (8,) ratio = (0.5, 1.0, 2.0) stride = (4, 8, 16, 32, 64) image_anchor = 256 max_side = 1400 class anchor_assign: allowed_border = 0 pos_thr = 0.7 neg_thr = 0.3 min_pos_thr = 0.0 image_anchor = 256 pos_fraction = 0.5 class head: conv_channel = 256 mean = (0, 0, 0, 0) std = (1, 1, 1, 1) class proposal: pre_nms_top_n = 2000 if is_train else 1000 post_nms_top_n = 2000 if is_train else 1000 nms_thr = 0.7 min_bbox_side = 0 class subsample_proposal: proposal_wo_gt = False image_roi = 512 fg_fraction = 0.25 fg_thr = 0.5 bg_thr_hi = 0.5 bg_thr_lo = 0.0 class bbox_target: num_reg_class = 81 
class_agnostic = False weight = (1.0, 1.0, 1.0, 1.0) mean = (0.0, 0.0, 0.0, 0.0) std = (0.1, 0.1, 0.2, 0.2) class BboxParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer num_class = 1 + 80 image_roi = 512 batch_image = General.batch_image class regress_target: class_agnostic = False mean = (0.0, 0.0, 0.0, 0.0) std = (0.1, 0.1, 0.2, 0.2) class MaskParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer resolution = 28 dim_reduced = 256 num_fg_roi = int(RpnParam.subsample_proposal.image_roi * RpnParam.subsample_proposal.fg_fraction) class RoiParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer out_size = 7 stride = (4, 8, 16, 32) roi_canonical_scale = 224 roi_canonical_level = 4 img_roi = 1000 class MaskRoiParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer out_size = 14 stride = (4, 8, 16, 32) roi_canonical_scale = 224 roi_canonical_level = 4 img_roi = 100 class DatasetParam: if is_train: image_set = ("coco_train2017", ) else: image_set = ("coco_val2017", ) class OptimizeParam: class optimizer: type = "sgd" lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image momentum = 0.9 wd = 0.0001 clip_gradient = None class schedule: mult = 1 begin_epoch = 0 end_epoch = 6 * mult lr_iter = [60000 * mult * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image), 80000 * mult * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image)] class warmup: type = "gradual" lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image / 3.0 iter = 500 class TestParam: min_det_score = 0.05 max_det_per_image = 100 process_roidb = lambda x: x process_output = lambda x, y: process_output(x, y) class model: prefix = "experiments/{}/checkpoint".format(General.name) epoch = OptimizeParam.schedule.end_epoch class nms: type = "nms" thr = 0.5 class coco: annotation = "data/coco/annotations/instances_minival2014.json" backbone = Backbone(BackboneParam) neck = Neck(NeckParam) rpn_head = RpnHead(RpnParam, MaskParam) roi_extractor = 
RoiExtractor(RoiParam) mask_roi_extractor = RoiExtractor(MaskRoiParam) bbox_head = BboxHead(BboxParam) mask_head = MaskHead(BboxParam, MaskParam, MaskRoiParam) bbox_post_processer = BboxPostProcessor(TestParam) maskiou_head = MaskIoUHead(TestParam, BboxParam, MaskParam) detector = Detector() if is_train: train_sym = detector.get_train_symbol(backbone, neck, rpn_head, roi_extractor, mask_roi_extractor, bbox_head, mask_head, maskiou_head) test_sym = None else: train_sym = None test_sym = detector.get_test_symbol(backbone, neck, rpn_head, roi_extractor, mask_roi_extractor, bbox_head, mask_head, maskiou_head, bbox_post_processer) class ModelParam: train_symbol = train_sym test_symbol = test_sym from_scratch = False random = True memonger = False memonger_until = "stage3_unit21_plus" class pretrain: prefix = "pretrain_model/resnet-v1-50" epoch = 0 fixed_param = ["conv0", "stage1", "gamma", "beta"] excluded_param = ["mask_fcn"] def process_weight(sym, arg, aux): for stride in RpnParam.anchor_generate.stride: add_anchor_to_arg( sym, arg, aux, RpnParam.anchor_generate.max_side, stride, RpnParam.anchor_generate.scale, RpnParam.anchor_generate.ratio) # data processing class NormParam: mean = (122.7717, 115.9465, 102.9801) # RGB order std = (1.0, 1.0, 1.0) # data processing class ResizeParam: short = 800 long = 1333 class PadParam: short = 800 long = 1333 max_num_gt = 100 max_len_gt_poly = 2500 class AnchorTarget2DParam: def __init__(self): self.generate = self._generate() class _generate: def __init__(self): self.stride = (4, 8, 16, 32, 64) self.short = (200, 100, 50, 25, 13) self.long = (334, 167, 84, 42, 21) scales = (8) aspects = (0.5, 1.0, 2.0) class assign: allowed_border = 0 pos_thr = 0.7 neg_thr = 0.3 min_pos_thr = 0.0 class sample: image_anchor = 256 pos_fraction = 0.5 class RenameParam: mapping = dict(image="data") from core.detection_input import ReadRoiRecord, Resize2DImageBbox, \ ConvertImageFromHwcToChw, Flip2DImageBbox, Pad2DImageBbox, \ RenameRecord, 
Norm2DImage, Pad2DImage from models.maskrcnn.input import PreprocessGtPoly, EncodeGtPoly, \ Resize2DImageBboxMask, Flip2DImageBboxMask, Pad2DImageBboxMask from models.FPN.input import PyramidAnchorTarget2D if is_train: transform = [ ReadRoiRecord(None), Norm2DImage(NormParam), PreprocessGtPoly(), Resize2DImageBboxMask(ResizeParam), Flip2DImageBboxMask(), EncodeGtPoly(PadParam), Pad2DImageBboxMask(PadParam), ConvertImageFromHwcToChw(), RenameRecord(RenameParam.mapping) ] data_name = ["data"] label_name = ["im_info", "gt_bbox", "gt_poly"] if not RpnParam.nnvm_rpn_target: transform.append(PyramidAnchorTarget2D(AnchorTarget2DParam())) label_name += ["rpn_cls_label", "rpn_reg_target", "rpn_reg_weight"] else: transform = [ ReadRoiRecord(None), Norm2DImage(NormParam), Resize2DImageBbox(ResizeParam), ConvertImageFromHwcToChw(), RenameRecord(RenameParam.mapping) ] data_name = ["data", "im_info", "im_id", "rec_id"] label_name = [] import core.detection_metric as metric from models.msrcnn.metric import SigmoidCELossMetric, L2 rpn_acc_metric = metric.AccWithIgnore( "RpnAcc", ["rpn_cls_loss_output", "rpn_cls_label_blockgrad_output"], [] ) rpn_l1_metric = metric.L1( "RpnL1", ["rpn_reg_loss_output", "rpn_cls_label_blockgrad_output"], [] ) # for bbox, the label is generated in network so it is an output box_acc_metric = metric.AccWithIgnore( "RcnnAcc", ["bbox_cls_loss_output", "bbox_label_blockgrad_output"], [] ) box_l1_metric = metric.L1( "RcnnL1", ["bbox_reg_loss_output", "bbox_label_blockgrad_output"], [] ) mask_cls_metric = SigmoidCELossMetric( "MaskCE", ["mask_loss_output"], [] ) iou_l2_metric = L2( "IoUL2", ["iou_head_loss_output"], [] ) metric_list = [rpn_acc_metric, rpn_l1_metric, box_acc_metric, box_l1_metric, mask_cls_metric, iou_l2_metric] return General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam, \ ModelParam, OptimizeParam, TestParam, \ transform, data_name, label_name, metric_list
TuSimple/simpledet
config/ms_r50v1_fpn_1x.py
Python
apache-2.0
10,314
/* * Copyright 2008 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Created on Feb 6, 2008 */ package org.drools.reteoo; import java.io.Serializable; import java.io.Externalizable; import java.io.ObjectOutput; import java.io.IOException; import java.io.ObjectInput; import java.util.HashMap; import java.util.Map; import org.drools.common.BaseNode; /** * This context class is used during rule removal to ensure * network consistency. * * @author etirelli * */ public class RuleRemovalContext implements Externalizable { private Map visitedNodes; public RuleRemovalContext() { this.visitedNodes = new HashMap(); } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { visitedNodes = (Map) in.readObject(); } public void writeExternal(ObjectOutput out) throws IOException { out.writeObject( visitedNodes ); } /** * We need to track tuple source nodes that we visit * to avoid multiple removal in case of subnetworks * * @param node */ public void visitTupleSource(LeftTupleSource node) { this.visitedNodes.put( new Integer( node.getId() ), node ); } /** * We need to track tuple source nodes that we visit * to avoid multiple removal in case of subnetworks * * @param node * @return */ public boolean alreadyVisited(LeftTupleSource node) { return this.visitedNodes.containsKey( new Integer( node.getId() ) ); } public void clear() { this.visitedNodes.clear(); } }
bobmcwhirter/drools
drools-core/src/main/java/org/drools/reteoo/RuleRemovalContext.java
Java
apache-2.0
2,187
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // $Id: Validator.java 888884 2009-12-09 17:36:46Z mrglavas $ using System; using java = biz.ritter.javapi; using javax = biz.ritter.javapix; using org.xml.sax; namespace biz.ritter.javapix.xml.validation { /* import javax.xml.transform.Result; import javax.xml.transform.Source; import org.w3c.dom.ls.LSResourceResolver; */ /** * <p>A processor that checks an XML document against {@link Schema}.</p> * * <p/> * A validator is a thread-unsafe and non-reentrant object. * In other words, it is the application's responsibility to make * sure that one {@link Validator} object is not used from * more than one thread at any given time, and while the <tt>validate</tt> * method is invoked, applications may not recursively call * the <tt>validate</tt> method. * <p/> * * Note that while the {@link #validate(javax.xml.transform.Source)} and {@link #validate(javax.xml.transform.Source, javax.xml.transform.Result)} * methods take a {@link Source} instance, the <code>Source</code> * instance must be a <code>SAXSource</code>, <code>DOMSource</code>, <code>StAXSource</code> or <code>StreamSource</code>. 
* * @author <a href="mailto:Kohsuke.Kawaguchi@Sun.com">Kohsuke Kawaguchi</a> * @version $Revision: 888884 $, $Date: 2009-12-09 12:36:46 -0500 (Wed, 09 Dec 2009) $ * @since 1.5 */ public abstract class Validator { /** * Constructor for derived classes. * * <p/> * The constructor does nothing. * * <p/> * Derived classes must create {@link Validator} objects that have * <tt>null</tt> {@link ErrorHandler} and * <tt>null</tt> {@link LSResourceResolver}. */ protected Validator () { } /** * <p>Reset this <code>Validator</code> to its original configuration.</p> * * <p><code>Validator</code> is reset to the same state as when it was created with * {@link Schema#newValidator()}. * <code>reset()</code> is designed to allow the reuse of existing <code>Validator</code>s * thus saving resources associated with the creation of new <code>Validator</code>s.</p> * * <p>The reset <code>Validator</code> is not guaranteed to have the same {@link LSResourceResolver} or {@link ErrorHandler} * <code>Object</code>s, e.g. {@link Object#equals(Object obj)}. It is guaranteed to have a functionally equal * <code>LSResourceResolver</code> and <code>ErrorHandler</code>.</p> */ public abstract void reset (); /** * Validates the specified input. * * <p/> * This is just a convenience method of: * <pre> * validate(source,null); * </pre> * * @see #setErrorHandler(ErrorHandler) */ public void validate (javax.xml.transform.Source source) {//throws SAXException, IOException { validate (source, null); } /** * Validates the specified input and send the augmented validation * result to the specified output. * * <p/> * This method places the following restrictions on the types of * the {@link Source}/{@link Result} accepted. 
* * <h4>{@link Source}/{@link Result} accepted:</h4> * <table border='1'> * <thead> * <tr> * <td></td> * <td>{@link javax.xml.transform.sax.SAXSource}</td> * <td>{@link javax.xml.transform.dom.DOMSource}</td> * <td>{@link javax.xml.transform.stax.StAXSource}</td> * <td>{@link javax.xml.transform.stream.StreamSource}</td> * </tr> * </thead> * <tbody> * <tr> * <td><tt>null</tt></td> * <td>OK</td> * <td>OK</td> * <td>OK</td> * <td>OK</td> * </tr> * <tr> * <td>{@link javax.xml.transform.sax.SAXResult}</td> * <td>OK</td> * <td>Err</td> * <td>Err</td> * <td>Err</td> * </tr> * <tr> * <td>{@link javax.xml.transform.dom.DOMResult}</td> * <td>Err</td> * <td>OK</td> * <td>Err</td> * <td>Err</td> * </tr> * <tr> * <td>{@link javax.xml.transform.stax.StAXResult}</td> * <td>Err</td> * <td>Err</td> * <td>OK</td> * <td>Err</td> * </tr> * <tr> * <td>{@link javax.xml.transform.stream.StreamResult}</td> * <td>Err</td> * <td>Err</td> * <td>Err</td> * <td>OK</td> * </tr> * </tbody> * </table> * * <p/> * To validate one {@link Source} into another kind of {@link Result}, use the identity transformer * (see {@link javax.xml.transform.TransformerFactory#newTransformer()}). * * <p/> * Errors found during the validation is sent to the specified * {@link ErrorHandler}. * * <p/> * If a document is valid, or if a document contains some errors * but none of them were fatal and the {@link ErrorHandler} didn't * throw any exception, then the method returns normally. * * @param source * XML to be validated. Must not be null. * * @param result * The {@link Result} object that receives (possibly augmented) * XML. This parameter can be null if the caller is not interested * in it. 
* * Note that when a {@link javax.xml.transform.dom.DOMResult} is used, * a validator might just pass the same DOM node from * {@link javax.xml.transform.dom.DOMSource} to * {@link javax.xml.transform.dom.DOMResult} * (in which case <tt>source.getNode()==result.getNode()</tt>), * it might copy the entire DOM tree, or it might alter the * node given by the source. * * @throws IllegalArgumentException * If the {@link Result} type doesn't match the {@link Source} type, * or if the specified source is not a * {@link javax.xml.transform.sax.SAXSource}, * {@link javax.xml.transform.dom.DOMSource}, * {@link javax.xml.transform.stax.StAXSource} or * {@link javax.xml.transform.stream.StreamSource}. * * @throws SAXException * If the {@link ErrorHandler} throws a {@link SAXException} or * if a fatal error is found and the {@link ErrorHandler} returns * normally. * * @throws IOException * If the validator is processing a * {@link javax.xml.transform.sax.SAXSource} and the * underlying {@link org.xml.sax.XMLReader} throws an * {@link IOException}. * * @throws NullPointerException * If the <tt>source</tt> parameter is null. * * @see #validate(Source) */ public abstract void validate (javax.xml.transform.Source source, javax.xml.transform.Result result); // throws SAXException, IOException; /** * Sets the {@link ErrorHandler} to receive errors encountered * during the <code>validate</code> method invocation. * * <p/> * Error handler can be used to customize the error handling process * during a validation. When an {@link ErrorHandler} is set, * errors found during the validation will be first sent * to the {@link ErrorHandler}. * * <p/> * The error handler can abort further validation immediately * by throwing {@link SAXException} from the handler. 
Or for example * it can print an error to the screen and try to continue the * validation by returning normally from the {@link ErrorHandler} * * <p/> * If any {@link Throwable} is thrown from an {@link ErrorHandler}, * the caller of the <code>validate</code> method will be thrown * the same {@link Throwable} object. * * <p/> * {@link Validator} is not allowed to * throw {@link SAXException} without first reporting it to * {@link ErrorHandler}. * * <p/> * When the {@link ErrorHandler} is null, the implementation will * behave as if the following {@link ErrorHandler} is set: * <pre> * class DraconianErrorHandler implements {@link ErrorHandler} { * public void fatalError( {@link org.xml.sax.SAXParseException} e ) throws {@link SAXException} { * throw e; * } * public void error( {@link org.xml.sax.SAXParseException} e ) throws {@link SAXException} { * throw e; * } * public void warning( {@link org.xml.sax.SAXParseException} e ) throws {@link SAXException} { * // noop * } * } * </pre> * * <p/> * When a new {@link Validator} object is created, initially * this field is set to null. * * @param errorHandler * A new error handler to be set. This parameter can be null. */ public abstract void setErrorHandler (ErrorHandler errorHandler); /** * Gets the current {@link ErrorHandler} set to this {@link Validator}. * * @return * This method returns the object that was last set through * the {@link #setErrorHandler(ErrorHandler)} method, or null * if that method has never been called since this {@link Validator} * has created. * * @see #setErrorHandler(ErrorHandler) */ public abstract ErrorHandler getErrorHandler (); /** * Sets the {@link LSResourceResolver} to customize * resource resolution while in a validation episode. * * <p/> * {@link Validator} uses a {@link LSResourceResolver} * when it needs to locate external resources while a validation, * although exactly what constitutes "locating external resources" is * up to each schema language. 
* * <p/> * When the {@link LSResourceResolver} is null, the implementation will * behave as if the following {@link LSResourceResolver} is set: * <pre> * class DumbLSResourceResolver implements {@link LSResourceResolver} { * public {@link org.w3c.dom.ls.LSInput} resolveResource( * String publicId, String systemId, String baseURI) { * * return null; // always return null * } * } * </pre> * * <p/> * If a {@link LSResourceResolver} throws a {@link RuntimeException} * (or instances of its derived classes), * then the {@link Validator} will abort the parsing and * the caller of the <code>validate</code> method will receive * the same {@link RuntimeException}. * * <p/> * When a new {@link Validator} object is created, initially * this field is set to null. * * @param resourceResolver * A new resource resolver to be set. This parameter can be null. */ public abstract void setResourceResolver (org.w3c.dom.ls.LSResourceResolver resourceResolver); /** * Gets the current {@link LSResourceResolver} set to this {@link Validator}. * * @return * This method returns the object that was last set through * the {@link #setResourceResolver(LSResourceResolver)} method, or null * if that method has never been called since this {@link Validator} * has created. * * @see #setErrorHandler(ErrorHandler) */ public abstract org.w3c.dom.ls.LSResourceResolver getResourceResolver (); /** * Look up the value of a feature flag. * * <p/>The feature name is any fully-qualified URI. It is * possible for a {@link Validator} to recognize a feature name but * temporarily be unable to return its value. * Some feature values may be available only in specific * contexts, such as before, during, or after a validation. * * <p>Implementors are free (and encouraged) to invent their own features, * using names built on their own URIs.</p> * * @param name The feature name, which is a non-null fully-qualified URI. * @return The current value of the feature (true or false). 
* @exception org.xml.sax.SAXNotRecognizedException If the feature * value can't be assigned or retrieved. * @exception org.xml.sax.SAXNotSupportedException When the * {@link Validator} recognizes the feature name but * cannot determine its value at this time. * @throws NullPointerException * When the name parameter is null. * @see #setFeature(String, boolean) */ public bool getFeature (String name) {//throws SAXNotRecognizedException, SAXNotSupportedException { if (name == null) throw new java.lang.NullPointerException ("the name parameter is null"); throw new SAXNotRecognizedException (name); } /** * Set the value of a feature flag. * * <p> * Feature can be used to control the way a {@link Validator} * parses schemas, although {@link Validator}s are not required * to recognize any specific property names.</p> * * <p>The feature name is any fully-qualified URI. It is * possible for a {@link Validator} to expose a feature value but * to be unable to change the current value. * Some feature values may be immutable or mutable only * in specific contexts, such as before, during, or after * a validation.</p> * * @param name The feature name, which is a non-null fully-qualified URI. * @param value The requested value of the feature (true or false). * * @exception org.xml.sax.SAXNotRecognizedException If the feature * value can't be assigned or retrieved. * @exception org.xml.sax.SAXNotSupportedException When the * {@link Validator} recognizes the feature name but * cannot set the requested value. * @throws NullPointerException * When the name parameter is null. * * @see #getFeature(String) */ public void setFeature (String name, bool value) {//throws SAXNotRecognizedException, SAXNotSupportedException { if (name == null) throw new java.lang.NullPointerException ("the name parameter is null"); throw new SAXNotRecognizedException (name); } /** * Set the value of a property. * * <p>The property name is any fully-qualified URI. 
It is possible for a {@link Validator} to recognize a property name but
         * to be unable to change the current value. Some property values may
         * be immutable or mutable only in specific contexts, such as before,
         * during, or after a validation.</p>
         *
         * <p>{@link Validator}s are not required to recognize setting any
         * specific property names.</p>
         *
         * @param name The property name, which is a non-null fully-qualified URI.
         * @param objectJ The requested value for the property.
         * @exception org.xml.sax.SAXNotRecognizedException If the property
         *            value can't be assigned or retrieved.
         * @exception org.xml.sax.SAXNotSupportedException When the
         *            {@link Validator} recognizes the property name but
         *            cannot set the requested value.
         * @throws NullPointerException
         *            When the name parameter is null.
         */
        public void setProperty (String name, Object objectJ)
        {// throws SAXNotRecognizedException, SAXNotSupportedException {
            if (name == null) throw new java.lang.NullPointerException ("the name parameter is null");
            throw new SAXNotRecognizedException (name);
        }

        /**
         * Look up the value of a property.
         *
         * <p>The property name is any fully-qualified URI. It is possible for
         * a {@link Validator} to recognize a property name but temporarily be
         * unable to return its value. Some property values may be available
         * only in specific contexts, such as before, during, or after a
         * validation.</p>
         *
         * <p>{@link Validator}s are not required to recognize any specific
         * property names.</p>
         *
         * <p>Implementors are free (and encouraged) to invent their own
         * properties, using names built on their own URIs.</p>
         *
         * @param name The property name, which is a non-null fully-qualified URI.
         * @return The current value of the property.
         * @exception org.xml.sax.SAXNotRecognizedException If the property
         *            value can't be assigned or retrieved.
         * @exception org.xml.sax.SAXNotSupportedException When the
         *            XMLReader recognizes the property name but
         *            cannot determine its value at this time.
* @throws NullPointerException * When the name parameter is null. * @see #setProperty(String, Object) */ public Object getProperty (String name) {//throws SAXNotRecognizedException, SAXNotSupportedException { if (name == null) throw new java.lang.NullPointerException ("the name parameter is null"); throw new SAXNotRecognizedException (name); } } }
sailesh341/JavApi
JavApi_Core/javax/xml/validation/Validator.cs
C#
apache-2.0
17,647
/*
 * Copyright 2000-2016 Vaadin Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.vaadin.data;

import java.io.Serializable;
import java.util.Objects;
import java.util.Optional;

import com.vaadin.server.SerializableConsumer;
import com.vaadin.server.SerializableFunction;
import com.vaadin.server.SerializableSupplier;

/**
 * Represents the outcome of an operation that might fail, such as type
 * conversion. A result holds either a value, signifying success, or an error
 * message, signifying failure — never both.
 * <p>
 * Instances are obtained through the factory methods {@link #ok(Object)} and
 * {@link #error(String)}, denoting success and failure respectively.
 * <p>
 * Unless otherwise specified, {@code Result} method arguments cannot be null.
 *
 * @param <R>
 *            the result value type
 */
public interface Result<R> extends Serializable {

    /**
     * Returns a successful result wrapping the given value.
     *
     * @param <R>
     *            the result value type
     * @param value
     *            the result value, can be null
     * @return a successful result
     */
    static <R> Result<R> ok(R value) {
        return new SimpleResult<>(value, null);
    }

    /**
     * Returns a failure result wrapping the given error message.
     *
     * @param <R>
     *            the result value type
     * @param message
     *            the error message
     * @return a failure result
     */
    static <R> Result<R> error(String message) {
        Objects.requireNonNull(message, "message cannot be null");
        return new SimpleResult<>(null, message);
    }

    /**
     * Returns a Result representing the outcome of invoking the given
     * supplier. If the supplier returns normally, the returned Result wraps
     * its value; if it throws, the returned Result carries the message
     * produced by {@code onError} for the thrown exception.
     *
     * @param <R>
     *            the result value type
     * @param supplier
     *            the supplier to run
     * @param onError
     *            the function that maps an exception to an error message
     * @return the result of invoking the supplier
     */
    static <R> Result<R> of(SerializableSupplier<R> supplier,
            SerializableFunction<Exception, String> onError) {
        Objects.requireNonNull(supplier, "supplier cannot be null");
        Objects.requireNonNull(onError, "onError cannot be null");

        R value;
        try {
            value = supplier.get();
        } catch (Exception exception) {
            return error(onError.apply(exception));
        }
        return ok(value);
    }

    /**
     * If this Result has a value, returns a Result of applying the given
     * function to the value. Otherwise, returns a Result bearing the same
     * error as this one. Note that any exceptions thrown by the mapping
     * function are not wrapped but allowed to propagate.
     *
     * @param <S>
     *            the type of the mapped value
     * @param mapper
     *            the mapping function
     * @return the mapped result
     */
    default <S> Result<S> map(SerializableFunction<R, S> mapper) {
        return flatMap(original -> ok(mapper.apply(original)));
    }

    /**
     * If this Result has a value, applies the given Result-returning function
     * to the value. Otherwise, returns a Result bearing the same error as
     * this one. Note that any exceptions thrown by the mapping function are
     * not wrapped but allowed to propagate.
     *
     * @param <S>
     *            the type of the mapped value
     * @param mapper
     *            the mapping function
     * @return the mapped result
     */
    <S> Result<S> flatMap(SerializableFunction<R, Result<S>> mapper);

    /**
     * Invokes either the first callback or the second one, depending on
     * whether this Result denotes a success or a failure, respectively.
     *
     * @param ifOk
     *            the function to call if success
     * @param ifError
     *            the function to call if failure
     */
    void handle(SerializableConsumer<R> ifOk,
            SerializableConsumer<String> ifError);

    /**
     * Applies the {@code consumer} if result is not an error.
     *
     * @param consumer
     *            consumer to apply in case it's not an error
     */
    default void ifOk(SerializableConsumer<R> consumer) {
        handle(consumer, unusedMessage -> {
        });
    }

    /**
     * Applies the {@code consumer} if result is an error.
     *
     * @param consumer
     *            consumer to apply in case it's an error
     */
    default void ifError(SerializableConsumer<String> consumer) {
        handle(unusedValue -> {
        }, consumer);
    }

    /**
     * Checks if the result denotes an error.
     *
     * @return <code>true</code> if the result denotes an error,
     *         <code>false</code> otherwise
     */
    boolean isError();

    /**
     * Returns an Optional of the result message, or an empty Optional if
     * none.
     *
     * @return the optional message
     */
    Optional<String> getMessage();

    /**
     * Returns the value if the result denotes success, otherwise throws an
     * exception created by the provided supplier.
     *
     * @param <X>
     *            the type of the exception to be thrown
     * @param exceptionProvider
     *            the provider which will return the exception to be thrown
     *            based on the given error message
     * @return the value
     * @throws X
     *             if this result denotes an error
     */
    <X extends Throwable> R getOrThrow(
            SerializableFunction<String, ? extends X> exceptionProvider)
            throws X;
}
Legioth/vaadin
server/src/main/java/com/vaadin/data/Result.java
Java
apache-2.0
6,251
/* * Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.product.swap; import java.io.Serializable; import java.time.LocalDate; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import org.joda.beans.Bean; import org.joda.beans.ImmutableBean; import org.joda.beans.JodaBeanUtils; import org.joda.beans.MetaBean; import org.joda.beans.MetaProperty; import org.joda.beans.gen.BeanDefinition; import org.joda.beans.gen.DerivedProperty; import org.joda.beans.gen.PropertyDefinition; import org.joda.beans.impl.direct.DirectFieldsBeanBuilder; import org.joda.beans.impl.direct.DirectMetaBean; import org.joda.beans.impl.direct.DirectMetaProperty; import org.joda.beans.impl.direct.DirectMetaPropertyMap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.opengamma.strata.basics.ReferenceData; import com.opengamma.strata.basics.ReferenceDataNotFoundException; import com.opengamma.strata.basics.currency.Currency; import com.opengamma.strata.basics.date.AdjustableDate; import com.opengamma.strata.basics.date.DayCount; import com.opengamma.strata.basics.index.Index; import com.opengamma.strata.basics.schedule.PeriodicSchedule; import com.opengamma.strata.basics.schedule.Schedule; import com.opengamma.strata.product.common.PayReceive; /** * A rate swap leg defined using a parameterized schedule and calculation. * <p> * This defines a single swap leg paying a rate, such as an interest rate. * The rate may be fixed or floating, see {@link FixedRateCalculation}, * {@link IborRateCalculation} and {@link OvernightRateCalculation}. * <p> * Interest is calculated based on <i>accrual periods</i> which follow a regular schedule * with optional initial and final stubs. Coupon payments are based on <i>payment periods</i> * which are typically the same as the accrual periods. 
* If the payment period is longer than the accrual period then compounding may apply. * The schedule of periods is defined using {@link PeriodicSchedule}, {@link PaymentSchedule}, * {@link NotionalSchedule} and {@link ResetSchedule}. * <p> * If the schedule needs to be manually specified, or there are other unusual calculation * rules then the {@link RatePeriodSwapLeg} class should be used instead. */ @BeanDefinition public final class RateCalculationSwapLeg implements ScheduledSwapLeg, ImmutableBean, Serializable { /** * Whether the leg is pay or receive. * <p> * A value of 'Pay' implies that the resulting amount is paid to the counterparty. * A value of 'Receive' implies that the resulting amount is received from the counterparty. * Note that negative interest rates can result in a payment in the opposite * direction to that implied by this indicator. */ @PropertyDefinition(validate = "notNull", overrideGet = true) private final PayReceive payReceive; /** * The accrual schedule. * <p> * This is used to define the accrual periods. * These are used directly or indirectly to determine other dates in the swap. */ @PropertyDefinition(validate = "notNull", overrideGet = true) private final PeriodicSchedule accrualSchedule; /** * The payment schedule. * <p> * This is used to define the payment periods, including any compounding. * The payment period dates are based on the accrual schedule. */ @PropertyDefinition(validate = "notNull", overrideGet = true) private final PaymentSchedule paymentSchedule; /** * The notional schedule. * <p> * The notional amount schedule, which can vary during the lifetime of the swap. * In most cases, the notional amount is not exchanged, with only the net difference being exchanged. * However, in certain cases, initial, final or intermediate amounts are exchanged. */ @PropertyDefinition(validate = "notNull") private final NotionalSchedule notionalSchedule; /** * The interest rate accrual calculation. 
* <p> * Different kinds of swap leg are determined by the subclass used here. * See {@link FixedRateCalculation}, {@link IborRateCalculation} and {@link OvernightRateCalculation}. */ @PropertyDefinition(validate = "notNull") private final RateCalculation calculation; //------------------------------------------------------------------------- @Override @DerivedProperty public SwapLegType getType() { return calculation.getType(); } @Override @DerivedProperty public AdjustableDate getStartDate() { return accrualSchedule.calculatedStartDate(); } @Override @DerivedProperty public AdjustableDate getEndDate() { return accrualSchedule.calculatedEndDate(); } @Override @DerivedProperty public Currency getCurrency() { return notionalSchedule.getCurrency(); } @Override public void collectCurrencies(ImmutableSet.Builder<Currency> builder) { builder.add(getCurrency()); calculation.collectCurrencies(builder); notionalSchedule.getFxReset().ifPresent(fxReset -> builder.add(fxReset.getReferenceCurrency())); } @Override public void collectIndices(ImmutableSet.Builder<Index> builder) { calculation.collectIndices(builder); notionalSchedule.getFxReset().ifPresent(fxReset -> builder.add(fxReset.getIndex())); } //------------------------------------------------------------------------- /** * Returns an instance based on this leg with the start date replaced. * <p> * This uses {@link PeriodicSchedule#replaceStartDate(LocalDate)}. * * @throws IllegalArgumentException if the start date cannot be replaced with the proposed start date */ @Override public RateCalculationSwapLeg replaceStartDate(LocalDate adjustedStartDate) { return toBuilder().accrualSchedule(accrualSchedule.replaceStartDate(adjustedStartDate)).build(); } /** * Converts this swap leg to the equivalent {@code ResolvedSwapLeg}. * <p> * An {@link ResolvedSwapLeg} represents the same data as this leg, but with * a complete schedule of dates defined using {@link RatePaymentPeriod}. 
* * @return the equivalent resolved swap leg * @throws ReferenceDataNotFoundException if an identifier cannot be resolved in the reference data * @throws RuntimeException if unable to resolve due to an invalid swap schedule or definition */ @Override public ResolvedSwapLeg resolve(ReferenceData refData) { DayCount dayCount = calculation.getDayCount(); Schedule resolvedAccruals = accrualSchedule.createSchedule(refData); Schedule resolvedPayments = paymentSchedule.createSchedule(resolvedAccruals, refData); List<RateAccrualPeriod> accrualPeriods = calculation.createAccrualPeriods(resolvedAccruals, resolvedPayments, refData); List<NotionalPaymentPeriod> payPeriods = paymentSchedule.createPaymentPeriods( resolvedAccruals, resolvedPayments, accrualPeriods, dayCount, notionalSchedule, payReceive, refData); LocalDate startDate = accrualPeriods.get(0).getStartDate(); ImmutableList<SwapPaymentEvent> payEvents = notionalSchedule.createEvents(payPeriods, startDate, refData); return new ResolvedSwapLeg(getType(), payReceive, payPeriods, payEvents, getCurrency()); } //------------------------- AUTOGENERATED START ------------------------- /** * The meta-bean for {@code RateCalculationSwapLeg}. * @return the meta-bean, not null */ public static RateCalculationSwapLeg.Meta meta() { return RateCalculationSwapLeg.Meta.INSTANCE; } static { MetaBean.register(RateCalculationSwapLeg.Meta.INSTANCE); } /** * The serialization version id. */ private static final long serialVersionUID = 1L; /** * Returns a builder used to create an instance of the bean. 
* @return the builder, not null */ public static RateCalculationSwapLeg.Builder builder() { return new RateCalculationSwapLeg.Builder(); } private RateCalculationSwapLeg( PayReceive payReceive, PeriodicSchedule accrualSchedule, PaymentSchedule paymentSchedule, NotionalSchedule notionalSchedule, RateCalculation calculation) { JodaBeanUtils.notNull(payReceive, "payReceive"); JodaBeanUtils.notNull(accrualSchedule, "accrualSchedule"); JodaBeanUtils.notNull(paymentSchedule, "paymentSchedule"); JodaBeanUtils.notNull(notionalSchedule, "notionalSchedule"); JodaBeanUtils.notNull(calculation, "calculation"); this.payReceive = payReceive; this.accrualSchedule = accrualSchedule; this.paymentSchedule = paymentSchedule; this.notionalSchedule = notionalSchedule; this.calculation = calculation; } @Override public RateCalculationSwapLeg.Meta metaBean() { return RateCalculationSwapLeg.Meta.INSTANCE; } //----------------------------------------------------------------------- /** * Gets whether the leg is pay or receive. * <p> * A value of 'Pay' implies that the resulting amount is paid to the counterparty. * A value of 'Receive' implies that the resulting amount is received from the counterparty. * Note that negative interest rates can result in a payment in the opposite * direction to that implied by this indicator. * @return the value of the property, not null */ @Override public PayReceive getPayReceive() { return payReceive; } //----------------------------------------------------------------------- /** * Gets the accrual schedule. * <p> * This is used to define the accrual periods. * These are used directly or indirectly to determine other dates in the swap. * @return the value of the property, not null */ @Override public PeriodicSchedule getAccrualSchedule() { return accrualSchedule; } //----------------------------------------------------------------------- /** * Gets the payment schedule. * <p> * This is used to define the payment periods, including any compounding. 
* The payment period dates are based on the accrual schedule. * @return the value of the property, not null */ @Override public PaymentSchedule getPaymentSchedule() { return paymentSchedule; } //----------------------------------------------------------------------- /** * Gets the notional schedule. * <p> * The notional amount schedule, which can vary during the lifetime of the swap. * In most cases, the notional amount is not exchanged, with only the net difference being exchanged. * However, in certain cases, initial, final or intermediate amounts are exchanged. * @return the value of the property, not null */ public NotionalSchedule getNotionalSchedule() { return notionalSchedule; } //----------------------------------------------------------------------- /** * Gets the interest rate accrual calculation. * <p> * Different kinds of swap leg are determined by the subclass used here. * See {@link FixedRateCalculation}, {@link IborRateCalculation} and {@link OvernightRateCalculation}. * @return the value of the property, not null */ public RateCalculation getCalculation() { return calculation; } //----------------------------------------------------------------------- /** * Returns a builder that allows this bean to be mutated. 
* @return the mutable builder, not null */ public Builder toBuilder() { return new Builder(this); } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj != null && obj.getClass() == this.getClass()) { RateCalculationSwapLeg other = (RateCalculationSwapLeg) obj; return JodaBeanUtils.equal(payReceive, other.payReceive) && JodaBeanUtils.equal(accrualSchedule, other.accrualSchedule) && JodaBeanUtils.equal(paymentSchedule, other.paymentSchedule) && JodaBeanUtils.equal(notionalSchedule, other.notionalSchedule) && JodaBeanUtils.equal(calculation, other.calculation); } return false; } @Override public int hashCode() { int hash = getClass().hashCode(); hash = hash * 31 + JodaBeanUtils.hashCode(payReceive); hash = hash * 31 + JodaBeanUtils.hashCode(accrualSchedule); hash = hash * 31 + JodaBeanUtils.hashCode(paymentSchedule); hash = hash * 31 + JodaBeanUtils.hashCode(notionalSchedule); hash = hash * 31 + JodaBeanUtils.hashCode(calculation); return hash; } @Override public String toString() { StringBuilder buf = new StringBuilder(320); buf.append("RateCalculationSwapLeg{"); buf.append("payReceive").append('=').append(JodaBeanUtils.toString(payReceive)).append(',').append(' '); buf.append("accrualSchedule").append('=').append(JodaBeanUtils.toString(accrualSchedule)).append(',').append(' '); buf.append("paymentSchedule").append('=').append(JodaBeanUtils.toString(paymentSchedule)).append(',').append(' '); buf.append("notionalSchedule").append('=').append(JodaBeanUtils.toString(notionalSchedule)).append(',').append(' '); buf.append("calculation").append('=').append(JodaBeanUtils.toString(calculation)).append(',').append(' '); buf.append("type").append('=').append(JodaBeanUtils.toString(getType())).append(',').append(' '); buf.append("startDate").append('=').append(JodaBeanUtils.toString(getStartDate())).append(',').append(' '); buf.append("endDate").append('=').append(JodaBeanUtils.toString(getEndDate())).append(',').append(' '); 
buf.append("currency").append('=').append(JodaBeanUtils.toString(getCurrency())); buf.append('}'); return buf.toString(); } //----------------------------------------------------------------------- /** * The meta-bean for {@code RateCalculationSwapLeg}. */ public static final class Meta extends DirectMetaBean { /** * The singleton instance of the meta-bean. */ static final Meta INSTANCE = new Meta(); /** * The meta-property for the {@code payReceive} property. */ private final MetaProperty<PayReceive> payReceive = DirectMetaProperty.ofImmutable( this, "payReceive", RateCalculationSwapLeg.class, PayReceive.class); /** * The meta-property for the {@code accrualSchedule} property. */ private final MetaProperty<PeriodicSchedule> accrualSchedule = DirectMetaProperty.ofImmutable( this, "accrualSchedule", RateCalculationSwapLeg.class, PeriodicSchedule.class); /** * The meta-property for the {@code paymentSchedule} property. */ private final MetaProperty<PaymentSchedule> paymentSchedule = DirectMetaProperty.ofImmutable( this, "paymentSchedule", RateCalculationSwapLeg.class, PaymentSchedule.class); /** * The meta-property for the {@code notionalSchedule} property. */ private final MetaProperty<NotionalSchedule> notionalSchedule = DirectMetaProperty.ofImmutable( this, "notionalSchedule", RateCalculationSwapLeg.class, NotionalSchedule.class); /** * The meta-property for the {@code calculation} property. */ private final MetaProperty<RateCalculation> calculation = DirectMetaProperty.ofImmutable( this, "calculation", RateCalculationSwapLeg.class, RateCalculation.class); /** * The meta-property for the {@code type} property. */ private final MetaProperty<SwapLegType> type = DirectMetaProperty.ofDerived( this, "type", RateCalculationSwapLeg.class, SwapLegType.class); /** * The meta-property for the {@code startDate} property. 
*/ private final MetaProperty<AdjustableDate> startDate = DirectMetaProperty.ofDerived( this, "startDate", RateCalculationSwapLeg.class, AdjustableDate.class); /** * The meta-property for the {@code endDate} property. */ private final MetaProperty<AdjustableDate> endDate = DirectMetaProperty.ofDerived( this, "endDate", RateCalculationSwapLeg.class, AdjustableDate.class); /** * The meta-property for the {@code currency} property. */ private final MetaProperty<Currency> currency = DirectMetaProperty.ofDerived( this, "currency", RateCalculationSwapLeg.class, Currency.class); /** * The meta-properties. */ private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap( this, null, "payReceive", "accrualSchedule", "paymentSchedule", "notionalSchedule", "calculation", "type", "startDate", "endDate", "currency"); /** * Restricted constructor. */ private Meta() { } @Override protected MetaProperty<?> metaPropertyGet(String propertyName) { switch (propertyName.hashCode()) { case -885469925: // payReceive return payReceive; case 304659814: // accrualSchedule return accrualSchedule; case -1499086147: // paymentSchedule return paymentSchedule; case 1447860727: // notionalSchedule return notionalSchedule; case -934682935: // calculation return calculation; case 3575610: // type return type; case -2129778896: // startDate return startDate; case -1607727319: // endDate return endDate; case 575402001: // currency return currency; } return super.metaPropertyGet(propertyName); } @Override public RateCalculationSwapLeg.Builder builder() { return new RateCalculationSwapLeg.Builder(); } @Override public Class<? extends RateCalculationSwapLeg> beanType() { return RateCalculationSwapLeg.class; } @Override public Map<String, MetaProperty<?>> metaPropertyMap() { return metaPropertyMap$; } //----------------------------------------------------------------------- /** * The meta-property for the {@code payReceive} property. 
* @return the meta-property, not null */ public MetaProperty<PayReceive> payReceive() { return payReceive; } /** * The meta-property for the {@code accrualSchedule} property. * @return the meta-property, not null */ public MetaProperty<PeriodicSchedule> accrualSchedule() { return accrualSchedule; } /** * The meta-property for the {@code paymentSchedule} property. * @return the meta-property, not null */ public MetaProperty<PaymentSchedule> paymentSchedule() { return paymentSchedule; } /** * The meta-property for the {@code notionalSchedule} property. * @return the meta-property, not null */ public MetaProperty<NotionalSchedule> notionalSchedule() { return notionalSchedule; } /** * The meta-property for the {@code calculation} property. * @return the meta-property, not null */ public MetaProperty<RateCalculation> calculation() { return calculation; } /** * The meta-property for the {@code type} property. * @return the meta-property, not null */ public MetaProperty<SwapLegType> type() { return type; } /** * The meta-property for the {@code startDate} property. * @return the meta-property, not null */ public MetaProperty<AdjustableDate> startDate() { return startDate; } /** * The meta-property for the {@code endDate} property. * @return the meta-property, not null */ public MetaProperty<AdjustableDate> endDate() { return endDate; } /** * The meta-property for the {@code currency} property. 
* @return the meta-property, not null */ public MetaProperty<Currency> currency() { return currency; } //----------------------------------------------------------------------- @Override protected Object propertyGet(Bean bean, String propertyName, boolean quiet) { switch (propertyName.hashCode()) { case -885469925: // payReceive return ((RateCalculationSwapLeg) bean).getPayReceive(); case 304659814: // accrualSchedule return ((RateCalculationSwapLeg) bean).getAccrualSchedule(); case -1499086147: // paymentSchedule return ((RateCalculationSwapLeg) bean).getPaymentSchedule(); case 1447860727: // notionalSchedule return ((RateCalculationSwapLeg) bean).getNotionalSchedule(); case -934682935: // calculation return ((RateCalculationSwapLeg) bean).getCalculation(); case 3575610: // type return ((RateCalculationSwapLeg) bean).getType(); case -2129778896: // startDate return ((RateCalculationSwapLeg) bean).getStartDate(); case -1607727319: // endDate return ((RateCalculationSwapLeg) bean).getEndDate(); case 575402001: // currency return ((RateCalculationSwapLeg) bean).getCurrency(); } return super.propertyGet(bean, propertyName, quiet); } @Override protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) { metaProperty(propertyName); if (quiet) { return; } throw new UnsupportedOperationException("Property cannot be written: " + propertyName); } } //----------------------------------------------------------------------- /** * The bean-builder for {@code RateCalculationSwapLeg}. */ public static final class Builder extends DirectFieldsBeanBuilder<RateCalculationSwapLeg> { private PayReceive payReceive; private PeriodicSchedule accrualSchedule; private PaymentSchedule paymentSchedule; private NotionalSchedule notionalSchedule; private RateCalculation calculation; /** * Restricted constructor. */ private Builder() { } /** * Restricted copy constructor. 
* @param beanToCopy the bean to copy from, not null */ private Builder(RateCalculationSwapLeg beanToCopy) { this.payReceive = beanToCopy.getPayReceive(); this.accrualSchedule = beanToCopy.getAccrualSchedule(); this.paymentSchedule = beanToCopy.getPaymentSchedule(); this.notionalSchedule = beanToCopy.getNotionalSchedule(); this.calculation = beanToCopy.getCalculation(); } //----------------------------------------------------------------------- @Override public Object get(String propertyName) { switch (propertyName.hashCode()) { case -885469925: // payReceive return payReceive; case 304659814: // accrualSchedule return accrualSchedule; case -1499086147: // paymentSchedule return paymentSchedule; case 1447860727: // notionalSchedule return notionalSchedule; case -934682935: // calculation return calculation; default: throw new NoSuchElementException("Unknown property: " + propertyName); } } @Override public Builder set(String propertyName, Object newValue) { switch (propertyName.hashCode()) { case -885469925: // payReceive this.payReceive = (PayReceive) newValue; break; case 304659814: // accrualSchedule this.accrualSchedule = (PeriodicSchedule) newValue; break; case -1499086147: // paymentSchedule this.paymentSchedule = (PaymentSchedule) newValue; break; case 1447860727: // notionalSchedule this.notionalSchedule = (NotionalSchedule) newValue; break; case -934682935: // calculation this.calculation = (RateCalculation) newValue; break; default: throw new NoSuchElementException("Unknown property: " + propertyName); } return this; } @Override public Builder set(MetaProperty<?> property, Object value) { super.set(property, value); return this; } @Override public RateCalculationSwapLeg build() { return new RateCalculationSwapLeg( payReceive, accrualSchedule, paymentSchedule, notionalSchedule, calculation); } //----------------------------------------------------------------------- /** * Sets whether the leg is pay or receive. 
* <p> * A value of 'Pay' implies that the resulting amount is paid to the counterparty. * A value of 'Receive' implies that the resulting amount is received from the counterparty. * Note that negative interest rates can result in a payment in the opposite * direction to that implied by this indicator. * @param payReceive the new value, not null * @return this, for chaining, not null */ public Builder payReceive(PayReceive payReceive) { JodaBeanUtils.notNull(payReceive, "payReceive"); this.payReceive = payReceive; return this; } /** * Sets the accrual schedule. * <p> * This is used to define the accrual periods. * These are used directly or indirectly to determine other dates in the swap. * @param accrualSchedule the new value, not null * @return this, for chaining, not null */ public Builder accrualSchedule(PeriodicSchedule accrualSchedule) { JodaBeanUtils.notNull(accrualSchedule, "accrualSchedule"); this.accrualSchedule = accrualSchedule; return this; } /** * Sets the payment schedule. * <p> * This is used to define the payment periods, including any compounding. * The payment period dates are based on the accrual schedule. * @param paymentSchedule the new value, not null * @return this, for chaining, not null */ public Builder paymentSchedule(PaymentSchedule paymentSchedule) { JodaBeanUtils.notNull(paymentSchedule, "paymentSchedule"); this.paymentSchedule = paymentSchedule; return this; } /** * Sets the notional schedule. * <p> * The notional amount schedule, which can vary during the lifetime of the swap. * In most cases, the notional amount is not exchanged, with only the net difference being exchanged. * However, in certain cases, initial, final or intermediate amounts are exchanged. 
* @param notionalSchedule the new value, not null * @return this, for chaining, not null */ public Builder notionalSchedule(NotionalSchedule notionalSchedule) { JodaBeanUtils.notNull(notionalSchedule, "notionalSchedule"); this.notionalSchedule = notionalSchedule; return this; } /** * Sets the interest rate accrual calculation. * <p> * Different kinds of swap leg are determined by the subclass used here. * See {@link FixedRateCalculation}, {@link IborRateCalculation} and {@link OvernightRateCalculation}. * @param calculation the new value, not null * @return this, for chaining, not null */ public Builder calculation(RateCalculation calculation) { JodaBeanUtils.notNull(calculation, "calculation"); this.calculation = calculation; return this; } //----------------------------------------------------------------------- @Override public String toString() { StringBuilder buf = new StringBuilder(320); buf.append("RateCalculationSwapLeg.Builder{"); buf.append("payReceive").append('=').append(JodaBeanUtils.toString(payReceive)).append(',').append(' '); buf.append("accrualSchedule").append('=').append(JodaBeanUtils.toString(accrualSchedule)).append(',').append(' '); buf.append("paymentSchedule").append('=').append(JodaBeanUtils.toString(paymentSchedule)).append(',').append(' '); buf.append("notionalSchedule").append('=').append(JodaBeanUtils.toString(notionalSchedule)).append(',').append(' '); buf.append("calculation").append('=').append(JodaBeanUtils.toString(calculation)).append(',').append(' '); buf.append("type").append('=').append(JodaBeanUtils.toString(null)).append(',').append(' '); buf.append("startDate").append('=').append(JodaBeanUtils.toString(null)).append(',').append(' '); buf.append("endDate").append('=').append(JodaBeanUtils.toString(null)).append(',').append(' '); buf.append("currency").append('=').append(JodaBeanUtils.toString(null)); buf.append('}'); return buf.toString(); } } //-------------------------- AUTOGENERATED END -------------------------- }
OpenGamma/Strata
modules/product/src/main/java/com/opengamma/strata/product/swap/RateCalculationSwapLeg.java
Java
apache-2.0
28,501
import difference from 'lodash/difference';

// Action type constants for the lifted (history) store.
export const ActionTypes = {
  PERFORM_ACTION: 'PERFORM_ACTION',
  RESET: 'RESET',
  ROLLBACK: 'ROLLBACK',
  COMMIT: 'COMMIT',
  SWEEP: 'SWEEP',
  TOGGLE_ACTION: 'TOGGLE_ACTION',
  JUMP_TO_STATE: 'JUMP_TO_STATE',
  IMPORT_STATE: 'IMPORT_STATE'
};

/**
 * Action creators to change the History state.
 * Each creator returns a plain lifted action; several also stamp the
 * creation time with Date.now() so monitors can display timestamps.
 */
export const ActionCreators = {
  // Wraps an app action so it can be recorded in the history log.
  // Rejects actions without a "type" property, mirroring Redux's own check.
  performAction(action) {
    if (typeof action.type === 'undefined') {
      throw new Error(
        'Actions may not have an undefined "type" property. ' +
        'Have you misspelled a constant?'
      );
    }
    return { type: ActionTypes.PERFORM_ACTION, action, timestamp: Date.now() };
  },

  reset() {
    return { type: ActionTypes.RESET, timestamp: Date.now() };
  },

  rollback() {
    return { type: ActionTypes.ROLLBACK, timestamp: Date.now() };
  },

  commit() {
    return { type: ActionTypes.COMMIT, timestamp: Date.now() };
  },

  sweep() {
    return { type: ActionTypes.SWEEP };
  },

  toggleAction(id) {
    return { type: ActionTypes.TOGGLE_ACTION, id };
  },

  jumpToState(index) {
    return { type: ActionTypes.JUMP_TO_STATE, index };
  },

  importState(nextLiftedState) {
    return { type: ActionTypes.IMPORT_STATE, nextLiftedState };
  }
};

// Sentinel action used to seed the history log (slot 0 of actionsById).
const INIT_ACTION = { type: '@@INIT' };

/**
 * Computes the next entry in the log by applying an action.
 * Never throws: a reducer error is captured as a string in the entry's
 * `error` field, and entries downstream of an error are marked as
 * interrupted rather than recomputed.
 */
function computeNextEntry(reducer, action, state, error) {
  if (error) {
    return { state, error: 'Interrupted by an error up the chain' };
  }

  let nextState = state;
  let nextError;
  try {
    nextState = reducer(state, action);
  } catch (err) {
    nextError = err.toString();
    if (typeof window === 'object' && typeof window.chrome !== 'undefined') {
      // In Chrome, rethrowing provides better source map support
      setTimeout(() => { throw err; });
    } else {
      console.error(err);
    }
  }

  return { state: nextState, error: nextError };
}

/**
 * Runs the reducer on invalidated actions to get a fresh computation log.
 */
function recomputeStates(
  computedStates,
  minInvalidatedStateIndex,
  reducer,
  committedState,
  actionsById,
  stagedActionIds,
  skippedActionIds
) {
  // Optimization: exit early and return the same reference
  // if we know nothing could have changed.
  if (
    minInvalidatedStateIndex >= computedStates.length &&
    computedStates.length === stagedActionIds.length
  ) {
    return computedStates;
  }

  // Keep the still-valid prefix, then recompute everything after it.
  const nextComputedStates = computedStates.slice(0, minInvalidatedStateIndex);
  for (let i = minInvalidatedStateIndex; i < stagedActionIds.length; i++) {
    const actionId = stagedActionIds[i];
    const action = actionsById[actionId].action;

    // The first recomputed entry chains off the committed state.
    const previousEntry = nextComputedStates[i - 1];
    const previousState = previousEntry ? previousEntry.state : committedState;
    const previousError = previousEntry ? previousEntry.error : undefined;

    // A skipped action is a no-op: reuse the previous entry as-is.
    const shouldSkip = skippedActionIds.indexOf(actionId) > -1;
    const entry = shouldSkip ?
      previousEntry :
      computeNextEntry(reducer, action, previousState, previousError);

    nextComputedStates.push(entry);
  }

  return nextComputedStates;
}

/**
 * Lifts an app's action into an action on the lifted store.
 */
function liftAction(action) {
  return ActionCreators.performAction(action);
}

/**
 * Creates a history state reducer from an app's reducer.
 * The returned reducer manages the lifted state shape: the log of staged
 * actions, which of them are skipped, the committed base state, the
 * memoized computed states, and the monitor's own state.
 */
function liftReducerWith(reducer, initialCommittedState, monitorReducer) {
  const initialLiftedState = {
    monitorState: monitorReducer(undefined, {}),
    nextActionId: 1,
    actionsById: { 0: liftAction(INIT_ACTION) },
    stagedActionIds: [0],
    skippedActionIds: [],
    committedState: initialCommittedState,
    currentStateIndex: 0,
    computedStates: []
  };

  /**
   * Manages how the history actions modify the history state.
   */
  return (liftedState = initialLiftedState, liftedAction) => {
    let {
      monitorState,
      actionsById,
      nextActionId,
      stagedActionIds,
      skippedActionIds,
      committedState,
      currentStateIndex,
      computedStates
    } = liftedState;

    // By default, aggressively recompute every state whatever happens.
    // This has O(n) performance, so we'll override this to a sensible
    // value whenever we feel like we don't have to recompute the states.
    let minInvalidatedStateIndex = 0;

    switch (liftedAction.type) {
      case ActionTypes.RESET: {
        // Get back to the state the store was created with.
        actionsById = { 0: liftAction(INIT_ACTION) };
        nextActionId = 1;
        stagedActionIds = [0];
        skippedActionIds = [];
        committedState = initialCommittedState;
        currentStateIndex = 0;
        computedStates = [];
        break;
      }
      case ActionTypes.COMMIT: {
        // Consider the last committed state the new starting point.
        // Squash any staged actions into a single committed state.
        actionsById = { 0: liftAction(INIT_ACTION) };
        nextActionId = 1;
        stagedActionIds = [0];
        skippedActionIds = [];
        committedState = computedStates[currentStateIndex].state;
        currentStateIndex = 0;
        computedStates = [];
        break;
      }
      case ActionTypes.ROLLBACK: {
        // Forget about any staged actions.
        // Start again from the last committed state.
        actionsById = { 0: liftAction(INIT_ACTION) };
        nextActionId = 1;
        stagedActionIds = [0];
        skippedActionIds = [];
        currentStateIndex = 0;
        computedStates = [];
        break;
      }
      case ActionTypes.TOGGLE_ACTION: {
        // Toggle whether an action with given ID is skipped.
        // Being skipped means it is a no-op during the computation.
        const { id: actionId } = liftedAction;
        const index = skippedActionIds.indexOf(actionId);
        if (index === -1) {
          skippedActionIds = [actionId, ...skippedActionIds];
        } else {
          skippedActionIds = skippedActionIds.filter(id => id !== actionId);
        }
        // Optimization: we know history before this action hasn't changed
        minInvalidatedStateIndex = stagedActionIds.indexOf(actionId);
        break;
      }
      case ActionTypes.JUMP_TO_STATE: {
        // Without recomputing anything, move the pointer that tells us
        // which state is considered the current one. Useful for sliders.
        currentStateIndex = liftedAction.index;
        // Optimization: we know the history has not changed.
        minInvalidatedStateIndex = Infinity;
        break;
      }
      case ActionTypes.SWEEP: {
        // Forget any actions that are currently being skipped.
        stagedActionIds = difference(stagedActionIds, skippedActionIds);
        skippedActionIds = [];
        currentStateIndex = Math.min(currentStateIndex, stagedActionIds.length - 1);
        break;
      }
      case ActionTypes.PERFORM_ACTION: {
        // Only auto-advance the pointer if we were already at the tip.
        if (currentStateIndex === stagedActionIds.length - 1) {
          currentStateIndex++;
        }
        const actionId = nextActionId++;
        // Mutation! This is the hottest path, and we optimize on purpose.
        // It is safe because we set a new key in a cache dictionary.
        actionsById[actionId] = liftedAction;
        stagedActionIds = [...stagedActionIds, actionId];
        // Optimization: we know that only the new action needs computing.
        minInvalidatedStateIndex = stagedActionIds.length - 1;
        break;
      }
      case ActionTypes.IMPORT_STATE: {
        // Completely replace everything.
        ({
          monitorState,
          actionsById,
          nextActionId,
          stagedActionIds,
          skippedActionIds,
          committedState,
          currentStateIndex,
          computedStates
        } = liftedAction.nextLiftedState);
        break;
      }
      case '@@redux/INIT': {
        // Always recompute states on hot reload and init.
        minInvalidatedStateIndex = 0;
        break;
      }
      default: {
        // If the action is not recognized, it's a monitor action.
        // Optimization: a monitor action can't change history.
        minInvalidatedStateIndex = Infinity;
        break;
      }
    }

    computedStates = recomputeStates(
      computedStates,
      minInvalidatedStateIndex,
      reducer,
      committedState,
      actionsById,
      stagedActionIds,
      skippedActionIds
    );

    monitorState = monitorReducer(monitorState, liftedAction);
    return {
      monitorState,
      actionsById,
      nextActionId,
      stagedActionIds,
      skippedActionIds,
      committedState,
      currentStateIndex,
      computedStates
    };
  };
}

/**
 * Provides an app's view into the state of the lifted store.
 */
function unliftState(liftedState) {
  const { computedStates, currentStateIndex } = liftedState;
  const { state } = computedStates[currentStateIndex];
  return state;
}

/**
 * Provides an app's view into the lifted store.
*/ function unliftStore(liftedStore, liftReducer) { let lastDefinedState; return { ...liftedStore, liftedStore, dispatch(action) { liftedStore.dispatch(liftAction(action)); return action; }, getState() { const state = unliftState(liftedStore.getState()); if (state !== undefined) { lastDefinedState = state; } return lastDefinedState; }, replaceReducer(nextReducer) { liftedStore.replaceReducer(liftReducer(nextReducer)); } }; } /** * Redux instrumentation store enhancer. */ export default function instrument(monitorReducer = () => null) { return createStore => (reducer, initialState, enhancer) => { function liftReducer(r) { if (typeof r !== 'function') { if (r && typeof r.default === 'function') { throw new Error( 'Expected the reducer to be a function. ' + 'Instead got an object with a "default" field. ' + 'Did you pass a module instead of the default export? ' + 'Try passing require(...).default instead.' ); } throw new Error('Expected the reducer to be a function.'); } return liftReducerWith(r, initialState, monitorReducer); } const liftedStore = createStore(liftReducer(reducer), enhancer); if (liftedStore.liftedStore) { throw new Error( 'DevTools instrumentation should not be applied more than once. ' + 'Check your store configuration.' ); } return unliftStore(liftedStore, liftReducer); }; }
Maxwelloff/react-football
node_modules/redux-devtools/src/instrument.js
JavaScript
apache-2.0
10,577
package com.uservoice.uservoicesdk.ui;

import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.net.Uri;
import android.support.v4.app.FragmentActivity;
import android.util.TypedValue;
import android.view.View;
import android.webkit.WebChromeClient;
import android.webkit.WebSettings.PluginState;
import android.webkit.WebView;
import android.widget.ImageView;
import android.widget.TextView;

import com.uservoice.uservoicesdk.R;
import com.uservoice.uservoicesdk.Session;
import com.uservoice.uservoicesdk.activity.TopicActivity;
import com.uservoice.uservoicesdk.dialog.ArticleDialogFragment;
import com.uservoice.uservoicesdk.dialog.SuggestionDialogFragment;
import com.uservoice.uservoicesdk.model.Article;
import com.uservoice.uservoicesdk.model.BaseModel;
import com.uservoice.uservoicesdk.model.Suggestion;
import com.uservoice.uservoicesdk.model.Topic;

import java.util.Locale;

/**
 * Static UI helpers for the UserVoice SDK: rendering knowledge-base articles
 * in a WebView, theming checks, list-row binding for instant answers, and
 * navigation to the dialog/activity that displays a model object.
 */
public class Utils {

    /**
     * Renders an article's HTML into the given WebView.
     * Builds a full HTML document around the article body (title as an h3,
     * UserVoice's typeset stylesheet, dark-theme overrides when applicable)
     * and loads it via a data: URL with JavaScript and plugins enabled.
     */
    @SuppressLint("SetJavaScriptEnabled")
    public static void displayArticle(WebView webView, Article article, Context context) {
        String styles = "iframe, img { width: 100%; }";
        if (isDarkTheme(context)) {
            // Match the WebView background to the dark theme and invert text colors.
            webView.setBackgroundColor(Color.BLACK);
            styles += "body { background-color: #000000; color: #F6F6F6; } a { color: #0099FF; }";
        }
        String html = String.format("<html><head><meta charset=\"utf-8\"><link rel=\"stylesheet\" type=\"text/css\" href=\"http://cdn.uservoice.com/stylesheets/vendor/typeset.css\"/><style>%s</style></head><body class=\"typeset\" style=\"font-family: sans-serif; margin: 1em\"><h3>%s</h3>%s</body></html>", styles, article.getTitle(), article.getHtml());
        webView.setWebChromeClient(new WebChromeClient());
        webView.getSettings().setJavaScriptEnabled(true);
        webView.getSettings().setPluginState(PluginState.ON);
        // Inline the document as a data: URL; Uri.encode escapes it for the URL.
        webView.loadUrl(String.format("data:text/html;charset=utf-8,%s", Uri.encode(html)));
    }

    /**
     * Heuristic dark-theme detection: resolves the theme's primary text color
     * and treats the theme as dark when that color is bright (HSV value > 0.5).
     */
    public static boolean isDarkTheme(Context context) {
        TypedValue tv = new TypedValue();
        float[] hsv = new float[3];
        context.getTheme().resolveAttribute(android.R.attr.textColorPrimary, tv, true);
        Color.colorToHSV(context.getResources().getColor(tv.resourceId), hsv);
        return hsv[2] > 0.5f;
    }

    /**
     * Formats a count plus its plural-aware label, e.g. "1,234 ideas".
     * NOTE(review): %,d groups digits using the default locale (hence the
     * DefaultLocale suppression) — confirm this is intended for all locales.
     */
    @SuppressLint("DefaultLocale")
    public static String getQuantityString(View view, int id, int count) {
        return String.format("%,d %s", count, view.getContext().getResources().getQuantityString(id, count));
    }

    /**
     * Binds an instant-answer list row for either an Article or a Suggestion:
     * sets the icon, title, and detail line, and shows the status badge
     * (text + color) for suggestions that have a status.
     */
    public static void displayInstantAnswer(View view, BaseModel model) {
        TextView title = (TextView) view.findViewById(R.id.uv_title);
        TextView detail = (TextView) view.findViewById(R.id.uv_detail);
        View suggestionDetails = view.findViewById(R.id.uv_suggestion_details);
        ImageView image = (ImageView) view.findViewById(R.id.uv_icon);
        if (model instanceof Article) {
            Article article = (Article) model;
            image.setImageResource(R.drawable.uv_article);
            title.setText(article.getTitle());
            // Articles show their topic name as the detail line, when present.
            if (article.getTopicName() != null) {
                detail.setVisibility(View.VISIBLE);
                detail.setText(article.getTopicName());
            } else {
                detail.setVisibility(View.GONE);
            }
            suggestionDetails.setVisibility(View.GONE);
        } else if (model instanceof Suggestion) {
            Suggestion suggestion = (Suggestion) model;
            image.setImageResource(R.drawable.uv_idea);
            title.setText(suggestion.getTitle());
            detail.setVisibility(View.VISIBLE);
            detail.setText(suggestion.getForumName());
            if (suggestion.getStatus() != null) {
                View statusColor = suggestionDetails.findViewById(R.id.uv_suggestion_status_color);
                TextView status = (TextView) suggestionDetails.findViewById(R.id.uv_suggestion_status);
                // Status color comes from the server as a parseable color string.
                int color = Color.parseColor(suggestion.getStatusColor());
                suggestionDetails.setVisibility(View.VISIBLE);
                status.setText(suggestion.getStatus().toUpperCase(Locale.getDefault()));
                status.setTextColor(color);
                statusColor.setBackgroundColor(color);
            } else {
                suggestionDetails.setVisibility(View.GONE);
            }
        }
    }

    /**
     * Opens the appropriate UI for a model object: a dialog fragment for
     * articles and suggestions, or the TopicActivity for topics (storing the
     * selected topic on the Session first).
     */
    public static void showModel(FragmentActivity context, BaseModel model) {
        if (model instanceof Article) {
            ArticleDialogFragment fragment = new ArticleDialogFragment((Article) model);
            fragment.show(context.getSupportFragmentManager(), "ArticleDialogFragment");
        } else if (model instanceof Suggestion) {
            SuggestionDialogFragment fragment = new SuggestionDialogFragment((Suggestion) model);
            fragment.show(context.getSupportFragmentManager(), "SuggestionDialogFragment");
        } else if (model instanceof Topic) {
            Session.getInstance().setTopic((Topic) model);
            context.startActivity(new Intent(context, TopicActivity.class));
        }
    }
}
goodev/android-discourse
userVoiceSDK/src/main/java/com/uservoice/uservoicesdk/ui/Utils.java
Java
apache-2.0
5,192
/* Copyright 2016 Mikolaj Stefaniak * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package pl.surreal.finance.transaction.resources; import java.io.InputStream; import java.net.URI; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.NoSuchElementException; import java.util.Objects; import java.util.Optional; import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; import javax.validation.constraints.Pattern; import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.InternalServerErrorException; import javax.ws.rs.NotFoundException; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.Link; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; import org.glassfish.jersey.media.multipart.FormDataParam; import org.hibernate.exception.ConstraintViolationException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.codahale.metrics.annotation.Timed; import io.dropwizard.hibernate.UnitOfWork; import io.dropwizard.jersey.params.LongParam; import io.swagger.annotations.Api; import pl.surreal.finance.transaction.api.AccountApi; import 
pl.surreal.finance.transaction.api.CardApi; import pl.surreal.finance.transaction.api.CardOperationApi; import pl.surreal.finance.transaction.api.ImportResult; import pl.surreal.finance.transaction.api.ImportType; import pl.surreal.finance.transaction.api.LabelResultApi; import pl.surreal.finance.transaction.api.TransactionApi; import pl.surreal.finance.transaction.api.TransferApi; import pl.surreal.finance.transaction.core.CardOperation; import pl.surreal.finance.transaction.core.Label; import pl.surreal.finance.transaction.core.Transaction; import pl.surreal.finance.transaction.core.Transfer; import pl.surreal.finance.transaction.db.LabelDAO; import pl.surreal.finance.transaction.db.TransactionDAO; import pl.surreal.finance.transaction.labeler.ITransactionLabeler; import pl.surreal.finance.transaction.parser.IParserFactory; import pl.surreal.finance.transaction.parser.ITransactionParser; import pl.surreal.finance.transaction.parser.ParserSupportedType; @Path("/transactions") @Api(value = "transactions") @Produces(MediaType.APPLICATION_JSON) public class TransactionResource { private static final Logger LOGGER = LoggerFactory.getLogger(TransactionResource.class); private TransactionDAO transactionDAO; private LabelDAO labelDAO; private IParserFactory parserFactory; private ITransactionLabeler transactionLabeler; @Context private UriInfo uriInfo; public TransactionResource(TransactionDAO transactionDAO,LabelDAO labelDAO,IParserFactory parserFactory,ITransactionLabeler transactionLabeler) { this.transactionDAO = transactionDAO; this.parserFactory = parserFactory; this.transactionLabeler = transactionLabeler; this.labelDAO = labelDAO; } private TransactionApi mapDomainToApi(Transaction transaction) { TransactionApi transactionApi = new TransactionApi(); transactionApi.setId(transaction.getId()); transactionApi.setDate(transaction.getAccountingDate()); transactionApi.setAmount(transaction.getAccountingAmount()); 
transactionApi.setAccountingAmount(transaction.getAccountingAmount()); transactionApi.setCurrency(transaction.getCurrency()); transactionApi.setTitle(transaction.getTitle()); transactionApi.setType(transaction.getClass().getSimpleName()); List<Long> labelIds = new ArrayList<>(); for(Label label : transaction.getLabels()) { labelIds.add(label.getId()); } transactionApi.setLabelIds(labelIds); if(transaction instanceof CardOperation) { CardOperation cardOperTrans = (CardOperation)transaction; CardOperationApi cardOperApi = new CardOperationApi(); if(cardOperTrans.getCard()!=null) { cardOperApi.setCard(new CardApi(cardOperTrans.getCard().getNumber(),(cardOperTrans.getCard().getName()))); } cardOperApi.setDestination(cardOperTrans.getDestination()); transactionApi.setDetails(cardOperApi); } else if(transaction instanceof Transfer) { Transfer transferTrans = (Transfer)transaction; TransferApi transferApi = new TransferApi(); transferApi.setDescription(transferTrans.getDescription()); transferApi.setInternal(transferTrans.isInternal()); transferApi.setDirection(transferTrans.getDirection().toString()); if(transferTrans.getDstAccount()!=null) { transferApi.setDstAccount(new AccountApi(transferTrans.getDstAccount().getNumber(),transferTrans.getDstAccount().getName())); } if(transferTrans.getSrcAccount()!=null) { transferApi.setSrcAccount(new AccountApi(transferTrans.getSrcAccount().getNumber(),transferTrans.getSrcAccount().getName())); } transactionApi.setDetails(transferApi); } return transactionApi; } private Transaction mapApiToDomain(TransactionApi transactionApi,Transaction transaction) throws NotFoundException { Objects.requireNonNull(transaction); List<Label> labels = new ArrayList<>(); for(Long labelId : transactionApi.getLabelIds()) { Label label = labelDAO.findById(labelId).orElseThrow(() -> new NotFoundException("Label "+labelId+" not found.")); labels.add(label); } transaction.setLabels(labels); return transaction; } @GET @UnitOfWork @Timed public 
List<TransactionApi> get( @QueryParam("first") @Min(0) Integer first, @QueryParam("max") @Min(0) Integer max, @QueryParam("label") @Min(0) Integer labelId, @QueryParam("dateFrom") @Pattern(regexp="\\d{4}-\\d{2}-\\d{2}") String dateFromString, @QueryParam("dateTo") @Pattern(regexp="\\d{4}-\\d{2}-\\d{2}") String dateToString) { MultivaluedMap<String,String> queryParams = uriInfo.getQueryParameters(); HashMap<String,Object> queryAttributes = new HashMap<>(); for(String queryParam : queryParams.keySet()) { Object attrToAdd = queryParams.getFirst(queryParam); if(queryParam.equals("dateFrom") || queryParam.equals("dateTo")) { try { attrToAdd = new SimpleDateFormat("yyyy-MM-dd").parse((String)attrToAdd); } catch (ParseException e) { LOGGER.debug("Can't parse date string {}",dateFromString); continue; } } if(queryParam.equals("label")) { List<String> labelIDs = queryParams.get("label"); List<Label> labels = new ArrayList<>(); for(String id : labelIDs) { Label label = labelDAO.findById(Long.parseLong(id)).orElseThrow(() -> new NotFoundException("Label "+id+" not found.")); labels.add(label); } attrToAdd = labels; } queryAttributes.put(queryParam,attrToAdd); } ArrayList<TransactionApi> apiTransactions = new ArrayList<>(); for(Transaction transaction : transactionDAO.findAll(queryAttributes)) { TransactionApi transactionApi = mapDomainToApi(transaction); apiTransactions.add(transactionApi); } return apiTransactions; } @GET @Path("/{id}") @UnitOfWork public TransactionApi getById(@PathParam("id") LongParam id) { Transaction transaction = transactionDAO.findById(id.get()).orElseThrow(() -> new NotFoundException("Not found.")); TransactionApi transactionApi = mapDomainToApi(transaction); return transactionApi; } @PUT @Path("/{id}") @UnitOfWork public TransactionApi replace(@PathParam("id") LongParam id, TransactionApi transactionApi) { Transaction transaction = transactionDAO.findById(id.get()).orElseThrow(() -> new NotFoundException("Transaction not found.")); 
mapApiToDomain(transactionApi, transaction); transactionDAO.create(transaction); return transactionApi; } @POST @Path("/import") @Consumes(MediaType.MULTIPART_FORM_DATA) @UnitOfWork(transactional=false) @Timed public Response importTransactions( @NotNull @FormDataParam("file") InputStream uploadedInputStream, @NotNull @FormDataParam("file") FormDataContentDisposition fileDetail, @NotNull @FormDataParam("type") String fileType, @NotNull @FormDataParam("baseResourceId") String baseResourceId) { ITransactionParser parser = parserFactory.getParser(uploadedInputStream,fileType,baseResourceId).orElseThrow(()->new InternalServerErrorException("Can't configure parser for a given file type")); ImportResult result = importTransactions(parser); result.setFileName(fileDetail.getFileName()); return Response.status(200).entity(result).build(); } @GET @Path("/import") public List<ImportType> getImportTypes() { ArrayList<ImportType> importTypes = new ArrayList<>(); for(ParserSupportedType type : parserFactory.getSupportedTypes()) { try { Class<?> resourceClass = Class.forName("pl.surreal.finance.transaction.resources."+type.getBaseResourceClass().getSimpleName()+"Resource"); URI uri = uriInfo.getBaseUriBuilder().path(resourceClass).build(); ImportType importType = new ImportType(type.getId(),type.getDescription(),Link.fromUri(uri).rel("describedby").type(type.getBaseResourceClass().getSimpleName()).build()); importTypes.add(importType); } catch(Exception e) { LOGGER.warn("getImportCapabilities : can't add supported type due to '{}'",e.toString()); e.printStackTrace(); } } return importTypes; } @POST @Path("/runAllRules") @UnitOfWork public LabelResultApi runAllRules() { int transactionCount=0; int labelsCount=0; for(pl.surreal.finance.transaction.core.Transaction t: transactionDAO.findAll()) { int appliedCount = transactionLabeler.label(t); if(appliedCount>0) { transactionCount++; labelsCount+=appliedCount; transactionDAO.create(t); } } return new 
LabelResultApi(transactionCount,labelsCount); } @POST @Path("/runRule/{id}") @UnitOfWork public LabelResultApi runRule(@PathParam("id") LongParam id) { int transactionCount=0; int labelsCount=0; for(pl.surreal.finance.transaction.core.Transaction t: transactionDAO.findAll()) { try { int appliedCount = transactionLabeler.label(t,id.get()); if(appliedCount>0) { transactionCount++; labelsCount+=appliedCount; transactionDAO.create(t); } } catch(NoSuchElementException ex) { throw new NotFoundException(ex.getMessage()); } } return new LabelResultApi(transactionCount,labelsCount); } private ImportResult importTransactions(ITransactionParser parser) { ImportResult result = new ImportResult(); while(parser.hasNext()) { Optional<pl.surreal.finance.transaction.core.Transaction> optional = parser.getNext(); result.incProcessed(); if(optional.isPresent()) { try { transactionDAO.create(optional.get()); result.incImported(); } catch(ConstraintViolationException ex) { LOGGER.warn("importCSV: constraint violation '{}'",ex.getMessage()); result.incContraintViolations(); continue; } } else { result.incNulls(); } } parser.close(); return result; } }
mikouaj/finsight
finsight-backend/src/main/java/pl/surreal/finance/transaction/resources/TransactionResource.java
Java
apache-2.0
11,611
package com.voisintech.easeljs.display;

/**
 * Empty placeholder for the EaselJS {@code MovieClip} display object.
 * <p>
 * NOTE(review): no API has been ported yet — the class body is intentionally
 * empty. Presumably this is a stub for a GWT wrapper of createjs.MovieClip
 * (the package mirrors the EaselJS display hierarchy); confirm before use.
 */
public class MovieClip {

}
Vam85/gwt-createjs
gwt-easeljs/src/com/voisintech/easeljs/display/MovieClip.java
Java
apache-2.0
69
/*
 * Copyright (c) 2002-2018 "Neo Technology,"
 * Network Engine for Objects in Lund AB [http://neotechnology.com]
 *
 * This file is part of Neo4j.
 *
 * Neo4j is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package org.neo4j.management;

import org.neo4j.jmx.Description;
import org.neo4j.jmx.ManagementInterface;

/**
 * JMX management interface exposing the High Availability status of a
 * cluster member: identity, liveness, role, replication recency, and the
 * other members of the cluster.
 */
@ManagementInterface( name = HighAvailability.NAME )
@Description( "Information about an instance participating in a HA cluster" )
public interface HighAvailability
{
    // Interface fields are implicitly public static final; the previous
    // explicit "final" modifier was redundant.
    String NAME = "High Availability";

    @Description( "The identifier used to identify this server in the HA cluster" )
    String getInstanceId();

    @Description( "Whether this instance is available or not" )
    boolean isAvailable();

    @Description( "Whether this instance is alive or not" )
    boolean isAlive();

    @Description( "The role this instance has in the cluster" )
    String getRole();

    @Description( "The time when the data on this instance was last updated from the master" )
    String getLastUpdateTime();

    @Description( "The latest transaction id present in this instance's store" )
    long getLastCommittedTxId();

    @Description( "Information about all instances in this cluster" )
    ClusterMemberInfo[] getInstancesInCluster();

    @Description( "(If this is a slave) Update the database on this " +
                  "instance with the latest transactions from the master" )
    String update();
}
HuangLS/neo4j
advanced/management/src/main/java/org/neo4j/management/HighAvailability.java
Java
apache-2.0
2,055
// Sanity test for removing documents with adjacent index keys. SERVER-2008 t = db.jstests_removec; t.drop(); t.ensureIndex({a: 1}); /** @return an array containing a sequence of numbers from i to i + 10. */ function runStartingWith(i) { ret = []; for (j = 0; j < 11; ++j) { ret.push(i + j); } return ret; } // Insert some documents with adjacent index keys. for (i = 0; i < 1100; i += 11) { t.save({a: runStartingWith(i)}); } // Remove and then reinsert random documents in the background. s = startParallelShell('t = db.jstests_removec;' + 'Random.setRandomSeed();' + 'for( j = 0; j < 1000; ++j ) {' + ' o = t.findOne( { a:Random.randInt( 1100 ) } );' + ' t.remove( { _id:o._id } );' + ' t.insert( o );' + '}'); // Find operations are error free. Note that the cursor throws if it detects the $err // field in the returned document. for (i = 0; i < 200; ++i) { t.find({a: {$gte: 0}}).hint({a: 1}).itcount(); } s(); t.drop();
christkv/mongo-shell
test/jstests/core/removec.js
JavaScript
apache-2.0
1,114
"use strict"; var cases = module.exports = []; cases[0] = {}; cases[0].resources = [{ "resource": { "id": "Condition/c-0-0", "resourceType": "Condition", "status": "confirmed", "onsetDateTime": "2012-08-05", "dateAsserted": "2012-08-05", "abatementBoolean": true, "code": { "coding": [{ "code": "233604007", "system": "http://snomed.info/sct", "display": "Pneumonia" }] } } }]; cases[0].input = cases[0].resources[0]; cases[0].result = { "problem": { "code": { "name": "Pneumonia", "code": "233604007", "code_system_name": "SNOMED CT" }, "date_time": { "low": { "date": "2012-08-05T00:00:00.000Z", "precision": "day" } } }, "status": { "name": "Resolved" } }; cases[1] = {}; cases[1].resources = [{ "resource": { "id": "Condition/c-1-0", "resourceType": "Condition", "status": "confirmed", "onsetDateTime": "2007-01-03", "dateAsserted": "2007-01-03", "code": { "coding": [{ "code": "195967001", "system": "http://snomed.info/sct", "display": "Asthma" }] } } }]; cases[1].input = cases[1].resources[0]; cases[1].result = { "problem": { "code": { "name": "Asthma", "code": "195967001", "code_system_name": "SNOMED CT" }, "date_time": { "low": { "date": "2007-01-03T00:00:00.000Z", "precision": "day" } } } }; cases[2] = {}; cases[2].resources = [{ "resource": { "id": "Condition/c-2-0", "resourceType": "Condition", "status": "confirmed", "onsetDateTime": "2007-01-03", "dateAsserted": "2007-01-03", "abatementDate": "2012-09-05", "code": { "coding": [{ "code": "195967001", "system": "http://snomed.info/sct", "display": "Asthma" }] } } }]; cases[2].input = cases[2].resources[0]; cases[2].result = { "problem": { "code": { "name": "Asthma", "code": "195967001", "code_system_name": "SNOMED CT" }, "date_time": { "low": { "date": "2007-01-03T00:00:00.000Z", "precision": "day" }, "high": { "date": "2012-09-05T00:00:00.000Z", "precision": "day" } } } };
amida-tech/blue-button-fhir
test/fixtures/unit/condition.js
JavaScript
apache-2.0
2,758
/* * Copyright (c) 2015-2016, Mostafa Ali (engabdomostafa@gmail.com) * All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. Redistributions * in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or * other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH * DAMAGE. 
 */
package ca.mali.customcontrol;

import ca.mali.fomparser.DimensionFDD;
import ca.mali.fomparser.FddObjectModel;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.ReadOnlyStringProperty;
import javafx.beans.property.ReadOnlyStringWrapper;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.scene.control.CheckBox;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.cell.CheckBoxTableCell;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.layout.VBox;
import javafx.util.Callback;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.io.IOException;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Custom JavaFX control: a table of FOM dimensions with a per-row check box
 * and a select-all check box in the column header. The checked rows can be
 * retrieved via {@link #getDimensions()}.
 *
 * @author Mostafa Ali <engabdomostafa@gmail.com>
 */
public class DimensionsListController extends VBox {

    //Logger
    private static final Logger logger = LogManager.getLogger();

    // Injected from /fxml/customcontrol/DimensionsList.fxml.
    @FXML
    private TableView<DimensionState> DimensionTableView;

    @FXML
    private TableColumn<DimensionState, String> DimensionTableColumn;

    // NOTE(review): raw TableColumn type (and the raw casts in the header
    // check box handler below) — could be TableColumn<DimensionState, Boolean>.
    @FXML
    private TableColumn CheckTableColumn;

    // Header check box used as the select-all toggle for the check column.
    CheckBox cb = new CheckBox();

    // Backing list for the table; one row per FOM dimension.
    ObservableList<DimensionState> dimensions = FXCollections.observableArrayList();

    /**
     * Loads the FXML layout and wires this instance as both root and
     * controller. A load failure is logged as FATAL and otherwise swallowed.
     */
    public DimensionsListController() {
        logger.entry();
        FXMLLoader fxmlLoader = new FXMLLoader(getClass().getResource("/fxml/customcontrol/DimensionsList.fxml"));
        fxmlLoader.setController(this);
        fxmlLoader.setRoot(this);
        try {
            fxmlLoader.load();
        } catch (IOException ex) {
            logger.log(Level.FATAL, ex.getMessage(), ex);
        }
        logger.exit();
    }

    /**
     * Populates the table from the given object model's dimensions and wires
     * the two-way select-all behavior: unchecking any row clears the header
     * box; checking the last unchecked row sets it; toggling the header box
     * applies its state to every row. A null model leaves the table untouched.
     */
    public void setFddObjectModel(FddObjectModel fddObjectModel) {
        logger.entry();
        if (fddObjectModel != null) {
            fddObjectModel.getDimensions().values().stream().forEach((value) -> dimensions.add(new DimensionState(value)));
            DimensionTableView.setItems(dimensions);
            // Keep the header check box consistent with the individual rows.
            dimensions.forEach((interaction) -> interaction.onProperty().addListener((observable, oldValue, newValue) -> {
                if (!newValue) {
                    cb.setSelected(false);
                } else if (dimensions.stream().allMatch(a -> a.isOn())) {
                    cb.setSelected(true);
                }
            }));
            DimensionTableColumn.setCellValueFactory(new PropertyValueFactory<>("DimensionName"));
            // The check column is backed directly by each row's "on" property.
            CheckTableColumn.setCellValueFactory(new Callback<TableColumn.CellDataFeatures<DimensionState, Boolean>, ObservableValue<Boolean>>() {
                @Override
                public ObservableValue<Boolean> call(TableColumn.CellDataFeatures<DimensionState, Boolean> param) {
                    return param.getValue().onProperty();
                }
            });
            CheckTableColumn.setCellFactory(CheckBoxTableCell.forTableColumn(CheckTableColumn));
            cb.setUserData(CheckTableColumn);
            // Select-all: copy the header box's state into every row.
            cb.setOnAction((ActionEvent event) -> {
                CheckBox cb1 = (CheckBox) event.getSource();
                TableColumn tc = (TableColumn) cb1.getUserData();
                DimensionTableView.getItems().stream().forEach((item) -> item.setOn(cb1.isSelected()));
            });
            CheckTableColumn.setGraphic(cb);
        }
        logger.exit();
    }

    /** @return the dimensions whose row check box is currently selected. */
    public List<DimensionFDD> getDimensions() {
        return dimensions.stream().filter(DimensionState::isOn).map(a -> a.dimension).collect(Collectors.toList());
    }

    /**
     * Table row model: wraps a DimensionFDD with a read-only name property
     * and a writable "on" (checked) property.
     */
    public static class DimensionState {
        private final ReadOnlyStringWrapper DimensionName = new ReadOnlyStringWrapper();
        private final BooleanProperty on = new SimpleBooleanProperty();
        private final DimensionFDD dimension;

        public DimensionState(DimensionFDD dimension) {
            this.dimension = dimension;
            DimensionName.set(dimension.getName());
        }

        public String getDimensionName() {
            return DimensionName.get();
        }

        public ReadOnlyStringProperty dimensionNameProperty() {
            return DimensionName.getReadOnlyProperty();
        }

        public boolean isOn() {
            return on.get();
        }

        public void setOn(boolean value) {
            on.set(value);
        }

        public BooleanProperty onProperty() {
            return on;
        }

        @Override
        public String toString() {
            return dimension.getName();
        }
    }
}
EMostafaAli/HlaListener
src/main/java/ca/mali/customcontrol/DimensionsListController.java
Java
apache-2.0
6,175
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package de.knightsoftnet.validators.client.data; import com.google.gwt.i18n.client.Constants; import java.util.Map; /** * set of phone country region codes. * * @author Manfred Tremmel * */ public interface PhoneRegionCode504Constants extends Constants { Map<String, String> phoneRegionCodes504(); }
ManfredTremmel/gwt-bean-validators
gwt-bean-validators/src/main/java/de/knightsoftnet/validators/client/data/PhoneRegionCode504Constants.java
Java
apache-2.0
1,101
/* * Copyright 2017 Google, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.spinnaker.clouddriver.kubernetes.provider; import com.netflix.spinnaker.clouddriver.model.HealthState; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; @Slf4j public class KubernetesModelUtil { public static long translateTime(String time) { return KubernetesModelUtil.translateTime(time, "yyyy-MM-dd'T'HH:mm:ssX"); } public static long translateTime(String time, String format) { try { return StringUtils.isNotEmpty(time) ? (new SimpleDateFormat(format).parse(time)).getTime() : 0; } catch (ParseException e) { log.error("Failed to parse kubernetes timestamp", e); return 0; } } public static HealthState getHealthState(List<Map<String, Object>> health) { return someUpRemainingUnknown(health) ? HealthState.Up : someSucceededRemainingUnknown(health) ? HealthState.Succeeded : anyStarting(health) ? HealthState.Starting : anyDown(health) ? HealthState.Down : anyFailed(health) ? HealthState.Failed : anyOutOfService(health) ? 
HealthState.OutOfService : HealthState.Unknown; } private static boolean stateEquals(Map<String, Object> health, HealthState state) { Object healthState = health.get("state"); return healthState != null && healthState.equals(state.name()); } private static boolean someUpRemainingUnknown(List<Map<String, Object>> healthsList) { List<Map<String, Object>> knownHealthList = healthsList.stream() .filter(h -> !stateEquals(h, HealthState.Unknown)) .collect(Collectors.toList()); return !knownHealthList.isEmpty() && knownHealthList.stream().allMatch(h -> stateEquals(h, HealthState.Up)); } private static boolean someSucceededRemainingUnknown(List<Map<String, Object>> healthsList) { List<Map<String, Object>> knownHealthList = healthsList.stream() .filter(h -> !stateEquals(h, HealthState.Unknown)) .collect(Collectors.toList()); return !knownHealthList.isEmpty() && knownHealthList.stream().allMatch(h -> stateEquals(h, HealthState.Succeeded)); } private static boolean anyDown(List<Map<String, Object>> healthsList) { return healthsList.stream().anyMatch(h -> stateEquals(h, HealthState.Down)); } private static boolean anyStarting(List<Map<String, Object>> healthsList) { return healthsList.stream().anyMatch(h -> stateEquals(h, HealthState.Starting)); } private static boolean anyFailed(List<Map<String, Object>> healthsList) { return healthsList.stream().anyMatch(h -> stateEquals(h, HealthState.Failed)); } private static boolean anyOutOfService(List<Map<String, Object>> healthsList) { return healthsList.stream().anyMatch(h -> stateEquals(h, HealthState.OutOfService)); } }
duftler/clouddriver
clouddriver-kubernetes/src/main/java/com/netflix/spinnaker/clouddriver/kubernetes/provider/KubernetesModelUtil.java
Java
apache-2.0
3,696
/** * @license * Copyright 2020 Google LLC. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ============================================================================= */ // Use the CPU backend for running tests. import '@tensorflow/tfjs-backend-cpu'; // tslint:disable-next-line:no-imports-from-dist import * as jasmine_util from '@tensorflow/tfjs-core/dist/jasmine_util'; // tslint:disable-next-line:no-require-imports const jasmineCtor = require('jasmine'); // tslint:disable-next-line:no-require-imports Error.stackTraceLimit = Infinity; process.on('unhandledRejection', e => { throw e; }); jasmine_util.setTestEnvs( [{name: 'test-inference-api', backendName: 'cpu', flags: {}}]); const unitTests = 'src/**/*_test.ts'; const runner = new jasmineCtor(); runner.loadConfig({spec_files: [unitTests], random: false}); runner.execute();
tensorflow/tfjs
tfjs-inference/src/test_node.ts
TypeScript
apache-2.0
1,381
app.controller('submissions', ['$scope', '$http', '$rootScope', 'globalHelpers', function ($scope, $http, $rootScope, globalHelpers) { $scope.stats = {}; $scope.getUrlLanguages = function(gitUrlId){ globalHelpers.getUrlLanguagesPromise(gitUrlId).then( function (response){ $scope.stats[gitUrlId] = response.data.languages; }); } $scope.addForReview = function () { $scope.showWarning = false; $scope.showGithubWarning = false; if (!$scope.newName || !$scope.newUrl) { $scope.showWarning = true; return; } var _new_name = $scope.newName.trim(); var _new = $scope.newUrl.trim(); if (!_new || !_new_name) { $scope.showWarning = true; return; } if (!_new || !_new_name) { $scope.showWarning = true; return; } var _newUrl = globalHelpers.getLocation(_new); var pathArray = _newUrl.pathname.split('/'); isCommit = pathArray.indexOf('commit') > -1; isPR = pathArray.indexOf('pull') > -1; if (_newUrl.hostname != "github.com" || (!isCommit && !isPR)){ $scope.showGithubWarning = true; return; } var obj = JSON.parse('{"github_user": "' + $scope.github_user + '", "name": "' + _new_name + '", "url": "' + _new + '"}'); for (var i=0; i < $scope.existing.length; i++){ if (Object.keys($scope.existing[i])[0] == _new){ return; } } $scope.existing.push(obj); $http({ method: "post", url: "/add_for_review", headers: {'Content-Type': "application/json"}, data: obj }).success(function () { // console.log("success!"); }); $scope.showUWarning = false; $scope.showGithubWarning = false; $scope._new = ''; $rootScope.$broadcast('urlEntryChange', 'args'); }; $scope.removeUrl = function (url) { if(confirm("Are you sure you want to delete entry \"" + url["name"] + "\"?")){ for (var i=0; i < $scope.existing.length; i++){ if ($scope.existing[i]["url"] == url['url']){ $scope.existing.splice(i, 1) $http({ method: "post", url: "/remove_from_list", headers: {'Content-Type': "application/json"}, data: url }).success(function () { console.log("success!"); }); $rootScope.$broadcast('urlEntryChange', 'args'); } } } }; 
}]);
trobert2/gitRoulette
static/controllers/submissions.js
JavaScript
apache-2.0
2,774
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.cdi.se; import org.apache.camel.CamelContext; import org.apache.camel.ProducerTemplate; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.cdi.CdiCamelExtension; import org.apache.camel.cdi.ContextName; import org.apache.camel.cdi.Uri; import org.apache.camel.cdi.se.bean.FirstCamelContextBean; import org.apache.camel.cdi.se.bean.SecondNamedCamelContextBean; import org.apache.camel.component.mock.MockEndpoint; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.junit.Arquillian; import org.jboss.shrinkwrap.api.Archive; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.asset.EmptyAsset; import org.jboss.shrinkwrap.api.spec.JavaArchive; import org.junit.Test; import org.junit.runner.RunWith; import javax.inject.Inject; import java.util.concurrent.TimeUnit; import static org.apache.camel.cdi.se.expression.ExchangeExpression.fromCamelContext; import static org.apache.camel.component.mock.MockEndpoint.assertIsSatisfied; @RunWith(Arquillian.class) public class MultiCamelContextReusedRouteTest { @Deployment public static Archive<?> deployment() { return ShrinkWrap.create(JavaArchive.class) // Camel CDI 
.addPackage(CdiCamelExtension.class.getPackage()) // Test classes .addClasses(FirstCamelContextBean.class, SecondNamedCamelContextBean.class) // Bean archive deployment descriptor .addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml"); } @Inject @ContextName("first") private CamelContext firstCamelContext; @Inject @ContextName("first") @Uri("direct:inbound") private ProducerTemplate firstInbound; @Inject @ContextName("first") @Uri("mock:outbound") private MockEndpoint firstOutbound; @Inject @ContextName("second") private CamelContext secondCamelContext; @Inject @ContextName("second") @Uri("direct:inbound") private ProducerTemplate secondInbound; @Inject @ContextName("second") @Uri("mock:outbound") private MockEndpoint secondOutbound; @Test public void sendMessageToFirstCamelContextInbound() throws InterruptedException { firstOutbound.expectedMessageCount(1); firstOutbound.expectedBodiesReceived("test-first"); firstOutbound.expectedHeaderReceived("context", "test"); firstOutbound.message(0).exchange().matches(fromCamelContext("first")); firstInbound.sendBody("test-first"); assertIsSatisfied(2L, TimeUnit.SECONDS, firstOutbound); } @Test public void sendMessageToSecondCamelContextInbound() throws InterruptedException { secondOutbound.expectedMessageCount(1); secondOutbound.expectedBodiesReceived("test-second"); secondOutbound.expectedHeaderReceived("context", "test"); secondOutbound.message(0).exchange().matches(fromCamelContext("second")); secondInbound.sendBody("test-second"); assertIsSatisfied(2L, TimeUnit.SECONDS, secondOutbound); } @ContextName("first") @ContextName("second") static class ReusedRouteBuilder extends RouteBuilder { @Override public void configure() { from("direct:inbound").setHeader("context").constant("test").to("mock:outbound"); } } }
astefanutti/camel-cdi
envs/se/src/test/java/org/apache/camel/cdi/se/MultiCamelContextReusedRouteTest.java
Java
apache-2.0
4,298
/* * Copyright 2014 Radialpoint SafeCare Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.radialpoint.word2vec; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.HashMap; import java.util.Map; /** * This class stores the mapping of String->array of float that constitutes each vector. * * The class can serialize to/from a stream. * * The ConvertVectors allows to transform the C binary vectors into instances of this class. */ public class Vectors { /** * The vectors themselves. */ protected float[][] vectors; /** * The words associated with the vectors */ protected String[] vocabVects; /** * Size of each vector */ protected int size; /** * Inverse map, word-> index */ protected Map<String, Integer> vocab; /** * Package-level constructor, used by the ConvertVectors program. * * @param vectors * , it cannot be empty * @param vocabVects * , the length should match vectors */ Vectors(float[][] vectors, String[] vocabVects) throws VectorsException { this.vectors = vectors; this.size = vectors[0].length; if (vectors.length != vocabVects.length) throw new VectorsException("Vectors and vocabulary size mismatch"); this.vocabVects = vocabVects; this.vocab = new HashMap<String, Integer>(); for (int i = 0; i < vocabVects.length; i++) vocab.put(vocabVects[i], i); } /** * Initialize a Vectors instance from an open input stream. 
This method closes the stream. * * @param is * the open stream * @throws IOException * if there are problems reading from the stream */ public Vectors(InputStream is) throws IOException { DataInputStream dis = new DataInputStream(is); int words = dis.readInt(); int size = dis.readInt(); this.size = size; this.vectors = new float[words][]; this.vocabVects = new String[words]; for (int i = 0; i < words; i++) { this.vocabVects[i] = dis.readUTF(); float[] vector = new float[size]; for (int j = 0; j < size; j++) vector[j] = dis.readFloat(); this.vectors[i] = vector; } this.vocab = new HashMap<String, Integer>(); for (int i = 0; i < vocabVects.length; i++) vocab.put(vocabVects[i], i); dis.close(); } /** * Writes this vector to an open output stream. This method closes the stream. * * @param os * the stream to write to * @throws IOException * if there are problems writing to the stream */ public void writeTo(OutputStream os) throws IOException { DataOutputStream dos = new DataOutputStream(os); dos.writeInt(this.vectors.length); dos.writeInt(this.size); for (int i = 0; i < vectors.length; i++) { dos.writeUTF(this.vocabVects[i]); for (int j = 0; j < size; j++) dos.writeFloat(this.vectors[i][j]); } dos.close(); } public float[][] getVectors() { return vectors; } public float[] getVector(int i) { return vectors[i]; } public float[] getVector(String term) throws OutOfVocabularyException { Integer idx = vocab.get(term); if (idx == null) throw new OutOfVocabularyException("Unknown term '" + term + "'"); return vectors[idx]; } public int getIndex(String term) throws OutOfVocabularyException { Integer idx = vocab.get(term); if (idx == null) throw new OutOfVocabularyException("Unknown term '" + term + "'"); return idx; } public Integer getIndexOrNull(String term) { return vocab.get(term); } public String getTerm(int index) { return vocabVects[index]; } public Map<String, Integer> getVocabulary() { return vocab; } public boolean hasTerm(String term) { return vocab.containsKey(term); 
} public int vectorSize() { return size; } public int wordCount() { return vectors.length; } }
StephanieMak/word2vec-query-expansion
src/main/java/com/radialpoint/word2vec/Vectors.java
Java
apache-2.0
4,960
package io.skysail.server.queryfilter.parser; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import org.osgi.framework.InvalidSyntaxException; import org.osgi.service.component.annotations.Component; import io.skysail.server.domain.jvm.FieldFacet; import io.skysail.server.filter.ExprNode; import io.skysail.server.filter.FilterParser; import io.skysail.server.queryfilter.nodes.AndNode; import io.skysail.server.queryfilter.nodes.EqualityNode; import io.skysail.server.queryfilter.nodes.GreaterNode; import io.skysail.server.queryfilter.nodes.IsInNode; import io.skysail.server.queryfilter.nodes.LessNode; import io.skysail.server.queryfilter.nodes.NotNode; import io.skysail.server.queryfilter.nodes.OrNode; import io.skysail.server.queryfilter.nodes.PresentNode; import io.skysail.server.queryfilter.nodes.SubstringNode; import lombok.ToString; import lombok.extern.slf4j.Slf4j; /** * @author org.osgi.framework.FrameworkUtil.ExprNode.Parser * */ @Component @Slf4j @ToString public class LdapParser implements FilterParser { private char[] filterChars; private int pos; @Override public ExprNode parse(String filterstring) { filterChars = filterstring.toCharArray(); pos = 0; ExprNode filter; try { filter = parseFilter(filterstring); sanityCheck(filterstring); return filter; } catch (ArrayIndexOutOfBoundsException | InvalidSyntaxException e) { log.error(e.getMessage(),e); } return null; } /* e.g. 
(buchungstag;YYYY=2006) */ @SuppressWarnings("unchecked") @Override public Set<String> getSelected(FieldFacet facet, Map<String, String> lines, String filterParamValue) { return (Set<String>) parse(filterParamValue).accept(n -> n.getSelected(facet, lines)); } private ExprNode parseFilter(String filterstring) throws InvalidSyntaxException { skipWhiteSpace(); if (filterChars[pos] != '(') { throw new InvalidSyntaxException("Missing '(': " + filterstring.substring(pos), filterstring); } pos++; ExprNode filter = parseFiltercomp(filterstring); skipWhiteSpace(); if (filterChars[pos] != ')') { throw new InvalidSyntaxException("Missing ')': " + filterstring.substring(pos), filterstring); } pos++; skipWhiteSpace(); return filter; } private ExprNode parseFiltercomp(String filterstring) throws InvalidSyntaxException { skipWhiteSpace(); char c = filterChars[pos]; switch (c) { case '&': { pos++; return parseAnd(filterstring); } case '|': { pos++; return parseOr(filterstring); } case '!': { pos++; return parseNot(filterstring); } } return parseItem(filterstring); } private ExprNode parseAnd(String filterstring) throws InvalidSyntaxException { int lookahead = pos; skipWhiteSpace(); if (filterChars[pos] != '(') { pos = lookahead - 1; return parseItem(filterstring); } List<ExprNode> operands = new ArrayList<>(10); while (filterChars[pos] == '(') { ExprNode child = parseFilter(filterstring); operands.add(child); } return new AndNode(operands); } private ExprNode parseOr(String filterstring) throws InvalidSyntaxException { int lookahead = pos; skipWhiteSpace(); if (filterChars[pos] != '(') { pos = lookahead - 1; return parseItem(filterstring); } List<ExprNode> operands = new ArrayList<>(10); while (filterChars[pos] == '(') { ExprNode child = parseFilter(filterstring); operands.add(child); } return new OrNode(operands); } private ExprNode parseNot(String filterstring) throws InvalidSyntaxException { int lookahead = pos; skipWhiteSpace(); if (filterChars[pos] != '(') { pos = lookahead - 
1; return parseItem(filterstring); } ExprNode child = parseFilter(filterstring); return new NotNode(child); } private ExprNode parseItem(String filterstring) throws InvalidSyntaxException { String attr = parseAttr(filterstring); skipWhiteSpace(); switch (filterChars[pos]) { case '~': { if (filterChars[pos + 1] == '=') { pos += 2; return null; } break; } case '>': { if (filterChars[pos + 1] == '=') { pos += 2; throw new InvalidSyntaxException("Invalid operator: >= not implemented", filterstring); } else { pos += 1; return new GreaterNode(attr, Float.valueOf((String)parseSubstring())); } } case '<': { if (filterChars[pos + 1] == '=') { pos += 2; throw new InvalidSyntaxException("Invalid operator: <= not implemented", filterstring); } else { pos += 1; return new LessNode(attr, Float.valueOf((String)parseSubstring())); } } case '=': { if (filterChars[pos + 1] == '*') { int oldpos = pos; pos += 2; skipWhiteSpace(); if (filterChars[pos] == ')') { return new PresentNode(attr, null); } pos = oldpos; } pos++; Object string = parseSubstring(); if (string instanceof String) { return new EqualityNode(attr, (String) string); } if (string instanceof String[]) { String[] value = (String[]) string; if (value.length == 3) { return new SubstringNode(attr, value[1]); } else if (value.length == 2) { if (value[0] == null) { return new SubstringNode(attr, value[1]); } else if (value[1] == null) { return new SubstringNode(attr, value[0]); } } } return null; } case '\u00A7': { // paragraph or section symbol, "element of", "is in", // not standard LDAP syntax! 
will replace this whole // thing with a ANTLR-based grammar pos++; Object string = parseSubstring(); if (string instanceof String) { return new IsInNode(attr, (String) string); } return null; } } throw new InvalidSyntaxException("Invalid operator: " + filterstring.substring(pos), filterstring); } private String parseAttr(String filterstring) throws InvalidSyntaxException { skipWhiteSpace(); int begin = pos; int end = pos; char c = filterChars[pos]; while (c != '~' && c != '\u00A7' && c != '<' && c != '>' && c != '=' && c != '(' && c != ')') { pos++; if (!Character.isWhitespace(c)) { end = pos; } c = filterChars[pos]; } int length = end - begin; if (length == 0) { throw new InvalidSyntaxException("Missing attr: " + filterstring.substring(pos), filterstring); } return new String(filterChars, begin, length); } private Object parseSubstring() throws InvalidSyntaxException { StringBuilder sb = new StringBuilder(filterChars.length - pos); List<String> operands = new ArrayList<>(10); boolean isMethod = false; parseloop: while (true) { char c = filterChars[pos]; switch (c) { case ')': { if (!isMethod) { if (sb.length() > 0) { operands.add(sb.toString()); } break parseloop; } else { isMethod = false; pos++; sb.append(")"); break; } } case '(': { isMethod = true; pos += 1; sb.append("("); break; } case '*': { if (sb.length() > 0) { operands.add(sb.toString()); } sb.setLength(0); operands.add(null); pos++; break; } case '\\': { pos++; c = filterChars[pos]; /* fall through into default */ } default: { sb.append(c); pos++; break; } } } int size = operands.size(); if (size == 0) { return ""; } if (size == 1) { Object single = operands.get(0); if (single != null) { return single; } } return operands.toArray(new String[size]); } private void skipWhiteSpace() { for (int length = filterChars.length; (pos < length) && Character.isWhitespace(filterChars[pos]);) { pos++; } } private void sanityCheck(String filterstring) throws InvalidSyntaxException { if (pos != filterChars.length) { 
throw new InvalidSyntaxException("Extraneous trailing characters: " + filterstring.substring(pos), filterstring); } } }
evandor/skysail
skysail.server.queryfilter/src/io/skysail/server/queryfilter/parser/LdapParser.java
Java
apache-2.0
9,977
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.ecs.model; import java.io.Serializable; /** * <p> * A data volume used in a task definition. * </p> */ public class Volume implements Serializable, Cloneable { /** * <p> * The name of the volume. Up to 255 letters (uppercase and lowercase), * numbers, hyphens, and underscores are allowed. This name is referenced in * the <code>sourceVolume</code> parameter of container definition * <code>mountPoints</code>. * </p> */ private String name; /** * <p> * The contents of the <code>host</code> parameter determine whether your * data volume persists on the host container instance and where it is * stored. If the host parameter is empty, then the Docker daemon assigns a * host path for your data volume, but the data is not guaranteed to persist * after the containers associated with it stop running. * </p> */ private HostVolumeProperties host; /** * <p> * The name of the volume. Up to 255 letters (uppercase and lowercase), * numbers, hyphens, and underscores are allowed. This name is referenced in * the <code>sourceVolume</code> parameter of container definition * <code>mountPoints</code>. * </p> * * @param name * The name of the volume. Up to 255 letters (uppercase and * lowercase), numbers, hyphens, and underscores are allowed. This * name is referenced in the <code>sourceVolume</code> parameter of * container definition <code>mountPoints</code>. 
*/ public void setName(String name) { this.name = name; } /** * <p> * The name of the volume. Up to 255 letters (uppercase and lowercase), * numbers, hyphens, and underscores are allowed. This name is referenced in * the <code>sourceVolume</code> parameter of container definition * <code>mountPoints</code>. * </p> * * @return The name of the volume. Up to 255 letters (uppercase and * lowercase), numbers, hyphens, and underscores are allowed. This * name is referenced in the <code>sourceVolume</code> parameter of * container definition <code>mountPoints</code>. */ public String getName() { return this.name; } /** * <p> * The name of the volume. Up to 255 letters (uppercase and lowercase), * numbers, hyphens, and underscores are allowed. This name is referenced in * the <code>sourceVolume</code> parameter of container definition * <code>mountPoints</code>. * </p> * * @param name * The name of the volume. Up to 255 letters (uppercase and * lowercase), numbers, hyphens, and underscores are allowed. This * name is referenced in the <code>sourceVolume</code> parameter of * container definition <code>mountPoints</code>. * @return Returns a reference to this object so that method calls can be * chained together. */ public Volume withName(String name) { setName(name); return this; } /** * <p> * The contents of the <code>host</code> parameter determine whether your * data volume persists on the host container instance and where it is * stored. If the host parameter is empty, then the Docker daemon assigns a * host path for your data volume, but the data is not guaranteed to persist * after the containers associated with it stop running. * </p> * * @param host * The contents of the <code>host</code> parameter determine whether * your data volume persists on the host container instance and where * it is stored. 
If the host parameter is empty, then the Docker * daemon assigns a host path for your data volume, but the data is * not guaranteed to persist after the containers associated with it * stop running. */ public void setHost(HostVolumeProperties host) { this.host = host; } /** * <p> * The contents of the <code>host</code> parameter determine whether your * data volume persists on the host container instance and where it is * stored. If the host parameter is empty, then the Docker daemon assigns a * host path for your data volume, but the data is not guaranteed to persist * after the containers associated with it stop running. * </p> * * @return The contents of the <code>host</code> parameter determine whether * your data volume persists on the host container instance and * where it is stored. If the host parameter is empty, then the * Docker daemon assigns a host path for your data volume, but the * data is not guaranteed to persist after the containers associated * with it stop running. */ public HostVolumeProperties getHost() { return this.host; } /** * <p> * The contents of the <code>host</code> parameter determine whether your * data volume persists on the host container instance and where it is * stored. If the host parameter is empty, then the Docker daemon assigns a * host path for your data volume, but the data is not guaranteed to persist * after the containers associated with it stop running. * </p> * * @param host * The contents of the <code>host</code> parameter determine whether * your data volume persists on the host container instance and where * it is stored. If the host parameter is empty, then the Docker * daemon assigns a host path for your data volume, but the data is * not guaranteed to persist after the containers associated with it * stop running. * @return Returns a reference to this object so that method calls can be * chained together. 
*/ public Volume withHost(HostVolumeProperties host) { setHost(host); return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getName() != null) sb.append("Name: " + getName() + ","); if (getHost() != null) sb.append("Host: " + getHost()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof Volume == false) return false; Volume other = (Volume) obj; if (other.getName() == null ^ this.getName() == null) return false; if (other.getName() != null && other.getName().equals(this.getName()) == false) return false; if (other.getHost() == null ^ this.getHost() == null) return false; if (other.getHost() != null && other.getHost().equals(this.getHost()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode()); hashCode = prime * hashCode + ((getHost() == null) ? 0 : getHost().hashCode()); return hashCode; } @Override public Volume clone() { try { return (Volume) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
trasa/aws-sdk-java
aws-java-sdk-ecs/src/main/java/com/amazonaws/services/ecs/model/Volume.java
Java
apache-2.0
8,673
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies;

import java.util.Iterator;

import org.eclipse.emf.ecore.EAnnotation;
import org.eclipse.gef.commands.Command;
import org.eclipse.gmf.runtime.diagram.core.commands.DeleteCommand;
import org.eclipse.gmf.runtime.emf.commands.core.command.CompositeTransactionalCommand;
import org.eclipse.gmf.runtime.emf.type.core.commands.DestroyElementCommand;
import org.eclipse.gmf.runtime.emf.type.core.requests.CreateRelationshipRequest;
import org.eclipse.gmf.runtime.emf.type.core.requests.DestroyElementRequest;
import org.eclipse.gmf.runtime.emf.type.core.requests.ReorientRelationshipRequest;
import org.eclipse.gmf.runtime.notation.Edge;
import org.eclipse.gmf.runtime.notation.View;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.EsbLinkCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.EsbLinkReorientCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EsbLinkEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes;

/**
 * GMF-generated semantic edit policy for the TransactionMediator output
 * connector node: destroys the connector together with its outgoing EsbLink
 * edges, and creates/reorients EsbLink relationships. NOTE(review): marked
 * {@code @generated} — hand edits will be lost on diagram regeneration unless
 * the markers are changed to {@code @generated NOT}.
 *
 * @generated
 */
public class TransactionMediatorOutputConnectorItemSemanticEditPolicy extends
		EsbBaseItemSemanticEditPolicy {

	/**
	 * Registers the element type this policy is responsible for.
	 *
	 * @generated
	 */
	public TransactionMediatorOutputConnectorItemSemanticEditPolicy() {
		super(EsbElementTypes.TransactionMediatorOutputConnector_3119);
	}

	/**
	 * Builds a composite command that deletes every outgoing EsbLink edge
	 * (both its semantic element and its notation view) and then the host
	 * element itself — unless the view is only a "Shortcut", in which case
	 * just the view is removed.
	 *
	 * @generated
	 */
	protected Command getDestroyElementCommand(DestroyElementRequest req) {
		View view = (View) getHost().getModel();
		CompositeTransactionalCommand cmd = new CompositeTransactionalCommand(
				getEditingDomain(), null);
		cmd.setTransactionNestingEnabled(false);
		for (Iterator<?> it = view.getSourceEdges().iterator(); it.hasNext();) {
			Edge outgoingLink = (Edge) it.next();
			if (EsbVisualIDRegistry.getVisualID(outgoingLink) == EsbLinkEditPart.VISUAL_ID) {
				// Destroy the link's semantic element (non-confirmed request),
				// then remove its notation edge.
				DestroyElementRequest r = new DestroyElementRequest(
						outgoingLink.getElement(), false);
				cmd.add(new DestroyElementCommand(r));
				cmd.add(new DeleteCommand(getEditingDomain(), outgoingLink));
				// Redundant but generated this way; kept for regeneration parity.
				continue;
			}
		}
		EAnnotation annotation = view.getEAnnotation("Shortcut"); //$NON-NLS-1$
		if (annotation == null) {
			// there are indirectly referenced children, need extra commands: false
			addDestroyShortcutsCommand(cmd, view);
			// delete host element
			cmd.add(new DestroyElementCommand(req));
		} else {
			// A shortcut view only references the element: delete the view, not
			// the semantic element.
			cmd.add(new DeleteCommand(getEditingDomain(), view));
		}
		return getGEFWrapper(cmd.reduce());
	}

	/**
	 * Dispatches to the start/complete variant depending on whether the
	 * relationship already has a target.
	 *
	 * @generated
	 */
	protected Command getCreateRelationshipCommand(CreateRelationshipRequest req) {
		Command command = req.getTarget() == null ? getStartCreateRelationshipCommand(req)
				: getCompleteCreateRelationshipCommand(req);
		return command != null ? command : super
				.getCreateRelationshipCommand(req);
	}

	/**
	 * An EsbLink may start here; any other element type is rejected.
	 *
	 * @generated
	 */
	protected Command getStartCreateRelationshipCommand(
			CreateRelationshipRequest req) {
		if (EsbElementTypes.EsbLink_4001 == req.getElementType()) {
			return getGEFWrapper(new EsbLinkCreateCommand(req, req.getSource(),
					req.getTarget()));
		}
		return null;
	}

	/**
	 * An EsbLink may never *end* on an output connector, hence the generated
	 * null in both branches.
	 *
	 * @generated
	 */
	protected Command getCompleteCreateRelationshipCommand(
			CreateRelationshipRequest req) {
		if (EsbElementTypes.EsbLink_4001 == req.getElementType()) {
			return null;
		}
		return null;
	}

	/**
	 * Returns command to reorient EClass based link. New link target or source
	 * should be the domain model element associated with this node.
	 *
	 * @generated
	 */
	protected Command getReorientRelationshipCommand(
			ReorientRelationshipRequest req) {
		switch (getVisualID(req)) {
		case EsbLinkEditPart.VISUAL_ID:
			return getGEFWrapper(new EsbLinkReorientCommand(req));
		}
		return super.getReorientRelationshipCommand(req);
	}

}
rajeevanv89/developer-studio
esb/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/edit/policies/TransactionMediatorOutputConnectorItemSemanticEditPolicy.java
Java
apache-2.0
3,894
/* * Copyright 2011 France Telecom R&D Beijing Co., Ltd 北京法国电信研发中心有限公司 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.social.weibo.api.impl.json; import java.util.Date; import java.util.SortedSet; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import org.springframework.social.weibo.api.Trends.Trend; /** * Annotated mixin to add Jackson annotations to TrendsWrapper. * * @author edva8332 */ @JsonIgnoreProperties(ignoreUnknown = true) abstract class TrendsWrapperMixin { @JsonProperty("trends") @JsonDeserialize(using = TrendsDeserializer.class) SortedSet<Trend> trends; @JsonProperty("as_of") @JsonDeserialize(using = DateInSecondsDeserializer.class) Date asOf; }
vergnes/spring-social-weibo
src/main/java/org/springframework/social/weibo/api/impl/json/TrendsWrapperMixin.java
Java
apache-2.0
1,429
package com.example.stackexchange.util; import java.nio.file.Path; import java.nio.file.Paths; public class SiteUtils { private static String siteName; public static String getSiteName(String dir) { if (siteName == null) { Path path = Paths.get(dir); Path parent = path.getParent(); Path name = parent.getName(parent.getNameCount() - 1); siteName = name.toString(); } return siteName; } public static void setSiteName(String siteName) { SiteUtils.siteName = siteName; } }
timstoner/stackexchangeimporter
src/main/java/com/example/stackexchange/util/SiteUtils.java
Java
apache-2.0
504
/** * Copyright 2016 JustWayward Team * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.justwayward.reader.ui.fragment; import android.content.Intent; import android.net.Uri; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.view.View; import com.justwayward.reader.R; import com.justwayward.reader.base.BaseFragment; import com.justwayward.reader.bean.support.FindBean; import com.justwayward.reader.common.OnRvItemClickListener; import com.justwayward.reader.component.AppComponent; import com.justwayward.reader.ui.activity.SubjectBookListActivity; import com.justwayward.reader.ui.activity.TopCategoryListActivity; import com.justwayward.reader.ui.activity.TopRankActivity; import com.justwayward.reader.ui.adapter.FindAdapter; import com.justwayward.reader.view.SupportDividerItemDecoration; import java.util.ArrayList; import java.util.List; import butterknife.Bind; /** * 发现 * * @author yuyh. * @date 16/9/1. 
*/ public class FindFragment extends BaseFragment implements OnRvItemClickListener<FindBean> { @Bind(R.id.recyclerview) RecyclerView mRecyclerView; private FindAdapter mAdapter; private List<FindBean> mList = new ArrayList<>(); @Override public int getLayoutResId() { return R.layout.fragment_find; } @Override public void initDatas() { mList.clear(); mList.add(new FindBean("排行榜", R.drawable.home_find_rank)); mList.add(new FindBean("主题书单", R.drawable.home_find_topic)); mList.add(new FindBean("分类", R.drawable.home_find_category)); mList.add(new FindBean("官方QQ群", R.drawable.home_find_listen)); } @Override public void configViews() { mRecyclerView.setHasFixedSize(true); mRecyclerView.setLayoutManager(new LinearLayoutManager(getActivity())); mRecyclerView.addItemDecoration(new SupportDividerItemDecoration(mContext, LinearLayoutManager.VERTICAL, true)); mAdapter = new FindAdapter(mContext, mList, this); mRecyclerView.setAdapter(mAdapter); } @Override protected void setupActivityComponent(AppComponent appComponent) { } @Override public void attachView() { } @Override public void onItemClick(View view, int position, FindBean data) { switch (position) { case 0: TopRankActivity.startActivity(activity); break; case 1: SubjectBookListActivity.startActivity(activity); break; case 2: startActivity(new Intent(activity, TopCategoryListActivity.class)); break; case 3: startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("https://jq.qq.com/?_wv=1027&k=46qbql8"))); break; default: break; } } }
weiwenqiang/GitHub
MVP/BookReader-master/app/src/main/java/com/justwayward/reader/ui/fragment/FindFragment.java
Java
apache-2.0
3,463
package org.gradle.test.performance.mediummonolithicjavaproject.p281; public class Production5628 { private String property0; public String getProperty0() { return property0; } public void setProperty0(String value) { property0 = value; } private String property1; public String getProperty1() { return property1; } public void setProperty1(String value) { property1 = value; } private String property2; public String getProperty2() { return property2; } public void setProperty2(String value) { property2 = value; } private String property3; public String getProperty3() { return property3; } public void setProperty3(String value) { property3 = value; } private String property4; public String getProperty4() { return property4; } public void setProperty4(String value) { property4 = value; } private String property5; public String getProperty5() { return property5; } public void setProperty5(String value) { property5 = value; } private String property6; public String getProperty6() { return property6; } public void setProperty6(String value) { property6 = value; } private String property7; public String getProperty7() { return property7; } public void setProperty7(String value) { property7 = value; } private String property8; public String getProperty8() { return property8; } public void setProperty8(String value) { property8 = value; } private String property9; public String getProperty9() { return property9; } public void setProperty9(String value) { property9 = value; } }
oehme/analysing-gradle-performance
my-app/src/main/java/org/gradle/test/performance/mediummonolithicjavaproject/p281/Production5628.java
Java
apache-2.0
1,891
function loadText() { var txtLang = document.getElementsByName("txtLang"); txtLang[0].innerHTML = "St\u00F8rrelse"; txtLang[1].innerHTML = "Egenskaber"; txtLang[2].innerHTML = "Typografi"; txtLang[3].innerHTML = "Bredde"; txtLang[4].innerHTML = "Bredde styret af indhold"; txtLang[5].innerHTML = "Tabelbredde"; txtLang[6].innerHTML = "Tilpas til vindue"; txtLang[7].innerHTML = "H\u00F8jde"; txtLang[8].innerHTML = "Bredde styret af indhold"; txtLang[9].innerHTML = "Tabelbredde"; txtLang[10].innerHTML = "Tilpas til vindue"; txtLang[11].innerHTML = "Justering"; txtLang[12].innerHTML = "Margen"; txtLang[13].innerHTML = "Venstre"; txtLang[14].innerHTML = "H\u00F8jre"; txtLang[15].innerHTML = "Top"; txtLang[16].innerHTML = "Nederst"; txtLang[17].innerHTML = "Ramme"; txtLang[18].innerHTML = "Collapse"; txtLang[19].innerHTML = "Baggrund"; txtLang[20].innerHTML = "Celle afstand"; txtLang[21].innerHTML = "Celle margen"; txtLang[22].innerHTML = "Typografi"; var optLang = document.getElementsByName("optLang"); optLang[0].text = "pixels" optLang[1].text = "procent" optLang[2].text = "pixels" optLang[3].text = "procent" optLang[4].text = "Venstre" optLang[5].text = "Centrer" optLang[6].text = "H\u00F8jre" optLang[7].text = "Ingen" optLang[8].text = "Ja" optLang[9].text = "Nej" document.getElementById("btnPick").value="V\u00E6lg"; document.getElementById("btnImage").value="Billede"; document.getElementById("btnCancel").value = "Annuller"; document.getElementById("btnApply").value = "Opdater"; document.getElementById("btnOk").value = " Ok "; } function getText(s) { switch(s) { case "Custom Colors": return "Egne farver"; case "More Colors...": return "Flere farver..."; default:return ""; } } function writeTitle() { document.write("<title>Tabel egenskaber</title>") }
studiodev/archives
2009 - Team D4 (IxGamer)/include/Editor/scripts/language/danish/table_edit.js
JavaScript
apache-2.0
2,072
db = connect(mserver); db = db.getSiblingDB('kynetx'); db.schedev.ensureIndex({cron_id : 1}); db.schedev.ensureIndex({ken : 1}); db.schedev.ensureIndex({expired : 1},{expireAfterSeconds : 120});
kre/kre_standalone
kre01/dist/files/mongo08.js
JavaScript
apache-2.0
195
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2017 Serge Rider (serge@jkiss.org)
 * Copyright (C) 2011-2012 Eugene Fradkin (eugene.fradkin@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.ext.mysql.tools;

import org.jkiss.dbeaver.ext.mysql.MySQLConstants;
import org.jkiss.dbeaver.ext.mysql.MySQLDataSourceProvider;
import org.jkiss.dbeaver.ext.mysql.MySQLMessages;
import org.jkiss.dbeaver.ext.mysql.MySQLServerHome;
import org.jkiss.dbeaver.ext.mysql.model.MySQLCatalog;
import org.jkiss.dbeaver.ui.dialogs.tools.AbstractScriptExecuteWizard;
import org.jkiss.dbeaver.utils.RuntimeUtils;
import org.jkiss.utils.CommonUtils;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

/**
 * Wizard that runs a SQL script through the external {@code mysql} command
 * line client against a single catalog, in one of two flavors: plain script
 * execution or database import (the flavor only affects the wizard title).
 */
class MySQLScriptExecuteWizard extends AbstractScriptExecuteWizard<MySQLCatalog, MySQLCatalog> {

    /** Verbosity level forwarded to the mysql client invocation. */
    enum LogLevel {
        Normal, Verbose, Debug
    }

    private LogLevel logLevel;
    // Currently always true: adds --no-beep to suppress the client's bell.
    private boolean noBeep;
    // True when this wizard was opened as a "database import".
    private boolean isImport;

    private MySQLScriptExecuteWizardPageSettings mainPage;

    public MySQLScriptExecuteWizard(MySQLCatalog catalog, boolean isImport) {
        super(Collections.singleton(catalog), isImport ? MySQLMessages.tools_script_execute_wizard_db_import
                : MySQLMessages.tools_script_execute_wizard_execute_script);
        this.isImport = isImport;
        this.logLevel = LogLevel.Normal;
        this.noBeep = true;
        this.mainPage = new MySQLScriptExecuteWizardPageSettings(this);
    }

    public LogLevel getLogLevel() {
        return logLevel;
    }

    public void setLogLevel(LogLevel logLevel) {
        this.logLevel = logLevel;
    }

    public boolean isImport() {
        return isImport;
    }

    @Override
    public boolean isVerbose() {
        // Debug implies verbose as far as the base wizard is concerned.
        return logLevel == LogLevel.Verbose || logLevel == LogLevel.Debug;
    }

    @Override
    public void addPages() {
        addPage(mainPage);
        super.addPages();
    }

    /**
     * Puts the mysql binary path and global option flags at the front of the
     * command line; order matters for the external process invocation.
     */
    @Override
    public void fillProcessParameters(List<String> cmd, MySQLCatalog arg) throws IOException {
        String dumpPath = RuntimeUtils.getHomeBinary(getClientHome(), MySQLConstants.BIN_FOLDER, "mysql").getAbsolutePath(); //$NON-NLS-1$
        cmd.add(dumpPath);
        if (logLevel == LogLevel.Debug) {
            cmd.add("--debug-info"); //$NON-NLS-1$
        }
        if (noBeep) {
            cmd.add("--no-beep"); //$NON-NLS-1$
        }
    }

    /**
     * Passes the password through the MYSQL_PWD environment variable rather
     * than on the command line, keeping it out of the visible argv.
     */
    @Override
    protected void setupProcessParameters(ProcessBuilder process) {
        if (!CommonUtils.isEmpty(getToolUserPassword())) {
            process.environment().put(MySQLConstants.ENV_VARIABLE_MYSQL_PWD, getToolUserPassword());
        }
    }

    @Override
    public MySQLServerHome findServerHome(String clientHomeId) {
        return MySQLDataSourceProvider.getServerHome(clientHomeId);
    }

    @Override
    public Collection<MySQLCatalog> getRunInfo() {
        return getDatabaseObjects();
    }

    /**
     * Base options are assembled by MySQLToolScript; the target database name
     * is appended last, as the mysql client expects.
     */
    @Override
    protected List<String> getCommandLine(MySQLCatalog arg) throws IOException {
        List<String> cmd = MySQLToolScript.getMySQLToolCommandLine(this, arg);
        cmd.add(arg.getName());
        return cmd;
    }
}
ruspl-afed/dbeaver
plugins/org.jkiss.dbeaver.ext.mysql/src/org/jkiss/dbeaver/ext/mysql/tools/MySQLScriptExecuteWizard.java
Java
apache-2.0
3,895
<?php namespace App\Http\Lib\CFDIXmlReader\CfdiParser\properties; use Exception; use SimpleXMLElement; /** * */ class fecha { final public static function extract(SimpleXMLElement $xml, array $namespace, $version) { switch ($version) { case 3: case 3.2: return (string) $xml['fecha']; break; default: throw new Exception('Unkown document version ' . $version); break; } } }
herimh/addenda-soriana
app/Http/Lib/CFDIXmlReader/CfdiParser/properties/fecha.php
PHP
apache-2.0
507
namespace Epi.Cloud.Common.Constants { public enum EmailCombinationEnum { ResetPassword = 1, PasswordChanged = 2, UpdateUserInfo = 3, InsertUser = 4, UpdateOrganization = 5, InsertOrganization = 6 } }
Epi-Info/Epi-Info-Cloud-Contact-Tracing
Cloud Enter/Epi.Cloud.Common/Constants/EmailCombinationEnum.cs
C#
apache-2.0
219
// In-memory model of the ordered tab list belonging to a single task.
// Every mutation notifies listeners through `parentTaskList.emit(...)` so
// the UI can react; the class itself holds no DOM state.
class TabList {
  constructor (tabs, parentTaskList) {
    this.tabs = tabs || []
    this.parentTaskList = parentTaskList
  }

  // tab properties that shouldn't be saved to disk (transient session state)
  static temporaryProperties = ['hasAudio', 'previewImage', 'loaded']

  // Adds a tab (filling in defaults) and returns its id. Unless
  // options.atEnd is set, the new tab is inserted right after the currently
  // selected one.
  add (tab = {}, options = {}) {
    // you can pass an id that will be used, or a random one will be generated.
    var tabId = String(tab.id || Math.round(Math.random() * 100000000000000000))

    var newTab = {
      url: tab.url || '',
      title: tab.title || '',
      id: tabId,
      lastActivity: tab.lastActivity || Date.now(),
      secure: tab.secure,
      private: tab.private || false,
      readerable: tab.readerable || false,
      themeColor: tab.themeColor,
      backgroundColor: tab.backgroundColor,
      scrollPosition: tab.scrollPosition || 0,
      selected: tab.selected || false,
      muted: tab.muted || false,
      loaded: tab.loaded || false,
      hasAudio: false,
      previewImage: '',
      isFileView: false,
    }

    if (options.atEnd) {
      this.tabs.push(newTab)
    } else {
      // getSelectedIndex() returns null when nothing is selected; null + 1
      // evaluates to 1, so the tab then lands near the start of the list.
      this.tabs.splice(this.getSelectedIndex() + 1, 0, newTab)
    }

    this.parentTaskList.emit('tab-added', tabId)

    return tabId
  }

  // Merges `data` into the tab with the given id, emitting 'tab-updated'
  // once per changed key. Throws for unknown ids and undefined values.
  update (id, data) {
    if (!this.has(id)) {
      throw new ReferenceError('Attempted to update a tab that does not exist.')
    }
    const index = this.getIndex(id)

    for (var key in data) {
      if (data[key] === undefined) {
        throw new ReferenceError('Key ' + key + ' is undefined.')
      }
      this.tabs[index][key] = data[key]
      this.parentTaskList.emit('tab-updated', id, key)
      // changing URL erases scroll position
      if (key === 'url') {
        this.tabs[index].scrollPosition = 0
        this.parentTaskList.emit('tab-updated', id, 'scrollPosition')
      }
    }
  }

  // Removes the tab and archives its permanent state into the owning task's
  // tabHistory via the global `tasks` registry — NOTE(review): `tasks` is
  // assumed to be in scope at runtime; confirm against the module loader.
  // Returns the removed index, or false if the id was not found.
  destroy (id) {
    const index = this.getIndex(id)
    if (index < 0) return false

    tasks.getTaskContainingTab(id).tabHistory.push(this.toPermanentState(this.tabs[index]))

    this.tabs.splice(index, 1)

    this.parentTaskList.emit('tab-destroyed', id)

    return index
  }

  destroyAll () {
    // this = [] doesn't work, so set the length of the array to 0 to remove all of the items
    this.tabs.length = 0
  }

  // With an id: returns a shallow copy of that tab (or undefined).
  // Without an id: returns shallow copies of all tabs.
  get (id) {
    if (!id) { // no id provided, return an array of all tabs
      // it is important to copy the tab objects when returning them.
      // Otherwise, the original tab objects get modified when the returned
      // tabs are modified (such as when processing a url).
      var tabsToReturn = []
      for (var i = 0; i < this.tabs.length; i++) {
        tabsToReturn.push(Object.assign({}, this.tabs[i]))
      }
      return tabsToReturn
    }

    for (var i = 0; i < this.tabs.length; i++) {
      if (this.tabs[i].id === id) {
        return Object.assign({}, this.tabs[i])
      }
    }
    return undefined
  }

  has (id) {
    return this.getIndex(id) > -1
  }

  // Index of the tab with this id, or -1 (array convention).
  getIndex (id) {
    for (var i = 0; i < this.tabs.length; i++) {
      if (this.tabs[i].id === id) {
        return i
      }
    }
    return -1
  }

  // Id of the selected tab, or null when none is selected.
  getSelected () {
    for (var i = 0; i < this.tabs.length; i++) {
      if (this.tabs[i].selected) {
        return this.tabs[i].id
      }
    }
    return null
  }

  // Index of the selected tab; note this returns null (not -1) when none is
  // selected, unlike getIndex().
  getSelectedIndex () {
    for (var i = 0; i < this.tabs.length; i++) {
      if (this.tabs[i].selected) {
        return i
      }
    }
    return null
  }

  getAtIndex (index) {
    return this.tabs[index] || undefined
  }

  // Selects the given tab and deselects any other, refreshing lastActivity
  // on both tabs involved. Throws for unknown ids.
  setSelected (id) {
    if (!this.has(id)) {
      throw new ReferenceError('Attempted to select a tab that does not exist.')
    }
    for (var i = 0; i < this.tabs.length; i++) {
      if (this.tabs[i].id === id) {
        this.tabs[i].selected = true
        this.tabs[i].lastActivity = Date.now()
      } else if (this.tabs[i].selected) {
        this.tabs[i].selected = false
        this.tabs[i].lastActivity = Date.now()
      }
    }
    this.parentTaskList.emit('tab-selected', id)
  }

  // Swaps the tab with the one `offset` positions away, if that slot exists;
  // otherwise does nothing.
  moveBy (id, offset) {
    var currentIndex = this.getIndex(id)
    var newIndex = currentIndex + offset
    var newIndexTab = this.getAtIndex(newIndex)
    if (newIndexTab) {
      var currentTab = this.getAtIndex(currentIndex)
      this.splice(currentIndex, 1, newIndexTab)
      this.splice(newIndex, 1, currentTab)
    }
  }

  count () {
    return this.tabs.length
  }

  // "Empty" means no tabs at all, or a single tab with no URL yet.
  isEmpty () {
    if (!this.tabs || this.tabs.length === 0) {
      return true
    }

    if (this.tabs.length === 1 && !this.tabs[0].url) {
      return true
    }

    return false
  }

  forEach (fun) {
    return this.tabs.forEach(fun)
  }

  splice (...args) {
    return this.tabs.splice.apply(this.tabs, args)
  }

  // removes temporary properties of the tab that are lost on page reload
  toPermanentState (tab) {
    let result = {}
    Object.keys(tab)
      .filter(key => !TabList.temporaryProperties.includes(key))
      .forEach(key => result[key] = tab[key])

    return result
  }

  // Serializable snapshot of every tab, used for session persistence.
  getStringifyableState () {
    return this.tabs.map(tab => this.toPermanentState(tab))
  }
}

module.exports = TabList
minbrowser/min
js/tabState/tab.js
JavaScript
apache-2.0
5,074
package au.com.mountainpass.hyperstate.core;

import java.util.Map;
import java.util.concurrent.CompletableFuture;

import org.eclipse.jdt.annotation.Nullable;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.http.MediaType;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonUnwrapped;

import au.com.mountainpass.hyperstate.core.entities.CreatedEntity;
import au.com.mountainpass.hyperstate.core.entities.DeletedEntity;
import au.com.mountainpass.hyperstate.core.entities.EntityWrapper;
import au.com.mountainpass.hyperstate.core.entities.UpdatedEntity;

/**
 * A hypermedia link: an {@link Address} target plus presentation metadata
 * (title, classes, representation format). All resolution and CRUD-style
 * operations delegate to the underlying address. Serialized with the address
 * fields inlined ("href") and defaults omitted.
 */
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public class Link extends Titled {

    // May be null after deserialization; getRepresentationFormat() falls back
    // to Siren JSON in that case.
    @Nullable
    private MediaType representationFormat = MediaTypes.SIREN_JSON;

    private Address address;

    /**
     * JSON-creator style constructor.
     *
     * @param address the link target ("href")
     * @param title human-readable title of the link
     * @param classes Siren class names describing the target
     */
    public Link(@JsonProperty("href") Address address,
            @JsonProperty("title") final String title,
            @JsonProperty("class") final String... classes) {
        super(title, classes);
        this.address = address;
    }

    /** No-arg constructor for serialization frameworks. */
    public Link() {
    }

    public Link(Address address) {
        this.address = address;
    }

    /**
     * @return the media type of the target representation; defaults to Siren
     *         JSON when none was set.
     */
    @JsonProperty("type")
    public MediaType getRepresentationFormat() {
        return representationFormat == null ? MediaTypes.SIREN_JSON
                : representationFormat;
    }

    /** Resolves the target as an entity of the given class. */
    public <T extends EntityWrapper<?>> CompletableFuture<T> resolve(
            Class<T> type) {
        return address.get(type);
    }

    /** Resolves the target as an entity of the given parameterized type. */
    public <T extends EntityWrapper<?>> CompletableFuture<T> resolve(
            ParameterizedTypeReference<T> type) {
        return address.get(type);
    }

    /** Path component of the target address (not serialized). */
    @JsonIgnore
    public String getPath() {
        return address.getPath();
    }

    /** GET with query parameters, delegated to the address. */
    public CompletableFuture<EntityWrapper<?>> get(
            Map<String, Object> filteredParameters) {
        return address.get(filteredParameters);
    }

    /** DELETE, delegated to the address. */
    public CompletableFuture<DeletedEntity> delete(
            Map<String, Object> filteredParameters) {
        return address.delete(filteredParameters);
    }

    /** POST/create, delegated to the address. */
    public CompletableFuture<CreatedEntity> create(
            Map<String, Object> filteredParameters) {
        return address.create(filteredParameters);
    }

    /** PUT/update, delegated to the address. */
    public CompletableFuture<UpdatedEntity> update(
            Map<String, Object> filteredParameters) {
        return address.update(filteredParameters);
    }

    /** Parameterless GET, delegated to the address. */
    public CompletableFuture<EntityWrapper<?>> get() {
        return address.get();
    }

    /** Typed parameterless GET (not serialized). */
    @JsonIgnore
    public <T extends EntityWrapper<?>> CompletableFuture<T> get(
            Class<T> type) {
        return address.get(type);
    }

    /**
     * @return the address
     */
    @JsonUnwrapped
    public Address getAddress() {
        return address;
    }

    /**
     * @param address
     *            the address to set
     */
    public void setAddress(Address address) {
        this.address = address;
    }
}
mountain-pass/hyperstate
hyperstate-core/src/main/java/au/com/mountainpass/hyperstate/core/Link.java
Java
apache-2.0
3,050
package com.hyd.redisfx; import com.hyd.fx.Fxml; import com.hyd.fx.app.AppLogo; import com.hyd.fx.app.AppPrimaryStage; import com.hyd.redisfx.fx.BackgroundExecutor; import com.hyd.redisfx.i18n.I18n; import javafx.application.Application; import javafx.fxml.FXMLLoader; import javafx.scene.Parent; import javafx.scene.Scene; import javafx.scene.layout.BorderPane; import javafx.stage.Stage; public class RedisFxApp extends Application { public static void main(String[] args) { launch(RedisFxApp.class); } @Override public void start(Stage primaryStage) throws Exception { AppPrimaryStage.setPrimaryStage(primaryStage); AppLogo.setStageLogo(primaryStage); primaryStage.setTitle("RedisFX"); primaryStage.setScene(new Scene(getRoot())); primaryStage.setOnCloseRequest(event -> BackgroundExecutor.shutdown()); primaryStage.show(); } public Parent getRoot() throws Exception { FXMLLoader fxmlLoader = Fxml.load("/fxml/Main.fxml", I18n.UI_MAIN_BUNDLE); return fxmlLoader.<BorderPane>getRoot(); } }
yiding-he/redisfx
src/main/java/com/hyd/redisfx/RedisFxApp.java
Java
apache-2.0
1,099
package net.catchpole.B9.codec.transcoder; import net.catchpole.B9.codec.stream.BitInputStream; import net.catchpole.B9.codec.stream.BitOutputStream; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.Set; class MapTranscoder implements TypeTranscoder<Map>, FieldInterceptor<Map> { private ObjectArrayTranscoder objectArrayTranscoder; public MapTranscoder(ObjectArrayTranscoder objectArrayTranscoder) { this.objectArrayTranscoder = objectArrayTranscoder; } public Map read(BitInputStream in) throws IOException { Object[] all = objectArrayTranscoder.read(in); Map value = new HashMap(); for (int x=0;x<all.length;x+=2) { value.put(all[x],all[x+1]); } return value; } public void write(BitOutputStream out, Map value) throws IOException { Object[] values = new Object[value.size()*2]; int x=0; Set<Map.Entry> entrySet = value.entrySet(); for (Map.Entry entry : entrySet) { values[x++] = entry.getKey(); values[x++] = entry.getValue(); } objectArrayTranscoder.write(out, values); } public Map intercept(Map currentValue, Map newValue) { if (currentValue != null && currentValue.isEmpty()) { currentValue.putAll(newValue); return currentValue; } else { return newValue; } } }
slipperyseal/B9
src/main/java/net/catchpole/B9/codec/transcoder/MapTranscoder.java
Java
apache-2.0
1,451
package com.riskvis.entity;

// Generated May 12, 2014 11:45:38 PM by Hibernate Tools 4.0.0

import static javax.persistence.GenerationType.IDENTITY;

import java.util.HashSet;
import java.util.Set;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.OneToMany;
import javax.persistence.Table;

import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;

/**
 * Transports generated by hbm2java.
 *
 * JPA entity for the "transports" table. Mapping annotations sit on the
 * getters, so Hibernate uses property (not field) access — keep any new
 * mappings on getters too. Equality and hash code are based solely on the
 * surrogate key {@code idtransports}.
 */
@Entity
@Table(name = "transports", catalog = "test")
public class Transports extends AbstractEntity implements java.io.Serializable {

	private static final long serialVersionUID = -2787847957235299023L;

	// Surrogate primary key (identity column).
	private Integer idtransports;
	private String name;
	private String description;
	private byte[] icon;
	// Join rows linking this transport to places (lazily loaded).
	private Set<PlacesHasTransports> placesHasTransportses = new HashSet<PlacesHasTransports>(
			0);
	// Risks for this transport (eagerly loaded, deleted along with it).
	private Set<Transportationrisks> transportationriskses = new HashSet<Transportationrisks>(
			0);

	/** No-arg constructor required by JPA. */
	public Transports() {
	}

	public Transports(String name) {
		this.name = name;
	}

	public Transports(String name, String description, byte[] icon,
			Set<PlacesHasTransports> placesHasTransportses,
			Set<Transportationrisks> transportationriskses) {
		this.name = name;
		this.description = description;
		this.icon = icon;
		this.placesHasTransportses = placesHasTransportses;
		this.transportationriskses = transportationriskses;
	}

	@Id
	@GeneratedValue(strategy = IDENTITY)
	@Column(name = "idtransports", unique = true, nullable = false)
	public Integer getIdtransports() {
		return this.idtransports;
	}

	public void setIdtransports(Integer idtransports) {
		this.idtransports = idtransports;
	}

	@Column(name = "name", nullable = false, length = 45)
	public String getName() {
		return this.name;
	}

	public void setName(String name) {
		this.name = name;
	}

	@Column(name = "description", length = 512)
	public String getDescription() {
		return this.description;
	}

	public void setDescription(String description) {
		this.description = description;
	}

	@Column(name = "icon")
	public byte[] getIcon() {
		return this.icon;
	}

	public void setIcon(byte[] icon) {
		this.icon = icon;
	}

	// Inverse side of PlacesHasTransports.transports.
	@OneToMany(fetch = FetchType.LAZY, mappedBy = "transports")
	public Set<PlacesHasTransports> getPlacesHasTransportses() {
		return this.placesHasTransportses;
	}

	public void setPlacesHasTransportses(
			Set<PlacesHasTransports> placesHasTransportses) {
		this.placesHasTransportses = placesHasTransportses;
	}

	// Hibernate-specific DELETE cascade: removing a transport removes its
	// risks as well.
	@OneToMany(fetch = FetchType.EAGER, mappedBy = "transports")
	@Cascade({ CascadeType.DELETE })
	public Set<Transportationrisks> getTransportationriskses() {
		return this.transportationriskses;
	}

	public void setTransportationriskses(
			Set<Transportationrisks> transportationriskses) {
		this.transportationriskses = transportationriskses;
	}

	/** Hash code based only on the surrogate key, consistent with equals. */
	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result
				+ ((idtransports == null) ? 0 : idtransports.hashCode());
		return result;
	}

	/**
	 * Identity-by-primary-key equality: two unsaved instances (null ids) of
	 * the same class compare equal under this generated implementation.
	 */
	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		Transports other = (Transports) obj;
		if (idtransports == null) {
			if (other.idtransports != null)
				return false;
		} else if (!idtransports.equals(other.idtransports))
			return false;
		return true;
	}

}
machadolucas/watchout
src/main/java/com/riskvis/entity/Transports.java
Java
apache-2.0
3,527
package jdt.mantis.appmanager; import org.apache.http.NameValuePair; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.client.LaxRedirectStrategy; import org.apache.http.message.BasicNameValuePair; import org.apache.http.util.EntityUtils; import java.io.IOException; import java.util.ArrayList; import java.util.List; public class HttpSession { private CloseableHttpClient httpclient; private ApplicationManager app; public HttpSession(ApplicationManager app) { //powstaje nowa sesja this.app = app; //powstaje nowy klient (sesja) do pracy po protokole http, objekt wysyłający zapotrzebowania na serwer httpclient = HttpClients.custom().setRedirectStrategy(new LaxRedirectStrategy()).build(); } public boolean login(String username, String password) throws IOException { //adres na jajki wysylane jest zapytanie HttpPost post = new HttpPost(app.getProperty("web.baseUrl") + "login.php"); List<NameValuePair> params = new ArrayList<NameValuePair>(); //inicjalizacja zbioru parametrow params.add(new BasicNameValuePair("username", username)); params.add(new BasicNameValuePair("password", password)); params.add(new BasicNameValuePair("secure_session", "on")); params.add(new BasicNameValuePair("return", "index.php")); //parametry zapakowują się zgodnie z określonymi zasadami i lokują się we wczesniej strworzone zapytanie post.setEntity(new UrlEncodedFormEntity(params)); //wysyłka zapytania httpclient.execute(post) a wynikiem jest odp od serwera CloseableHttpResponse response = httpclient.execute(post); //Analizujemy odp od serwera String body = getTextFrom(response); //sprawdzanie czy uzytkownik sie zalogowal, czy kod strony zawiera taka linijke return body.contains(String.format("<span 
class=\"user-info\">%s</span>", username)); } //metoda do otrzymywania tekstu z odp serwera private String getTextFrom(CloseableHttpResponse response) throws IOException { try { return EntityUtils.toString(response.getEntity()); } finally { response.close(); } } //jaki uzytkownik jest teraz zalogowany public boolean isLoggedInAs(String username) throws IOException { //wchodzimy na strone glowna HttpGet get = new HttpGet(app.getProperty("web.baseUrl") + "/index.php"); // wykonujemy zapytanie i otzymujemy odpowiedz CloseableHttpResponse response = httpclient.execute(get); String body = getTextFrom(response); return body.contains(String.format("<span class=\"user-info\">%s</span>", username)); } }
sgasiewska/java_dla_testerow
mantis-tests/src/test/java/jdt/mantis/appmanager/HttpSession.java
Java
apache-2.0
2,863
//: net/mindview/util/Print.java // Print methods that can be used without // qualifiers, using Java SE5 static imports: package tsj.example.java.project.tsj.net.mindview.util; import java.io.*; public class Print { // Print with a newline: public static void print(Object obj) { System.out.println(obj); } // Print a newline by itself: public static void print() { System.out.println(); } // Print with no line break: public static void printnb(Object obj) { System.out.print(obj); } // The new Java SE5 printf() (from C): public static PrintStream printf(String format, Object... args) { return System.out.printf(format, args); } } ///:~
taoshujian/exampleJava
src/main/java/tsj/example/java/project/tsj/net/mindview/util/Print.java
Java
apache-2.0
683
/** * Copyright &copy; 2012-2014 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved. */ package com.thinkgem.jeesite.modules.sys.service.gbj; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.springframework.web.bind.annotation.RequestParam; import com.thinkgem.jeesite.common.persistence.Page; import com.thinkgem.jeesite.common.service.CrudService; import com.thinkgem.jeesite.modules.sys.dao.gbj.GbjUserSoldCommentsReplyDao; import com.thinkgem.jeesite.modules.sys.entity.gbj.GbjUserSoldCommentsReply; /** * 卖标信息评论回复管理Service * * @author snnu * @version 2018-01-18 */ @Service @Transactional(readOnly = true) public class GbjUserSoldCommentsReplyService extends CrudService<GbjUserSoldCommentsReplyDao, GbjUserSoldCommentsReply> { @Autowired GbjUserSoldCommentsReplyDao gbjUserSoldCommentsReplyDao; public GbjUserSoldCommentsReply get(String id) { return super.get(id); } public List<GbjUserSoldCommentsReply> findList(GbjUserSoldCommentsReply gbjUserSoldCommentsReply) { return super.findList(gbjUserSoldCommentsReply); } public Page<GbjUserSoldCommentsReply> findPage(Page<GbjUserSoldCommentsReply> page, GbjUserSoldCommentsReply gbjUserSoldCommentsReply) { return super.findPage(page, gbjUserSoldCommentsReply); } @Transactional(readOnly = false) public void save(GbjUserSoldCommentsReply gbjUserSoldCommentsReply) { super.save(gbjUserSoldCommentsReply); } @Transactional(readOnly = false) public void delete(GbjUserSoldCommentsReply gbjUserSoldCommentsReply) { super.delete(gbjUserSoldCommentsReply); } public List<GbjUserSoldCommentsReply> findDomainArticleSoldReplyCommentsList(@RequestParam("id") String id) { return gbjUserSoldCommentsReplyDao.findDomainArticleSoldReplyCommentsList(id); } }
GSSBuse/GSSB
src/main/java/com/thinkgem/jeesite/modules/sys/service/gbj/GbjUserSoldCommentsReplyService.java
Java
apache-2.0
1,961
/* * Copyright 2011-2012 Gregory P. Moyer * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.syphr.mythtv.protocol.impl; import org.syphr.mythtv.commons.exception.ProtocolException; import org.syphr.mythtv.commons.socket.CommandUtils; import org.syphr.mythtv.commons.translate.Translator; /* default */class Command63QueryRemoteEncoderCancelNextRecording extends AbstractCommand63QueryRemoteEncoder<Void> { private final boolean cancel; public Command63QueryRemoteEncoderCancelNextRecording(Translator translator, Parser parser, int recorderId, boolean cancel) { super(translator, parser, recorderId); this.cancel = cancel; } @Override protected String getSubCommand() throws ProtocolException { return getParser().combineArguments("CANCEL_NEXT_RECORDING", cancel ? "1" : "0"); } @Override protected Void parseResponse(String response) throws ProtocolException { CommandUtils.expectOk(response); return null; } }
syphr42/libmythtv-java
protocol/src/main/java/org/syphr/mythtv/protocol/impl/Command63QueryRemoteEncoderCancelNextRecording.java
Java
apache-2.0
1,753
/* * ! JSRT JavaScript Library 0.1.1 lico.atom@gmail.com * * Copyright 2008, 2014 Atom Union, Inc. Released under the MIT license * * Date: Feb 11, 2014 */ Class.forName({ name: "class js.util.Iterator extends Object", "private _element": null, "private _cursor": 0, "private _lastRet": -1, Iterator: function(element) { this._element = element || []; }, hasNext: function() { return this._cursor < this._element.size(); }, next: function() { try { var next = this._element.get(this._cursor); this._lastRet = this._cursor++; return next; } catch (e) { throw new js.lang.IndexOutOfBoundsException("Index: " + this._cursor + ", Size: " + this._element.size() + ",Message:" + e.getMessage()); } }, remove: function() { if (this._lastRet === -1) throw new js.lang.IllegalStateException(); try { this._element.removeAt(this._lastRet); if (this._lastRet < this._cursor) this._cursor--; this._lastRet = -1; } catch (e) { throw new js.lang.IndexOutOfBoundsException(); } } });
ctripcorp/tars
tars/surface/static/jre/src/main/js/js/util/Iterator.js
JavaScript
apache-2.0
1,103
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.application.options.colors; import com.intellij.ui.ListScrollingUtil; import com.intellij.util.EventDispatcher; import javax.swing.*; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.HashSet; import java.util.Set; public class OptionsPanelImpl extends JPanel implements OptionsPanel { private final JList myOptionsList; private final ColorAndFontDescriptionPanel myOptionsPanel; private final ColorAndFontOptions myOptions; private final SchemesPanel mySchemesProvider; private final String myCategoryName; private final EventDispatcher<ColorAndFontSettingsListener> myDispatcher = EventDispatcher.create(ColorAndFontSettingsListener.class); public OptionsPanelImpl(ColorAndFontDescriptionPanel optionsPanel, ColorAndFontOptions options, SchemesPanel schemesProvider, String categoryName) { super(new BorderLayout()); myOptions = options; mySchemesProvider = schemesProvider; myCategoryName = categoryName; optionsPanel.addActionListener(new ActionListener(){ public void actionPerformed(final ActionEvent e) { myDispatcher.getMulticaster().settingsChanged(); } }); myOptionsList = new JList(); myOptionsList.addListSelectionListener(new ListSelectionListener() { public void valueChanged(ListSelectionEvent e) { if 
(!mySchemesProvider.areSchemesLoaded()) return; processListValueChanged(); } }); myOptionsList.setCellRenderer(new DefaultListCellRenderer(){ public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) { Component component = super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus); if (value instanceof ColorAndFontDescription) { setIcon(((ColorAndFontDescription)value).getIcon()); setToolTipText(((ColorAndFontDescription)value).getToolTip()); } return component; } }); myOptionsList.setModel(new DefaultListModel()); myOptionsList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); JScrollPane scrollPane = new JScrollPane(myOptionsList); scrollPane.setPreferredSize(new Dimension(230, 60)); JPanel north = new JPanel(new BorderLayout()); north.add(scrollPane, BorderLayout.WEST); north.add(optionsPanel, BorderLayout.CENTER); myOptionsPanel = optionsPanel; add(north, BorderLayout.NORTH); } public void addListener(ColorAndFontSettingsListener listener) { myDispatcher.addListener(listener); } private void processListValueChanged() { Object selectedValue = myOptionsList.getSelectedValue(); ColorAndFontDescription description = (ColorAndFontDescription)selectedValue; ColorAndFontDescriptionPanel optionsPanel = myOptionsPanel; if (description == null) { optionsPanel.resetDefault(); return; } optionsPanel.reset(description); myDispatcher.getMulticaster().selectedOptionChanged(description); } private void fillOptionsList() { int selIndex = myOptionsList.getSelectedIndex(); DefaultListModel listModel = (DefaultListModel)myOptionsList.getModel(); listModel.removeAllElements(); EditorSchemeAttributeDescriptor[] descriptions = myOptions.getCurrentDescriptions(); for (EditorSchemeAttributeDescriptor description : descriptions) { if (description.getGroup().equals(myCategoryName)) { listModel.addElement(description); } } if (selIndex >= 0) { myOptionsList.setSelectedIndex(selIndex); } 
ListScrollingUtil.ensureSelectionExists(myOptionsList); Object selected = myOptionsList.getSelectedValue(); if (selected instanceof EditorSchemeAttributeDescriptor) { myDispatcher.getMulticaster().selectedOptionChanged(selected); } } public JPanel getPanel() { return this; } public void updateOptionsList() { fillOptionsList(); processListValueChanged(); } public Runnable showOption(final String option) { DefaultListModel model = (DefaultListModel)myOptionsList.getModel(); for (int i = 0; i < model.size(); i++) { Object o = model.get(i); if (o instanceof EditorSchemeAttributeDescriptor) { String type = ((EditorSchemeAttributeDescriptor)o).getType(); if (type.toLowerCase().contains(option.toLowerCase())) { final int i1 = i; return new Runnable() { public void run() { ListScrollingUtil.selectItem(myOptionsList, i1); } }; } } } return null; } public void applyChangesToScheme() { Object selectedValue = myOptionsList.getSelectedValue(); if (selectedValue instanceof ColorAndFontDescription) { myOptionsPanel.apply((ColorAndFontDescription)selectedValue,myOptions.getSelectedScheme()); } } public void selectOption(final String typeToSelect) { DefaultListModel model = (DefaultListModel)myOptionsList.getModel(); for (int i = 0; i < model.size(); i++) { Object o = model.get(i); if (o instanceof EditorSchemeAttributeDescriptor) { if (typeToSelect.equals(((EditorSchemeAttributeDescriptor)o).getType())) { ListScrollingUtil.selectItem(myOptionsList, i); return; } } } } public Set<String> processListOptions() { HashSet<String> result = new HashSet<String>(); EditorSchemeAttributeDescriptor[] descriptions = myOptions.getCurrentDescriptions(); for (EditorSchemeAttributeDescriptor description : descriptions) { if (description.getGroup().equals(myCategoryName)) { result.add(description.toString()); } } return result; } }
jexp/idea2
platform/lang-impl/src/com/intellij/application/options/colors/OptionsPanelImpl.java
Java
apache-2.0
6,502
//simple GUI for the Matching program package compare; //importing requird libraries import java.awt.BorderLayout; import java.awt.FlowLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import javax.swing.JButton; import javax.swing.JComboBox; import javax.swing.JFileChooser; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JTextField; public class FilePicker extends JPanel { //declaring required variables private String textFieldLabel; private String buttonLabel; private JLabel label; public static JTextField textField; private JButton browseButton; private JFileChooser fileChooser; public static String fileName, outputDestination, filePath; private int mode; public static final int MODE_OPEN = 1; public static final int MODE_SAVE = 2; public static JLabel categoryLabel; //Constructor for the UI public FilePicker(String textFieldLabel, String buttonLabel) { this.textFieldLabel = textFieldLabel; this.buttonLabel = buttonLabel; fileChooser = new JFileChooser(); label = new JLabel(textFieldLabel); textField = new JTextField(30); browseButton = new JButton(buttonLabel); browseButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent evt) { browseButtonActionPerformed(evt); } }); add(label); add(textField); add(browseButton); } //browse button click event private void browseButtonActionPerformed(ActionEvent evt) { if (mode == MODE_OPEN) { if (fileChooser.showOpenDialog(this) == JFileChooser.APPROVE_OPTION) { textField.setText(fileChooser.getSelectedFile().getAbsolutePath()); } } else if (mode == MODE_SAVE) { if (fileChooser.showSaveDialog(this) == JFileChooser.APPROVE_OPTION) { textField.setText(fileChooser.getSelectedFile().getAbsolutePath()); } } this.filePath = getSelectedFilePath(); } //adding required filter for browsing files public void addFileTypeFilter(String extension, String description) { FileTypeFilter filter = new FileTypeFilter(extension, description); 
fileChooser.addChoosableFileFilter(filter); } //browsing mode public void setMode(int mode) { this.mode = mode; } //get file path of the selected file public static String getSelectedFilePath() { return textField.getText(); } //get the file Chooser public JFileChooser getFileChooser() { return this.fileChooser; } }
nowshad-sust/ImageMatching-RGB
src/compare/FilePicker.java
Java
apache-2.0
2,982
/** * Copyright 2013 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.bushstar.htmlcoinj.jni; import com.bushstar.htmlcoinj.core.ECKey; import com.bushstar.htmlcoinj.core.Transaction; import com.bushstar.htmlcoinj.core.Wallet; import com.bushstar.htmlcoinj.core.WalletEventListener; import com.bushstar.htmlcoinj.script.Script; import java.math.BigInteger; import java.util.List; /** * An event listener that relays events to a native C++ object. A pointer to that object is stored in * this class using JNI on the native side, thus several instances of this can point to different actual * native implementations. */ public class NativeWalletEventListener implements WalletEventListener { public long ptr; @Override public native void onCoinsReceived(Wallet wallet, Transaction tx, BigInteger prevBalance, BigInteger newBalance); @Override public native void onCoinsSent(Wallet wallet, Transaction tx, BigInteger prevBalance, BigInteger newBalance); @Override public native void onReorganize(Wallet wallet); @Override public native void onTransactionConfidenceChanged(Wallet wallet, Transaction tx); @Override public native void onWalletChanged(Wallet wallet); @Override public native void onKeysAdded(Wallet wallet, List<ECKey> keys); @Override public native void onScriptsAdded(Wallet wallet, List<Script> scripts); }
machado-rev/htmlcoinj
core/src/main/java/com/bushstar/htmlcoinj/jni/NativeWalletEventListener.java
Java
apache-2.0
1,933
using System; using System.Collections.Generic; namespace Enterprise.Invoicing.Entities.Models { public partial class DelegateOrderDetail { public DelegateOrderDetail() { this.DelegateSendDetails = new List<DelegateSendDetail>(); } public int detailSn { get; set; } public string delegateNo { get; set; } public string materialNo { get; set; } public decimal amount { get; set; } public decimal price { get; set; } public decimal sendAmount { get; set; } public decimal backAmount { get; set; } public string remark { get; set; } public virtual ICollection<DelegateSendDetail> DelegateSendDetails { get; set; } } }
amzulin/dlerp
Enterprise.Invoicing.Entities/Models/DelegateOrderDetail.cs
C#
apache-2.0
737
import React from 'react'; import { within, userEvent } from '@storybook/testing-library'; import { action } from '@storybook/addon-actions'; import { Badge } from '@talend/react-components'; import { useTranslation } from 'react-i18next'; import set from 'lodash/set'; import cloneDeep from 'lodash/cloneDeep'; import times from 'lodash/times'; import FacetedSearch from '../src'; import { FacetedSearchIcon } from '../src/components'; import { BadgeFacetedProvider } from '../src/components/context/badgeFaceted.context'; import { BadgesGenerator } from '../src/components/BadgesGenerator'; import { createBadgesDict, getBadgesFromDict } from '../src/dictionary/badge.dictionary'; import { badgeConnectionType, badgeName, badgeConnectionName, badgeAuthor, badgeAll, badgePrice, badgeValid, badgeEmpty, badgeInvalid, badgeTags, badgeCreationDate, badgeWithVeryLongName, badgeEnumWithLotOfValues, badgeTextAsCategory, badgeTextAsCustomAttribute, badgeEnumsAsCustomAttribute, badgePriceAsCustomAttribute, badgeEmptyLabel, } from './badgesDefinitions'; const badgesDefinitions = [ badgeAll, badgeName, badgeConnectionName, badgeAuthor, badgeConnectionType, badgeTags, badgePrice, badgeValid, badgeEmpty, badgeInvalid, badgeCreationDate, ]; const callbacks = { getTags: () => new Promise(resolve => setTimeout(resolve, 2000, [ 'clean', 'production', 'last chunk', 'salesforce', 'outdated', 'extracted', 'security', 'in processing', 'deep learning', 'sql', 'cluster', 'visualization', 'analytics', 'users', 'warehouse', 'api', ]), ), }; const badgesFaceted = { badges: [ { properties: { attribute: 'connection.type', initialOperatorOpened: false, initialValueOpened: false, label: 'Connection Type', operator: { label: 'In', name: 'in', }, operators: [ { label: 'In', name: 'in', }, ], type: 'checkbox', value: [ { id: 'amazon_s3', label: 'Amazon S3', checked: true, }, ], }, metadata: { badgePerFacet: '1', entitiesPerBadge: 'N', values: [ { id: 'amazon_s3', label: 'Amazon S3' }, { id: 'hdfs', label: 
'HDFS' }, { id: 'kafka', label: 'Kafka' }, { id: 'localcon', label: 'Local connection' }, { id: 'salesforce', label: 'Salesforce' }, { id: 'aws_kinesis', label: 'AWS kinesis' }, ], operators: ['in'], badgeId: 'connection.type-9f0e5bc7-c687-4198-9635-d0fc7724dfd1', isInCreation: false, }, }, ], }; const badgesWithAll = { badges: [ { properties: { attribute: 'all', initialOperatorOpened: false, initialValueOpened: false, label: 'All', operator: { label: 'Contains', name: 'containsIgnoreCase', iconName: 'contains' }, operators: [], type: 'text', value: 'test', }, metadata: { isAvailableForFacetList: false, badgePerFacet: 'N', entitiesPerBadge: '1', operators: ['containsIgnoreCase'], badgeId: 'all-b6c04e3d-1d72-4aca-9565-09d206f76d88', isInCreation: false, }, }, ], }; export default { title: 'Faceted search', component: FacetedSearch.Faceted, parameters: { docs: { description: { component: 'Faceted search is a technique that involves augmenting traditional search techniques with a faceted navigation system, allowing users to narrow down search results by applying multiple filters based on faceted classification of the items. 
The user can look for any value, even if the field is not currently visible.', }, }, }, decorators: [ (Story, context) => ( <div> <style> {` #talend-pie-charts path[class^='ti-slice-'] { fill: #C6C6C6; } #talend-pie-charts path.ti-slice-right { fill: currentColor; } .tc-badge-slider-form .invalid { color: #EA8330; } .tc-badge-slider-form .valid { color: #82BD41; } .tc-badge-slider-form .empty { color: #202020; } `} </style> <Story {...context} /> </div> ), ], }; export const Default = () => ( <FacetedSearch.Faceted id="my-faceted-search"> {currentFacetedMode => (currentFacetedMode === FacetedSearch.constants.FACETED_MODE.ADVANCED && ( <FacetedSearch.AdvancedSearch onSubmit={action('onSubmit')} /> )) || (currentFacetedMode === FacetedSearch.constants.FACETED_MODE.BASIC && ( <FacetedSearch.BasicSearch badgesDefinitions={badgesDefinitions} callbacks={callbacks} onSubmit={action('onSubmit')} /> )) } </FacetedSearch.Faceted> ); export const IconDefaultActiveAndLoading = () => ( <div style={{ display: 'flex', gap: '1rem' }}> <div> <FacetedSearchIcon loading onClick={action('onClick')} /> </div> <div> <FacetedSearchIcon active onClick={action('onClick')} /> </div> <div> <FacetedSearchIcon onClick={action('onClick')} /> </div> </div> ); export const Initialized = () => ( <FacetedSearch.Faceted id="my-faceted-search"> {currentFacetedMode => (currentFacetedMode === FacetedSearch.constants.FACETED_MODE.ADVANCED && ( <FacetedSearch.AdvancedSearch onSubmit={action('onSubmit')} /> )) || (currentFacetedMode === FacetedSearch.constants.FACETED_MODE.BASIC && ( <FacetedSearch.BasicSearch badgesDefinitions={badgesDefinitions} badgesFaceted={badgesFaceted} onSubmit={action('onSubmit')} callbacks={callbacks} /> )) } </FacetedSearch.Faceted> ); export const InitializedWithABadgeWhichIsNotVisibleInTheList = () => ( <FacetedSearch.Faceted id="my-faceted-search"> {currentFacetedMode => (currentFacetedMode === FacetedSearch.constants.FACETED_MODE.ADVANCED && ( <FacetedSearch.AdvancedSearch 
onSubmit={action('onSubmit')} /> )) || (currentFacetedMode === FacetedSearch.constants.FACETED_MODE.BASIC && ( <FacetedSearch.BasicSearch badgesDefinitions={badgesDefinitions} badgesFaceted={badgesWithAll} callbacks={callbacks} onSubmit={action('onSubmit')} /> )) } </FacetedSearch.Faceted> ); export const Colored = () => ( <FacetedSearch.Faceted id="my-faceted-search"> {currentFacetedMode => (currentFacetedMode === FacetedSearch.constants.FACETED_MODE.ADVANCED && ( <FacetedSearch.AdvancedSearch onSubmit={action('onSubmit')} /> )) || (currentFacetedMode === FacetedSearch.constants.FACETED_MODE.BASIC && ( <FacetedSearch.BasicSearch badgesDefinitions={badgesDefinitions} badgesFaceted={set( cloneDeep(badgesFaceted), 'badges[0].properties.displayType', Badge.TYPES.VALUE, )} onSubmit={action('onSubmit')} callbacks={callbacks} /> )) } </FacetedSearch.Faceted> ); export const WithSpecialChars = () => { const { t } = useTranslation(); const badgesDictionary = createBadgesDict(); const badge = cloneDeep(badgesFaceted.badges[0]); Object.assign(badge.properties, { value: ' text ', type: 'text', displayType: Badge.TYPES.PATTERN, }); return ( <BadgeFacetedProvider value={{}}> <BadgesGenerator badges={[badge]} badgesDictionary={badgesDictionary} getBadgeFromDict={getBadgesFromDict} t={t} /> </BadgeFacetedProvider> ); }; export const DatePicker = () => { const { t } = useTranslation(); const badgesDictionary = createBadgesDict(); const badge = cloneDeep(badgesFaceted.badges[0]); Object.assign(badge.properties, { value: Date.now(), type: 'date', }); return ( <BadgeFacetedProvider value={{}}> <BadgesGenerator badges={[badge]} badgesDictionary={badgesDictionary} getBadgeFromDict={getBadgesFromDict} t={t} /> </BadgeFacetedProvider> ); }; export const ReadOnly = () => { const { t } = useTranslation(); const badgesDictionary = createBadgesDict(); return ( <BadgeFacetedProvider value={{}}> <BadgesGenerator badges={[ set(cloneDeep(badgesFaceted.badges[0]), 'properties.readOnly', true), 
set(cloneDeep(badgesFaceted.badges[0]), 'properties.removable', false), ]} badgesDictionary={badgesDictionary} getBadgeFromDict={getBadgesFromDict} t={t} /> </BadgeFacetedProvider> ); }; export const WithExternalState = () => { const [state, setState] = React.useState(badgesFaceted); const onSubmit = React.useCallback( (_, badges) => setState(previousState => ({ ...previousState, badges })), [setState], ); return ( <div> <button onClick={() => setState(badgesFaceted)}>Reset state</button> <FacetedSearch.Faceted id="my-faceted-search"> {currentFacetedMode => (currentFacetedMode === FacetedSearch.constants.FACETED_MODE.ADVANCED && ( <FacetedSearch.AdvancedSearch onSubmit={action('onSubmit')} /> )) || (currentFacetedMode === FacetedSearch.constants.FACETED_MODE.BASIC && ( <FacetedSearch.BasicSearch badgesDefinitions={badgesDefinitions} badgesFaceted={state} onSubmit={onSubmit} callbacks={callbacks} /> )) } </FacetedSearch.Faceted> </div> ); }; export const WithoutLabelOrOperatorButton = () => ( <FacetedSearch.Faceted id="my-faceted-search"> <FacetedSearch.BasicSearch badgesDefinitions={badgesDefinitions} badgesFaceted={set(cloneDeep(badgesFaceted), 'badges[0].properties.label', '')} onSubmit={action('onSubmit')} callbacks={callbacks} /> </FacetedSearch.Faceted> ); const lotsOfBadgesDefinitions = Array(50).fill(badgeName); export const BasicSearchWithLotOfBadgeDefinitions = { render: () => ( <FacetedSearch.Faceted id="my-faceted-search"> <FacetedSearch.BasicSearch badgesDefinitions={lotsOfBadgesDefinitions} onSubmit={action('onSubmit')} callbacks={callbacks} /> </FacetedSearch.Faceted> ), play: async ({ canvasElement }) => { await userEvent.type(within(canvasElement).getByRole('searchbox'), 'lorem ipsum'); }, }; export const BasicSearchWithBadgeWithVeryLongName = { render: () => ( <FacetedSearch.Faceted id="my-faceted-search"> <FacetedSearch.BasicSearch badgesDefinitions={[badgeWithVeryLongName, badgeConnectionType, badgeName, badgePrice]} onSubmit={action('onSubmit')} 
callbacks={callbacks} /> </FacetedSearch.Faceted> ), play: async ({ canvasElement }) => { await userEvent.type(within(canvasElement).getByRole('searchbox'), 'lorem ipsum'); }, }; export const BasicSearchInABadgeWithALotOfValues = () => ( <FacetedSearch.Faceted id="my-faceted-search"> <FacetedSearch.BasicSearch badgesDefinitions={[badgeEnumWithLotOfValues]} onSubmit={action('onSubmit')} callbacks={callbacks} /> </FacetedSearch.Faceted> ); export const BasicSearchWithBadgesCategories = () => ( <FacetedSearch.Faceted id="my-faceted-search"> <FacetedSearch.BasicSearch badgesDefinitions={[ badgeConnectionType, badgeName, badgePrice, badgeTags, badgeTextAsCustomAttribute, badgePriceAsCustomAttribute, badgeEnumsAsCustomAttribute, ...times(2, () => badgeTextAsCategory), ]} onSubmit={action('onSubmit')} callbacks={callbacks} /> </FacetedSearch.Faceted> ); export const BasicSearchWithAnEmptyLabelBadge = () => ( <FacetedSearch.Faceted id="my-faceted-search"> <FacetedSearch.BasicSearch badgesDefinitions={[badgeName, badgeEmptyLabel]} onSubmit={action('onSubmit')} callbacks={callbacks} /> </FacetedSearch.Faceted> );
Talend/ui
packages/faceted-search/stories/facetedSearch.stories.js
JavaScript
apache-2.0
11,370
/* * ../../../..//localization/cy/FontWarnings.js * * Copyright (c) 2009-2018 The MathJax Consortium * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /************************************************************* * * MathJax/localization/cy/FontWarnings.js * * Copyright (c) 2009-2018 The MathJax Consortium * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ MathJax.Localization.addTranslation("cy", "FontWarnings", { version: "2.7.5", isLoaded: true, strings: {} }); MathJax.Ajax.loadComplete("[MathJax]/localization/cy/FontWarnings.js");
GerHobbelt/MathJax
localization/cy/FontWarnings.js
JavaScript
apache-2.0
1,604
/** * Copyright 2011 Pedro Ribeiro * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jalphanode.scheduler; import java.util.Date; /** * Iterator which builds the next execution time. * * @author ribeirux * @version $Revision$ */ public interface ScheduleIterator { /** * Builds the next execution time according the specified {@code date}. * * @param date the date to begin the search for the next valid date * * @return the next execution date */ Date next(Date date); }
ribeirux/jalphanode
core/src/main/java/org/jalphanode/scheduler/ScheduleIterator.java
Java
apache-2.0
1,080
// Copyright (c) 2014 ikawaha. // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file // except in compliance with the License. You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software distributed under the // License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, // either express or implied. See the License for the specific language governing permissions // and limitations under the License. package kagome import ( "fmt" "strings" ) // Token represents a morph of a sentence. type Token struct { Id int Class NodeClass Start int End int Surface string dic *Dic udic *UserDic } // Features returns contents of a token. func (t Token) Features() (features []string) { switch t.Class { case DUMMY: return case KNOWN: features = t.dic.Contents[t.Id] case UNKNOWN: features = sysDic.UnkContents[t.Id] case USER: // XXX pos := t.udic.Contents[t.Id].Pos tokens := strings.Join(t.udic.Contents[t.Id].Tokens, "/") yomi := strings.Join(t.udic.Contents[t.Id].Yomi, "/") features = append(features, pos, tokens, yomi) } return } // String returns a string representation of a token. func (t Token) String() string { return fmt.Sprintf("%v(%v, %v)%v[%v]", t.Surface, t.Start, t.End, t.Class, t.Id) }
sidestepism/kagome
token.go
GO
apache-2.0
1,434
/* * (c)2016-2017, Cris Luengo. * Based on original DIPlib code: (c)1995-2014, Delft University of Technology. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "diplib.h" #include "diplib/statistics.h" #include "diplib/math.h" #include "diplib/framework.h" #include "diplib/overload.h" namespace dip { namespace { class CountLineFilter : public Framework::ScanLineFilter { public: virtual dip::uint GetNumberOfOperations( dip::uint, dip::uint, dip::uint ) override { return 2; } virtual void Filter( Framework::ScanLineFilterParameters const& params ) override { bin const* in = static_cast< bin const* >( params.inBuffer[ 0 ].buffer ); dip::uint count = 0; auto bufferLength = params.bufferLength; auto inStride = params.inBuffer[ 0 ].stride; if( params.inBuffer.size() > 1 ) { // If there's two input buffers, we have a mask image. auto maskStride = params.inBuffer[ 1 ].stride; bin const* mask = static_cast< bin const* >( params.inBuffer[ 1 ].buffer ); for( dip::uint ii = 0; ii < bufferLength; ++ii ) { if( *mask && *in ) { ++count; } in += inStride; mask += maskStride; } } else { // Otherwise we don't. 
for( dip::uint ii = 0; ii < bufferLength; ++ii ) { if( *in ) { ++count; } in += inStride; } } counts_[ params.thread ] += count; } virtual void SetNumberOfThreads( dip::uint threads ) override { counts_.resize( threads ); } dip::uint GetResult() { dip::uint out = counts_[ 0 ]; for( dip::uint ii = 1; ii < counts_.size(); ++ii ) { out += counts_[ ii ]; } return out; } private: std::vector< dip::uint > counts_; }; } // namespace dip::uint Count( Image const& in, Image const& mask ) { DIP_THROW_IF( !in.IsForged(), E::IMAGE_NOT_FORGED ); DIP_THROW_IF( !in.IsScalar(), E::IMAGE_NOT_SCALAR ); CountLineFilter scanLineFilter; DIP_STACK_TRACE_THIS( Framework::ScanSingleInput( in, mask, DT_BIN, scanLineFilter )); return scanLineFilter.GetResult(); } namespace { class MaxMinPixelLineFilter : public Framework::ScanLineFilter { public: virtual UnsignedArray GetResult() = 0; }; template< typename TPI > class MaxPixelLineFilter : public MaxMinPixelLineFilter { public: virtual dip::uint GetNumberOfOperations( dip::uint, dip::uint, dip::uint ) override { return 2; } virtual void Filter( Framework::ScanLineFilterParameters const& params ) override { TPI const* in = static_cast< TPI const* >( params.inBuffer[ 0 ].buffer ); UnsignedArray coord( params.position.size() ); TPI value = std::numeric_limits< TPI >::lowest(); auto bufferLength = params.bufferLength; auto inStride = params.inBuffer[ 0 ].stride; if( params.inBuffer.size() > 1 ) { // If there's two input buffers, we have a mask image. 
auto maskStride = params.inBuffer[ 1 ].stride; bin const* mask = static_cast< bin const* >( params.inBuffer[ 1 ].buffer ); if( first_ ) { for( dip::uint ii = 0; ii < bufferLength; ++ii ) { if( *mask && ( *in > value )) { value = *in; coord = params.position; coord[ params.dimension ] += ii; } in += inStride; mask += maskStride; } } else { for( dip::uint ii = 0; ii < bufferLength; ++ii ) { if( *mask && ( *in >= value )) { value = *in; coord = params.position; coord[ params.dimension ] += ii; } in += inStride; mask += maskStride; } } } else { // Otherwise we don't. if( first_ ) { for( dip::uint ii = 0; ii < bufferLength; ++ii ) { if( *in > value ) { value = *in; coord = params.position; coord[ params.dimension ] += ii; } in += inStride; } } else { for( dip::uint ii = 0; ii < bufferLength; ++ii ) { if( *in >= value ) { value = *in; coord = params.position; coord[ params.dimension ] += ii; } in += inStride; } } } if( coord_[ params.thread ].empty() ) { // Ensure we always have something in `coord_`, even if the whole image is NaN. value_[ params.thread ] = value; coord_[ params.thread ] = coord; } else { if( first_ ) { if( value > value_[ params.thread ] ) { value_[ params.thread ] = value; coord_[ params.thread ] = coord; } } else { if( value >= value_[ params.thread ] ) { value_[ params.thread ] = value; coord_[ params.thread ] = coord; } } } } virtual void SetNumberOfThreads( dip::uint threads ) override { coord_.resize( threads ); value_.resize( threads, std::numeric_limits< TPI >::lowest() ); } MaxPixelLineFilter( bool first ) : first_( first ) {} virtual UnsignedArray GetResult() override { dip::uint index = 0; for( dip::uint ii = 1; ii < coord_.size(); ++ii ) { if( first_ ? 
value_[ ii ] > value_[ index ] : value_[ ii ] >= value_[ index ] ) { index = ii; } } return coord_[ index ]; } private: std::vector< UnsignedArray > coord_; std::vector< TPI > value_; bool first_; }; template< typename TPI > class MinPixelLineFilter : public MaxMinPixelLineFilter { public: virtual dip::uint GetNumberOfOperations( dip::uint, dip::uint, dip::uint ) override { return 2; } virtual void Filter( Framework::ScanLineFilterParameters const& params ) override { TPI const* in = static_cast< TPI const* >( params.inBuffer[ 0 ].buffer ); UnsignedArray coord( params.position.size() ); TPI value = std::numeric_limits< TPI >::max(); auto bufferLength = params.bufferLength; auto inStride = params.inBuffer[ 0 ].stride; if( params.inBuffer.size() > 1 ) { // If there's two input buffers, we have a mask image. auto maskStride = params.inBuffer[ 1 ].stride; bin const* mask = static_cast< bin const* >( params.inBuffer[ 1 ].buffer ); if( first_ ) { for( dip::uint ii = 0; ii < bufferLength; ++ii ) { if( *mask && ( *in < value )) { value = *in; coord = params.position; coord[ params.dimension ] += ii; } in += inStride; mask += maskStride; } } else { for( dip::uint ii = 0; ii < bufferLength; ++ii ) { if( *mask && ( *in <= value )) { value = *in; coord = params.position; coord[ params.dimension ] += ii; } in += inStride; mask += maskStride; } } } else { // Otherwise we don't. if( first_ ) { for( dip::uint ii = 0; ii < bufferLength; ++ii ) { if( *in < value ) { value = *in; coord = params.position; coord[ params.dimension ] += ii; } in += inStride; } } else { for( dip::uint ii = 0; ii < bufferLength; ++ii ) { if( *in <= value ) { value = *in; coord = params.position; coord[ params.dimension ] += ii; } in += inStride; } } } if( coord_[ params.thread ].empty() ) { // Ensure we always have something in `coord_`, even if the whole image is NaN. 
value_[ params.thread ] = value; coord_[ params.thread ] = coord; } else { if( first_ ) { if( value < value_[ params.thread ] ) { value_[ params.thread ] = value; coord_[ params.thread ] = coord; } } else { if( value <= value_[ params.thread ] ) { value_[ params.thread ] = value; coord_[ params.thread ] = coord; } } } } virtual void SetNumberOfThreads( dip::uint threads ) override { coord_.resize( threads ); value_.resize( threads, std::numeric_limits< TPI >::max() ); } MinPixelLineFilter( bool first ) : first_( first ) {} virtual UnsignedArray GetResult() override { dip::uint index = 0; for( dip::uint ii = 1; ii < coord_.size(); ++ii ) { if( first_ ? value_[ ii ] < value_[ index ] : value_[ ii ] <= value_[ index ] ) { index = ii; } } return coord_[ index ]; } private: std::vector< UnsignedArray > coord_; std::vector< TPI > value_; bool first_; }; } // namespace UnsignedArray MaximumPixel( Image const& in, Image const& mask, String const& positionFlag ) { DIP_THROW_IF( !in.IsForged(), E::IMAGE_NOT_FORGED ); DIP_THROW_IF( !in.IsScalar(), E::IMAGE_NOT_SCALAR ); bool first; DIP_STACK_TRACE_THIS( first = BooleanFromString( positionFlag, S::FIRST, S::LAST )); DataType dataType = DataType::SuggestReal( in.DataType() ); std::unique_ptr< MaxMinPixelLineFilter > scanLineFilter; DIP_OVL_NEW_REAL( scanLineFilter, MaxPixelLineFilter, ( first ), dataType ); DIP_STACK_TRACE_THIS( Framework::ScanSingleInput( in, mask, dataType, *scanLineFilter, Framework::ScanOption::NeedCoordinates )); return scanLineFilter->GetResult(); } UnsignedArray MinimumPixel( Image const& in, Image const& mask, String const& positionFlag ) { DIP_THROW_IF( !in.IsForged(), E::IMAGE_NOT_FORGED ); DIP_THROW_IF( !in.IsScalar(), E::IMAGE_NOT_SCALAR ); bool first; DIP_STACK_TRACE_THIS( first = BooleanFromString( positionFlag, S::FIRST, S::LAST )); DataType dataType = DataType::SuggestReal( in.DataType() ); std::unique_ptr< MaxMinPixelLineFilter > scanLineFilter; DIP_OVL_NEW_REAL( scanLineFilter, 
MinPixelLineFilter, ( first ), dataType ); DIP_STACK_TRACE_THIS( Framework::ScanSingleInput( in, mask, dataType, *scanLineFilter, Framework::ScanOption::NeedCoordinates )); return scanLineFilter->GetResult(); } namespace { template< typename TPI > class CumSumFilter : public Framework::SeparableLineFilter { public: virtual dip::uint GetNumberOfOperations( dip::uint lineLength, dip::uint, dip::uint, dip::uint ) override { return lineLength; } virtual void Filter( Framework::SeparableLineFilterParameters const& params ) override { TPI* in = static_cast< TPI* >( params.inBuffer.buffer ); dip::uint length = params.inBuffer.length; dip::sint inStride = params.inBuffer.stride; TPI* out = static_cast< TPI* >( params.outBuffer.buffer ); dip::sint outStride = params.outBuffer.stride; TPI sum = 0; for( dip::uint ii = 0; ii < length; ++ii ) { sum += *in; *out = sum; in += inStride; out += outStride; } } }; } // namespace void CumulativeSum( Image const& in, Image const& mask, Image& out, BooleanArray const& process ) { DIP_THROW_IF( !in.IsForged(), E::IMAGE_NOT_FORGED ); DIP_THROW_IF( in.Dimensionality() < 1, E::DIMENSIONALITY_NOT_SUPPORTED ); DataType dataType = DataType::SuggestFlex( in.DataType() ); std::unique_ptr< Framework::SeparableLineFilter > lineFilter; DIP_OVL_NEW_FLEX( lineFilter, CumSumFilter, (), dataType ); if( mask.IsForged() ) { Select( in, Image( 0, dataType ), mask, out ); DIP_STACK_TRACE_THIS( Framework::Separable( out, out, dataType, dataType, process, { 0 }, {}, *lineFilter, Framework::SeparableOption::AsScalarImage )); } else { DIP_STACK_TRACE_THIS( Framework::Separable( in, out, dataType, dataType, process, { 0 }, {}, *lineFilter, Framework::SeparableOption::AsScalarImage )); } } namespace { class MaximumAndMinimumLineFilterBase : public Framework::ScanLineFilter { public: virtual MinMaxAccumulator GetResult() = 0; }; template< typename TPI > class MaximumAndMinimumLineFilter : public MaximumAndMinimumLineFilterBase { public: virtual dip::uint 
GetNumberOfOperations( dip::uint, dip::uint, dip::uint ) override { return 3; } virtual void Filter( Framework::ScanLineFilterParameters const& params ) override { TPI const* in = static_cast< TPI const* >( params.inBuffer[ 0 ].buffer ); MinMaxAccumulator vars; auto bufferLength = params.bufferLength; auto inStride = params.inBuffer[ 0 ].stride; if( params.inBuffer.size() > 1 ) { // If there's two input buffers, we have a mask image. auto maskStride = params.inBuffer[ 1 ].stride; bin const* mask = static_cast< bin const* >( params.inBuffer[ 1 ].buffer ); for( dip::uint ii = 0; ii < bufferLength; ++ii ) { if( *mask ) { vars.Push( static_cast< dfloat >( *in )); } in += inStride; mask += maskStride; } } else { // Otherwise we don't. dip::uint ii = 0; for( ; ii < bufferLength - 1; ii += 2 ) { TPI v = *in; in += inStride; vars.Push( static_cast< dfloat >( v ), static_cast< dfloat >( *in )); in += inStride; } if( ii < bufferLength ) { vars.Push( static_cast< dfloat >( *in )); } } accArray_[ params.thread ] += vars; } virtual void SetNumberOfThreads( dip::uint threads ) override { accArray_.resize( threads ); } virtual MinMaxAccumulator GetResult() override { MinMaxAccumulator out = accArray_[ 0 ]; for( dip::uint ii = 1; ii < accArray_.size(); ++ii ) { out += accArray_[ ii ]; } return out; } private: std::vector< MinMaxAccumulator > accArray_; }; } // namespace MinMaxAccumulator MaximumAndMinimum( Image const& in, Image const& mask ) { DIP_THROW_IF( !in.IsForged(), E::IMAGE_NOT_FORGED ); // In case of complex images, separate them as a new dimension. Image c_in = in.QuickCopy(); if( c_in.DataType().IsComplex() ) { c_in.SplitComplex(); // Note that mask will be singleton-expanded, which allows adding dimensions at the end. 
} std::unique_ptr< MaximumAndMinimumLineFilterBase > scanLineFilter; DIP_OVL_NEW_NONCOMPLEX( scanLineFilter, MaximumAndMinimumLineFilter, (), c_in.DataType() ); DIP_STACK_TRACE_THIS( Framework::ScanSingleInput( c_in, mask, c_in.DataType(), *scanLineFilter, Framework::ScanOption::TensorAsSpatialDim )); return scanLineFilter->GetResult(); } namespace { class SampleStatisticsLineFilterBase : public Framework::ScanLineFilter { public: virtual StatisticsAccumulator GetResult() = 0; }; template< typename TPI > class SampleStatisticsLineFilter : public SampleStatisticsLineFilterBase { public: virtual dip::uint GetNumberOfOperations( dip::uint, dip::uint, dip::uint ) override { return 23; } virtual void Filter( Framework::ScanLineFilterParameters const& params ) override { TPI const* in = static_cast< TPI const* >( params.inBuffer[ 0 ].buffer ); StatisticsAccumulator vars; auto bufferLength = params.bufferLength; auto inStride = params.inBuffer[ 0 ].stride; if( params.inBuffer.size() > 1 ) { // If there's two input buffers, we have a mask image. auto maskStride = params.inBuffer[ 1 ].stride; bin const* mask = static_cast< bin const* >( params.inBuffer[ 1 ].buffer ); for( dip::uint ii = 0; ii < bufferLength; ++ii ) { if( *mask ) { vars.Push( static_cast< dfloat >( *in )); } in += inStride; mask += maskStride; } } else { // Otherwise we don't. 
for( dip::uint ii = 0; ii < bufferLength; ++ii ) { vars.Push( static_cast< dfloat >( *in )); in += inStride; } } accArray_[ params.thread ] += vars; } virtual void SetNumberOfThreads( dip::uint threads ) override { accArray_.resize( threads ); } virtual StatisticsAccumulator GetResult() override { StatisticsAccumulator out = accArray_[ 0 ]; for( dip::uint ii = 1; ii < accArray_.size(); ++ii ) { out += accArray_[ ii ]; } return out; } private: std::vector< StatisticsAccumulator > accArray_; }; } // namespace StatisticsAccumulator SampleStatistics( Image const& in, Image const& mask ) { DIP_THROW_IF( !in.IsForged(), E::IMAGE_NOT_FORGED ); std::unique_ptr< SampleStatisticsLineFilterBase > scanLineFilter; DIP_OVL_NEW_REAL( scanLineFilter, SampleStatisticsLineFilter, (), in.DataType() ); DIP_STACK_TRACE_THIS( Framework::ScanSingleInput( in, mask, in.DataType(), *scanLineFilter, Framework::ScanOption::TensorAsSpatialDim )); return scanLineFilter->GetResult(); } namespace { class CovarianceLineFilterBase : public Framework::ScanLineFilter { public: virtual CovarianceAccumulator GetResult() = 0; }; template< typename TPI > class CovarianceLineFilter : public CovarianceLineFilterBase { public: virtual dip::uint GetNumberOfOperations( dip::uint, dip::uint, dip::uint ) override { return 10; } virtual void Filter( Framework::ScanLineFilterParameters const& params ) override { TPI const* in1 = static_cast< TPI const* >( params.inBuffer[ 0 ].buffer ); TPI const* in2 = static_cast< TPI const* >( params.inBuffer[ 1 ].buffer ); CovarianceAccumulator vars; auto bufferLength = params.bufferLength; auto in1Stride = params.inBuffer[ 0 ].stride; auto in2Stride = params.inBuffer[ 1 ].stride; if( params.inBuffer.size() > 2 ) { // If there's three input buffers, we have a mask image. 
auto maskStride = params.inBuffer[ 2 ].stride; bin const* mask = static_cast< bin const* >( params.inBuffer[ 2 ].buffer ); for( dip::uint ii = 0; ii < bufferLength; ++ii ) { if( *mask ) { vars.Push( static_cast< dfloat >( *in1 ), static_cast< dfloat >( *in2 )); } in1 += in1Stride; in2 += in2Stride; mask += maskStride; } } else { // Otherwise we don't. for( dip::uint ii = 0; ii < bufferLength; ++ii ) { vars.Push( static_cast< dfloat >( *in1 ), static_cast< dfloat >( *in2 )); in1 += in1Stride; in2 += in2Stride; } } accArray_[ params.thread ] += vars; } virtual void SetNumberOfThreads( dip::uint threads ) override { accArray_.resize( threads ); } virtual CovarianceAccumulator GetResult() override { CovarianceAccumulator out = accArray_[ 0 ]; for( dip::uint ii = 1; ii < accArray_.size(); ++ii ) { out += accArray_[ ii ]; } return out; } private: std::vector< CovarianceAccumulator > accArray_; }; } // namespace CovarianceAccumulator Covariance( Image const& in1, Image const& in2, Image const& c_mask ) { DIP_THROW_IF( !in1.IsForged() || !in2.IsForged(), E::IMAGE_NOT_FORGED ); DIP_STACK_TRACE_THIS( in1.CompareProperties( in2, Option::CmpProp::AllSizes )); DataType ovlDataType = DataType::SuggestDyadicOperation( in1.DataType(), in2.DataType() ); ImageConstRefArray inar; inar.reserve( 3 ); inar.push_back( in1 ); inar.push_back( in2 ); DataTypeArray inBufT{ ovlDataType, ovlDataType }; Image mask; if( c_mask.IsForged() ) { // If we have a mask, add it to the input array. 
mask = c_mask.QuickCopy(); DIP_START_STACK_TRACE mask.CheckIsMask( in1.Sizes(), Option::AllowSingletonExpansion::DO_ALLOW, Option::ThrowException::DO_THROW ); mask.ExpandSingletonDimensions( in1.Sizes() ); DIP_END_STACK_TRACE inar.push_back( mask ); inBufT.push_back( mask.DataType() ); } ImageRefArray outar{}; std::unique_ptr< CovarianceLineFilterBase > scanLineFilter; DIP_OVL_NEW_REAL( scanLineFilter, CovarianceLineFilter, (), ovlDataType ); DIP_STACK_TRACE_THIS( Framework::Scan( inar, outar, inBufT, {}, {}, {}, *scanLineFilter, Framework::ScanOption::TensorAsSpatialDim )); return scanLineFilter->GetResult(); } namespace { template< typename TPI > std::vector< dip::uint > ComputeRank( void const* ptr, std::vector< dip::uint >& indices ) { // First sort the indices // NOTE!!! The indices must be contiguous, starting at 0, and with max_element(indices) == indices.size()-1. TPI const* data = static_cast< TPI const* >( ptr ); std::sort( indices.begin(), indices.end(), [ & ]( dip::uint const& a, dip::uint const& b ) { return data[ a ] < data[ b ]; } ); // Next find the ranks std::vector< dip::uint > rank( indices.size() ); for( dip::uint ii = 0; ii < indices.size(); ++ii ) { // Identify the equal-valued pixels dip::uint rr = ii + 1; while(( rr < indices.size()) && ( data[ indices[ rr ]] == data[ indices[ ii ]] )) { ++rr; } // Assign the mean rank to all these pixels dip::uint mean = ( rr + ii - 1 ) / 2; for( dip::uint jj = ii; jj < rr; ++jj ) { rank[ indices[ jj ]] = mean; } // Advance to next group of equal-valued pixels ii = rr - 1; } return rank; } std::vector< dip::uint > CreateRankArray( Image const& img ) { DIP_ASSERT( img.HasContiguousData() ); // Create indices array to each sample in the image std::vector< dip::uint > indices( img.Sizes().product() * img.TensorElements() ); std::iota( indices.begin(), indices.end(), dip::uint( 0 )); // Get the rank for each pixel std::vector< dip::uint > rank; DIP_OVL_CALL_ASSIGN_REAL( rank, ComputeRank, ( img.Origin(), 
indices ), img.DataType() ); return rank; } } // namespace dfloat SpearmanRankCorrelation( Image const& in1, Image const& in2, Image const& mask ) { DIP_THROW_IF( !in1.IsForged() || !in2.IsForged(), E::IMAGE_NOT_FORGED ); DIP_STACK_TRACE_THIS( in1.CompareProperties( in2, Option::CmpProp::AllSizes )); // Get the data in normal stride order. We need the data to be contiguous and the two images to have // the same strides. This is a simple way of accomplishing that. Image in1_c; Image in2_c; if( mask.IsForged() ) { DIP_START_STACK_TRACE in1_c = in1.At( mask ); in2_c = in2.At( mask ); DIP_END_STACK_TRACE } else { in1_c = in1.QuickCopy(); in2_c = in2.QuickCopy(); } in1_c.ForceNormalStrides(); // Might copy the data, but if we already copied it (through `mask`) it won't need to, in2_c.ForceNormalStrides(); // so we're guaranteed to copy the image data at most once. // Find the rank for each pixel auto idx1 = CreateRankArray( in1_c ); auto idx2 = CreateRankArray( in2_c ); // Now compute correlation between the two sorted index arrays. // We're not using the cheaper formula because we're not guaranteed a unique sort order (some pixels can have // the same value). 
CovarianceAccumulator vars; for( auto it1 = idx1.begin(), it2 = idx2.begin(); it1 != idx1.end(); ++it1, ++it2 ) { vars.Push( static_cast< dfloat >( *it1 ), static_cast< dfloat >( *it2 )); } return vars.Correlation(); } namespace { class CenterOfMassLineFilterBase : public Framework::ScanLineFilter { public: virtual FloatArray GetResult() = 0; }; template< typename TPI > class CenterOfMassLineFilter : public CenterOfMassLineFilterBase { public: virtual dip::uint GetNumberOfOperations( dip::uint, dip::uint, dip::uint ) override { return nD_ + 1; } virtual void Filter( Framework::ScanLineFilterParameters const& params ) override { TPI const* in = static_cast< TPI const* >( params.inBuffer[ 0 ].buffer ); FloatArray vars( nD_ + 1, 0.0 ); auto bufferLength = params.bufferLength; auto inStride = params.inBuffer[ 0 ].stride; UnsignedArray pos = params.position; dip::uint procDim = params.dimension; if( params.inBuffer.size() > 1 ) { // If there's two input buffers, we have a mask image. auto maskStride = params.inBuffer[ 1 ].stride; bin const* mask = static_cast< bin const* >( params.inBuffer[ 1 ].buffer ); for( dip::uint ii = 0; ii < bufferLength; ++ii ) { if( *mask ) { for( dip::uint jj = 0; jj < nD_; ++jj ) { vars[ jj ] += static_cast< dfloat >( pos[ jj ] ) * static_cast< dfloat >( *in ); } vars[ nD_ ] += static_cast< dfloat >( *in ); } in += inStride; mask += maskStride; ++( pos[ procDim ] ); } } else { // Otherwise we don't. 
for( dip::uint ii = 0; ii < bufferLength; ++ii ) { for( dip::uint jj = 0; jj < nD_; ++jj ) { vars[ jj ] += static_cast< dfloat >( pos[ jj ] ) * static_cast< dfloat >( *in ); } vars[ nD_ ] += static_cast< dfloat >( *in ); in += inStride; ++( pos[ procDim ] ); } } accArray_[ params.thread ] += vars; } CenterOfMassLineFilter( dip::uint nD ) : nD_( nD ) {} virtual void SetNumberOfThreads( dip::uint threads ) override { accArray_.resize( threads ); for( dip::uint ii = 0; ii < threads; ++ii ) { accArray_[ ii ].resize( nD_ + 1, 0.0 ); } } virtual FloatArray GetResult() override { FloatArray out = accArray_[ 0 ]; for( dip::uint ii = 1; ii < accArray_.size(); ++ii ) { out += accArray_[ ii ]; } dfloat n = out[ nD_ ]; out.resize( nD_ ); if( n != 0 ) { out /= n; } else { out.fill( 0.0 ); } return out; } private: std::vector< FloatArray > accArray_; // one per thread, each one contains: sum(I*x),sum(I*y),...,sum(I) dip::uint nD_; }; } // namespace FloatArray CenterOfMass( Image const& in, Image const& mask ) { DIP_THROW_IF( !in.IsForged(), E::IMAGE_NOT_FORGED ); DIP_THROW_IF( !in.IsScalar(), E::IMAGE_NOT_SCALAR ); std::unique_ptr< CenterOfMassLineFilterBase > scanLineFilter; DIP_OVL_NEW_NONCOMPLEX( scanLineFilter, CenterOfMassLineFilter, ( in.Dimensionality() ), in.DataType() ); DIP_STACK_TRACE_THIS( Framework::ScanSingleInput( in, mask, in.DataType(), *scanLineFilter, Framework::ScanOption::NeedCoordinates )); return scanLineFilter->GetResult(); } namespace { class MomentsLineFilterBase : public Framework::ScanLineFilter { public: virtual MomentAccumulator GetResult() = 0; }; template< typename TPI > class MomentsLineFilter : public MomentsLineFilterBase { public: virtual dip::uint GetNumberOfOperations( dip::uint, dip::uint, dip::uint ) override { return nD_ * ( nD_ + 1 ) / 2 * 3 + nD_ + 2; } virtual void Filter( Framework::ScanLineFilterParameters const& params ) override { TPI const* in = static_cast< TPI const* >( params.inBuffer[ 0 ].buffer ); MomentAccumulator vars( nD_ 
); auto bufferLength = params.bufferLength; auto inStride = params.inBuffer[ 0 ].stride; FloatArray pos{ params.position }; dip::uint procDim = params.dimension; if( params.inBuffer.size() > 1 ) { // If there's two input buffers, we have a mask image. auto maskStride = params.inBuffer[ 1 ].stride; bin const* mask = static_cast< bin const* >( params.inBuffer[ 1 ].buffer ); for( dip::uint ii = 0; ii < bufferLength; ++ii ) { if( *mask ) { vars.Push( pos, static_cast< dfloat >( *in )); } in += inStride; mask += maskStride; ++( pos[ procDim ] ); } } else { // Otherwise we don't. for( dip::uint ii = 0; ii < bufferLength; ++ii ) { vars.Push( pos, static_cast< dfloat >( *in )); in += inStride; ++( pos[ procDim ] ); } } accArray_[ params.thread ] += vars; } MomentsLineFilter( dip::uint nD ) : nD_( nD ) {} virtual void SetNumberOfThreads( dip::uint threads ) override { accArray_.resize( threads, MomentAccumulator( nD_ )); } virtual MomentAccumulator GetResult() override { MomentAccumulator out = accArray_[ 0 ]; for( dip::uint ii = 1; ii < accArray_.size(); ++ii ) { out += accArray_[ ii ]; } return out; } private: std::vector< MomentAccumulator > accArray_; dip::uint nD_; }; } // namespace MomentAccumulator Moments( Image const& in, Image const& mask ) { DIP_THROW_IF( !in.IsForged(), E::IMAGE_NOT_FORGED ); DIP_THROW_IF( !in.IsScalar(), E::IMAGE_NOT_SCALAR ); std::unique_ptr< MomentsLineFilterBase > scanLineFilter; DIP_OVL_NEW_NONCOMPLEX( scanLineFilter, MomentsLineFilter, ( in.Dimensionality() ), in.DataType() ); DIP_STACK_TRACE_THIS( Framework::ScanSingleInput( in, mask, in.DataType(), *scanLineFilter, Framework::ScanOption::NeedCoordinates )); return scanLineFilter->GetResult(); } } // namespace dip
DIPlib/diplib
src/statistics/statistics.cpp
C++
apache-2.0
31,998
/*! * ${copyright} */ // Provides control sap.ui.commons.Tree. sap.ui.define(['jquery.sap.global', './library', 'sap/ui/core/Control'], function(jQuery, library, Control) { "use strict"; /** * Constructor for a new Tree. * * @param {string} [sId] id for the new control, generated automatically if no id is given * @param {object} [mSettings] initial settings for the new control * * @class * Simple tree to display item in a hierarchical way * @extends sap.ui.core.Control * @version ${version} * * @constructor * @public * @deprecated Since version 1.38. * @alias sap.ui.commons.Tree * @ui5-metamodel This control/element also will be described in the UI5 (legacy) designtime metamodel */ var Tree = Control.extend("sap.ui.commons.Tree", /** @lends sap.ui.commons.Tree.prototype */ { metadata : { library : "sap.ui.commons", properties : { /** * Tree title */ title : {type : "string", group : "Misc", defaultValue : null}, /** * Tree width */ width : {type : "sap.ui.core.CSSSize", group : "Misc", defaultValue : 'auto'}, /** * Tree height */ height : {type : "sap.ui.core.CSSSize", group : "Misc", defaultValue : 'auto'}, /** * Tree Header is display. If false, the tree will be in a transparent mode */ showHeader : {type : "boolean", group : "Misc", defaultValue : true}, /** * Show Header icons (e.g. Expand/Collapse all). Only consider if showHeader is true */ showHeaderIcons : {type : "boolean", group : "Misc", defaultValue : true}, /** * Display horizontal scrollbar. If false, the overflow content will be hidden */ showHorizontalScrollbar : {type : "boolean", group : "Misc", defaultValue : false}, /** * Minimal width for the Tree. Can be useful when, for example, the width is specified in percentage, to avoid the tree to become too narrow when container is resize */ minWidth : {type : "sap.ui.core.CSSSize", group : "Misc", defaultValue : null}, /** * Selection mode of the Tree. 
*/ selectionMode : {type : "sap.ui.commons.TreeSelectionMode", group : "Behavior", defaultValue : sap.ui.commons.TreeSelectionMode.Legacy} }, defaultAggregation : "nodes", aggregations : { /** * First level nodes */ nodes : {type : "sap.ui.commons.TreeNode", multiple : true, singularName : "node", bindable : "bindable"} }, events : { /** * Event is fired when a tree node is selected. */ select : {allowPreventDefault : true, parameters : { /** * The node which has been selected. */ node : {type : "sap.ui.commons.TreeNode"}, /** * The binding context of the selected node. */ nodeContext : {type : "object"} } }, /** * fired when the selection of the tree has been changed */ selectionChange : { parameters : { /** * The nodes which has been selected. */ nodes : {type : "sap.ui.commons.TreeNode[]"}, /** * The binding context of the selected nodes. */ nodeContexts : {type : "object[]"} } } } }}); Tree.prototype.resizeListenerId; Tree.prototype.init = function(){ this.bAllCollapsed = false; this.allowTextSelection(false); this.iOldScrollTop = null; this.mSelectedNodes = {}; this.mSelectedContexts = {}; this.aLeadSelection = null; //Create Buttons for Header var oResourceBundle = sap.ui.getCore().getLibraryResourceBundle("sap.ui.commons"); this.oCollapseAllButton = new sap.ui.commons.Button(this.getId() + "-CollapseAll", { icon: this.getIconPrefix() + "CollapseAll.png", tooltip: oResourceBundle.getText("TREE_COLLAPSE_ALL"), lite: true }); this.oExpandAllButton = new sap.ui.commons.Button(this.getId() + "-ExpandAll", { icon: this.getIconPrefix() + "ExpandAll.png", tooltip: oResourceBundle.getText("TREE_EXPAND_ALL"), lite: true }); this.oCollapseAllButton.attachPress(this.onCollapseAll,this); this.oExpandAllButton.attachPress(this.onExpandAll,this); this.oCollapseAllButton.addStyleClass("sapUiTreeCol"); this.oExpandAllButton.addStyleClass("sapUiTreeExp"); }; /** * Does all the cleanup when the Tree is to be destroyed. * Called from the element's destroy() method. 
* @private */ Tree.prototype.exit = function(){ if ( this.oCollapseAllButton ) { this.oCollapseAllButton.destroy(); this.oCollapseAllButton = null; } if ( this.oExpandAllButton ) { this.oExpandAllButton.destroy(); this.oExpandAllButton = null; } }; // Enumeration for different types of selection in the tree Tree.SelectionType = { Select: "Select", Toggle: "Toggle", Range: "Range" }; /*********************************************************************************** * EVENTS HANDLING ***********************************************************************************/ /** Handler for "Theme Changed" event. * @private */ Tree.prototype.onThemeChanged = function(){ if (this.oCollapseAllButton && this.oExpandAllButton) { this.oCollapseAllButton.setIcon(this.getIconPrefix() + "CollapseAll.png"); this.oExpandAllButton.setIcon(this.getIconPrefix() + "ExpandAll.png"); } }; /** Handler for "Expand All" button. * @private */ Tree.prototype.onExpandAll = function(){ this.expandAll(); }; /**Handler for "Collapse All" button. 
* @private */ Tree.prototype.onCollapseAll = function(){ this.collapseAll(); }; /*"********************************************************************************* * PUBLIC METHODS ***********************************************************************************/ /** * Expands all nodes in the tree * * @type void * @public * @ui5-metamodel This method also will be described in the UI5 (legacy) designtime metamodel */ Tree.prototype.expandAll = function(){ var aNodes = this._getNodes(); for (var i = 0;i < aNodes.length;i++) { aNodes[i].expand(true, true); this._adjustSelectionOnExpanding(aNodes[i]); } }; /** * Collapses all nodes in the tree * * @type void * @public * @ui5-metamodel This method also will be described in the UI5 (legacy) designtime metamodel */ Tree.prototype.collapseAll = function(){ var aNodes = this._getNodes(); for (var i = 0;i < aNodes.length;i++) { aNodes[i].collapse(true, true); this._adjustSelectionOnCollapsing(aNodes[i]); } this._adjustFocus(); }; /*********************************************************************************** * KEYBOARD NAVIGATION ***********************************************************************************/ /** * DOWN key behavior * Opens the section or activates the UI element on DOWN key * @private * @param oEvent Browser event */ Tree.prototype.onsapdown = function(oEvent){ this.moveFocus(false); oEvent.preventDefault(); }; /** * UP key behavior * Opens the section or activates the UI element on UP key * @private * @param oEvent Browser event */ Tree.prototype.onsapup = function(oEvent){ this.moveFocus(true); oEvent.preventDefault(); }; /** * The general HOME key event of the tree * @private * @param {jQuery.Event} oEvent The saphome event object */ Tree.prototype.onsaphome = function(oEvent) { this.placeFocus(this.getFirstSibling(oEvent.target)); oEvent.preventDefault(); }; /** * The general CTRL+HOME key event of the tree * @private * @param {jQuery.Event} oEvent The saphome event object */ 
Tree.prototype.onsaphomemodifiers = function(oEvent) { this.placeFocus(this.getFirst()); oEvent.preventDefault(); }; /** * The general END key event of the tree * @private * @param {jQuery.Event} oEvent The sapend event object */ Tree.prototype.onsapend = function(oEvent) { this.placeFocus(this.getLastSibling(oEvent.target)); oEvent.preventDefault(); }; /** * The general CTRL+END key event of the tree * @private * @param {jQuery.Event} oEvent The sapend event object */ Tree.prototype.onsapendmodifiers = function(oEvent) { this.placeFocus(this.getLast()); oEvent.preventDefault(); }; /** * The numpad STAR(*) key event of the tree * @private * @param {jQuery.Event} oEvent The sapcollapseall event object */ Tree.prototype.onsapcollapseall = function(oEvent) { if (this.bAllCollapsed ) { this.expandAll(); } else { this.collapseAll(); } this.bAllCollapsed = !this.bAllCollapsed; }; /*********************************************************************************** * HELPER METHODS - DOM NAVIGATION ***********************************************************************************/ /** * Determine the icon prefix for the embedded button icons * @private */ Tree.prototype.getIconPrefix = function() { var sIconPrefix = "themes/" + sap.ui.getCore().getConfiguration().getTheme() + "/"; if (!sap.ui.getCore().getConfiguration().getRTL()) { sIconPrefix += "img/tree/"; } else { sIconPrefix += "img-RTL/tree/"; } return sap.ui.resource("sap.ui.commons", sIconPrefix); }; /**Returns the first Sibling tree node based on DOM Tree node provided * @param oDomNode The DOM Tree node from which calculate the first sibling * @returns The first sibling tree node * @private */ Tree.prototype.getFirstSibling = function(oDomNode) { var aDomFirstSiblingNode = jQuery(oDomNode).siblings(".sapUiTreeNode:visible").first(); if (aDomFirstSiblingNode.length) { return aDomFirstSiblingNode[0]; } return null; }; /**Returns the last Sibling tree node based on DOM Tree node provided * @param oDomNode The DOM 
Tree node from which calculate the last sibling * @returns The last sibling tree node * @private */ Tree.prototype.getLastSibling = function(oDomNode) { var aDomLastSiblingNode = jQuery(oDomNode).siblings(".sapUiTreeNode:visible").last(); if (aDomLastSiblingNode.length) { return aDomLastSiblingNode[0]; } return null; }; /**Returns the first tree node of the tree. Children of collapsed nodes (hidden) are not considered. * @returns The first tree node * @private */ Tree.prototype.getFirst = function() { var aDomFirstNode = this.$().find(".sapUiTreeNode:visible").first(); if (aDomFirstNode.length) { return aDomFirstNode[0]; } return null; }; /**Returns the last tree node of the tree. Children of collapsed nodes (hidden) are not considered. * @returns The last tree node * @private */ Tree.prototype.getLast = function() { var aDomLastNode = this.$().find(".sapUiTreeNode:visible").last(); if (aDomLastNode.length) { return aDomLastNode[0]; } return null; }; /*********************************************************************************** * HELPER METHODS - FOCUS MANAGEMENT ***********************************************************************************/ /** * Move the focus by one position, either UP or DOWN depending of "bMoveUp" * @param bMoveUp When true the focus is move up. Otherwise, it's moved down * @private */ Tree.prototype.moveFocus = function(bMoveUp){ var afocusedNodeDom = jQuery(".sapUiTreeNode:focus"); if (afocusedNodeDom.length) { var oCurrNode = sap.ui.getCore().byId(afocusedNodeDom[0].id); var aDomAllNodes = this.$().find(".sapUiTreeNode:visible"); var currIndex = aDomAllNodes.index(afocusedNodeDom[0]); var nextIndex = currIndex; if (bMoveUp) { nextIndex--; } else { nextIndex++; } if (nextIndex >= 0 && nextIndex < aDomAllNodes.length) { var oDomNextNode = aDomAllNodes.eq( nextIndex ); var oNextNode = sap.ui.getCore().byId(oDomNextNode[0].id); oCurrNode.blur(); oNextNode.focus(); } } }; /**Adjusts the focus after a node is collapsed. 
This is necessary as the currently focused node can then be hidden, * leading the tree not being focusable anymore. * * When the focusable is being hid by the collapsing of its parent, the focus is then set on this parent. * * @private */ Tree.prototype._adjustFocus = function(){ var oFocusableNode = this.$().find('.sapUiTreeNode[tabIndex="0"]'); if (!oFocusableNode.is(':visible')) { var aDomAllNodes = this.$().find(".sapUiTreeNode"); var focusIndex = aDomAllNodes.index(oFocusableNode[0]); var aDomPrecedingNodes = aDomAllNodes.filter(":lt(" + focusIndex + ")"); var aDomVisiblePrecedingNodes = aDomPrecedingNodes.filter(":visible"); var oNewFocusNode = aDomVisiblePrecedingNodes[aDomVisiblePrecedingNodes.length - 1]; if (oNewFocusNode) { oNewFocusNode.setAttribute("tabindex", "0"); if ( jQuery(".sapUiTreeNode:focus").is(":not(:visible)")) { oNewFocusNode.focus(); } } } }; /**Places the focus on the node corresponding to given DOM Tree Node * @param oDomTargetNode The DOM Tree Node corresponding to the node to focus * @private */ Tree.prototype.placeFocus = function(oDomTargetNode){ if (!oDomTargetNode) { return; //No Target node provided! } var oDomfocusedNode = this.$().find(".sapUiTreeNode[tabIndex='0']"); if (oDomfocusedNode.length) { oDomfocusedNode[0].setAttribute("tabindex", "-1"); } oDomTargetNode.setAttribute("tabindex", "0"); var oTargetNode = sap.ui.getCore().byId(oDomTargetNode.id); oTargetNode.focus(); }; /*********************************************************************************** * HELPER METHODS - SELECTION MANAGEMENT ***********************************************************************************/ /** * Adjusts the selection, when expanding, by re-selecting a children node when the expanded node was * selected only to represented the selection of a children node. 
* @param {sap.ui.commons.TreeNode} oExpandingNode The Node being expanded * @private */ Tree.prototype._adjustSelectionOnExpanding = function(oExpandingNode) { if (!oExpandingNode) { return; } var aExpandingParents = []; if (oExpandingNode.getSelectedForNodes().length) { aExpandingParents.push(oExpandingNode); } restoreSelectedChildren(oExpandingNode, aExpandingParents, null); //update dom if necessary var $ExpandingNode = oExpandingNode.$(); if ($ExpandingNode && $ExpandingNode.hasClass('sapUiTreeNodeSelectedParent')) { $ExpandingNode.removeClass('sapUiTreeNodeSelectedParent'); } //remove the remaining selectedparent classes from all expanded subnodes var $SelectedChildrenForTheirChildren = oExpandingNode.$('children').find('.sapUiTreeNodeExpanded.sapUiTreeNodeSelectedParent'); $SelectedChildrenForTheirChildren.removeClass('sapUiTreeNodeSelectedParent'); }; /** * Removes the references inside the expanded node of its selected children, because * they are no longer needed. * @param {sap.ui.commons.TreeNode} oNode The current node to look at * @param {object} aExpandingParents Array of parents of the current node that have selectedForNodes references * @param {sap.ui.commons.TreeNode} oFirstCollapsedParent The topmost collapsed parent node of the current node */ function restoreSelectedChildren(oNode, aExpandingParents, oFirstCollapsedParent) { var bIsExpanded = oNode.getExpanded(), bNodeReferredInParents = false, bIncludeInExpandingParents = bIsExpanded && !!oNode.getSelectedForNodes().length, oFirstCollapsedParentNode = (oFirstCollapsedParent || bIsExpanded) ? 
oFirstCollapsedParent : oNode, i; //check if any of the expanded parents, that have references, refers the current node //if so - remove the reference for (i = 0; i < aExpandingParents.length; i++) { if (aExpandingParents[i].getSelectedForNodes().indexOf(oNode.getId()) !== -1) { bNodeReferredInParents = true; aExpandingParents[i].removeAssociation("selectedForNodes", oNode, true); } } //if the node is referred somewhere in its parents and it has a collapsed parent //add a reference to the node in the first collapsed parent (if it is not already there) if (oFirstCollapsedParentNode && bNodeReferredInParents && oFirstCollapsedParentNode !== oNode) { if (oFirstCollapsedParentNode.getSelectedForNodes().indexOf(oNode.getId()) === -1) { oFirstCollapsedParentNode.addAssociation("selectedForNodes", oNode, true); } oFirstCollapsedParentNode.$().addClass('sapUiTreeNodeSelectedParent'); } //include the node in the expanding parents only if it has references to selected child nodes if (bIncludeInExpandingParents) { aExpandingParents.push(oNode); } var aNodes = oNode._getNodes(); for (i = 0; i < aNodes.length; i++) { restoreSelectedChildren(aNodes[i], aExpandingParents, oFirstCollapsedParentNode); } //exclude the node from the expanding parents if (bIncludeInExpandingParents) { aExpandingParents.pop(oNode); } } /** * Adds references inside the collapsed node of all its selected children recursively. * @param {sap.ui.commons.TreeNode} oNode The current node to look at * @param {sap.ui.commons.TreeNode} oRootNode The root node that was collapsed */ function rememberSelectedChildren(oNode, oRootNode) { var aNodes = oNode._getNodes(), oCurrentNode; for (var i = 0; i < aNodes.length; i++) { oCurrentNode = aNodes[i]; if (oCurrentNode.getIsSelected()) { oRootNode.addAssociation("selectedForNodes", oCurrentNode, true); } rememberSelectedChildren(oCurrentNode, oRootNode); } } /** * Adjusts the selection, when collapsing, selecting a parent when the actual selected node is * not visible. 
* @param {sap.ui.commons.TreeNode} oCollapsingNode The Node being collapsed * @private */ Tree.prototype._adjustSelectionOnCollapsing = function(oCollapsingNode) { if (!oCollapsingNode) { return; } // the root node, which needs to update references for selected children, // is also the first node to look at rememberSelectedChildren(oCollapsingNode, oCollapsingNode); //update dom if necessary if (oCollapsingNode.getSelectedForNodes().length) { var $CollapsingNode = oCollapsingNode.$(); if ($CollapsingNode && !$CollapsingNode.hasClass('sapUiTreeNodeSelectedParent')) { $CollapsingNode.addClass('sapUiTreeNodeSelectedParent'); } } }; /** * override this method on Element.js and return true if tree binding * @private */ Tree.prototype.isTreeBinding = function(sName) { return (sName == "nodes"); }; /** * override element updateAggregation method with this one and update the tree node bindings * @private */ Tree.prototype.updateNodes = function(sReason) { var aNodes, oNode, sKey, iNodesLength, i; // Delete all old node instances if (sReason === "filter") { aNodes = this.getAggregation("nodes"); iNodesLength = aNodes.length; for (i = 0; i < iNodesLength; i++) { aNodes[i].destroy(); } // We reset here mSelectedNodes as it collects id's and after filtering // the tree nodes they are recreated with new id's which can be the same as // the old ones and to result of false positives for node selection. 
this.mSelectedNodes = {}; } this.updateAggregation("nodes"); for (sKey in this.mSelectedContexts) { oNode = this.getNodeByContext(this.mSelectedContexts[sKey]); if (oNode) { oNode.setIsSelected(true); } else { this.mSelectedContexts = this._removeItemFromObject(this.mSelectedContexts, sKey); } } }; /** * Clones a flat object removing a key/value pair from the old one * @param {object} oObject The object from which the key shall be removed * @param {string} sKeyToRemove Key to be removed from the object * @returns {object} The new object without the removed key * @private */ Tree.prototype._removeItemFromObject = function (oObject, sKeyToRemove) { var sKey, oReturn = {}; for (sKey in oObject) { if (sKey !== sKeyToRemove) { oReturn[sKey] = oObject[sKey]; } } return oReturn; }; /** * Determine the binding context of the given node (dependent on the model name used * for the nodes binding) * * @param {sap.ui.commons.TreeNode} oNode * @private */ Tree.prototype.getNodeContext = function(oNode) { var oBindingInfo = this.getBindingInfo("nodes"), sModelName = oBindingInfo && oBindingInfo.model; return oNode.getBindingContext(sModelName); }; /** * Returns the node with the given context, or null if no such node currently exists * * @param {sap.ui.model.Context} oContext the context of the node to be retrieved * @public * @since 1.19 */ Tree.prototype.getNodeByContext = function(oContext){ var oBindingInfo = this.getBindingInfo("nodes"), sModelName = oBindingInfo && oBindingInfo.model; return this.findNode(this, function(oNode) { var oBindingContext = oNode.getBindingContext(sModelName); return (oContext && oBindingContext && oContext.getPath() === oBindingContext.getPath()); }); }; /** * Search through all existing nodes and return the first node which matches using * the given matching function * * @param {function} fnMatch the matching function * @param {sap.ui.commons.Tree|sap.ui.commons.TreeNode} oNode the node to check * @returns The found node * @private */ 
Tree.prototype.findNode = function(oNode, fnMatch) { var oFoundNode, that = this; if (fnMatch(oNode)) { return oNode; } jQuery.each(oNode._getNodes(), function(i, oNode) { oFoundNode = that.findNode(oNode, fnMatch); if (oFoundNode) { return false; } }); return oFoundNode; }; Tree.prototype.setSelectionMode = function(oMode){ oMode = this.validateProperty("selectionMode", oMode); if (this.getSelectionMode() != oMode) { this.setProperty("selectionMode", oMode); // Clear current selection, whenever the selectionmode changes this._delSelection(); } }; /**Returns the selected node in the tree. If not selection, returns false. * @returns The selected node * @private */ Tree.prototype.getSelection = function(){ for (var sId in this.mSelectedNodes) { return this.mSelectedNodes[sId]; } return null; }; /**Sets the selected node reference of the Tree * @private */ Tree.prototype.setSelection = function(oNode, bSuppressEvent, sType, bDeselectOtherNodes){ var bDoSelect = true; if (!bSuppressEvent) { bDoSelect = this.fireSelect({node: oNode, nodeContext: this.getNodeContext(oNode)}); } if (bDoSelect) { switch (this.getSelectionMode()) { case sap.ui.commons.TreeSelectionMode.Legacy: case sap.ui.commons.TreeSelectionMode.Single: this._setSelectedNode(oNode, bSuppressEvent); break; case sap.ui.commons.TreeSelectionMode.Multi: if (sType == Tree.SelectionType.Range) { this._setSelectedNodeMapRange(oNode, bSuppressEvent); } else if (sType == Tree.SelectionType.Toggle) { this._setSelectedNodeMapToggle(oNode, bSuppressEvent); } else { this._setSelectedNode(oNode, bSuppressEvent); } break; case sap.ui.commons.TreeSelectionMode.None: break; } } }; /**Rerendering handling. Sets the scroll position so that the selected node stays on the position it * was before rerendering, for example after the expand and adding the nodes dynamically. 
* @private */ Tree.prototype.onAfterRendering = function () { if (this.iOldScrollTop) { this.$("TreeCont").scrollTop(this.iOldScrollTop); } }; /** * Whenever nodes are added ore removed from the tree, the selection needs to be adapted, * so that the selected node map is in sync with the isSelected properties of the contained * nodes * @private */ Tree.prototype.invalidate = function () { var that = this; Control.prototype.invalidate.apply(this, arguments); if (this.iSelectionUpdateTimer) { return; } this.iSelectionUpdateTimer = setTimeout(function() { that.mSelectedNodes = {}; that.mSelectedContexts = []; that.updateSelection(that, true); that.iSelectionUpdateTimer = null; }, 0); }; /** * Add's node context to the internal mSelectedContexts object. * Taking care if TreeSelectionMode === Multi to not duplicate the node context in mSelectedContexts. * @param oContext The binding context of the node * @private */ Tree.prototype._addSelectedNodeContext = function (oContext) { var sPath; if (oContext && oContext.sPath) { sPath = oContext.sPath; if (this.getSelectionMode() === sap.ui.commons.TreeSelectionMode.Multi) { if (!(sPath in this.mSelectedContexts)) { this.mSelectedContexts[sPath] = oContext; } } else { this.mSelectedContexts = {}; this.mSelectedContexts[sPath] = oContext; } } }; /** * Loop through all tree nodes and collect the selected state * @private */ Tree.prototype.updateSelection = function (oNode, bExpanded) { var that = this; jQuery.each(oNode._getNodes(), function(i, oNode) { if (oNode.getIsSelected()) { switch (that.getSelectionMode()) { case sap.ui.commons.TreeSelectionMode.None: jQuery.sap.log.warning("Added selected nodes in a tree with disabled selection"); oNode.setIsSelected(false); break; case sap.ui.commons.TreeSelectionMode.Legacy: if (jQuery.isEmptyObject(that.mSelectedNodes)) { that.mSelectedNodes[oNode.getId()] = oNode; that._addSelectedNodeContext(that.getNodeContext(oNode)); } break; case sap.ui.commons.TreeSelectionMode.Single: if 
(jQuery.isEmptyObject(that.mSelectedNodes) == false) { jQuery.sap.log.warning("Added multiple selected nodes in single select tree"); oNode.setIsSelected(false); } else { that.mSelectedNodes[oNode.getId()] = oNode; that._addSelectedNodeContext(that.getNodeContext(oNode)); } break; case sap.ui.commons.TreeSelectionMode.Multi: if (!bExpanded) { jQuery.sap.log.warning("Added selected node inside collapsed node in multi select tree"); oNode.setIsSelected(false); } else { that.mSelectedNodes[oNode.getId()] = oNode; that._addSelectedNodeContext(that.getNodeContext(oNode)); } break; } } that.updateSelection(oNode, bExpanded && oNode.getExpanded()); }); }; /**Rerendering handling. Remembers the scroll position of the selected node. * @private */ Tree.prototype.onBeforeRendering = function() { this.iOldScrollTop = this.$("TreeCont").scrollTop(); }; Tree.prototype._setSelectedNode = function(oNode, bSuppressEvent) { var that = this, oContext = this.getNodeContext(oNode); jQuery.each(this.mSelectedNodes, function(sId, oNode){ that._delMultiSelection(oNode, bSuppressEvent); }); oNode._select(bSuppressEvent, true); this.mSelectedNodes[oNode.getId()] = oNode; this._addSelectedNodeContext(oContext); this.oLeadSelection = oNode; if (!bSuppressEvent) { this.fireSelectionChange({nodes: [oNode], nodeContexts: [oContext]}); } }; Tree.prototype._setSelectedNodeMapToggle = function(oNode, bSuppressEvent) { this._setNodeSelection(oNode, !oNode.getIsSelected(), bSuppressEvent); }; Tree.prototype._setSelectedNodeMapRange = function(oNode, bSuppressEvent) { var aSelectableNodes, aSelectedNodes = [], aSelectedNodeContexts = [], iStartIndex, iEndIndex, iFrom, iTo; if (this.mSelectedNodes[oNode.getId()] == oNode) { return; //Nothing to do! } else { if (this._getNodes().length > 0) { aSelectableNodes = this._getSelectableNodes(); iStartIndex = aSelectableNodes.indexOf(this.oLeadSelection); iEndIndex = aSelectableNodes.indexOf(oNode); iFrom = iStartIndex < iEndIndex ? 
iStartIndex : iEndIndex; iTo = iStartIndex < iEndIndex ? iEndIndex : iStartIndex; for (var i = iFrom; i <= iTo; i++) { this._setMultiSelection(aSelectableNodes[i], bSuppressEvent); } } } if (!bSuppressEvent) { jQuery.map(this.mSelectedNodes, function(oNode) {aSelectedNodes.push(oNode);}); jQuery.map(this.mSelectedContexts, function(oContext) {aSelectedNodeContexts.push(oContext);}); this.fireSelectionChange({nodes: aSelectedNodes, nodeContexts: aSelectedNodeContexts}); } }; Tree.prototype._getSelectableNodes = function(aNodes) { var aSelectableNodes = []; function collectSelectableNodes(aNodes) { jQuery.each(aNodes, function(i, oNode) { if (oNode.getSelectable()) { aSelectableNodes.push(oNode); } if (oNode.getExpanded()) { collectSelectableNodes(oNode._getNodes()); } }); } collectSelectableNodes(this._getNodes()); return aSelectableNodes; }; Tree.prototype._setNodeSelection = function(oNode, bIsSelected, bSuppressEvent) { var aSelectedNodes = [], aSelectedNodeContexts = [], oVisibleNode; if (this.getSelectionMode() == sap.ui.commons.TreeSelectionMode.Single) { if (bIsSelected) { var oSelectedNode = this.getSelection(); this._setSelectedNode(oNode, bSuppressEvent); if (!oNode.isVisible()) { oVisibleNode = this._getVisibleNode(oNode); this._adjustSelectionOnCollapsing(oVisibleNode); } if (oSelectedNode && !oSelectedNode.isVisible()) { oVisibleNode = this._getVisibleNode(oSelectedNode); this._adjustSelectionOnExpanding(oVisibleNode); } return; } else { this._delMultiSelection(oNode, bSuppressEvent); if (!oNode.isVisible()) { oVisibleNode = this._getVisibleNode(oNode); this._adjustSelectionOnExpanding(oVisibleNode); } } } if (bIsSelected) { this._setMultiSelection(oNode, bSuppressEvent); this.oLeadSelection = oNode; } else { this._delMultiSelection(oNode, bSuppressEvent); this.oLeadSelection = oNode; } if (!bSuppressEvent) { jQuery.map(this.mSelectedNodes, function(oNode) {aSelectedNodes.push(oNode);}); jQuery.map(this.mSelectedContexts, function(oContext) 
{aSelectedNodeContexts.push(oContext);}); this.fireSelectionChange({nodes: aSelectedNodes, nodeContexts: aSelectedNodeContexts}); } }; Tree.prototype._setMultiSelection = function(oSelNode, bSuppressEvent) { if (!oSelNode) { return; } oSelNode._select(bSuppressEvent); this.mSelectedNodes[oSelNode.getId()] = oSelNode; this._addSelectedNodeContext(this.getNodeContext(oSelNode)); }; Tree.prototype._delMultiSelection = function(oSelNode) { var oContext; if (!oSelNode) { return; } oSelNode._deselect(); this.mSelectedNodes = this._removeItemFromObject(this.mSelectedNodes, oSelNode.getId()); oContext = oSelNode.getBindingContext(); if (oContext && oContext.sPath) { if (oContext.sPath in this.mSelectedContexts) { this.mSelectedContexts = this._removeItemFromObject(this.mSelectedContexts, oContext.sPath); } } }; Tree.prototype._delSelection = function() { var that = this; if (this.oSelectedNode) { this.oSelectedNode._deselect(); } if (jQuery.isEmptyObject(this.mSelectedNodes) == false) { jQuery.each(this.mSelectedNodes, function(sId, oNode){ that._delMultiSelection(oNode); }); } }; Tree.prototype._getNodes = function() { return this.mAggregations.nodes || []; }; Tree.prototype._getVisibleNode = function(oNode) { var oParentNode = oNode.getParent(); if (oParentNode.isVisible()) { var oVisibleNode = oParentNode; } else { oVisibleNode = this._getVisibleNode(oParentNode); } return oVisibleNode; }; return Tree; }, /* bExport= */ true);
olirogers/openui5
src/sap.ui.commons/src/sap/ui/commons/Tree.js
JavaScript
apache-2.0
31,259
# # Copyright (c) SAS Institute Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Module used after C{%(destdir)s} has been finalized to create the initial packaging. Also contains error reporting. """ import codecs import imp import itertools import os import re import site import sre_constants import stat import subprocess import sys from conary import files, trove from conary.build import buildpackage, filter, policy, recipe, tags, use from conary.build import smartform from conary.deps import deps from conary.lib import elf, magic, util, pydeps, fixedglob, graph from conary.build.action import TARGET_LINUX from conary.build.action import TARGET_WINDOWS try: from xml.etree import ElementTree except ImportError: try: from elementtree import ElementTree except ImportError: ElementTree = None # Helper class class _DatabaseDepCache(object): __slots__ = ['db', 'cache'] def __init__(self, db): self.db = db self.cache = {} def getProvides(self, depSetList): ret = {} missing = [] for depSet in depSetList: if depSet in self.cache: ret[depSet] = self.cache[depSet] else: missing.append(depSet) newresults = self.db.getTrovesWithProvides(missing) ret.update(newresults) self.cache.update(newresults) return ret class _filterSpec(policy.Policy): """ Pure virtual base class from which C{ComponentSpec} and C{PackageSpec} are derived. 
""" bucket = policy.PACKAGE_CREATION processUnmodified = False supported_targets = (TARGET_LINUX, TARGET_WINDOWS) def __init__(self, *args, **keywords): self.extraFilters = [] policy.Policy.__init__(self, *args, **keywords) def updateArgs(self, *args, **keywords): """ Call derived classes (C{ComponentSpec} or C{PackageSpec}) as:: ThisClass('<name>', 'filterexp1', 'filterexp2') where C{filterexp} is either a regular expression or a tuple of C{(regexp[, setmodes[, unsetmodes]])} """ if args: theName = args[0] for filterexp in args[1:]: self.extraFilters.append((theName, filterexp)) policy.Policy.updateArgs(self, **keywords) class _addInfo(policy.Policy): """ Pure virtual class for policies that add information such as tags, requirements, and provision, to files. """ bucket = policy.PACKAGE_CREATION processUnmodified = False requires = ( ('PackageSpec', policy.REQUIRED_PRIOR), ) keywords = { 'included': {}, 'excluded': {} } supported_targets = (TARGET_LINUX, TARGET_WINDOWS) def updateArgs(self, *args, **keywords): """ Call as:: C{I{ClassName}(I{info}, I{filterexp})} or:: C{I{ClassName}(I{info}, exceptions=I{filterexp})} where C{I{filterexp}} is either a regular expression or a tuple of C{(regexp[, setmodes[, unsetmodes]])} """ if args: args = list(args) info = args.pop(0) if args: if not self.included: self.included = {} if info not in self.included: self.included[info] = [] self.included[info].extend(args) elif 'exceptions' in keywords: # not the usual exception handling, this is an exception if not self.excluded: self.excluded = {} if info not in self.excluded: self.excluded[info] = [] self.excluded[info].append(keywords.pop('exceptions')) else: raise TypeError, 'no paths provided' policy.Policy.updateArgs(self, **keywords) def doProcess(self, recipe): # for filters self.rootdir = self.rootdir % recipe.macros # instantiate filters d = {} for info in self.included: newinfo = info % recipe.macros l = [] for item in self.included[info]: l.append(filter.Filter(item, 
recipe.macros)) d[newinfo] = l self.included = d d = {} for info in self.excluded: newinfo = info % recipe.macros l = [] for item in self.excluded[info]: l.append(filter.Filter(item, recipe.macros)) d[newinfo] = l self.excluded = d policy.Policy.doProcess(self, recipe) def doFile(self, path): fullpath = self.recipe.macros.destdir+path if not util.isregular(fullpath) and not os.path.islink(fullpath): return self.runInfo(path) def runInfo(self, path): 'pure virtual' pass class Config(policy.Policy): """ NAME ==== B{C{r.Config()}} - Mark files as configuration files SYNOPSIS ======== C{r.Config([I{filterexp}] || [I{exceptions=filterexp}])} DESCRIPTION =========== The C{r.Config} policy marks all files below C{%(sysconfdir)s} (that is, C{/etc}) and C{%(taghandlerdir)s} (that is, C{/usr/libexec/conary/tags/}), and any other files explicitly mentioned, as configuration files. - To mark files as exceptions, use C{r.Config(exceptions='I{filterexp}')}. - To mark explicit inclusions as configuration files, use: C{r.Config('I{filterexp}')} A file marked as a Config file cannot also be marked as a Transient file or an InitialContents file. Conary enforces this requirement. EXAMPLES ======== C{r.Config(exceptions='%(sysconfdir)s/X11/xkb/xkbcomp')} The file C{/etc/X11/xkb/xkbcomp} is marked as an exception, since it is not actually a configuration file even though it is within the C{/etc} (C{%(sysconfdir)s}) directory hierarchy and would be marked as a configuration file by default. C{r.Config('%(mmdir)s/Mailman/mm_cfg.py')} Marks the file C{%(mmdir)s/Mailman/mm_cfg.py} as a configuration file; it would not be automatically marked as a configuration file otherwise. 
""" bucket = policy.PACKAGE_CREATION processUnmodified = True requires = ( # for :config component, ComponentSpec must run after Config # Otherwise, this policy would follow PackageSpec and just set isConfig # on each config file ('ComponentSpec', policy.REQUIRED_SUBSEQUENT), ) invariantinclusions = [ '%(sysconfdir)s/', '%(taghandlerdir)s/'] invariantexceptions = [ '%(userinfodir)s/', '%(groupinfodir)s' ] def doFile(self, filename): m = self.recipe.magic[filename] if m and m.name == "ELF": # an ELF file cannot be a config file, some programs put # ELF files under /etc (X, for example), and tag handlers # can be ELF or shell scripts; we just want tag handlers # to be config files if they are shell scripts. # Just in case it was not intentional, warn... if self.macros.sysconfdir in filename: self.info('ELF file %s found in config directory', filename) return fullpath = self.macros.destdir + filename if os.path.isfile(fullpath) and util.isregular(fullpath): if self._fileIsBinary(filename, fullpath): self.error("binary file '%s' is marked as config" % \ filename) self._markConfig(filename, fullpath) def _fileIsBinary(self, path, fn, maxsize=None, decodeFailIsError=True): limit = os.stat(fn)[stat.ST_SIZE] if maxsize is not None and limit > maxsize: self.warn('%s: file size %d longer than max %d', path, limit, maxsize) return True # we'll consider file to be binary file if we don't find any # good reason to mark it as text, or if we find a good reason # to mark it as binary foundFF = False foundNL = False f = open(fn, 'r') try: while f.tell() < limit: buf = f.read(65536) if chr(0) in buf: self.warn('%s: file contains NULL byte', path) return True if '\xff\xff' in buf: self.warn('%s: file contains 0xFFFF sequence', path) return True if '\xff' in buf: foundFF = True if '\n' in buf: foundNL = True finally: f.close() if foundFF and not foundNL: self.error('%s: found 0xFF without newline', path) utf8 = codecs.open(fn, 'r', 'utf-8') win1252 = codecs.open(fn, 'r', 
'windows-1252') try: try: while utf8.tell() < limit: utf8.read(65536) except UnicodeDecodeError, e: # Still want to print a warning if it is not unicode; # Note that Code Page 1252 is considered a legacy # encoding on Windows self.warn('%s: %s', path, str(e)) try: while win1252.tell() < limit: win1252.read(65536) except UnicodeDecodeError, e: self.warn('%s: %s', path, str(e)) return decodeFailIsError finally: utf8.close() win1252.close() return False def _addTrailingNewline(self, filename, fullpath): # FIXME: This exists only for stability; there is no longer # any need to add trailing newlines to config files. This # also violates the rule that no files are modified after # destdir modification has been completed. self.warn("adding trailing newline to config file '%s'" % \ filename) mode = os.lstat(fullpath)[stat.ST_MODE] oldmode = None if mode & 0600 != 0600: # need to be able to read and write the file to fix it oldmode = mode os.chmod(fullpath, mode|0600) f = open(fullpath, 'a') f.seek(0, 2) f.write('\n') f.close() if oldmode is not None: os.chmod(fullpath, oldmode) def _markConfig(self, filename, fullpath): self.info(filename) f = file(fullpath) f.seek(0, 2) if f.tell(): # file has contents f.seek(-1, 2) lastchar = f.read(1) f.close() if lastchar != '\n': self._addTrailingNewline(filename, fullpath) f.close() self.recipe.ComponentSpec(_config=filename) class ComponentSpec(_filterSpec): """ NAME ==== B{C{r.ComponentSpec()}} - Determines which component each file is in SYNOPSIS ======== C{r.ComponentSpec([I{componentname}, I{filterexp}] || [I{packagename}:I{componentname}, I{filterexp}])} DESCRIPTION =========== The C{r.ComponentSpec} policy includes the filter expressions that specify the default assignment of files to components. The expressions are considered in the order in which they are evaluated in the recipe, and the first match wins. After all the recipe-provided expressions are evaluated, the default expressions are evaluated. 
    If no expression matches, then the file is assigned to the
    C{catchall} component.  Note that in the
    C{I{packagename}:I{componentname}} form, the C{:} must be literal,
    it cannot be part of a macro.

    KEYWORDS
    ========
    B{catchall} : Specify the component name which gets all otherwise
    unassigned files.  Default: C{runtime}

    EXAMPLES
    ========
    C{r.ComponentSpec('manual', '%(contentdir)s/manual/')}

    Uses C{r.ComponentSpec} to specify that all files below the
    C{%(contentdir)s/manual/} directory are part of the C{:manual}
    component.

    C{r.ComponentSpec('foo:bar', '%(sharedir)s/foo/')}

    Uses C{r.ComponentSpec} to specify that all files below the
    C{%(sharedir)s/foo/} directory are part of the C{:bar} component
    of the C{foo} package, avoiding the need to invoke both the
    C{ComponentSpec} and C{PackageSpec} policies.

    C{r.ComponentSpec(catchall='data')}

    Uses C{r.ComponentSpec} to specify that all files not otherwise
    specified go into the C{:data} component instead of the default
    {:runtime} component.
    """
    requires = (
        ('Config', policy.REQUIRED_PRIOR),
        ('PackageSpec', policy.REQUIRED_SUBSEQUENT),
    )
    keywords = { 'catchall': 'runtime' }

    def __init__(self, *args, **keywords):
        """
        @keyword catchall: The component name which gets all otherwise
        unassigned files.  Default: C{runtime}
        """
        _filterSpec.__init__(self, *args, **keywords)
        self.configFilters = []
        self.derivedFilters = []

    def updateArgs(self, *args, **keywords):
        # _config is passed by Config via _markConfig; forward it on to
        # PackageSpec so the file is flagged isConfig at postProcess time.
        if '_config' in keywords:
            configPath=keywords.pop('_config')
            self.recipe.PackageSpec(_config=configPath)

        if args:
            name = args[0]
            if ':' in name:
                # 'package:component' form: split, keep the component part
                # for ourselves and forward the package part to PackageSpec
                package, name = name.split(':')
                args = list(itertools.chain([name], args[1:]))
                if package:
                    # we've got a package as well as a component, pass it on
                    pkgargs = list(itertools.chain((package,), args[1:]))
                    self.recipe.PackageSpec(*pkgargs)

        _filterSpec.updateArgs(self, *args, **keywords)

    def doProcess(self, recipe):
        # Build the ordered list of component filters and hand it to
        # PackageSpec, which constructs the autopackage object.
        compFilters = []
        self.macros = recipe.macros
        self.rootdir = self.rootdir % recipe.macros
        self.loadFilterDirs()

        # The extras need to come before base in order to override decisions
        # in the base subfilters; invariants come first for those very few
        # specs that absolutely should not be overridden in recipes.
        for filteritem in itertools.chain(self.invariantFilters,
                                          self.extraFilters,
                                          self.derivedFilters,
                                          self.configFilters,
                                          self.baseFilters):
            if not isinstance(filteritem, (filter.Filter, filter.PathSet)):
                # tuple form: (name, filterexp, ...) -- compile it now
                name = filteritem[0] % self.macros
                assert(name != 'source')
                args, kwargs = self.filterExpArgs(filteritem[1:], name=name)
                filteritem = filter.Filter(*args, **kwargs)
            compFilters.append(filteritem)

        # by default, everything that hasn't matched a filter pattern yet
        # goes in the catchall component ('runtime' by default)
        compFilters.append(filter.Filter('.*', self.macros,
                                         name=self.catchall))

        # pass these down to PackageSpec for building the package
        recipe.PackageSpec(compFilters=compFilters)

    def loadFilterDirs(self):
        # Load invariant and base component-filter specification files
        # from the configured componentDirs, then topologically sort them
        # by their declared follows/precedes constraints.
        invariantFilterMap = {}
        baseFilterMap = {}
        self.invariantFilters = []
        self.baseFilters = []

        # Load all component python files
        for componentDir in self.recipe.cfg.componentDirs:
            for filterType, map in (('invariant', invariantFilterMap),
                                    ('base', baseFilterMap)):
                oneDir = os.sep.join((componentDir, filterType))
                if not os.path.isdir(oneDir):
                    continue
                for filename in os.listdir(oneDir):
                    fullpath = os.sep.join((oneDir, filename))
                    if (not filename.endswith('.py') or
                        not util.isregular(fullpath)):
                        continue
                    self.loadFilter(filterType, map, filename, fullpath)

        # populate the lists with dependency-sorted information
        for filterType, map, filterList in (
            ('invariant', invariantFilterMap, self.invariantFilters),
            ('base', baseFilterMap, self.baseFilters)):
            dg = graph.DirectedGraph()
            for filterName in map.keys():
                dg.addNode(filterName)
                filter, follows, precedes = map[filterName]
                def warnMissing(missing):
                    self.error('%s depends on missing %s',
                               filterName, missing)
                for prior in follows:
                    if not prior in map:
                        warnMissing(prior)
                    dg.addEdge(prior, filterName)
                for subsequent in precedes:
                    if not subsequent in map:
                        warnMissing(subsequent)
                    dg.addEdge(filterName, subsequent)

            # test for dependency loops
            depLoops = [x for x in dg.getStronglyConnectedComponents()
                        if len(x) > 1]
            if depLoops:
                self.error('dependency loop(s) in component filters: %s',
                           ' '.join(sorted(':'.join(x)
                                    for x in sorted(list(depLoops)))))
                return

            # Create a stably-sorted list of config filters where
            # the filter is not empty.  (An empty filter with both
            # follows and precedes specified can be used to induce
            # ordering between otherwise unrelated components.)
            #for name in dg.getTotalOrdering(nodeSort=lambda a, b: cmp(a,b)):
            for name in dg.getTotalOrdering():
                filters = map[name][0]
                if not filters:
                    continue
                componentName = filters[0]
                for filterExp in filters[1]:
                    filterList.append((componentName, filterExp))

    def loadFilter(self, filterType, map, filename, fullpath):
        # Import one component-specification module and record its
        # 'filters' (required), 'follows' and 'precedes' (optional).
        # do not load shared libraries
        desc = [x for x in imp.get_suffixes() if x[0] == '.py'][0]
        f = file(fullpath)
        modname = filename[:-3]
        m = imp.load_module(modname, f, fullpath, desc)
        f.close()

        if not 'filters' in m.__dict__:
            self.warn('%s missing "filters"; not a valid component'
                      ' specification file', fullpath)
            return
        filters = m.__dict__['filters']

        if filters and len(filters) > 1 and type(filters[1]) not in (list,
                                                                     tuple):
            self.error('invalid expression in %s: filters specification'
                       " must be ('name', ('expression', ...))", fullpath)

        follows = ()
        if 'follows' in m.__dict__:
            follows = m.__dict__['follows']

        precedes = ()
        if 'precedes' in m.__dict__:
            precedes = m.__dict__['precedes']

        map[modname] = (filters, follows, precedes)


class PackageSpec(_filterSpec):
    """
    NAME
    ====
    B{C{r.PackageSpec()}} - Determines which package each file is in

    SYNOPSIS
    ========
    C{r.PackageSpec(I{packagename}, I{filterexp})}

    DESCRIPTION
    ===========
    The C{r.PackageSpec()} policy determines which package each file is
    in.  (Use C{r.ComponentSpec()} to specify the component without
    specifying the package, or to specify C{I{package}:I{component}} in
    one invocation.)

    EXAMPLES
    ========
    C{r.PackageSpec('openssh-server', '%(sysconfdir)s/pam.d/sshd')}

    Specifies that the file C{%(sysconfdir)s/pam.d/sshd} is in the
    package C{openssh-server} rather than the default (which in this
    case would have been C{openssh} because this example was provided
    by C{openssh.recipe}).
    """
    requires = (
        ('ComponentSpec', policy.REQUIRED_PRIOR),
    )
    keywords = { 'compFilters': None }

    def __init__(self, *args, **keywords):
        """
        @keyword compFilters: reserved for C{ComponentSpec} to pass
        information needed by C{PackageSpec}.
        """
        _filterSpec.__init__(self, *args, **keywords)
        self.configFiles = []
        self.derivedFilters = []

    def updateArgs(self, *args, **keywords):
        # _config paths are recorded so postProcess can flag them isConfig
        if '_config' in keywords:
            self.configFiles.append(keywords.pop('_config'))
        # keep a list of packages filtered for in PackageSpec in the recipe
        if args:
            newTrove = args[0] % self.recipe.macros
            self.recipe.packages[newTrove] = True
        _filterSpec.updateArgs(self, *args, **keywords)

    def preProcess(self):
        # Assemble the ordered package filters and build the autopackage
        # object that maps every file to a (package, component) pair.
        self.pkgFilters = []
        recipe = self.recipe
        self.destdir = recipe.macros.destdir
        if self.exceptions:
            self.warn('PackageSpec does not honor exceptions')
            self.exceptions = None
        if self.inclusions:
            # would have an effect only with exceptions listed,
            # so no warning...
            self.inclusions = None

        # userinfo and groupinfo are invariant filters, so they must
        # come first
        for infoType in ('user', 'group'):
            infoDir = '%%(%sinfodir)s' % infoType % self.macros
            realDir = util.joinPaths(self.destdir, infoDir)
            if not os.path.isdir(realDir):
                continue
            for infoPkgName in os.listdir(realDir):
                pkgPath = util.joinPaths(infoDir, infoPkgName)
                self.pkgFilters.append( \
                        filter.Filter(pkgPath, self.macros,
                                      name = 'info-%s' % infoPkgName))

        # extras need to come before derived so that derived packages
        # can change the package to which a file is assigned
        for filteritem in itertools.chain(self.extraFilters,
                                          self.derivedFilters):
            if not isinstance(filteritem, (filter.Filter, filter.PathSet)):
                name = filteritem[0] % self.macros
                if not trove.troveNameIsValid(name):
                    self.error('%s is not a valid package name', name)
                args, kwargs = self.filterExpArgs(filteritem[1:], name=name)
                self.pkgFilters.append(filter.Filter(*args, **kwargs))
            else:
                self.pkgFilters.append(filteritem)

        # by default, everything that hasn't matched a pattern in the
        # main package filter goes in the package named recipe.name
        self.pkgFilters.append(filter.Filter('.*', self.macros,
                                             name=recipe.name))

        # OK, all the filters exist, build an autopackage object that
        # knows about them
        recipe.autopkg = buildpackage.AutoBuildPackage(
            self.pkgFilters, self.compFilters, recipe)
        self.autopkg = recipe.autopkg

    def do(self):
        # Walk capsule contents ignored by doFile
        for filePath, _, componentName in self.recipe._iterCapsulePaths():
            realPath = self.destdir + filePath
            if util.exists(realPath):
                # Files that do not exist on the filesystem (devices)
                # are handled separately
                self.autopkg.addFile(filePath, realPath, componentName)
        # Walk normal files
        _filterSpec.do(self)

    def doFile(self, path):
        # all policy classes after this require that the initial tree
        # is built
        if not self.recipe._getCapsulePathsForFile(path):
            realPath = self.destdir + path
            self.autopkg.addFile(path, realPath)

    def postProcess(self):
        # flag all config files
        for confname in self.configFiles:
            self.recipe.autopkg.pathMap[confname].flags.isConfig(True)


class InitialContents(policy.Policy):
    """
    NAME
    ====
    B{C{r.InitialContents()}} - Mark only explicit inclusions as initial
    contents files

    SYNOPSIS
    ========
    C{InitialContents([I{filterexp}])}

    DESCRIPTION
    ===========
    By default, C{r.InitialContents()} does not apply to any files.
    It is used to specify all files that Conary needs to mark as
    providing only initial contents.  When Conary installs or
    updates one of these files, it will never replace existing
    contents; it uses the provided contents only if the file does
    not yet exist at the time Conary is creating it.

    A file marked as an InitialContents file cannot also be marked
    as a Transient file or a Config file.  Conary enforces this
    requirement.

    EXAMPLES
    ========
    C{r.InitialContents('%(sysconfdir)s/conary/.*gpg')}

    The files C{%(sysconfdir)s/conary/.*gpg} are being marked as
    initial contents files.  Conary will use those contents when
    creating the files the first time, but will never overwrite
    existing contents in those files.
    """
    requires = (
        ('PackageSpec', policy.REQUIRED_PRIOR),
        ('Config', policy.REQUIRED_PRIOR),
    )
    bucket = policy.PACKAGE_CREATION
    processUnmodified = True
    invariantexceptions = [ '%(userinfodir)s/', '%(groupinfodir)s' ]
    invariantinclusions = ['%(localstatedir)s/run/',
                           '%(localstatedir)s/log/',
                           '%(cachedir)s/']

    def postInit(self, *args, **kwargs):
        # These invariant paths must never be treated as Config files
        self.recipe.Config(exceptions = self.invariantinclusions,
                           allowUnusedFilters = True)

    def updateArgs(self, *args, **keywords):
        # Anything explicitly marked InitialContents is excluded from
        # Config, since the two flags are mutually exclusive.
        policy.Policy.updateArgs(self, *args, **keywords)
        self.recipe.Config(exceptions=args, allowUnusedFilters = True)

    def doFile(self, filename):
        fullpath = self.macros.destdir + filename
        recipe = self.recipe
        if os.path.isfile(fullpath) and util.isregular(fullpath):
            self.info(filename)
            f = recipe.autopkg.pathMap[filename]
            f.flags.isInitialContents(True)
            if f.flags.isConfig():
                self.error(
                    '%s is marked as both a configuration file and'
                    ' an initial contents file', filename)


class Transient(policy.Policy):
    """
    NAME
    ====
    B{C{r.Transient()}} - Mark files that have transient contents

    SYNOPSIS
    ========
    C{r.Transient([I{filterexp}])}

    DESCRIPTION
    ===========
    The C{r.Transient()} policy marks files as containing transient
    contents.  It automatically marks the two most common uses of
    transient contents: python and emacs byte-compiled files
    (C{.pyc}, C{.pyo}, and C{.elc} files).

    Files containing transient contents are almost the opposite of
    configuration files: their contents should be overwritten by
    the new contents without question at update time, even if the
    contents in the filesystem have changed.  (Conary raises an
    error if file contents have changed in the filesystem for normal
    files.)

    A file marked as a Transient file cannot also be marked as an
    InitialContents file or a Config file.  Conary enforces this
    requirement.

    EXAMPLES
    ========
    C{r.Transient('%(libdir)s/firefox/extensions/')}

    Marks all the files in the directory
    C{%(libdir)s/firefox/extensions/} as having transient contents.
    """
    bucket = policy.PACKAGE_CREATION
    filetree = policy.PACKAGE
    processUnmodified = True
    requires = (
        ('PackageSpec', policy.REQUIRED_PRIOR),
        ('Config', policy.REQUIRED_PRIOR),
        ('InitialContents', policy.REQUIRED_PRIOR),
    )

    # NOTE(review): the leading '.' in these patterns matches the
    # initial '/' of each path -- TODO confirm against filter semantics.
    invariantinclusions = [
        r'..*\.py(c|o)$',
        r'..*\.elc$',
        r'%(userinfodir)s/',
        r'%(groupinfodir)s'
    ]

    def doFile(self, filename):
        fullpath = self.macros.destdir + filename
        if os.path.isfile(fullpath) and util.isregular(fullpath):
            recipe = self.recipe
            f = recipe.autopkg.pathMap[filename]
            f.flags.isTransient(True)
            if f.flags.isConfig() or f.flags.isInitialContents():
                self.error(
                    '%s is marked as both a transient file and'
                    ' a configuration or initial contents file', filename)


class TagDescription(policy.Policy):
    """
    NAME
    ====
    B{C{r.TagDescription()}} - Marks tag description files

    SYNOPSIS
    ========
    C{r.TagDescription([I{filterexp}])}

    DESCRIPTION
    ===========
    The C{r.TagDescription} class marks tag description files as
    such so that conary handles them correctly.  Every file in
    C{%(tagdescriptiondir)s/} is marked as a tag description file
    by default.

    No file outside of C{%(tagdescriptiondir)s/} will be considered
    by this policy.

    EXAMPLES
    ========
    This policy is not called explicitly.
    """
    bucket = policy.PACKAGE_CREATION
    processUnmodified = False
    requires = (
        ('PackageSpec', policy.REQUIRED_PRIOR),
    )

    invariantsubtrees = [ '%(tagdescriptiondir)s/' ]

    def doFile(self, path):
        if self.recipe._getCapsulePathsForFile(path):
            return
        fullpath = self.macros.destdir + path
        if os.path.isfile(fullpath) and util.isregular(fullpath):
            self.info('conary tag file: %s', path)
            self.recipe.autopkg.pathMap[path].tags.set("tagdescription")


class TagHandler(policy.Policy):
    """
    NAME
    ====
    B{C{r.TagHandler()}} - Mark tag handler files

    SYNOPSIS
    ========
    C{r.TagHandler([I{filterexp}])}

    DESCRIPTION
    ===========
    All files in C{%(taghandlerdir)s/} are marked as tag handler
    files.

    EXAMPLES
    ========
    This policy is not called explicitly.
    """
    bucket = policy.PACKAGE_CREATION
    processUnmodified = False
    requires = (
        ('PackageSpec', policy.REQUIRED_PRIOR),
    )

    invariantsubtrees = [ '%(taghandlerdir)s/' ]

    def doFile(self, path):
        if self.recipe._getCapsulePathsForFile(path):
            return
        fullpath = self.macros.destdir + path
        if os.path.isfile(fullpath) and util.isregular(fullpath):
            self.info('conary tag handler: %s', path)
            self.recipe.autopkg.pathMap[path].tags.set("taghandler")


class TagSpec(_addInfo):
    """
    NAME
    ====
    B{C{r.TagSpec()}} - Apply tags defined by tag descriptions

    SYNOPSIS
    ========
    C{r.TagSpec([I{tagname}, I{filterexp}] || [I{tagname}, I{exceptions=filterexp}])}

    DESCRIPTION
    ===========
    The C{r.TagSpec()} policy automatically applies tags defined by
    tag descriptions in both the current system and C{%(destdir)s} to
    all files in C{%(destdir)s}.

    To apply tags manually (removing a dependency on the tag
    description file existing when the packages is cooked), use the
    syntax: C{r.TagSpec(I{tagname}, I{filterexp})}.
    To set an exception to this policy, use:
    C{r.TagSpec(I{tagname}, I{exceptions=filterexp})}.

    EXAMPLES
    ========
    C{r.TagSpec('initscript', '%(initdir)s/')}

    Applies the C{initscript} tag to all files in the directory
    C{%(initdir)s/}.
    """
    requires = (
        ('PackageSpec', policy.REQUIRED_PRIOR),
    )

    def doProcess(self, recipe):
        self.tagList = []
        self.buildReqsComputedForTags = set()
        self.suggestBuildRequires = set()
        # read the system and %(destdir)s tag databases
        for directory in (recipe.macros.destdir+'/etc/conary/tags/',
                          '/etc/conary/tags/'):
            if os.path.isdir(directory):
                for filename in os.listdir(directory):
                    path = util.joinPaths(directory, filename)
                    self.tagList.append(
                        tags.TagFile(path, recipe.macros, True))
        self.fullReqs = self.recipe._getTransitiveBuildRequiresNames()
        _addInfo.doProcess(self, recipe)

    def markTag(self, name, tag, path, tagFile=None):
        # commonly, a tagdescription will nominate a file to be
        # tagged, but it will also be set explicitly in the recipe,
        # and therefore markTag will be called twice.
        if (len(tag.split()) > 1 or
            not tag.replace('-', '').replace('_', '').isalnum()):
            # handlers for multiple tags require strict tag names:
            # no whitespace, only alphanumeric plus - and _ characters
            self.error('illegal tag name %s for file %s' %(tag, path))
            return
        tags = self.recipe.autopkg.pathMap[path].tags
        if tag not in tags:
            self.info('%s: %s', name, path)
            tags.set(tag)
            if tagFile and tag not in self.buildReqsComputedForTags:
                # look up (once per tag) which trove provides the tag
                # description file and suggest it as a buildRequires
                self.buildReqsComputedForTags.add(tag)
                db = self._getDb()
                for trove in db.iterTrovesByPath(tagFile.tagFile):
                    troveName = trove.getName()
                    if troveName not in self.fullReqs:
                        # XXX should be error, change after bootstrap
                        self.warn("%s assigned by %s to file %s, so add '%s'"
                                   ' to buildRequires or call r.TagSpec()'
                                   %(tag, tagFile.tagFile, path, troveName))
                        self.suggestBuildRequires.add(troveName)

    def runInfo(self, path):
        if self.recipe._getCapsulePathsForFile(path):
            # capsules do not participate in the tag protocol
            return
        excludedTags = {}
        # first, tags applied explicitly in the recipe
        for tag in self.included:
            for filt in self.included[tag]:
                if filt.match(path):
                    isExcluded = False
                    if tag in self.excluded:
                        for filt in self.excluded[tag]:
                            if filt.match(path):
                                s = excludedTags.setdefault(tag, set())
                                s.add(path)
                                isExcluded = True
                                break
                    if not isExcluded:
                        self.markTag(tag, tag, path)

        # second, tags applied automatically by tag description files
        for tag in self.tagList:
            if tag.match(path):
                if tag.name:
                    name = tag.name
                else:
                    name = tag.tag
                isExcluded = False
                if tag.tag in self.excluded:
                    for filt in self.excluded[tag.tag]:
                        # exception handling is per-tag, so handled specially
                        if filt.match(path):
                            s = excludedTags.setdefault(name, set())
                            s.add(path)
                            isExcluded = True
                            break
                if not isExcluded:
                    self.markTag(name, tag.tag, path, tag)

        if excludedTags:
            for tag in excludedTags:
                self.info('ignoring tag match for %s: %s',
                          tag, ', '.join(sorted(excludedTags[tag])))

    def postProcess(self):
        if self.suggestBuildRequires:
            self.info('possibly add to buildRequires: %s',
                      str(sorted(list(self.suggestBuildRequires))))
            self.recipe.reportMissingBuildRequires(
                self.suggestBuildRequires)


class Properties(policy.Policy):
    """
    NAME
    ====
    B{C{r.Properties()}} - Read property definition files

    SYNOPSIS
    ========
    C{r.Properties(I{exceptions=filterexp} || [I{contents=xml}, I{package=pkg:component}] || [I{/path/to/file}, I{filterexp}], I{contents=ipropcontents})}

    DESCRIPTION
    ===========
    The C{r.Properties()} policy automatically parses iconfig
    property definition files, making the properties available for
    configuration management with iconfig.

    To add configuration properties manually, use the syntax:
    C{r.Properties(I{contents=ipropcontents}, I{package=pkg:component}}
    Where contents is the xml string that would normally be stored in
    the iprop file and package is the component where to attach the
    config metadata. (NOTE: This component must exist)

    or

    C{r.Properties([I{/path/to/file}, I{filterexp}], I{contents=ipropcontents})
    Where contents is the xml string that would normally be stored in
    the iprop file and the path or filterexp matches the files that
    represent the conponent that the property should be attached to.
    """
    supported_targets = (TARGET_LINUX, TARGET_WINDOWS)
    bucket = policy.PACKAGE_CREATION
    processUnmodified = True
    _supports_file_properties = True
    requires = (
        # We need to know what component files have been assigned to
        ('PackageSpec', policy.REQUIRED_PRIOR),
    )

    def __init__(self, *args, **kwargs):
        policy.Policy.__init__(self, *args, **kwargs)
        self.ipropFilters = []
        self.ipropPaths = [ r'%(prefix)s/lib/iconfig/properties/.*\.iprop' ]
        self.contents = []
        self.paths = []
        self.fileFilters = []
        self.propMap = {}

    def updateArgs(self, *args, **kwargs):
        # Three call forms (see class docstring): contents+package,
        # contents+path filters, or plain filter expressions.
        if 'contents' in kwargs:
            contents = kwargs.pop('contents')
            pkg = kwargs.pop('package', None)
            if pkg is None and args:
                # attach contents to every matching path filter
                for arg in args:
                    self.paths.append((arg, contents))
            else:
                # attach contents directly to a named component
                self.contents.append((pkg, contents))
        policy.Policy.updateArgs(self, *args, **kwargs)

    def doProcess(self, recipe):
        # compile the accumulated filter expressions before walking files
        for filterSpec, iprop in self.paths:
            self.fileFilters.append((
                filter.Filter(filterSpec, recipe.macros),
                iprop,
            ))
        for ipropPath in self.ipropPaths:
            self.ipropFilters.append(
                filter.Filter(ipropPath, recipe.macros))
        policy.Policy.doProcess(self, recipe)

    def _getComponent(self, path):
        # Map a path to its (package, component) pair.
        # NOTE(review): returns None when the path is not in
        # componentMap; callers unpack the result directly, which
        # would raise TypeError in that case -- TODO confirm intended.
        componentMap = self.recipe.autopkg.componentMap
        if path not in componentMap:
            return
        main, comp = componentMap[path].getName().split(':')
        return main, comp

    def doFile(self, path):
        if path not in self.recipe.autopkg.pathMap:
            return

        for fltr, iprop in self.fileFilters:
            if fltr.match(path):
                main, comp = self._getComponent(path)
                self._parsePropertyData(iprop, main, comp)

        # Make sure any remaining files are actually in the root.
        fullpath = self.recipe.macros.destdir + path
        if not os.path.isfile(fullpath) or not util.isregular(fullpath):
            return

        # Check to see if this is an iprop file location that we know about.
        for fltr in self.ipropFilters:
            if fltr.match(path):
                break
        else:
            return

        main, comp = self._getComponent(path)
        xml = open(fullpath).read()
        self._parsePropertyData(xml, main, comp)

    def postProcess(self):
        # handle the contents+package form collected in updateArgs
        for pkg, content in self.contents:
            pkg = pkg % self.macros
            pkgName, compName = pkg.split(':')
            self._parsePropertyData(content, pkgName, compName)

    def _parsePropertyData(self, xml, pkgName, compName):
        # deduplicate: attach each xml blob to a component only once
        pkgSet = self.propMap.setdefault(xml, set())
        if (pkgName, compName) in pkgSet:
            return
        pkgSet.add((pkgName, compName))
        self.recipe._addProperty(trove._PROPERTY_TYPE_SMARTFORM, pkgName,
            compName, xml)


class MakeDevices(policy.Policy):
    """
    NAME
    ====
    B{C{r.MakeDevices()}} - Make device nodes

    SYNOPSIS
    ========
    C{MakeDevices([I{path},] [I{type},] [I{major},] [I{minor},] [I{owner},] [I{groups},] [I{mode}])}

    DESCRIPTION
    ===========
    The C{r.MakeDevices()} policy creates device nodes.  Conary's
    policy of non-root builds requires that these nodes exist only in
    the package, and not in the filesystem, as only root may actually
    create device nodes.

    EXAMPLES
    ========
    C{r.MakeDevices(I{'/dev/tty', 'c', 5, 0, 'root', 'root', mode=0666, package=':dev'})}

    Creates the device node C{/dev/tty}, as type 'c' (character, as
    opposed to type 'b', or block) with a major number of '5', minor
    number of '0', owner, and group are both the root user, and
    permissions are 0666.
    """
    bucket = policy.PACKAGE_CREATION
    processUnmodified = True
    requires = (
        ('PackageSpec', policy.REQUIRED_PRIOR),
        ('Ownership', policy.REQUIRED_SUBSEQUENT),
    )

    def __init__(self, *args, **keywords):
        self.devices = []
        policy.Policy.__init__(self, *args, **keywords)

    def updateArgs(self, *args, **keywords):
        """
        MakeDevices(path, devtype, major, minor, owner, group, mode=0400)
        """
        if args:
            args = list(args)
            l = len(args)
            # mode and package may be given positionally (args 7 and 8)
            # or as keywords; 6 to 8 positional args are accepted
            if not ((l > 5) and (l < 9)):
                self.recipe.error('MakeDevices: incorrect arguments: %r %r'
                    %(args, keywords))
            mode = keywords.pop('mode', None)
            package = keywords.pop('package', None)
            if l > 6 and mode is None:
                mode = args[6]
            if mode is None:
                mode = 0400
            if l > 7 and package is None:
                package = args[7]
            self.devices.append(
                (args[0:6], {'perms': mode, 'package': package}))
        policy.Policy.updateArgs(self, **keywords)

    def do(self):
        for device, kwargs in self.devices:
            r = self.recipe
            filename = device[0]
            owner = device[4]
            group = device[5]
            # register ownership before expanding macros in the path
            r.Ownership(owner, group, filename)
            device[0] = device[0] % r.macros
            r.autopkg.addDevice(*device, **kwargs)


class setModes(policy.Policy):
    """
    Do not call from recipes; this is used internally by C{r.SetModes},
    C{r.ParseManifest}, and unpacking derived packages.  This policy
    modified modes relative to the mode on the file in the filesystem.
    It adds setuid/setgid bits not otherwise set/honored on files on the
    filesystem, and sets user r/w/x bits if they were altered for the
    purposes of accessing the files during packaging.  Otherwise,
    it honors the bits found on the filesystem.  It does not modify
    bits in capsules.
    """
    bucket = policy.PACKAGE_CREATION
    processUnmodified = True
    requires = (
        ('PackageSpec', policy.REQUIRED_PRIOR),
        ('WarnWriteable', policy.REQUIRED_SUBSEQUENT),
        ('ExcludeDirectories', policy.CONDITIONAL_SUBSEQUENT),
    )

    def __init__(self, *args, **keywords):
        self.sidbits = {}
        self.userbits = {}
        policy.Policy.__init__(self, *args, **keywords)

    def updateArgs(self, *args, **keywords):
        """
        setModes(path(s), [sidbits=int], [userbits=int])
        """
        sidbits = keywords.pop('sidbits', None)
        userbits = keywords.pop('userbits', None)
        for path in args:
            if sidbits is not None:
                self.sidbits[path] = sidbits
            if userbits is not None:
                self.userbits[path] = userbits
            # suppress writable warnings for paths whose modes we manage
            # (escape the literal path so it is not treated as a macro)
            self.recipe.WarnWriteable(
                exceptions=re.escape(path).replace('%', '%%'),
                allowUnusedFilters = True)
        policy.Policy.updateArgs(self, **keywords)

    def doFile(self, path):
        # Don't set modes on capsule files
        if self.recipe._getCapsulePathsForFile(path):
            return
        # Skip files that aren't part of the package
        if path not in self.recipe.autopkg.pathMap:
            return
        newmode = oldmode = self.recipe.autopkg.pathMap[path].inode.perms()
        if path in self.userbits:
            # replace the user bits (and group x/suid area masked by
            # 077077) with the recorded value
            newmode = (newmode & 077077) | self.userbits[path]
        if path in self.sidbits and self.sidbits[path]:
            newmode |= self.sidbits[path]
            self.info('suid/sgid: %s mode 0%o', path, newmode & 07777)
        if newmode != oldmode:
            self.recipe.autopkg.pathMap[path].inode.perms.set(newmode)


class LinkType(policy.Policy):
    """
    NAME
    ====
    B{C{r.LinkType()}} - Ensures only regular, non-configuration files
    are hardlinked

    SYNOPSIS
    ========
    C{r.LinkType([I{filterexp}])}

    DESCRIPTION
    ===========
    The C{r.LinkType()} policy ensures that only regular,
    non-configuration files are hardlinked.

    EXAMPLES
    ========
    This policy is not called explicitly.
    """
    bucket = policy.PACKAGE_CREATION
    processUnmodified = True
    requires = (
        ('Config', policy.REQUIRED_PRIOR),
        ('PackageSpec', policy.REQUIRED_PRIOR),
    )

    def do(self):
        for component in self.recipe.autopkg.getComponents():
            for path in sorted(component.hardlinkMap.keys()):
                if self.recipe.autopkg.pathMap[path].flags.isConfig():
                    self.error("Config file %s has illegal hard links",
                               path)
            for path in component.badhardlinks:
                self.error("Special file %s has illegal hard links",
                           path)


class LinkCount(policy.Policy):
    """
    NAME
    ====
    B{C{r.LinkCount()}} - Restricts hardlinks across directories.

    SYNOPSIS
    ========
    C{LinkCount([I{filterexp}] | [I{exceptions=filterexp}])}

    DESCRIPTION
    ===========
    The C{r.LinkCount()} policy restricts hardlinks across directories.

    It is generally an error to have hardlinks across directories,
    except when the packager knows that there is no reasonable chance
    that they will be on separate filesystems.

    In cases where the packager is certain hardlinks will not cross
    filesystems, a list of regular expressions specifying files
    which are excepted from this rule may be passed to C{r.LinkCount}.

    EXAMPLES
    ========
    C{r.LinkCount(exceptions='/usr/share/zoneinfo/')}

    Uses C{r.LinkCount} to except zoneinfo files, located in
    C{/usr/share/zoneinfo/}, from the policy against cross-directory
    hardlinks.
    """
    bucket = policy.PACKAGE_CREATION
    processUnmodified = False
    requires = (
        ('PackageSpec', policy.REQUIRED_PRIOR),
    )

    def __init__(self, *args, **keywords):
        policy.Policy.__init__(self, *args, **keywords)
        self.excepts = set()

    def updateArgs(self, *args, **keywords):
        allowUnusedFilters = keywords.pop('allowUnusedFilters', False) or \
            self.allowUnusedFilters
        exceptions = keywords.pop('exceptions', None)
        if exceptions:
            if type(exceptions) is str:
                self.excepts.add(exceptions)
                if not allowUnusedFilters:
                    self.unusedFilters['exceptions'].add(exceptions)
            elif type(exceptions) in (tuple, list):
                self.excepts.update(exceptions)
                if not allowUnusedFilters:
                    self.unusedFilters['exceptions'].update(exceptions)
        # FIXME: we may want to have another keyword argument
        # that passes information down to the buildpackage
        # that causes link groups to be broken for some
        # directories but not others.  We need to research
        # first whether this is useful; it may not be.

    def do(self):
        if self.recipe.getType() == recipe.RECIPE_TYPE_CAPSULE:
            return
        filters = [(x, filter.Filter(x, self.macros))
                   for x in self.excepts]
        for component in self.recipe.autopkg.getComponents():
            for inode in component.linkGroups:
                # ensure all in same directory, except for directories
                # matching regexps that have been passed in
                allPaths = [x for x in component.linkGroups[inode]]
                # iterate over a copy since matching paths are removed
                for path in allPaths[:]:
                    for regexp, f in filters:
                        if f.match(path):
                            self.unusedFilters['exceptions'].discard(
                                regexp)
                            allPaths.remove(path)
                dirSet = set(os.path.dirname(x) + '/' for x in allPaths)
                if len(dirSet) > 1:
                    self.error('files %s are hard links across'
                               ' directories %s',
                               ', '.join(sorted(
                                   component.linkGroups[inode])),
                               ', '.join(sorted(list(dirSet))))
                    self.error('If these directories cannot reasonably be'
                               ' on different filesystems, disable this'
                               ' warning by calling'
                               " r.LinkCount(exceptions=('%s')) or"
                               " equivalent"
                               % "', '".join(sorted(list(dirSet))))


class ExcludeDirectories(policy.Policy):
    """
    NAME
    ====
    B{C{r.ExcludeDirectories()}} - Exclude
    directories from package

    SYNOPSIS
    ========
    C{r.ExcludeDirectories([I{filterexp}] | [I{exceptions=filterexp}])}

    DESCRIPTION
    ===========
    The C{r.ExcludeDirectories} policy causes directories to be
    excluded from the package by default.  Use
    C{r.ExcludeDirectories(exceptions=I{filterexp})} to set exceptions
    to this policy, which will cause directories matching the regular
    expression C{filterexp} to be included in the package.  Remember
    that Conary packages cannot share files, including directories, so
    only one package installed on a system at any one time can own the
    same directory.

    There are only three reasons to explicitly package a directory:
    the directory needs permissions other than 0755, it needs non-root
    owner or group, or it must exist even if it is empty.

    Therefore, it should generally not be necessary to invoke this
    policy directly.  If your directory requires permissions other
    than 0755, simply use C{r.SetMode} to specify the permissions, and
    the directory will be automatically included.  Similarly, if you
    wish to include an empty directory with owner or group information,
    call C{r.Ownership} on that empty directory,

    Because C{r.Ownership} can reasonably be called on an entire
    subdirectory tree and indiscriminately applied to files and
    directories alike, non-empty directories with owner or group set
    will be excluded from packaging unless an exception is explicitly
    provided.

    If you call C{r.Ownership} with a filter that applies to an empty
    directory, but you do not want to package that directory, you will
    have to remove the directory with C{r.Remove}.

    Packages do not need to explicitly include directories to ensure
    existence of a target to place a file in.  Conary will appropriately
    create the directory, and delete it later if the directory becomes
    empty.

    EXAMPLES
    ========
    C{r.ExcludeDirectories(exceptions='/tftpboot')}

    Sets the directory C{/tftboot} as an exception to the
    C{r.ExcludeDirectories} policy, so that the C{/tftpboot}
    directory will be included in the package.
    """
    bucket = policy.PACKAGE_CREATION
    processUnmodified = True
    requires = (
        ('PackageSpec', policy.REQUIRED_PRIOR),
        ('Ownership', policy.REQUIRED_PRIOR),
        ('MakeDevices', policy.CONDITIONAL_PRIOR),
    )
    invariantinclusions = [ ('.*', stat.S_IFDIR) ]
    supported_targets = (TARGET_LINUX, TARGET_WINDOWS)

    def doFile(self, path):
        # temporarily do nothing for capsules, we might do something later
        if self.recipe._getCapsulePathsForFile(path):
            return
        fullpath = self.recipe.macros.destdir + os.sep + path
        s = os.lstat(fullpath)
        mode = s[stat.ST_MODE]

        if mode & 0777 != 0755:
            # non-default permissions: keep the directory packaged
            self.info('excluding directory %s with mode %o', path,
                      mode&0777)
        elif not os.listdir(fullpath):
            # empty directory with default mode: package it only if it
            # carries non-root owner/group information
            d = self.recipe.autopkg.pathMap[path]
            if d.inode.owner.freeze() != 'root':
                self.info('not excluding empty directory %s'
                          ' because of non-root owner', path)
                return
            elif d.inode.group.freeze() != 'root':
                self.info('not excluding empty directory %s'
                          ' because of non-root group', path)
                return
            self.info('excluding empty directory %s', path)
            # if its empty and we're not packaging it, there's no need
            # for it to continue to exist on the filesystem to
            # potentially confuse other policy actions... see CNP-18
            os.rmdir(fullpath)
        self.recipe.autopkg.delFile(path)


class ByDefault(policy.Policy):
    """
    NAME
    ====
    B{C{r.ByDefault()}} - Determines components to be installed by
    default

    SYNOPSIS
    ========
    C{r.ByDefault([I{inclusions} || C{exceptions}=I{exceptions}])}

    DESCRIPTION
    ===========
    The C{r.ByDefault()} policy determines which components should
    be installed by default at the time the package is installed on
    the system.  The default setting for the C{ByDefault} policy is
    that the C{:debug}, and C{:test} packages are not installed with
    the package.

    The inclusions and exceptions do B{not} specify filenames.
    They are either C{I{package}:I{component}} or C{:I{component}}.
    Inclusions are considered before exceptions, and inclusions and
    exceptions are considered in the order provided in the recipe,
    and first match wins.

    EXAMPLES
    ========
    C{r.ByDefault(exceptions=[':manual'])}

    Uses C{r.ByDefault} to ignore C{:manual} components when
    enforcing the policy.

    C{r.ByDefault(exceptions=[':manual'])}
    C{r.ByDefault('foo:manual')}

    If these lines are in the C{bar} package, and there is both a
    C{foo:manual} and a C{bar:manual} component, then the
    C{foo:manual} component will be installed by default when the
    C{foo} package is installed, but the C{bar:manual} component
    will not be installed by default when the C{bar} package is
    installed.
    """
    bucket = policy.PACKAGE_CREATION
    requires = (
        ('PackageSpec', policy.REQUIRED_PRIOR),
    )
    filetree = policy.NO_FILES
    supported_targets = (TARGET_LINUX, TARGET_WINDOWS)

    invariantexceptions = [':test', ':debuginfo']

    allowUnusedFilters = True

    def doProcess(self, recipe):
        # record the byDefault on/off component sets on the recipe;
        # the invariant exceptions (:test, :debuginfo) are always off
        if not self.inclusions:
            self.inclusions = []
        if not self.exceptions:
            self.exceptions = []
        recipe.setByDefaultOn(frozenset(self.inclusions))
        recipe.setByDefaultOff(frozenset(self.exceptions +
                                         self.invariantexceptions))


class _UserGroup(policy.Policy):
    """
    Abstract base class that implements marking owner/group
    dependencies.
    """
    bucket = policy.PACKAGE_CREATION
    # All classes that descend from _UserGroup must run before the
    # Requires policy, as they implicitly depend on it to set the
    # file requirements and union the requirements up to the package.
    requires = (
        ('PackageSpec', policy.REQUIRED_PRIOR),
        ('Requires', policy.REQUIRED_SUBSEQUENT),
    )
    filetree = policy.PACKAGE
    processUnmodified = True

    def setUserGroupDep(self, path, info, depClass):
        # Attach a user-info or group-info dependency for 'info' to the
        # file at 'path' in whichever component owns it.
        componentMap = self.recipe.autopkg.componentMap
        if path not in componentMap:
            return
        pkg = componentMap[path]
        f = pkg.getFile(path)
        if path not in pkg.requiresMap:
            pkg.requiresMap[path] = deps.DependencySet()
        pkg.requiresMap[path].addDep(depClass,
                                     deps.Dependency(info, []))


class Ownership(_UserGroup):
    """
    NAME
    ====
    B{C{r.Ownership()}} - Set file ownership

    SYNOPSIS
    ========
    C{r.Ownership([I{username},] [I{groupname},] [I{filterexp}])}

    DESCRIPTION
    ===========
    The C{r.Ownership()} policy sets user and group ownership of
    files when the default of C{root:root} is not appropriate.

    List the ownerships in order, most specific first, ending with
    least specific.  The filespecs will be matched in the order that
    you provide them.

    KEYWORDS
    ========
    None.

    EXAMPLES
    ========
    C{r.Ownership('apache', 'apache', '%(localstatedir)s/lib/php/session')}

    Sets ownership of C{%(localstatedir)s/lib/php/session} to owner
    C{apache}, and group C{apache}.
    """

    def __init__(self, *args, **keywords):
        self.filespecs = []
        self.systemusers = ('root',)
        self.systemgroups = ('root',)
        policy.Policy.__init__(self, *args, **keywords)

    def updateArgs(self, *args, **keywords):
        # args are (user, group, filespec, filespec, ...); store one
        # (filespec, user, group) triple per filespec
        if args:
            for filespec in args[2:]:
                self.filespecs.append((filespec, args[0], args[1]))
        policy.Policy.updateArgs(self, **keywords)

    def doProcess(self, recipe):
        # we must NEVER take ownership from the filesystem
        assert(not self.exceptions)
        self.rootdir = self.rootdir % recipe.macros
        self.fileFilters = []
        for (filespec, user, group) in self.filespecs:
            self.fileFilters.append(
                (filter.Filter(filespec, recipe.macros),
                 user %recipe.macros,
                 group %recipe.macros))
        del self.filespecs
        policy.Policy.doProcess(self, recipe)

    def doFile(self, path):
        if self.recipe._getCapsulePathsForFile(path):
            return
        pkgfile = self.recipe.autopkg.pathMap[path]
        pkgOwner = pkgfile.inode.owner()
        pkgGroup = pkgfile.inode.group()
        bestOwner = pkgOwner
        bestGroup = pkgGroup
        # first matching filter wins
        for (f, owner, group) in self.fileFilters:
            if f.match(path):
                bestOwner, bestGroup = owner, group
                break

        if bestOwner != pkgOwner:
            pkgfile.inode.owner.set(bestOwner)
        if bestGroup != pkgGroup:
            pkgfile.inode.group.set(bestGroup)

        # non-system users/groups become info- dependencies so the
        # providing info packages are pulled in at install time
        if bestOwner and bestOwner not in self.systemusers:
            self.setUserGroupDep(path, bestOwner,
                                 deps.UserInfoDependencies)
        if bestGroup and bestGroup not in self.systemgroups:
            self.setUserGroupDep(path, bestGroup,
                                 deps.GroupInfoDependencies)


class _Utilize(_UserGroup):
    """
    Pure virtual base class for C{UtilizeUser} and C{UtilizeGroup}
    """
    def __init__(self, *args, **keywords):
        self.filespecs = []
        policy.Policy.__init__(self, *args, **keywords)

    def updateArgs(self, *args, **keywords):
        """
        call as::
          UtilizeFoo(item, filespec(s)...)
        List them in order, most specific first, ending with most
        general; the filespecs will be matched in the order that you
        provide them.
""" item = args[0] % self.recipe.macros if args: for filespec in args[1:]: self.filespecs.append((filespec, item)) policy.Policy.updateArgs(self, **keywords) def doProcess(self, recipe): self.rootdir = self.rootdir % recipe.macros self.fileFilters = [] for (filespec, item) in self.filespecs: self.fileFilters.append( (filter.Filter(filespec, recipe.macros), item)) del self.filespecs policy.Policy.doProcess(self, recipe) def doFile(self, path): for (f, item) in self.fileFilters: if f.match(path): self._markItem(path, item) return def _markItem(self, path, item): # pure virtual assert(False) class UtilizeUser(_Utilize): """ NAME ==== B{C{r.UtilizeUser()}} - Marks files as requiring a user definition to exist SYNOPSIS ======== C{r.UtilizeUser([I{username}, I{filterexp}])} DESCRIPTION =========== The C{r.UtilizeUser} policy marks files as requiring a user definition to exist even though the file is not owned by that user. This is particularly useful for daemons that are setuid root ant change their user id to a user id with no filesystem permissions after they start. EXAMPLES ======== C{r.UtilizeUser('sshd', '%(sbindir)s/sshd')} Marks the file C{%(sbindir)s/sshd} as requiring the user definition 'sshd' although the file is not owned by the 'sshd' user. """ def _markItem(self, path, user): if not self.recipe._getCapsulePathsForFile(path): self.info('user %s: %s' % (user, path)) self.setUserGroupDep(path, user, deps.UserInfoDependencies) class UtilizeGroup(_Utilize): """ NAME ==== B{C{r.UtilizeGroup()}} - Marks files as requiring a user definition to exist SYNOPSIS ======== C{r.UtilizeGroup([groupname, filterexp])} DESCRIPTION =========== The C{r.UtilizeGroup} policy marks files as requiring a group definition to exist even though the file is not owned by that group. This is particularly useful for daemons that are setuid root ant change their user id to a group id with no filesystem permissions after they start. 
EXAMPLES ======== C{r.UtilizeGroup('users', '%(sysconfdir)s/default/useradd')} Marks the file C{%(sysconfdir)s/default/useradd} as requiring the group definition 'users' although the file is not owned by the 'users' group. """ def _markItem(self, path, group): if not self.recipe._getCapsulePathsForFile(path): self.info('group %s: %s' % (group, path)) self.setUserGroupDep(path, group, deps.GroupInfoDependencies) class ComponentRequires(policy.Policy): """ NAME ==== B{C{r.ComponentRequires()}} - Create automatic intra-package, inter-component dependencies SYNOPSIS ======== C{r.ComponentRequires([{'I{componentname}': I{requiringComponentSet}}] | [{'I{packagename}': {'I{componentname}': I{requiringComponentSet}}}])} DESCRIPTION =========== The C{r.ComponentRequires()} policy creates automatic, intra-package, inter-component dependencies, such as a corresponding dependency between C{:lib} and C{:data} components. Changes are passed in using dictionaries, both for additions that are specific to a specific package, and additions that apply generally to all binary packages being cooked from one recipe. For general changes that are not specific to a package, use this syntax: C{r.ComponentRequires({'I{componentname}': I{requiringComponentSet}})}. For package-specific changes, you need to specify packages as well as components: C{r.ComponentRequires({'I{packagename}': 'I{componentname}': I{requiringComponentSet}})}. By default, both C{:lib} and C{:runtime} components (if they exist) require the C{:data} component (if it exists). If you call C{r.ComponentRequires({'data': set(('lib',))})}, you limit it so that C{:runtime} components will not require C{:data} components for this recipe. In recipes that create more than one binary package, you may need to limit your changes to a single binary package. To do so, use the package-specific syntax. 
    For example, to remove the C{:runtime} requirement on C{:data} only
    for the C{foo} package, call:
    C{r.ComponentRequires({'foo': 'data': set(('lib',))})}.

    Note that C{r.ComponentRequires} cannot require capability flags; use
    C{r.Requires} if you need to specify requirements, including
    capability flags.

    EXAMPLES
    ========
    C{r.ComponentRequires({'openssl': {'config': set(('runtime', 'lib'))}})}

    Uses C{r.ComponentRequires} to create dependencies in a top-level
    manner for the C{:runtime} and C{:lib} component sets to require the
    C{:config} component for the C{openssl} package.
    """
    bucket = policy.PACKAGE_CREATION
    requires = (
        ('PackageSpec', policy.REQUIRED_PRIOR),
        ('ExcludeDirectories', policy.CONDITIONAL_PRIOR),
    )
    supported_targets = (TARGET_LINUX, TARGET_WINDOWS)

    def __init__(self, *args, **keywords):
        self.depMap = {
            # component: components that require it if they both exist
            'data': frozenset(('lib', 'runtime', 'devellib',
                               'cil', 'java', 'perl', 'python', 'ruby')),
            'devellib': frozenset(('devel',)),
            'lib': frozenset(('devel', 'devellib', 'runtime')),
            'config': frozenset(('runtime', 'lib', 'devellib', 'devel')),
        }
        # per-package overrides of depMap, keyed by package name
        self.overridesMap = {}
        policy.Policy.__init__(self, *args, **keywords)

    def updateArgs(self, *args, **keywords):
        # Accepts either {'component': set(...)} (global) or
        # {'package': {'component': set(...)}} (package-specific).
        d = args[0]
        if isinstance(d[d.keys()[0]], dict):
            # dict of dicts
            for packageName in d:
                if packageName not in self.overridesMap:
                    # start with defaults, then override them individually
                    o = {}
                    o.update(self.depMap)
                    self.overridesMap[packageName] = o
                self.overridesMap[packageName].update(d[packageName])
        else:
            # dict of sets
            self.depMap.update(d)

    def do(self):
        # For every (required, requiring) component pair present in a
        # package, add a trove dependency from requiring onto required.
        flags = []
        if self.recipe.isCrossCompileTool():
            flags.append((_getTargetDepFlag(self.macros),
                          deps.FLAG_SENSE_REQUIRED))
        components = self.recipe.autopkg.components
        for packageName in [x.name for x in self.recipe.autopkg.packageMap]:
            if packageName in self.overridesMap:
                d = self.overridesMap[packageName]
            else:
                d = self.depMap
            for requiredComponent in d:
                for requiringComponent in d[requiredComponent]:
                    reqName = ':'.join((packageName, requiredComponent))
                    wantName = ':'.join((packageName, requiringComponent))
                    if (reqName in components and wantName in components and
                        components[reqName] and components[wantName]):
                        if (d == self.depMap and
                            reqName in self.recipe._componentReqs and
                            wantName in self.recipe._componentReqs):
                            # this is an automatically generated dependency
                            # which was not in the parent of a derived
                            # package. don't add it here either
                            continue
                        # Note: this does not add dependencies to files;
                        # these dependencies are insufficiently specific
                        # to attach to files.
                        ds = deps.DependencySet()
                        depClass = deps.TroveDependencies
                        ds.addDep(depClass, deps.Dependency(reqName, flags))
                        p = components[wantName]
                        p.requires.union(ds)


class ComponentProvides(policy.Policy):
    """
    NAME
    ====
    B{C{r.ComponentProvides()}} - Causes each trove to explicitly
    provide itself.

    SYNOPSIS
    ========
    C{r.ComponentProvides(I{flags})}

    DESCRIPTION
    ===========
    The C{r.ComponentProvides()} policy causes each trove to explicitly
    provide its name.  Call it to provide optional capability flags
    consisting of a single string, or a list, tuple, or set of strings.
    It is impossible to provide a capability flag for one component but
    not another within a single package.

    EXAMPLES
    ========
    C{r.ComponentProvides("addcolumn")}

    Uses C{r.ComponentProvides} in the context of the sqlite recipe, and
    causes sqlite to provide itself explicitly with the capability flag
    C{addcolumn}.
    """
    bucket = policy.PACKAGE_CREATION
    requires = (
        ('PackageSpec', policy.REQUIRED_PRIOR),
        ('ExcludeDirectories', policy.CONDITIONAL_PRIOR),
    )
    supported_targets = (TARGET_LINUX, TARGET_WINDOWS)

    def __init__(self, *args, **keywords):
        # capability flags to provide, and regex strings excluding flags
        self.flags = set()
        self.excepts = set()
        policy.Policy.__init__(self, *args, **keywords)

    def updateArgs(self, *args, **keywords):
        if 'exceptions' in keywords:
            exceptions = keywords.pop('exceptions')
            if type(exceptions) is str:
                self.excepts.add(exceptions)
            elif type(exceptions) in (tuple, list):
                self.excepts.update(set(exceptions))
        if not args:
            return
        if len(args) >= 2:
            # update the documentation if we ever support the
            # pkgname, flags calling convention
            #pkgname = args[0]
            flags = args[1]
        else:
            flags = args[0]
        if not isinstance(flags, (list, tuple, set)):
            flags=(flags,)
        self.flags |= set(flags)

    def do(self):
        # Compile exceptions, filter the flags through them, then add a
        # self-provides trove dependency to every component.
        self.excepts = set(re.compile(x) for x in self.excepts)
        self.flags = set(x for x in self.flags
                         if not [y.match(x) for y in self.excepts])
        if self.flags:
            flags = [ (x % self.macros, deps.FLAG_SENSE_REQUIRED)
                      for x in self.flags ]
        else:
            flags = []
        if self.recipe.isCrossCompileTool():
            flags.append(('target-%s' % self.macros.target,
                          deps.FLAG_SENSE_REQUIRED))

        for component in self.recipe.autopkg.components.values():
            component.provides.addDep(deps.TroveDependencies,
                deps.Dependency(component.name, flags))


def _getTargetDepFlag(macros):
    # Capability flag naming the cross-compile target architecture.
    return 'target-%s' % macros.target


class _dependency(policy.Policy):
    """
    Internal class for shared code between Provides and Requires
    """

    def __init__(self, *args, **kwargs):
        # bootstrap keeping only one copy of these around
        self.bootstrapPythonFlags = None
        self.bootstrapSysPath = []
        self.bootstrapPerlIncPath = []
        self.bootstrapRubyLibs = []
        self.cachedProviders = {}
        self.pythonFlagNamespace = None
        self.removeFlagsByDependencyClass = None # pre-transform
        self.removeFlagsByDependencyClassMap = {}

    def updateArgs(self, *args, **keywords):
        # removeFlagsByDependencyClass is (className, flagsOrRegex);
        # store as a per-class list of flag sets / compiled regexes.
        removeFlagsByDependencyClass = keywords.pop(
            'removeFlagsByDependencyClass', None)
        if removeFlagsByDependencyClass is not None:
            clsName, ignoreFlags = removeFlagsByDependencyClass
            cls = deps.dependencyClassesByName[clsName]
            l = self.removeFlagsByDependencyClassMap.setdefault(cls, [])
            if isinstance(ignoreFlags, (list, set, tuple)):
                l.append(set(ignoreFlags))
            else:
                # a plain string is treated as a regular expression
                l.append(re.compile(ignoreFlags))
        policy.Policy.updateArgs(self, **keywords)

    def preProcess(self):
        # Compile shared regexes and reset the per-run caches.
        self.CILPolicyRE = re.compile(r'.*mono/.*/policy.*/policy.*\.config$')
        self.legalCharsRE = re.compile('[.0-9A-Za-z_+-/]')
        self.pythonInterpRE = re.compile(r'\.[a-z]+-\d\dm?')
        # interpolate macros, using canonical path form with no trailing /
        self.sonameSubtrees = set(os.path.normpath(x % self.macros)
                                  for x in self.sonameSubtrees)
        self.pythonFlagCache = {}
        self.pythonTroveFlagCache = {}
        self.pythonVersionCache = {}

    def _hasContents(self, m, contents):
        """
        Return False if contents is set and m does not have that contents
        """
        if contents and (contents not in m.contents or
                         not m.contents[contents]):
            return False
        return True

    def _isELF(self, m, contents=None):
        "Test whether is ELF file and optionally has certain contents"
        # Note: for provides, check for 'abi' not 'provides' because we
        # can provide the filename even if there is no provides list
        # as long as a DT_NEEDED entry has been present to set the abi
        return m and m.name == 'ELF' and self._hasContents(m, contents)

    def _isPython(self, path):
        # Python source or compiled bytecode, by extension only.
        return path.endswith('.py') or path.endswith('.pyc')

    def _isPythonModuleCandidate(self, path):
        # extension modules (.so) may also be python modules
        return path.endswith('.so') or self._isPython(path)

    def _runPythonScript(self, binPath, destdir, libdir, scriptLines):
        # Run scriptLines under the interpreter at binPath and return its
        # stdout; raises RuntimeError on non-zero exit.
        script = '\n'.join(scriptLines)
        environ = {}
        if binPath.startswith(destdir):
            # interpreter still in the destdir needs its own libraries
            environ['LD_LIBRARY_PATH'] = destdir + libdir
        proc = subprocess.Popen([binPath, '-Ec', script],
                                executable=binPath,
                                stdout=subprocess.PIPE,
                                shell=False,
                                env=environ,
                                )
        stdout, _ = proc.communicate()
        if proc.returncode:
            raise RuntimeError("Process exited with status %s" %
                               (proc.returncode,))
        return stdout

    def _getPythonVersion(self, pythonPath, destdir, libdir):
        # Ask the interpreter for its major.minor version, falling back to
        # guessing from the path; results are cached per interpreter.
        if pythonPath not in self.pythonVersionCache:
            try:
                stdout = self._runPythonScript(pythonPath, destdir, libdir,
                    ["import sys", "print('%d.%d' % sys.version_info[:2])"])
                self.pythonVersionCache[pythonPath] = stdout.strip()
            except (OSError, RuntimeError):
                self.warn("Unable to determine Python version directly; "
                          "guessing based on path.")
                self.pythonVersionCache[pythonPath] = \
                    self._getPythonVersionFromPath(pythonPath, destdir)
        return self.pythonVersionCache[pythonPath]

    def _getPythonSysPath(self, pythonPath, destdir, libdir,
                          useDestDir=False):
        """Return the system path for the python interpreter at C{pythonPath}

        @param pythonPath: Path to the target python interpreter
        @param destdir: Destination root, in case of a python bootstrap
        @param libdir: Destination libdir, in case of a python bootstrap
        @param useDestDir: If True, look in the destdir instead.
        """
        script = ["import sys, site"]
        if useDestDir:
            # Repoint site.py at the destdir so it picks up .pth files there.
            script.extend([
                "sys.path = []",
                "sys.prefix = %r + sys.prefix" % (destdir,),
                "sys.exec_prefix = %r + sys.exec_prefix" % (destdir,),
                "site.PREFIXES = [sys.prefix, sys.exec_prefix]",
                "site.addsitepackages(None)",
                ])
        script.append(r"print('\0'.join(sys.path))")
        try:
            stdout = self._runPythonScript(pythonPath, destdir, libdir,
                                           script)
        except (OSError, RuntimeError):
            # something went wrong, don't trust any output
            self.info('Could not run system python "%s", guessing sys.path...',
                      pythonPath)
            sysPath = []
        else:
            sysPath = [x.strip() for x in stdout.split('\0') if x.strip()]

        if not sysPath and not useDestDir:
            # probably a cross-build -- let's try a decent assumption
            # for the syspath.
            self.info("Failed to detect system python path, using fallback")
            pyVer = self._getPythonVersionFromPath(pythonPath, destdir)
            if not pyVer and self.bootstrapPythonFlags is not None:
                pyVer = self._getPythonVersionFromFlags(
                    self.bootstrapPythonFlags)
            if pyVer and self.bootstrapSysPath is not None:
                lib = self.recipe.macros.lib
                # this list needs to include all sys.path elements that
                # might be needed for python per se -- note that
                # bootstrapPythonFlags and bootstrapSysPath go
                # together
                sysPath = self.bootstrapSysPath + [
                    '/usr/%s/%s' %(lib, pyVer),
                    '/usr/%s/%s/plat-linux2' %(lib, pyVer),
                    '/usr/%s/%s/lib-tk' %(lib, pyVer),
                    '/usr/%s/%s/lib-dynload' %(lib, pyVer),
                    '/usr/%s/%s/site-packages' %(lib, pyVer),
                    # for purelib python on x86_64
                    '/usr/lib/%s/site-packages' %pyVer,
                ]
        return sysPath

    def _warnPythonPathNotInDB(self, pathName):
        # Shared "not in the conary database" warning; returns the empty
        # flag set used as the cache value.
        self.warn('%s found on system but not provided by'
                  ' system database; python requirements'
                  ' may be generated incorrectly as a result', pathName)
        return set([])

    def _getPythonTroveFlags(self, pathName):
        # Collect the python dependency flags required by the trove that
        # owns pathName; cached per path.
        if pathName in self.pythonTroveFlagCache:
            return self.pythonTroveFlagCache[pathName]
        db = self._getDb()
        foundPath = False
        pythonFlags = set()
        pythonTroveList = db.iterTrovesByPath(pathName)
        if pythonTroveList:
            depContainer = pythonTroveList[0]
            assert(depContainer.getName())
            foundPath = True
            for dep in depContainer.getRequires().iterDepsByClass(
                    deps.PythonDependencies):
                flagNames = [x[0] for x in dep.getFlags()[0]]
                pythonFlags.update(flagNames)
            self.pythonTroveFlagCache[pathName] = pythonFlags
        if not foundPath:
            self.pythonTroveFlagCache[pathName] = \
                self._warnPythonPathNotInDB(pathName)
        return self.pythonTroveFlagCache[pathName]

    def _getPythonFlags(self, pathName, bootstrapPythonFlags=None):
        # Collect the python flags provided by the file at pathName as
        # recorded in the conary database; cached per path.
        if pathName in self.pythonFlagCache:
            return self.pythonFlagCache[pathName]

        if bootstrapPythonFlags:
            self.pythonFlagCache[pathName] = bootstrapPythonFlags
            return self.pythonFlagCache[pathName]

        db = self._getDb()
        foundPath = False

        # FIXME: This should be iterFilesByPath when implemented (CNY-1833)
        # For now, cache all the python deps in all the files in the
        # trove(s) so that we iterate over each trove only once
        containingTroveList = db.iterTrovesByPath(pathName)
        for containerTrove in containingTroveList:
            for pathid, p, fileid, v in containerTrove.iterFileList():
                if pathName == p:
                    foundPath = True
                pythonFlags = set()
                f = files.ThawFile(db.getFileStream(fileid), pathid)
                for dep in f.provides().iterDepsByClass(
                        deps.PythonDependencies):
                    flagNames = [x[0] for x in dep.getFlags()[0]]
                    pythonFlags.update(flagNames)
                self.pythonFlagCache[p] = pythonFlags

        if not foundPath:
            self.pythonFlagCache[pathName] = \
                self._warnPythonPathNotInDB(pathName)
        return self.pythonFlagCache[pathName]

    def _getPythonFlagsFromPath(self, pathName):
        # Derive lib/version flags purely from path components, e.g.
        # .../lib64/python2.6/... -> set(('lib64', '2.6')).
        pathList = pathName.split('/')
        foundLib = False
        foundVer = False
        flags = set()
        for dirName in pathList:
            if not foundVer and not foundLib and dirName.startswith('lib'):
                # lib will always come before ver
                foundLib = True
                flags.add(dirName)
            elif not foundVer and dirName.startswith('python'):
                foundVer = True
                flags.add(dirName[6:])
            if foundLib and foundVer:
                break
        if self.pythonFlagNamespace:
            flags = set('%s:%s' %(self.pythonFlagNamespace, x)
                        for x in flags)
        return flags

    def _stringIsPythonVersion(self, s):
        # True when s contains only digits and dots (e.g. '2.6').
        return not set(s).difference(set('.0123456789'))

    def _getPythonVersionFromFlags(self, flags):
        # Return 'pythonX.Y' for the first flag that looks like a version,
        # stripping the configured namespace prefix first.
        # NOTE(review): assumes pythonFlagNamespace includes its trailing
        # separator so the remainder is a bare version string -- confirm
        # against where pythonFlagNamespace is assigned.
        nameSpace = self.pythonFlagNamespace
        for flag in flags:
            if nameSpace and flag.startswith(nameSpace):
                flag = flag[len(nameSpace):]
            if self._stringIsPythonVersion(flag):
                return 'python'+flag

    def _getPythonVersionFromPath(self, pathName, destdir):
        # Find a 'pythonX.Y' path component; returns '' when none found.
        if destdir and pathName.startswith(destdir):
            pathName = pathName[len(destdir):]
        pathList = pathName.split('/')
        for dirName in pathList:
            if dirName.startswith('python') and self._stringIsPythonVersion(
                    dirName[6:]):
                # python2.4 or python2.5 or python3.9 but not python.so
                return dirName
        return ''

    def _isCIL(self, m):
        # CIL (.NET/Mono) file, per magic detection.
        return m and m.name == 'CIL'

    def _isJava(self, m,
contents=None): return m and isinstance(m, (magic.jar, magic.java)) and self._hasContents(m, contents) def _isPerlModule(self, path): return (path.endswith('.pm') or path.endswith('.pl') or path.endswith('.ph')) def _isPerl(self, path, m, f): return self._isPerlModule(path) or ( f.inode.perms() & 0111 and m and m.name == 'script' and 'interpreter' in m.contents and '/bin/perl' in m.contents['interpreter']) def _createELFDepSet(self, m, elfinfo, recipe=None, basedir=None, soname=None, soflags=None, libPathMap={}, getRPATH=None, path=None, isProvides=None): """ Add dependencies from ELF information. @param m: magic.ELF object @param elfinfo: requires or provides from magic.ELF.contents @param recipe: recipe object for calling Requires if basedir is not None @param basedir: directory to add into dependency @param soname: alternative soname to use @param libPathMap: mapping from base dependency name to new dependency name @param isProvides: whether the dependency being created is a provides """ abi = m.contents['abi'] elfClass = abi[0] nameMap = {} usesLinuxAbi = False depSet = deps.DependencySet() for depClass, main, flags in elfinfo: if soflags: flags = itertools.chain(*(flags, soflags)) flags = [ (x, deps.FLAG_SENSE_REQUIRED) for x in flags ] if depClass == 'soname': if '/' in main: main = os.path.basename(main) if getRPATH: rpath = getRPATH(main) if rpath: # change the name to follow the rpath main = '/'.join((rpath, main)) elif soname: main = soname if basedir: oldname = os.path.normpath('/'.join((elfClass, main))) main = '/'.join((basedir, main)) main = os.path.normpath('/'.join((elfClass, main))) if basedir: nameMap[main] = oldname if libPathMap and main in libPathMap: # if we have a mapping to a provided library that would be # satisfied, then we modify the requirement to match the # provision provided = libPathMap[main] requiredSet = set(x[0] for x in flags) providedSet = set(provided.flags.keys()) if requiredSet.issubset(providedSet): main = 
provided.getName()[0] else: pathString = '' if path: pathString = 'for path %s' %path self.warn('Not replacing %s with %s because of missing %s%s', main, provided.getName()[0], sorted(list(requiredSet-providedSet)), pathString) curClass = deps.SonameDependencies for flag in abi[1]: if flag == 'Linux': usesLinuxAbi = True flags.append(('SysV', deps.FLAG_SENSE_REQUIRED)) else: flags.append((flag, deps.FLAG_SENSE_REQUIRED)) dep = deps.Dependency(main, flags) elif depClass == 'abi': curClass = deps.AbiDependency dep = deps.Dependency(main, flags) else: assert(0) depSet.addDep(curClass, dep) # This loop has to happen late so that the soname # flag merging from multiple flag instances has happened if nameMap: for soDep in depSet.iterDepsByClass(deps.SonameDependencies): newName = soDep.getName()[0] if newName in nameMap: oldName = nameMap[newName] recipe.Requires(_privateDepMap=(oldname, soDep)) if usesLinuxAbi and not isProvides: isnset = m.contents.get('isnset', None) if elfClass == 'ELF32' and isnset == 'x86': main = 'ELF32/ld-linux.so.2' elif elfClass == 'ELF64' and isnset == 'x86_64': main = 'ELF64/ld-linux-x86-64.so.2' else: self.error('%s: unknown ELF class %s or instruction set %s', path, elfClass, isnset) return depSet flags = [('Linux', deps.FLAG_SENSE_REQUIRED), ('SysV', deps.FLAG_SENSE_REQUIRED), (isnset, deps.FLAG_SENSE_REQUIRED)] dep = deps.Dependency(main, flags) depSet.addDep(curClass, dep) return depSet def _addDepToMap(self, path, depMap, depType, dep): "Add a single dependency to a map, regardless of whether path was listed before" if path not in depMap: depMap[path] = deps.DependencySet() depMap[path].addDep(depType, dep) def _addDepSetToMap(self, path, depMap, depSet): "Add a dependency set to a map, regardless of whether path was listed before" if path in depMap: depMap[path].union(depSet) else: depMap[path] = depSet @staticmethod def _recurseSymlink(path, destdir, fullpath=None): """ Recurse through symlinks in destdir and get the final path and 
        fullpath.  If initial fullpath (or destdir+path if fullpath not
        specified) does not exist, return path.
        """
        if fullpath is None:
            fullpath = destdir + path
        while os.path.islink(fullpath):
            contents = os.readlink(fullpath)
            if contents.startswith('/'):
                # absolute link targets are rooted at destdir
                fullpath = os.path.normpath(contents)
            else:
                fullpath = os.path.normpath(
                    os.path.dirname(fullpath)+'/'+contents)
        return fullpath[len(destdir):], fullpath

    def _symlinkMagic(self, path, fullpath, macros, m=None):
        "Recurse through symlinks and get the final path and magic"
        path, _ = self._recurseSymlink(path, macros.destdir,
                                       fullpath=fullpath)
        m = self.recipe.magic[path]
        return m, path

    def _enforceProvidedPath(self, path, fileType='interpreter',
                             unmanagedError=False):
        # Report a missing buildRequires for the trove providing path;
        # returns the trove name (cached) or None if path is unmanaged.
        key = path, fileType
        if key in self.cachedProviders:
            return self.cachedProviders[key]
        db = self._getDb()
        troveNames = [ x.getName() for x in db.iterTrovesByPath(path) ]
        if not troveNames:
            talk = {True: self.error,
                    False: self.warn}[bool(unmanagedError)]
            talk('%s file %s not managed by conary' %(fileType, path))
            return None
        troveName = sorted(troveNames)[0]

        # prefer corresponding :devel to :devellib if it exists
        package, component = troveName.split(':', 1)
        if component in ('devellib', 'lib'):
            for preferredComponent in ('devel', 'devellib'):
                troveSpec = (
                    ':'.join((package, preferredComponent)),
                    None, None
                )
                results = db.findTroves(None, [troveSpec],
                                        allowMissing = True)
                if troveSpec in results:
                    troveName = results[troveSpec][0][0]
                    break

        if troveName not in self.recipe._getTransitiveBuildRequiresNames():
            self.recipe.reportMissingBuildRequires(troveName)

        self.cachedProviders[key] = troveName
        return troveName

    def _getRuby(self, macros, path):
        # For bootstrapping purposes, prefer the just-built version if
        # it exists
        # Returns tuple: (pathToRubyInterpreter, bootstrap)
        ruby = '%(ruby)s' %macros
        if os.access('%(destdir)s/%(ruby)s' %macros, os.X_OK):
            return '%(destdir)s/%(ruby)s' %macros, True
        elif os.access(ruby, os.X_OK):
            # Enforce the build requirement, since it is not in the package
            self._enforceProvidedPath(ruby)
            return ruby, False
        else:
            self.warn('%s not available for Ruby dependency discovery'
                      ' for path %s' %(ruby, path))
        return False, None

    def _getRubyLoadPath(self, macros, rubyInvocation, bootstrap):
        # Returns tuple of (invocationString, loadPathList)
        destdir = macros.destdir
        if bootstrap:
            # run the destdir ruby against its own libraries
            rubyLibPath = [destdir + x for x in self.bootstrapRubyLibs]
            rubyInvocation = (('LD_LIBRARY_PATH=%(destdir)s%(libdir)s '
                               'RUBYLIB="'+':'.join(rubyLibPath)+'" '
                               +rubyInvocation)%macros)
        rubyLoadPath = util.popen(
            "%s -e 'puts $:'" % rubyInvocation).readlines()
        # get gem dir if rubygems is installed
        if os.access('%(bindir)s/gem' %macros, os.X_OK):
            rubyLoadPath.extend(
                util.popen("%s -rubygems -e 'puts Gem.default_dir'" %
                           rubyInvocation).readlines())
        rubyLoadPath = [ x.strip() for x in rubyLoadPath
                         if x.startswith('/') ]
        loadPathList = rubyLoadPath[:]
        if bootstrap:
            rubyLoadPath = [ destdir+x for x in rubyLoadPath ]
            rubyInvocation = ('LD_LIBRARY_PATH=%(destdir)s%(libdir)s'
                              ' RUBYLIB="'+':'.join(rubyLoadPath)+'"'
                              ' %(destdir)s/%(ruby)s') % macros
        return (rubyInvocation, loadPathList)

    def _getRubyVersion(self, macros):
        # Ask the selected ruby for its version, trimmed to major.minor.
        cmd = self.rubyInvocation + (" -e 'puts RUBY_VERSION'" % macros)
        rubyVersion = util.popen(cmd).read()
        rubyVersion = '.'.join(rubyVersion.split('.')[0:2])
        return rubyVersion

    def _getRubyFlagsFromPath(self, pathName, rubyVersion):
        # Derive lib/version flags from path components, analogous to
        # _getPythonFlagsFromPath.
        pathList = pathName.split('/')
        pathList = [ x for x in pathList if x ]
        foundLib = False
        foundVer = False
        flags = set()
        for dirName in pathList:
            if not foundLib and dirName.startswith('lib'):
                foundLib = True
                flags.add(dirName)
            elif not foundVer and \
                    dirName.split('.')[:1] == rubyVersion.split('.')[:1]:
                # we only compare major and minor versions due to
                # ruby api version (dirName) differing from programs
                # version (rubyVersion)
                # NOTE(review): the comment says major and minor, but
                # [:1] compares only the first (major) component --
                # confirm the intended slice.
                foundVer = True
                flags.add(dirName)
            if foundLib and foundVer:
                break
        return flags

    def _getmonodis(self, macros, path):
        # For bootstrapping purposes, prefer the just-built version if
        # it exists
        monodis = '%(monodis)s' %macros
        if os.access('%(destdir)s/%(monodis)s' %macros, os.X_OK):
            return ('MONO_PATH=%(destdir)s%(prefix)s/lib'
                    ' LD_LIBRARY_PATH=%(destdir)s%(libdir)s'
                    ' %(destdir)s/%(monodis)s' %macros)
        elif os.access(monodis, os.X_OK):
            # Enforce the build requirement, since it is not in the package
            self._enforceProvidedPath(monodis)
            return monodis
        else:
            self.warn('%s not available for CIL dependency discovery'
                      ' for path %s' %(monodis, path))
        return None

    def _getperlincpath(self, perl, destdir):
        """
        Fetch the perl @INC path, falling back to bootstrapPerlIncPath
        only if perl cannot be run.  All elements of the search path
        will be resolved against symlinks in destdir if they exist.
        (CNY-2949)
        """
        if not perl:
            return []
        p = util.popen(r"""%s -e 'print join("\n", @INC)'""" %perl)
        perlIncPath = p.readlines()
        # make sure that the command completed successfully
        try:
            rc = p.close()
            perlIncPath = [x.strip() for x in perlIncPath
                           if not x.startswith('.')]
            return [self._recurseSymlink(x, destdir)[0]
                    for x in perlIncPath]
        except RuntimeError:
            return [self._recurseSymlink(x, destdir)[0]
                    for x in self.bootstrapPerlIncPath]

    def _getperl(self, macros, recipe):
        """
        Find the preferred instance of perl to use, including
        setting any environment variables necessary to use that perl.
        Returns string for running it, the C{@INC} path, and a separate
        string, if necessary, for adding to @INC.
""" perlDestPath = '%(destdir)s%(bindir)s/perl' %macros # not %(bindir)s so that package modifications do not affect # the search for system perl perlPath = '/usr/bin/perl' destdir = macros.destdir def _perlDestInc(destdir, perlDestInc): return ' '.join(['-I' + destdir + x for x in perlDestInc]) if os.access(perlDestPath, os.X_OK): # must use packaged perl if it exists m = recipe.magic[perlDestPath[len(destdir):]] # not perlPath if m and 'RPATH' in m.contents and m.contents['RPATH']: # we need to prepend the destdir to each element of the RPATH # in order to run perl in the destdir perl = ''.join(( 'export LD_LIBRARY_PATH=', '%s%s:' %(destdir, macros.libdir), ':'.join([destdir+x for x in m.contents['RPATH'].split(':')]), ';', perlDestPath )) perlIncPath = self._getperlincpath(perl, destdir) perlDestInc = _perlDestInc(destdir, perlIncPath) return [perl, perlIncPath, perlDestInc] else: # perl that does not use/need rpath perl = 'LD_LIBRARY_PATH=%s%s %s' %( destdir, macros.libdir, perlDestPath) perlIncPath = self._getperlincpath(perl, destdir) perlDestInc = _perlDestInc(destdir, perlIncPath) return [perl, perlIncPath, perlDestInc] elif os.access(perlPath, os.X_OK): # system perl if no packaged perl, needs no @INC mangling self._enforceProvidedPath(perlPath) perlIncPath = self._getperlincpath(perlPath, destdir) return [perlPath, perlIncPath, ''] # must be no perl at all return ['', [], ''] def _getPython(self, macros, path): """ Takes a path Returns, for that path, a tuple of - the preferred instance of python to use - whether that instance is in the destdir """ m = self.recipe.magic[path] if m and m.name == 'script' and 'python' in m.contents['interpreter']: pythonPath = [m.contents['interpreter']] else: pythonVersion = self._getPythonVersionFromPath(path, None) # After PATH, fall back to %(bindir)s. If %(bindir)s should be # preferred, it needs to be earlier in the PATH. Include # unversioned python as a last resort for confusing cases. 
shellPath = os.environ.get('PATH', '').split(':') + [ '%(bindir)s' ] pythonPath = [] if pythonVersion: pythonPath = [ os.path.join(x, pythonVersion) for x in shellPath ] pythonPath.extend([ os.path.join(x, 'python') for x in shellPath ]) for pathElement in pythonPath: pythonDestPath = ('%(destdir)s'+pathElement) %macros if os.access(pythonDestPath, os.X_OK): return (pythonDestPath, True) for pathElement in pythonPath: pythonDestPath = pathElement %macros if os.access(pythonDestPath, os.X_OK): self._enforceProvidedPath(pythonDestPath) return (pythonDestPath, False) # Specified python not found on system (usually because of # bad interpreter path -- CNY-2050) if len(pythonPath) == 1: missingPythonPath = '%s ' % pythonPath[0] else: missingPythonPath = '' self.warn('Python interpreter %snot found for %s', missingPythonPath, path) return (None, None) def _stripDestDir(self, pathList, destdir): destDirLen = len(destdir) pathElementList = [] for pathElement in pathList: if pathElement.startswith(destdir): pathElementList.append(pathElement[destDirLen:]) else: pathElementList.append(pathElement) return pathElementList class Provides(_dependency): """ NAME ==== B{C{r.Provides()}} - Creates dependency provision SYNOPSIS ======== C{r.Provides([I{provision}, I{filterexp}] || [I{exceptions=filterexp}])} DESCRIPTION =========== The C{r.Provides()} policy marks files as providing certain features or characteristics, and can be called to explicitly provide things that cannot be automatically discovered. C{r.Provides} can also override automatic discovery, and prevent marking a file as providing things, such as for package-private plugin modules installed in system library directories. A C{I{provision}} may be C{'file'} to mark a file as providing its filename, or a dependency type. You can create a file, soname or ABI C{I{provision}} manually; all other types are only automatically discovered. 
    Provisions that begin with C{file} are files, those that start with
    C{soname:} are sonames, and those that start with C{abi:} are ABIs.
    Other prefixes are reserved.

    Soname provisions are normally discovered automatically; they need
    to be provided manually only in two cases:
      - If a shared library was not built with a soname at all.
      - If a symbolic link to a shared library needs to provide its
        name as a soname.

    Note: Use C{r.ComponentProvides} rather than C{r.Provides} to add
    capability flags to components.

    For unusual cases where you want to remove a provision Conary
    automatically finds, you can specify C{r.Provides(exceptDeps='regexp')}
    to override all provisions matching a regular expression,
    C{r.Provides(exceptDeps=('filterexp', 'regexp'))} to override
    provisions matching a regular expression only for files matching
    filterexp, or C{r.Provides(exceptDeps=(('filterexp', 'regexp'), ...))}
    to specify multiple overrides.

    EXAMPLES
    ========
    C{r.Provides('file', '/usr/share/dict/words')}

    Demonstrates using C{r.Provides} to specify the file provision
    C{/usr/share/dict/words}, so that other files can now require that file.

    C{r.Provides('soname: libperl.so', '%(libdir)s/perl5/.*/CORE/libperl.so')}

    Demonstrates synthesizing a shared library provision for all the
    libperl.so symlinks.

    C{r.Provides(exceptDeps = 'java: .*')}

    Demonstrates removing all java provisions.
""" bucket = policy.PACKAGE_CREATION requires = ( ('PackageSpec', policy.REQUIRED_PRIOR), ('SharedLibrary', policy.REQUIRED), # _ELFPathProvide calls Requires to pass in discovered info # _addCILPolicyProvides does likewise ('Requires', policy.REQUIRED_SUBSEQUENT), ) filetree = policy.PACKAGE invariantexceptions = ( '%(docdir)s/', ) dbDepCacheClass = _DatabaseDepCache def __init__(self, *args, **keywords): _dependency.__init__(self, *args, **keywords) self.provisions = [] self.sonameSubtrees = set() self.sysPath = None self.monodisPath = None self.rubyInterpreter = None self.rubyVersion = None self.rubyInvocation = None self.rubyLoadPath = None self.perlIncPath = None self.pythonSysPathMap = {} self.exceptDeps = [] policy.Policy.__init__(self, *args, **keywords) self.depCache = self.dbDepCacheClass(self._getDb()) def updateArgs(self, *args, **keywords): if args: for filespec in args[1:]: self.provisions.append((filespec, args[0])) sonameSubtrees = keywords.pop('sonameSubtrees', None) if sonameSubtrees: if type(sonameSubtrees) in (list, tuple): self.sonameSubtrees.update(set(sonameSubtrees)) else: self.sonameSubtrees.add(sonameSubtrees) exceptDeps = keywords.pop('exceptDeps', None) if exceptDeps: if type(exceptDeps) is str: exceptDeps = ('.*', exceptDeps) assert(type(exceptDeps) == tuple) if type(exceptDeps[0]) is tuple: self.exceptDeps.extend(exceptDeps) else: self.exceptDeps.append(exceptDeps) # The next three are called only from Requires and should override # completely to make sure the policies are in sync pythonFlagNamespace = keywords.pop('_pythonFlagNamespace', None) if pythonFlagNamespace is not None: self.pythonFlagNamespace = pythonFlagNamespace bootstrapPythonFlags = keywords.pop('_bootstrapPythonFlags', None) if bootstrapPythonFlags is not None: self.bootstrapPythonFlags = bootstrapPythonFlags bootstrapSysPath = keywords.pop('_bootstrapSysPath', None) if bootstrapSysPath is not None: self.bootstrapSysPath = bootstrapSysPath bootstrapPerlIncPath = 
keywords.pop('_bootstrapPerlIncPath', None) if bootstrapPerlIncPath is not None: self.bootstrapPerlIncPath = bootstrapPerlIncPath bootstrapRubyLibs = keywords.pop('_bootstrapRubyLibs', None) if bootstrapRubyLibs is not None: self.bootstrapRubyLibs = bootstrapRubyLibs if keywords.get('removeFlagsByDependencyClass', None): self.error('removeFlagsByDependencyClass not currently implemented for Provides (CNY-3443)') _dependency.updateArgs(self, **keywords) def preProcess(self): macros = self.macros if self.bootstrapPythonFlags is not None: self.bootstrapPythonFlags = set(x % macros for x in self.bootstrapPythonFlags) if self.bootstrapSysPath: self.bootstrapSysPath = [x % macros for x in self.bootstrapSysPath] if self.pythonFlagNamespace is not None: self.pythonFlagNamespace = self.pythonFlagNamespace % macros if self.bootstrapPerlIncPath: self.bootstrapPerlIncPath = [x % macros for x in self.bootstrapPerlIncPath] self.rootdir = self.rootdir % macros self.fileFilters = [] self.binDirs = frozenset( x % macros for x in [ '%(bindir)s', '%(sbindir)s', '%(essentialbindir)s', '%(essentialsbindir)s', '%(libexecdir)s', ]) self.noProvDirs = frozenset( x % macros for x in [ '%(testdir)s', '%(debuglibdir)s', ]).union(self.binDirs) exceptDeps = [] for fE, rE in self.exceptDeps: try: exceptDeps.append((filter.Filter(fE, macros), re.compile(rE % self.macros))) except sre_constants.error, e: self.error('Bad regular expression %s for file spec %s: %s', rE, fE, e) self.exceptDeps= exceptDeps for filespec, provision in self.provisions: self.fileFilters.append( (filter.Filter(filespec, macros), provision % macros)) del self.provisions _dependency.preProcess(self) def doFile(self, path): pkgs = self.recipe.autopkg.findComponents(path) if not pkgs: return pkgFiles = [(x, x.getFile(path)) for x in pkgs] macros = self.recipe.macros m = self.recipe.magic[path] fullpath = macros.destdir + path basepath = os.path.basename(path) dirpath = os.path.dirname(path) if os.path.exists(fullpath): mode = 
os.lstat(fullpath)[stat.ST_MODE] # First, add in the manual provisions self.addExplicitProvides(path, fullpath, pkgFiles, macros, m) # Next, discover all automatically-discoverable provisions if os.path.exists(fullpath): if (self._isELF(m, 'abi') and m.contents['Type'] != elf.ET_EXEC and not [ x for x in self.noProvDirs if path.startswith(x) ]): # we do not add elf provides for programs that won't be linked to self._ELFAddProvide(path, m, pkgFiles, basedir=dirpath) if dirpath in self.sonameSubtrees: # only export filename as soname if is shlib sm, finalpath = self._symlinkMagic(path, fullpath, macros, m) if sm and self._isELF(sm, 'abi') and sm.contents['Type'] != elf.ET_EXEC: # add the filename as a soname provision (CNY-699) # note: no provides necessary self._ELFAddProvide(path, sm, pkgFiles, soname=basepath, basedir=dirpath) if self._isPythonModuleCandidate(path): self._addPythonProvides(path, m, pkgFiles, macros) rubyProv = self._isRubyModule(path, macros, fullpath) if rubyProv: self._addRubyProvides(path, m, pkgFiles, macros, rubyProv) elif self._isCIL(m): self._addCILProvides(path, m, pkgFiles, macros) elif self.CILPolicyRE.match(path): self._addCILPolicyProvides(path, pkgFiles, macros) elif self._isJava(m, 'provides'): # Cache the internal provides if not hasattr(self.recipe, '_internalJavaDepMap'): self.recipe._internalJavaDepMap = None self._addJavaProvides(path, m, pkgFiles) elif self._isPerlModule(path): self._addPerlProvides(path, m, pkgFiles) self.addPathDeps(path, dirpath, pkgFiles) self.whiteOut(path, pkgFiles) self.unionDeps(path, pkgFiles) def whiteOut(self, path, pkgFiles): # remove intentionally discarded provides for pkg, f in pkgFiles: if self.exceptDeps and path in pkg.providesMap: depSet = deps.DependencySet() for depClass, dep in pkg.providesMap[path].iterDeps(): for filt, exceptRe in self.exceptDeps: if filt.match(path): matchName = '%s: %s' %(depClass.tagName, str(dep)) if exceptRe.match(matchName): # found one to not copy dep = None break 
                    if dep is not None:
                        depSet.addDep(depClass, dep)
                # replace the map entry with the filtered set
                pkg.providesMap[path] = depSet

    def addExplicitProvides(self, path, fullpath, pkgFiles, macros, m):
        # Apply recipe-supplied provisions whose file filter matches path.
        for (filter, provision) in self.fileFilters:
            if filter.match(path):
                self._markProvides(path, fullpath, provision, pkgFiles,
                                   macros, m)

    def addPathDeps(self, path, dirpath, pkgFiles):
        # Because paths can change, individual files do not provide their
        # paths.  However, within a trove, a file does provide its name.
        # Furthermore, non-regular files can be path dependency targets
        # Therefore, we have to handle this case a bit differently.
        for pkg, f in pkgFiles:
            if dirpath in self.binDirs and not isinstance(f, files.Directory):
                # CNY-930: automatically export paths in bindirs
                # CNY-1721: but not directories in bindirs
                f.flags.isPathDependencyTarget(True)

            if f.flags.isPathDependencyTarget():
                pkg.provides.addDep(deps.FileDependencies,
                                    deps.Dependency(path))

    def unionDeps(self, path, pkgFiles):
        # Fold this file's accumulated provides into both the file object
        # and its owning package.
        for pkg, f in pkgFiles:
            if path in pkg.providesMap:
                f.provides.set(pkg.providesMap[path])
                pkg.provides.union(f.provides())

    def _getELFinfo(self, m, soname):
        # Use the provides discovered by magic if present; otherwise
        # fabricate a minimal soname entry.
        if 'provides' in m.contents and m.contents['provides']:
            return m.contents['provides']
        else:
            # we need to synthesize some provides information
            return [('soname', soname, ())]

    def _ELFAddProvide(self, path, m, pkgFiles, soname=None, soflags=None,
                       basedir=None):
        if basedir is None:
            basedir = os.path.dirname(path)
        if basedir in self.sonameSubtrees:
            # do not record the basedir
            basedir = None
        else:
            # path needs to be in the dependency, since the
            # provides is too broad otherwise, so add it.
# We can only add characters from the path that are legal # in a dependency name basedir = ''.join(x for x in basedir if self.legalCharsRE.match(x)) elfinfo = self._getELFinfo(m, os.path.basename(path)) depSet = self._createELFDepSet(m, elfinfo, recipe=self.recipe, basedir=basedir, soname=soname, soflags=soflags, path=path, isProvides=True) for pkg, _ in pkgFiles: self._addDepSetToMap(path, pkg.providesMap, depSet) def _getPythonProvidesSysPath(self, path): """Generate an ordered list of python paths for the target package. This includes the current system path, plus any paths added by the new package in the destdir through .pth files or a newly built python. @return: (sysPath, pythonVersion) """ pythonPath, bootstrapPython = self._getPython(self.macros, path) if not pythonPath: # Most likely bad interpreter path in a .py file return (None, None) if pythonPath in self.pythonSysPathMap: return self.pythonSysPathMap[pythonPath] destdir = self.macros.destdir libdir = self.macros.libdir pythonVersion = self._getPythonVersion(pythonPath, destdir, libdir) # Get default sys.path from python interpreter, either the one just # built (in the case of a python bootstrap) or from the system. systemPaths = set(self._getPythonSysPath(pythonPath, destdir, libdir, useDestDir=False)) # Now add paths from the destdir's site-packages, typically due to # newly installed .pth files. systemPaths.update(self._getPythonSysPath(pythonPath, destdir, libdir, useDestDir=True)) # Sort in descending order so that the longest path matches first. 
        sysPath = sorted(self._stripDestDir(systemPaths, destdir),
                         reverse=True)
        # cache and return (sysPath, pythonVersion)
        self.pythonSysPathMap[pythonPath] = (sysPath, pythonVersion)
        return self.pythonSysPathMap[pythonPath]

    def _fetchPerlIncPath(self):
        """
        Cache the perl @INC path, sorted longest first
        """
        if self.perlIncPath is not None:
            # already cached
            return

        _, self.perlIncPath, _ = self._getperl(
            self.recipe.macros, self.recipe)
        # longest prefix must match first when stripping path prefixes
        self.perlIncPath.sort(key=len, reverse=True)

    def _addPythonProvides(self, path, m, pkgFiles, macros):

        if not self._isPythonModuleCandidate(path):
            return

        sysPath, pythonVersion = self._getPythonProvidesSysPath(path)
        if not sysPath:
            return

        # Add provides for every match in sys.path. For example, PIL.Imaging
        # and Imaging should both be provided since they are both reachable
        # names.
        for sysPathEntry in sysPath:
            if not path.startswith(sysPathEntry):
                continue
            # path relative to this sys.path entry
            newDepPath = path[len(sysPathEntry)+1:]
            if newDepPath.split('.')[0] == '__init__':
                # we don't allow bare __init__ as a python import
                # hopefully we'll find this init as a deeper import at some
                # other point in the sysPath
                continue
            elif ('site-packages' in newDepPath
                    or 'lib-dynload' in newDepPath
                    or 'plat-linux' in newDepPath
                    ):
                # site-packages should be specifically excluded since both it
                # and its parent are always in sys.path. However, invalid
                # python package names in general are allowed due to certain
                # cases where relative imports happen inside a hyphenated
                # directory and the requires detector picks up on that.
                continue
            # Note that it's possible to have a false positive here. For
            # example, in the PIL case if PIL/__init__.py did not exist,
            # PIL.Imaging would still be provided. The odds of this causing
            # problems are so small that it is not checked for here.
            self._addPythonProvidesSingle(path, m, pkgFiles, macros,
                                          newDepPath)

    def _addPythonProvidesSingle(self, path, m, pkgFiles, macros, depPath):
        # Convert one sys.path-relative file path into a python dependency
        # name and record it (plus init-function aliases for .so modules).
        # remove extension
        depPath, extn = depPath.rsplit('.', 1)

        if depPath == '__future__':
            return

        # remove python3 __pycache__ directory from dep
        if '__pycache__/' in depPath:
            depPath = depPath.replace('__pycache__/', '')

        # PEP 3147 adds the interperter and version to the pyc file
        depPath = self.pythonInterpRE.sub('', depPath)

        if depPath.endswith('/__init__'):
            # a package provides its directory name, not "pkg.__init__"
            depPath = depPath.replace('/__init__', '')
        depPath = depPath.replace('/', '.')

        depPaths = [ depPath ]
        if extn == 'so':
            # an extension module may also be importable via its init* entry
            # points; inspect its dynamic symbols
            fname = util.joinPaths(macros.destdir, path)
            try:
                syms = elf.getDynSym(fname)
                # Does this module have an init<blah> function?
                initfuncs = [ x[4:] for x in syms if x.startswith('init') ]
                # This is the equivalent of dirname()
                comps = depPath.rsplit('.', 1)
                dpPrefix = comps[0]
                if len(comps) == 1:
                    # Top-level python module
                    depPaths.extend(initfuncs)
                else:
                    for initfunc in initfuncs:
                        depPaths.append('.'.join([dpPrefix, initfunc]))
            except elf.error:
                # not readable as ELF; skip the init-function aliases
                pass

        flags = self._getPythonFlagsFromPath(path)
        flags = [(x, deps.FLAG_SENSE_REQUIRED) for x in sorted(list(flags))]
        for dpath in depPaths:
            dep = deps.Dependency(dpath, flags)
            for pkg, _ in pkgFiles:
                self._addDepToMap(path, pkg.providesMap,
                                  deps.PythonDependencies, dep)

    def _addOneCILProvide(self, pkgFiles, path, name, ver):
        # Record a single CIL assembly provision (name + version flag).
        for pkg, _ in pkgFiles:
            self._addDepToMap(path, pkg.providesMap,
                              deps.CILDependencies,
                              deps.Dependency(name,
                                  [(ver, deps.FLAG_SENSE_REQUIRED)]))

    def _addCILPolicyProvides(self, path, pkgFiles, macros):
        # Parse a CIL policy XML file to extract the assembly redirect;
        # best-effort, requires ElementTree to be importable.
        if ElementTree is None:
            return
        try:
            keys = {'urn': '{urn:schemas-microsoft-com:asm.v1}'}
            fullpath = macros.destdir + path
            tree = ElementTree.parse(fullpath)
            root = tree.getroot()
            identity, redirect = root.find('runtime/%(urn)sassemblyBinding/%(urn)sdependentAssembly' % keys).getchildren()
            assembly = identity.get('name')
            self._addOneCILProvide(pkgFiles, path, assembly,
                redirect.get('oldVersion'))
            self.recipe.Requires(_CILPolicyProvides={
                path: (assembly, redirect.get('newVersion'))})
        # NOTE(review): bare except silently ignores any parse/shape error in
        # the policy XML -- apparently deliberate best-effort, but consider
        # narrowing to the expected exception types
        except:
            return

    def _addCILProvides(self, path, m, pkgFiles, macros):
        # Run monodis on a CIL binary and record its assembly name/version.
        if not m or m.name != 'CIL':
            return
        fullpath = macros.destdir + path
        if not self.monodisPath:
            self.monodisPath = self._getmonodis(macros, path)
            if not self.monodisPath:
                return
        p = util.popen('%s --assembly %s' %(
                       self.monodisPath, fullpath))
        name = None
        ver = None
        for line in [ x.strip() for x in p.readlines() ]:
            if 'Name:' in line:
                name = line.split()[1]
            elif 'Version:' in line:
                ver = line.split()[1]
        p.close()
        # monodis did not give us any info
        if not name or not ver:
            return
        self._addOneCILProvide(pkgFiles, path, name, ver)

    def _isRubyModule(self, path, macros, fullpath):
        # Return the ruby require-name provided by this file, or False.
        if not util.isregular(fullpath) or os.path.islink(fullpath):
            return False
        if '/ruby/' in path:
            # load up ruby opportunistically; this is our first chance
            if self.rubyInterpreter is None:
                self.rubyInterpreter, bootstrap = self._getRuby(macros, path)
                if not self.rubyInterpreter:
                    return False
                self.rubyInvocation, self.rubyLoadPath = self._getRubyLoadPath(
                    macros, self.rubyInterpreter, bootstrap)
                self.rubyVersion = self._getRubyVersion(macros)
                # we need to look deep first
                self.rubyLoadPath = sorted(list(self.rubyLoadPath),
                                           key=len, reverse=True)
            elif self.rubyInterpreter is False:
                # previous attempt failed; don't retry per file
                return False

            for pathElement in self.rubyLoadPath:
                if path.startswith(pathElement) \
                        and (path.endswith('.rb') or path.endswith('.so')):
                    if '/gems/' in path:
                        # gem files provide their path below lib/
                        path = path.partition("/gems/")[-1]
                        if '/lib/' in path:
                            return path.partition('/lib/')[-1].rsplit('.', 1)[0]
                    else:
                        return path[len(pathElement)+1:].rsplit('.', 1)[0]
        return False

    def _addRubyProvides(self, path, m, pkgFiles, macros, prov):
        # Record a ruby provision with its version/flavor flags.
        flags = self._getRubyFlagsFromPath(path, self.rubyVersion)
        flags = [(x, deps.FLAG_SENSE_REQUIRED) for x in sorted(list(flags))]
        dep = deps.Dependency(prov, flags)
        for pkg, _ in pkgFiles:
            self._addDepToMap(path, pkg.providesMap,
                              deps.RubyDependencies, dep)

    def _addJavaProvides(self, path, m, pkgFiles):
        # Record java class provisions for this file, pruning classes whose
        # requirements are satisfied neither internally nor by the database.
        if 'provides' not in m.contents or not m.contents['provides']:
            return
        if not hasattr(self.recipe, '_reqExceptDeps'):
            self.recipe._reqExceptDeps = []
        # Compile requires exceptDeps (and persist them)
        if not hasattr(self.recipe, '_compiledReqExceptDeps'):
            self.recipe._compiledReqExceptDeps = exceptDeps = []
            macros = self.recipe.macros
            for fE, rE in self.recipe._reqExceptDeps:
                try:
                    exceptDeps.append((filter.Filter(fE, macros),
                                       re.compile(rE % macros)))
                except sre_constants.error, e:
                    self.error('Bad regular expression %s for file spec %s: %s',
                               rE, fE, e)
            # We will no longer need this, we have the compiled version now
            self.recipe._reqExceptDeps = []

        if self.recipe._internalJavaDepMap is None:
            # Instantiate the dictionary of provides from this package
            self.recipe._internalJavaDepMap = internalJavaDepMap = {}
            componentMap = self.recipe.autopkg.componentMap
            for opath in componentMap:
                om = self.recipe.magic[opath]
                if not self._isJava(om, 'provides'):
                    continue
                # The file could be a .jar, in which case it contains multiple
                # classes. contents['files'] is a dict, keyed on the file name
                # within the jar and with a provide and a set of requires as
                # value.
                internalJavaDepMap.setdefault(opath, {}).update(
                    om.contents['files'])
        else:
            internalJavaDepMap = self.recipe._internalJavaDepMap
        if hasattr(self.recipe, '_internalJavaProvides'):
            internalProvides = self.recipe._internalJavaProvides
        else:
            # We need to cache the internal java provides, otherwise we do too
            # much work for each file (CNY-3372)
            self.recipe._internalJavaProvides = internalProvides = set()
            for opath, ofiles in internalJavaDepMap.items():
                internalProvides.update(x[0] for x in ofiles.values()
                    if x[0] is not None)
            # Now drop internal provides from individual class requires
            for opath, ofiles in internalJavaDepMap.items():
                for oclassName, (oclassProv, oclassReqSet) in ofiles.items():
                    if oclassReqSet is None:
                        continue
                    oclassReqSet.difference_update(internalProvides)

        reqs = set()
        if self._isJava(m, 'requires'):
            # Extract this file's requires
            reqs.update(m.contents['requires'])
            # Remove the ones that are satisfied internally
            reqs.difference_update(internalProvides)
        # For now, we are only trimming the provides (and requires) for
        # classes for which the requires are not satisfied, neither internally
        # nor from the system Conary database. In the future we may need to
        # build a dependency tree between internal classes, such that we do
        # the removal transitively (class A requires class B which doesn't
        # have its deps satisfied should make class A unusable). This can come
        # at a later time
        # CNY-3362: we don't drop provides for classes which had requires on
        # classes that had their dependencies pruned. (at least not yet)
        if reqs:
            # Try to resolve these deps against the Conary database
            depSetList = []
            depSetMap = {}
            for req in reqs:
                depSet = deps.DependencySet()
                depSet.addDep(deps.JavaDependencies, deps.Dependency(req, []))
                depSetList.append(depSet)
                depSetMap[depSet] = req
            troves = self.depCache.getProvides(depSetList)
            missingDepSets = set(depSetList) - set(troves)
            missingReqs = set(depSetMap[x] for x in missingDepSets)

            # White out the missing requires if exceptDeps for them are found
            rExceptDeps = self.recipe._compiledReqExceptDeps
            if missingReqs and rExceptDeps:
                depClass = deps.JavaDependencies
                filteredMissingDeps = set()
                for dep in list(missingReqs):
                    for filt, exceptRe in rExceptDeps:
                        if not filt.match(path):
                            continue
                        matchName = '%s: %s' %(depClass.tagName, str(dep))
                        if exceptRe.match(matchName):
                            # found one to not copy
                            missingReqs.remove(dep)
                            filteredMissingDeps.add(dep)
                            break
                if filteredMissingDeps:
                    # We need to take them out of the per-file requires
                    ofiles = internalJavaDepMap[path]
                    for _, (oclassProv, oclassReqSet) in ofiles.items():
                        if oclassProv is not None:
                            oclassReqSet.difference_update(filteredMissingDeps)

            if missingReqs:
                fileDeps = internalJavaDepMap[path]
                # This file has unsatisfied dependencies.
                # Walk its list of classes to determine which ones are not
                # satisfied.
                satisfiedClasses = dict((fpath, (fprov, freqs))
                    for (fpath, (fprov, freqs)) in fileDeps.iteritems()
                        if freqs is not None
                            and not freqs.intersection(missingReqs))
                internalJavaDepMap[path] = satisfiedClasses
                self.warn('Provides and requirements for file %s are disabled '
                          'because of unsatisfied dependencies. To re-enable '
                          'them, add to the recipe\'s buildRequires the '
                          'packages that provide the following '
                          'requirements: %s' %
                            (path, " ".join(sorted(missingReqs))))

        # Add the remaining provides
        fileDeps = internalJavaDepMap[path]
        provs = set(fprov for fpath, (fprov, freqs) in fileDeps.iteritems()
                    if fprov is not None)
        for prov in provs:
            dep = deps.Dependency(prov, [])
            for pkg, _ in pkgFiles:
                self._addDepToMap(path, pkg.providesMap,
                                  deps.JavaDependencies, dep)

    def _addPerlProvides(self, path, m, pkgFiles):
        # do not call perl to get @INC unless we have something to do for perl
        self._fetchPerlIncPath()

        # It is possible that we'll want to allow user-specified
        # additions to the perl search path, but if so, we need
        # to path-encode those files, so we can't just prepend
        # those elements to perlIncPath.  We would need to end up
        # with something like "perl: /path/to/foo::bar" because
        # for perl scripts that don't modify @INC, they could not
        # find those scripts.  It is not clear that we need this
        # at all, because most if not all of those cases would be
        # intra-package dependencies that we do not want to export.

        depPath = None
        for pathPrefix in self.perlIncPath:
            if path.startswith(pathPrefix):
                depPath = path[len(pathPrefix)+1:]
                break
        if depPath is None:
            return

        # foo/bar/baz.pm -> foo::bar::baz
        prov = '::'.join(depPath.split('/')).rsplit('.', 1)[0]
        dep = deps.Dependency(prov, [])
        for pkg, _ in pkgFiles:
            self._addDepToMap(path, pkg.providesMap,
                              deps.PerlDependencies, dep)

    def _markProvides(self, path, fullpath, provision, pkgFiles, macros, m):
        # Apply one explicit provision string ("file", "abi: ...",
        # or "soname: ...") to the file.
        if provision.startswith("file"):
            # can't actually specify what to provide, just that it provides...
            for _, f in pkgFiles:
                f.flags.isPathDependencyTarget(True)

        elif provision.startswith("abi:"):
            # "abi: name(FLAG1 FLAG2)" -> Dependency(name, flags)
            abistring = provision[4:].strip()
            op = abistring.index('(')
            abi = abistring[:op]
            flags = abistring[op+1:-1].split()
            flags = [ (x, deps.FLAG_SENSE_REQUIRED) for x in flags ]
            dep = deps.Dependency(abi, flags)
            for pkg, _ in pkgFiles:
                self._addDepToMap(path, pkg.providesMap,
                                  deps.AbiDependency, dep)

        elif provision.startswith("soname:"):
            sm, finalpath = self._symlinkMagic(path, fullpath, macros, m)
            if self._isELF(sm, 'abi'):
                # Only ELF files can provide sonames.
                # This is for libraries that don't really include a soname,
                # but programs linked against them require a soname.
                # For this reason, we do not pass 'provides' to _isELF
                soname = provision[7:].strip()
                soflags = []
                if '(' in soname:
                    # get list of arbitrary flags
                    soname, rest = soname.split('(')
                    soflags.extend(rest[:-1].split())
                basedir = None
                if '/' in soname:
                    basedir, soname = soname.rsplit('/', 1)
                self._ELFAddProvide(path, sm, pkgFiles, soname=soname,
                                    soflags=soflags, basedir=basedir)
        else:
            self.error('Provides %s for file %s does not start with one of'
                       ' "file", "abi:", or "soname"',
                       provision, path)


class Requires(_addInfo, _dependency):
    """
    NAME
    ====
    B{C{r.Requires()}} - Creates dependency requirements

    SYNOPSIS
    ========
    C{r.Requires([I{/path/to/file}, I{filterexp}] || [I{packagename:component[(FLAGS)]}, I{filterexp}] || [I{exceptions=filterexp)}])}

    DESCRIPTION
    ===========
    The C{r.Requires()} policy adds requirements for a file.
    You can pass in exceptions that should not have automatic requirement
    discovery done, such as example shell scripts outside of C{%(docdir)s}.

    Note: Components are the only troves which can be required.
    For executables executed only through wrappers that use
    C{LD_LIBRARY_PATH} to find the libraries instead of embedding an
    RPATH in the binary, you will need to provide a synthetic RPATH
    using C{r.Requires(rpath='I{RPATH}')} or
    C{r.Requires(rpath=('I{filterExp}', 'I{RPATH}'))} calls, which are
    tested in the order provided.

    The RPATH is a standard Unix-style path string containing one or more
    directory names, separated only by colon characters, except for one
    significant change: Each path component is interpreted using shell-style
    globs, which are checked first in the C{%(destdir)s} and then on the
    installed system. (The globs are useful for cases like perl where
    statically determining the entire content of the path is difficult. Use
    globs only for variable parts of paths; be as specific as you can
    without using the glob feature any more than necessary.)

    Executables that use C{dlopen()} to open a shared library will not
    automatically have a dependency on that shared library. If the program
    unconditionally requires that it be able to C{dlopen()} the shared
    library, encode that requirement by manually creating the requirement
    by calling C{r.Requires('soname: libfoo.so', 'filterexp')} or
    C{r.Requires('soname: /path/to/libfoo.so', 'filterexp')} depending on
    whether the library is in a system library directory or not. (It should
    be the same as how the soname dependency is expressed by the providing
    package.)

    For unusual cases where a system library is not listed in C{ld.so.conf}
    but is instead found through a search through special subdirectories with
    architecture-specific names (such as C{i686} and C{tls}), you can pass in
    a string or list of strings specifying the directory or list of
    directories with C{r.Requires(sonameSubtrees='/directoryname')}
    or C{r.Requires(sonameSubtrees=['/list', '/of', '/dirs'])}

    Note: These are B{not} regular expressions. They will have macro
    expansion performed on them.
    For unusual cases where Conary finds a false or misleading dependency,
    or in which you need to override a true dependency, you can specify
    C{r.Requires(exceptDeps='regexp')} to override all dependencies matching
    a regular expression, C{r.Requires(exceptDeps=('filterexp', 'regexp'))}
    to override dependencies matching a regular expression only for files
    matching filterexp, or
    C{r.Requires(exceptDeps=(('filterexp', 'regexp'), ...))} to specify
    multiple overrides.

    EXAMPLES
    ========
    C{r.Requires('mailbase:runtime', '%(sbindir)s/sendmail')}

    Demonstrates using C{r.Requires} to specify a manual requirement of the
    file C{%(sbindir)s/sendmail} to the C{:runtime} component of package
    C{mailbase}.

    C{r.Requires('file: %(sbindir)s/sendmail', '%(datadir)s/squirrelmail/index.php')}

    Specifies that conary should require the file C{%(sbindir)s/sendmail} to
    be present when trying to install C{%(datadir)s/squirrelmail/index.php}.

    C{r.Requires('soname: %(libdir)s/kde3/kgreet_classic.so', '%(bindir)s/kdm')}

    Demonstrates using C{r.Requires} to specify a manual soname requirement
    of the file C{%(bindir)s/kdm} to the soname
    C{%(libdir)s/kde3/kgreet_classic.so}.

    C{r.Requires(exceptions='/usr/share/vim/.*/doc/')}

    Demonstrates using C{r.Requires} to specify that files in the
    subdirectory C{/usr/share/vim/.*/doc} are excepted from being marked as
    requirements.

    C{r.Requires(exceptDeps='trove:$trovename')}

    Uses C{r.Requires} to specify that the trove C{trovename} is excluded
    from the dependencies for the package.
""" bucket = policy.PACKAGE_CREATION requires = ( ('PackageSpec', policy.REQUIRED_PRIOR), ('SharedLibrary', policy.REQUIRED_PRIOR), # Requires depends on ELF dep path discovery previously done in Provides ('Provides', policy.REQUIRED_PRIOR), ) filetree = policy.PACKAGE invariantexceptions = ( '%(docdir)s/', ) dbDepCacheClass = _DatabaseDepCache def __init__(self, *args, **keywords): _dependency.__init__(self, *args, **keywords) self.bootstrapPythonFlags = set() self.bootstrapSysPath = [] self.bootstrapPerlIncPath = [] self.bootstrapRubyLibs = [] self.pythonFlagNamespace = None self.sonameSubtrees = set() self._privateDepMap = {} self.rpathFixup = [] self.exceptDeps = [] self.sysPath = None self.monodisPath = None self.rubyInterpreter = None self.rubyVersion = None self.rubyInvocation = None self.rubyLoadPath = None self.perlReqs = None self.perlPath = None self.perlIncArgs = None self._CILPolicyProvides = {} self.pythonSysPathMap = {} self.pythonModuleFinderMap = {} self.troveDeps = {} policy.Policy.__init__(self, *args, **keywords) self.depCache = self.dbDepCacheClass(self._getDb()) ISD = deps.InstructionSetDependency TISD = deps.TargetInstructionSetDependency instructionDeps = list(self.recipe._buildFlavor.iterDepsByClass(ISD)) instructionDeps += list(self.recipe._buildFlavor.iterDepsByClass(TISD)) self.allowableIsnSets = [ x.name for x in instructionDeps ] def updateArgs(self, *args, **keywords): # _privateDepMap is used only for Provides to talk to Requires privateDepMap = keywords.pop('_privateDepMap', None) if privateDepMap: self._privateDepMap.update([privateDepMap]) sonameSubtrees = keywords.pop('sonameSubtrees', None) if sonameSubtrees: if type(sonameSubtrees) in (list, tuple): self.sonameSubtrees.update(set(sonameSubtrees)) else: self.sonameSubtrees.add(sonameSubtrees) bootstrapPythonFlags = keywords.pop('bootstrapPythonFlags', None) if bootstrapPythonFlags: if type(bootstrapPythonFlags) in (list, tuple): 
                self.bootstrapPythonFlags.update(set(bootstrapPythonFlags))
            else:
                self.bootstrapPythonFlags.add(bootstrapPythonFlags)
            # pass full set to Provides to share the exact same data
            self.recipe.Provides(
                _bootstrapPythonFlags=self.bootstrapPythonFlags)
        bootstrapSysPath = keywords.pop('bootstrapSysPath', None)
        if bootstrapSysPath:
            if type(bootstrapSysPath) in (list, tuple):
                self.bootstrapSysPath.extend(bootstrapSysPath)
            else:
                self.error('bootstrapSysPath must be list or tuple')
            # pass full set to Provides to share the exact same data
            self.recipe.Provides(
                _bootstrapSysPath=self.bootstrapSysPath)
        pythonFlagNamespace = keywords.pop('pythonFlagNamespace', None)
        if pythonFlagNamespace is not None:
            self.pythonFlagNamespace = pythonFlagNamespace
            self.recipe.Provides(_pythonFlagNamespace=pythonFlagNamespace)
        bootstrapPerlIncPath = keywords.pop('bootstrapPerlIncPath', None)
        if bootstrapPerlIncPath:
            if type(bootstrapPerlIncPath) in (list, tuple):
                self.bootstrapPerlIncPath.extend(bootstrapPerlIncPath)
            else:
                self.error('bootstrapPerlIncPath must be list or tuple')
            # pass full set to Provides to share the exact same data
            self.recipe.Provides(
                _bootstrapPerlIncPath=self.bootstrapPerlIncPath)
        bootstrapRubyLibs = keywords.pop('bootstrapRubyLibs', None)
        if bootstrapRubyLibs is not None:
            if type(bootstrapRubyLibs) in (list, tuple):
                self.bootstrapRubyLibs.extend(bootstrapRubyLibs)
            else:
                self.error('bootstrapRubyLibs must be list or tuple')
            # pass full set to Provides to share the exact same data
            self.recipe.Provides(
                _bootstrapRubyLibs=self.bootstrapRubyLibs)
        _CILPolicyProvides = keywords.pop('_CILPolicyProvides', None)
        if _CILPolicyProvides:
            self._CILPolicyProvides.update(_CILPolicyProvides)
        rpath = keywords.pop('rpath', None)
        if rpath:
            if type(rpath) is str:
                # bare rpath applies to all files
                rpath = ('.*', rpath)
            assert(type(rpath) == tuple)
            self.rpathFixup.append(rpath)
        exceptDeps = keywords.pop('exceptDeps', None)
        if exceptDeps:
            if type(exceptDeps) is str:
                exceptDeps = ('.*', exceptDeps)
            assert(type(exceptDeps) == tuple)
            if type(exceptDeps[0]) is tuple:
                self.exceptDeps.extend(exceptDeps)
            else:
                self.exceptDeps.append(exceptDeps)
            if not hasattr(self.recipe, '_reqExceptDeps'):
                self.recipe._reqExceptDeps = []
            # shared with Provides' java handling via the recipe object
            self.recipe._reqExceptDeps.extend(self.exceptDeps)
        # Filter out trove deps that are not associated with a file.
        if len(args) >= 2:
            troves = []
            component = re.compile('^[-a-zA-Z0-9]*:[a-zA-Z]+$')
            for arg in args[1:]:
                arg = arg % self.recipe.macros
                # Make sure arg looks like a component
                if not component.match(arg):
                    break
                troves.append(arg.lstrip(':'))
            else:
                # all args were components: treat as a trove dependency,
                # not a file filter
                self.troveDeps[args[0]] = troves
                args = ()
        _dependency.updateArgs(self, *args, **keywords)
        _addInfo.updateArgs(self, *args, **keywords)

    def preProcess(self):
        # Expand macros, collect system library paths from ld.so.conf*, and
        # compile rpath fixups and exceptDeps regexps.
        macros = self.macros
        self.systemLibPaths = set(os.path.normpath(x % macros)
                                  for x in self.sonameSubtrees)
        self.bootstrapPythonFlags = set(x % macros
                                        for x in self.bootstrapPythonFlags)
        self.bootstrapSysPath = [x % macros for x in self.bootstrapSysPath]
        if self.pythonFlagNamespace is not None:
            self.pythonFlagNamespace = self.pythonFlagNamespace % macros
        self.bootstrapPerlIncPath = [x % macros
                                     for x in self.bootstrapPerlIncPath]
        # anything that any buildreqs have caused to go into ld.so.conf
        # or ld.so.conf.d/*.conf is a system library by definition,
        # but only look at paths, not (for example) "include" lines
        if os.path.exists('/etc/ld.so.conf'):
            self.systemLibPaths |= set(os.path.normpath(x.strip())
                for x in file('/etc/ld.so.conf').readlines()
                if x.startswith('/'))
        for fileName in fixedglob.glob('/etc/ld.so.conf.d/*.conf'):
            self.systemLibPaths |= set(os.path.normpath(x.strip())
                for x in file(fileName).readlines()
                if x.startswith('/'))
        self.rpathFixup = [(filter.Filter(x, macros), y % macros)
                           for x, y in self.rpathFixup]
        exceptDeps = []
        for fE, rE in self.exceptDeps:
            try:
                exceptDeps.append((filter.Filter(fE, macros),
                                   re.compile(rE % macros)))
            except sre_constants.error, e:
                self.error('Bad regular expression %s for file spec %s: %s',
                           rE, fE, e)
        self.exceptDeps= exceptDeps
        _dependency.preProcess(self)

    def postProcess(self):
        # After all files are processed: tear down the python module finder
        # and attach recipe-level trove dependencies to components.
        self._delPythonRequiresModuleFinder()

        components = {}
        for comp in self.recipe.autopkg.getComponents():
            components[comp.getName()] = comp
            shortName = comp.getName().split(':')[1]
            # Mark component names with duplicates
            if shortName in components:
                components[shortName] = None
            else:
                components[shortName] = comp

        # r.Requires('foo:runtime', 'msi')
        # r.Requires('foo:runtime', ':msi')
        # r.Requires('foo:runtime', 'bar:msi')
        depClass = deps.TroveDependencies
        for info, troves in self.troveDeps.iteritems():
            # Sanity check inputs.
            if ':' not in info:
                self.error('package dependency %s not allowed', info)
                return
            for trove in troves:
                if trove not in components:
                    self.error('no component named %s', trove)
                    return
                if components[trove] is None:
                    self.error('specified component name matches multiple '
                               'components %s', trove)
                    return

            # Add the trove dependency.
            dep = deps.Dependency(info)
            for trove in troves:
                components[trove].requires.addDep(depClass, dep)

    def doFile(self, path):
        # Per-file driver: ELF requirements first, then explicit manual
        # requirements, then automatic discovery per interpreter/file type.
        pkgs = self.recipe.autopkg.findComponents(path)
        if not pkgs:
            return
        pkgFiles = [(x, x.getFile(path)) for x in pkgs]
        # this file object used only for tests, not for doing packaging
        f = pkgFiles[0][1]
        macros = self.recipe.macros
        fullpath = macros.destdir + path
        m = self.recipe.magic[path]

        if self._isELF(m, 'requires'):
            isnset = m.contents['isnset']
            if isnset in self.allowableIsnSets:
                # only add requirements for architectures
                # that we are actually building for (this may include
                # major and minor architectures)
                self._addELFRequirements(path, m, pkgFiles)

        # now go through explicit requirements
        for info in self.included:
            for filt in self.included[info]:
                if filt.match(path):
                    self._markManualRequirement(info, path, pkgFiles, m)

        # now check for automatic dependencies besides ELF
        if f.inode.perms() & 0111 and m and m.name == 'script':
            interp = m.contents['interpreter']
            if interp.strip().startswith('/') and \
                    self._checkInclusion(interp, path):
                # no interpreter string warning is in BadInterpreterPaths
                if not (os.path.exists(interp) or
                        os.path.exists(macros.destdir+interp)):
                    # this interpreter not on system, warn
                    # cannot be an error to prevent buildReq loops
                    self.warn('interpreter "%s" (referenced in %s) missing',
                              interp, path)
                    # N.B. no special handling for /{,usr/}bin/env here;
                    # if there has been an exception to
                    # NormalizeInterpreterPaths, then it is a
                    # real dependency on the env binary
                self._addRequirement(path, interp, [], pkgFiles,
                                     deps.FileDependencies)

        if (f.inode.perms() & 0111 and m and m.name == 'script' and
            os.path.basename(m.contents['interpreter']).startswith('python')):
            self._addPythonRequirements(path, fullpath, pkgFiles)
        elif self._isPython(path):
            self._addPythonRequirements(path, fullpath, pkgFiles)

        if (f.inode.perms() & 0111 and m and m.name == 'script' and
            os.path.basename(m.contents['interpreter']).startswith('ruby')):
            self._addRubyRequirements(path, fullpath, pkgFiles, script=True)
        elif '/ruby/' in path and path.endswith('.rb'):
            self._addRubyRequirements(path, fullpath, pkgFiles, script=False)

        if self._isCIL(m):
            if not self.monodisPath:
                self.monodisPath = self._getmonodis(macros, path)
                if not self.monodisPath:
                    return
            p = util.popen('%s --assemblyref %s' %(
                           self.monodisPath, fullpath))
            for line in [ x.strip() for x in p.readlines() ]:
                # monodis prints Version= before Name= for each assemblyref
                if ': Version=' in line:
                    ver = line.split('=')[1]
                elif 'Name=' in line:
                    name = line.split('=')[1]
                    self._addRequirement(path, name, [ver], pkgFiles,
                                         deps.CILDependencies)
            p.close()
        elif self.CILPolicyRE.match(path):
            name, ver = self._CILPolicyProvides[path]
            self._addRequirement(path, name, [ver], pkgFiles,
                                 deps.CILDependencies)

        if self._isJava(m, 'requires'):
            self._addJavaRequirements(path, m, pkgFiles)

        db = self._getDb()
        if self._isPerl(path, m, f):
            perlReqs = self._getPerlReqs(path, fullpath)
            for req in perlReqs:
                thisReq = deps.parseDep('perl: ' + req)
                if db.getTrovesWithProvides([thisReq]) or [
                        x for x in self.recipe.autopkg.getComponents() if
x.provides.satisfies(thisReq)]: self._addRequirement(path, req, [], pkgFiles, deps.PerlDependencies) self.whiteOut(path, pkgFiles) self.unionDeps(path, pkgFiles) def _addJavaRequirements(self, path, m, pkgFiles): if not hasattr(self.recipe, '_internalJavaDepMap'): self.recipe._internalJavaDepMap = {} fileDeps = self.recipe._internalJavaDepMap.get(path, {}) reqs = set() for fpath, (fprov, freq) in fileDeps.items(): if freq is not None: reqs.update(freq) for req in reqs: self._addRequirement(path, req, [], pkgFiles, deps.JavaDependencies) def whiteOut(self, path, pkgFiles): # remove intentionally discarded dependencies for pkg, _ in pkgFiles: if self.exceptDeps and path in pkg.requiresMap: depSet = deps.DependencySet() for depClass, dep in pkg.requiresMap[path].iterDeps(): for filt, exceptRe in self.exceptDeps: if filt.match(path): matchName = '%s: %s' %(depClass.tagName, str(dep)) if exceptRe.match(matchName): # found one to not copy dep = None break if dep is not None: depSet.addDep(depClass, dep) pkg.requiresMap[path] = depSet def unionDeps(self, path, pkgFiles): # finally, package the dependencies up for pkg, f in pkgFiles: if path in pkg.requiresMap: # files should not require items they provide directly. 
CNY-2177 f.requires.set(pkg.requiresMap[path] - f.provides()) pkg.requires.union(f.requires()) def _addELFRequirements(self, path, m, pkgFiles): """ Add ELF and abi dependencies, including paths when not shlibs """ def appendUnique(ul, items): for item in items: if item not in ul: ul.append(item) def _canonicalRPATH(rpath, glob=False): # normalize all elements of RPATH l = [ util.normpath(x) for x in rpath.split(':') ] # CNY-3425 # prune system paths and relative paths from RPATH l = [ x for x in l if x not in self.systemLibPaths and x.startswith('/') ] if glob: destdir = self.macros.destdir dlen = len(destdir) gl = [] for item in l: # prefer destdir elements paths = util.braceGlob(destdir + item) paths = [ os.path.normpath(x[dlen:]) for x in paths ] appendUnique(gl, paths) # then look on system paths = util.braceGlob(item) paths = [ os.path.normpath(x) for x in paths ] appendUnique(gl, paths) l = gl return l rpathList = [] def _findSonameInRpath(soname): for rpath in rpathList: destpath = '/'.join((self.macros.destdir, rpath, soname)) if os.path.exists(destpath): return rpath destpath = '/'.join((rpath, soname)) if os.path.exists(destpath): return rpath # didn't find anything return None # fixup should come first so that its path elements can override # the included RPATH if necessary if self.rpathFixup: for f, rpath in self.rpathFixup: if f.match(path): # synthetic RPATH items are globbed rpathList = _canonicalRPATH(rpath, glob=True) break if m and 'RPATH' in m.contents and m.contents['RPATH']: rpathList += _canonicalRPATH(m.contents['RPATH']) depSet = self._createELFDepSet(m, m.contents['requires'], libPathMap=self._privateDepMap, getRPATH=_findSonameInRpath, path=path, isProvides=False) for pkg, _ in pkgFiles: self._addDepSetToMap(path, pkg.requiresMap, depSet) def _getPythonRequiresSysPath(self, pathName): # Generate the correct sys.path for finding the required modules. 
# we use the built in site.py to generate a sys.path for the # current system and another one where destdir is the root. # note the below code is similar to code in Provides, # but it creates an ordered path list with and without destdir prefix, # while provides only needs a complete list without destdir prefix. # Returns tuple: # (sysPath, pythonModuleFinder, pythonVersion) pythonPath, bootstrapPython = self._getPython(self.macros, pathName) if not pythonPath: return (None, None, None) if pythonPath in self.pythonSysPathMap: return self.pythonSysPathMap[pythonPath] destdir = self.macros.destdir libdir = self.macros.libdir pythonVersion = self._getPythonVersion(pythonPath, destdir, libdir) # Start with paths inside the destdir so that imports within a package # are discovered correctly. systemPaths = self._getPythonSysPath(pythonPath, destdir, libdir, useDestDir=True) # Now add paths from the system (or bootstrap python) systemPaths += self._getPythonSysPath(pythonPath, destdir, libdir, useDestDir=False) if not bootstrapPython: # update pythonTroveFlagCache to require correct flags self._getPythonTroveFlags(pythonPath) # Keep original order for use with the module finder. sysPathForModuleFinder = list(systemPaths) # Strip destdir and sort in descending order for converting paths to # qualified python module names. sysPath = sorted(set(self._stripDestDir(systemPaths, destdir)), reverse=True) # load module finder after sys.path is restored # in case delayed importer is installed. 
pythonModuleFinder = self._getPythonRequiresModuleFinder( pythonPath, destdir, libdir, sysPathForModuleFinder, bootstrapPython) self.pythonSysPathMap[pythonPath] = ( sysPath, pythonModuleFinder, pythonVersion) return self.pythonSysPathMap[pythonPath] def _getPythonRequiresModuleFinder(self, pythonPath, destdir, libdir, sysPath, bootstrapPython): if self.recipe.isCrossCompiling(): return None if pythonPath not in self.pythonModuleFinderMap: try: self.pythonModuleFinderMap[pythonPath] = pydeps.moduleFinderProxy(pythonPath, destdir, libdir, sysPath, self.error) except pydeps.ModuleFinderInitializationError, e: if bootstrapPython: # another case, like isCrossCompiling, where we cannot # run pythonPath -- ModuleFinderInitializationError # is raised before looking at any path, so should # be consistent for any pythonPath self.pythonModuleFinderMap[pythonPath] = None else: raise return self.pythonModuleFinderMap[pythonPath] def _delPythonRequiresModuleFinder(self): for finder in self.pythonModuleFinderMap.values(): if finder is not None: finder.close() def _addPythonRequirements(self, path, fullpath, pkgFiles): destdir = self.recipe.macros.destdir destDirLen = len(destdir) (sysPath, pythonModuleFinder, pythonVersion )= self._getPythonRequiresSysPath(path) if not sysPath: # Probably a bad interpreter path return if not pythonModuleFinder: # We cannot (reliably) determine runtime python requirements # in the cross-compile case, so don't even try (for # consistency). return pythonModuleFinder.load_file(fullpath) data = pythonModuleFinder.getDepsForPath(fullpath) if data['result'] != 'ok': self.info('File %s is not a valid python file', path) return for depPath in data['paths']: if not depPath: continue flags = None absPath = None if depPath.startswith(destdir): depPath = depPath[destDirLen:] flags = self._getPythonFlagsFromPath(depPath) # The file providing this dependency is part of this package. 
absPath = depPath for sysPathEntry in sysPath: if depPath.startswith(sysPathEntry): newDepPath = depPath[len(sysPathEntry)+1:] if newDepPath not in ('__init__', '__init__.py'): # we don't allow bare __init__'s as dependencies. # hopefully we'll find this at deeper level in # in the sysPath if flags is None: # this is provided by the system, so we have # to see with which flags it is provided with flags = self._getPythonFlags(depPath, self.bootstrapPythonFlags) depPath = newDepPath break if depPath.startswith('/'): # a python file not found in sys.path will not have been # provided, so we must not depend on it either return if not (depPath.endswith('.py') or depPath.endswith('.pyc') or depPath.endswith('.so')): # Not something we provide, so not something we can # require either. Drop it and go on. We have seen # this when a script in /usr/bin has ended up in the # requires list. continue if depPath.endswith('module.so'): # Strip 'module.so' from the end, make it a candidate cands = [ depPath[:-9] + '.so', depPath ] cands = [ self._normalizePythonDep(x) for x in cands ] if absPath: depName = self._checkPackagePythonDeps(pkgFiles, absPath, cands, flags) else: depName = self._checkSystemPythonDeps(cands, flags) else: depName = self._normalizePythonDep(depPath) if depName == '__future__': continue self._addRequirement(path, depName, flags, pkgFiles, deps.PythonDependencies) #if data['missing']: # self.warn("Python file %s is missing requirements: %s" % ( # path, ', '.join(data['missing']))) def _checkPackagePythonDeps(self, pkgFiles, depPath, depNames, flags): # Try to match depNames against all current packages # Use the last value in depNames as the fault value assert depNames, "No dependencies passed" for pkg, _ in pkgFiles: if depPath in pkg: fileProvides = pkg[depPath][1].provides() if flags: flags = [ (x, deps.FLAG_SENSE_REQUIRED) for x in flags ] # Walk the depNames list in order, pick the first dependency # available. 
for dp in depNames: depSet = deps.DependencySet() depSet.addDep(deps.PythonDependencies, deps.Dependency(dp, flags)) if fileProvides.intersection(depSet): # this dep is provided return dp # If we got here, the file doesn't provide this dep. Return the last # candidate and hope for the best return depNames[-1] def _checkSystemPythonDeps(self, depNames, flags): if flags: flags = [ (x, deps.FLAG_SENSE_REQUIRED) for x in flags ] for dp in depNames: depSet = deps.DependencySet() depSet.addDep(deps.PythonDependencies, deps.Dependency(dp, flags)) troves = self.depCache.getProvides([depSet]) if troves: return dp return depNames[-1] def _normalizePythonDep(self, depName): # remove extension depName = depName.rsplit('.', 1)[0] depName = depName.replace('/', '.') depName = depName.replace('.__init__', '') depName = self.pythonInterpRE.sub('', depName) return depName def _addRubyRequirements(self, path, fullpath, pkgFiles, script=False): macros = self.recipe.macros destdir = macros.destdir destDirLen = len(destdir) if self.rubyInterpreter is None: self.rubyInterpreter, bootstrap = self._getRuby(macros, path) if not self.rubyInterpreter: return self.rubyInvocation, self.rubyLoadPath = self._getRubyLoadPath( macros, self.rubyInterpreter, bootstrap) self.rubyVersion = self._getRubyVersion(macros) elif self.rubyInterpreter is False: return if not script: if not util.isregular(fullpath) or os.path.islink(fullpath): return foundInLoadPath = False for pathElement in self.rubyLoadPath: if path.startswith(pathElement): foundInLoadPath = True break if not foundInLoadPath: return # This is a very limited hack, but will work for the 90% case # better parsing may be written later # Note that we only honor "require" at the beginning of # the line and only requirements enclosed in single quotes # to avoid conditional requirements and requirements that # do any sort of substitution. 
Because most ruby packages # contain multiple ruby modules, getting 90% of the ruby # dependencies will find most of the required packages in # practice depEntries = [x.strip() for x in file(fullpath) if x.startswith('require ') or x.startswith('require(')] depEntries = (x.split() for x in depEntries) depEntries = (x[1].strip("\"'") for x in depEntries if len(x) == 2 and x[1].startswith("'") and x[1].endswith("'")) depEntries = set(depEntries) # I know of no way to ask ruby to report deps from scripts # Unfortunately, so far it seems that there are too many # Ruby modules which have code that runs in the body; this # code runs slowly, has not been useful in practice for # filtering out bogus dependencies, and has been hanging # and causing other unintended side effects from modules # that have code in the main body. #if not script: # depClosure = util.popen(r'''%s -e "require '%s'; puts $\""''' # %(self.rubyInvocation%macros, fullpath)).readlines() # depClosure = set([x.split('.')[0] for x in depClosure]) # # remove any entries from the guessed immediate requirements # # that are not in the closure # depEntries = set(x for x in depEntries if x in depClosure) def _getDepEntryPath(depEntry): for prefix in (destdir, ''): for pathElement in self.rubyLoadPath: for suffix in ('.rb', '.so'): candidate = util.searchPath( os.path.basename(depEntry) + suffix, prefix + pathElement, ) if candidate: return candidate return None for depEntry in depEntries: depEntryPath = _getDepEntryPath(depEntry) if depEntryPath is None: continue if depEntryPath.startswith(destdir): depPath = depEntryPath[destDirLen:] else: depPath = depEntryPath flags = self._getRubyFlagsFromPath(depPath, self.rubyVersion) self._addRequirement(path, depEntry, flags, pkgFiles, deps.RubyDependencies) def _fetchPerl(self): """ Cache the perl path and @INC path with -I%(destdir)s prepended to each element if necessary """ if self.perlPath is not None: return macros = self.recipe.macros self.perlPath, perlIncPath, 
perlDestInc = self._getperl(macros, self.recipe) if perlDestInc: self.perlIncArgs = perlDestInc else: self.perlIncArgs = ' '.join('-I'+x for x in perlIncPath) def _getPerlReqs(self, path, fullpath): if self.perlReqs is None: self._fetchPerl() if not self.perlPath: # no perl == bootstrap, but print warning self.info('Unable to find perl interpreter,' ' disabling perl: requirements') self.perlReqs = False return [] # get the base directory where conary lives. In a checked # out version, this would be .../conary/conary/build/package.py # chop off the last 3 directories to find where # .../conary/Scandeps and .../conary/scripts/perlreqs.pl live basedir = '/'.join(sys.modules[__name__].__file__.split('/')[:-3]) scandeps = '/'.join((basedir, 'conary/ScanDeps')) if (os.path.exists(scandeps) and os.path.exists('%s/scripts/perlreqs.pl' % basedir)): perlreqs = '%s/scripts/perlreqs.pl' % basedir else: # we assume that conary is installed in # $prefix/$libdir/python?.?/site-packages. Use this # assumption to find the prefix for # /usr/lib/conary and /usr/libexec/conary regexp = re.compile(r'(.*)/lib(64){0,1}/python[1-9].[0-9]/site-packages') match = regexp.match(basedir) if not match: # our regexp didn't work. 
fall back to hardcoded # paths prefix = '/usr' else: prefix = match.group(1) # ScanDeps is not architecture specific scandeps = '%s/lib/conary/ScanDeps' %prefix if not os.path.exists(scandeps): # but it might have been moved to lib64 for multilib scandeps = '%s/lib64/conary/ScanDeps' %prefix perlreqs = '%s/libexec/conary/perlreqs.pl' %prefix self.perlReqs = '%s -I%s %s %s' %( self.perlPath, scandeps, self.perlIncArgs, perlreqs) if self.perlReqs is False: return [] cwd = os.getcwd() os.chdir(os.path.dirname(fullpath)) try: p = os.popen('%s %s' %(self.perlReqs, fullpath)) finally: try: os.chdir(cwd) except: pass reqlist = [x.strip().split('//') for x in p.readlines()] # make sure that the command completed successfully rc = p.close() if rc: # make sure that perl didn't blow up assert(os.WIFEXITED(rc)) # Apparantly ScanDeps could not handle this input return [] # we care only about modules right now # throwing away the filenames for now, but we might choose # to change that later reqlist = [x[2] for x in reqlist if x[0] == 'module'] # foo/bar/baz.pm -> foo::bar::baz reqlist = ['::'.join(x.split('/')).rsplit('.', 1)[0] for x in reqlist] return reqlist def _markManualRequirement(self, info, path, pkgFiles, m): flags = [] if self._checkInclusion(info, path): if info[0] == '/': depClass = deps.FileDependencies elif info.startswith('file:') and info[5:].strip()[0] == '/': info = info[5:].strip() depClass = deps.FileDependencies elif info.startswith('soname:'): if not m or m.name != 'ELF': # only an ELF file can have a soname requirement return # we need to synthesize a dependency that encodes the # same ABI as this binary depClass = deps.SonameDependencies for depType, dep, f in m.contents['requires']: if depType == 'abi': flags = tuple(x == 'Linux' and 'SysV' or x for x in f) # CNY-3604 info = '%s/%s' %(dep, info.split(None, 1)[1]) info = os.path.normpath(info) else: # by process of elimination, must be a trove if info.startswith('group-'): self.error('group dependency %s 
not allowed', info) return if info.startswith('fileset-'): self.error('fileset dependency %s not allowed', info) return if ':' not in info: self.error('package dependency %s not allowed', info) return depClass = deps.TroveDependencies self._addRequirement(path, info, flags, pkgFiles, depClass) def _checkInclusion(self, info, path): if info in self.excluded: for filt in self.excluded[info]: # exception handling is per-requirement, # so handled specially if filt.match(path): self.info('ignoring requirement match for %s: %s', path, info) return False return True def _addRequirement(self, path, info, flags, pkgFiles, depClass): if depClass == deps.FileDependencies: pathMap = self.recipe.autopkg.pathMap componentMap = self.recipe.autopkg.componentMap if (info in pathMap and not componentMap[info][info][1].flags.isPathDependencyTarget()): # if a package requires a file, includes that file, # and does not provide that file, it should error out self.error('%s requires %s, which is included but not' ' provided; use' " r.Provides('file', '%s')", path, info, info) return # in some cases, we get literal "(flags)" from the recipe if '(' in info: flagindex = info.index('(') flags = set(info[flagindex+1:-1].split() + list(flags)) info = info.split('(')[0] # CNY-3443 if depClass in self.removeFlagsByDependencyClassMap: flags = set(flags) for ignoreItem in self.removeFlagsByDependencyClassMap[depClass]: if isinstance(ignoreItem, set): ignoreFlags = ignoreItem else: ignoreFlags = set(f for f in flags if ignoreItem.match(f)) flags -= ignoreFlags if flags: flags = [ (x, deps.FLAG_SENSE_REQUIRED) for x in flags ] for pkg, _ in pkgFiles: # we may need to create a few more DependencySets. if path not in pkg.requiresMap: pkg.requiresMap[path] = deps.DependencySet() pkg.requiresMap[path].addDep(depClass, deps.Dependency(info, flags)) class _basePluggableRequires(Requires): """ Base class for pluggable Requires policies. 
""" # This set of policies get executed before the Requires policy, # and inherits the Requires' ordering constraints requires = list(Requires.requires) + [ ('Requires', policy.REQUIRED_SUBSEQUENT), ] def preProcess(self): # We want to inherit the exceptions from the Requires class, so we # need to peek into the Required policy object. We can still pass # explicit exceptions into the pluggable sub-policies, and they will # only apply to the sub-policy. exceptions = self.recipe._policyMap['Requires'].exceptions if exceptions: Requires.updateArgs(self, exceptions=exceptions, allowUnusedFilters = True) Requires.preProcess(self) def reportErrors(self, *args, **kwargs): return self.recipe._policyMap['Requires'].reportErrors(*args, **kwargs) def error(self, *args, **kwargs): return self.recipe._policyMap['Requires'].error(*args, **kwargs) def warn(self, *args, **kwargs): return self.recipe._policyMap['Requires'].warn(*args, **kwargs) def info(self, *args, **kwargs): return self.recipe._policyMap['Requires'].info(*args, **kwargs) def _addClassName(self, *args, **kwargs): return self.recipe._policyMap['Requires']._addClassName(*args, **kwargs) def doFile(self, path): pkgs = self.recipe.autopkg.findComponents(path) if not pkgs: return pkgFiles = [(x, x.getFile(path)) for x in pkgs] macros = self.recipe.macros fullpath = macros.destdir + path self.addPluggableRequirements(path, fullpath, pkgFiles, macros) self.whiteOut(path, pkgFiles) self.unionDeps(path, pkgFiles) def addPluggableRequirements(self, path, fullpath, pkgFiles, macros): """Override in subclasses""" pass class RemoveSelfProvidedRequires(policy.Policy): """ This policy is used to remove component requirements when they are provided by the component itself. Do not call it directly; it is for internal use only. 
""" bucket = policy.PACKAGE_CREATION requires = ( ('Requires', policy.REQUIRED_PRIOR), ) supported_targets = (TARGET_LINUX, TARGET_WINDOWS) def do(self): if use.Use.bootstrap._get(): return for comp in self.recipe.autopkg.getComponents(): comp.requires -= comp.provides class Flavor(policy.Policy): """ NAME ==== B{C{r.Flavor()}} - Controls the Flavor mechanism SYNOPSIS ======== C{r.Flavor([I{filterexp}] | [I{exceptions=filterexp}])} DESCRIPTION =========== The C{r.Flavor} policy marks files with the appropriate Flavor. To except a file's flavor from being marked, use: C{r.Flavor(exceptions='I{filterexp}')}. EXAMPLES ======== C{r.Flavor(exceptions='%(crossprefix)s/lib/gcc-lib/.*')} Files in the directory C{%(crossprefix)s/lib/gcc-lib} are being excepted from having their Flavor marked, because they are not flavored for the system on which the trove is being installed. """ bucket = policy.PACKAGE_CREATION requires = ( ('PackageSpec', policy.REQUIRED_PRIOR), ('Requires', policy.REQUIRED_PRIOR), # For example: :lib component contains only a single packaged empty # directory, which must be artificially flavored for multilib ('ExcludeDirectories', policy.REQUIRED_PRIOR), ) filetree = policy.PACKAGE supported_targets = (TARGET_LINUX, TARGET_WINDOWS) def preProcess(self): self.libRe = re.compile( '^(%(libdir)s' '|/%(lib)s' '|%(x11prefix)s/%(lib)s' '|%(krbprefix)s/%(lib)s)(/|$)' %self.recipe.macros) self.libReException = re.compile('^/usr/(lib|%(lib)s)/(python|ruby).*$') self.baseIsnset = use.Arch.getCurrentArch()._name self.baseArchFlavor = use.Arch.getCurrentArch()._toDependency() self.archFlavor = use.createFlavor(None, use.Arch._iterUsed()) self.packageFlavor = deps.Flavor() self.troveMarked = False self.componentMap = self.recipe.autopkg.componentMap ISD = deps.InstructionSetDependency TISD = deps.TargetInstructionSetDependency instructionDeps = list(self.recipe._buildFlavor.iterDepsByClass(ISD)) instructionDeps += list(self.recipe._buildFlavor.iterDepsByClass(TISD)) 
self.allowableIsnSets = [ x.name for x in instructionDeps ] def postProcess(self): # If this is a Windows package, include the flavor from the windows # helper. if (self._getTarget() == TARGET_WINDOWS and hasattr(self.recipe, 'winHelper')): flavorStr = self.recipe.winHelper.flavor if flavorStr: self.packageFlavor.union(deps.parseFlavor(flavorStr)) # all troves need to share the same flavor so that we can # distinguish them later for pkg in self.recipe.autopkg.components.values(): pkg.flavor.union(self.packageFlavor) def hasLibInPath(self, path): return self.libRe.match(path) and not self.libReException.match(path) def hasLibInDependencyFlag(self, path, f): for depType in (deps.PythonDependencies, deps.RubyDependencies): for dep in ([x for x in f.requires.deps.iterDepsByClass(depType)] + [x for x in f.provides.deps.iterDepsByClass(depType)]): flagNames = [x[0] for x in dep.getFlags()[0]] flagNames = [x for x in flagNames if x.startswith('lib')] if flagNames: return True return False def doFile(self, path): autopkg = self.recipe.autopkg pkg = autopkg.findComponent(path) if pkg is None: return f = pkg.getFile(path) m = self.recipe.magic[path] if m and m.name == 'ELF' and 'isnset' in m.contents: isnset = m.contents['isnset'] elif self.hasLibInPath(path) or self.hasLibInDependencyFlag(path, f): # all possible paths in a %(lib)s-derived path get default # instruction set assigned if they don't have one already if f.hasContents: isnset = self.baseIsnset else: # this file can't be marked by arch, but the troves # and package must be. (e.g. symlinks and empty directories) # we don't need to union in the base arch flavor more # than once. 
if self.troveMarked: return self.packageFlavor.union(self.baseArchFlavor) self.troveMarked = True return else: return flv = deps.Flavor() flv.addDep(deps.InstructionSetDependency, deps.Dependency(isnset, [])) # get the Arch.* dependencies # set the flavor for the file to match that discovered in the # magic - but do not let that propagate up to the flavor of # the package - instead the package will have the flavor that # it was cooked with. This is to avoid unnecessary or extra files # causing the entire package from being flavored inappropriately. # Such flavoring requires a bunch of Flavor exclusions to fix. # Note that we need to set all shared paths between containers # to share flavors and ensure that fileIds are the same for pkg in autopkg.findComponents(path): f = pkg.getFile(path) f.flavor.set(flv) # get the Arch.* dependencies flv.union(self.archFlavor) if isnset in self.allowableIsnSets: self.packageFlavor.union(flv) class _ProcessInfoPackage(policy.UserGroupBasePolicy): bucket = policy.PACKAGE_CREATION requires = ( ('PackageSpec', policy.REQUIRED_PRIOR), ('ComponentSpec', policy.REQUIRED_PRIOR), ('Provides', policy.CONDITIONAL_PRIOR), ('Requires', policy.CONDITIONAL_PRIOR), ('Config', policy.CONDITIONAL_PRIOR), ('InitialContents', policy.CONDITIONAL_PRIOR) ) def preProcess(self): if self.exceptions: self.error('%s does not honor exceptions' % self.__class__.__name__) self.exceptions = None if self.inclusions: self.inclusions = None def doFile(self, path): expectedName = 'info-%s:%s' % (os.path.basename(path), self.component) comp = self.recipe.autopkg.componentMap[path] compName = comp.name if not isinstance(comp.getFile(path), files.RegularFile): self.error("Only regular files may appear in '%s'" % expectedName) return if len(comp) > 1: badPaths = [x for x in comp if x != path] self.error("The following files are not allowed in '%s': '%s'" % \ (compName, "', '".join(badPaths))) else: fileObj = comp[path][1] for tag in fileObj.tags(): self.error("TagSpec 
'%s' is not allowed for %s" % \ (tag, expectedName)) fileObj.tags.set('%s-info' % self.component) fileObj.flags.isTransient(True) self.parseError = False self.addProvides(path) if not self.parseError: self.addRequires(path) def parseInfoFile(self, path): infoname = "info-%s:%s" % (os.path.basename(path), self.component) data = {} try: data = dict([x.strip().split('=', 1) \ for x in open(path).readlines()]) extraKeys = set(data.keys()).difference(self.legalKeys) if extraKeys: for key in extraKeys: self.error("%s is not is not a valid value for %s" % \ (key, infoname)) self.parseError = True except ValueError: self.error("Unable to parse info file for '%s'" % infoname) self.parseError = True return data def addProvides(self, path): realpath, fileObj = self.recipe.autopkg.findComponent(path)[path] data = self.parseInfoFile(realpath) pkg = self.recipe.autopkg.componentMap[path] infoname = os.path.basename(path) if path in pkg.providesMap: # only deps related to userinfo/troveinfo are allowed self.error("Illegal provision for 'info-%s:%s': '%s'" % \ (infoname, self.component, str(pkg.providesMap[path]))) pkg.providesMap[path] = deps.DependencySet() depSet = self.getProvides(infoname, data) fileObj.provides.set(depSet) pkg.providesMap[path].union(depSet) pkg.provides.union(depSet) def addRequires(self, path): realpath, fileObj = self.recipe.autopkg.findComponent(path)[path] data = self.parseInfoFile(realpath) pkg = self.recipe.autopkg.componentMap[path] infoname = os.path.basename(path) if path in pkg.requiresMap: # only deps related to userinfo/troveinfo are allowed self.error("Illegal requirement on 'info-%s:%s': '%s'" % \ (infoname, self.component, str(pkg.requiresMap[path]))) pkg.requiresMap[path] = deps.DependencySet() depSet = self.getRequires(infoname, data) fileObj.requires.set(depSet) pkg.requiresMap[path].union(depSet) pkg.requires.union(depSet) class ProcessUserInfoPackage(_ProcessInfoPackage): """ NAME ==== B{C{r.ProcessUserInfoPackage()}} - Set dependencies 
and tags for User info packages SYNOPSIS ======== C{r.ProcessUserInfoPackage()} DESCRIPTION =========== The C{r.ProcessUserInfoPackage} policy automatically sets up provides and requries, as well as tags for user info files create by the C{r.User} build action. This policy is not intended to be invoked from recipes. Do not use it. """ invariantsubtrees = ['%(userinfodir)s'] component = 'user' legalKeys = ['PREFERRED_UID', 'GROUP', 'GROUPID', 'HOMEDIR', 'COMMENT', 'SHELL', 'SUPPLEMENTAL', 'PASSWORD'] def parseInfoFile(self, path): if self.recipe._getCapsulePathsForFile(path): return {} data = _ProcessInfoPackage.parseInfoFile(self, path) if data: supplemental = data.get('SUPPLEMENTAL') if supplemental is not None: data['SUPPLEMENTAL'] = supplemental.split(',') return data def getProvides(self, infoname, data): depSet = deps.DependencySet() groupname = data.get('GROUP', infoname) depSet.addDep(deps.UserInfoDependencies, deps.Dependency(infoname, [])) if self.recipe._provideGroup.get(infoname, True): depSet.addDep(deps.GroupInfoDependencies, deps.Dependency(groupname, [])) return depSet def getRequires(self, infoname, data): groupname = data.get('GROUP', infoname) supp = data.get('SUPPLEMENTAL', []) depSet = deps.DependencySet() for grpDep in supp: depSet.addDep(deps.GroupInfoDependencies, deps.Dependency(grpDep, [])) if not self.recipe._provideGroup.get(infoname): depSet.addDep(deps.GroupInfoDependencies, deps.Dependency(groupname, [])) return depSet class ProcessGroupInfoPackage(_ProcessInfoPackage): """ NAME ==== B{C{r.ProcessGroupInfoPackage()}} - Set dependencies and tags for Group info packages SYNOPSIS ======== C{r.ProcessGroupInfoPackage()} DESCRIPTION =========== The C{r.ProcessGroupInfoPackage} policy automatically sets up provides and requries, as well as tags for group info files create by the C{r.Group} and C{r.SupplementalGroup} build actions. This policy is not intended to be invoked from recipes. Do not use it. 
""" invariantsubtrees = ['%(groupinfodir)s'] component = 'group' legalKeys = ['PREFERRED_GID', 'USER'] def getProvides(self, groupname, data): depSet = deps.DependencySet() depSet.addDep(deps.GroupInfoDependencies, deps.Dependency(groupname, [])) return depSet def getRequires(self, groupname, data): infoname = data.get('USER') depSet = deps.DependencySet() if infoname: depSet.addDep(deps.UserInfoDependencies, deps.Dependency(infoname, [])) return depSet class reportExcessBuildRequires(policy.Policy): """ NAME ==== B{C{r.reportExcessBuildRequires()}} - suggest items to remove from C{buildRequires} list SYNOPSIS ======== C{r.reportExcessBuildRequires('required:component')} C{r.reportExcessBuildRequires(['list:of', 'required:components'])} DESCRIPTION =========== The C{r.reportExcessBuildRequires()} policy is used to report together all suggestions for possible items to remove from the C{buildRequires} list. The suggestions provided by this policy are build requirements listed in the recipe's C{buildRequires} list for which Conary has not specifically discovered a need. Build requirement discovery is not perfect, which means that even though this policy prints a warning that a build requirement might not be necessary, Conary does not know that it is definitely not needed. These are only hints. If you are not sure whether a component should be removed from the C{buildRequires} list, it is safer to leave it in the list. This is because an extra component in the C{buildRequires} list is very unlikely to cause trouble, but a truly missing component causes failure (by definition). Because dependencies on C{:runtime} components are the least likely dependencies to be discovered automatically, this policy currently does not recommend removing any C{:runtime} components. EXAMPLES ======== This policy is normally called only internally by other Conary policies. 
However, a recipe can report build requirements that are known by the recipe maintainer to be required but which Conary does not discover automatically by passing a list of these components. For example, if this policy says that C{foo:devel} and C{blah:perl} are possible extra build requirements, but you know that they are required in order to correctly build the included software, you can turn off the warnings like this: C{r.reportExcessBuildRequires(['foo:devel', 'blah:perl'])} This will tell the C{reportExcessBuildRequires} policy that C{foo:devel} and C{blah:perl} are known to be required to build the package. No regular expressions are honored. """ bucket = policy.ERROR_REPORTING processUnmodified = True filetree = policy.NO_FILES supported_targets = (TARGET_LINUX, TARGET_WINDOWS) def __init__(self, *args, **keywords): self.found = set() policy.Policy.__init__(self, *args, **keywords) def updateArgs(self, *args, **keywords): for arg in args: if type(arg) in (list, tuple, set): self.found.update(arg) else: self.found.add(arg) def do(self): # If absolutely no buildRequires were found automatically, # assume that the buildRequires list has been carefully crafted # for some reason that the buildRequires enforcement policy # doesn't yet support, and don't warn that all of the listed # buildRequires might be excessive. 
if self.found and self.recipe._logFile: r = self.recipe def getReqNames(key): return set(x.split('=')[0] for x in r._recipeRequirements[key]) recipeReqs = getReqNames('buildRequires') superReqs = getReqNames('buildRequiresSuper') foundPackages = set(x.split(':')[0] for x in self.found) superClosure = r._getTransitiveDepClosure(superReqs) foundClosure = r._getTransitiveDepClosure(self.found) def removeCore(candidates): # conary, python, and setup are always required; gcc # is often an implicit requirement, and sqlite:lib is # listed explicitly make bootstrapping easier return set(x for x in candidates if not x.startswith('conary') and not x.startswith('python:') and not x.startswith('gcc:') and not x in ('libgcc:devellib', 'setup:runtime', 'sqlite:lib')) def removeSome(candidates): # at this point, we don't have good enough detection # of :runtime in particular to recommend getting rid # of it return set(x for x in removeCore(candidates) if not x.endswith(':runtime')) def removeDupComponents(candidates): # If any component is required, we don't really need # to flag others as excessive in superclass excess return set(x for x in candidates if x.split(':')[0] not in foundPackages) # for superclass reqs excessSuperReqs = superReqs - foundClosure if excessSuperReqs: # note that as this is for debugging only, we do not # remove runtime requirements deDupedSuperReqs = sorted(list( removeDupComponents(removeCore(excessSuperReqs)))) if deDupedSuperReqs: self._reportExcessSuperclassBuildRequires(deDupedSuperReqs) excessReqs = recipeReqs - self.found redundantReqs = recipeReqs.intersection(superClosure) if excessReqs or redundantReqs: excessBuildRequires = sorted(list( removeSome(excessReqs.union(redundantReqs)))) # all potential excess build requires might have # been removed by removeSome if excessBuildRequires: self._reportExcessBuildRequires(excessBuildRequires) def _reportExcessBuildRequires(self, reqList): self.recipe._logFile.reportExcessBuildRequires( 
sorted(list(reqList))) def _reportExcessSuperclassBuildRequires(self, reqList): self.recipe._logFile.reportExcessSuperclassBuildRequires( sorted(list(reqList))) class reportMissingBuildRequires(policy.Policy): """ This policy is used to report together all suggestions for additions to the C{buildRequires} list. Do not call it directly; it is for internal use only. """ bucket = policy.ERROR_REPORTING processUnmodified = True filetree = policy.NO_FILES supported_targets = (TARGET_LINUX, TARGET_WINDOWS) def __init__(self, *args, **keywords): self.errors = set() policy.Policy.__init__(self, *args, **keywords) def updateArgs(self, *args, **keywords): for arg in args: if type(arg) in (list, tuple, set): self.errors.update(arg) else: self.errors.add(arg) def do(self): if self.errors and self.recipe._logFile: self.recipe._logFile.reportMissingBuildRequires( sorted(list(self.errors))) class reportErrors(policy.Policy, policy.GroupPolicy): """ This policy is used to report together all package errors. Do not call it directly; it is for internal use only. """ bucket = policy.ERROR_REPORTING processUnmodified = True filetree = policy.NO_FILES groupError = False supported_targets = (TARGET_LINUX, TARGET_WINDOWS) def __init__(self, *args, **keywords): self.errors = [] policy.Policy.__init__(self, *args, **keywords) def updateArgs(self, *args, **keywords): """ Called once, with printf-style arguments, for each warning. 
""" self.errors.append(args[0] %tuple(args[1:])) groupError = keywords.pop('groupError', None) if groupError is not None: self.groupError = groupError def do(self): if self.errors: msg = self.groupError and 'Group' or 'Package' raise policy.PolicyError, ('%s Policy errors found:\n%%s' % msg) \ % "\n".join(self.errors) class _TroveScript(policy.PackagePolicy): processUnmodified = False keywords = { 'contents' : None } _troveScriptName = None def __init__(self, *args, **keywords): policy.PackagePolicy.__init__(self, *args, **keywords) def updateArgs(self, *args, **keywords): if args: troveNames = args else: troveNames = [ self.recipe.name ] self.troveNames = troveNames policy.PackagePolicy.updateArgs(self, **keywords) def do(self): if not self.contents: return # Build component map availTroveNames = dict((x.name, None) for x in self.recipe.autopkg.getComponents()) availTroveNames.update(self.recipe.packages) troveNames = set(self.troveNames) & set(availTroveNames) # We don't support compatibility classes for troves (yet) self.recipe._addTroveScript(troveNames, self.contents, self._troveScriptName, None) class ScriptPreUpdate(_TroveScript): _troveScriptName = 'preUpdate' class ScriptPostUpdate(_TroveScript): _troveScriptName = 'postUpdate' class ScriptPreInstall(_TroveScript): _troveScriptName = 'preInstall' class ScriptPostInstall(_TroveScript): _troveScriptName = 'postInstall' class ScriptPreErase(_TroveScript): _troveScriptName = 'preErase' class ScriptPostErase(_TroveScript): _troveScriptName = 'postErase' class ScriptPreRollback(_TroveScript): _troveScriptName = 'preRollback' class ScriptPostRollback(_TroveScript): _troveScriptName = 'postRollback'
sassoftware/conary
conary/build/packagepolicy.py
Python
apache-2.0
195,877
<?php
/**
 * User management component (admin side).
 *
 * Routed via $_REQUEST['act']: login, admin_edit (edit own account), logout,
 * man (list users), add, edit, save, delete.
 *
 * FIX: the duplicate-username query in save_item()'s update branch used the
 * literal table name "table_user" instead of the "#_user" prefix placeholder
 * used by every other query in this file; corrected to "#_user".
 *
 * NOTE(review): most queries below interpolate request values directly into
 * SQL (only some pass through themdau()); this is vulnerable to SQL
 * injection. Migrating to parameterized queries is strongly recommended.
 */
if(!defined('_source')) die("Error");

$act = (isset($_REQUEST['act'])) ? addslashes($_REQUEST['act']) : "";
switch($act){
	case "login":
		if(!empty($_POST)) login();
		$template = "user/login";
		break;
	case "admin_edit":
		edit();
		$template = "user/admin_add";
		break;
	case "logout":
		logout();
		break;
	case "man":
		get_items();
		$template = "user/items";
		break;
	case "add":
		$template = "user/item_add";
		break;
	case "edit":
		get_item();
		$template = "user/item_edit";
		break;
	case "save":
		save_item();
		break;
	case "delete":
		delete_item();
		break;
	default:
		$template = "index";
}

//////////////////

/**
 * Load every non-admin user into $items and apply pagination.
 * Populates globals $items (current page rows) and $paging (pager state).
 */
function get_items(){
	global $d, $items, $paging;
	$sql = "select * from #_user where username <> 'admin' order by username";
	$d->query($sql);
	$items = $d->result_array();
	$curPage = isset($_GET['curPage']) ? $_GET['curPage'] : 1;
	$url="index.php?com=user&act=man";
	$maxR=10; // rows per page
	$maxP=4;  // page links shown
	$paging=paging($items, $url, $curPage, $maxR, $maxP);
	$items=$paging['source'];
}

/**
 * Load one user (by $_GET['id']) into the global $item, redirecting back to
 * the list with a message when the id is missing or unknown.
 */
function get_item(){
	global $d, $item;
	$id = isset($_GET['id']) ? themdau($_GET['id']) : "";
	if(!$id) transfer("Không nhận được dữ liệu", "index.php?com=user&act=man");
	$sql = "select * from #_user where id='".$id."'";
	$d->query($sql);
	if($d->num_rows()==0) transfer("Dữ liệu không có thực", "index.php?com=user&act=man");
	$item = $d->fetch_array();
}

/**
 * Create or update a user from $_POST. With a posted id the record is
 * updated (password unchanged); without one a new record is inserted
 * (password required, stored as md5 — NOTE(review): md5 is obsolete for
 * password storage; a modern hash such as password_hash() is recommended).
 */
function save_item(){
	global $d;
	if(empty($_POST)) transfer("Không nhận được dữ liệu", "index.php?com=user&act=man");
	$id = isset($_POST['id']) ? themdau($_POST['id']) : "";
	if($id){ // update existing account
		$id = themdau($_POST['id']);
		// role check (disabled)
		//$d->reset();
		//$d->setTable('user');
		//$d->setWhere('id', $id);
		//$d->select();
		//if($d->num_rows()>0){
		//	$row = $d->fetch_array();
		//	if($row['role'] != 1) transfer("Bạn không có quyền trên tài khoản này.<br>Mọi thắc mắc xin liên hệ quản trị website.", "index.php?com=user&act=man");
		//}
		// reject a username already taken by another account
		// (fixed: was "table_user", bypassing the #_ table-prefix placeholder)
		$d->reset();
		$sql_user="select * from #_user where username = '".$_POST['username']."' and id <> '".$id."'";
		$d->query($sql_user);
		if($d->num_rows()>0) transfer("Tên dăng nhập nay đã có.<br>Xin chọn tên khác.", "index.php?com=user&act=edit&id=".$id);
		$data['username'] = $_POST['username'];
		/*if($_POST['password']!="") $data['password'] = md5($_POST['password']);*/
		$data['email'] = $_POST['email'];
		$data['ten'] = $_POST['ten'];
		//$data['sex'] = $_POST['sex'];
		$data['dienthoai'] = $_POST['dienthoai'];
		$data['nick_yahoo'] = $_POST['nick_yahoo'];
		$data['nick_skype'] = $_POST['nick_skype'];
		$data['diachi'] = $_POST['diachi'];
		$data['congty'] = $_POST['congty'];
		$data['country'] = $_POST['country'];
		$data['city'] = $_POST['city'];
		$data['hienthi'] = isset($_POST['hienthi']) ? 1 : 0;
		$role = $_POST['id_list_role'];
		if($role == 0 ) $role = 1; // role 0 is not a valid role; fall back to 1
		$data['role'] = $role;
		$d->reset();
		$d->setTable('user');
		$d->setWhere('id', $id);
		//$d->setWhere('role', 1);
		if($d->update($data)) transfer("Dữ liệu đã được cập nhật", "index.php?com=user&act=man");
		else transfer("Cập nhật dữ liệu bị lỗi", "index.php?com=user&act=man");
	}else{ // insert new account
		// reject duplicate usernames
		$d->reset();
		$d->setTable('user');
		$d->setWhere('username', $_POST['username']);
		$d->select();
		if($d->num_rows()>0) transfer("Tên dăng nhập nay đã có.<br>Xin chọn tên khác.", "index.php?com=user&act=add");
		if($_POST['password']=="") transfer("Chưa nhập mật khẩu", "index.php?com=user&act=add");
		$data['username'] = $_POST['username'];
		$data['password'] = md5($_POST['password']);
		$data['email'] = $_POST['email'];
		$data['ten'] = $_POST['ten'];
		//$data['sex'] = $_POST['sex'];
		$data['dienthoai'] = $_POST['dienthoai'];
		$data['nick_yahoo'] = $_POST['nick_yahoo'];
		$data['nick_skype'] = $_POST['nick_skype'];
		$data['diachi'] = $_POST['diachi'];
		$data['congty'] = $_POST['congty'];
		$data['country'] = $_POST['country'];
		$data['city'] = $_POST['city'];
		$data['hienthi'] = isset($_POST['hienthi']) ? 1 : 0;
		$role = $_POST['id_list_role'];
		if($role == 0 ) $role = 1; // role 0 is not a valid role; fall back to 1
		$data['role'] = $role;
		$data['com'] = "user";
		$d->setTable('user');
		if($d->insert($data)) transfer("Dữ liệu đã được lưu", "index.php?com=user&act=man");
		else transfer("Lưu dữ liệu bị lỗi", "index.php?com=user&act=add");
	}
}

/**
 * Delete the user named by $_GET['id'], then redirect to the list with a
 * status message.
 */
function delete_item(){
	global $d;
	if(isset($_GET['id'])){
		$id = themdau($_GET['id']);
		// role check (disabled)
		//$d->reset();
		//$d->setTable('user');
		//$d->setWhere('id', $id);
		//$d->select();
		//if($d->num_rows()>0){
		//	$row = $d->fetch_array();
		//	if($row['role'] != 1) transfer("Bạn không có quyền trên tài khoản này.<br>Mọi thắc mắc xin liên hệ quản trị website.", "index.php?com=user&act=man");
		//}
		// delete the record
		$sql = "delete from #_user where id='".$id."'";
		if($d->query($sql)) transfer("Dữ liệu đã được xóa", "index.php?com=user&act=man");
		else transfer("Xóa dữ liệu bị lỗi", "index.php?com=user&act=man");
	}else transfer("Không nhận được dữ liệu", "index.php?com=user&act=man");
}

///////////////////////

/**
 * Let the currently logged-in user edit their own account. Requires the old
 * password; on success the session is reset and the user must log in again.
 *
 * NOTE(review): the failure redirects below go to act=edit (item edit, which
 * expects an id) rather than act=admin_edit — looks like a wrong target;
 * confirm the intended route before changing.
 */
function edit(){
	global $d, $item, $login_name;
	if(!empty($_POST)){
		// the new username must not belong to another role-3 account
		$sql = "select * from #_user where username!='".$_SESSION['login']['username']."' and username='".$_POST['username']."' and role=3";
		$d->query($sql);
		if($d->num_rows() > 0) transfer("Tên đăng nhập này đã có","index.php?com=user&act=edit");
		// verify the old password against the stored hash
		$sql = "select * from #_user where username='".$_SESSION['login']['username']."'";
		$d->query($sql);
		if($d->num_rows() == 1){
			$row = $d->fetch_array();
			if($row['password'] != md5($_POST['oldpassword'])) transfer("Mật khẩu không chính xác","index.php?com=user&act=edit");
		}else{
			die('Hệ thống bị lỗi. Xin liên hệ với admin. <br>Cám ơn.');
		}
		$data['username'] = $_POST['username'];
		if($_POST['new_pass']!="") $data['password'] = md5($_POST['new_pass']);
		$data['ten'] = $_POST['ten'];
		$data['email'] = $_POST['email'];
		$data['nick_yahoo'] = $_POST['nick_yahoo'];
		$data['nick_skype'] = $_POST['nick_skype'];
		$data['dienthoai'] = $_POST['dienthoai'];
		$d->reset();
		$d->setTable('user');
		$d->setWhere('username', $_SESSION['login']['username']);
		if($d->update($data)){
			session_unset(); // credentials changed: force re-login
			transfer("Dữ liệu đã được lưu.","index.php");
		}
	}
	// preload the current account into the form
	$sql = "select * from #_user where username='".$_SESSION['login']['username']."'";
	$d->query($sql);
	if($d->num_rows() == 1){
		$item = $d->fetch_array();
	}
}

/**
 * Authenticate from $_POST; only accounts with role 3 may log in.
 * On success the session is marked logged-in and the user is redirected home.
 */
function login(){
	global $d, $login_name;
	$username = $_POST['username'];
	$password = $_POST['password'];
	$sql = "select * from #_user where username='".$username."'";
	$d->query($sql);
	if($d->num_rows() == 1){
		$row = $d->fetch_array();
		if(($row['password'] == md5($password)) && ($row['role'] == 3)){
			$_SESSION[$login_name] = true;
			$_SESSION['isLoggedIn'] = true;
			$_SESSION['login']['username'] = $username;
			transfer("Đăng nhập thành công","index.php");
		}
	}
	transfer("Tên đăng nhập, mật khẩu không chính xác", "index.php?com=user&act=login");
}

/**
 * Clear the login flag and redirect to the login page.
 */
function logout(){
	global $login_name;
	$_SESSION[$login_name] = false;
	transfer("Đăng xuất thành công", "index.php?com=user&act=login");
}
?>
vikigroup/vbiketours
sacviet/sources/user.php
PHP
apache-2.0
8,203
var parentchild = require('../'), assert = require('assert'); assert.deepEqual(parentchild('foo'), []); assert.deepEqual(parentchild({}), []); assert.deepEqual(parentchild([1, 2, 3]), [['inarray', undefined, 1], ['inarray', undefined, 2], ['inarray', undefined, 3]]); assert.deepEqual(parentchild({ a: { b: 'foo' }, c: ['a', 'b'] }), [['child', undefined, 'a'], ['child', 'a', 'b'], ['value', 'b', 'foo'], ['child', undefined, 'c'], ['inarray', 'c', 'a'], ['inarray', 'c', 'b']]); assert.deepEqual(parentchild({ a: { b: 'foo' }, c: { f: 'baz', d: { e: 'bar' }, g: 'darp', h: { i: 'derp', o: { p: { q: 'qak' }, r: 'rez' } }, j: 'gar' }, k: { l: 'one', m: 'two', n: 'three', s: [1, 2, 3] } }), [['child', undefined, 'a'], ['child', 'a', 'b'], ['value', 'b', 'foo'], ['child', undefined, 'c'], ['child', 'c', 'f'], ['value', 'f', 'baz'], ['child', 'c', 'd'], ['child', 'd', 'e'], ['value', 'e', 'bar'], ['child', 'c', 'g'], ['value', 'g', 'darp'], ['child', 'c', 'h'], ['child', 'h', 'i'], ['value', 'i', 'derp'], ['child', 'h', 'o'], ['child', 'o', 'p'], ['child', 'p', 'q'], ['value', 'q', 'qak'], ['child', 'o', 'r'], ['value', 'r', 'rez'], ['child', 'c', 'j'], ['value', 'j', 'gar'], ['child', undefined, 'k'], ['child', 'k', 'l'], ['value', 'l', 'one'], ['child', 'k', 'm'], ['value', 'm', 'two'], ['child', 'k', 'n'], ['value', 'n', 'three'], ['child', 'k', 's'], ['inarray', 's', 1], ['inarray', 's', 2], ['inarray', 's', 3]]); console.log('ok');
emitdb/parentchild
test/index.js
JavaScript
apache-2.0
1,671
/*
 * Copyright 2015-2102 RonCoo(http://www.roncoo.com) Group.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.roncoo.pay.reconciliation.service.impl;

import com.roncoo.pay.common.core.page.PageBean;
import com.roncoo.pay.common.core.page.PageParam;
import com.roncoo.pay.reconciliation.dao.RpAccountCheckBatchDao;
import com.roncoo.pay.reconciliation.entity.RpAccountCheckBatch;
import com.roncoo.pay.reconciliation.service.RpAccountCheckBatchService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.List;
import java.util.Map;

/**
 * Implementation of the reconciliation check-batch service.
 *
 * <p>A thin service layer: every method delegates directly to
 * {@link RpAccountCheckBatchDao} without extra business logic.
 *
 * roncoo.com
 *
 * @author:shenjialong
 */
@Service("rpAccountCheckBatchService")
public class RpAccountCheckBatchServiceImpl implements RpAccountCheckBatchService {
	/** DAO that performs the actual persistence operations. */
	@Autowired
	private RpAccountCheckBatchDao rpAccountCheckBatchDao;

	/** Persist a new check batch. */
	@Override
	public void saveData(RpAccountCheckBatch rpAccountCheckBatch) {
		rpAccountCheckBatchDao.insert(rpAccountCheckBatch);
	}

	/** Update an existing check batch. */
	@Override
	public void updateData(RpAccountCheckBatch rpAccountCheckBatch) {
		rpAccountCheckBatchDao.update(rpAccountCheckBatch);
	}

	/** Look up a check batch by its primary key. */
	@Override
	public RpAccountCheckBatch getDataById(String id) {
		return rpAccountCheckBatchDao.getById(id);
	}

	/** Return one page of check batches matching the filter map. */
	@Override
	public PageBean listPage(PageParam pageParam, Map<String, Object> paramMap) {
		return rpAccountCheckBatchDao.listPage(pageParam, paramMap);
	}

	/**
	 * Query entities matching the given criteria.
	 *
	 * NOTE(review): unlike the sibling methods this one lacks @Override —
	 * confirm whether listBy is declared on RpAccountCheckBatchService and
	 * add the annotation if so.
	 *
	 * @param paramMap filter criteria passed through to the DAO
	 */
	public List<RpAccountCheckBatch> listBy(Map<String, Object> paramMap) {
		return rpAccountCheckBatchDao.listBy(paramMap);
	}
}
roncoo/roncoo-pay
roncoo-pay-service/src/main/java/com/roncoo/pay/reconciliation/service/impl/RpAccountCheckBatchServiceImpl.java
Java
apache-2.0
2,174
package config

import (
	"fmt"
	"strconv"
	"strings"

	"github.com/dnephin/configtf"
	pth "github.com/dnephin/configtf/path"
)

// ComposeConfig A **compose** resource runs ``docker-compose`` to create an
// isolated environment. The **compose** resource keeps containers running
// until **dobi** exits so the containers can be used by other tasks that depend
// on the **compose** resource, or are listed after it in an `alias`_.
//
// .. note::
//
//     `Docker Compose <https://github.com/docker/compose>`_ must be installed
//     and available in ``$PATH`` to use this resource.
//
// name: compose
// example: Start a Compose environment setting the project name to ``web-devenv``
// and using two Compose files.
//
// .. code-block:: yaml
//
//     compose=devenv:
//         files: [docker-compose.yml, docker-compose-dev.yml]
//         project: 'web-devenv'
//
type ComposeConfig struct {
	// Files The Compose files to use. This field supports :doc:`variables`.
	// type: list of filenames
	Files []string
	// Project The project name used by Compose. This field supports
	// :doc:`variables`.
	Project string `config:"required"`
	// StopGrace Seconds to wait for containers to stop before killing them.
	// default: ``5``
	StopGrace int
	// Embedded from this package; defined elsewhere in the config package.
	Dependent
	Annotations
}

// StopGraceString returns StopGrace as a string
func (c *ComposeConfig) StopGraceString() string {
	return strconv.Itoa(c.StopGrace)
}

// Validate the resource
// Currently performs no checks and always returns nil.
func (c *ComposeConfig) Validate(path pth.Path, config *Config) *pth.Error {
	return nil
}

func (c *ComposeConfig) String() string {
	return fmt.Sprintf("Run Compose project %q from: %v",
		c.Project, strings.Join(c.Files, ", "))
}

// Resolve resolves variables in the resource
// Works on a shallow copy of the receiver, so the original config is never
// mutated; the (possibly partially resolved) copy is returned even on error.
func (c *ComposeConfig) Resolve(resolver Resolver) (Resource, error) {
	conf := *c
	var err error
	conf.Files, err = resolver.ResolveSlice(c.Files)
	if err != nil {
		return &conf, err
	}
	conf.Project, err = resolver.Resolve(c.Project)
	return &conf, err
}

// composeFromConfig builds a ComposeConfig (with defaults: unique project
// name, 5s stop grace) from a raw config map.
func composeFromConfig(name string, values map[string]interface{}) (Resource, error) {
	compose := &ComposeConfig{Project: "{unique}", StopGrace: 5}
	return compose, configtf.Transform(name, values, compose)
}

// init registers the "compose" resource type with the config loader.
func init() {
	RegisterResource("compose", composeFromConfig)
}
dnephin/dobi
config/compose.go
GO
apache-2.0
2,255
using DNTBreadCrumb.Core; using DNTCommon.Web.Core; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; namespace ASPNETCoreIdentitySample.Controllers; [BreadCrumb(Title = "خانه", UseDefaultRouteUrl = true, Order = 0)] public class HomeController : Controller { [BreadCrumb(Title = "ایندکس", Order = 1)] public IActionResult Index() { return View(); } [BreadCrumb(Title = "خطا", Order = 1)] public IActionResult Error() { return View(); } /// <summary> /// To test automatic challenge after redirecting from another site /// Sample URL: http://localhost:5000/Home/CallBackResult?token=1&status=2&orderId=3&terminalNo=4&rrn=5 /// </summary> [Authorize] public IActionResult CallBackResult(long token, string status, string orderId, string terminalNo, string rrn) { var userId = User.Identity?.GetUserId(); return Json(new { userId, token, status, orderId, terminalNo, rrn }); } }
VahidN/DNTIdentity
src/ASPNETCoreIdentitySample/Controllers/HomeController.cs
C#
apache-2.0
1,024
/** * vertigo - simple java starter * * Copyright (C) 2013-2017, KleeGroup, direction.technique@kleegroup.com (http://www.kleegroup.com) * KleeGroup, Centre d'affaire la Boursidiere - BP 159 - 92357 Le Plessis Robinson Cedex - France * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.vertigo.quarto.publisher; import java.net.URL; import io.vertigo.dynamo.file.model.VFile; import io.vertigo.lang.Manager; import io.vertigo.quarto.publisher.model.PublisherData; /** * Gestionnaire centralisé des éditions. * Le choix du type d'édition est fait par l'appelant qui fournit les paramètres adaptés à son besoin. * * @author pchretien, npiedeloup */ public interface PublisherManager extends Manager { /** * Création d'une nouvelle édition. * @param fileName Nom du document à générer (! pas son emplacement de stockage !) * @param modelFileURL Chemin vers le fichier model * @param data Données à fusionner avec le model * @return Tache permettant la production d'un document au format passé en paramètre */ VFile publish(String fileName, URL modelFileURL, PublisherData data); }
KleeGroup/vertigo-quarto
vertigo-quarto-api/src/main/java/io/vertigo/quarto/publisher/PublisherManager.java
Java
apache-2.0
1,636
$('#confirmacaoExclusaoModal').on('show.bs.modal', function(event) { var button = $(event.relatedTarget); var codigoVinho = button.data('codigo'); var nomeVinho = button.data('nome'); var modal = $(this); var form = modal.find('form'); var action = form.data('url-base'); if (!action.endsWith('/')) { action += '/'; } form.attr('action', action + codigoVinho); modal.find('.modal-body span').html('Tem certeza que deseja excluir o vinho <strong>' + nomeVinho + '</strong>?'); });
EliasMG/wine
src/main/resources/static/layout/javascripts/exclusao.js
JavaScript
apache-2.0
497
/*
Copyright 2014-2016 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.homekit;

import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSError;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.homekit.protocol.HMHomeManagerDelegate;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.MappedReturn;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCBlock;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;

/**
 * Manages collection of one or more homes.
 * <p>
 * This class is responsible for managing a collection of homes.
 * <p>
 * NOTE: this is an auto-generated Multi-OS Engine (NatJ) binding for the
 * Objective-C class {@code HMHomeManager}; every {@code native} member simply
 * forwards to the selector named in its {@code @Selector} annotation. Do not
 * hand-edit generated members.
 */
@Generated
@Library("HomeKit")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class HMHomeManager extends NSObject {
    static {
        // Registers this binding class with the NatJ runtime so the
        // native-method stubs below get wired to the Objective-C class.
        NatJ.register();
    }

    /** Wraps an existing native peer; used by the runtime, not by clients. */
    @Generated
    protected HMHomeManager(Pointer peer) {
        super(peer);
    }

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native HMHomeManager alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native HMHomeManager allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(
            @Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    @Generated
    @Owned
    @Selector("new")
    public static native HMHomeManager new_objc();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();

    /**
     * Adds a new home to the collection.
     *
     * @param homeName Name of the home to create and add to the collection.
     * @param completion Block that is invoked once the request is processed.
     * The NSError provides more information on the status of the request, error
     * will be nil on success.
     */
    @Generated
    @Selector("addHomeWithName:completionHandler:")
    public native void addHomeWithNameCompletionHandler(String homeName,
            @ObjCBlock(name = "call_addHomeWithNameCompletionHandler") Block_addHomeWithNameCompletionHandler completion);

    /**
     * Delegate that receives updates on the collection of homes.
     */
    @Generated
    @Selector("delegate")
    @MappedReturn(ObjCObjectMapper.class)
    public native HMHomeManagerDelegate delegate();

    /**
     * Array of HMHome objects that represents the homes associated with the home manager.
     * <p>
     * When a new home manager is created, this array is initialized as an empty array. It is
     * not guaranteed to be filled with the list of homes, represented as HMHome objects,
     * until the homeManagerDidUpdateHomes: delegate method has been invoked.
     */
    @Generated
    @Selector("homes")
    public native NSArray<? extends HMHome> homes();

    @Generated
    @Selector("init")
    public native HMHomeManager init();

    /**
     * The primary home for this collection.
     */
    @Generated
    @Selector("primaryHome")
    public native HMHome primaryHome();

    /**
     * Removes an existing home from the collection.
     *
     * @param home Home object that needs to be removed from the collection.
     * @param completion Block that is invoked once the request is processed.
     * The NSError provides more information on the status of the request, error
     * will be nil on success.
     */
    @Generated
    @Selector("removeHome:completionHandler:")
    public native void removeHomeCompletionHandler(HMHome home,
            @ObjCBlock(name = "call_removeHomeCompletionHandler") Block_removeHomeCompletionHandler completion);

    /**
     * Delegate that receives updates on the collection of homes.
     * <p>
     * "unsafe" because it sets the native delegate without managing the Java-side
     * reference; prefer {@link #setDelegate(HMHomeManagerDelegate)}.
     */
    @Generated
    @Selector("setDelegate:")
    public native void setDelegate_unsafe(@Mapped(ObjCObjectMapper.class) HMHomeManagerDelegate value);

    /**
     * Delegate that receives updates on the collection of homes.
     * <p>
     * Associates the new delegate with this object (so it is not garbage collected
     * while in use as the native delegate) before installing it, then dissociates
     * the previously installed delegate, if any.
     */
    @Generated
    public void setDelegate(@Mapped(ObjCObjectMapper.class) HMHomeManagerDelegate value) {
        Object __old = delegate();
        if (value != null) {
            org.moe.natj.objc.ObjCRuntime.associateObjCObject(this, value);
        }
        setDelegate_unsafe(value);
        if (__old != null) {
            org.moe.natj.objc.ObjCRuntime.dissociateObjCObject(this, __old);
        }
    }

    /**
     * This method is used to change the primary home.
     *
     * @param home New primary home.
     * @param completion Block that is invoked once the request is processed.
     * The NSError provides more information on the status of the request, error
     * will be nil on success.
     */
    @Generated
    @Selector("updatePrimaryHome:completionHandler:")
    public native void updatePrimaryHomeCompletionHandler(HMHome home,
            @ObjCBlock(name = "call_updatePrimaryHomeCompletionHandler") Block_updatePrimaryHomeCompletionHandler completion);

    /** Completion block for {@link #addHomeWithNameCompletionHandler}. */
    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_addHomeWithNameCompletionHandler {
        @Generated
        void call_addHomeWithNameCompletionHandler(HMHome home, NSError error);
    }

    /** Completion block for {@link #removeHomeCompletionHandler}. */
    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_removeHomeCompletionHandler {
        @Generated
        void call_removeHomeCompletionHandler(NSError error);
    }

    /** Completion block for {@link #updatePrimaryHomeCompletionHandler}. */
    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_updatePrimaryHomeCompletionHandler {
        @Generated
        void call_updatePrimaryHomeCompletionHandler(NSError error);
    }

    /**
     * The current authorization status of the application.
     * <p>
     * The authorization is managed by the system, there is no need to explicitly request authorization.
     */
    @Generated
    @Selector("authorizationStatus")
    @NUInt
    public native long authorizationStatus();
}
multi-os-engine/moe-core
moe.apple/moe.platform.ios/src/main/java/apple/homekit/HMHomeManager.java
Java
apache-2.0
9,411
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.network.shuffle;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Future;

import com.codahale.metrics.MetricSet;
import com.google.common.collect.Lists;
import org.apache.spark.network.client.RpcResponseCallback;
import org.apache.spark.network.shuffle.protocol.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.spark.network.TransportContext;
import org.apache.spark.network.client.TransportClient;
import org.apache.spark.network.client.TransportClientBootstrap;
import org.apache.spark.network.client.TransportClientFactory;
import org.apache.spark.network.crypto.AuthClientBootstrap;
import org.apache.spark.network.sasl.SecretKeyHolder;
import org.apache.spark.network.server.NoOpRpcHandler;
import org.apache.spark.network.util.TransportConf;

/**
 * Client for reading shuffle blocks which points to an external (outside of executor) server.
 * This is instead of reading shuffle blocks directly from other executors (via
 * BlockTransferService), which has the downside of losing the shuffle data if we lose the
 * executors.
 */
public class ExternalShuffleClient extends ShuffleClient {
  private static final Logger logger = LoggerFactory.getLogger(ExternalShuffleClient.class);

  private final TransportConf conf;
  private final boolean authEnabled;
  private final SecretKeyHolder secretKeyHolder;
  private final long registrationTimeoutMs;

  // Both are set by init(); they are null until it has been called.
  protected TransportClientFactory clientFactory;
  protected String appId;

  /**
   * Creates an external shuffle client, with SASL optionally enabled. If SASL is not enabled,
   * then secretKeyHolder may be null.
   *
   * @param conf transport configuration.
   * @param secretKeyHolder provider of the authentication secret; may be null when
   *                        {@code authEnabled} is false.
   * @param authEnabled whether connection-level authentication is enabled.
   * @param registrationTimeoutMs timeout, in milliseconds, of the synchronous executor
   *                              registration RPC.
   */
  public ExternalShuffleClient(
      TransportConf conf,
      SecretKeyHolder secretKeyHolder,
      boolean authEnabled,
      long registrationTimeoutMs) {
    this.conf = conf;
    this.secretKeyHolder = secretKeyHolder;
    this.authEnabled = authEnabled;
    this.registrationTimeoutMs = registrationTimeoutMs;
  }

  /**
   * Fails fast if {@link #init(String)} has not been called yet.
   *
   * This was previously a bare {@code assert}, which is a no-op unless the JVM is started with
   * {@code -ea}; without assertions enabled, a missing init() only surfaced later as an opaque
   * NullPointerException. An explicit check reports the misuse unconditionally and clearly.
   *
   * @throws IllegalStateException if init() has not been called.
   */
  protected void checkInit() {
    if (appId == null) {
      throw new IllegalStateException("Called before init()");
    }
  }

  /**
   * Initializes the ShuffleClient, specifying this Executor's appId.
   * Must be called before any other method on the ShuffleClient.
   */
  public void init(String appId) {
    this.appId = appId;
    TransportContext context = new TransportContext(conf, new NoOpRpcHandler(), true, true);
    List<TransportClientBootstrap> bootstraps = Lists.newArrayList();
    if (authEnabled) {
      // Authentication is negotiated per-connection via a client bootstrap.
      bootstraps.add(new AuthClientBootstrap(conf, appId, secretKeyHolder));
    }
    clientFactory = context.createClientFactory(bootstraps);
  }

  /**
   * Fetches the given blocks from the external shuffle service at host:port, reporting results
   * (success or failure, per block) to the listener. Failures to even start the fetch are
   * reported as a failure of every requested block.
   */
  @Override
  public void fetchBlocks(
      String host,
      int port,
      String execId,
      String[] blockIds,
      BlockFetchingListener listener,
      DownloadFileManager downloadFileManager) {
    checkInit();
    logger.debug("External shuffle fetch from {}:{} (executor id {})", host, port, execId);
    try {
      // Deferred starter so RetryingBlockFetcher can re-create the connection on each (re)try.
      RetryingBlockFetcher.BlockFetchStarter blockFetchStarter =
          (blockIds1, listener1) -> {
            TransportClient client = clientFactory.createClient(host, port);
            new OneForOneBlockFetcher(client, appId, execId,
              blockIds1, listener1, conf, downloadFileManager).start();
          };

      int maxRetries = conf.maxIORetries();
      if (maxRetries > 0) {
        // Note this Fetcher will correctly handle maxRetries == 0; we avoid it just in case there's
        // a bug in this code. We should remove the if statement once we're sure of the stability.
        new RetryingBlockFetcher(conf, blockFetchStarter, blockIds, listener).start();
      } else {
        blockFetchStarter.createAndStart(blockIds, listener);
      }
    } catch (Exception e) {
      logger.error("Exception while beginning fetchBlocks", e);
      for (String blockId : blockIds) {
        listener.onBlockFetchFailure(blockId, e);
      }
    }
  }

  /** Returns the aggregated metrics of the underlying transport client factory. */
  @Override
  public MetricSet shuffleMetrics() {
    checkInit();
    return clientFactory.getAllMetrics();
  }

  /**
   * Registers this executor with an external shuffle server. This registration is required to
   * inform the shuffle server about where and how we store our shuffle files.
   *
   * @param host Host of shuffle server.
   * @param port Port of shuffle server.
   * @param execId This Executor's id.
   * @param executorInfo Contains all info necessary for the service to find our shuffle files.
   */
  public void registerWithShuffleServer(
      String host,
      int port,
      String execId,
      ExecutorShuffleInfo executorInfo) throws IOException, InterruptedException {
    checkInit();
    // try-with-resources ensures the registration connection is closed even on failure.
    try (TransportClient client = clientFactory.createClient(host, port)) {
      ByteBuffer registerMessage = new RegisterExecutor(appId, execId, executorInfo).toByteBuffer();
      client.sendRpcSync(registerMessage, registrationTimeoutMs);
    }
  }

  /**
   * Asks the external shuffle service on host:port to remove the given blocks stored for the
   * given executor.
   *
   * @return a future holding the number of blocks actually removed; completes with 0 (never
   *         exceptionally) if the removal RPC fails or its response cannot be decoded.
   */
  public Future<Integer> removeBlocks(
      String host,
      int port,
      String execId,
      String[] blockIds) throws IOException, InterruptedException {
    checkInit();
    CompletableFuture<Integer> numRemovedBlocksFuture = new CompletableFuture<>();
    ByteBuffer removeBlocksMessage = new RemoveBlocks(appId, execId, blockIds).toByteBuffer();
    final TransportClient client = clientFactory.createClient(host, port);
    client.sendRpc(removeBlocksMessage, new RpcResponseCallback() {
      @Override
      public void onSuccess(ByteBuffer response) {
        try {
          BlockTransferMessage msgObj = BlockTransferMessage.Decoder.fromByteBuffer(response);
          numRemovedBlocksFuture.complete(((BlocksRemoved) msgObj).numRemovedBlocks);
        } catch (Throwable t) {
          // A malformed response is logged and treated as "nothing removed" rather than failing
          // the caller.
          logger.warn("Error trying to remove RDD blocks " + Arrays.toString(blockIds) +
            " via external shuffle service from executor: " + execId, t);
          numRemovedBlocksFuture.complete(0);
        } finally {
          client.close();
        }
      }

      @Override
      public void onFailure(Throwable e) {
        logger.warn("Error trying to remove RDD blocks " + Arrays.toString(blockIds) +
          " via external shuffle service from executor: " + execId, e);
        numRemovedBlocksFuture.complete(0);
        client.close();
      }
    });
    return numRemovedBlocksFuture;
  }

  /** Closes the underlying client factory; idempotent with respect to the factory. */
  @Override
  public void close() {
    checkInit();
    if (clientFactory != null) {
      clientFactory.close();
      clientFactory = null;
    }
  }
}
actuaryzhang/spark
common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleClient.java
Java
apache-2.0
7,360
/*
Copyright 2017 Ahmed Zaher

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package main

import (
	"fmt"
	"os"
	"strings"

	getopt "github.com/kesselborn/go-getopt"
)

// The four variables below are injected at build time via
// `go build -ldflags "-X main.<Name>=<value>"`; they default to the empty
// string when the tool is built without them.

// GitCommit is the git commit hash string,
// gets passed from the command line using a binary release of this tool.
var GitCommit string

// BuildTimestamp is the current timestamp in a string format,
// gets passed from the command line using a binary release of this tool.
var BuildTimestamp string

// ReleaseVersion is the desired release version string that represents the version of this executable.
// gets passed from the command line using a binary release of this tool.
var ReleaseVersion string

// GoVersion indicates which version of Go has been used to build this binary.
// gets passed from the command line using a binary release of this tool.
var GoVersion string

// configurations holds the fully resolved command-line/environment options
// that drive a build.
type configurations struct {
	WorkDir         string // directory containing the project sources and its Git repository
	Tag             string // tag to build from; overrides Commit and Branch when set
	Release         string // release version string embedded into the binary
	IgnoreTagPrefix string // prefix stripped from the tag when deriving Release
	Package         string // package holding the injectable public variables
	Commit          string // commit to build from; overrides Branch when set
	Branch          string // branch to build from (least specific selector)
	Verbose         bool   // enables verbose output
}

// configure parses the command line (and matching REGO_* environment
// variables) into conf. It returns a non-empty string when the program should
// just print that text and exit (usage, help, or version output), and a
// non-nil error when parsing fails.
func configure(conf *configurations) (string, error) {

	var e error
	var workDirectory string

	// Default the working directory to the process's current directory.
	if workDirectory, e = os.Getwd(); e != nil {
		return "", e
	}

	parser := getopt.Options{
		Description: "Builds and installs a binary release of a Golang source code while embedding its release information - through a group of exported public variables in the source - based on the current status of its Git repository, all the source files must be committed into the local repository before running this command or it will complain, this tool assumes that Golang (with a valid 'GOROOT' and 'GOPATH' environment variables) and Git source control are installed and fully working though shell.",
		Definitions: []getopt.Option{
			{
				OptionDefinition: "work-directory|w|REGO_WORK_DIR",
				Description:      "The working directory that contains the project source files and its Git repository",
				Flags:            getopt.Optional | getopt.ExampleIsDefault,
				DefaultValue:     workDirectory,
			},
			{
				OptionDefinition: "branch|b|REGO_BRANCH",
				Description:      "The branch name of where the binary release source is going to be taken from, the command automatically picks the most recent commit hash in the specified branch, the commit hash string is passed to the binary release while building through the public variable 'GitCommit'",
				Flags:            getopt.Optional | getopt.ExampleIsDefault,
				DefaultValue:     "develop",
			},
			{
				OptionDefinition: "commit|c|REGO_COMMIT",
				Description:      "The commit hash string of where the binary release source is going to be taken from, specifying this option causes the '--branch' option to be ignored since this option is more specific, the commit hash string is passed to the binary release while building through the public variable 'GitCommit'",
				Flags:            getopt.Optional | getopt.ExampleIsDefault,
				DefaultValue:     "",
			},
			{
				OptionDefinition: "tag|t|REGO_TAG",
				Description:      "The tag name of where the binary release source is going to be taken from, causes the '--branch' and '--commit' options to be ignored since this option is more specific, the commit hash string is passed to the binary release while building through the public variable 'GitCommit'",
				Flags:            getopt.Optional | getopt.ExampleIsDefault,
				DefaultValue:     "",
			},
			{
				OptionDefinition: "release|r|REGO_RELEASE",
				Description:      "The string that is meant to represent the final binary release version, if the '--tag' option is specified this option is automatically calculated with consideration of '--ignore-tag-prefix' option if specified to represent the tag name, the value of this option is passed to the binary release while building through the public variable 'ReleaseVersion'",
				Flags:            getopt.Optional | getopt.ExampleIsDefault,
				DefaultValue:     "SNAPSHOT",
			},
			{
				OptionDefinition: "package|p|REGO_PACKAGE",
				Description: "The package name of which contains the declarations of the public variables" +
					" (GitCommit, BuildTimestamp, ReleaseVersion, GoVersion) which represent the commit hash of where the binary release source has been pulled from, the timestamp of when the build has be triggered, the release version string, the Golang version that has been used in the build, respectively",
				Flags:        getopt.Optional | getopt.ExampleIsDefault,
				DefaultValue: "main",
			},
			{
				OptionDefinition: "ignore-tag-prefix|i|REGO_IGNORE_TAG_PREFIX",
				Description:      "If the '--tag' option is specified, this option trims the specified prefix off the tag name while calculating the release version string",
				Flags:            getopt.Optional | getopt.ExampleIsDefault,
				DefaultValue:     "",
			},
			{
				OptionDefinition: "verbose",
				Description:      "Shows more verbose output",
				Flags:            getopt.Flag,
				DefaultValue:     false,
			},
			{
				OptionDefinition: "version|v",
				Description:      "Prints the version and exits",
				Flags:            getopt.Flag,
				DefaultValue:     false,
			},
		},
	}

	var err *getopt.GetOptError
	var options map[string]getopt.OptionValue

	// Handle the early-exit outputs first: parse errors, usage/help requests,
	// and the version banner. Each returns without filling conf.
	if options, _, _, err =
		parser.ParseCommandLine(); err != nil {
		return "", fmt.Errorf("failed with error code: %v, %v", err.ErrorCode, err.Error())
	} else if help, wantsHelp := options["help"]; wantsHelp && help.String == "usage" {
		return parser.Usage(), nil
	} else if wantsHelp && help.String == "help" {
		return parser.Help(), nil
	} else if options["version"].Bool {
		return fmt.Sprintf("Release: %v%vCommit: %v%vBuild Time: %v%vBuilt with: %v",
			ReleaseVersion, NewLine(), GitCommit, NewLine(), BuildTimestamp, NewLine(), GoVersion), nil
	}

	// Normal path: copy the parsed values into conf, trimming surrounding
	// whitespace from all string options.
	conf.Verbose = options["verbose"].Bool
	conf.WorkDir = strings.TrimSpace(options["work-directory"].String)
	conf.Package = strings.TrimSpace(options["package"].String)
	conf.Branch = strings.TrimSpace(options["branch"].String)
	conf.Commit = strings.TrimSpace(options["commit"].String)
	conf.Tag = strings.TrimSpace(options["tag"].String)
	conf.IgnoreTagPrefix = strings.TrimSpace(options["ignore-tag-prefix"].String)
	conf.Release = strings.TrimSpace(options["release"].String)

	return "", nil
}
adzr/rego
config.go
GO
apache-2.0
6,782
// Copyright (c) 2017 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package validator

import (
	"fmt"
	"strconv"

	"github.com/m3db/m3metrics/aggregation"
	"github.com/m3db/m3metrics/filters"
	"github.com/m3db/m3metrics/metric"
	"github.com/m3db/m3metrics/policy"
	"github.com/m3db/m3metrics/rules/validator/namespace"
	"github.com/m3db/m3metrics/rules/validator/namespace/static"
)

const (
	// By default we only support at most one binary transformation function in between
	// consecutive rollup operations in a pipeline.
	defaultMaxTransformationDerivativeOrder = 1

	// By default we allow at most one level of rollup in a pipeline.
	defaultMaxRollupLevels = 1
)

// MetricTypesFn determines the possible metric types based on a set of tag based filters.
type MetricTypesFn func(tagFilters filters.TagFilterValueMap) ([]metric.Type, error)

// Options provide a set of options for the validator.
//
// The "default allowed" setters configure fallbacks that apply to every metric
// type, while the per-type "…For" setters override those defaults for a single
// metric type.
type Options interface {
	// SetNamespaceValidator sets the namespace validator.
	SetNamespaceValidator(value namespace.Validator) Options

	// NamespaceValidator returns the namespace validator.
	NamespaceValidator() namespace.Validator

	// SetDefaultAllowedStoragePolicies sets the default list of allowed storage policies.
	SetDefaultAllowedStoragePolicies(value []policy.StoragePolicy) Options

	// SetDefaultAllowedFirstLevelAggregationTypes sets the default list of allowed first-level
	// aggregation types.
	SetDefaultAllowedFirstLevelAggregationTypes(value aggregation.Types) Options

	// SetDefaultAllowedNonFirstLevelAggregationTypes sets the default list of allowed
	// non-first-level aggregation types.
	SetDefaultAllowedNonFirstLevelAggregationTypes(value aggregation.Types) Options

	// SetAllowedStoragePoliciesFor sets the list of allowed storage policies for a given metric type.
	SetAllowedStoragePoliciesFor(t metric.Type, policies []policy.StoragePolicy) Options

	// SetAllowedFirstLevelAggregationTypesFor sets the list of allowed first-level aggregation
	// types for a given metric type.
	SetAllowedFirstLevelAggregationTypesFor(t metric.Type, aggTypes aggregation.Types) Options

	// SetAllowedNonFirstLevelAggregationTypesFor sets the list of allowed non-first-level
	// aggregation types for a given metric type.
	SetAllowedNonFirstLevelAggregationTypesFor(t metric.Type, aggTypes aggregation.Types) Options

	// SetMetricTypesFn sets the metric types function.
	SetMetricTypesFn(value MetricTypesFn) Options

	// MetricTypesFn returns the metric types function.
	MetricTypesFn() MetricTypesFn

	// SetMultiAggregationTypesEnabledFor sets the list of metric types that support
	// multiple aggregation types.
	SetMultiAggregationTypesEnabledFor(value []metric.Type) Options

	// SetRequiredRollupTags sets the list of required rollup tags.
	SetRequiredRollupTags(value []string) Options

	// RequiredRollupTags returns the list of required rollup tags.
	RequiredRollupTags() []string

	// SetMaxTransformationDerivativeOrder sets the maximum supported transformation
	// derivative order between rollup operations in pipelines.
	SetMaxTransformationDerivativeOrder(value int) Options

	// MaxTransformationDerivativeOrder returns the maximum supported transformation
	// derivative order between rollup operations in pipelines.
	MaxTransformationDerivativeOrder() int

	// SetMaxRollupLevels sets the maximum number of rollup operations supported in pipelines.
	SetMaxRollupLevels(value int) Options

	// MaxRollupLevels returns the maximum number of rollup operations supported in pipelines.
	MaxRollupLevels() int

	// SetTagNameInvalidChars sets the list of invalid chars for a tag name.
	SetTagNameInvalidChars(value []rune) Options

	// CheckInvalidCharactersForTagName checks if the given tag name contains invalid characters
	// returning an error if invalid character(s) present.
	CheckInvalidCharactersForTagName(tagName string) error

	// SetMetricNameInvalidChars sets the list of invalid chars for a metric name.
	SetMetricNameInvalidChars(value []rune) Options

	// CheckInvalidCharactersForMetricName checks if the given metric name contains invalid characters
	// returning an error if invalid character(s) present.
	CheckInvalidCharactersForMetricName(metricName string) error

	// IsAllowedStoragePolicyFor determines whether a given storage policy is allowed for the
	// given metric type.
	IsAllowedStoragePolicyFor(t metric.Type, p policy.StoragePolicy) bool

	// IsMultiAggregationTypesEnabledFor checks if a metric type supports multiple aggregation types.
	IsMultiAggregationTypesEnabledFor(t metric.Type) bool

	// IsAllowedFirstLevelAggregationTypeFor determines whether a given aggregation type is allowed
	// as the first-level aggregation for the given metric type.
	IsAllowedFirstLevelAggregationTypeFor(t metric.Type, aggType aggregation.Type) bool

	// IsAllowedNonFirstLevelAggregationTypeFor determines whether a given aggregation type is
	// allowed as the non-first-level aggregation for the given metric type.
	IsAllowedNonFirstLevelAggregationTypeFor(t metric.Type, aggType aggregation.Type) bool
}

// validationMetadata is the per-metric-type override of the allowed storage
// policies and aggregation types; any nil map here means "nothing allowed"
// for that category once an override exists for the type.
type validationMetadata struct {
	allowedStoragePolicies       map[policy.StoragePolicy]struct{}
	allowedFirstLevelAggTypes    map[aggregation.Type]struct{}
	allowedNonFirstLevelAggTypes map[aggregation.Type]struct{}
}

// options is the default Options implementation. The default* maps apply to
// all metric types that have no entry in metadatasByType.
type options struct {
	namespaceValidator                          namespace.Validator
	defaultAllowedStoragePolicies               map[policy.StoragePolicy]struct{}
	defaultAllowedFirstLevelAggregationTypes    map[aggregation.Type]struct{}
	defaultAllowedNonFirstLevelAggregationTypes map[aggregation.Type]struct{}
	metricTypesFn                               MetricTypesFn
	multiAggregationTypesEnableFor              map[metric.Type]struct{}
	requiredRollupTags                          []string
	maxTransformationDerivativeOrder            int
	maxRollupLevels                             int
	metricNameInvalidChars                      map[rune]struct{}
	tagNameInvalidChars                         map[rune]struct{}
	metadatasByType                             map[metric.Type]validationMetadata
}

// NewOptions create a new set of validator options.
func NewOptions() Options {
	// Defaults: timers are the only type that supports multiple aggregation
	// types, and namespaces are statically considered valid.
	return &options{
		multiAggregationTypesEnableFor:   map[metric.Type]struct{}{metric.TimerType: struct{}{}},
		maxTransformationDerivativeOrder: defaultMaxTransformationDerivativeOrder,
		maxRollupLevels:                  defaultMaxRollupLevels,
		namespaceValidator:               static.NewNamespaceValidator(static.Valid),
		metadatasByType:                  make(map[metric.Type]validationMetadata),
	}
}

func (o *options) SetNamespaceValidator(value namespace.Validator) Options {
	o.namespaceValidator = value
	return o
}

func (o *options) NamespaceValidator() namespace.Validator {
	return o.namespaceValidator
}

func (o *options) SetDefaultAllowedStoragePolicies(value []policy.StoragePolicy) Options {
	o.defaultAllowedStoragePolicies = toStoragePolicySet(value)
	return o
}

func (o *options) SetDefaultAllowedFirstLevelAggregationTypes(value aggregation.Types) Options {
	o.defaultAllowedFirstLevelAggregationTypes = toAggregationTypeSet(value)
	return o
}

func (o *options) SetDefaultAllowedNonFirstLevelAggregationTypes(value aggregation.Types) Options {
	o.defaultAllowedNonFirstLevelAggregationTypes = toAggregationTypeSet(value)
	return o
}

// The per-type setters below materialize a validationMetadata override for the
// metric type (seeded from the current defaults) before replacing one field.
func (o *options) SetAllowedStoragePoliciesFor(t metric.Type, policies []policy.StoragePolicy) Options {
	metadata := o.findOrCreateMetadata(t)
	metadata.allowedStoragePolicies = toStoragePolicySet(policies)
	o.metadatasByType[t] = metadata
	return o
}

func (o *options) SetAllowedFirstLevelAggregationTypesFor(t metric.Type, aggTypes aggregation.Types) Options {
	metadata := o.findOrCreateMetadata(t)
	metadata.allowedFirstLevelAggTypes = toAggregationTypeSet(aggTypes)
	o.metadatasByType[t] = metadata
	return o
}

func (o *options) SetAllowedNonFirstLevelAggregationTypesFor(t metric.Type, aggTypes aggregation.Types) Options {
	metadata := o.findOrCreateMetadata(t)
	metadata.allowedNonFirstLevelAggTypes = toAggregationTypeSet(aggTypes)
	o.metadatasByType[t] = metadata
	return o
}

func (o *options) SetMetricTypesFn(value MetricTypesFn) Options {
	o.metricTypesFn = value
	return o
}

func (o *options) MetricTypesFn() MetricTypesFn {
	return o.metricTypesFn
}

func (o *options) SetMultiAggregationTypesEnabledFor(value []metric.Type) Options {
	o.multiAggregationTypesEnableFor = toMetricTypeSet(value)
	return o
}

func (o *options) SetRequiredRollupTags(value []string) Options {
	// Copy so later mutations of the caller's slice cannot affect the options.
	requiredRollupTags := make([]string, len(value))
	copy(requiredRollupTags, value)
	o.requiredRollupTags = requiredRollupTags
	return o
}

func (o *options) RequiredRollupTags() []string {
	return o.requiredRollupTags
}

func (o *options) SetMaxTransformationDerivativeOrder(value int) Options {
	o.maxTransformationDerivativeOrder = value
	return o
}

func (o *options) MaxTransformationDerivativeOrder() int {
	return o.maxTransformationDerivativeOrder
}

func (o *options) SetMaxRollupLevels(value int) Options {
	o.maxRollupLevels = value
	return o
}

func (o *options) MaxRollupLevels() int {
	return o.maxRollupLevels
}

func (o *options) SetTagNameInvalidChars(values []rune) Options {
	tagNameInvalidChars := make(map[rune]struct{}, len(values))
	for _, v := range values {
		tagNameInvalidChars[v] = struct{}{}
	}
	o.tagNameInvalidChars = tagNameInvalidChars
	return o
}

func (o *options) CheckInvalidCharactersForTagName(tagName string) error {
	return validateChars(tagName, o.tagNameInvalidChars)
}

func (o *options) SetMetricNameInvalidChars(values []rune) Options {
	metricNameInvalidChars := make(map[rune]struct{}, len(values))
	for _, v := range values {
		metricNameInvalidChars[v] = struct{}{}
	}
	o.metricNameInvalidChars = metricNameInvalidChars
	return o
}

func (o *options) CheckInvalidCharactersForMetricName(metricName string) error {
	return validateChars(metricName, o.metricNameInvalidChars)
}

// A per-type override, when present, fully replaces the defaults for the
// lookup (the defaults are NOT consulted as a second fallback).
func (o *options) IsAllowedStoragePolicyFor(t metric.Type, p policy.StoragePolicy) bool {
	if metadata, exists := o.metadatasByType[t]; exists {
		_, found := metadata.allowedStoragePolicies[p]
		return found
	}
	_, found := o.defaultAllowedStoragePolicies[p]
	return found
}

func (o *options) IsMultiAggregationTypesEnabledFor(t metric.Type) bool {
	_, exists := o.multiAggregationTypesEnableFor[t]
	return exists
}

func (o *options) IsAllowedFirstLevelAggregationTypeFor(t metric.Type, aggType aggregation.Type) bool {
	if metadata, exists := o.metadatasByType[t]; exists {
		_, found := metadata.allowedFirstLevelAggTypes[aggType]
		return found
	}
	_, found := o.defaultAllowedFirstLevelAggregationTypes[aggType]
	return found
}

func (o *options) IsAllowedNonFirstLevelAggregationTypeFor(t metric.Type, aggType aggregation.Type) bool {
	if metadata, exists := o.metadatasByType[t]; exists {
		_, found := metadata.allowedNonFirstLevelAggTypes[aggType]
		return found
	}
	_, found := o.defaultAllowedNonFirstLevelAggregationTypes[aggType]
	return found
}

// findOrCreateMetadata returns the existing override for t, or a new one
// seeded with the current default sets (shared by reference, not copied).
func (o *options) findOrCreateMetadata(t metric.Type) validationMetadata {
	if metadata, found := o.metadatasByType[t]; found {
		return metadata
	}
	return validationMetadata{
		allowedStoragePolicies:       o.defaultAllowedStoragePolicies,
		allowedFirstLevelAggTypes:    o.defaultAllowedFirstLevelAggregationTypes,
		allowedNonFirstLevelAggTypes: o.defaultAllowedNonFirstLevelAggregationTypes,
	}
}

func toStoragePolicySet(policies []policy.StoragePolicy) map[policy.StoragePolicy]struct{} {
	m := make(map[policy.StoragePolicy]struct{}, len(policies))
	for _, p := range policies {
		m[p] = struct{}{}
	}
	return m
}

func toAggregationTypeSet(aggTypes aggregation.Types) map[aggregation.Type]struct{} {
	m := make(map[aggregation.Type]struct{}, len(aggTypes))
	for _, t := range aggTypes {
		m[t] = struct{}{}
	}
	return m
}

func toMetricTypeSet(metricTypes []metric.Type) map[metric.Type]struct{} {
	m := make(map[metric.Type]struct{}, len(metricTypes))
	for _, mt := range metricTypes {
		m[mt] = struct{}{}
	}
	return m
}

// validateChars reports an error if str contains any rune present in
// invalidChars; an empty/nil invalidChars set allows everything.
func validateChars(str string, invalidChars map[rune]struct{}) error {
	if len(invalidChars) == 0 {
		return nil
	}

	// Validate that given string doesn't contain an invalid character.
	for _, char := range str {
		if _, exists := invalidChars[char]; exists {
			return fmt.Errorf("%s contains invalid character %s", str, strconv.QuoteRune(char))
		}
	}
	return nil
}
m3db/m3metrics
rules/validator/options.go
GO
apache-2.0
13,478
package com.github.davidmoten.rtree; import static com.github.davidmoten.rtree.geometry.Geometries.rectangle; import static java.util.Optional.of; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Optional; import com.github.davidmoten.guavamini.Lists; import com.github.davidmoten.guavamini.annotations.VisibleForTesting; import com.github.davidmoten.rtree.geometry.Circle; import com.github.davidmoten.rtree.geometry.Geometry; import com.github.davidmoten.rtree.geometry.HasGeometry; import com.github.davidmoten.rtree.geometry.Intersects; import com.github.davidmoten.rtree.geometry.Line; import com.github.davidmoten.rtree.geometry.Point; import com.github.davidmoten.rtree.geometry.Rectangle; import com.github.davidmoten.rtree.internal.Comparators; import com.github.davidmoten.rtree.internal.NodeAndEntries; import com.github.davidmoten.rtree.internal.operators.OperatorBoundedPriorityQueue; import rx.Observable; import rx.functions.Func1; import rx.functions.Func2; /** * Immutable in-memory 2D R-Tree with configurable splitter heuristic. * * @param <T> * the entry value type * @param <S> * the entry geometry type */ public final class RTree<T, S extends Geometry> { public static final Rectangle ZERO_RECTANGLE = rectangle(0, 0, 0, 0); private final Optional<? extends Node<T, S>> root; private final Context<T, S> context; /** * Benchmarks show that this is a good choice for up to O(10,000) entries when * using Quadratic splitter (Guttman). */ public static final int MAX_CHILDREN_DEFAULT_GUTTMAN = 4; /** * Benchmarks show that this is the sweet spot for up to O(10,000) entries when * using R*-tree heuristics. */ public static final int MAX_CHILDREN_DEFAULT_STAR = 4; /** * Current size in Entries of the RTree. 
*/ private final int size; private static final Func2<Optional<Rectangle>, Entry<Object, Geometry>, Optional<Rectangle>> RECTANGLE_ACCUMULATOR = (rectangle, entry) -> rectangle.map(value -> Optional.of(value.add(entry.geometry().mbr()))) .orElseGet(() -> Optional.of(entry.geometry().mbr())); /** * Constructor. * * @param root * the root node of the tree if present * @param context * options for the R-tree */ private RTree(Optional<? extends Node<T, S>> root, int size, Context<T, S> context) { this.root = root; this.size = size; this.context = context; } private RTree() { this(Optional.empty(), 0, null); } /** * Constructor. * * @param root * the root node of the R-tree * @param context * options for the R-tree */ private RTree(Node<T, S> root, int size, Context<T, S> context) { this(of(root), size, context); } static <T, S extends Geometry> RTree<T, S> create(Optional<? extends Node<T, S>> root, int size, Context<T, S> context) { return new RTree<T, S>(root, size, context); } /** * Returns a new Builder instance for {@link RTree}. Defaults to * maxChildren=128, minChildren=64, splitter=QuadraticSplitter. * * @param <T> * the value type of the entries in the tree * @param <S> * the geometry type of the entries in the tree * @return a new RTree instance */ public static <T, S extends Geometry> RTree<T, S> create() { return new Builder().create(); } /** * Construct an Rtree through STR bulk loading. Default to maxChildren=128, * minChildren=64 and fill nodes by a factor of 0.7 * * @param entries * entries to add to the R-tree * * @param <T> * the value type of the entries in the tree * @param <S> * the geometry type of the entries in the tree * @return a new RTree instance */ public static <T, S extends Geometry> RTree<T, S> create(List<Entry<T, S>> entries) { return new Builder().create(entries); } /** * The tree is scanned for depth and the depth returned. This involves recursing * down to the leaf level of the tree to get the current depth. 
Should be * <code>log(n)</code> in complexity. * * @return depth of the R-tree */ public int calculateDepth() { return calculateDepth(root); } private static <T, S extends Geometry> int calculateDepth(Optional<? extends Node<T, S>> root) { return root.map(node -> calculateDepth(node, 0)).orElse(0); } private static <T, S extends Geometry> int calculateDepth(Node<T, S> node, int depth) { if (node instanceof Leaf) { return depth + 1; } else { return calculateDepth(((NonLeaf<T, S>) node).child(0), depth + 1); } } /** * When the number of children in an R-tree node drops below this number the * node is deleted and the children are added on to the R-tree again. * * @param minChildren * less than this number of children in a node triggers a node * deletion and redistribution of its members * @return builder */ public static Builder minChildren(int minChildren) { return new Builder().minChildren(minChildren); } /** * Sets the max number of children in an R-tree node. * * @param maxChildren * max number of children in an R-tree node * @return builder */ public static Builder maxChildren(int maxChildren) { return new Builder().maxChildren(maxChildren); } /** * Sets the {@link Splitter} to use when maxChildren is reached. * * @param splitter * the splitter algorithm to use * @return builder */ public static Builder splitter(Splitter splitter) { return new Builder().splitter(splitter); } /** * Sets the node {@link Selector} which decides which branches to follow when * inserting or searching. * * @param selector * determines which branches to follow when inserting or searching * @return builder */ public static Builder selector(Selector selector) { return new Builder().selector(selector); } /** * Sets the splitter to {@link SplitterRStar} and selector to * {@link SelectorRStar} and defaults to minChildren=10. * * @return builder */ public static Builder star() { return new Builder().star(); } /** * RTree Builder. 
*/ public static class Builder { /** * According to http://dbs.mathematik.uni-marburg.de/publications/myPapers * /1990/BKSS90.pdf (R*-tree paper), best filling ratio is 0.4 for both * quadratic split and R*-tree split. */ private static final double DEFAULT_FILLING_FACTOR = 0.4; private static final double DEFAULT_LOADING_FACTOR = 0.7; private Optional<Integer> maxChildren = Optional.empty(); private Optional<Integer> minChildren = Optional.empty(); private Splitter splitter = new SplitterQuadratic(); private Selector selector = new SelectorMinimalAreaIncrease(); private double loadingFactor; private boolean star = false; private Factory<Object, Geometry> factory = Factories.defaultFactory(); private Builder() { loadingFactor = DEFAULT_LOADING_FACTOR; } /** * The factor is used as the fill ratio during bulk loading. * * @param factor * loading factor * @return this */ public Builder loadingFactor(double factor) { this.loadingFactor = factor; return this; } /** * When the number of children in an R-tree node drops below this number the * node is deleted and the children are added on to the R-tree again. * * @param minChildren * less than this number of children in a node triggers a * redistribution of its children. * @return builder */ public Builder minChildren(int minChildren) { this.minChildren = of(minChildren); return this; } /** * Sets the max number of children in an R-tree node. * * @param maxChildren * max number of children in R-tree node. * @return builder */ public Builder maxChildren(int maxChildren) { this.maxChildren = of(maxChildren); return this; } /** * Sets the {@link Splitter} to use when maxChildren is reached. * * @param splitter * node splitting method to use * @return builder */ public Builder splitter(Splitter splitter) { this.splitter = splitter; return this; } /** * Sets the node {@link Selector} which decides which branches to follow when * inserting or searching. 
* * @param selector * selects the branch to follow when inserting or searching * @return builder */ public Builder selector(Selector selector) { this.selector = selector; return this; } /** * Sets the splitter to {@link SplitterRStar} and selector to * {@link SelectorRStar} and defaults to minChildren=10. * * @return builder */ public Builder star() { selector = new SelectorRStar(); splitter = new SplitterRStar(); star = true; return this; } @SuppressWarnings("unchecked") public Builder factory(Factory<?, ? extends Geometry> factory) { // TODO could change the signature of Builder to have types to // support this method but would be breaking change for existing // clients this.factory = (Factory<Object, Geometry>) factory; return this; } /** * Builds the {@link RTree}. * * @param <T> * value type * @param <S> * geometry type * @return RTree */ @SuppressWarnings("unchecked") public <T, S extends Geometry> RTree<T, S> create() { setDefaultCapacity(); return new RTree<T, S>(Optional.<Node<T, S>>empty(), 0, new Context<T, S>(minChildren.get(), maxChildren.get(), selector, splitter, (Factory<T, S>) factory)); } /** * Create an RTree by bulk loading, using the STR method. STR: a simple and * efficient algorithm for R-tree packing * http://ieeexplore.ieee.org/abstract/document/582015/ * <p> * Note: this method mutates the input entries, the internal order of the List * may be changed. 
* </p> * * @param entries * entries to be added to the r-tree * @return a loaded RTree */ @SuppressWarnings("unchecked") public <T, S extends Geometry> RTree<T, S> create(List<Entry<T, S>> entries) { setDefaultCapacity(); Context<T, S> context = new Context<T, S>(minChildren.get(), maxChildren.get(), selector, splitter, (Factory<T, S>) factory); return packingSTR(entries, true, entries.size(), context); } private void setDefaultCapacity() { if (!maxChildren.isPresent()) { if (star) { maxChildren = Optional.of(MAX_CHILDREN_DEFAULT_STAR); } else { maxChildren = Optional.of(MAX_CHILDREN_DEFAULT_GUTTMAN); } } if (!minChildren.isPresent()) { minChildren = Optional.of((int) Math.round(maxChildren.get() * DEFAULT_FILLING_FACTOR)); } } @SuppressWarnings("unchecked") private <T, S extends Geometry> RTree<T, S> packingSTR(List<? extends HasGeometry> objects, boolean isLeaf, int size, Context<T, S> context) { int capacity = (int) Math.round(maxChildren.get() * loadingFactor); int nodeCount = (int) Math.ceil(1.0 * objects.size() / capacity); if (nodeCount == 0) { return create(); } else if (nodeCount == 1) { Node<T, S> root; if (isLeaf) { root = context.factory().createLeaf((List<Entry<T, S>>) objects, context); } else { root = context.factory().createNonLeaf((List<Node<T, S>>) objects, context); } return new RTree<T, S>(of(root), size, context); } int nodePerSlice = (int) Math.ceil(Math.sqrt(nodeCount)); int sliceCapacity = nodePerSlice * capacity; int sliceCount = (int) Math.ceil(1.0 * objects.size() / sliceCapacity); Collections.sort(objects, new MidComparator((short) 0)); List<Node<T, S>> nodes = new ArrayList<Node<T, S>>(nodeCount); for (int s = 0; s < sliceCount; s++) { @SuppressWarnings("rawtypes") List slice = objects.subList(s * sliceCapacity, Math.min((s + 1) * sliceCapacity, objects.size())); Collections.sort(slice, new MidComparator((short) 1)); for (int i = 0; i < slice.size(); i += capacity) { if (isLeaf) { List<Entry<T, S>> entries = slice.subList(i, 
Math.min(slice.size(), i + capacity)); Node<T, S> leaf = context.factory().createLeaf(entries, context); nodes.add(leaf); } else { List<Node<T, S>> children = slice.subList(i, Math.min(slice.size(), i + capacity)); Node<T, S> nonleaf = context.factory().createNonLeaf(children, context); nodes.add(nonleaf); } } } return packingSTR(nodes, false, size, context); } private static final class MidComparator implements Comparator<HasGeometry> { private final short dimension; // leave space for multiple dimensions, 0 for x, 1 for y, // ... public MidComparator(short dim) { dimension = dim; } @Override public int compare(HasGeometry o1, HasGeometry o2) { return Double.compare(mid(o1), mid(o2)); } private double mid(HasGeometry o) { Rectangle mbr = o.geometry().mbr(); if (dimension == 0) return (mbr.x1() + mbr.x2()) / 2; else return (mbr.y1() + mbr.y2()) / 2; } } } /** * Returns an immutable copy of the RTree with the addition of given entry. * * @param entry * item to add to the R-tree. * @return a new immutable R-tree including the new entry */ @SuppressWarnings("unchecked") public RTree<T, S> add(Entry<? extends T, ? extends S> entry) { if (root.isPresent()) { List<Node<T, S>> nodes = root.get().add(entry); Node<T, S> node; if (nodes.size() == 1) node = nodes.get(0); else { node = context.factory().createNonLeaf(nodes, context); } return new RTree<T, S>(node, size + 1, context); } else { Leaf<T, S> node = context.factory().createLeaf(Lists.newArrayList((Entry<T, S>) entry), context); return new RTree<T, S>(node, size + 1, context); } } /** * Returns an immutable copy of the RTree with the addition of an entry * comprised of the given value and Geometry. 
* * @param value * the value of the {@link Entry} to be added * @param geometry * the geometry of the {@link Entry} to be added * @return a new immutable R-tree including the new entry */ public RTree<T, S> add(T value, S geometry) { return add(context.factory().createEntry(value, geometry)); } /** * Returns an immutable RTree with the current entries and the additional * entries supplied as a parameter. * * @param entries * entries to add * @return R-tree with entries added */ public RTree<T, S> add(Iterable<Entry<T, S>> entries) { RTree<T, S> tree = this; for (Entry<T, S> entry : entries) tree = tree.add(entry); return tree; } /** * Returns the Observable sequence of trees created by progressively adding * entries. * * @param entries * the entries to add * @return a sequence of trees */ public Observable<RTree<T, S>> add(Observable<Entry<T, S>> entries) { return entries.scan(this, (tree, entry) -> tree.add(entry)); } /** * Returns the Observable sequence of trees created by progressively deleting * entries. * * @param entries * the entries to add * @param all * if true delete all matching otherwise just first matching * @return a sequence of trees */ public Observable<RTree<T, S>> delete(Observable<Entry<T, S>> entries, final boolean all) { return entries.scan(this, new Func2<RTree<T, S>, Entry<T, S>, RTree<T, S>>() { @Override public RTree<T, S> call(RTree<T, S> tree, Entry<T, S> entry) { return tree.delete(entry, all); } }); } /** * Returns a new R-tree with the given entries deleted. If <code>all</code> is * false deletes only one if exists. If <code>all</code> is true deletes all * matching entries. 
* * @param entries * entries to delete * @param all * if false deletes one if exists else deletes all * @return R-tree with entries deleted */ public RTree<T, S> delete(Iterable<Entry<T, S>> entries, boolean all) { RTree<T, S> tree = this; for (Entry<T, S> entry : entries) tree = tree.delete(entry, all); return tree; } /** * Returns a new R-tree with the given entries deleted but only one matching * occurence of each entry is deleted. * * @param entries * entries to delete * @return R-tree with entries deleted up to one matching occurence per entry */ public RTree<T, S> delete(Iterable<Entry<T, S>> entries) { RTree<T, S> tree = this; for (Entry<T, S> entry : entries) tree = tree.delete(entry); return tree; } /** * If <code>all</code> is false deletes one entry matching the given value and * Geometry. If <code>all</code> is true deletes all entries matching the given * value and geometry. This method has no effect if the entry is not present. * The entry must match on both value and geometry to be deleted. * * @param value * the value of the {@link Entry} to be deleted * @param geometry * the geometry of the {@link Entry} to be deleted * @param all * if false deletes one if exists else deletes all * @return a new immutable R-tree without one or many instances of the specified * entry if it exists otherwise returns the original RTree object */ public RTree<T, S> delete(T value, S geometry, boolean all) { return delete(context.factory().createEntry(value, geometry), all); } /** * Deletes maximum one entry matching the given value and geometry. This method * has no effect if the entry is not present. The entry must match on both value * and geometry to be deleted. 
* * @param value * the value to be matched for deletion * @param geometry * the geometry to be matched for deletion * @return an immutable RTree without one entry (if found) matching the given * value and geometry */ public RTree<T, S> delete(T value, S geometry) { return delete(context.factory().createEntry(value, geometry), false); } /** * Deletes one or all matching entries depending on the value of * <code>all</code>. If multiple copies of the entry are in the R-tree only one * will be deleted if all is false otherwise all matching entries will be * deleted. The entry must match on both value and geometry to be deleted. * * @param entry * the {@link Entry} to be deleted * @param all * if true deletes all matches otherwise deletes first found * @return a new immutable R-tree without one instance of the specified entry */ public RTree<T, S> delete(Entry<? extends T, ? extends S> entry, boolean all) { if (root.isPresent()) { NodeAndEntries<T, S> nodeAndEntries = root.get().delete(entry, all); if (nodeAndEntries.node().isPresent() && nodeAndEntries.node().get() == root.get()) return this; else return new RTree<T, S>(nodeAndEntries.node(), size - nodeAndEntries.countDeleted() - nodeAndEntries.entriesToAdd().size(), context).add(nodeAndEntries.entriesToAdd()); } else return this; } /** * Deletes one entry if it exists, returning an immutable copy of the RTree * without that entry. If multiple copies of the entry are in the R-tree only * one will be deleted. The entry must match on both value and geometry to be * deleted. * * @param entry * the {@link Entry} to be deleted * @return a new immutable R-tree without one instance of the specified entry */ public RTree<T, S> delete(Entry<? extends T, ? extends S> entry) { return delete(entry, false); } /** * <p> * Returns an Observable sequence of {@link Entry} that satisfy the given * condition. 
Note that this method is well-behaved only if: * * * <p> * {@code condition(g)} is true for {@link Geometry} g implies * {@code condition(r)} is true for the minimum bounding rectangles of the * ancestor nodes. * * <p> * {@code distance(g) < D} is an example of such a condition. * * * @param condition * return Entries whose geometry satisfies the given condition * @return sequence of matching entries */ @VisibleForTesting Observable<Entry<T, S>> search(Func1<? super Geometry, Boolean> condition) { return root .map(node -> Observable.unsafeCreate(new OnSubscribeSearch<>(node, condition))) .orElseGet(Observable::empty); } /** * Returns a predicate function that indicates if {@link Geometry} intersects * with a given rectangle. * * @param r * the rectangle to check intersection with * @return whether the geometry and the rectangle intersect */ public static Func1<Geometry, Boolean> intersects(final Rectangle r) { return g -> g.intersects(r); } /** * Returns the always true predicate. See {@link RTree#entries()} for example * use. */ private static final Func1<Geometry, Boolean> ALWAYS_TRUE = rectangle -> true; /** * Returns an {@link Observable} sequence of all {@link Entry}s in the R-tree * whose minimum bounding rectangle intersects with the given rectangle. * * @param r * rectangle to check intersection with the entry mbr * @return entries that intersect with the rectangle r */ public Observable<Entry<T, S>> search(final Rectangle r) { return search(intersects(r)); } /** * Returns an {@link Observable} sequence of all {@link Entry}s in the R-tree * whose minimum bounding rectangle intersects with the given point. 
* * @param p * point to check intersection with the entry mbr * @return entries that intersect with the point p */ public Observable<Entry<T, S>> search(final Point p) { return search(p.mbr()); } public Observable<Entry<T, S>> search(Circle circle) { return search(circle, Intersects.geometryIntersectsCircle); } public Observable<Entry<T, S>> search(Line line) { return search(line, Intersects.geometryIntersectsLine); } /** * Returns an {@link Observable} sequence of all {@link Entry}s in the R-tree * whose minimum bounding rectangles are strictly less than maxDistance from the * given rectangle. * * @param r * rectangle to measure distance from * @param maxDistance * entries returned must be within this distance from rectangle r * @return the sequence of matching entries */ public Observable<Entry<T, S>> search(final Rectangle r, final double maxDistance) { return search(g -> g.distance(r) < maxDistance); } /** * Returns the intersections with the the given (arbitrary) geometry using an * intersection function to filter the search results returned from a search of * the mbr of <code>g</code>. * * @param <R> * type of geometry being searched for intersection with * @param g * geometry being searched for intersection with * @param intersects * function to determine if the two geometries intersect * @return a sequence of entries that intersect with g */ public <R extends Geometry> Observable<Entry<T, S>> search(final R g, final Func2<? super S, ? super R, Boolean> intersects) { return search(g.mbr()).filter(entry -> intersects.call(entry.geometry(), g)); } /** * Returns all entries strictly less than <code>maxDistance</code> from the * given geometry. Because the geometry may be of an arbitrary type it is * necessary to also pass a distance function. 
* * @param <R> * type of the geometry being searched for * @param g * geometry to search for entries within maxDistance of * @param maxDistance * strict max distance that entries must be from g * @param distance * function to calculate the distance between geometries of type S * and R. * @return entries strictly less than maxDistance from g */ public <R extends Geometry> Observable<Entry<T, S>> search(final R g, final double maxDistance, final Func2<? super S, ? super R, Double> distance) { // just use the mbr initially return search(entry -> entry.distance(g.mbr()) < maxDistance) // refine with distance function .filter(entry -> distance.call(entry.geometry(), g) < maxDistance); } /** * Returns an {@link Observable} sequence of all {@link Entry}s in the R-tree * whose minimum bounding rectangles are within maxDistance from the given * point. * * @param p * point to measure distance from * @param maxDistance * entries returned must be within this distance from point p * @return the sequence of matching entries */ public Observable<Entry<T, S>> search(final Point p, final double maxDistance) { return search(p.mbr(), maxDistance); } /** * Returns the nearest k entries (k=maxCount) to the given rectangle where the * entries are strictly less than a given maximum distance from the rectangle. * * @param r * rectangle * @param maxDistance * max distance of returned entries from the rectangle * @param maxCount * max number of entries to return * @return nearest entries to maxCount, in ascending order of distance */ public Observable<Entry<T, S>> nearest(final Rectangle r, final double maxDistance, int maxCount) { return search(r, maxDistance).lift(new OperatorBoundedPriorityQueue<Entry<T, S>>(maxCount, Comparators.<T, S>ascendingDistance(r))); } /** * Returns the nearest k entries (k=maxCount) to the given point where the * entries are strictly less than a given maximum distance from the point. 
* * @param p * point * @param maxDistance * max distance of returned entries from the point * @param maxCount * max number of entries to return * @return nearest entries to maxCount, in ascending order of distance */ public Observable<Entry<T, S>> nearest(final Point p, final double maxDistance, int maxCount) { return nearest(p.mbr(), maxDistance, maxCount); } /** * Returns all entries in the tree as an {@link Observable} sequence. * * @return all entries in the R-tree */ public Observable<Entry<T, S>> entries() { return search(ALWAYS_TRUE); } /** * Returns a {@link Visualizer} for an image of given width and height and * restricted to the given view of the coordinates. The points in the view are * scaled to match the aspect ratio defined by the width and height. * * @param width * of the image in pixels * @param height * of the image in pixels * @param view * using the coordinate system of the entries * @return visualizer */ @SuppressWarnings("unchecked") public Visualizer visualize(int width, int height, Rectangle view) { return new Visualizer((RTree<?, Geometry>) this, width, height, view); } /** * Returns a {@link Visualizer} for an image of given width and height and * restricted to the the smallest view that fully contains the coordinates. The * points in the view are scaled to match the aspect ratio defined by the width * and height. * * @param width * of the image in pixels * @param height * of the image in pixels * @return visualizer */ public Visualizer visualize(int width, int height) { return visualize(width, height, calculateMaxView(this)); } private Rectangle calculateMaxView(RTree<T, S> tree) { @SuppressWarnings("unchecked") Func2<Optional<Rectangle>, Entry<T, S>, Optional<Rectangle>> ra = // (Func2<Optional<Rectangle>, Entry<T, S>, Optional<Rectangle>>) // (Func2<?,?,?>) // RECTANGLE_ACCUMULATOR; return tree.entries() .reduce(Optional.empty(), ra) .toBlocking().single() .orElse(ZERO_RECTANGLE); } public Optional<? 
extends Node<T, S>> root() { return root; } /** * If the RTree has no entries returns {@link Optional#absent} otherwise returns * the minimum bounding rectangle of all entries in the RTree. * * @return minimum bounding rectangle of all entries in RTree */ public Optional<Rectangle> mbr() { return root.map(r -> r.geometry().mbr()); } /** * Returns true if and only if the R-tree is empty of entries. * * @return is R-tree empty */ public boolean isEmpty() { return size == 0; } /** * Returns the number of entries in the RTree. * * @return the number of entries */ public int size() { return size; } /** * Returns a {@link Context} containing the configuration of the RTree at the * time of instantiation. * * @return the configuration of the RTree prior to instantiation */ public Context<T, S> context() { return context; } /** * Returns a human readable form of the RTree. Here's an example: * * <pre> * mbr=Rectangle [x1=10.0, y1=4.0, x2=62.0, y2=85.0] * mbr=Rectangle [x1=28.0, y1=4.0, x2=34.0, y2=85.0] * entry=Entry [value=2, geometry=Point [x=29.0, y=4.0]] * entry=Entry [value=1, geometry=Point [x=28.0, y=19.0]] * entry=Entry [value=4, geometry=Point [x=34.0, y=85.0]] * mbr=Rectangle [x1=10.0, y1=45.0, x2=62.0, y2=63.0] * entry=Entry [value=5, geometry=Point [x=62.0, y=45.0]] * entry=Entry [value=3, geometry=Point [x=10.0, y=63.0]] * </pre> * * @return a string representation of the RTree */ public String asString() { if (!root.isPresent()) return ""; else return asString(root.get(), ""); } private static final String MARGIN_INCREMENT = " "; private String asString(Node<T, S> node, String margin) { StringBuilder s = new StringBuilder(); s.append(margin); s.append("mbr="); s.append(node.geometry()); s.append('\n'); if (node instanceof NonLeaf) { NonLeaf<T, S> n = (NonLeaf<T, S>) node; for (int i = 0; i < n.count(); i++) { Node<T, S> child = n.child(i); s.append(asString(child, margin + MARGIN_INCREMENT)); } } else { Leaf<T, S> leaf = (Leaf<T, S>) node; for (Entry<T, S> 
entry : leaf.entries()) { s.append(margin); s.append(MARGIN_INCREMENT); s.append("entry="); s.append(entry); s.append('\n'); } } return s.toString(); } }
davidmoten/rtree
src/main/java/com/github/davidmoten/rtree/RTree.java
Java
apache-2.0
34,613
/* * Copyright 2016 The OpenDCT Authors. All Rights Reserved * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package opendct.tuning.discovery; import opendct.config.options.DeviceOptions; import java.net.InetAddress; public interface DiscoveredDeviceParent extends DeviceOptions { /** * The unique name of this capture device parent. * <p/> * This should always return exactly the same name every time this device is detected. This is * used to verify that we are not potentially loading a duplicate device. * * @return The unchangeable unique name of this capture device. */ public String getName(); /** * The friendly/modifiable name of this capture device parent. * <p/> * This can be the same as the unique name, but this value should be user assignable. * * @return The modifiable name of this capture device parent. */ public String getFriendlyName(); /** * The unique id of this capture device parent. * <p/> * This ID must be exactly the same every time this device is detected. This is used to verify * that we are not potentially loading a duplicate device. * * @return The unique ID for this capture device. */ public int getParentId(); /** * Is this a network device? * * @return <i>true</i> if this is a network device. */ public boolean isNetworkDevice(); /** * Returns the local IP address to be used when streaming to this computer. * <p/> * Return <i>null</i> if this is not a network device. * * @return Returns the local IP address if this is a network device or <i>null</i>. 
*/ public InetAddress getLocalAddress(); /** * Returns the current IP address of the capture device parent. * <p/> * Return <i>null</i> if this is not a network device. * * @return Returns the remote IP address if this is a network device or <i>null</i>. */ public InetAddress getRemoteAddress(); /** * Returns the unique IDs of all child devices for this parent device. * <p/> * This list is allowed to expand. When a capture device is detected, the device parent should * always be added first. * * @return An array of the child devices by unique ID. */ public int[] getChildDevices(); }
enternoescape/opendct
src/main/java/opendct/tuning/discovery/DiscoveredDeviceParent.java
Java
apache-2.0
2,882
/* * Copyright 2016 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dns.it; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import com.google.api.gax.paging.Page; import com.google.cloud.dns.ChangeRequest; import com.google.cloud.dns.ChangeRequestInfo; import com.google.cloud.dns.Dns; import com.google.cloud.dns.Dns.ChangeRequestField; import com.google.cloud.dns.Dns.ProjectField; import com.google.cloud.dns.Dns.RecordSetField; import com.google.cloud.dns.Dns.ZoneField; import com.google.cloud.dns.DnsBatch; import com.google.cloud.dns.DnsBatchResult; import com.google.cloud.dns.DnsException; import com.google.cloud.dns.DnsOptions; import com.google.cloud.dns.ProjectInfo; import com.google.cloud.dns.RecordSet; import com.google.cloud.dns.Zone; import com.google.cloud.dns.ZoneInfo; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.UUID; import java.util.concurrent.TimeUnit; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; public class ITDnsTest { private static final String PREFIX = "gcldjvit-"; private static final 
Dns DNS = DnsOptions.getDefaultInstance().getService(); private static final String ZONE_NAME1 = (PREFIX + UUID.randomUUID()).substring(0, 32); private static final String ZONE_NAME_EMPTY_DESCRIPTION = (PREFIX + UUID.randomUUID()).substring(0, 32); private static final String ZONE_NAME_TOO_LONG = ZONE_NAME1 + UUID.randomUUID(); private static final String ZONE_DESCRIPTION1 = "first zone"; private static final String ZONE_DNS_NAME1 = ZONE_NAME1 + ".com."; private static final String ZONE_DNS_EMPTY_DESCRIPTION = ZONE_NAME_EMPTY_DESCRIPTION + ".com."; private static final String ZONE_DNS_NAME_NO_PERIOD = ZONE_NAME1 + ".com"; private static final ZoneInfo ZONE1 = ZoneInfo.of(ZONE_NAME1, ZONE_DNS_EMPTY_DESCRIPTION, ZONE_DESCRIPTION1); private static final ZoneInfo ZONE_EMPTY_DESCRIPTION = ZoneInfo.of(ZONE_NAME_EMPTY_DESCRIPTION, ZONE_DNS_NAME1, ZONE_DESCRIPTION1); private static final ZoneInfo ZONE_NAME_ERROR = ZoneInfo.of(ZONE_NAME_TOO_LONG, ZONE_DNS_NAME1, ZONE_DESCRIPTION1); private static final ZoneInfo ZONE_DNS_NO_PERIOD = ZoneInfo.of(ZONE_NAME1, ZONE_DNS_NAME_NO_PERIOD, ZONE_DESCRIPTION1); private static final RecordSet A_RECORD_ZONE1 = RecordSet.newBuilder("www." + ZONE1.getDnsName(), RecordSet.Type.A) .setRecords(ImmutableList.of("123.123.55.1")) .setTtl(25, TimeUnit.SECONDS) .build(); private static final RecordSet AAAA_RECORD_ZONE1 = RecordSet.newBuilder("www." 
+ ZONE1.getDnsName(), RecordSet.Type.AAAA) .setRecords(ImmutableList.of("ed:ed:12:aa:36:3:3:105")) .setTtl(25, TimeUnit.SECONDS) .build(); private static final ChangeRequestInfo CHANGE_ADD_ZONE1 = ChangeRequest.newBuilder().add(A_RECORD_ZONE1).add(AAAA_RECORD_ZONE1).build(); private static final ChangeRequestInfo CHANGE_DELETE_ZONE1 = ChangeRequest.newBuilder().delete(A_RECORD_ZONE1).delete(AAAA_RECORD_ZONE1).build(); private static final List<String> ZONE_NAMES = ImmutableList.of(ZONE_NAME1, ZONE_NAME_EMPTY_DESCRIPTION); @Rule public Timeout globalTimeout = Timeout.seconds(300); private static void clear() { for (String zoneName : ZONE_NAMES) { Zone zone = DNS.getZone(zoneName); if (zone != null) { /* We wait for all changes to complete before retrieving a list of DNS records to be deleted. Waiting is necessary as changes potentially might create more records between when the list has been retrieved and executing the subsequent delete operation. */ Iterator<ChangeRequest> iterator = zone.listChangeRequests().iterateAll().iterator(); while (iterator.hasNext()) { waitForChangeToComplete(zoneName, iterator.next().getGeneratedId()); } Iterator<RecordSet> recordSetIterator = zone.listRecordSets().iterateAll().iterator(); List<RecordSet> toDelete = new LinkedList<>(); while (recordSetIterator.hasNext()) { RecordSet recordSet = recordSetIterator.next(); if (!ImmutableList.of(RecordSet.Type.NS, RecordSet.Type.SOA) .contains(recordSet.getType())) { toDelete.add(recordSet); } } if (!toDelete.isEmpty()) { ChangeRequest deletion = zone.applyChangeRequest(ChangeRequest.newBuilder().setDeletions(toDelete).build()); waitForChangeToComplete(zone.getName(), deletion.getGeneratedId()); } zone.delete(); } } } private static List<Zone> filter(Iterator<Zone> iterator) { List<Zone> result = new LinkedList<>(); while (iterator.hasNext()) { Zone zone = iterator.next(); if (ZONE_NAMES.contains(zone.getName())) { result.add(zone); } } return result; } @BeforeClass public static void 
before() { clear(); } @AfterClass public static void after() { clear(); } private static void assertEqChangesIgnoreStatus(ChangeRequest expected, ChangeRequest actual) { assertEquals(expected.getAdditions(), actual.getAdditions()); assertEquals(expected.getDeletions(), actual.getDeletions()); assertEquals(expected.getGeneratedId(), actual.getGeneratedId()); assertEquals(expected.getStartTimeMillis(), actual.getStartTimeMillis()); } private static void waitForChangeToComplete(String zoneName, String changeId) { ChangeRequest changeRequest = DNS.getChangeRequest( zoneName, changeId, Dns.ChangeRequestOption.fields(ChangeRequestField.STATUS)); waitForChangeToComplete(changeRequest); } private static void waitForChangeToComplete(ChangeRequest changeRequest) { while (!changeRequest.isDone()) { try { Thread.sleep(500); } catch (InterruptedException e) { fail("Thread was interrupted while waiting for change processing."); } } } @Test public void testCreateValidZone() { try { Zone created = DNS.create(ZONE1); assertEquals(ZONE1.getDescription(), created.getDescription()); assertEquals(ZONE1.getDnsName(), created.getDnsName()); assertEquals(ZONE1.getName(), created.getName()); assertNotNull(created.getCreationTimeMillis()); assertNotNull(created.getNameServers()); assertNull(created.getNameServerSet()); assertNotNull(created.getGeneratedId()); Zone retrieved = DNS.getZone(ZONE1.getName()); assertEquals(created, retrieved); created = DNS.create(ZONE_EMPTY_DESCRIPTION); assertEquals(ZONE_EMPTY_DESCRIPTION.getDescription(), created.getDescription()); assertEquals(ZONE_EMPTY_DESCRIPTION.getDnsName(), created.getDnsName()); assertEquals(ZONE_EMPTY_DESCRIPTION.getName(), created.getName()); assertNotNull(created.getCreationTimeMillis()); assertNotNull(created.getNameServers()); assertNull(created.getNameServerSet()); assertNotNull(created.getGeneratedId()); retrieved = DNS.getZone(ZONE_EMPTY_DESCRIPTION.getName()); assertEquals(created, retrieved); } finally { 
DNS.delete(ZONE1.getName()); DNS.delete(ZONE_EMPTY_DESCRIPTION.getName()); } } @Test public void testCreateZoneWithErrors() { try { try { DNS.create(ZONE_NAME_ERROR); fail("Zone name is too long. The service returns an error."); } catch (DnsException ex) { // expected assertFalse(ex.isRetryable()); } try { DNS.create(ZONE_DNS_NO_PERIOD); fail("Zone name is missing a period. The service returns an error."); } catch (DnsException ex) { // expected assertFalse(ex.isRetryable()); } } finally { DNS.delete(ZONE_NAME_ERROR.getName()); DNS.delete(ZONE_DNS_NO_PERIOD.getName()); } } @Test public void testCreateZoneWithOptions() { try { Zone created = DNS.create(ZONE1, Dns.ZoneOption.fields(ZoneField.CREATION_TIME)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNotNull(created.getCreationTimeMillis()); assertNull(created.getDescription()); assertNull(created.getDnsName()); assertTrue(created.getNameServers().isEmpty()); // never returns null assertNull(created.getNameServerSet()); assertNull(created.getGeneratedId()); created.delete(); created = DNS.create(ZONE1, Dns.ZoneOption.fields(ZoneField.DESCRIPTION)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNull(created.getCreationTimeMillis()); assertEquals(ZONE1.getDescription(), created.getDescription()); assertNull(created.getDnsName()); assertTrue(created.getNameServers().isEmpty()); // never returns null assertNull(created.getNameServerSet()); assertNull(created.getGeneratedId()); created.delete(); created = DNS.create(ZONE1, Dns.ZoneOption.fields(ZoneField.DNS_NAME)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNull(created.getCreationTimeMillis()); assertEquals(ZONE1.getDnsName(), created.getDnsName()); assertNull(created.getDescription()); assertTrue(created.getNameServers().isEmpty()); // never returns null assertNull(created.getNameServerSet()); assertNull(created.getGeneratedId()); created.delete(); created = DNS.create(ZONE1, 
Dns.ZoneOption.fields(ZoneField.NAME)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNull(created.getCreationTimeMillis()); assertNull(created.getDnsName()); assertNull(created.getDescription()); assertTrue(created.getNameServers().isEmpty()); // never returns null assertNull(created.getNameServerSet()); assertNull(created.getGeneratedId()); created.delete(); created = DNS.create(ZONE1, Dns.ZoneOption.fields(ZoneField.NAME_SERVER_SET)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNull(created.getCreationTimeMillis()); assertNull(created.getDnsName()); assertNull(created.getDescription()); assertTrue(created.getNameServers().isEmpty()); // never returns null assertNull(created.getNameServerSet()); // we did not set it assertNull(created.getGeneratedId()); created.delete(); created = DNS.create(ZONE1, Dns.ZoneOption.fields(ZoneField.NAME_SERVERS)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNull(created.getCreationTimeMillis()); assertNull(created.getDnsName()); assertNull(created.getDescription()); assertFalse(created.getNameServers().isEmpty()); assertNull(created.getNameServerSet()); assertNull(created.getGeneratedId()); created.delete(); created = DNS.create(ZONE1, Dns.ZoneOption.fields(ZoneField.ZONE_ID)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNull(created.getCreationTimeMillis()); assertNull(created.getDnsName()); assertNull(created.getDescription()); assertNotNull(created.getNameServers()); assertTrue(created.getNameServers().isEmpty()); // never returns null assertNotNull(created.getGeneratedId()); created.delete(); // combination of multiple things created = DNS.create( ZONE1, Dns.ZoneOption.fields( ZoneField.ZONE_ID, ZoneField.NAME_SERVERS, ZoneField.NAME_SERVER_SET, ZoneField.DESCRIPTION)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNull(created.getCreationTimeMillis()); 
assertNull(created.getDnsName()); assertEquals(ZONE1.getDescription(), created.getDescription()); assertFalse(created.getNameServers().isEmpty()); assertNull(created.getNameServerSet()); // we did not set it assertNotNull(created.getGeneratedId()); } finally { DNS.delete(ZONE1.getName()); } } @Test public void testGetZone() { try { DNS.create(ZONE1, Dns.ZoneOption.fields(ZoneField.NAME)); Zone created = DNS.getZone(ZONE1.getName(), Dns.ZoneOption.fields(ZoneField.CREATION_TIME)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNotNull(created.getCreationTimeMillis()); assertNull(created.getDescription()); assertNull(created.getDnsName()); assertTrue(created.getNameServers().isEmpty()); // never returns null assertNull(created.getNameServerSet()); assertNull(created.getGeneratedId()); created = DNS.getZone(ZONE1.getName(), Dns.ZoneOption.fields(ZoneField.DESCRIPTION)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNull(created.getCreationTimeMillis()); assertEquals(ZONE1.getDescription(), created.getDescription()); assertNull(created.getDnsName()); assertTrue(created.getNameServers().isEmpty()); // never returns null assertNull(created.getNameServerSet()); assertNull(created.getGeneratedId()); created = DNS.getZone(ZONE1.getName(), Dns.ZoneOption.fields(ZoneField.DNS_NAME)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNull(created.getCreationTimeMillis()); assertEquals(ZONE1.getDnsName(), created.getDnsName()); assertNull(created.getDescription()); assertTrue(created.getNameServers().isEmpty()); // never returns null assertNull(created.getNameServerSet()); assertNull(created.getGeneratedId()); created = DNS.getZone(ZONE1.getName(), Dns.ZoneOption.fields(ZoneField.NAME)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNull(created.getCreationTimeMillis()); assertNull(created.getDnsName()); assertNull(created.getDescription()); 
assertTrue(created.getNameServers().isEmpty()); // never returns null assertNull(created.getNameServerSet()); assertNull(created.getGeneratedId()); created = DNS.getZone(ZONE1.getName(), Dns.ZoneOption.fields(ZoneField.NAME_SERVER_SET)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNull(created.getCreationTimeMillis()); assertNull(created.getDnsName()); assertNull(created.getDescription()); assertTrue(created.getNameServers().isEmpty()); // never returns null assertNull(created.getNameServerSet()); // we did not set it assertNull(created.getGeneratedId()); created = DNS.getZone(ZONE1.getName(), Dns.ZoneOption.fields(ZoneField.NAME_SERVERS)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNull(created.getCreationTimeMillis()); assertNull(created.getDnsName()); assertNull(created.getDescription()); assertFalse(created.getNameServers().isEmpty()); assertNull(created.getNameServerSet()); assertNull(created.getGeneratedId()); created = DNS.getZone(ZONE1.getName(), Dns.ZoneOption.fields(ZoneField.ZONE_ID)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNull(created.getCreationTimeMillis()); assertNull(created.getDnsName()); assertNull(created.getDescription()); assertNotNull(created.getNameServers()); assertTrue(created.getNameServers().isEmpty()); // never returns null assertNotNull(created.getGeneratedId()); // combination of multiple things created = DNS.getZone( ZONE1.getName(), Dns.ZoneOption.fields( ZoneField.ZONE_ID, ZoneField.NAME_SERVERS, ZoneField.NAME_SERVER_SET, ZoneField.DESCRIPTION)); assertEquals(ZONE1.getName(), created.getName()); // always returned assertNull(created.getCreationTimeMillis()); assertNull(created.getDnsName()); assertEquals(ZONE1.getDescription(), created.getDescription()); assertFalse(created.getNameServers().isEmpty()); assertNull(created.getNameServerSet()); // we did not set it assertNotNull(created.getGeneratedId()); } finally { 
DNS.delete(ZONE1.getName()); } } @Test public void testListZones() { try { List<Zone> zones = filter(DNS.listZones().iterateAll().iterator()); assertEquals(0, zones.size()); // some zones exists Zone created = DNS.create(ZONE1); zones = filter(DNS.listZones().iterateAll().iterator()); assertEquals(created, zones.get(0)); assertEquals(1, zones.size()); created = DNS.create(ZONE_EMPTY_DESCRIPTION); zones = filter(DNS.listZones().iterateAll().iterator()); assertEquals(2, zones.size()); assertTrue(zones.contains(created)); // error in options try { DNS.listZones(Dns.ZoneListOption.pageSize(0)); fail(); } catch (DnsException ex) { // expected assertEquals(400, ex.getCode()); assertFalse(ex.isRetryable()); } try { DNS.listZones(Dns.ZoneListOption.pageSize(-1)); fail(); } catch (DnsException ex) { // expected assertEquals(400, ex.getCode()); assertFalse(ex.isRetryable()); } // ok size zones = filter(DNS.listZones(Dns.ZoneListOption.pageSize(1000)).iterateAll().iterator()); assertEquals(2, zones.size()); // we still have only 2 zones // dns name problems try { DNS.listZones(Dns.ZoneListOption.dnsName("aaaaa")); fail(); } catch (DnsException ex) { // expected assertEquals(400, ex.getCode()); assertFalse(ex.isRetryable()); } // ok name zones = filter( DNS.listZones(Dns.ZoneListOption.dnsName(ZONE1.getDnsName())) .iterateAll() .iterator()); assertEquals(1, zones.size()); // field options Iterator<Zone> zoneIterator = DNS.listZones( Dns.ZoneListOption.dnsName(ZONE1.getDnsName()), Dns.ZoneListOption.fields(ZoneField.ZONE_ID)) .iterateAll() .iterator(); Zone zone = zoneIterator.next(); assertNull(zone.getCreationTimeMillis()); assertNotNull(zone.getName()); assertNull(zone.getDnsName()); assertNull(zone.getDescription()); assertNull(zone.getNameServerSet()); assertTrue(zone.getNameServers().isEmpty()); assertNotNull(zone.getGeneratedId()); assertFalse(zoneIterator.hasNext()); zoneIterator = DNS.listZones( Dns.ZoneListOption.dnsName(ZONE1.getDnsName()), 
Dns.ZoneListOption.fields(ZoneField.CREATION_TIME)) .iterateAll() .iterator(); zone = zoneIterator.next(); assertNotNull(zone.getCreationTimeMillis()); assertNotNull(zone.getName()); assertNull(zone.getDnsName()); assertNull(zone.getDescription()); assertNull(zone.getNameServerSet()); assertTrue(zone.getNameServers().isEmpty()); assertNull(zone.getGeneratedId()); assertFalse(zoneIterator.hasNext()); zoneIterator = DNS.listZones( Dns.ZoneListOption.dnsName(ZONE1.getDnsName()), Dns.ZoneListOption.fields(ZoneField.DNS_NAME)) .iterateAll() .iterator(); zone = zoneIterator.next(); assertNull(zone.getCreationTimeMillis()); assertNotNull(zone.getName()); assertNotNull(zone.getDnsName()); assertNull(zone.getDescription()); assertNull(zone.getNameServerSet()); assertTrue(zone.getNameServers().isEmpty()); assertNull(zone.getGeneratedId()); assertFalse(zoneIterator.hasNext()); zoneIterator = DNS.listZones( Dns.ZoneListOption.dnsName(ZONE1.getDnsName()), Dns.ZoneListOption.fields(ZoneField.DESCRIPTION)) .iterateAll() .iterator(); zone = zoneIterator.next(); assertNull(zone.getCreationTimeMillis()); assertNotNull(zone.getName()); assertNull(zone.getDnsName()); assertNotNull(zone.getDescription()); assertNull(zone.getNameServerSet()); assertTrue(zone.getNameServers().isEmpty()); assertNull(zone.getGeneratedId()); assertFalse(zoneIterator.hasNext()); zoneIterator = DNS.listZones( Dns.ZoneListOption.dnsName(ZONE1.getDnsName()), Dns.ZoneListOption.fields(ZoneField.NAME_SERVERS)) .iterateAll() .iterator(); zone = zoneIterator.next(); assertNull(zone.getCreationTimeMillis()); assertNotNull(zone.getName()); assertNull(zone.getDnsName()); assertNull(zone.getDescription()); assertNull(zone.getNameServerSet()); assertFalse(zone.getNameServers().isEmpty()); assertNull(zone.getGeneratedId()); assertFalse(zoneIterator.hasNext()); zoneIterator = DNS.listZones( Dns.ZoneListOption.dnsName(ZONE1.getDnsName()), Dns.ZoneListOption.fields(ZoneField.NAME_SERVER_SET)) .iterateAll() .iterator(); zone 
= zoneIterator.next(); assertNull(zone.getCreationTimeMillis()); assertNotNull(zone.getName()); assertNull(zone.getDnsName()); assertNull(zone.getDescription()); assertNull(zone.getNameServerSet()); // we cannot set it using google-cloud assertTrue(zone.getNameServers().isEmpty()); assertNull(zone.getGeneratedId()); assertFalse(zoneIterator.hasNext()); // several combined zones = filter( DNS.listZones( Dns.ZoneListOption.fields(ZoneField.ZONE_ID, ZoneField.DESCRIPTION), Dns.ZoneListOption.pageSize(1)) .iterateAll() .iterator()); assertEquals(2, zones.size()); for (Zone current : zones) { assertNull(current.getCreationTimeMillis()); assertNotNull(current.getName()); assertNull(current.getDnsName()); assertNotNull(current.getDescription()); assertNull(current.getNameServerSet()); assertTrue(zone.getNameServers().isEmpty()); assertNotNull(current.getGeneratedId()); } } finally { DNS.delete(ZONE1.getName()); DNS.delete(ZONE_EMPTY_DESCRIPTION.getName()); } } @Test public void testDeleteZone() { try { Zone created = DNS.create(ZONE1); assertEquals(created, DNS.getZone(ZONE1.getName())); DNS.delete(ZONE1.getName()); assertNull(DNS.getZone(ZONE1.getName())); } finally { DNS.delete(ZONE1.getName()); } } @Test public void testCreateChange() { try { DNS.create(ZONE1, Dns.ZoneOption.fields(ZoneField.NAME)); ChangeRequest created = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_ADD_ZONE1); assertEquals(CHANGE_ADD_ZONE1.getAdditions(), created.getAdditions()); assertNotNull(created.getStartTimeMillis()); assertTrue(created.getDeletions().isEmpty()); assertNotNull(created.getGeneratedId()); assertTrue( ImmutableList.of(ChangeRequest.Status.PENDING, ChangeRequest.Status.DONE) .contains(created.status())); assertEqChangesIgnoreStatus(created, DNS.getChangeRequest(ZONE1.getName(), "1")); waitForChangeToComplete(created); created = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_DELETE_ZONE1); waitForChangeToComplete(created); // with options created = DNS.applyChangeRequest( 
ZONE1.getName(), CHANGE_ADD_ZONE1, Dns.ChangeRequestOption.fields(ChangeRequestField.ID)); assertTrue(created.getAdditions().isEmpty()); assertNull(created.getStartTimeMillis()); assertTrue(created.getDeletions().isEmpty()); assertNotNull(created.getGeneratedId()); assertNull(created.status()); waitForChangeToComplete(created); created = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_DELETE_ZONE1); waitForChangeToComplete(created); created = DNS.applyChangeRequest( ZONE1.getName(), CHANGE_ADD_ZONE1, Dns.ChangeRequestOption.fields(ChangeRequestField.STATUS)); assertTrue(created.getAdditions().isEmpty()); assertNull(created.getStartTimeMillis()); assertTrue(created.getDeletions().isEmpty()); assertNotNull(created.getGeneratedId()); assertNotNull(created.status()); waitForChangeToComplete(created); created = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_DELETE_ZONE1); waitForChangeToComplete(created); created = DNS.applyChangeRequest( ZONE1.getName(), CHANGE_ADD_ZONE1, Dns.ChangeRequestOption.fields(ChangeRequestField.START_TIME)); assertTrue(created.getAdditions().isEmpty()); assertNotNull(created.getStartTimeMillis()); assertTrue(created.getDeletions().isEmpty()); assertNotNull(created.getGeneratedId()); assertNull(created.status()); waitForChangeToComplete(created); created = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_DELETE_ZONE1); waitForChangeToComplete(created); created = DNS.applyChangeRequest( ZONE1.getName(), CHANGE_ADD_ZONE1, Dns.ChangeRequestOption.fields(ChangeRequestField.ADDITIONS)); assertEquals(CHANGE_ADD_ZONE1.getAdditions(), created.getAdditions()); assertNull(created.getStartTimeMillis()); assertTrue(created.getDeletions().isEmpty()); assertNotNull(created.getGeneratedId()); assertNull(created.status()); // finishes with delete otherwise we cannot delete the zone waitForChangeToComplete(created); created = DNS.applyChangeRequest( ZONE1.getName(), CHANGE_DELETE_ZONE1, Dns.ChangeRequestOption.fields(ChangeRequestField.DELETIONS)); 
waitForChangeToComplete(created); assertEquals(CHANGE_DELETE_ZONE1.getDeletions(), created.getDeletions()); assertNull(created.getStartTimeMillis()); assertTrue(created.getAdditions().isEmpty()); assertNotNull(created.getGeneratedId()); assertNull(created.status()); waitForChangeToComplete(created); } finally { clear(); } } @Test public void testInvalidChangeRequest() { Zone zone = DNS.create(ZONE1); RecordSet validA = RecordSet.newBuilder("subdomain." + zone.getDnsName(), RecordSet.Type.A) .setRecords(ImmutableList.of("0.255.1.5")) .build(); boolean recordAdded = false; try { ChangeRequestInfo validChange = ChangeRequest.newBuilder().add(validA).build(); zone.applyChangeRequest(validChange); recordAdded = true; try { zone.applyChangeRequest(validChange); fail("Created a record set which already exists."); } catch (DnsException ex) { // expected assertFalse(ex.isRetryable()); assertEquals(409, ex.getCode()); } // delete with field mismatch RecordSet mismatch = validA.toBuilder().setTtl(20, TimeUnit.SECONDS).build(); ChangeRequestInfo deletion = ChangeRequest.newBuilder().delete(mismatch).build(); try { zone.applyChangeRequest(deletion); fail("Deleted a record set without a complete match."); } catch (DnsException ex) { // expected assertEquals(412, ex.getCode()); assertFalse(ex.isRetryable()); } // delete and add SOA Iterator<RecordSet> recordSetIterator = zone.listRecordSets().iterateAll().iterator(); LinkedList<RecordSet> deletions = new LinkedList<>(); LinkedList<RecordSet> additions = new LinkedList<>(); while (recordSetIterator.hasNext()) { RecordSet recordSet = recordSetIterator.next(); if (recordSet.getType() == RecordSet.Type.SOA) { deletions.add(recordSet); // the subdomain is necessary to get 400 instead of 412 RecordSet copy = recordSet.toBuilder().setName("x." 
+ recordSet.getName()).build(); additions.add(copy); break; } } deletion = deletion.toBuilder().setDeletions(deletions).build(); ChangeRequestInfo addition = ChangeRequest.newBuilder().setAdditions(additions).build(); try { zone.applyChangeRequest(deletion); fail("Deleted SOA."); } catch (DnsException ex) { // expected assertFalse(ex.isRetryable()); assertEquals(400, ex.getCode()); } try { zone.applyChangeRequest(addition); fail("Added second SOA."); } catch (DnsException ex) { // expected assertFalse(ex.isRetryable()); assertEquals(400, ex.getCode()); } } finally { if (recordAdded) { ChangeRequestInfo deletion = ChangeRequest.newBuilder().delete(validA).build(); ChangeRequest request = zone.applyChangeRequest(deletion); waitForChangeToComplete(zone.getName(), request.getGeneratedId()); } zone.delete(); } } @Test public void testListChanges() { try { // no such zone exists try { DNS.listChangeRequests(ZONE1.getName()); fail(); } catch (DnsException ex) { // expected assertEquals(404, ex.getCode()); assertFalse(ex.isRetryable()); } // zone exists but has no changes DNS.create(ZONE1); ImmutableList<ChangeRequest> changes = ImmutableList.copyOf(DNS.listChangeRequests(ZONE1.getName()).iterateAll()); assertEquals(1, changes.size()); // default change creating SOA and NS // zone has changes ChangeRequest change = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_ADD_ZONE1); waitForChangeToComplete(ZONE1.getName(), change.getGeneratedId()); change = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_DELETE_ZONE1); waitForChangeToComplete(ZONE1.getName(), change.getGeneratedId()); change = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_ADD_ZONE1); waitForChangeToComplete(ZONE1.getName(), change.getGeneratedId()); change = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_DELETE_ZONE1); waitForChangeToComplete(ZONE1.getName(), change.getGeneratedId()); changes = ImmutableList.copyOf(DNS.listChangeRequests(ZONE1.getName()).iterateAll()); assertEquals(5, changes.size()); // error in 
options try { DNS.listChangeRequests(ZONE1.getName(), Dns.ChangeRequestListOption.pageSize(0)); fail(); } catch (DnsException ex) { // expected assertEquals(400, ex.getCode()); assertFalse(ex.isRetryable()); } try { DNS.listChangeRequests(ZONE1.getName(), Dns.ChangeRequestListOption.pageSize(-1)); fail(); } catch (DnsException ex) { // expected assertEquals(400, ex.getCode()); assertFalse(ex.isRetryable()); } // sorting order ImmutableList<ChangeRequest> ascending = ImmutableList.copyOf( DNS.listChangeRequests( ZONE1.getName(), Dns.ChangeRequestListOption.sortOrder(Dns.SortingOrder.ASCENDING)) .iterateAll()); ImmutableList<ChangeRequest> descending = ImmutableList.copyOf( DNS.listChangeRequests( ZONE1.getName(), Dns.ChangeRequestListOption.sortOrder(Dns.SortingOrder.DESCENDING)) .iterateAll()); int size = 5; assertEquals(size, descending.size()); assertEquals(size, ascending.size()); for (int i = 0; i < size; i++) { assertEquals(descending.get(i), ascending.get(size - i - 1)); } // field options changes = ImmutableList.copyOf( DNS.listChangeRequests( ZONE1.getName(), Dns.ChangeRequestListOption.sortOrder(Dns.SortingOrder.ASCENDING), Dns.ChangeRequestListOption.fields(ChangeRequestField.ADDITIONS)) .iterateAll()); change = changes.get(1); assertEquals(CHANGE_ADD_ZONE1.getAdditions(), change.getAdditions()); assertTrue(change.getDeletions().isEmpty()); assertNotNull(change.getGeneratedId()); assertNull(change.getStartTimeMillis()); assertNull(change.status()); changes = ImmutableList.copyOf( DNS.listChangeRequests( ZONE1.getName(), Dns.ChangeRequestListOption.sortOrder(Dns.SortingOrder.ASCENDING), Dns.ChangeRequestListOption.fields(ChangeRequestField.DELETIONS)) .iterateAll()); change = changes.get(2); assertTrue(change.getAdditions().isEmpty()); assertNotNull(change.getDeletions()); assertNotNull(change.getGeneratedId()); assertNull(change.getStartTimeMillis()); assertNull(change.status()); changes = ImmutableList.copyOf( DNS.listChangeRequests( ZONE1.getName(), 
Dns.ChangeRequestListOption.sortOrder(Dns.SortingOrder.ASCENDING), Dns.ChangeRequestListOption.fields(ChangeRequestField.ID)) .iterateAll()); change = changes.get(1); assertTrue(change.getAdditions().isEmpty()); assertTrue(change.getDeletions().isEmpty()); assertNotNull(change.getGeneratedId()); assertNull(change.getStartTimeMillis()); assertNull(change.status()); changes = ImmutableList.copyOf( DNS.listChangeRequests( ZONE1.getName(), Dns.ChangeRequestListOption.sortOrder(Dns.SortingOrder.ASCENDING), Dns.ChangeRequestListOption.fields(ChangeRequestField.START_TIME)) .iterateAll()); change = changes.get(1); assertTrue(change.getAdditions().isEmpty()); assertTrue(change.getDeletions().isEmpty()); assertNotNull(change.getGeneratedId()); assertNotNull(change.getStartTimeMillis()); assertNull(change.status()); changes = ImmutableList.copyOf( DNS.listChangeRequests( ZONE1.getName(), Dns.ChangeRequestListOption.sortOrder(Dns.SortingOrder.ASCENDING), Dns.ChangeRequestListOption.fields(ChangeRequestField.STATUS)) .iterateAll()); change = changes.get(1); assertTrue(change.getAdditions().isEmpty()); assertTrue(change.getDeletions().isEmpty()); assertNotNull(change.getGeneratedId()); assertNull(change.getStartTimeMillis()); assertEquals(ChangeRequest.Status.DONE, change.status()); } finally { clear(); } } @Test public void testGetChange() { try { Zone zone = DNS.create(ZONE1, Dns.ZoneOption.fields(ZoneField.NAME)); ChangeRequest created = zone.applyChangeRequest(CHANGE_ADD_ZONE1); ChangeRequest retrieved = DNS.getChangeRequest(zone.getName(), created.getGeneratedId()); assertEqChangesIgnoreStatus(created, retrieved); waitForChangeToComplete(zone.getName(), created.getGeneratedId()); zone.applyChangeRequest(CHANGE_DELETE_ZONE1); // with options created = zone.applyChangeRequest( CHANGE_ADD_ZONE1, Dns.ChangeRequestOption.fields(ChangeRequestField.ID)); retrieved = DNS.getChangeRequest( zone.getName(), created.getGeneratedId(), 
Dns.ChangeRequestOption.fields(ChangeRequestField.ID)); assertEqChangesIgnoreStatus(created, retrieved); waitForChangeToComplete(zone.getName(), created.getGeneratedId()); zone.applyChangeRequest(CHANGE_DELETE_ZONE1); created = zone.applyChangeRequest( CHANGE_ADD_ZONE1, Dns.ChangeRequestOption.fields(ChangeRequestField.STATUS)); retrieved = DNS.getChangeRequest( zone.getName(), created.getGeneratedId(), Dns.ChangeRequestOption.fields(ChangeRequestField.STATUS)); assertEqChangesIgnoreStatus(created, retrieved); waitForChangeToComplete(zone.getName(), created.getGeneratedId()); zone.applyChangeRequest(CHANGE_DELETE_ZONE1); created = zone.applyChangeRequest( CHANGE_ADD_ZONE1, Dns.ChangeRequestOption.fields(ChangeRequestField.START_TIME)); retrieved = DNS.getChangeRequest( zone.getName(), created.getGeneratedId(), Dns.ChangeRequestOption.fields(ChangeRequestField.START_TIME)); assertEqChangesIgnoreStatus(created, retrieved); waitForChangeToComplete(zone.getName(), created.getGeneratedId()); zone.applyChangeRequest(CHANGE_DELETE_ZONE1); created = zone.applyChangeRequest( CHANGE_ADD_ZONE1, Dns.ChangeRequestOption.fields(ChangeRequestField.ADDITIONS)); retrieved = DNS.getChangeRequest( zone.getName(), created.getGeneratedId(), Dns.ChangeRequestOption.fields(ChangeRequestField.ADDITIONS)); assertEqChangesIgnoreStatus(created, retrieved); waitForChangeToComplete(zone.getName(), created.getGeneratedId()); // finishes with delete otherwise we cannot delete the zone created = zone.applyChangeRequest( CHANGE_DELETE_ZONE1, Dns.ChangeRequestOption.fields(ChangeRequestField.DELETIONS)); retrieved = DNS.getChangeRequest( zone.getName(), created.getGeneratedId(), Dns.ChangeRequestOption.fields(ChangeRequestField.DELETIONS)); assertEqChangesIgnoreStatus(created, retrieved); waitForChangeToComplete(zone.getName(), created.getGeneratedId()); } finally { clear(); } } @Test public void testGetProject() { // fetches all fields ProjectInfo project = DNS.getProject(); 
    // (tail of testGetProject) field projections: QUOTA is returned only when requested.
    assertNotNull(project.getQuota());
    // options
    project = DNS.getProject(Dns.ProjectOption.fields(ProjectField.QUOTA));
    assertNotNull(project.getQuota());
    project = DNS.getProject(Dns.ProjectOption.fields(ProjectField.PROJECT_ID));
    assertNull(project.getQuota());
    project = DNS.getProject(Dns.ProjectOption.fields(ProjectField.PROJECT_NUMBER));
    assertNull(project.getQuota());
    project =
        DNS.getProject(
            Dns.ProjectOption.fields(
                ProjectField.PROJECT_NUMBER, ProjectField.QUOTA, ProjectField.PROJECT_ID));
    assertNotNull(project.getQuota());
  }

  // Lists record sets of a freshly created zone and verifies field projections,
  // paging, dns-name filtering, type filtering, and invalid-argument errors.
  @Test
  public void testListDnsRecords() {
    try {
      Zone zone = DNS.create(ZONE1);
      ImmutableList<RecordSet> recordSets =
          ImmutableList.copyOf(DNS.listRecordSets(zone.getName()).iterateAll());
      // a new zone contains exactly the default NS and SOA record sets
      assertEquals(2, recordSets.size());
      ImmutableList<RecordSet.Type> defaultRecords =
          ImmutableList.of(RecordSet.Type.NS, RecordSet.Type.SOA);
      for (RecordSet recordSet : recordSets) {
        assertTrue(defaultRecords.contains(recordSet.getType()));
      }
      // field options
      Iterator<RecordSet> recordSetIterator =
          DNS.listRecordSets(zone.getName(), Dns.RecordSetListOption.fields(RecordSetField.TTL))
              .iterateAll()
              .iterator();
      int counter = 0;
      while (recordSetIterator.hasNext()) {
        RecordSet recordSet = recordSetIterator.next();
        assertEquals(recordSets.get(counter).getTtl(), recordSet.getTtl());
        assertEquals(recordSets.get(counter).getName(), recordSet.getName());
        assertEquals(recordSets.get(counter).getType(), recordSet.getType());
        assertTrue(recordSet.getRecords().isEmpty());
        counter++;
      }
      assertEquals(2, counter);
      recordSetIterator =
          DNS.listRecordSets(zone.getName(), Dns.RecordSetListOption.fields(RecordSetField.NAME))
              .iterateAll()
              .iterator();
      counter = 0;
      while (recordSetIterator.hasNext()) {
        RecordSet recordSet = recordSetIterator.next();
        assertEquals(recordSets.get(counter).getName(), recordSet.getName());
        assertEquals(recordSets.get(counter).getType(), recordSet.getType());
        assertTrue(recordSet.getRecords().isEmpty());
        assertNull(recordSet.getTtl());
        counter++;
      }
      assertEquals(2, counter);
      recordSetIterator =
          DNS.listRecordSets(
                  zone.getName(), Dns.RecordSetListOption.fields(RecordSetField.DNS_RECORDS))
              .iterateAll()
              .iterator();
      counter = 0;
      while (recordSetIterator.hasNext()) {
        RecordSet recordSet = recordSetIterator.next();
        assertEquals(recordSets.get(counter).getRecords(), recordSet.getRecords());
        assertEquals(recordSets.get(counter).getName(), recordSet.getName());
        assertEquals(recordSets.get(counter).getType(), recordSet.getType());
        assertNull(recordSet.getTtl());
        counter++;
      }
      assertEquals(2, counter);
      recordSetIterator =
          DNS.listRecordSets(
                  zone.getName(),
                  Dns.RecordSetListOption.fields(RecordSetField.TYPE),
                  Dns.RecordSetListOption.pageSize(1))
              .iterateAll()
              .iterator(); // also test paging
      counter = 0;
      while (recordSetIterator.hasNext()) {
        RecordSet recordSet = recordSetIterator.next();
        assertEquals(recordSets.get(counter).getType(), recordSet.getType());
        assertEquals(recordSets.get(counter).getName(), recordSet.getName());
        assertTrue(recordSet.getRecords().isEmpty());
        assertNull(recordSet.getTtl());
        counter++;
      }
      assertEquals(2, counter);
      // test page size
      Page<RecordSet> recordSetPage =
          DNS.listRecordSets(
              zone.getName(),
              Dns.RecordSetListOption.fields(RecordSetField.TYPE),
              Dns.RecordSetListOption.pageSize(1));
      assertEquals(1, ImmutableList.copyOf(recordSetPage.getValues().iterator()).size());
      // test name filter
      ChangeRequest change = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_ADD_ZONE1);
      waitForChangeToComplete(ZONE1.getName(), change.getGeneratedId());
      recordSetIterator =
          DNS.listRecordSets(
                  ZONE1.getName(), Dns.RecordSetListOption.dnsName(A_RECORD_ZONE1.getName()))
              .iterateAll()
              .iterator();
      counter = 0;
      while (recordSetIterator.hasNext()) {
        RecordSet recordSet = recordSetIterator.next();
        assertTrue(
            ImmutableList.of(A_RECORD_ZONE1.getType(), AAAA_RECORD_ZONE1.getType())
                .contains(recordSet.getType()));
        counter++;
      }
      assertEquals(2, counter);
      // test type filter
      waitForChangeToComplete(ZONE1.getName(), change.getGeneratedId());
      recordSetIterator =
          DNS.listRecordSets(
                  ZONE1.getName(),
                  Dns.RecordSetListOption.dnsName(A_RECORD_ZONE1.getName()),
                  Dns.RecordSetListOption.type(A_RECORD_ZONE1.getType()))
              .iterateAll()
              .iterator();
      counter = 0;
      while (recordSetIterator.hasNext()) {
        RecordSet recordSet = recordSetIterator.next();
        assertEquals(A_RECORD_ZONE1, recordSet);
        counter++;
      }
      assertEquals(1, counter);
      change = zone.applyChangeRequest(CHANGE_DELETE_ZONE1);
      // check wrong arguments
      try {
        // name is not set
        DNS.listRecordSets(ZONE1.getName(), Dns.RecordSetListOption.type(A_RECORD_ZONE1.getType()));
        fail();
      } catch (DnsException ex) {
        // expected
        assertEquals(400, ex.getCode());
        assertFalse(ex.isRetryable());
      }
      try {
        DNS.listRecordSets(ZONE1.getName(), Dns.RecordSetListOption.pageSize(0));
        fail();
      } catch (DnsException ex) {
        // expected
        assertEquals(400, ex.getCode());
        assertFalse(ex.isRetryable());
      }
      try {
        DNS.listRecordSets(ZONE1.getName(), Dns.RecordSetListOption.pageSize(-1));
        fail();
      } catch (DnsException ex) {
        // expected
        assertEquals(400, ex.getCode());
        assertFalse(ex.isRetryable());
      }
      waitForChangeToComplete(ZONE1.getName(), change.getGeneratedId());
    } finally {
      clear();
    }
  }

  // Lists zones through a DnsBatch: empty project, growing project, invalid list
  // options, dns-name filtering, and per-field projections.
  @Test
  public void testListZonesBatch() {
    try {
      DnsBatch batch = DNS.batch();
      DnsBatchResult<Page<Zone>> result = batch.listZones();
      batch.submit();
      List<Zone> zones = filter(result.get().iterateAll().iterator());
      assertEquals(0, zones.size());
      // some zones exists
      Zone firstZone = DNS.create(ZONE1);
      batch = DNS.batch();
      result = batch.listZones();
      batch.submit();
      zones = filter(result.get().iterateAll().iterator());
      assertEquals(1, zones.size());
      assertEquals(firstZone, zones.get(0));
      Zone created = DNS.create(ZONE_EMPTY_DESCRIPTION);
      batch = DNS.batch();
      result = batch.listZones();
      DnsBatchResult<Page<Zone>> zeroSizeError = batch.listZones(Dns.ZoneListOption.pageSize(0));
      DnsBatchResult<Page<Zone>> negativeSizeError =
          batch.listZones(Dns.ZoneListOption.pageSize(-1));
      DnsBatchResult<Page<Zone>> okSize = batch.listZones(Dns.ZoneListOption.pageSize(1));
      DnsBatchResult<Page<Zone>> nameError = batch.listZones(Dns.ZoneListOption.dnsName("aaaaa"));
      DnsBatchResult<Page<Zone>> okName =
          batch.listZones(Dns.ZoneListOption.dnsName(ZONE1.getDnsName()));
      DnsBatchResult<Page<Zone>> idResult =
          batch.listZones(
              Dns.ZoneListOption.dnsName(ZONE1.getDnsName()),
              Dns.ZoneListOption.fields(ZoneField.ZONE_ID));
      DnsBatchResult<Page<Zone>> timeResult =
          batch.listZones(
              Dns.ZoneListOption.dnsName(ZONE1.getDnsName()),
              Dns.ZoneListOption.fields(ZoneField.CREATION_TIME));
      DnsBatchResult<Page<Zone>> dnsNameResult =
          batch.listZones(
              Dns.ZoneListOption.dnsName(ZONE1.getDnsName()),
              Dns.ZoneListOption.fields(ZoneField.DNS_NAME));
      DnsBatchResult<Page<Zone>> descriptionResult =
          batch.listZones(
              Dns.ZoneListOption.dnsName(ZONE1.getDnsName()),
              Dns.ZoneListOption.fields(ZoneField.DESCRIPTION));
      DnsBatchResult<Page<Zone>> nameServersResult =
          batch.listZones(
              Dns.ZoneListOption.dnsName(ZONE1.getDnsName()),
              Dns.ZoneListOption.fields(ZoneField.NAME_SERVERS));
      DnsBatchResult<Page<Zone>> nameServerSetResult =
          batch.listZones(
              Dns.ZoneListOption.dnsName(ZONE1.getDnsName()),
              Dns.ZoneListOption.fields(ZoneField.NAME_SERVER_SET));
      DnsBatchResult<Page<Zone>> combinationResult =
          batch.listZones(
              Dns.ZoneListOption.fields(ZoneField.ZONE_ID, ZoneField.DESCRIPTION),
              Dns.ZoneListOption.pageSize(1));
      batch.submit();
      zones = filter(result.get().iterateAll().iterator());
      assertEquals(2, zones.size());
      assertTrue(zones.contains(firstZone));
      assertTrue(zones.contains(created));
      // error in options
      try {
        zeroSizeError.get();
        fail();
      } catch (DnsException ex) {
        // expected
        assertEquals(400, ex.getCode());
        assertFalse(ex.isRetryable());
      }
      try {
        negativeSizeError.get();
        fail();
      } catch (DnsException ex) {
        // expected
        assertEquals(400, ex.getCode());
        assertFalse(ex.isRetryable());
      }
      // ok size
      assertEquals(1, Iterables.size(okSize.get().getValues()));
      // dns name problems
      try {
        nameError.get();
        fail();
      } catch (DnsException ex) {
        // expected
        assertEquals(400, ex.getCode());
        assertFalse(ex.isRetryable());
      }
      // ok name
      zones = filter(okName.get().iterateAll().iterator());
      assertEquals(1, zones.size());
      // field options
      Iterator<Zone> zoneIterator = idResult.get().iterateAll().iterator();
      Zone zone = zoneIterator.next();
      assertNull(zone.getCreationTimeMillis());
      assertNotNull(zone.getName());
      assertNull(zone.getDnsName());
      assertNull(zone.getDescription());
      assertNull(zone.getNameServerSet());
      assertTrue(zone.getNameServers().isEmpty());
      assertNotNull(zone.getGeneratedId());
      assertFalse(zoneIterator.hasNext());
      zoneIterator = timeResult.get().iterateAll().iterator();
      zone = zoneIterator.next();
      assertNotNull(zone.getCreationTimeMillis());
      assertNotNull(zone.getName());
      assertNull(zone.getDnsName());
      assertNull(zone.getDescription());
      assertNull(zone.getNameServerSet());
      assertTrue(zone.getNameServers().isEmpty());
      assertNull(zone.getGeneratedId());
      assertFalse(zoneIterator.hasNext());
      zoneIterator = dnsNameResult.get().iterateAll().iterator();
      zone = zoneIterator.next();
      assertNull(zone.getCreationTimeMillis());
      assertNotNull(zone.getName());
      assertNotNull(zone.getDnsName());
      assertNull(zone.getDescription());
      assertNull(zone.getNameServerSet());
      assertTrue(zone.getNameServers().isEmpty());
      assertNull(zone.getGeneratedId());
      assertFalse(zoneIterator.hasNext());
      zoneIterator = descriptionResult.get().iterateAll().iterator();
      zone = zoneIterator.next();
      assertNull(zone.getCreationTimeMillis());
      assertNotNull(zone.getName());
      assertNull(zone.getDnsName());
      assertNotNull(zone.getDescription());
      assertNull(zone.getNameServerSet());
      assertTrue(zone.getNameServers().isEmpty());
      assertNull(zone.getGeneratedId());
      assertFalse(zoneIterator.hasNext());
      zoneIterator = nameServersResult.get().iterateAll().iterator();
      zone = zoneIterator.next();
      assertNull(zone.getCreationTimeMillis());
      assertNotNull(zone.getName());
      assertNull(zone.getDnsName());
assertNull(zone.getDescription()); assertNull(zone.getNameServerSet()); assertFalse(zone.getNameServers().isEmpty()); assertNull(zone.getGeneratedId()); assertFalse(zoneIterator.hasNext()); zoneIterator = nameServerSetResult.get().iterateAll().iterator(); zone = zoneIterator.next(); assertNull(zone.getCreationTimeMillis()); assertNotNull(zone.getName()); assertNull(zone.getDnsName()); assertNull(zone.getDescription()); assertNull(zone.getNameServerSet()); // we cannot set it using google-cloud assertTrue(zone.getNameServers().isEmpty()); assertNull(zone.getGeneratedId()); assertFalse(zoneIterator.hasNext()); // several combined zones = filter(combinationResult.get().iterateAll().iterator()); assertEquals(2, zones.size()); for (Zone current : zones) { assertNull(current.getCreationTimeMillis()); assertNotNull(current.getName()); assertNull(current.getDnsName()); assertNotNull(current.getDescription()); assertNull(current.getNameServerSet()); assertTrue(zone.getNameServers().isEmpty()); assertNotNull(current.getGeneratedId()); } } finally { DNS.delete(ZONE1.getName()); DNS.delete(ZONE_EMPTY_DESCRIPTION.getName()); } } @Test public void testCreateValidZoneBatch() { try { DnsBatch batch = DNS.batch(); DnsBatchResult<Zone> completeZoneResult = batch.createZone(ZONE1); DnsBatchResult<Zone> partialZoneResult = batch.createZone(ZONE_EMPTY_DESCRIPTION); batch.submit(); Zone created = completeZoneResult.get(); assertEquals(ZONE1.getDescription(), created.getDescription()); assertEquals(ZONE1.getDnsName(), created.getDnsName()); assertEquals(ZONE1.getName(), created.getName()); assertNotNull(created.getCreationTimeMillis()); assertNotNull(created.getNameServers()); assertNull(created.getNameServerSet()); assertNotNull(created.getGeneratedId()); Zone retrieved = DNS.getZone(ZONE1.getName()); assertEquals(created, retrieved); created = partialZoneResult.get(); assertEquals(ZONE_EMPTY_DESCRIPTION.getDescription(), created.getDescription()); 
      assertEquals(ZONE_EMPTY_DESCRIPTION.getDnsName(), created.getDnsName());
      assertEquals(ZONE_EMPTY_DESCRIPTION.getName(), created.getName());
      assertNotNull(created.getCreationTimeMillis());
      assertNotNull(created.getNameServers());
      assertNull(created.getNameServerSet());
      assertNotNull(created.getGeneratedId());
      retrieved = DNS.getZone(ZONE_EMPTY_DESCRIPTION.getName());
      assertEquals(created, retrieved);
    } finally {
      DNS.delete(ZONE1.getName());
      DNS.delete(ZONE_EMPTY_DESCRIPTION.getName());
    }
  }

  // Submits batched zone creations that the service must reject (name too long,
  // dns name without a trailing period) and verifies the errors surface on get().
  @Test
  public void testCreateZoneWithErrorsBatch() {
    try {
      DnsBatch batch = DNS.batch();
      DnsBatchResult<Zone> nameErrorResult = batch.createZone(ZONE_NAME_ERROR);
      DnsBatchResult<Zone> noPeriodResult = batch.createZone(ZONE_DNS_NO_PERIOD);
      batch.submit();
      try {
        nameErrorResult.get();
        fail("Zone name is too long. The service returns an error.");
      } catch (DnsException ex) {
        // expected
        assertFalse(ex.isRetryable());
      }
      try {
        noPeriodResult.get();
        fail("Zone name is missing a period. The service returns an error.");
      } catch (DnsException ex) {
        // expected
        assertFalse(ex.isRetryable());
      }
    } finally {
      DNS.delete(ZONE_NAME_ERROR.getName());
      DNS.delete(ZONE_DNS_NO_PERIOD.getName());
    }
  }

  // Creates a zone via batch once per ZoneField projection and verifies exactly
  // the requested field (plus the always-returned name) is populated.
  @Test
  public void testCreateZoneWithOptionsBatch() {
    try {
      DnsBatch batch = DNS.batch();
      DnsBatchResult<Zone> batchResult =
          batch.createZone(ZONE1, Dns.ZoneOption.fields(ZoneField.CREATION_TIME));
      batch.submit();
      Zone created = batchResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNotNull(created.getCreationTimeMillis());
      assertNull(created.getDescription());
      assertNull(created.getDnsName());
      assertTrue(created.getNameServers().isEmpty()); // never returns null
      assertNull(created.getNameServerSet());
      assertNull(created.getGeneratedId());
      created.delete();
      batch = DNS.batch();
      batchResult = batch.createZone(ZONE1, Dns.ZoneOption.fields(ZoneField.DESCRIPTION));
      batch.submit();
      created = batchResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNull(created.getCreationTimeMillis());
      assertEquals(ZONE1.getDescription(), created.getDescription());
      assertNull(created.getDnsName());
      assertTrue(created.getNameServers().isEmpty()); // never returns null
      assertNull(created.getNameServerSet());
      assertNull(created.getGeneratedId());
      created.delete();
      batch = DNS.batch();
      batchResult = batch.createZone(ZONE1, Dns.ZoneOption.fields(ZoneField.DNS_NAME));
      batch.submit();
      created = batchResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNull(created.getCreationTimeMillis());
      assertEquals(ZONE1.getDnsName(), created.getDnsName());
      assertNull(created.getDescription());
      assertTrue(created.getNameServers().isEmpty()); // never returns null
      assertNull(created.getNameServerSet());
      assertNull(created.getGeneratedId());
      created.delete();
      batch = DNS.batch();
      batchResult = batch.createZone(ZONE1, Dns.ZoneOption.fields(ZoneField.NAME));
      batch.submit();
      created = batchResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNull(created.getCreationTimeMillis());
      assertNull(created.getDnsName());
      assertNull(created.getDescription());
      assertTrue(created.getNameServers().isEmpty()); // never returns null
      assertNull(created.getNameServerSet());
      assertNull(created.getGeneratedId());
      created.delete();
      batch = DNS.batch();
      batchResult = batch.createZone(ZONE1, Dns.ZoneOption.fields(ZoneField.NAME_SERVER_SET));
      batch.submit();
      created = batchResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNull(created.getCreationTimeMillis());
      assertNull(created.getDnsName());
      assertNull(created.getDescription());
      assertTrue(created.getNameServers().isEmpty()); // never returns null
      assertNull(created.getNameServerSet()); // we did not set it
      assertNull(created.getGeneratedId());
      created.delete();
      batch = DNS.batch();
      batchResult = batch.createZone(ZONE1, Dns.ZoneOption.fields(ZoneField.NAME_SERVERS));
      batch.submit();
      created = batchResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNull(created.getCreationTimeMillis());
      assertNull(created.getDnsName());
      assertNull(created.getDescription());
      assertFalse(created.getNameServers().isEmpty());
      assertNull(created.getNameServerSet());
      assertNull(created.getGeneratedId());
      created.delete();
      batch = DNS.batch();
      batchResult = batch.createZone(ZONE1, Dns.ZoneOption.fields(ZoneField.ZONE_ID));
      batch.submit();
      created = batchResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNull(created.getCreationTimeMillis());
      assertNull(created.getDnsName());
      assertNull(created.getDescription());
      assertNotNull(created.getNameServers());
      assertTrue(created.getNameServers().isEmpty()); // never returns null
      assertNotNull(created.getGeneratedId());
      created.delete();
      batch = DNS.batch();
      batchResult =
          batch.createZone(
              ZONE1,
              Dns.ZoneOption.fields(
                  ZoneField.ZONE_ID,
                  ZoneField.NAME_SERVERS,
                  ZoneField.NAME_SERVER_SET,
                  ZoneField.DESCRIPTION));
      batch.submit();
      // combination of multiple things
      created = batchResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNull(created.getCreationTimeMillis());
      assertNull(created.getDnsName());
      assertEquals(ZONE1.getDescription(), created.getDescription());
      assertFalse(created.getNameServers().isEmpty());
      assertNull(created.getNameServerSet()); // we did not set it
      assertNotNull(created.getGeneratedId());
    } finally {
      DNS.delete(ZONE1.getName());
    }
  }

  // Fetches the same zone via batch once per ZoneField projection and verifies
  // exactly the requested field (plus the always-returned name) is populated.
  @Test
  public void testGetZoneBatch() {
    try {
      DNS.create(ZONE1, Dns.ZoneOption.fields(ZoneField.NAME));
      DnsBatch batch = DNS.batch();
      DnsBatchResult<Zone> timeResult =
          batch.getZone(ZONE1.getName(), Dns.ZoneOption.fields(ZoneField.CREATION_TIME));
      DnsBatchResult<Zone> descriptionResult =
          batch.getZone(ZONE1.getName(), Dns.ZoneOption.fields(ZoneField.DESCRIPTION));
      DnsBatchResult<Zone> dnsNameResult =
          batch.getZone(ZONE1.getName(), Dns.ZoneOption.fields(ZoneField.DNS_NAME));
      DnsBatchResult<Zone> nameResult =
          batch.getZone(ZONE1.getName(), Dns.ZoneOption.fields(ZoneField.NAME));
      DnsBatchResult<Zone> nameServerSetResult =
          batch.getZone(ZONE1.getName(), Dns.ZoneOption.fields(ZoneField.NAME_SERVER_SET));
      DnsBatchResult<Zone> nameServersResult =
          batch.getZone(ZONE1.getName(), Dns.ZoneOption.fields(ZoneField.NAME_SERVERS));
      DnsBatchResult<Zone> idResult =
          batch.getZone(ZONE1.getName(), Dns.ZoneOption.fields(ZoneField.ZONE_ID));
      DnsBatchResult<Zone> combinationResult =
          batch.getZone(
              ZONE1.getName(),
              Dns.ZoneOption.fields(
                  ZoneField.ZONE_ID,
                  ZoneField.NAME_SERVERS,
                  ZoneField.NAME_SERVER_SET,
                  ZoneField.DESCRIPTION));
      batch.submit();
      Zone created = timeResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNotNull(created.getCreationTimeMillis());
      assertNull(created.getDescription());
      assertNull(created.getDnsName());
      assertTrue(created.getNameServers().isEmpty()); // never returns null
      assertNull(created.getNameServerSet());
      assertNull(created.getGeneratedId());
      created = descriptionResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNull(created.getCreationTimeMillis());
      assertEquals(ZONE1.getDescription(), created.getDescription());
      assertNull(created.getDnsName());
      assertTrue(created.getNameServers().isEmpty()); // never returns null
      assertNull(created.getNameServerSet());
      assertNull(created.getGeneratedId());
      created = dnsNameResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNull(created.getCreationTimeMillis());
      assertEquals(ZONE1.getDnsName(), created.getDnsName());
      assertNull(created.getDescription());
      assertTrue(created.getNameServers().isEmpty()); // never returns null
      assertNull(created.getNameServerSet());
      assertNull(created.getGeneratedId());
      created = nameResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNull(created.getCreationTimeMillis());
      assertNull(created.getDnsName());
      assertNull(created.getDescription());
      assertTrue(created.getNameServers().isEmpty()); // never returns null
      assertNull(created.getNameServerSet());
      assertNull(created.getGeneratedId());
      created = nameServerSetResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNull(created.getCreationTimeMillis());
      assertNull(created.getDnsName());
      assertNull(created.getDescription());
      assertTrue(created.getNameServers().isEmpty()); // never returns null
      assertNull(created.getNameServerSet()); // we did not set it
      assertNull(created.getGeneratedId());
      created = nameServersResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNull(created.getCreationTimeMillis());
      assertNull(created.getDnsName());
      assertNull(created.getDescription());
      assertFalse(created.getNameServers().isEmpty());
      assertNull(created.getNameServerSet());
      assertNull(created.getGeneratedId());
      created = idResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNull(created.getCreationTimeMillis());
      assertNull(created.getDnsName());
      assertNull(created.getDescription());
      assertNotNull(created.getNameServers());
      assertTrue(created.getNameServers().isEmpty()); // never returns null
      assertNotNull(created.getGeneratedId());
      // combination of multiple things
      created = combinationResult.get();
      assertEquals(ZONE1.getName(), created.getName()); // always returned
      assertNull(created.getCreationTimeMillis());
      assertNull(created.getDnsName());
      assertEquals(ZONE1.getDescription(), created.getDescription());
      assertFalse(created.getNameServers().isEmpty());
      assertNull(created.getNameServerSet()); // we did not set it
      assertNotNull(created.getGeneratedId());
    } finally {
      DNS.delete(ZONE1.getName());
    }
  }

  // Deletes a zone through a batch and verifies it is gone afterwards.
  @Test
  public void testDeleteZoneBatch() {
    try {
      Zone created = DNS.create(ZONE1);
      assertEquals(created, DNS.getZone(ZONE1.getName()));
      DnsBatch batch = DNS.batch();
      DnsBatchResult<Boolean> result = batch.deleteZone(ZONE1.getName());
      batch.submit();
      assertNull(DNS.getZone(ZONE1.getName()));
      assertTrue(result.get());
    } finally {
      DNS.delete(ZONE1.getName());
    }
  }

  // Batched getProject with field projections: QUOTA is present only when requested.
  @Test
  public void testGetProjectBatch() {
    // fetches all fields
    DnsBatch batch = DNS.batch();
    DnsBatchResult<ProjectInfo> result = batch.getProject();
    DnsBatchResult<ProjectInfo> resultQuota =
        batch.getProject(Dns.ProjectOption.fields(ProjectField.QUOTA));
    DnsBatchResult<ProjectInfo> resultId =
        batch.getProject(Dns.ProjectOption.fields(ProjectField.PROJECT_ID));
    DnsBatchResult<ProjectInfo> resultNumber =
        batch.getProject(Dns.ProjectOption.fields(ProjectField.PROJECT_NUMBER));
    DnsBatchResult<ProjectInfo> resultCombination =
        batch.getProject(
            Dns.ProjectOption.fields(
                ProjectField.PROJECT_NUMBER, ProjectField.QUOTA, ProjectField.PROJECT_ID));
    batch.submit();
    assertNotNull(result.get().getQuota());
    assertNotNull(resultQuota.get().getQuota());
    assertNull(resultId.get().getQuota());
    assertNull(resultNumber.get().getQuota());
    assertNotNull(resultCombination.get().getQuota());
  }

  // Applies change requests through batches, once per ChangeRequestField
  // projection, verifying which fields the service echoes back.
  @Test
  public void testCreateChangeBatch() {
    try {
      DNS.create(ZONE1, Dns.ZoneOption.fields(ZoneField.NAME));
      DnsBatch batch = DNS.batch();
      DnsBatchResult<ChangeRequest> result =
          batch.applyChangeRequest(ZONE1.getName(), CHANGE_ADD_ZONE1);
      batch.submit();
      ChangeRequest created = result.get();
      assertEquals(CHANGE_ADD_ZONE1.getAdditions(), created.getAdditions());
      assertNotNull(created.getStartTimeMillis());
      assertTrue(created.getDeletions().isEmpty());
      assertNotNull(created.getGeneratedId());
      assertTrue(
          ImmutableList.of(ChangeRequest.Status.PENDING, ChangeRequest.Status.DONE)
              .contains(created.status()));
      assertEqChangesIgnoreStatus(created, DNS.getChangeRequest(ZONE1.getName(), "1"));
      waitForChangeToComplete(created);
      created = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_DELETE_ZONE1);
      waitForChangeToComplete(created);
      // with options
      batch = DNS.batch();
      result =
          batch.applyChangeRequest(
              ZONE1.getName(),
              CHANGE_ADD_ZONE1,
              Dns.ChangeRequestOption.fields(ChangeRequestField.ID));
      batch.submit();
      created = result.get();
      assertTrue(created.getAdditions().isEmpty());
      assertNull(created.getStartTimeMillis());
      assertTrue(created.getDeletions().isEmpty());
      assertNotNull(created.getGeneratedId());
      assertNull(created.status());
      waitForChangeToComplete(created);
      created = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_DELETE_ZONE1);
      waitForChangeToComplete(created);
      batch = DNS.batch();
      result =
          batch.applyChangeRequest(
              ZONE1.getName(),
              CHANGE_ADD_ZONE1,
              Dns.ChangeRequestOption.fields(ChangeRequestField.STATUS));
      batch.submit();
      created = result.get();
      assertTrue(created.getAdditions().isEmpty());
      assertNull(created.getStartTimeMillis());
      assertTrue(created.getDeletions().isEmpty());
      assertNotNull(created.getGeneratedId());
      assertNotNull(created.status());
      waitForChangeToComplete(created);
      created = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_DELETE_ZONE1);
      waitForChangeToComplete(created);
      batch = DNS.batch();
      result =
          batch.applyChangeRequest(
              ZONE1.getName(),
              CHANGE_ADD_ZONE1,
              Dns.ChangeRequestOption.fields(ChangeRequestField.START_TIME));
      batch.submit();
      created = result.get();
      assertTrue(created.getAdditions().isEmpty());
      assertNotNull(created.getStartTimeMillis());
      assertTrue(created.getDeletions().isEmpty());
      assertNotNull(created.getGeneratedId());
      assertNull(created.status());
      waitForChangeToComplete(created);
      created = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_DELETE_ZONE1);
      waitForChangeToComplete(created);
      batch = DNS.batch();
      result =
          batch.applyChangeRequest(
              ZONE1.getName(),
              CHANGE_ADD_ZONE1,
              Dns.ChangeRequestOption.fields(ChangeRequestField.ADDITIONS));
      batch.submit();
      created = result.get();
      assertEquals(CHANGE_ADD_ZONE1.getAdditions(), created.getAdditions());
      assertNull(created.getStartTimeMillis());
      assertTrue(created.getDeletions().isEmpty());
      assertNotNull(created.getGeneratedId());
      assertNull(created.status());
      // finishes with delete otherwise we cannot delete the zone
      waitForChangeToComplete(created);
      batch = DNS.batch();
      result =
          batch.applyChangeRequest(
              ZONE1.getName(),
              CHANGE_DELETE_ZONE1,
              Dns.ChangeRequestOption.fields(ChangeRequestField.DELETIONS));
      batch.submit();
      created = result.get();
      waitForChangeToComplete(created);
      assertEquals(CHANGE_DELETE_ZONE1.getDeletions(), created.getDeletions());
      assertNull(created.getStartTimeMillis());
      assertTrue(created.getAdditions().isEmpty());
      assertNotNull(created.getGeneratedId());
      assertNull(created.status());
      waitForChangeToComplete(created);
    } finally {
      clear();
    }
  }

  // Retrieves an applied change request through batches, once per
  // ChangeRequestField projection, and checks the populated fields.
  @Test
  public void testGetChangeBatch() {
    try {
      Zone zone = DNS.create(ZONE1, Dns.ZoneOption.fields(ZoneField.NAME));
      ChangeRequest created = zone.applyChangeRequest(CHANGE_ADD_ZONE1);
      waitForChangeToComplete(zone.getName(), created.getGeneratedId());
      DnsBatch batch = DNS.batch();
      DnsBatchResult<ChangeRequest> completeResult =
          batch.getChangeRequest(zone.getName(), created.getGeneratedId());
      DnsBatchResult<ChangeRequest> idResult =
          batch.getChangeRequest(
              zone.getName(),
              created.getGeneratedId(),
              Dns.ChangeRequestOption.fields(ChangeRequestField.ID));
      DnsBatchResult<ChangeRequest> statusResult =
          batch.getChangeRequest(
              zone.getName(),
              created.getGeneratedId(),
              Dns.ChangeRequestOption.fields(ChangeRequestField.STATUS));
      DnsBatchResult<ChangeRequest> timeResult =
          batch.getChangeRequest(
              zone.getName(),
              created.getGeneratedId(),
              Dns.ChangeRequestOption.fields(ChangeRequestField.START_TIME));
      DnsBatchResult<ChangeRequest> additionsResult =
          batch.getChangeRequest(
              zone.getName(),
              created.getGeneratedId(),
              Dns.ChangeRequestOption.fields(ChangeRequestField.ADDITIONS));
      batch.submit();
      assertEqChangesIgnoreStatus(created, completeResult.get());
      // with options
      ChangeRequest retrieved = idResult.get();
      assertEquals(created.getGeneratedId(), retrieved.getGeneratedId());
      assertEquals(0, retrieved.getAdditions().size());
      assertEquals(0, retrieved.getDeletions().size());
      assertNull(retrieved.getStartTimeMillis());
      assertNull(retrieved.status());
      retrieved = statusResult.get();
      assertEquals(created.getGeneratedId(), retrieved.getGeneratedId());
      assertEquals(0, retrieved.getAdditions().size());
      assertEquals(0, retrieved.getDeletions().size());
      assertNull(retrieved.getStartTimeMillis());
      assertEquals(ChangeRequestInfo.Status.DONE, retrieved.status());
      retrieved = timeResult.get();
      assertEquals(created.getGeneratedId(), retrieved.getGeneratedId());
      assertEquals(0, retrieved.getAdditions().size());
      assertEquals(0, retrieved.getDeletions().size());
      assertEquals(created.getStartTimeMillis(), retrieved.getStartTimeMillis());
      assertNull(retrieved.status());
      retrieved = additionsResult.get();
      assertEquals(created.getGeneratedId(), retrieved.getGeneratedId());
      assertEquals(2, retrieved.getAdditions().size());
      assertTrue(retrieved.getAdditions().contains(A_RECORD_ZONE1));
      assertTrue(retrieved.getAdditions().contains(AAAA_RECORD_ZONE1));
      assertEquals(0, retrieved.getDeletions().size());
      assertNull(retrieved.getStartTimeMillis());
      assertNull(retrieved.status());
      // finishes with delete otherwise we cannot delete the zone
      created =
          zone.applyChangeRequest(
              CHANGE_DELETE_ZONE1, Dns.ChangeRequestOption.fields(ChangeRequestField.DELETIONS));
      batch = DNS.batch();
      DnsBatchResult<ChangeRequest> deletionsResult =
          batch.getChangeRequest(
              zone.getName(),
              created.getGeneratedId(),
              Dns.ChangeRequestOption.fields(ChangeRequestField.DELETIONS));
      batch.submit();
      retrieved = deletionsResult.get();
      assertEquals(created.getGeneratedId(), retrieved.getGeneratedId());
      assertEquals(0, retrieved.getAdditions().size());
      assertEquals(2, retrieved.getDeletions().size());
      assertTrue(retrieved.getDeletions().contains(AAAA_RECORD_ZONE1));
      assertTrue(retrieved.getDeletions().contains(A_RECORD_ZONE1));
      assertNull(retrieved.getStartTimeMillis());
      assertNull(retrieved.status());
      waitForChangeToComplete(zone.getName(), created.getGeneratedId());
    } finally {
      clear();
    }
  }

  // Lists change requests through batches: missing zone (404), bad page sizes,
  // sort order in both directions, and per-field projections.
  @Test
  public void testListChangesBatch() {
    try {
DnsBatch batch = DNS.batch(); DnsBatchResult<Page<ChangeRequest>> result = batch.listChangeRequests(ZONE1.getName()); batch.submit(); try { result.get(); fail("Zone does not exist yet"); } catch (DnsException ex) { // expected assertEquals(404, ex.getCode()); assertFalse(ex.isRetryable()); } // zone exists but has no changes DNS.create(ZONE1); batch = DNS.batch(); result = batch.listChangeRequests(ZONE1.getName()); batch.submit(); // default change creating SOA and NS assertEquals(1, Iterables.size(result.get().getValues())); // zone has changes ChangeRequest change = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_ADD_ZONE1); waitForChangeToComplete(ZONE1.getName(), change.getGeneratedId()); change = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_DELETE_ZONE1); waitForChangeToComplete(ZONE1.getName(), change.getGeneratedId()); batch = DNS.batch(); result = batch.listChangeRequests(ZONE1.getName()); DnsBatchResult<Page<ChangeRequest>> errorPageSize = batch.listChangeRequests(ZONE1.getName(), Dns.ChangeRequestListOption.pageSize(0)); DnsBatchResult<Page<ChangeRequest>> errorPageNegative = batch.listChangeRequests(ZONE1.getName(), Dns.ChangeRequestListOption.pageSize(-1)); DnsBatchResult<Page<ChangeRequest>> resultAscending = batch.listChangeRequests( ZONE1.getName(), Dns.ChangeRequestListOption.sortOrder(Dns.SortingOrder.ASCENDING)); DnsBatchResult<Page<ChangeRequest>> resultDescending = batch.listChangeRequests( ZONE1.getName(), Dns.ChangeRequestListOption.sortOrder(Dns.SortingOrder.DESCENDING)); DnsBatchResult<Page<ChangeRequest>> resultAdditions = batch.listChangeRequests( ZONE1.getName(), Dns.ChangeRequestListOption.sortOrder(Dns.SortingOrder.ASCENDING), Dns.ChangeRequestListOption.fields(ChangeRequestField.ADDITIONS)); DnsBatchResult<Page<ChangeRequest>> resultDeletions = batch.listChangeRequests( ZONE1.getName(), Dns.ChangeRequestListOption.sortOrder(Dns.SortingOrder.ASCENDING), Dns.ChangeRequestListOption.fields(ChangeRequestField.DELETIONS)); 
DnsBatchResult<Page<ChangeRequest>> resultId = batch.listChangeRequests( ZONE1.getName(), Dns.ChangeRequestListOption.sortOrder(Dns.SortingOrder.ASCENDING), Dns.ChangeRequestListOption.fields(ChangeRequestField.ID)); DnsBatchResult<Page<ChangeRequest>> resultTime = batch.listChangeRequests( ZONE1.getName(), Dns.ChangeRequestListOption.sortOrder(Dns.SortingOrder.ASCENDING), Dns.ChangeRequestListOption.fields(ChangeRequestField.START_TIME)); DnsBatchResult<Page<ChangeRequest>> resultStatus = batch.listChangeRequests( ZONE1.getName(), Dns.ChangeRequestListOption.sortOrder(Dns.SortingOrder.ASCENDING), Dns.ChangeRequestListOption.fields(ChangeRequestField.STATUS)); batch.submit(); assertEquals(3, Iterables.size(result.get().getValues())); // error in options try { errorPageSize.get(); fail(); } catch (DnsException ex) { // expected assertEquals(400, ex.getCode()); assertFalse(ex.isRetryable()); } try { errorPageNegative.get(); fail(); } catch (DnsException ex) { // expected assertEquals(400, ex.getCode()); assertFalse(ex.isRetryable()); } // sorting order ImmutableList<ChangeRequest> ascending = ImmutableList.copyOf(resultAscending.get().iterateAll()); ImmutableList<ChangeRequest> descending = ImmutableList.copyOf(resultDescending.get().iterateAll()); int size = 3; assertEquals(size, descending.size()); assertEquals(size, ascending.size()); for (int i = 0; i < size; i++) { assertEquals(descending.get(i), ascending.get(size - i - 1)); } // field options change = Iterables.get(resultAdditions.get().getValues(), 1); assertEquals(CHANGE_ADD_ZONE1.getAdditions(), change.getAdditions()); assertTrue(change.getDeletions().isEmpty()); assertNotNull(change.getGeneratedId()); assertNull(change.getStartTimeMillis()); assertNull(change.status()); change = Iterables.get(resultDeletions.get().getValues(), 2); assertTrue(change.getAdditions().isEmpty()); assertNotNull(change.getDeletions()); assertNotNull(change.getGeneratedId()); assertNull(change.getStartTimeMillis()); 
assertNull(change.status()); change = Iterables.get(resultId.get().getValues(), 1); assertTrue(change.getAdditions().isEmpty()); assertTrue(change.getDeletions().isEmpty()); assertNotNull(change.getGeneratedId()); assertNull(change.getStartTimeMillis()); assertNull(change.status()); change = Iterables.get(resultTime.get().getValues(), 1); assertTrue(change.getAdditions().isEmpty()); assertTrue(change.getDeletions().isEmpty()); assertNotNull(change.getGeneratedId()); assertNotNull(change.getStartTimeMillis()); assertNull(change.status()); change = Iterables.get(resultStatus.get().getValues(), 1); assertTrue(change.getAdditions().isEmpty()); assertTrue(change.getDeletions().isEmpty()); assertNotNull(change.getGeneratedId()); assertNull(change.getStartTimeMillis()); assertEquals(ChangeRequest.Status.DONE, change.status()); } finally { clear(); } } @Test public void testListDnsRecordSetsBatch() { try { Zone zone = DNS.create(ZONE1); DnsBatch batch = DNS.batch(); DnsBatchResult<Page<RecordSet>> result = batch.listRecordSets(zone.getName()); batch.submit(); ImmutableList<RecordSet> recordSets = ImmutableList.copyOf(result.get().iterateAll()); assertEquals(2, recordSets.size()); ImmutableList<RecordSet.Type> defaultRecords = ImmutableList.of(RecordSet.Type.NS, RecordSet.Type.SOA); for (RecordSet recordSet : recordSets) { assertTrue(defaultRecords.contains(recordSet.getType())); } // field options batch = DNS.batch(); DnsBatchResult<Page<RecordSet>> ttlResult = batch.listRecordSets(zone.getName(), Dns.RecordSetListOption.fields(RecordSetField.TTL)); DnsBatchResult<Page<RecordSet>> nameResult = batch.listRecordSets(zone.getName(), Dns.RecordSetListOption.fields(RecordSetField.NAME)); DnsBatchResult<Page<RecordSet>> recordsResult = batch.listRecordSets( zone.getName(), Dns.RecordSetListOption.fields(RecordSetField.DNS_RECORDS)); DnsBatchResult<Page<RecordSet>> pageSizeResult = batch.listRecordSets( zone.getName(), Dns.RecordSetListOption.fields(RecordSetField.TYPE), 
Dns.RecordSetListOption.pageSize(1)); batch.submit(); Iterator<RecordSet> recordSetIterator = ttlResult.get().iterateAll().iterator(); int counter = 0; while (recordSetIterator.hasNext()) { RecordSet recordSet = recordSetIterator.next(); assertEquals(recordSets.get(counter).getTtl(), recordSet.getTtl()); assertEquals(recordSets.get(counter).getName(), recordSet.getName()); assertEquals(recordSets.get(counter).getType(), recordSet.getType()); assertTrue(recordSet.getRecords().isEmpty()); counter++; } assertEquals(2, counter); recordSetIterator = nameResult.get().iterateAll().iterator(); counter = 0; while (recordSetIterator.hasNext()) { RecordSet recordSet = recordSetIterator.next(); assertEquals(recordSets.get(counter).getName(), recordSet.getName()); assertEquals(recordSets.get(counter).getType(), recordSet.getType()); assertTrue(recordSet.getRecords().isEmpty()); assertNull(recordSet.getTtl()); counter++; } assertEquals(2, counter); recordSetIterator = recordsResult.get().iterateAll().iterator(); counter = 0; while (recordSetIterator.hasNext()) { RecordSet recordSet = recordSetIterator.next(); assertEquals(recordSets.get(counter).getRecords(), recordSet.getRecords()); assertEquals(recordSets.get(counter).getName(), recordSet.getName()); assertEquals(recordSets.get(counter).getType(), recordSet.getType()); assertNull(recordSet.getTtl()); counter++; } assertEquals(2, counter); recordSetIterator = pageSizeResult.get().iterateAll().iterator(); // also test paging counter = 0; while (recordSetIterator.hasNext()) { RecordSet recordSet = recordSetIterator.next(); assertEquals(recordSets.get(counter).getType(), recordSet.getType()); assertEquals(recordSets.get(counter).getName(), recordSet.getName()); assertTrue(recordSet.getRecords().isEmpty()); assertNull(recordSet.getTtl()); counter++; } assertEquals(2, counter); // test page size Page<RecordSet> recordSetPage = pageSizeResult.get(); assertEquals(1, ImmutableList.copyOf(recordSetPage.getValues().iterator()).size()); 
// test name filter ChangeRequest change = DNS.applyChangeRequest(ZONE1.getName(), CHANGE_ADD_ZONE1); waitForChangeToComplete(ZONE1.getName(), change.getGeneratedId()); batch = DNS.batch(); result = batch.listRecordSets( ZONE1.getName(), Dns.RecordSetListOption.dnsName(A_RECORD_ZONE1.getName())); batch.submit(); recordSetIterator = result.get().iterateAll().iterator(); counter = 0; while (recordSetIterator.hasNext()) { RecordSet recordSet = recordSetIterator.next(); assertTrue( ImmutableList.of(A_RECORD_ZONE1.getType(), AAAA_RECORD_ZONE1.getType()) .contains(recordSet.getType())); counter++; } assertEquals(2, counter); // test type filter batch = DNS.batch(); result = batch.listRecordSets( ZONE1.getName(), Dns.RecordSetListOption.dnsName(A_RECORD_ZONE1.getName()), Dns.RecordSetListOption.type(A_RECORD_ZONE1.getType())); batch.submit(); recordSetIterator = result.get().iterateAll().iterator(); counter = 0; while (recordSetIterator.hasNext()) { RecordSet recordSet = recordSetIterator.next(); assertEquals(A_RECORD_ZONE1, recordSet); counter++; } assertEquals(1, counter); batch = DNS.batch(); DnsBatchResult<Page<RecordSet>> noNameError = batch.listRecordSets( ZONE1.getName(), Dns.RecordSetListOption.type(A_RECORD_ZONE1.getType())); DnsBatchResult<Page<RecordSet>> zeroSizeError = batch.listRecordSets(ZONE1.getName(), Dns.RecordSetListOption.pageSize(0)); DnsBatchResult<Page<RecordSet>> negativeSizeError = batch.listRecordSets(ZONE1.getName(), Dns.RecordSetListOption.pageSize(-1)); batch.submit(); // check wrong arguments try { // name is not set noNameError.get(); fail(); } catch (DnsException ex) { // expected assertEquals(400, ex.getCode()); assertFalse(ex.isRetryable()); } try { zeroSizeError.get(); fail(); } catch (DnsException ex) { // expected assertEquals(400, ex.getCode()); assertFalse(ex.isRetryable()); } try { negativeSizeError.get(); fail(); } catch (DnsException ex) { // expected assertEquals(400, ex.getCode()); assertFalse(ex.isRetryable()); } 
// Tail of the preceding test: block until the change request submitted above
// finishes, so the finally-clause teardown does not race the pending change.
waitForChangeToComplete(ZONE1.getName(), change.getGeneratedId());
    } finally {
      clear();
    }
  }

  /**
   * Smoke test for mixing different request types in one {@link DnsBatch}.
   * Only verifies that the combination submits and yields results; the
   * per-operation semantics are validated by the other test methods.
   */
  @Test
  public void testBatchCombined() {
    // only testing that the combination is possible
    // the results are validated in the other test methods
    try {
      DNS.create(ZONE1);
      DnsBatch batch = DNS.batch();
      DnsBatchResult<Zone> zoneResult = batch.getZone(ZONE_NAME1);
      DnsBatchResult<ChangeRequest> changeRequestResult = batch.getChangeRequest(ZONE_NAME1, "0");
      DnsBatchResult<Page<RecordSet>> pageResult = batch.listRecordSets(ZONE_NAME1);
      DnsBatchResult<ProjectInfo> projectResult = batch.getProject();
      // Results must not be readable before the batch is submitted.
      assertFalse(zoneResult.completed());
      try {
        zoneResult.get();
        fail("this should be submitted first");
      } catch (IllegalStateException ex) {
        // expected
      }
      batch.submit();
      assertNotNull(zoneResult.get().getCreationTimeMillis());
      assertEquals(ZONE1.getDnsName(), zoneResult.get().getDnsName());
      assertEquals(ZONE1.getDescription(), zoneResult.get().getDescription());
      assertFalse(zoneResult.get().getNameServers().isEmpty());
      assertNull(zoneResult.get().getNameServerSet()); // we did not set it
      assertNotNull(zoneResult.get().getGeneratedId());
      assertNotNull(projectResult.get().getQuota());
      // The freshly created zone carries only its two default record sets (NS + SOA).
      assertEquals(2, Iterables.size(pageResult.get().getValues()));
      assertNotNull(changeRequestResult.get());
    } finally {
      DNS.delete(ZONE1.getName());
    }
  }
}
vam-google/google-cloud-java
google-cloud-clients/google-cloud-dns/src/test/java/com/google/cloud/dns/it/ITDnsTest.java
Java
apache-2.0
88,484
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.reef.io.network.group.impl.driver;

import org.apache.reef.io.network.group.api.driver.TaskNode;
import org.apache.reef.io.network.group.impl.GroupCommunicationMessage;
import org.apache.reef.io.network.group.impl.utils.Utils;
import org.apache.reef.io.network.proto.ReefNetworkGroupCommProtos;
import org.apache.reef.tang.annotations.Name;
import org.apache.reef.wake.EStage;
import org.apache.reef.wake.EventHandler;

import java.util.List;
import java.util.logging.Logger;

/**
 * Waits for a set of task nodes to finish a topology update round and then
 * notifies the destination task that the update is complete.
 */
public class TopologyUpdateWaitHandler implements EventHandler<List<TaskNode>> {

  private static final Logger LOG = Logger.getLogger(TopologyUpdateWaitHandler.class.getName());
  private final EStage<GroupCommunicationMessage> senderStage;
  private final Class<? extends Name<String>> groupName;
  private final Class<? extends Name<String>> operName;
  private final String driverId;
  private final int driverVersion;
  private final String dstId;
  private final int dstVersion;
  private final String qualifiedName;

  /**
   * The handler will wait for all nodes to acquire topoLock
   * and send TopologySetup msg. Then it will send TopologyUpdated
   * msg. However, any local topology changes are not in effect
   * till driver sends TopologySetup once statusMap is emptied.
   * The operations in the tasks that have topology changes will
   * wait for this. However other tasks that do not have any changes
   * will continue their regular operation.
   *
   * @param senderStage   stage used to send the TopologyUpdated message
   * @param groupName     communication group the update belongs to
   * @param operName      operator the update belongs to
   * @param driverId      id of the driver (message source)
   * @param driverVersion version of the driver (message source)
   * @param dstId         id of the task to notify once all nodes are ready
   * @param dstVersion    version of the destination task
   * @param qualifiedName log prefix identifying this group/operator
   */
  public TopologyUpdateWaitHandler(final EStage<GroupCommunicationMessage> senderStage,
                                   final Class<? extends Name<String>> groupName,
                                   final Class<? extends Name<String>> operName,
                                   final String driverId, final int driverVersion,
                                   final String dstId, final int dstVersion,
                                   final String qualifiedName) {
    super();
    this.senderStage = senderStage;
    this.groupName = groupName;
    this.operName = operName;
    this.driverId = driverId;
    this.driverVersion = driverVersion;
    this.dstId = dstId;
    this.dstVersion = dstVersion;
    this.qualifiedName = qualifiedName;
  }

  @Override
  public void onNext(final List<TaskNode> nodes) {
    LOG.entering("TopologyUpdateWaitHandler", "onNext", new Object[]{qualifiedName, nodes});

    for (final TaskNode node : nodes) {
      // BUGFIX: log message said "TopologyUdate" (typo).
      LOG.fine(qualifiedName + "Waiting for " + node + " to enter TopologyUpdate phase");
      // Blocks until the node either reaches TopologySetup or fails.
      node.waitForTopologySetupOrFailure();
      if (node.isRunning()) {
        LOG.fine(qualifiedName + node + " is in TopologyUpdate phase");
      } else {
        LOG.fine(qualifiedName + node + " has failed");
      }
    }
    LOG.finest(qualifiedName + "NodeTopologyUpdateWaitStage All to be updated nodes "
        + "have received TopologySetup");
    LOG.fine(qualifiedName
        + "All affected parts of the topology are in TopologyUpdate phase. Will send a note to ("
        + dstId + "," + dstVersion + ")");

    // Tell the destination task that the whole affected sub-topology is ready.
    senderStage.onNext(Utils.bldVersionedGCM(groupName, operName,
        ReefNetworkGroupCommProtos.GroupCommMessage.Type.TopologyUpdated, driverId, driverVersion,
        dstId, dstVersion, Utils.EMPTY_BYTE_ARR));
    LOG.exiting("TopologyUpdateWaitHandler", "onNext", qualifiedName);
  }
}
taegeonum/incubator-reef
lang/java/reef-io/src/main/java/org/apache/reef/io/network/group/impl/driver/TopologyUpdateWaitHandler.java
Java
apache-2.0
4,136
//
// This file was generated by the JavaTM Architecture for XML Binding (JAXB)
// Reference Implementation, v2.2.5-2.
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.02.19 at 02:35:56 PM CET
//

package org.onvif.ver10.media.wsdl;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;

/**
 * <p>
 * Java class for an anonymous complex type.
 *
 * <p>
 * The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType>
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element name="Name" type="{http://www.onvif.org/ver10/schema}Name"/>
 *         &lt;element name="Token" type="{http://www.onvif.org/ver10/schema}ReferenceToken" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = { "name", "token" })
@XmlRootElement(name = "CreateProfile")
public class CreateProfile {

    // Required profile name; mapped to the schema element "Name".
    @XmlElement(name = "Name", required = true)
    protected String name;
    // Optional reference token; mapped to the schema element "Token".
    @XmlElement(name = "Token")
    protected String token;

    /**
     * Gets the value of the name property.
     *
     * @return possible object is {@link String }
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the value of the name property.
     *
     * @param value
     *            allowed object is {@link String }
     */
    public void setName(String value) {
        this.name = value;
    }

    /**
     * Gets the value of the token property.
     *
     * @return possible object is {@link String }
     */
    public String getToken() {
        return token;
    }

    /**
     * Sets the value of the token property.
     *
     * @param value
     *            allowed object is {@link String }
     */
    public void setToken(String value) {
        this.token = value;
    }
}
milg0/onvif-java-lib
src/org/onvif/ver10/media/wsdl/CreateProfile.java
Java
apache-2.0
2,297
import java.util.Scanner; /** * @author Oleg Cherednik * @since 04.08.2018 */ public class Solution { static class SinglyLinkedListNode { public int data; public SinglyLinkedListNode next; public SinglyLinkedListNode(int nodeData) { this.data = nodeData; this.next = null; } } static class SinglyLinkedList { public SinglyLinkedListNode head; public SinglyLinkedListNode tail; public SinglyLinkedList() { this.head = null; this.tail = null; } public void insertNode(int nodeData) { SinglyLinkedListNode node = new SinglyLinkedListNode(nodeData); if (this.head == null) { this.head = node; } else { this.tail.next = node; } this.tail = node; } } static int findMergeNode(SinglyLinkedListNode head1, SinglyLinkedListNode head2) { SinglyLinkedListNode it1 = head1; SinglyLinkedListNode it2 = head2; while (it1 != it2) { it1 = it1.next != null ? it1.next : head2; it2 = it2.next != null ? it2.next : head1; } return it1.data; } private static final Scanner scanner = new Scanner(System.in); public static void main(String[] args) { int tests = scanner.nextInt(); scanner.skip("\r\n|([\n\r\u2028\u2029\u0085])?"); for (int testsItr = 0; testsItr < tests; testsItr++) { int index = scanner.nextInt(); scanner.skip("\r\n|([\n\r\u2028\u2029\u0085])?"); SinglyLinkedList llist1 = new SinglyLinkedList(); int llist1Count = scanner.nextInt(); scanner.skip("\r\n|([\n\r\u2028\u2029\u0085])?"); for (int i = 0; i < llist1Count; i++) { int llistItem = scanner.nextInt(); scanner.skip("\r\n|([\n\r\u2028\u2029\u0085])?"); llist1.insertNode(llistItem); } SinglyLinkedList llist2 = new SinglyLinkedList(); int llist2Count = scanner.nextInt(); scanner.skip("\r\n|([\n\r\u2028\u2029\u0085])?"); for (int i = 0; i < llist2Count; i++) { int llist2Item = scanner.nextInt(); scanner.skip("\r\n|([\n\r\u2028\u2029\u0085])?"); llist2.insertNode(llist2Item); } SinglyLinkedListNode ptr1 = llist1.head; SinglyLinkedListNode ptr2 = llist2.head; for (int i = 0; i < llist1Count; i++) { if (i < index) { ptr1 = ptr1.next; } } for 
(int i = 0; i < llist2Count; i++) { if (i != llist2Count - 1) { ptr2 = ptr2.next; } } ptr2.next = ptr1; int result = findMergeNode(llist1.head, llist2.head); System.out.println(String.valueOf(result)); } scanner.close(); } }
oleg-cherednik/hackerrank
Tutorials/Interview Preparation Kit/Linked Lists/Find Merge Point of Two Lists/Solution.java
Java
apache-2.0
3,072
package com.myit.server.service.admin.impl;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.myit.common.beans.PageQueryParam;
import com.myit.common.beans.PageQueryResult;
import com.myit.intf.bean.admin.Menu;
import com.myit.intf.service.admin.MenuService;
import com.myit.server.dao.admin.MenuDao;

/**
 * Menu service backed by {@link MenuDao}.
 *
 * NOTE(review): the conversions between the DAO model type
 * (com.myit.server.model.admin.Menu) and the service bean type
 * (com.myit.intf.bean.admin.Menu) are still unimplemented throughout this
 * class, so most query methods currently return null results even when the
 * DAO finds data. The TODO markers below flag every such gap; they cannot be
 * filled in without the field definitions of both Menu types.
 */
@Service("menuService")
public class MenuServiceImpl implements MenuService {

    private Logger logger = Logger.getLogger(this.getClass());

    @Autowired
    private MenuDao menuDao;

    /**
     * Looks a menu up by id.
     *
     * @param id primary key of the menu
     * @return currently always null — see the conversion TODO below
     */
    public Menu findMenuById(Long id) throws Exception {
        logger.info("findMenuById in.");

        Menu menu = null;

        if (logger.isDebugEnabled()) {
            logger.debug("parameters: id=" + id);
        }

        // 调用dao查询板块
        logger.info("invoke plateDao.findMenuById");
        com.myit.server.model.admin.Menu plate = menuDao.findMenuById(id);
        // TODO(review): map `plate` onto the returned service bean; without
        // this the DAO result is discarded and the method returns null.

        logger.info("findMenuById out.");
        return menu;
    }

    /**
     * Lists every menu.
     *
     * @return currently always null — see the conversion TODO below
     */
    public List<Menu> findAllMenus() throws Exception {
        logger.info("findAllMenus in.");

        List<Menu> menus = null;

        List<com.myit.server.model.admin.Menu> plates = menuDao.findAllMenus();
        // TODO(review): convert `plates` to List<Menu>; the DAO result is
        // currently discarded.

        logger.info("findAllMenus out.");
        return menus;
    }

    /**
     * Counts menus matching the given filter.
     *
     * @param menu filter bean (currently ignored — conversion TODO below)
     */
    public int getMenusCount(Menu menu) throws Exception {
        logger.info("getMenusCount in.");

        // 调用dao查询板块记录数
        // TODO(review): convert `menu` into the DAO model type instead of
        // passing null, otherwise the filter is ignored.
        com.myit.server.model.admin.Menu menuBean = null;
        int platesCount = menuDao.getMenusCount(menuBean);

        logger.info("getMenusCount out.");
        return platesCount;
    }

    /**
     * Pages through menus.
     *
     * @param pageQueryParam page number / size plus an optional filter bean
     * @return currently always null — see the conversion TODO below
     */
    public PageQueryResult<Menu> findMenus(PageQueryParam<Menu> pageQueryParam) throws Exception {
        logger.info("findMenus in.");

        if (logger.isDebugEnabled()) {
            logger.debug("pageQueryParam=" + pageQueryParam);
        }

        int total = 0;
        PageQueryResult<Menu> pageQueryResult = null;
        try {
            // TODO(review): convert pageQueryParam's filter bean instead of null.
            com.myit.server.model.admin.Menu menuBean = null;

            // 调用dao查询板块总数
            total = menuDao.getMenusCount(menuBean);

            PageQueryResult<com.myit.server.model.admin.Menu> pageQueryResultTemp =
                    new PageQueryResult<com.myit.server.model.admin.Menu>(total,
                            pageQueryParam.getPageNo(), pageQueryParam.getPageSize());

            if (total > 0) {
                // BUGFIX: the paging offsets must come from the populated temp
                // result. The original dereferenced the still-null
                // `pageQueryResult` here and threw a NullPointerException
                // whenever any menus existed.
                // 调用dao分页查询板块
                List<com.myit.server.model.admin.Menu> menus =
                        menuDao.findMenus(pageQueryResultTemp.getStart(),
                                pageQueryResultTemp.getPageSize(), menuBean);
                pageQueryResultTemp.setRows(menus);
            }
            // TODO(review): convert pageQueryResultTemp into a
            // PageQueryResult<Menu> and assign it to pageQueryResult; until
            // then this method returns null.
        } catch (Exception e) {
            logger.warn("Exception occured", e);
            throw e;
        }

        if (logger.isDebugEnabled()) {
            logger.debug("pageQueryResult=" + pageQueryResult);
        }

        logger.info("findMenus out.");
        return pageQueryResult;
    }

    /**
     * Persists a menu.
     *
     * @param menu bean to save (currently ignored — conversion TODO below)
     */
    public boolean saveMenu(Menu menu) throws Exception {
        logger.info("saveMenu in.");

        // 调用dao保存板块信息
        // TODO(review): convert `menu` into the DAO model type instead of
        // persisting null.
        com.myit.server.model.admin.Menu menuBean = null;
        boolean isSuccess = menuDao.persistMenu(menuBean);

        if (logger.isDebugEnabled()) {
            logger.debug("isSuccess=" + isSuccess);
        }

        logger.info("saveMenu out.");
        return isSuccess;
    }

    /**
     * Loads the menus visible to the given logged-in user.
     *
     * @param uId user id
     * @return currently always null — see the conversion TODO below
     */
    public List<Menu> getLoginMenus(Long uId) throws Exception {
        logger.info("getLoginMenus in.");

        List<Menu> menus = null;

        // 调用dao查询板块信息
        List<com.myit.server.model.admin.Menu> menuBeans = menuDao.findMenusByUId(uId);
        // BUGFIX: log the list the DAO actually returned. The original tested
        // the always-null result list `menus`, making the branch dead code.
        if (menuBeans != null) {
            logger.debug("menuBeans.size=" + menuBeans.size());
        }
        // TODO(review): convert `menuBeans` to List<Menu>.

        logger.info("getLoginMenus out.");
        return menus;
    }

    /**
     * Loads the child menus of the given parent menu.
     *
     * @param mId parent menu id (passed to the DAO as "pId")
     * @return currently always null — see the conversion TODO below
     */
    public List<Menu> findChildMenus(Long mId) throws Exception {
        logger.info("findPlatesByRId in.");

        List<Menu> menus = null;

        Map<String, Object> queryParam = new HashMap<String, Object>();
        queryParam.put("pId", mId);

        // 调用dao查询板块信息
        List<com.myit.server.model.admin.Menu> menuBeans = menuDao.findChildMenus(queryParam);
        // BUGFIX: same dead-branch issue as getLoginMenus — inspect the DAO
        // result rather than the always-null `menus`.
        if (menuBeans != null) {
            logger.debug("menuBeans.size=" + menuBeans.size());
        }
        // TODO(review): convert `menuBeans` to List<Menu>.

        logger.info("findPlatesByRId out.");
        return menus;
    }
}
346674058/SuRui
code/myit-server/src/main/java/com/myit/server/service/admin/impl/MenuServiceImpl.java
Java
apache-2.0
4,661
/* * Copyright (C) 2016 Intel Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package OptimizationTests.ShortMethodsInliningNonVirtualInvokes.InvokeSuperAObjectThrowNullGet_001; // The test checks that stack after NullPointerException occurs is correct despite inlining class Test extends SuperTest { Test(int iterations) { super(iterations); } public Foo getThingies(Foo[] arr, int i) { return super.getThingies(arr, i); } public void setThingies(Foo[] arr, Foo newThingy, int i) { super.setThingies(arr, newThingy, i); } }
android-art-intel/Nougat
art-extension/opttests/src/OptimizationTests/ShortMethodsInliningNonVirtualInvokes/InvokeSuperAObjectThrowNullGet_001/Test.java
Java
apache-2.0
1,106
<!DOCTYPE html>
<html>
<head>
    <title>Kendo UI Web Examples</title>
    <link href="../content/shared/styles/suite.css" rel="stylesheet">
</head>
<body>
<div id="page">
    <a class="offline-button" href="../index.php">Back to all suites</a>
    <?php
        // Path to the JSON descriptor for this suite's navigation tree.
        $jsonFilename = '../content/web.nav.json';
        // Shared renderer; presumably reads $jsonFilename and emits the
        // navigation markup for the suite — confirm in include/navigation.php.
        require_once '../include/navigation.php';
    ?>
</div>
</body>
</html>
saqlainmediasoft/Maintenance-Management-System
admin/kendoui/wrappers/php/web/index.php
PHP
apache-2.0
383
from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() import unittest from mri import MriServer from mri.dispatch import MriServerDispatch class TestMriServer(unittest.TestCase): def test_new_dispatch(self): server = MriServer("http://www.httpbin.com", "testuser", "testpass") task = {"title": "TEST", "id": "000112233"} dispatch = server.new_dispatch(task) test_against = MriServerDispatch(task, "http://www.httpbin.com", "testuser", "testpass") self.assertEqual(dispatch, test_against) if __name__ == '__main__': unittest.main()
Mri-monitoring/Mri-python-client
tests/TestMriServer.py
Python
apache-2.0
750
/*******************************************************************************
 * Copyright 2012 Sevket Seref Arikan, David Ingram
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package uk.ac.ucl.chime.web;

// NOTE(review): most of these imports appear unused in this file
// (GetGridProxy, Ptg, ArrayList, SelectItem, TextValueInfo); left untouched
// since only part of the project is visible here.
import gov.nih.nci.cagrid.gums.client.GetGridProxy;

import java.util.ArrayList;

import javax.el.ELContext;
import javax.faces.context.FacesContext;
import javax.faces.model.SelectItem;

import org.apache.poi.hssf.record.formula.Ptg;

import uk.ac.ucl.chime.utilities.TextValueInfo;
import uk.ac.ucl.chime.utils.RMDataTypeAdapter;
import uk.ac.ucl.chime.wrappers.archetypewrappers.ArchetypeWrapper;

/*
 * This class descends from RMDataTypeAdapter to use its syntax resolving mechanism.
 * It is not an adapter for a data type operation; instead it provides access to a
 * groupId using the node path as key, so the same nodeId from a set of components
 * gets back a random GUID every time the request-level bean is initialized.
 * This is not a nice trick, but JSF does not leave much choice in this case.
 */
public class RandomGroupIdAdapter extends RMDataTypeAdapter {

    // Archetype context inherited from RMDataTypeAdapter; set here so the
    // superclass path-resolution machinery can operate on it.
    public RandomGroupIdAdapter(ArchetypeWrapper pArchetypeWrapper) {
        archetypeWrapper = pArchetypeWrapper;
    }

    /**
     * Resolves the request-scoped ConnectorBean through the JSF EL resolver and
     * returns the group GUID registered for {@code targetNodePath}.
     * Assumes targetNodePath was populated by the superclass before this call
     * — confirm in RMDataTypeAdapter.
     */
    @Override
    protected Object getValue() throws Exception {
        ELContext elContext = FacesContext.getCurrentInstance().getELContext();
        ConnectorBean connector = (ConnectorBean) FacesContext.getCurrentInstance().getApplication()
                .getELResolver().getValue(elContext, null, "connectorBean");
        return connector.getGroupGUID(targetNodePath);
    }

    /**
     * Intentionally a no-op: the group GUID is generated, never written back.
     */
    @Override
    protected void setValue(String nodePath, Object value) throws Exception {
        //simply ignore set value
    }
}
josealmeida/opereffa
Opereffa/src/uk/ac/ucl/chime/web/RandomGroupIdAdapter.java
Java
apache-2.0
2,256
/* * Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.orientechnologies.orient.core.version; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInput; import java.io.ObjectOutput; import java.io.OutputStream; import com.orientechnologies.common.serialization.OBinaryConverter; import com.orientechnologies.common.serialization.OBinaryConverterFactory; import com.orientechnologies.orient.core.serialization.OBinaryProtocol; import com.orientechnologies.orient.core.storage.fs.OFile; /** * Implementation of {@link ORecordVersion} adapted to distributed environment. Opposite of {@link OSimpleVersion} contains * additional information about timestamp of last change and mac address of server that made this change. 
 *
 * @see OVersionFactory
 * @see ORecordVersion
 *
 * @author <a href="mailto:enisher@gmail.com">Artem Orobets</a>
 */
public final class ODistributedVersion implements ORecordVersion {
  // Fixed on-wire size: int counter + long timestamp + long macAddress.
  public static final int STREAMED_SIZE = OBinaryProtocol.SIZE_INT + OBinaryProtocol.SIZE_LONG
      + OBinaryProtocol.SIZE_LONG;
  public static final OBinaryConverter CONVERTER = OBinaryConverterFactory.getConverter();

  // Sign of `counter` encodes tombstone state: negative means deleted.
  private int counter;
  private long timestamp;
  private long macAddress;

  public ODistributedVersion() {
  }

  // Stamps the version with the current time and this server's MAC address.
  public ODistributedVersion(int counter) {
    this.counter = counter;
    this.timestamp = System.currentTimeMillis();
    this.macAddress = OVersionFactory.instance().getMacAddress();
  }

  public ODistributedVersion(int counter, long timestamp, long macAddress) {
    this.counter = counter;
    this.timestamp = timestamp;
    this.macAddress = macAddress;
  }

  // Bumps the counter and refreshes timestamp/MAC; rejected on tombstones.
  @Override
  public void increment() {
    if (isTombstone())
      throw new IllegalStateException("Record was deleted and can not be updated.");

    counter++;
    timestamp = System.currentTimeMillis();
    macAddress = OVersionFactory.instance().getMacAddress();
  }

  @Override
  public void decrement() {
    if (isTombstone())
      throw new IllegalStateException("Record was deleted and can not be updated.");

    counter--;
    timestamp = System.currentTimeMillis();
    macAddress = OVersionFactory.instance().getMacAddress();
  }

  @Override
  public boolean isUntracked() {
    return counter == -1;
  }

  @Override
  public boolean isTemporary() {
    return counter < -1;
  }

  @Override
  public boolean isValid() {
    return counter > -1;
  }

  @Override
  public void setCounter(int iVersion) {
    counter = iVersion;
  }

  @Override
  public int getCounter() {
    return counter;
  }

  @Override
  public boolean isTombstone() {
    return counter < 0;
  }

  // Marks the record deleted: increments, then negates the counter so the
  // magnitude still grows monotonically across the delete.
  public void convertToTombstone() {
    if (isTombstone())
      throw new IllegalStateException("Record was deleted and can not be updated.");

    counter++;
    counter = -counter;
    timestamp = System.currentTimeMillis();
    macAddress = OVersionFactory.instance().getMacAddress();
  }

  @Override
  public void copyFrom(ORecordVersion version) {
    ODistributedVersion other = (ODistributedVersion) version;
    update(other.counter, other.timestamp, other.macAddress);
  }

  public void update(int recordVersion, long timestamp, long macAddress) {
    this.counter = recordVersion;
    this.timestamp = timestamp;
    this.macAddress = macAddress;
  }

  @Override
  public void reset() {
    counter = 0;
    timestamp = System.currentTimeMillis();
    macAddress = OVersionFactory.instance().getMacAddress();
  }

  // Rollback mode is encoded by shifting the counter by Integer.MIN_VALUE;
  // clearRollbackMode() undoes the shift.
  @Override
  public void setRollbackMode() {
    counter = Integer.MIN_VALUE + counter;
  }

  @Override
  public void clearRollbackMode() {
    counter = counter - Integer.MIN_VALUE;
  }

  @Override
  public void disable() {
    counter = -1;
  }

  @Override
  public void revive() {
    counter = -counter;
  }

  @Override
  public ORecordVersion copy() {
    ODistributedVersion copy = new ODistributedVersion();
    copy.counter = counter;
    copy.timestamp = timestamp;
    copy.macAddress = macAddress;
    return copy;
  }

  @Override
  public ORecordVersionSerializer getSerializer() {
    return ODistributedVersionSerializer.INSTANCE;
  }

  // Equality delegates to compareTo, which compares counter magnitude
  // (tombstone-adjusted), then timestamp, then MAC address.
  @Override
  public boolean equals(Object other) {
    return other instanceof ODistributedVersion && ((ODistributedVersion) other).compareTo(this) == 0;
  }

  @Override
  public int hashCode() {
    int result = counter;
    result = 31 * result + (int) (timestamp ^ (timestamp >>> 32));
    result = 31 * result + (int) (macAddress ^ (macAddress >>> 32));
    return result;
  }

  @Override
  public String toString() {
    return ODistributedVersionSerializer.INSTANCE.toString(this);
  }

  @Override
  public int compareTo(ORecordVersion o) {
    ODistributedVersion other = (ODistributedVersion) o;

    // Compare counter magnitudes so a tombstone (negated counter) orders by
    // the same progression as live versions.
    final int myCounter;
    if (isTombstone())
      myCounter = -counter;
    else
      myCounter = counter;

    final int otherCounter;
    if (o.isTombstone())
      otherCounter = -o.getCounter();
    else
      otherCounter = o.getCounter();

    if (myCounter != otherCounter)
      return myCounter > otherCounter ? 1 : -1;

    if (timestamp != other.timestamp)
      return (timestamp > other.timestamp) ? 1 : -1;

    if (macAddress > other.macAddress)
      return 1;
    else if (macAddress < other.macAddress)
      return -1;
    else
      return 0;
  }

  public long getTimestamp() {
    return timestamp;
  }

  public long getMacAddress() {
    return macAddress;
  }

  @Override
  public void writeExternal(ObjectOutput out) throws IOException {
    ODistributedVersionSerializer.INSTANCE.writeTo(out, this);
  }

  @Override
  public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
    ODistributedVersionSerializer.INSTANCE.readFrom(in, this);
  }

  /**
   * Serializes a version as counter (int), timestamp (long), macAddress (long),
   * in that order, across every supported medium. The field order and widths
   * must stay in lock-step between all write/read pairs below.
   */
  private static final class ODistributedVersionSerializer implements ORecordVersionSerializer {
    private static final ODistributedVersionSerializer INSTANCE = new ODistributedVersionSerializer();

    @Override
    public void writeTo(DataOutput out, ORecordVersion version) throws IOException {
      final ODistributedVersion distributedVersion = (ODistributedVersion) version;
      out.writeInt(distributedVersion.counter);
      out.writeLong(distributedVersion.timestamp);
      out.writeLong(distributedVersion.macAddress);
    }

    @Override
    public void readFrom(DataInput in, ORecordVersion version) throws IOException {
      final ODistributedVersion distributedVersion = (ODistributedVersion) version;
      distributedVersion.counter = in.readInt();
      distributedVersion.timestamp = in.readLong();
      distributedVersion.macAddress = in.readLong();
    }

    @Override
    public void writeTo(OutputStream stream, ORecordVersion version) throws IOException {
      final ODistributedVersion distributedVersion = (ODistributedVersion) version;
      OBinaryProtocol.int2bytes(distributedVersion.counter, stream);
      OBinaryProtocol.long2bytes(distributedVersion.timestamp, stream);
      OBinaryProtocol.long2bytes(distributedVersion.macAddress, stream);
    }

    @Override
    public void readFrom(InputStream stream, ORecordVersion version) throws IOException {
      final ODistributedVersion distributedVersion = (ODistributedVersion) version;
      distributedVersion.counter = OBinaryProtocol.bytes2int(stream);
      distributedVersion.timestamp = OBinaryProtocol.bytes2long(stream);
      distributedVersion.macAddress = OBinaryProtocol.bytes2long(stream);
    }

    @Override
    public int writeTo(byte[] stream, int pos, ORecordVersion version) {
      final ODistributedVersion distributedVersion = (ODistributedVersion) version;
      int len = 0;
      OBinaryProtocol.int2bytes(distributedVersion.counter, stream, pos + len);
      len += OBinaryProtocol.SIZE_INT;
      OBinaryProtocol.long2bytes(distributedVersion.timestamp, stream, pos + len);
      len += OBinaryProtocol.SIZE_LONG;
      OBinaryProtocol.long2bytes(distributedVersion.macAddress, stream, pos + len);
      len += OBinaryProtocol.SIZE_LONG;
      return len;
    }

    @Override
    public int readFrom(byte[] iStream, int pos, ORecordVersion version) {
      final ODistributedVersion distributedVersion = (ODistributedVersion) version;
      int len = 0;
      distributedVersion.counter = OBinaryProtocol.bytes2int(iStream, pos + len);
      len += OBinaryProtocol.SIZE_INT;
      distributedVersion.timestamp = OBinaryProtocol.bytes2long(iStream, pos + len);
      len += OBinaryProtocol.SIZE_LONG;
      distributedVersion.macAddress = OBinaryProtocol.bytes2long(iStream, pos + len);
      len += OBinaryProtocol.SIZE_LONG;
      return len;
    }

    @Override
    public int writeTo(OFile file, long pos, ORecordVersion version) throws IOException {
      final ODistributedVersion distributedVersion = (ODistributedVersion) version;
      int len = 0;
      file.writeInt(pos + len, distributedVersion.counter);
      len += OBinaryProtocol.SIZE_INT;
      file.writeLong(pos + len, distributedVersion.timestamp);
      len += OBinaryProtocol.SIZE_LONG;
      file.writeLong(pos + len, distributedVersion.macAddress);
      len += OBinaryProtocol.SIZE_LONG;
      return len;
    }

    @Override
    public long readFrom(OFile file, long pos, ORecordVersion version) throws IOException {
      final ODistributedVersion distributedVersion = (ODistributedVersion) version;
      int len = 0;
      distributedVersion.counter = file.readInt(pos + len);
      len += OBinaryProtocol.SIZE_INT;
      distributedVersion.timestamp = file.readLong(pos + len);
      len += OBinaryProtocol.SIZE_LONG;
      distributedVersion.macAddress = file.readLong(pos + len);
      len += OBinaryProtocol.SIZE_LONG;
      return len;
    }

    // "fast" variants use the platform-tuned OBinaryConverter instead of
    // OBinaryProtocol; the byte layout is presumably identical — confirm
    // against the converter implementation before mixing the two paths.
    @Override
    public int fastWriteTo(byte[] iStream, int pos, ORecordVersion version) {
      final ODistributedVersion distributedVersion = (ODistributedVersion) version;
      int len = 0;
      CONVERTER.putInt(iStream, pos + len, distributedVersion.counter);
      len += OBinaryProtocol.SIZE_INT;
      CONVERTER.putLong(iStream, pos + len, distributedVersion.timestamp);
      len += OBinaryProtocol.SIZE_LONG;
      CONVERTER.putLong(iStream, pos + len, distributedVersion.macAddress);
      len += OBinaryProtocol.SIZE_LONG;
      return len;
    }

    @Override
    public int fastReadFrom(byte[] iStream, int pos, ORecordVersion version) {
      final ODistributedVersion distributedVersion = (ODistributedVersion) version;
      int len = 0;
      distributedVersion.counter = CONVERTER.getInt(iStream, pos + len);
      len += OBinaryProtocol.SIZE_INT;
      distributedVersion.timestamp = CONVERTER.getLong(iStream, pos + len);
      len += OBinaryProtocol.SIZE_LONG;
      distributedVersion.macAddress = CONVERTER.getLong(iStream, pos + len);
      len += OBinaryProtocol.SIZE_LONG;
      return len;
    }

    @Override
    public byte[] toByteArray(ORecordVersion version) {
      int size = OBinaryProtocol.SIZE_INT + OBinaryProtocol.SIZE_LONG + OBinaryProtocol.SIZE_LONG;
      byte[] buffer = new byte[size];
      fastWriteTo(buffer, 0, version);
      return buffer;
    }

    // Text form: "<counter>.<timestamp>.<macAddress>".
    @Override
    public String toString(ORecordVersion version) {
      final ODistributedVersion distributedVersion = (ODistributedVersion) version;
      return distributedVersion.counter + "." + distributedVersion.timestamp + "." + distributedVersion.macAddress;
    }

    @Override
    public void fromString(String string, ORecordVersion version) {
      final ODistributedVersion distributedVersion = (ODistributedVersion) version;
      String[] parts = string.split("\\.");
      if (parts.length != 3)
        throw new IllegalArgumentException(
            "Not correct format of distributed version. Expected <recordVersion>.<timestamp>.<macAddress>");
      distributedVersion.counter = Integer.valueOf(parts[0]);
      distributedVersion.timestamp = Long.valueOf(parts[1]);
      distributedVersion.macAddress = Long.valueOf(parts[2]);
    }
  }
}
nengxu/OrientDB
core/src/main/java/com/orientechnologies/orient/core/version/ODistributedVersion.java
Java
apache-2.0
12,956
package com.latte.ui.recycler; /** * Created by Administrator on 2017/9/18 0018. */ public enum MultipleFields { ITEM_TYPE, TITLE, TEXT, IMAGE_URL, BANNERS, SPAN_SIZE, ID, NAME, TAG }
xiao125/LatteDamo
latte-ui/src/main/java/com/latte/ui/recycler/MultipleFields.java
Java
apache-2.0
225
package com.xhuihui.app.json; /** * Created by lihuiguang on 2017/7/13. */ public class json { }
x-huihui/x-huihui.github.io
src/main/java/com/xhuihui/app/json/json.java
Java
apache-2.0
100
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University // Copyright (c) 2011, 2012 Open Networking Foundation // Copyright (c) 2012, 2013 Big Switch Networks, Inc. // This library was generated by the LoxiGen Compiler. // See the file LICENSE.txt which should have been included in the source distribution // Automatically generated by LOXI from template of_interface.java // Do not modify package org.projectfloodlight.openflow.protocol.oxm; import org.projectfloodlight.openflow.protocol.*; import org.projectfloodlight.openflow.protocol.action.*; import org.projectfloodlight.openflow.protocol.actionid.*; import org.projectfloodlight.openflow.protocol.bsntlv.*; import org.projectfloodlight.openflow.protocol.errormsg.*; import org.projectfloodlight.openflow.protocol.meterband.*; import org.projectfloodlight.openflow.protocol.instruction.*; import org.projectfloodlight.openflow.protocol.instructionid.*; import org.projectfloodlight.openflow.protocol.match.*; import org.projectfloodlight.openflow.protocol.stat.*; import org.projectfloodlight.openflow.protocol.oxm.*; import org.projectfloodlight.openflow.protocol.oxs.*; import org.projectfloodlight.openflow.protocol.queueprop.*; import org.projectfloodlight.openflow.types.*; import org.projectfloodlight.openflow.util.*; import org.projectfloodlight.openflow.exceptions.*; import io.netty.buffer.ByteBuf; public interface OFOxmBsnInPorts128 extends OFObject, OFOxm<OFBitMask128> { long getTypeLen(); OFBitMask128 getValue(); MatchField<OFBitMask128> getMatchField(); boolean isMasked(); OFOxm<OFBitMask128> getCanonical(); OFBitMask128 getMask(); OFVersion getVersion(); void writeTo(ByteBuf channelBuffer); Builder createBuilder(); public interface Builder extends OFOxm.Builder<OFBitMask128> { OFOxmBsnInPorts128 build(); long getTypeLen(); OFBitMask128 getValue(); Builder setValue(OFBitMask128 value); MatchField<OFBitMask128> getMatchField(); boolean isMasked(); OFOxm<OFBitMask128> getCanonical(); 
OFBitMask128 getMask(); OFVersion getVersion(); } }
floodlight/loxigen-artifacts
openflowj/gen-src/main/java/org/projectfloodlight/openflow/protocol/oxm/OFOxmBsnInPorts128.java
Java
apache-2.0
2,170
from JumpScale import j descr = """ This jumpscript returns network info """ category = "monitoring" organization = "jumpscale" author = "kristof@incubaid.com" license = "bsd" version = "1.0" roles = [] def action(): return j.sal.nettools.getNetworkInfo() if __name__ == "__main__": print(action())
Jumpscale/jumpscale_core8
apps/agentcontroller/jumpscripts/jumpscale/network_info.py
Python
apache-2.0
312
/* Copyright 2015 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { assert, BaseException, isString, removeNullCharacters, stringToBytes, Util, warn, } from "../shared/util.js"; import { BaseCanvasFactory, BaseCMapReaderFactory, BaseStandardFontDataFactory, BaseSVGFactory, } from "./base_factory.js"; const DEFAULT_LINK_REL = "noopener noreferrer nofollow"; const SVG_NS = "http://www.w3.org/2000/svg"; class DOMCanvasFactory extends BaseCanvasFactory { constructor({ ownerDocument = globalThis.document } = {}) { super(); this._document = ownerDocument; } _createCanvas(width, height) { const canvas = this._document.createElement("canvas"); canvas.width = width; canvas.height = height; return canvas; } } async function fetchData(url, asTypedArray = false) { if ( (typeof PDFJSDev !== "undefined" && PDFJSDev.test("MOZCENTRAL")) || isValidFetchUrl(url, document.baseURI) ) { const response = await fetch(url); if (!response.ok) { throw new Error(response.statusText); } return asTypedArray ? new Uint8Array(await response.arrayBuffer()) : stringToBytes(await response.text()); } // The Fetch API is not supported. 
return new Promise((resolve, reject) => { const request = new XMLHttpRequest(); request.open("GET", url, /* asTypedArray = */ true); if (asTypedArray) { request.responseType = "arraybuffer"; } request.onreadystatechange = () => { if (request.readyState !== XMLHttpRequest.DONE) { return; } if (request.status === 200 || request.status === 0) { let data; if (asTypedArray && request.response) { data = new Uint8Array(request.response); } else if (!asTypedArray && request.responseText) { data = stringToBytes(request.responseText); } if (data) { resolve(data); return; } } reject(new Error(request.statusText)); }; request.send(null); }); } class DOMCMapReaderFactory extends BaseCMapReaderFactory { _fetchData(url, compressionType) { return fetchData(url, /* asTypedArray = */ this.isCompressed).then(data => { return { cMapData: data, compressionType }; }); } } class DOMStandardFontDataFactory extends BaseStandardFontDataFactory { _fetchData(url) { return fetchData(url, /* asTypedArray = */ true); } } class DOMSVGFactory extends BaseSVGFactory { _createSVG(type) { return document.createElementNS(SVG_NS, type); } } /** * @typedef {Object} PageViewportParameters * @property {Array<number>} viewBox - The xMin, yMin, xMax and * yMax coordinates. * @property {number} scale - The scale of the viewport. * @property {number} rotation - The rotation, in degrees, of the viewport. * @property {number} [offsetX] - The horizontal, i.e. x-axis, offset. The * default value is `0`. * @property {number} [offsetY] - The vertical, i.e. y-axis, offset. The * default value is `0`. * @property {boolean} [dontFlip] - If true, the y-axis will not be flipped. * The default value is `false`. */ /** * @typedef {Object} PageViewportCloneParameters * @property {number} [scale] - The scale, overriding the one in the cloned * viewport. The default value is `this.scale`. * @property {number} [rotation] - The rotation, in degrees, overriding the one * in the cloned viewport. 
The default value is `this.rotation`. * @property {number} [offsetX] - The horizontal, i.e. x-axis, offset. * The default value is `this.offsetX`. * @property {number} [offsetY] - The vertical, i.e. y-axis, offset. * The default value is `this.offsetY`. * @property {boolean} [dontFlip] - If true, the x-axis will not be flipped. * The default value is `false`. */ /** * PDF page viewport created based on scale, rotation and offset. */ class PageViewport { /** * @param {PageViewportParameters} */ constructor({ viewBox, scale, rotation, offsetX = 0, offsetY = 0, dontFlip = false, }) { this.viewBox = viewBox; this.scale = scale; this.rotation = rotation; this.offsetX = offsetX; this.offsetY = offsetY; // creating transform to convert pdf coordinate system to the normal // canvas like coordinates taking in account scale and rotation const centerX = (viewBox[2] + viewBox[0]) / 2; const centerY = (viewBox[3] + viewBox[1]) / 2; let rotateA, rotateB, rotateC, rotateD; // Normalize the rotation, by clamping it to the [0, 360) range. rotation %= 360; if (rotation < 0) { rotation += 360; } switch (rotation) { case 180: rotateA = -1; rotateB = 0; rotateC = 0; rotateD = 1; break; case 90: rotateA = 0; rotateB = 1; rotateC = 1; rotateD = 0; break; case 270: rotateA = 0; rotateB = -1; rotateC = -1; rotateD = 0; break; case 0: rotateA = 1; rotateB = 0; rotateC = 0; rotateD = -1; break; default: throw new Error( "PageViewport: Invalid rotation, must be a multiple of 90 degrees." 
); } if (dontFlip) { rotateC = -rotateC; rotateD = -rotateD; } let offsetCanvasX, offsetCanvasY; let width, height; if (rotateA === 0) { offsetCanvasX = Math.abs(centerY - viewBox[1]) * scale + offsetX; offsetCanvasY = Math.abs(centerX - viewBox[0]) * scale + offsetY; width = Math.abs(viewBox[3] - viewBox[1]) * scale; height = Math.abs(viewBox[2] - viewBox[0]) * scale; } else { offsetCanvasX = Math.abs(centerX - viewBox[0]) * scale + offsetX; offsetCanvasY = Math.abs(centerY - viewBox[1]) * scale + offsetY; width = Math.abs(viewBox[2] - viewBox[0]) * scale; height = Math.abs(viewBox[3] - viewBox[1]) * scale; } // creating transform for the following operations: // translate(-centerX, -centerY), rotate and flip vertically, // scale, and translate(offsetCanvasX, offsetCanvasY) this.transform = [ rotateA * scale, rotateB * scale, rotateC * scale, rotateD * scale, offsetCanvasX - rotateA * scale * centerX - rotateC * scale * centerY, offsetCanvasY - rotateB * scale * centerX - rotateD * scale * centerY, ]; this.width = width; this.height = height; } /** * Clones viewport, with optional additional properties. * @param {PageViewportCloneParameters} [params] * @returns {PageViewport} Cloned viewport. */ clone({ scale = this.scale, rotation = this.rotation, offsetX = this.offsetX, offsetY = this.offsetY, dontFlip = false, } = {}) { return new PageViewport({ viewBox: this.viewBox.slice(), scale, rotation, offsetX, offsetY, dontFlip, }); } /** * Converts PDF point to the viewport coordinates. For examples, useful for * converting PDF location into canvas pixel coordinates. * @param {number} x - The x-coordinate. * @param {number} y - The y-coordinate. * @returns {Object} Object containing `x` and `y` properties of the * point in the viewport coordinate space. 
* @see {@link convertToPdfPoint} * @see {@link convertToViewportRectangle} */ convertToViewportPoint(x, y) { return Util.applyTransform([x, y], this.transform); } /** * Converts PDF rectangle to the viewport coordinates. * @param {Array} rect - The xMin, yMin, xMax and yMax coordinates. * @returns {Array} Array containing corresponding coordinates of the * rectangle in the viewport coordinate space. * @see {@link convertToViewportPoint} */ convertToViewportRectangle(rect) { const topLeft = Util.applyTransform([rect[0], rect[1]], this.transform); const bottomRight = Util.applyTransform([rect[2], rect[3]], this.transform); return [topLeft[0], topLeft[1], bottomRight[0], bottomRight[1]]; } /** * Converts viewport coordinates to the PDF location. For examples, useful * for converting canvas pixel location into PDF one. * @param {number} x - The x-coordinate. * @param {number} y - The y-coordinate. * @returns {Object} Object containing `x` and `y` properties of the * point in the PDF coordinate space. * @see {@link convertToViewportPoint} */ convertToPdfPoint(x, y) { return Util.applyInverseTransform([x, y], this.transform); } } class RenderingCancelledException extends BaseException { constructor(msg, type) { super(msg, "RenderingCancelledException"); this.type = type; } } const LinkTarget = { NONE: 0, // Default value. SELF: 1, BLANK: 2, PARENT: 3, TOP: 4, }; /** * @typedef ExternalLinkParameters * @typedef {Object} ExternalLinkParameters * @property {string} url - An absolute URL. * @property {LinkTarget} [target] - The link target. The default value is * `LinkTarget.NONE`. * @property {string} [rel] - The link relationship. The default value is * `DEFAULT_LINK_REL`. * @property {boolean} [enabled] - Whether the link should be enabled. The * default value is true. */ /** * Adds various attributes (href, title, target, rel) to hyperlinks. * @param {HTMLLinkElement} link - The link element. 
* @param {ExternalLinkParameters} params */ function addLinkAttributes(link, { url, target, rel, enabled = true } = {}) { assert( url && typeof url === "string", 'addLinkAttributes: A valid "url" parameter must provided.' ); const urlNullRemoved = removeNullCharacters(url); if (enabled) { link.href = link.title = urlNullRemoved; } else { link.href = ""; link.title = `Disabled: ${urlNullRemoved}`; link.onclick = () => { return false; }; } let targetStr = ""; // LinkTarget.NONE switch (target) { case LinkTarget.NONE: break; case LinkTarget.SELF: targetStr = "_self"; break; case LinkTarget.BLANK: targetStr = "_blank"; break; case LinkTarget.PARENT: targetStr = "_parent"; break; case LinkTarget.TOP: targetStr = "_top"; break; } link.target = targetStr; link.rel = typeof rel === "string" ? rel : DEFAULT_LINK_REL; } function isDataScheme(url) { const ii = url.length; let i = 0; while (i < ii && url[i].trim() === "") { i++; } return url.substring(i, i + 5).toLowerCase() === "data:"; } function isPdfFile(filename) { return typeof filename === "string" && /\.pdf$/i.test(filename); } /** * Gets the filename from a given URL. * @param {string} url * @returns {string} */ function getFilenameFromUrl(url) { const anchor = url.indexOf("#"); const query = url.indexOf("?"); const end = Math.min( anchor > 0 ? anchor : url.length, query > 0 ? query : url.length ); return url.substring(url.lastIndexOf("/", end) + 1, end); } /** * Returns the filename or guessed filename from the url (see issue 3455). * @param {string} url - The original PDF location. * @param {string} defaultFilename - The value returned if the filename is * unknown, or the protocol is unsupported. * @returns {string} Guessed PDF filename. 
*/ function getPdfFilenameFromUrl(url, defaultFilename = "document.pdf") { if (typeof url !== "string") { return defaultFilename; } if (isDataScheme(url)) { warn('getPdfFilenameFromUrl: ignore "data:"-URL for performance reasons.'); return defaultFilename; } const reURI = /^(?:(?:[^:]+:)?\/\/[^/]+)?([^?#]*)(\?[^#]*)?(#.*)?$/; // SCHEME HOST 1.PATH 2.QUERY 3.REF // Pattern to get last matching NAME.pdf const reFilename = /[^/?#=]+\.pdf\b(?!.*\.pdf\b)/i; const splitURI = reURI.exec(url); let suggestedFilename = reFilename.exec(splitURI[1]) || reFilename.exec(splitURI[2]) || reFilename.exec(splitURI[3]); if (suggestedFilename) { suggestedFilename = suggestedFilename[0]; if (suggestedFilename.includes("%")) { // URL-encoded %2Fpath%2Fto%2Ffile.pdf should be file.pdf try { suggestedFilename = reFilename.exec( decodeURIComponent(suggestedFilename) )[0]; } catch (ex) { // Possible (extremely rare) errors: // URIError "Malformed URI", e.g. for "%AA.pdf" // TypeError "null has no properties", e.g. for "%2F.pdf" } } } return suggestedFilename || defaultFilename; } class StatTimer { constructor() { this.started = Object.create(null); this.times = []; } time(name) { if (name in this.started) { warn(`Timer is already running for ${name}`); } this.started[name] = Date.now(); } timeEnd(name) { if (!(name in this.started)) { warn(`Timer has not been started for ${name}`); } this.times.push({ name, start: this.started[name], end: Date.now(), }); // Remove timer from started so it can be called again. delete this.started[name]; } toString() { // Find the longest name for padding purposes. const outBuf = []; let longest = 0; for (const time of this.times) { const name = time.name; if (name.length > longest) { longest = name.length; } } for (const time of this.times) { const duration = time.end - time.start; outBuf.push(`${time.name.padEnd(longest)} ${duration}ms\n`); } return outBuf.join(""); } } function isValidFetchUrl(url, baseUrl) { try { const { protocol } = baseUrl ? 
new URL(url, baseUrl) : new URL(url); // The Fetch API only supports the http/https protocols, and not file/ftp. return protocol === "http:" || protocol === "https:"; } catch (ex) { return false; // `new URL()` will throw on incorrect data. } } /** * @param {string} src * @param {boolean} [removeScriptElement] * @returns {Promise<void>} */ function loadScript(src, removeScriptElement = false) { return new Promise((resolve, reject) => { const script = document.createElement("script"); script.src = src; script.onload = function (evt) { if (removeScriptElement) { script.remove(); } resolve(evt); }; script.onerror = function () { reject(new Error(`Cannot load script at: ${script.src}`)); }; (document.head || document.documentElement).appendChild(script); }); } // Deprecated API function -- display regardless of the `verbosity` setting. function deprecated(details) { console.log("Deprecated API usage: " + details); } let pdfDateStringRegex; class PDFDateString { /** * Convert a PDF date string to a JavaScript `Date` object. * * The PDF date string format is described in section 7.9.4 of the official * PDF 32000-1:2008 specification. However, in the PDF 1.7 reference (sixth * edition) Adobe describes the same format including a trailing apostrophe. * This syntax in incorrect, but Adobe Acrobat creates PDF files that contain * them. We ignore all apostrophes as they are not necessary for date parsing. * * Moreover, Adobe Acrobat doesn't handle changing the date to universal time * and doesn't use the user's time zone (effectively ignoring the HH' and mm' * parts of the date string). * * @param {string} input * @returns {Date|null} */ static toDateObject(input) { if (!input || !isString(input)) { return null; } // Lazily initialize the regular expression. if (!pdfDateStringRegex) { pdfDateStringRegex = new RegExp( "^D:" + // Prefix (required) "(\\d{4})" + // Year (required) "(\\d{2})?" + // Month (optional) "(\\d{2})?" + // Day (optional) "(\\d{2})?" 
+ // Hour (optional) "(\\d{2})?" + // Minute (optional) "(\\d{2})?" + // Second (optional) "([Z|+|-])?" + // Universal time relation (optional) "(\\d{2})?" + // Offset hour (optional) "'?" + // Splitting apostrophe (optional) "(\\d{2})?" + // Offset minute (optional) "'?" // Trailing apostrophe (optional) ); } // Optional fields that don't satisfy the requirements from the regular // expression (such as incorrect digit counts or numbers that are out of // range) will fall back the defaults from the specification. const matches = pdfDateStringRegex.exec(input); if (!matches) { return null; } // JavaScript's `Date` object expects the month to be between 0 and 11 // instead of 1 and 12, so we have to correct for that. const year = parseInt(matches[1], 10); let month = parseInt(matches[2], 10); month = month >= 1 && month <= 12 ? month - 1 : 0; let day = parseInt(matches[3], 10); day = day >= 1 && day <= 31 ? day : 1; let hour = parseInt(matches[4], 10); hour = hour >= 0 && hour <= 23 ? hour : 0; let minute = parseInt(matches[5], 10); minute = minute >= 0 && minute <= 59 ? minute : 0; let second = parseInt(matches[6], 10); second = second >= 0 && second <= 59 ? second : 0; const universalTimeRelation = matches[7] || "Z"; let offsetHour = parseInt(matches[8], 10); offsetHour = offsetHour >= 0 && offsetHour <= 23 ? offsetHour : 0; let offsetMinute = parseInt(matches[9], 10) || 0; offsetMinute = offsetMinute >= 0 && offsetMinute <= 59 ? offsetMinute : 0; // Universal time relation 'Z' means that the local time is equal to the // universal time, whereas the relations '+'/'-' indicate that the local // time is later respectively earlier than the universal time. Every date // is normalized to universal time. 
if (universalTimeRelation === "-") { hour += offsetHour; minute += offsetMinute; } else if (universalTimeRelation === "+") { hour -= offsetHour; minute -= offsetMinute; } return new Date(Date.UTC(year, month, day, hour, minute, second)); } } /** * NOTE: This is (mostly) intended to support printing of XFA forms. */ function getXfaPageViewport(xfaPage, { scale = 1, rotation = 0 }) { const { width, height } = xfaPage.attributes.style; const viewBox = [0, 0, parseInt(width), parseInt(height)]; return new PageViewport({ viewBox, scale, rotation, }); } export { addLinkAttributes, DEFAULT_LINK_REL, deprecated, DOMCanvasFactory, DOMCMapReaderFactory, DOMStandardFontDataFactory, DOMSVGFactory, getFilenameFromUrl, getPdfFilenameFromUrl, getXfaPageViewport, isDataScheme, isPdfFile, isValidFetchUrl, LinkTarget, loadScript, PageViewport, PDFDateString, RenderingCancelledException, StatTimer, };
xavier114fch/pdf.js
src/display/display_utils.js
JavaScript
apache-2.0
19,246
package com.smates.dbc2.utils; /** * 参数 * @author 刘晓庆 * */ public class SysConst { //md5加密时用到的盐值 public static final String SALTSOURCE = "baijw"; //七牛云外连接域名 public static final String QNIUYUNURL = "http://ohyi4k153.bkt.clouddn.com/"; //七牛云图片处理样式 public static final String QNIUYUNSTYLE = "?imageView2/2/w/80/h/100/interlace/0/q/50"; //资源type public static final String VIP = "0"; public static final String LEARN = "1"; public static final String GAME = "2"; }
MarchMachao/ZHFS-WEB
src/main/java/com/smates/dbc2/utils/SysConst.java
Java
apache-2.0
573
package ca.corefacility.bioinformatics.irida.ria.web.services; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Set; import java.util.stream.Collectors; import javax.validation.ConstraintViolationException; import org.apache.commons.lang3.RandomStringUtils; import org.springframework.context.MessageSource; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; import org.springframework.data.jpa.domain.Specification; import org.springframework.security.oauth2.provider.NoSuchClientException; import org.springframework.stereotype.Component; import ca.corefacility.bioinformatics.irida.exceptions.EntityExistsException; import ca.corefacility.bioinformatics.irida.model.IridaClientDetails; import ca.corefacility.bioinformatics.irida.repositories.specification.IridaClientDetailsSpecification; import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.clients.ClientTableModel; import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.clients.ClientTableRequest; import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.clients.CreateUpdateClientDetails; import ca.corefacility.bioinformatics.irida.ria.web.models.tables.TableResponse; import ca.corefacility.bioinformatics.irida.service.IridaClientDetailsService; import com.google.common.collect.Sets; /** * UI Service to handle IRIDA Clients */ @Component public class UIClientService { private final IridaClientDetailsService clientDetailsService; private final MessageSource messageSource; private final String AUTO_APPROVE = "auto"; private final String SCOPE_READ = "read"; private final String SCOPE_WRITE = "write"; private final String GRANT_TYPE_AUTH_CODE = "authorization_code"; public UIClientService(IridaClientDetailsService clientDetailsService, MessageSource messageSource) { this.clientDetailsService = clientDetailsService; this.messageSource = messageSource; } /** * Get a listing of clients based on the table request. 
* * @param tableRequest Information about the sort and page of the table. * @return Current status of the table */ public TableResponse<ClientTableModel> getClientList(ClientTableRequest tableRequest) { Specification<IridaClientDetails> specification = IridaClientDetailsSpecification.searchClient( tableRequest.getSearch()); Page<IridaClientDetails> page = clientDetailsService.search(specification, PageRequest.of(tableRequest.getCurrent(), tableRequest.getPageSize(), tableRequest.getSort())); List<ClientTableModel> models = page.getContent() .stream() .map(client -> new ClientTableModel(client, clientDetailsService.countActiveTokensForClient(client))) .collect(Collectors.toList()); return new TableResponse<>(models, page.getTotalElements()); } /** * Revoke all tokens for a specific client * * @param id Identifier for a client */ public void deleteClientTokens(Long id) { IridaClientDetails details = clientDetailsService.read(id); clientDetailsService.revokeTokensForClient(details); } /** * Validate a client identifier for a new client * * @param clientId Identifier to check to see if it exists * @throws NoSuchClientException thrown if a client does not exist with the given client id. */ public void validateClientId(String clientId) throws NoSuchClientException { clientDetailsService.loadClientByClientId(clientId); } /** * Create a new client * * @param request Details about the new client * @param locale Current users {@link Locale} * @return A message to the user about the result of the create/update * @throws EntityExistsException thrown if the client id already is used. 
* @throws ConstraintViolationException thrown if the client id violates any of its constraints */ public String createOrUpdateClient(CreateUpdateClientDetails request, Locale locale) throws EntityExistsException, ConstraintViolationException { IridaClientDetails client; if (request.getId() != null) { // Existing client client = clientDetailsService.read(request.getId()); } else { // New client, so need to set up a few things that cannot be mutated in an existing one client = new IridaClientDetails(); client.setClientSecret(generateClientSecret()); client.setClientId(request.getClientId()); } client.setAccessTokenValiditySeconds(request.getTokenValidity()); // Let's set up the scopes for this client Set<String> scopes = new HashSet<>(); Set<String> autoScopes = new HashSet<>(); // 1. Read scope if (request.getRead() .equals(SCOPE_READ)) { scopes.add(SCOPE_READ); } else if (request.getRead() .equals(AUTO_APPROVE)) { scopes.add(SCOPE_READ); autoScopes.add(SCOPE_READ); } // 2. Write scope if (request.getWrite() .equals(SCOPE_WRITE)) { scopes.add(SCOPE_WRITE); } else if (request.getWrite() .equals(AUTO_APPROVE)) { scopes.add(SCOPE_WRITE); autoScopes.add(SCOPE_WRITE); } client.setScope(scopes); client.setAutoApprovableScopes(autoScopes); // Set the grant type client.setAuthorizedGrantTypes(Sets.newHashSet(request.getGrantType())); if (request.getGrantType() .equals(GRANT_TYPE_AUTH_CODE)) { client.setRegisteredRedirectUri(request.getRedirectURI()); } // See if allowed refresh tokens if (request.getRefreshToken() > 0) { client.getAuthorizedGrantTypes().add("refresh_token"); client.setRefreshTokenValiditySeconds(request.getRefreshToken()); } if (client.getId() != null) { clientDetailsService.update(client); return messageSource.getMessage("server.UpdateClientForm.success", new Object[] { client.getClientId() }, locale); } else { client = clientDetailsService.create(client); return messageSource.getMessage("server.AddClientForm.success", new Object[] { client.getClientId() 
}, locale); } } /** * Delete a client * * @param id Identifier for the client to delete */ public void deleteClient(Long id) { clientDetailsService.delete(id); } /** * Generate a new client secret * * @param id identifier for a client */ public void regenerateClientSecret(Long id) { IridaClientDetails details = clientDetailsService.read(id); String secret = generateClientSecret(); details.setClientSecret(secret); clientDetailsService.update(details); } private String generateClientSecret() { return RandomStringUtils.randomAlphanumeric(42); } }
phac-nml/irida
src/main/java/ca/corefacility/bioinformatics/irida/ria/web/services/UIClientService.java
Java
apache-2.0
7,213
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.mina.common; /** * Represents a thread model of an {@link IoService}. There's no essential * difference from {@link IoFilterChainBuilder}. The only difference is that * {@link ThreadModel} is executed later than the {@link IoFilterChainBuilder} * you specified. However, please don't abuse this internal behavior; it can * change. * * @author The Apache Directory Project (mina-dev@directory.apache.org) * @version $Rev$, $Date$ */ public interface ThreadModel extends IoFilterChainBuilder { /** * A {@link ThreadModel} which make MINA not manage a thread model at all. */ static final ThreadModel MANUAL = new ThreadModel() { public void buildFilterChain(IoFilterChain chain) throws Exception { // Do nothing. } }; }
sarvex/MINA
core/src/main/java/org/apache/mina/common/ThreadModel.java
Java
apache-2.0
1,632
/* * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ #include <aws/core/utils/Outcome.h> #include <aws/core/auth/AWSAuthSigner.h> #include <aws/core/client/CoreErrors.h> #include <aws/core/client/RetryStrategy.h> #include <aws/core/http/HttpClient.h> #include <aws/core/http/HttpResponse.h> #include <aws/core/http/HttpClientFactory.h> #include <aws/core/auth/AWSCredentialsProviderChain.h> #include <aws/core/utils/json/JsonSerializer.h> #include <aws/core/utils/memory/stl/AWSStringStream.h> #include <aws/core/utils/threading/Executor.h> #include <aws/core/utils/DNS.h> #include <aws/core/utils/logging/LogMacros.h> #include <aws/serverlessrepo/ServerlessApplicationRepositoryClient.h> #include <aws/serverlessrepo/ServerlessApplicationRepositoryEndpoint.h> #include <aws/serverlessrepo/ServerlessApplicationRepositoryErrorMarshaller.h> #include <aws/serverlessrepo/model/CreateApplicationRequest.h> #include <aws/serverlessrepo/model/CreateApplicationVersionRequest.h> #include <aws/serverlessrepo/model/CreateCloudFormationChangeSetRequest.h> #include <aws/serverlessrepo/model/CreateCloudFormationTemplateRequest.h> #include <aws/serverlessrepo/model/DeleteApplicationRequest.h> #include <aws/serverlessrepo/model/GetApplicationRequest.h> #include <aws/serverlessrepo/model/GetApplicationPolicyRequest.h> #include <aws/serverlessrepo/model/GetCloudFormationTemplateRequest.h> #include <aws/serverlessrepo/model/ListApplicationDependenciesRequest.h> #include 
<aws/serverlessrepo/model/ListApplicationVersionsRequest.h> #include <aws/serverlessrepo/model/ListApplicationsRequest.h> #include <aws/serverlessrepo/model/PutApplicationPolicyRequest.h> #include <aws/serverlessrepo/model/UnshareApplicationRequest.h> #include <aws/serverlessrepo/model/UpdateApplicationRequest.h> using namespace Aws; using namespace Aws::Auth; using namespace Aws::Client; using namespace Aws::ServerlessApplicationRepository; using namespace Aws::ServerlessApplicationRepository::Model; using namespace Aws::Http; using namespace Aws::Utils::Json; static const char* SERVICE_NAME = "serverlessrepo"; static const char* ALLOCATION_TAG = "ServerlessApplicationRepositoryClient"; ServerlessApplicationRepositoryClient::ServerlessApplicationRepositoryClient(const Client::ClientConfiguration& clientConfiguration) : BASECLASS(clientConfiguration, Aws::MakeShared<AWSAuthV4Signer>(ALLOCATION_TAG, Aws::MakeShared<DefaultAWSCredentialsProviderChain>(ALLOCATION_TAG), SERVICE_NAME, clientConfiguration.region), Aws::MakeShared<ServerlessApplicationRepositoryErrorMarshaller>(ALLOCATION_TAG)), m_executor(clientConfiguration.executor) { init(clientConfiguration); } ServerlessApplicationRepositoryClient::ServerlessApplicationRepositoryClient(const AWSCredentials& credentials, const Client::ClientConfiguration& clientConfiguration) : BASECLASS(clientConfiguration, Aws::MakeShared<AWSAuthV4Signer>(ALLOCATION_TAG, Aws::MakeShared<SimpleAWSCredentialsProvider>(ALLOCATION_TAG, credentials), SERVICE_NAME, clientConfiguration.region), Aws::MakeShared<ServerlessApplicationRepositoryErrorMarshaller>(ALLOCATION_TAG)), m_executor(clientConfiguration.executor) { init(clientConfiguration); } ServerlessApplicationRepositoryClient::ServerlessApplicationRepositoryClient(const std::shared_ptr<AWSCredentialsProvider>& credentialsProvider, const Client::ClientConfiguration& clientConfiguration) : BASECLASS(clientConfiguration, Aws::MakeShared<AWSAuthV4Signer>(ALLOCATION_TAG, 
credentialsProvider, SERVICE_NAME, clientConfiguration.region), Aws::MakeShared<ServerlessApplicationRepositoryErrorMarshaller>(ALLOCATION_TAG)), m_executor(clientConfiguration.executor) { init(clientConfiguration); } ServerlessApplicationRepositoryClient::~ServerlessApplicationRepositoryClient() { } void ServerlessApplicationRepositoryClient::init(const ClientConfiguration& config) { m_configScheme = SchemeMapper::ToString(config.scheme); if (config.endpointOverride.empty()) { m_uri = m_configScheme + "://" + ServerlessApplicationRepositoryEndpoint::ForRegion(config.region, config.useDualStack); } else { OverrideEndpoint(config.endpointOverride); } } void ServerlessApplicationRepositoryClient::OverrideEndpoint(const Aws::String& endpoint) { if (endpoint.compare(0, 7, "http://") == 0 || endpoint.compare(0, 8, "https://") == 0) { m_uri = endpoint; } else { m_uri = m_configScheme + "://" + endpoint; } } CreateApplicationOutcome ServerlessApplicationRepositoryClient::CreateApplication(const CreateApplicationRequest& request) const { Aws::Http::URI uri = m_uri; Aws::StringStream ss; ss << "/applications"; uri.SetPath(uri.GetPath() + ss.str()); JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_POST, Aws::Auth::SIGV4_SIGNER); if(outcome.IsSuccess()) { return CreateApplicationOutcome(CreateApplicationResult(outcome.GetResult())); } else { return CreateApplicationOutcome(outcome.GetError()); } } CreateApplicationOutcomeCallable ServerlessApplicationRepositoryClient::CreateApplicationCallable(const CreateApplicationRequest& request) const { auto task = Aws::MakeShared< std::packaged_task< CreateApplicationOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->CreateApplication(request); } ); auto packagedFunction = [task]() { (*task)(); }; m_executor->Submit(packagedFunction); return task->get_future(); } void ServerlessApplicationRepositoryClient::CreateApplicationAsync(const CreateApplicationRequest& request, const 
CreateApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { m_executor->Submit( [this, request, handler, context](){ this->CreateApplicationAsyncHelper( request, handler, context ); } ); } void ServerlessApplicationRepositoryClient::CreateApplicationAsyncHelper(const CreateApplicationRequest& request, const CreateApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { handler(this, request, CreateApplication(request), context); } CreateApplicationVersionOutcome ServerlessApplicationRepositoryClient::CreateApplicationVersion(const CreateApplicationVersionRequest& request) const { if (!request.ApplicationIdHasBeenSet()) { AWS_LOGSTREAM_ERROR("CreateApplicationVersion", "Required field: ApplicationId, is not set"); return CreateApplicationVersionOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false)); } if (!request.SemanticVersionHasBeenSet()) { AWS_LOGSTREAM_ERROR("CreateApplicationVersion", "Required field: SemanticVersion, is not set"); return CreateApplicationVersionOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [SemanticVersion]", false)); } Aws::Http::URI uri = m_uri; Aws::StringStream ss; ss << "/applications/"; ss << request.GetApplicationId(); ss << "/versions/"; ss << request.GetSemanticVersion(); uri.SetPath(uri.GetPath() + ss.str()); JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_PUT, Aws::Auth::SIGV4_SIGNER); if(outcome.IsSuccess()) { return CreateApplicationVersionOutcome(CreateApplicationVersionResult(outcome.GetResult())); } else { return CreateApplicationVersionOutcome(outcome.GetError()); } } 
CreateApplicationVersionOutcomeCallable ServerlessApplicationRepositoryClient::CreateApplicationVersionCallable(const CreateApplicationVersionRequest& request) const { auto task = Aws::MakeShared< std::packaged_task< CreateApplicationVersionOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->CreateApplicationVersion(request); } ); auto packagedFunction = [task]() { (*task)(); }; m_executor->Submit(packagedFunction); return task->get_future(); } void ServerlessApplicationRepositoryClient::CreateApplicationVersionAsync(const CreateApplicationVersionRequest& request, const CreateApplicationVersionResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { m_executor->Submit( [this, request, handler, context](){ this->CreateApplicationVersionAsyncHelper( request, handler, context ); } ); } void ServerlessApplicationRepositoryClient::CreateApplicationVersionAsyncHelper(const CreateApplicationVersionRequest& request, const CreateApplicationVersionResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { handler(this, request, CreateApplicationVersion(request), context); } CreateCloudFormationChangeSetOutcome ServerlessApplicationRepositoryClient::CreateCloudFormationChangeSet(const CreateCloudFormationChangeSetRequest& request) const { if (!request.ApplicationIdHasBeenSet()) { AWS_LOGSTREAM_ERROR("CreateCloudFormationChangeSet", "Required field: ApplicationId, is not set"); return CreateCloudFormationChangeSetOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false)); } Aws::Http::URI uri = m_uri; Aws::StringStream ss; ss << "/applications/"; ss << request.GetApplicationId(); ss << "/changesets"; uri.SetPath(uri.GetPath() + ss.str()); JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_POST, 
Aws::Auth::SIGV4_SIGNER); if(outcome.IsSuccess()) { return CreateCloudFormationChangeSetOutcome(CreateCloudFormationChangeSetResult(outcome.GetResult())); } else { return CreateCloudFormationChangeSetOutcome(outcome.GetError()); } } CreateCloudFormationChangeSetOutcomeCallable ServerlessApplicationRepositoryClient::CreateCloudFormationChangeSetCallable(const CreateCloudFormationChangeSetRequest& request) const { auto task = Aws::MakeShared< std::packaged_task< CreateCloudFormationChangeSetOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->CreateCloudFormationChangeSet(request); } ); auto packagedFunction = [task]() { (*task)(); }; m_executor->Submit(packagedFunction); return task->get_future(); } void ServerlessApplicationRepositoryClient::CreateCloudFormationChangeSetAsync(const CreateCloudFormationChangeSetRequest& request, const CreateCloudFormationChangeSetResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { m_executor->Submit( [this, request, handler, context](){ this->CreateCloudFormationChangeSetAsyncHelper( request, handler, context ); } ); } void ServerlessApplicationRepositoryClient::CreateCloudFormationChangeSetAsyncHelper(const CreateCloudFormationChangeSetRequest& request, const CreateCloudFormationChangeSetResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { handler(this, request, CreateCloudFormationChangeSet(request), context); } CreateCloudFormationTemplateOutcome ServerlessApplicationRepositoryClient::CreateCloudFormationTemplate(const CreateCloudFormationTemplateRequest& request) const { if (!request.ApplicationIdHasBeenSet()) { AWS_LOGSTREAM_ERROR("CreateCloudFormationTemplate", "Required field: ApplicationId, is not set"); return CreateCloudFormationTemplateOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing 
required field [ApplicationId]", false)); } Aws::Http::URI uri = m_uri; Aws::StringStream ss; ss << "/applications/"; ss << request.GetApplicationId(); ss << "/templates"; uri.SetPath(uri.GetPath() + ss.str()); JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_POST, Aws::Auth::SIGV4_SIGNER); if(outcome.IsSuccess()) { return CreateCloudFormationTemplateOutcome(CreateCloudFormationTemplateResult(outcome.GetResult())); } else { return CreateCloudFormationTemplateOutcome(outcome.GetError()); } } CreateCloudFormationTemplateOutcomeCallable ServerlessApplicationRepositoryClient::CreateCloudFormationTemplateCallable(const CreateCloudFormationTemplateRequest& request) const { auto task = Aws::MakeShared< std::packaged_task< CreateCloudFormationTemplateOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->CreateCloudFormationTemplate(request); } ); auto packagedFunction = [task]() { (*task)(); }; m_executor->Submit(packagedFunction); return task->get_future(); } void ServerlessApplicationRepositoryClient::CreateCloudFormationTemplateAsync(const CreateCloudFormationTemplateRequest& request, const CreateCloudFormationTemplateResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { m_executor->Submit( [this, request, handler, context](){ this->CreateCloudFormationTemplateAsyncHelper( request, handler, context ); } ); } void ServerlessApplicationRepositoryClient::CreateCloudFormationTemplateAsyncHelper(const CreateCloudFormationTemplateRequest& request, const CreateCloudFormationTemplateResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { handler(this, request, CreateCloudFormationTemplate(request), context); } DeleteApplicationOutcome ServerlessApplicationRepositoryClient::DeleteApplication(const DeleteApplicationRequest& request) const { if (!request.ApplicationIdHasBeenSet()) { AWS_LOGSTREAM_ERROR("DeleteApplication", "Required 
field: ApplicationId, is not set"); return DeleteApplicationOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false)); } Aws::Http::URI uri = m_uri; Aws::StringStream ss; ss << "/applications/"; ss << request.GetApplicationId(); uri.SetPath(uri.GetPath() + ss.str()); JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_DELETE, Aws::Auth::SIGV4_SIGNER); if(outcome.IsSuccess()) { return DeleteApplicationOutcome(NoResult()); } else { return DeleteApplicationOutcome(outcome.GetError()); } } DeleteApplicationOutcomeCallable ServerlessApplicationRepositoryClient::DeleteApplicationCallable(const DeleteApplicationRequest& request) const { auto task = Aws::MakeShared< std::packaged_task< DeleteApplicationOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->DeleteApplication(request); } ); auto packagedFunction = [task]() { (*task)(); }; m_executor->Submit(packagedFunction); return task->get_future(); } void ServerlessApplicationRepositoryClient::DeleteApplicationAsync(const DeleteApplicationRequest& request, const DeleteApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { m_executor->Submit( [this, request, handler, context](){ this->DeleteApplicationAsyncHelper( request, handler, context ); } ); } void ServerlessApplicationRepositoryClient::DeleteApplicationAsyncHelper(const DeleteApplicationRequest& request, const DeleteApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { handler(this, request, DeleteApplication(request), context); } GetApplicationOutcome ServerlessApplicationRepositoryClient::GetApplication(const GetApplicationRequest& request) const { if (!request.ApplicationIdHasBeenSet()) { AWS_LOGSTREAM_ERROR("GetApplication", "Required field: ApplicationId, 
is not set"); return GetApplicationOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false)); } Aws::Http::URI uri = m_uri; Aws::StringStream ss; ss << "/applications/"; ss << request.GetApplicationId(); uri.SetPath(uri.GetPath() + ss.str()); JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_GET, Aws::Auth::SIGV4_SIGNER); if(outcome.IsSuccess()) { return GetApplicationOutcome(GetApplicationResult(outcome.GetResult())); } else { return GetApplicationOutcome(outcome.GetError()); } } GetApplicationOutcomeCallable ServerlessApplicationRepositoryClient::GetApplicationCallable(const GetApplicationRequest& request) const { auto task = Aws::MakeShared< std::packaged_task< GetApplicationOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->GetApplication(request); } ); auto packagedFunction = [task]() { (*task)(); }; m_executor->Submit(packagedFunction); return task->get_future(); } void ServerlessApplicationRepositoryClient::GetApplicationAsync(const GetApplicationRequest& request, const GetApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { m_executor->Submit( [this, request, handler, context](){ this->GetApplicationAsyncHelper( request, handler, context ); } ); } void ServerlessApplicationRepositoryClient::GetApplicationAsyncHelper(const GetApplicationRequest& request, const GetApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { handler(this, request, GetApplication(request), context); } GetApplicationPolicyOutcome ServerlessApplicationRepositoryClient::GetApplicationPolicy(const GetApplicationPolicyRequest& request) const { if (!request.ApplicationIdHasBeenSet()) { AWS_LOGSTREAM_ERROR("GetApplicationPolicy", "Required field: ApplicationId, is not set"); 
return GetApplicationPolicyOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false)); } Aws::Http::URI uri = m_uri; Aws::StringStream ss; ss << "/applications/"; ss << request.GetApplicationId(); ss << "/policy"; uri.SetPath(uri.GetPath() + ss.str()); JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_GET, Aws::Auth::SIGV4_SIGNER); if(outcome.IsSuccess()) { return GetApplicationPolicyOutcome(GetApplicationPolicyResult(outcome.GetResult())); } else { return GetApplicationPolicyOutcome(outcome.GetError()); } } GetApplicationPolicyOutcomeCallable ServerlessApplicationRepositoryClient::GetApplicationPolicyCallable(const GetApplicationPolicyRequest& request) const { auto task = Aws::MakeShared< std::packaged_task< GetApplicationPolicyOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->GetApplicationPolicy(request); } ); auto packagedFunction = [task]() { (*task)(); }; m_executor->Submit(packagedFunction); return task->get_future(); } void ServerlessApplicationRepositoryClient::GetApplicationPolicyAsync(const GetApplicationPolicyRequest& request, const GetApplicationPolicyResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { m_executor->Submit( [this, request, handler, context](){ this->GetApplicationPolicyAsyncHelper( request, handler, context ); } ); } void ServerlessApplicationRepositoryClient::GetApplicationPolicyAsyncHelper(const GetApplicationPolicyRequest& request, const GetApplicationPolicyResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { handler(this, request, GetApplicationPolicy(request), context); } GetCloudFormationTemplateOutcome ServerlessApplicationRepositoryClient::GetCloudFormationTemplate(const GetCloudFormationTemplateRequest& request) const { if 
(!request.ApplicationIdHasBeenSet()) { AWS_LOGSTREAM_ERROR("GetCloudFormationTemplate", "Required field: ApplicationId, is not set"); return GetCloudFormationTemplateOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false)); } if (!request.TemplateIdHasBeenSet()) { AWS_LOGSTREAM_ERROR("GetCloudFormationTemplate", "Required field: TemplateId, is not set"); return GetCloudFormationTemplateOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [TemplateId]", false)); } Aws::Http::URI uri = m_uri; Aws::StringStream ss; ss << "/applications/"; ss << request.GetApplicationId(); ss << "/templates/"; ss << request.GetTemplateId(); uri.SetPath(uri.GetPath() + ss.str()); JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_GET, Aws::Auth::SIGV4_SIGNER); if(outcome.IsSuccess()) { return GetCloudFormationTemplateOutcome(GetCloudFormationTemplateResult(outcome.GetResult())); } else { return GetCloudFormationTemplateOutcome(outcome.GetError()); } } GetCloudFormationTemplateOutcomeCallable ServerlessApplicationRepositoryClient::GetCloudFormationTemplateCallable(const GetCloudFormationTemplateRequest& request) const { auto task = Aws::MakeShared< std::packaged_task< GetCloudFormationTemplateOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->GetCloudFormationTemplate(request); } ); auto packagedFunction = [task]() { (*task)(); }; m_executor->Submit(packagedFunction); return task->get_future(); } void ServerlessApplicationRepositoryClient::GetCloudFormationTemplateAsync(const GetCloudFormationTemplateRequest& request, const GetCloudFormationTemplateResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { m_executor->Submit( [this, request, 
handler, context](){ this->GetCloudFormationTemplateAsyncHelper( request, handler, context ); } ); } void ServerlessApplicationRepositoryClient::GetCloudFormationTemplateAsyncHelper(const GetCloudFormationTemplateRequest& request, const GetCloudFormationTemplateResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { handler(this, request, GetCloudFormationTemplate(request), context); } ListApplicationDependenciesOutcome ServerlessApplicationRepositoryClient::ListApplicationDependencies(const ListApplicationDependenciesRequest& request) const { if (!request.ApplicationIdHasBeenSet()) { AWS_LOGSTREAM_ERROR("ListApplicationDependencies", "Required field: ApplicationId, is not set"); return ListApplicationDependenciesOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false)); } Aws::Http::URI uri = m_uri; Aws::StringStream ss; ss << "/applications/"; ss << request.GetApplicationId(); ss << "/dependencies"; uri.SetPath(uri.GetPath() + ss.str()); JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_GET, Aws::Auth::SIGV4_SIGNER); if(outcome.IsSuccess()) { return ListApplicationDependenciesOutcome(ListApplicationDependenciesResult(outcome.GetResult())); } else { return ListApplicationDependenciesOutcome(outcome.GetError()); } } ListApplicationDependenciesOutcomeCallable ServerlessApplicationRepositoryClient::ListApplicationDependenciesCallable(const ListApplicationDependenciesRequest& request) const { auto task = Aws::MakeShared< std::packaged_task< ListApplicationDependenciesOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->ListApplicationDependencies(request); } ); auto packagedFunction = [task]() { (*task)(); }; m_executor->Submit(packagedFunction); return task->get_future(); } void 
ServerlessApplicationRepositoryClient::ListApplicationDependenciesAsync(const ListApplicationDependenciesRequest& request, const ListApplicationDependenciesResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { m_executor->Submit( [this, request, handler, context](){ this->ListApplicationDependenciesAsyncHelper( request, handler, context ); } ); } void ServerlessApplicationRepositoryClient::ListApplicationDependenciesAsyncHelper(const ListApplicationDependenciesRequest& request, const ListApplicationDependenciesResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { handler(this, request, ListApplicationDependencies(request), context); } ListApplicationVersionsOutcome ServerlessApplicationRepositoryClient::ListApplicationVersions(const ListApplicationVersionsRequest& request) const { if (!request.ApplicationIdHasBeenSet()) { AWS_LOGSTREAM_ERROR("ListApplicationVersions", "Required field: ApplicationId, is not set"); return ListApplicationVersionsOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false)); } Aws::Http::URI uri = m_uri; Aws::StringStream ss; ss << "/applications/"; ss << request.GetApplicationId(); ss << "/versions"; uri.SetPath(uri.GetPath() + ss.str()); JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_GET, Aws::Auth::SIGV4_SIGNER); if(outcome.IsSuccess()) { return ListApplicationVersionsOutcome(ListApplicationVersionsResult(outcome.GetResult())); } else { return ListApplicationVersionsOutcome(outcome.GetError()); } } ListApplicationVersionsOutcomeCallable ServerlessApplicationRepositoryClient::ListApplicationVersionsCallable(const ListApplicationVersionsRequest& request) const { auto task = Aws::MakeShared< std::packaged_task< ListApplicationVersionsOutcome() > 
>(ALLOCATION_TAG, [this, request](){ return this->ListApplicationVersions(request); } ); auto packagedFunction = [task]() { (*task)(); }; m_executor->Submit(packagedFunction); return task->get_future(); } void ServerlessApplicationRepositoryClient::ListApplicationVersionsAsync(const ListApplicationVersionsRequest& request, const ListApplicationVersionsResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { m_executor->Submit( [this, request, handler, context](){ this->ListApplicationVersionsAsyncHelper( request, handler, context ); } ); } void ServerlessApplicationRepositoryClient::ListApplicationVersionsAsyncHelper(const ListApplicationVersionsRequest& request, const ListApplicationVersionsResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { handler(this, request, ListApplicationVersions(request), context); } ListApplicationsOutcome ServerlessApplicationRepositoryClient::ListApplications(const ListApplicationsRequest& request) const { Aws::Http::URI uri = m_uri; Aws::StringStream ss; ss << "/applications"; uri.SetPath(uri.GetPath() + ss.str()); JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_GET, Aws::Auth::SIGV4_SIGNER); if(outcome.IsSuccess()) { return ListApplicationsOutcome(ListApplicationsResult(outcome.GetResult())); } else { return ListApplicationsOutcome(outcome.GetError()); } } ListApplicationsOutcomeCallable ServerlessApplicationRepositoryClient::ListApplicationsCallable(const ListApplicationsRequest& request) const { auto task = Aws::MakeShared< std::packaged_task< ListApplicationsOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->ListApplications(request); } ); auto packagedFunction = [task]() { (*task)(); }; m_executor->Submit(packagedFunction); return task->get_future(); } void ServerlessApplicationRepositoryClient::ListApplicationsAsync(const ListApplicationsRequest& request, const 
ListApplicationsResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { m_executor->Submit( [this, request, handler, context](){ this->ListApplicationsAsyncHelper( request, handler, context ); } ); } void ServerlessApplicationRepositoryClient::ListApplicationsAsyncHelper(const ListApplicationsRequest& request, const ListApplicationsResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { handler(this, request, ListApplications(request), context); } PutApplicationPolicyOutcome ServerlessApplicationRepositoryClient::PutApplicationPolicy(const PutApplicationPolicyRequest& request) const { if (!request.ApplicationIdHasBeenSet()) { AWS_LOGSTREAM_ERROR("PutApplicationPolicy", "Required field: ApplicationId, is not set"); return PutApplicationPolicyOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false)); } Aws::Http::URI uri = m_uri; Aws::StringStream ss; ss << "/applications/"; ss << request.GetApplicationId(); ss << "/policy"; uri.SetPath(uri.GetPath() + ss.str()); JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_PUT, Aws::Auth::SIGV4_SIGNER); if(outcome.IsSuccess()) { return PutApplicationPolicyOutcome(PutApplicationPolicyResult(outcome.GetResult())); } else { return PutApplicationPolicyOutcome(outcome.GetError()); } } PutApplicationPolicyOutcomeCallable ServerlessApplicationRepositoryClient::PutApplicationPolicyCallable(const PutApplicationPolicyRequest& request) const { auto task = Aws::MakeShared< std::packaged_task< PutApplicationPolicyOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->PutApplicationPolicy(request); } ); auto packagedFunction = [task]() { (*task)(); }; m_executor->Submit(packagedFunction); return task->get_future(); } void 
ServerlessApplicationRepositoryClient::PutApplicationPolicyAsync(const PutApplicationPolicyRequest& request, const PutApplicationPolicyResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { m_executor->Submit( [this, request, handler, context](){ this->PutApplicationPolicyAsyncHelper( request, handler, context ); } ); } void ServerlessApplicationRepositoryClient::PutApplicationPolicyAsyncHelper(const PutApplicationPolicyRequest& request, const PutApplicationPolicyResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { handler(this, request, PutApplicationPolicy(request), context); } UnshareApplicationOutcome ServerlessApplicationRepositoryClient::UnshareApplication(const UnshareApplicationRequest& request) const { if (!request.ApplicationIdHasBeenSet()) { AWS_LOGSTREAM_ERROR("UnshareApplication", "Required field: ApplicationId, is not set"); return UnshareApplicationOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false)); } Aws::Http::URI uri = m_uri; Aws::StringStream ss; ss << "/applications/"; ss << request.GetApplicationId(); ss << "/unshare"; uri.SetPath(uri.GetPath() + ss.str()); JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_POST, Aws::Auth::SIGV4_SIGNER); if(outcome.IsSuccess()) { return UnshareApplicationOutcome(NoResult()); } else { return UnshareApplicationOutcome(outcome.GetError()); } } UnshareApplicationOutcomeCallable ServerlessApplicationRepositoryClient::UnshareApplicationCallable(const UnshareApplicationRequest& request) const { auto task = Aws::MakeShared< std::packaged_task< UnshareApplicationOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->UnshareApplication(request); } ); auto packagedFunction = [task]() { (*task)(); }; 
m_executor->Submit(packagedFunction); return task->get_future(); } void ServerlessApplicationRepositoryClient::UnshareApplicationAsync(const UnshareApplicationRequest& request, const UnshareApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { m_executor->Submit( [this, request, handler, context](){ this->UnshareApplicationAsyncHelper( request, handler, context ); } ); } void ServerlessApplicationRepositoryClient::UnshareApplicationAsyncHelper(const UnshareApplicationRequest& request, const UnshareApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { handler(this, request, UnshareApplication(request), context); } UpdateApplicationOutcome ServerlessApplicationRepositoryClient::UpdateApplication(const UpdateApplicationRequest& request) const { if (!request.ApplicationIdHasBeenSet()) { AWS_LOGSTREAM_ERROR("UpdateApplication", "Required field: ApplicationId, is not set"); return UpdateApplicationOutcome(Aws::Client::AWSError<ServerlessApplicationRepositoryErrors>(ServerlessApplicationRepositoryErrors::MISSING_PARAMETER, "MISSING_PARAMETER", "Missing required field [ApplicationId]", false)); } Aws::Http::URI uri = m_uri; Aws::StringStream ss; ss << "/applications/"; ss << request.GetApplicationId(); uri.SetPath(uri.GetPath() + ss.str()); JsonOutcome outcome = MakeRequest(uri, request, Aws::Http::HttpMethod::HTTP_PATCH, Aws::Auth::SIGV4_SIGNER); if(outcome.IsSuccess()) { return UpdateApplicationOutcome(UpdateApplicationResult(outcome.GetResult())); } else { return UpdateApplicationOutcome(outcome.GetError()); } } UpdateApplicationOutcomeCallable ServerlessApplicationRepositoryClient::UpdateApplicationCallable(const UpdateApplicationRequest& request) const { auto task = Aws::MakeShared< std::packaged_task< UpdateApplicationOutcome() > >(ALLOCATION_TAG, [this, request](){ return this->UpdateApplication(request); } ); auto 
packagedFunction = [task]() { (*task)(); }; m_executor->Submit(packagedFunction); return task->get_future(); } void ServerlessApplicationRepositoryClient::UpdateApplicationAsync(const UpdateApplicationRequest& request, const UpdateApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { m_executor->Submit( [this, request, handler, context](){ this->UpdateApplicationAsyncHelper( request, handler, context ); } ); } void ServerlessApplicationRepositoryClient::UpdateApplicationAsyncHelper(const UpdateApplicationRequest& request, const UpdateApplicationResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const { handler(this, request, UpdateApplication(request), context); }
cedral/aws-sdk-cpp
aws-cpp-sdk-serverlessrepo/source/ServerlessApplicationRepositoryClient.cpp
C++
apache-2.0
35,528
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.managedblockchain.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.Request; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.managedblockchain.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.protocol.*; import com.amazonaws.protocol.Protocol; import com.amazonaws.annotation.SdkInternalApi; /** * DeleteMemberRequest Marshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class DeleteMemberRequestProtocolMarshaller implements Marshaller<Request<DeleteMemberRequest>, DeleteMemberRequest> { private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.REST_JSON) .requestUri("/networks/{networkId}/members/{memberId}").httpMethodName(HttpMethodName.DELETE).hasExplicitPayloadMember(false) .hasPayloadMembers(false).serviceName("AmazonManagedBlockchain").build(); private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory; public DeleteMemberRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) { this.protocolFactory = protocolFactory; } public Request<DeleteMemberRequest> marshall(DeleteMemberRequest deleteMemberRequest) { if (deleteMemberRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { 
final ProtocolRequestMarshaller<DeleteMemberRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING, deleteMemberRequest); protocolMarshaller.startMarshalling(); DeleteMemberRequestMarshaller.getInstance().marshall(deleteMemberRequest, protocolMarshaller); return protocolMarshaller.finishMarshalling(); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
jentfoo/aws-sdk-java
aws-java-sdk-managedblockchain/src/main/java/com/amazonaws/services/managedblockchain/model/transform/DeleteMemberRequestProtocolMarshaller.java
Java
apache-2.0
2,674
package com.googlecode.blaisemath.graph.test;

/*
 * #%L
 * BlaiseGraphTheory
 * --
 * Copyright (C) 2009 - 2021 Elisha Peterson
 * --
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import com.google.common.graph.Graph;
import com.google.common.graph.Graphs;
import com.googlecode.blaisemath.graph.layout.CircleLayout;
import com.googlecode.blaisemath.graph.layout.CircleLayout.CircleLayoutParameters;
import com.googlecode.blaisemath.graph.layout.RandomBoxLayout;
import com.googlecode.blaisemath.graph.layout.RandomBoxLayout.BoxLayoutParameters;
import com.googlecode.blaisemath.graph.layout.SpringLayout;
import com.googlecode.blaisemath.graph.layout.SpringLayoutParameters;
import com.googlecode.blaisemath.graph.view.GraphComponent;
import com.googlecode.blaisemath.graph.view.VisualGraph;
import com.googlecode.blaisemath.graphics.Graphic;
import com.googlecode.blaisemath.util.Instrument;
import com.googlecode.blaisemath.firestarter.editor.EditorRegistration;
import com.googlecode.blaisemath.firestarter.property.PropertySheet;
import com.googlecode.blaisemath.firestarter.swing.RollupPanel;

import javax.swing.*;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.geom.Rectangle2D;

/**
 * Interactive test frame for exercising graph layouts against a mutating graph.
 * A toolbar triggers random/circle/spring layouts and structural mutations
 * (add nodes/edges, rewire, timed background mutation); a property-sheet panel
 * on the right exposes the layout parameters and graphics for live editing.
 * The GUI portion ({@code initComponents}) is NetBeans Form-Editor generated.
 */
@SuppressWarnings("ALL")
public class DynamicGraphTestFrame extends javax.swing.JFrame {

    // NOTE(review): appears unused in this class — candidate for removal, confirm.
    VisualGraph pga;
    /** Flag for when el needs points updated */
    boolean updateEL = true;
    SpringLayout energyLayout;
    final SpringLayoutParameters layoutParams;
    // Mutable source graph; an immutable snapshot (graphCopy) is what the plot displays.
    final MyTestGraph graph = new MyTestGraph();
    Graph<String> graphCopy;

    /** Builds the frame: registers property editors, wires the GUI, and installs the graph snapshot. */
    public DynamicGraphTestFrame() {
        EditorRegistration.registerEditors();
        initComponents();
        // The plot is given a snapshot, not the live graph; updateGraph() refreshes it.
        graphCopy = Graphs.copyOf(graph);
        plot.setGraph(graphCopy);
        plot.getAdapter().getViewGraph().setDragEnabled(true);
        plot.getAdapter().getViewGraph().setPointSelectionEnabled(true);

        // PANELS
        energyLayout = new SpringLayout();
        layoutParams = energyLayout.createParameters();
        rollupPanel1.add("Energy Layout", PropertySheet.forBean(layoutParams));
        for (Graphic p : plot.getGraphicRoot().getGraphics()) {
            rollupPanel1.add(p.toString(), PropertySheet.forBean(p));
        }
        // Dump instrumentation stats when the window is closed.
        addWindowListener(new WindowAdapter(){
            @Override
            public void windowClosing(WindowEvent e) {
                Instrument.print(System.out, 50);
            }
        });
    }

    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jToolBar1 = new javax.swing.JToolBar();
        randomLB = new javax.swing.JButton();
        circleLB = new javax.swing.JButton();
        jSeparator1 = new javax.swing.JToolBar.Separator();
        jLabel1 = new javax.swing.JLabel();
        energyIB = new javax.swing.JButton();
        energyAB = new javax.swing.JButton();
        energySB = new javax.swing.JButton();
        jSeparator2 = new javax.swing.JToolBar.Separator();
        jLabel2 = new javax.swing.JLabel();
        addEdgesB = new javax.swing.JButton();
        rewireB = new javax.swing.JButton();
        addThreadedB = new javax.swing.JButton();
        addNodesB = new javax.swing.JButton();
        threadStopB = new javax.swing.JButton();
        jScrollPane1 = new javax.swing.JScrollPane();
        rollupPanel1 = new RollupPanel();
        plot = new GraphComponent();

        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        setBackground(new java.awt.Color(0, 0, 0));

        jToolBar1.setRollover(true);

        randomLB.setText("Random Layout");
        randomLB.setFocusable(false);
        randomLB.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
        randomLB.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
        randomLB.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                randomLBActionPerformed(evt);
            }
        });
        jToolBar1.add(randomLB);

        circleLB.setText("Circle Layout");
        circleLB.setFocusable(false);
        circleLB.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
        circleLB.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
        circleLB.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                circleLBActionPerformed(evt);
            }
        });
        jToolBar1.add(circleLB);
        jToolBar1.add(jSeparator1);

        jLabel1.setText("ENERGY:");
        jToolBar1.add(jLabel1);

        energyIB.setText("iterate");
        energyIB.setFocusable(false);
        energyIB.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
        energyIB.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
        energyIB.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                energyIBActionPerformed(evt);
            }
        });
        jToolBar1.add(energyIB);

        energyAB.setText("animate");
        energyAB.setFocusable(false);
        energyAB.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
        energyAB.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
        energyAB.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                energyABActionPerformed(evt);
            }
        });
        jToolBar1.add(energyAB);

        energySB.setText("stop");
        energySB.setFocusable(false);
        energySB.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
        energySB.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
        energySB.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                energySBActionPerformed(evt);
            }
        });
        jToolBar1.add(energySB);
        jToolBar1.add(jSeparator2);

        jLabel2.setText("ADD:");
        jToolBar1.add(jLabel2);

        addNodesB.setText("nodes");
        addNodesB.setFocusable(false);
        addNodesB.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
        addNodesB.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
        addNodesB.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                addNodesBActionPerformed(evt);
            }
        });
        jToolBar1.add(addNodesB);

        addEdgesB.setText("edges");
        addEdgesB.setFocusable(false);
        addEdgesB.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
        addEdgesB.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
        addEdgesB.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                addEdgesBActionPerformed(evt);
            }
        });
        jToolBar1.add(addEdgesB);

        rewireB.setText("rewire");
        rewireB.setFocusable(false);
        rewireB.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
        rewireB.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
        rewireB.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                rewireBActionPerformed(evt);
            }
        });
        jToolBar1.add(rewireB);

        addThreadedB.setText("threaded");
        addThreadedB.setFocusable(false);
        addThreadedB.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
        addThreadedB.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
        addThreadedB.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                addThreadedBActionPerformed(evt);
            }
        });
        jToolBar1.add(addThreadedB);

        threadStopB.setText("stop");
        threadStopB.setFocusable(false);
        threadStopB.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
        threadStopB.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
        threadStopB.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                threadStopBActionPerformed(evt);
            }
        });
        jToolBar1.add(threadStopB);

        getContentPane().add(jToolBar1, java.awt.BorderLayout.PAGE_START);

        jScrollPane1.setViewportView(rollupPanel1);

        getContentPane().add(jScrollPane1, java.awt.BorderLayout.EAST);
        getContentPane().add(plot, java.awt.BorderLayout.CENTER);

        pack();
    }// </editor-fold>//GEN-END:initComponents

    // Applies a one-shot random-box layout in a 1000x1000 box around the origin.
    private void randomLBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_randomLBActionPerformed
        updateEL = true;
        plot.getLayoutManager().applyLayout(RandomBoxLayout.getInstance(), null, new BoxLayoutParameters(new Rectangle2D.Double(-500, -500, 1000, 1000)));
    }//GEN-LAST:event_randomLBActionPerformed

    // Applies a one-shot circle layout of radius 500.
    private void circleLBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_circleLBActionPerformed
        updateEL = true;
        plot.getLayoutManager().applyLayout(CircleLayout.getInstance(), null, new CircleLayoutParameters(500.0));
    }//GEN-LAST:event_circleLBActionPerformed

    // Runs a single iteration of the spring ("energy") layout.
    private void energyIBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_energyIBActionPerformed
        if (energyLayout == null) {
            energyLayout = new SpringLayout();
        }
        plot.getLayoutManager().setLayoutAlgorithm(energyLayout);
        plot.getLayoutManager().setLayoutParameters(layoutParams);
        plot.getLayoutManager().iterateLayout();
        updateEL = false;
    }//GEN-LAST:event_energyIBActionPerformed

    // Starts continuous (animated) spring layout iteration.
    private void energyABActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_energyABActionPerformed
        if (energyLayout == null) {
            energyLayout = new SpringLayout();
        }
        plot.getLayoutManager().setLayoutAlgorithm(energyLayout);
        plot.getLayoutManager().setLayoutParameters(layoutParams);
        plot.getLayoutManager().setLayoutTaskActive(true);
    }//GEN-LAST:event_energyABActionPerformed

    // Stops the animated layout task.
    private void energySBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_energySBActionPerformed
        plot.getLayoutManager().setLayoutTaskActive(false);
    }//GEN-LAST:event_energySBActionPerformed

    /**
     * Snapshots the mutable test graph and pushes the copy to the plot on the EDT.
     * NOTE(review): the {@code synchronized} only guards the invokeLater scheduling
     * on the calling thread — the copy itself runs later on the EDT; confirm that
     * MyTestGraph tolerates being copied while mutated from the timer thread.
     */
    private synchronized void updateGraph() {
        SwingUtilities.invokeLater(() -> {
            graphCopy = Graphs.copyOf(graph);
            plot.getLayoutManager().setGraph(graphCopy);
            plot.getAdapter().getViewGraph().setEdgeSet(graphCopy.edges());
        });
    }

    // Adds 5 nodes to the source graph and refreshes the displayed snapshot.
    private void addNodesBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_addNodesBActionPerformed
        graph.addNodes(5);
        updateGraph();
    }//GEN-LAST:event_addNodesBActionPerformed

    // Adds 5 edges to the source graph and refreshes the displayed snapshot.
    private void addEdgesBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_addEdgesBActionPerformed
        graph.addEdges(5);
        updateGraph();
    }//GEN-LAST:event_addEdgesBActionPerformed

    // Rewires the graph (50 attempts, 5 at a time) and refreshes the snapshot.
    private void rewireBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_rewireBActionPerformed
        graph.rewire(50, 5);
        updateGraph();
    }//GEN-LAST:event_rewireBActionPerformed

    // Shared timer for the background-mutation task; tt is the currently scheduled task, if any.
    final java.util.Timer t = new java.util.Timer();
    java.util.TimerTask tt;

    // Starts a repeating background task (100ms delay, 500ms period) that
    // mutates the graph off the EDT, cancelling any previous task first.
    private void addThreadedBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_addThreadedBActionPerformed
        if (tt != null) {
            tt.cancel();
        }
        tt = new java.util.TimerTask() {
            @Override
            public void run() {
                graph.removeEdges(10);
                graph.addNodes(1);
                graph.removeNodes(1);
                graph.addEdges(2);
                updateGraph();
            }
        };
        t.schedule(tt, 100, 500);
    }//GEN-LAST:event_addThreadedBActionPerformed

    // Cancels the background-mutation task, if one is running.
    private void threadStopBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_threadStopBActionPerformed
        if (tt != null) {
            tt.cancel();
        }
    }//GEN-LAST:event_threadStopBActionPerformed

    /**
     * @param args the command line arguments
     */
    public static void main(String args[]) {
        java.awt.EventQueue.invokeLater(() -> new DynamicGraphTestFrame().setVisible(true));
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton addEdgesB;
    private javax.swing.JButton addThreadedB;
    private javax.swing.JButton addNodesB;
    private javax.swing.JButton circleLB;
    private javax.swing.JButton energyAB;
    private javax.swing.JButton energyIB;
    private javax.swing.JButton energySB;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JToolBar.Separator jSeparator1;
    private javax.swing.JToolBar.Separator jSeparator2;
    private javax.swing.JToolBar jToolBar1;
    private GraphComponent plot;
    private javax.swing.JButton randomLB;
    private javax.swing.JButton rewireB;
    private RollupPanel rollupPanel1;
    private javax.swing.JButton threadStopB;
    // End of variables declaration//GEN-END:variables
}
triathematician/blaisemath
blaise-graph-theory-ui/src/test/java/com/googlecode/blaisemath/graph/test/DynamicGraphTestFrame.java
Java
apache-2.0
14,531
package com.github.christianfranco.geomatch.exception;

import com.github.christianfranco.geomatch.entities.ErrorCode;

/**
 * Exception raised by the phone-number repository layer, carrying a
 * {@link ErrorCode} that identifies the failure.
 *
 * Created by Christian Franco on 12/12/2016 14:03.
 */
public class PhoneNumberRepositoryException extends GeoMathException {

    // Exceptions are Serializable; an explicit serialVersionUID keeps the
    // serialized form stable across unrelated recompiles (Effective Java, Item 87).
    private static final long serialVersionUID = 1L;

    /**
     * @param errorCode code identifying the failure; forwarded to {@link GeoMathException}
     */
    public PhoneNumberRepositoryException(ErrorCode errorCode) {
        super(errorCode);
    }

    /**
     * @param errorCode  code identifying the failure
     * @param parameters extra values forwarded to {@link GeoMathException}
     *                   (presumably message placeholders — confirm against the superclass)
     */
    public PhoneNumberRepositoryException(ErrorCode errorCode, String... parameters) {
        super(errorCode, parameters);
    }
}
christianfranco/the-clean-code-bay
geo-match/server/core/src/main/java/com/github/christianfranco/geomatch/exception/PhoneNumberRepositoryException.java
Java
apache-2.0
498
// Code generated by msgraph.go/gen DO NOT EDIT.

package msgraph

import "context"

// FieldValueSetRequestBuilder is request builder for FieldValueSet
type FieldValueSetRequestBuilder struct{ BaseRequestBuilder }

// Request returns FieldValueSetRequest
func (b *FieldValueSetRequestBuilder) Request() *FieldValueSetRequest {
	return &FieldValueSetRequest{
		BaseRequest: BaseRequest{baseURL: b.baseURL, client: b.client},
	}
}

// FieldValueSetRequest is request for FieldValueSet
type FieldValueSetRequest struct{ BaseRequest }

// Get performs GET request for FieldValueSet
func (r *FieldValueSetRequest) Get(ctx context.Context) (resObj *FieldValueSet, err error) {
	// Any query options accumulated on the request are appended pre-encoded.
	var query string
	if r.query != nil {
		query = "?" + r.query.Encode()
	}
	err = r.JSONRequest(ctx, "GET", query, nil, &resObj)
	return
}

// Update performs PATCH request for FieldValueSet
func (r *FieldValueSetRequest) Update(ctx context.Context, reqObj *FieldValueSet) error {
	return r.JSONRequest(ctx, "PATCH", "", reqObj, nil)
}

// Delete performs DELETE request for FieldValueSet
func (r *FieldValueSetRequest) Delete(ctx context.Context) error {
	return r.JSONRequest(ctx, "DELETE", "", nil, nil)
}
42wim/matterbridge
vendor/github.com/yaegashi/msgraph.go/beta/RequestField.go
GO
apache-2.0
1,181
package com.ts.util.doctor;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * Static lookup tables for doctor-side rule-check ("rstype") display.
 * Maps each rule-type key (e.g. "diaginfo", "dosage") to the single-character
 * label shown in the UI, and to the CSS button-color class used to render it.
 *
 * Both maps are exposed as unmodifiable views: the original code published
 * mutable public HashMaps, which any caller could corrupt at runtime.
 */
public class DoctorConst {

	/** rule-type key -> one-character display label (unmodifiable). */
	public static final Map<String, String> rstypeMap;
	static {
		Map<String, String> m = new HashMap<String, String>();
		m.put("diaginfo", "禁");
		m.put("dosage", "法");
		m.put("ingredien", "重");
		m.put("interaction", "相");
		m.put("iv_effect", "配");
		m.put("side", "反");
		m.put("administrator", "途");
		m.put("specpeople", "特");
		m.put("manager", "管");
		m.put("manager4Two", "管");
		rstypeMap = Collections.unmodifiableMap(m);
	}

	/** rule-type key -> CSS button color class (unmodifiable). */
	public static final Map<String, String> rstypeColorMap;
	static {
		Map<String, String> m = new HashMap<String, String>();
		m.put("diaginfo", "btn-pink");
		m.put("dosage", "btn-warning");
		m.put("ingredien", "btn-success");
		m.put("interaction", "btn-yellow");
		m.put("iv_effect", "btn-grey");
		m.put("side", "btn-danger");
		m.put("administrator", "btn-info");
		m.put("specpeople", "btn-purple");
		m.put("manager", "btn-success");
		m.put("manager4Two", "btn-success");
		rstypeColorMap = Collections.unmodifiableMap(m);
	}

	// Utility holder — not meant to be instantiated.
	private DoctorConst() {
	}
}
ljcservice/autumnprogram
src/main/java/com/ts/util/doctor/DoctorConst.java
Java
apache-2.0
1,159
/*- * << * UAVStack * == * Copyright (C) 2016 - 2017 UAVStack * == * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * >> */ package com.creditease.agent.feature.hbagent; import com.creditease.agent.http.api.UAVHttpMessage; import com.creditease.agent.spi.AbstractHttpServiceComponent; import com.creditease.agent.spi.HttpMessage; public class HeartBeatQueryListenWorker extends AbstractHttpServiceComponent<UAVHttpMessage> { public HeartBeatQueryListenWorker(String cName, String feature, String initHandlerKey) { super(cName, feature, initHandlerKey); } @Override protected UAVHttpMessage adaptRequest(HttpMessage message) { String messageBody = message.getRequestBodyAsString("UTF-8"); if (log.isDebugEnable()) { log.debug(this, "HeartBeatQueryListenWorker Request: " + messageBody); } UAVHttpMessage msg = new UAVHttpMessage(messageBody); return msg; } @Override protected void adaptResponse(HttpMessage message, UAVHttpMessage t) { String response = t.getResponseAsJsonString(); message.putResponseBodyInString(response, 200, "utf-8"); if (log.isDebugEnable()) { log.debug(this, "HeartBeatQueryListenWorker Response: " + response); } } }
xxxllluuu/uavstack
com.creditease.uav.agent.heartbeat/src/main/java/com/creditease/agent/feature/hbagent/HeartBeatQueryListenWorker.java
Java
apache-2.0
1,807
namespace VisionsInCode.Feature.AppDeeplink.Repositories
{
  using System.Web;
  using Sitecore.Foundation.SitecoreExtensions.Extensions;
  using Sitecore.Mvc.Presentation;
  using Sitecore.Web.UI.WebControls;
  using VisionsInCode.Feature.AppDeeplink.ViewModels;

  /// <summary>
  /// Builds the <see cref="AppDeeplinkViewModel"/> for the current rendering's item:
  /// the app-store URL, the device deep-link URL, and the device URL with the
  /// request's device-params query value appended.
  /// </summary>
  public class AppDeeplinkViewModelRepository : IAppDeeplinkViewModelRepository
  {
    /// <summary>
    /// Returns the view model for the current rendering item, or null when there is
    /// no rendering context or the item does not derive from the AppDeeplink template.
    /// </summary>
    public AppDeeplinkViewModel Get()
    {
      // Capture the item once; the original re-walked the full
      // RenderingContext.Current.Rendering.Item chain on every use.
      var item = RenderingContext.Current?.Rendering?.Item;
      if (item == null)
        return null;

      if (!item.IsDerived(AppDeeplinkViewModel.Constants.Templates.AppDeeplink))
        return null;

      string deviceUrl = FieldRenderer.Render(item, AppDeeplinkViewModel.Constants.Fields.AppDeeplink.AppDeeplinkSettingsAppDeviceURL.ToString());

      return new AppDeeplinkViewModel()
      {
        StoreUrl = new HtmlString(FieldRenderer.Render(item, AppDeeplinkViewModel.Constants.Fields.AppDeeplink.AppDeeplinkSettingsAppStoreURL.ToString())),
        DeviceUrl = new HtmlString(deviceUrl),
        DeviceUrlWithParams = new HtmlString(BuildDeviceUrlWithParams(deviceUrl))
      };
    }

    /// <summary>
    /// Appends the request's DeviceParams query value to <paramref name="deviceUrl"/>;
    /// returns the URL unchanged when the query or the parameter is absent.
    /// </summary>
    private string BuildDeviceUrlWithParams(string deviceUrl)
    {
      if (string.IsNullOrWhiteSpace(deviceUrl))
        return string.Empty;

      // HttpUtility is in scope via 'using System.Web'; the original's
      // fully qualified System.Web.HttpUtility was redundant.
      string decodedQuery = HttpUtility.UrlDecode(RenderingContext.Current.PageContext.RequestContext.HttpContext.Request.Url.Query);
      if (string.IsNullOrWhiteSpace(decodedQuery))
        return deviceUrl;

      // ParseQueryString(...).Get(...) returns string — the original declared
      // this local as 'object' for no benefit.
      string param = HttpUtility.ParseQueryString(decodedQuery).Get(AppDeeplinkViewModel.Constants.QueryParams.DeviceParams);
      if (param == null)
        return deviceUrl;

      return $"{deviceUrl}{param}";
    }
  }
}
GoranHalvarsson/Habitat
src/Feature/AppDeeplink/code/Repositories/AppDeeplinkViewModelRepository.cs
C#
apache-2.0
1,871
# -*- coding: utf-8 -*- # # Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Unit tests.""" import mock import pandas import pytest from google.api_core import exceptions from google.auth.credentials import AnonymousCredentials from google.cloud import automl_v1beta1 from google.cloud.automl_v1beta1.proto import data_types_pb2 PROJECT = "project" REGION = "region" LOCATION_PATH = "projects/{}/locations/{}".format(PROJECT, REGION) class TestTablesClient(object): def tables_client( self, client_attrs={}, prediction_client_attrs={}, gcs_client_attrs={} ): client_mock = mock.Mock(**client_attrs) prediction_client_mock = mock.Mock(**prediction_client_attrs) gcs_client_mock = mock.Mock(**gcs_client_attrs) return automl_v1beta1.TablesClient( client=client_mock, prediction_client=prediction_client_mock, gcs_client=gcs_client_mock, project=PROJECT, region=REGION, ) def test_list_datasets_empty(self): client = self.tables_client( { "list_datasets.return_value": [], "location_path.return_value": LOCATION_PATH, }, {}, ) ds = client.list_datasets() client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION) client.auto_ml_client.list_datasets.assert_called_with(LOCATION_PATH) assert ds == [] def test_list_datasets_not_empty(self): datasets = ["some_dataset"] client = self.tables_client( { "list_datasets.return_value": datasets, "location_path.return_value": LOCATION_PATH, }, {}, ) ds = client.list_datasets() 
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION) client.auto_ml_client.list_datasets.assert_called_with(LOCATION_PATH) assert len(ds) == 1 assert ds[0] == "some_dataset" def test_get_dataset_no_value(self): dataset_actual = "dataset" client = self.tables_client({}, {}) with pytest.raises(ValueError): dataset = client.get_dataset() client.auto_ml_client.get_dataset.assert_not_called() def test_get_dataset_name(self): dataset_actual = "dataset" client = self.tables_client({"get_dataset.return_value": dataset_actual}, {}) dataset = client.get_dataset(dataset_name="my_dataset") client.auto_ml_client.get_dataset.assert_called_with("my_dataset") assert dataset == dataset_actual def test_get_no_dataset(self): client = self.tables_client( {"get_dataset.side_effect": exceptions.NotFound("err")}, {} ) with pytest.raises(exceptions.NotFound): client.get_dataset(dataset_name="my_dataset") client.auto_ml_client.get_dataset.assert_called_with("my_dataset") def test_get_dataset_from_empty_list(self): client = self.tables_client({"list_datasets.return_value": []}, {}) with pytest.raises(exceptions.NotFound): client.get_dataset(dataset_display_name="my_dataset") def test_get_dataset_from_list_not_found(self): client = self.tables_client( {"list_datasets.return_value": [mock.Mock(display_name="not_it")]}, {} ) with pytest.raises(exceptions.NotFound): client.get_dataset(dataset_display_name="my_dataset") def test_get_dataset_from_list(self): client = self.tables_client( { "list_datasets.return_value": [ mock.Mock(display_name="not_it"), mock.Mock(display_name="my_dataset"), ] }, {}, ) dataset = client.get_dataset(dataset_display_name="my_dataset") assert dataset.display_name == "my_dataset" def test_get_dataset_from_list_ambiguous(self): client = self.tables_client( { "list_datasets.return_value": [ mock.Mock(display_name="my_dataset"), mock.Mock(display_name="not_my_dataset"), mock.Mock(display_name="my_dataset"), ] }, {}, ) with pytest.raises(ValueError): 
client.get_dataset(dataset_display_name="my_dataset") def test_create_dataset(self): client = self.tables_client( { "location_path.return_value": LOCATION_PATH, "create_dataset.return_value": mock.Mock(display_name="name"), }, {}, ) metadata = {"metadata": "values"} dataset = client.create_dataset("name", metadata=metadata) client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION) client.auto_ml_client.create_dataset.assert_called_with( LOCATION_PATH, {"display_name": "name", "tables_dataset_metadata": metadata} ) assert dataset.display_name == "name" def test_delete_dataset(self): dataset = mock.Mock() dataset.configure_mock(name="name") client = self.tables_client({"delete_dataset.return_value": None}, {}) client.delete_dataset(dataset=dataset) client.auto_ml_client.delete_dataset.assert_called_with("name") def test_delete_dataset_not_found(self): client = self.tables_client({"list_datasets.return_value": []}, {}) client.delete_dataset(dataset_display_name="not_found") client.auto_ml_client.delete_dataset.assert_not_called() def test_delete_dataset_name(self): client = self.tables_client({"delete_dataset.return_value": None}, {}) client.delete_dataset(dataset_name="name") client.auto_ml_client.delete_dataset.assert_called_with("name") def test_export_not_found(self): client = self.tables_client({"list_datasets.return_value": []}, {}) with pytest.raises(exceptions.NotFound): client.export_data(dataset_display_name="name", gcs_input_uris="uri") client.auto_ml_client.export_data.assert_not_called() def test_export_gcs_uri(self): client = self.tables_client({"export_data.return_value": None}, {}) client.export_data(dataset_name="name", gcs_output_uri_prefix="uri") client.auto_ml_client.export_data.assert_called_with( "name", {"gcs_destination": {"output_uri_prefix": "uri"}} ) def test_export_bq_uri(self): client = self.tables_client({"export_data.return_value": None}, {}) client.export_data(dataset_name="name", bigquery_output_uri="uri") 
client.auto_ml_client.export_data.assert_called_with( "name", {"bigquery_destination": {"output_uri": "uri"}} ) def test_import_not_found(self): client = self.tables_client({"list_datasets.return_value": []}, {}) with pytest.raises(exceptions.NotFound): client.import_data(dataset_display_name="name", gcs_input_uris="uri") client.auto_ml_client.import_data.assert_not_called() def test_import_pandas_dataframe(self): client = self.tables_client( gcs_client_attrs={ "bucket_name": "my_bucket", "upload_pandas_dataframe.return_value": "uri", } ) dataframe = pandas.DataFrame({}) client.import_data( project=PROJECT, region=REGION, dataset_name="name", pandas_dataframe=dataframe, ) client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION) client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe) client.auto_ml_client.import_data.assert_called_with( "name", {"gcs_source": {"input_uris": ["uri"]}} ) def test_import_pandas_dataframe_init_gcs(self): client = automl_v1beta1.TablesClient( client=mock.Mock(), prediction_client=mock.Mock(), project=PROJECT, region=REGION, credentials=AnonymousCredentials(), ) dataframe = pandas.DataFrame({}) patch = mock.patch( "google.cloud.automl_v1beta1.tables.tables_client.gcs_client.GcsClient", bucket_name="my_bucket", ) with patch as MockGcsClient: mockInstance = MockGcsClient.return_value mockInstance.upload_pandas_dataframe.return_value = "uri" client.import_data(dataset_name="name", pandas_dataframe=dataframe) assert client.gcs_client is mockInstance client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION) client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe) client.auto_ml_client.import_data.assert_called_with( "name", {"gcs_source": {"input_uris": ["uri"]}} ) def test_import_gcs_uri(self): client = self.tables_client({"import_data.return_value": None}, {}) client.import_data(dataset_name="name", gcs_input_uris="uri") client.auto_ml_client.import_data.assert_called_with( 
"name", {"gcs_source": {"input_uris": ["uri"]}} ) def test_import_gcs_uris(self): client = self.tables_client({"import_data.return_value": None}, {}) client.import_data(dataset_name="name", gcs_input_uris=["uri", "uri"]) client.auto_ml_client.import_data.assert_called_with( "name", {"gcs_source": {"input_uris": ["uri", "uri"]}} ) def test_import_bq_uri(self): client = self.tables_client({"import_data.return_value": None}, {}) client.import_data(dataset_name="name", bigquery_input_uri="uri") client.auto_ml_client.import_data.assert_called_with( "name", {"bigquery_source": {"input_uri": "uri"}} ) def test_list_table_specs(self): client = self.tables_client({"list_table_specs.return_value": None}, {}) client.list_table_specs(dataset_name="name") client.auto_ml_client.list_table_specs.assert_called_with("name") def test_list_table_specs_not_found(self): client = self.tables_client( {"list_table_specs.side_effect": exceptions.NotFound("not found")}, {} ) with pytest.raises(exceptions.NotFound): client.list_table_specs(dataset_name="name") client.auto_ml_client.list_table_specs.assert_called_with("name") def test_get_table_spec(self): client = self.tables_client({}, {}) client.get_table_spec("name") client.auto_ml_client.get_table_spec.assert_called_with("name") def test_get_column_spec(self): client = self.tables_client({}, {}) client.get_column_spec("name") client.auto_ml_client.get_column_spec.assert_called_with("name") def test_list_column_specs(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") client = self.tables_client( { "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [], }, {}, ) client.list_column_specs(dataset_name="name") client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_called_with("table") def test_update_column_spec_not_found(self): table_spec_mock = mock.Mock() # 
name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock = mock.Mock() data_type_mock = mock.Mock(type_code="type_code") column_spec_mock.configure_mock( name="column", display_name="column", data_type=data_type_mock ) client = self.tables_client( { "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], }, {}, ) with pytest.raises(exceptions.NotFound): client.update_column_spec(dataset_name="name", column_spec_name="column2") client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_called_with("table") client.auto_ml_client.update_column_spec.assert_not_called() def test_update_column_spec_display_name_not_found(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock = mock.Mock() data_type_mock = mock.Mock(type_code="type_code") column_spec_mock.configure_mock( name="column", display_name="column", data_type=data_type_mock ) client = self.tables_client( { "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], }, {}, ) with pytest.raises(exceptions.NotFound): client.update_column_spec( dataset_name="name", column_spec_display_name="column2" ) client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_called_with("table") client.auto_ml_client.update_column_spec.assert_not_called() def test_update_column_spec_name_no_args(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock = mock.Mock() data_type_mock = mock.Mock(type_code="type_code") column_spec_mock.configure_mock( name="column/2", display_name="column", data_type=data_type_mock ) client = self.tables_client( { 
"list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], }, {}, ) client.update_column_spec(dataset_name="name", column_spec_name="column/2") client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_called_with("table") client.auto_ml_client.update_column_spec.assert_called_with( {"name": "column/2", "data_type": {"type_code": "type_code"}} ) def test_update_column_spec_no_args(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock = mock.Mock() data_type_mock = mock.Mock(type_code="type_code") column_spec_mock.configure_mock( name="column", display_name="column", data_type=data_type_mock ) client = self.tables_client( { "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], }, {}, ) client.update_column_spec( dataset_name="name", column_spec_display_name="column" ) client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_called_with("table") client.auto_ml_client.update_column_spec.assert_called_with( {"name": "column", "data_type": {"type_code": "type_code"}} ) def test_update_column_spec_nullable(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock = mock.Mock() data_type_mock = mock.Mock(type_code="type_code") column_spec_mock.configure_mock( name="column", display_name="column", data_type=data_type_mock ) client = self.tables_client( { "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], }, {}, ) client.update_column_spec( dataset_name="name", column_spec_display_name="column", nullable=True ) client.auto_ml_client.list_table_specs.assert_called_with("name") 
client.auto_ml_client.list_column_specs.assert_called_with("table") client.auto_ml_client.update_column_spec.assert_called_with( { "name": "column", "data_type": {"type_code": "type_code", "nullable": True}, } ) def test_update_column_spec_type_code(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock = mock.Mock() data_type_mock = mock.Mock(type_code="type_code") column_spec_mock.configure_mock( name="column", display_name="column", data_type=data_type_mock ) client = self.tables_client( { "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], }, {}, ) client.update_column_spec( dataset_name="name", column_spec_display_name="column", type_code="type_code2", ) client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_called_with("table") client.auto_ml_client.update_column_spec.assert_called_with( {"name": "column", "data_type": {"type_code": "type_code2"}} ) def test_update_column_spec_type_code_nullable(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock = mock.Mock() data_type_mock = mock.Mock(type_code="type_code") column_spec_mock.configure_mock( name="column", display_name="column", data_type=data_type_mock ) client = self.tables_client( { "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], }, {}, ) client.update_column_spec( dataset_name="name", nullable=True, column_spec_display_name="column", type_code="type_code2", ) client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_called_with("table") client.auto_ml_client.update_column_spec.assert_called_with( { "name": "column", "data_type": {"type_code": "type_code2", "nullable": 
True}, } ) def test_update_column_spec_type_code_nullable_false(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock = mock.Mock() data_type_mock = mock.Mock(type_code="type_code") column_spec_mock.configure_mock( name="column", display_name="column", data_type=data_type_mock ) client = self.tables_client( { "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], }, {}, ) client.update_column_spec( dataset_name="name", nullable=False, column_spec_display_name="column", type_code="type_code2", ) client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_called_with("table") client.auto_ml_client.update_column_spec.assert_called_with( { "name": "column", "data_type": {"type_code": "type_code2", "nullable": False}, } ) def test_set_target_column_table_not_found(self): client = self.tables_client( {"list_table_specs.side_effect": exceptions.NotFound("err")}, {} ) with pytest.raises(exceptions.NotFound): client.set_target_column( dataset_name="name", column_spec_display_name="column2" ) client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_not_called() client.auto_ml_client.update_dataset.assert_not_called() def test_set_target_column_not_found(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock = mock.Mock() column_spec_mock.configure_mock(name="column/1", display_name="column") client = self.tables_client( { "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], }, {}, ) with pytest.raises(exceptions.NotFound): client.set_target_column( dataset_name="name", column_spec_display_name="column2" ) 
client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_called_with("table") client.auto_ml_client.update_dataset.assert_not_called() def test_set_target_column(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock = mock.Mock() column_spec_mock.configure_mock(name="column/1", display_name="column") dataset_mock = mock.Mock() tables_dataset_metadata_mock = mock.Mock() tables_dataset_metadata_mock.configure_mock( target_column_spec_id="2", weight_column_spec_id="2", ml_use_column_spec_id="3", ) dataset_mock.configure_mock( name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock ) client = self.tables_client( { "get_dataset.return_value": dataset_mock, "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], }, {}, ) client.set_target_column(dataset_name="name", column_spec_display_name="column") client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_called_with("table") client.auto_ml_client.update_dataset.assert_called_with( { "name": "dataset", "tables_dataset_metadata": { "target_column_spec_id": "1", "weight_column_spec_id": "2", "ml_use_column_spec_id": "3", }, } ) def test_set_weight_column_table_not_found(self): client = self.tables_client( {"list_table_specs.side_effect": exceptions.NotFound("err")}, {} ) try: client.set_weight_column( dataset_name="name", column_spec_display_name="column2" ) except exceptions.NotFound: pass client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_not_called() client.auto_ml_client.update_dataset.assert_not_called() def test_set_weight_column_not_found(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here 
table_spec_mock.configure_mock(name="table") column_spec_mock = mock.Mock() column_spec_mock.configure_mock(name="column/1", display_name="column") client = self.tables_client( { "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], }, {}, ) with pytest.raises(exceptions.NotFound): client.set_weight_column( dataset_name="name", column_spec_display_name="column2" ) client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_called_with("table") client.auto_ml_client.update_dataset.assert_not_called() def test_set_weight_column(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock = mock.Mock() column_spec_mock.configure_mock(name="column/2", display_name="column") dataset_mock = mock.Mock() tables_dataset_metadata_mock = mock.Mock() tables_dataset_metadata_mock.configure_mock( target_column_spec_id="1", weight_column_spec_id="1", ml_use_column_spec_id="3", ) dataset_mock.configure_mock( name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock ) client = self.tables_client( { "get_dataset.return_value": dataset_mock, "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], }, {}, ) client.set_weight_column(dataset_name="name", column_spec_display_name="column") client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_called_with("table") client.auto_ml_client.update_dataset.assert_called_with( { "name": "dataset", "tables_dataset_metadata": { "target_column_spec_id": "1", "weight_column_spec_id": "2", "ml_use_column_spec_id": "3", }, } ) def test_clear_weight_column(self): dataset_mock = mock.Mock() tables_dataset_metadata_mock = mock.Mock() tables_dataset_metadata_mock.configure_mock( target_column_spec_id="1", weight_column_spec_id="2", 
ml_use_column_spec_id="3", ) dataset_mock.configure_mock( name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock ) client = self.tables_client({"get_dataset.return_value": dataset_mock}, {}) client.clear_weight_column(dataset_name="name") client.auto_ml_client.update_dataset.assert_called_with( { "name": "dataset", "tables_dataset_metadata": { "target_column_spec_id": "1", "weight_column_spec_id": None, "ml_use_column_spec_id": "3", }, } ) def test_set_test_train_column_table_not_found(self): client = self.tables_client( {"list_table_specs.side_effect": exceptions.NotFound("err")}, {} ) with pytest.raises(exceptions.NotFound): client.set_test_train_column( dataset_name="name", column_spec_display_name="column2" ) client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_not_called() client.auto_ml_client.update_dataset.assert_not_called() def test_set_test_train_column_not_found(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock = mock.Mock() column_spec_mock.configure_mock(name="column/1", display_name="column") client = self.tables_client( { "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], }, {}, ) with pytest.raises(exceptions.NotFound): client.set_test_train_column( dataset_name="name", column_spec_display_name="column2" ) client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_called_with("table") client.auto_ml_client.update_dataset.assert_not_called() def test_set_test_train_column(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock = mock.Mock() column_spec_mock.configure_mock(name="column/3", display_name="column") dataset_mock = mock.Mock() 
tables_dataset_metadata_mock = mock.Mock() tables_dataset_metadata_mock.configure_mock( target_column_spec_id="1", weight_column_spec_id="2", ml_use_column_spec_id="2", ) dataset_mock.configure_mock( name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock ) client = self.tables_client( { "get_dataset.return_value": dataset_mock, "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], }, {}, ) client.set_test_train_column( dataset_name="name", column_spec_display_name="column" ) client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_called_with("table") client.auto_ml_client.update_dataset.assert_called_with( { "name": "dataset", "tables_dataset_metadata": { "target_column_spec_id": "1", "weight_column_spec_id": "2", "ml_use_column_spec_id": "3", }, } ) def test_clear_test_train_column(self): dataset_mock = mock.Mock() tables_dataset_metadata_mock = mock.Mock() tables_dataset_metadata_mock.configure_mock( target_column_spec_id="1", weight_column_spec_id="2", ml_use_column_spec_id="2", ) dataset_mock.configure_mock( name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock ) client = self.tables_client({"get_dataset.return_value": dataset_mock}, {}) client.clear_test_train_column(dataset_name="name") client.auto_ml_client.update_dataset.assert_called_with( { "name": "dataset", "tables_dataset_metadata": { "target_column_spec_id": "1", "weight_column_spec_id": "2", "ml_use_column_spec_id": None, }, } ) def test_set_time_column(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock = mock.Mock() column_spec_mock.configure_mock(name="column/3", display_name="column") dataset_mock = mock.Mock() dataset_mock.configure_mock(name="dataset") client = self.tables_client( { "get_dataset.return_value": dataset_mock, 
"list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], }, {}, ) client.set_time_column(dataset_name="name", column_spec_display_name="column") client.auto_ml_client.list_table_specs.assert_called_with("name") client.auto_ml_client.list_column_specs.assert_called_with("table") client.auto_ml_client.update_table_spec.assert_called_with( {"name": "table", "time_column_spec_id": "3"} ) def test_clear_time_column(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") dataset_mock = mock.Mock() dataset_mock.configure_mock(name="dataset") client = self.tables_client( { "get_dataset.return_value": dataset_mock, "list_table_specs.return_value": [table_spec_mock], }, {}, ) client.clear_time_column(dataset_name="name") client.auto_ml_client.update_table_spec.assert_called_with( {"name": "table", "time_column_spec_id": None} ) def test_get_model_evaluation(self): client = self.tables_client({}, {}) ds = client.get_model_evaluation(model_evaluation_name="x") client.auto_ml_client.get_model_evaluation.assert_called_with("x") def test_list_model_evaluations_empty(self): client = self.tables_client({"list_model_evaluations.return_value": []}, {}) ds = client.list_model_evaluations(model_name="model") client.auto_ml_client.list_model_evaluations.assert_called_with("model") assert ds == [] def test_list_model_evaluations_not_empty(self): evaluations = ["eval"] client = self.tables_client( { "list_model_evaluations.return_value": evaluations, "location_path.return_value": LOCATION_PATH, }, {}, ) ds = client.list_model_evaluations(model_name="model") client.auto_ml_client.list_model_evaluations.assert_called_with("model") assert len(ds) == 1 assert ds[0] == "eval" def test_list_models_empty(self): client = self.tables_client( { "list_models.return_value": [], "location_path.return_value": LOCATION_PATH, }, {}, ) ds = client.list_models() 
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION) client.auto_ml_client.list_models.assert_called_with(LOCATION_PATH) assert ds == [] def test_list_models_not_empty(self): models = ["some_model"] client = self.tables_client( { "list_models.return_value": models, "location_path.return_value": LOCATION_PATH, }, {}, ) ds = client.list_models() client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION) client.auto_ml_client.list_models.assert_called_with(LOCATION_PATH) assert len(ds) == 1 assert ds[0] == "some_model" def test_get_model_name(self): model_actual = "model" client = self.tables_client({"get_model.return_value": model_actual}, {}) model = client.get_model(model_name="my_model") client.auto_ml_client.get_model.assert_called_with("my_model") assert model == model_actual def test_get_no_model(self): client = self.tables_client( {"get_model.side_effect": exceptions.NotFound("err")}, {} ) with pytest.raises(exceptions.NotFound): client.get_model(model_name="my_model") client.auto_ml_client.get_model.assert_called_with("my_model") def test_get_model_from_empty_list(self): client = self.tables_client({"list_models.return_value": []}, {}) with pytest.raises(exceptions.NotFound): client.get_model(model_display_name="my_model") def test_get_model_from_list_not_found(self): client = self.tables_client( {"list_models.return_value": [mock.Mock(display_name="not_it")]}, {} ) with pytest.raises(exceptions.NotFound): client.get_model(model_display_name="my_model") def test_get_model_from_list(self): client = self.tables_client( { "list_models.return_value": [ mock.Mock(display_name="not_it"), mock.Mock(display_name="my_model"), ] }, {}, ) model = client.get_model(model_display_name="my_model") assert model.display_name == "my_model" def test_get_model_from_list_ambiguous(self): client = self.tables_client( { "list_models.return_value": [ mock.Mock(display_name="my_model"), mock.Mock(display_name="not_my_model"), 
mock.Mock(display_name="my_model"), ] }, {}, ) with pytest.raises(ValueError): client.get_model(model_display_name="my_model") def test_delete_model(self): model = mock.Mock() model.configure_mock(name="name") client = self.tables_client({"delete_model.return_value": None}, {}) client.delete_model(model=model) client.auto_ml_client.delete_model.assert_called_with("name") def test_delete_model_not_found(self): client = self.tables_client({"list_models.return_value": []}, {}) client.delete_model(model_display_name="not_found") client.auto_ml_client.delete_model.assert_not_called() def test_delete_model_name(self): client = self.tables_client({"delete_model.return_value": None}, {}) client.delete_model(model_name="name") client.auto_ml_client.delete_model.assert_called_with("name") def test_deploy_model_no_args(self): client = self.tables_client({}, {}) with pytest.raises(ValueError): client.deploy_model() client.auto_ml_client.deploy_model.assert_not_called() def test_deploy_model(self): client = self.tables_client({}, {}) client.deploy_model(model_name="name") client.auto_ml_client.deploy_model.assert_called_with("name") def test_deploy_model_not_found(self): client = self.tables_client({"list_models.return_value": []}, {}) with pytest.raises(exceptions.NotFound): client.deploy_model(model_display_name="name") client.auto_ml_client.deploy_model.assert_not_called() def test_undeploy_model(self): client = self.tables_client({}, {}) client.undeploy_model(model_name="name") client.auto_ml_client.undeploy_model.assert_called_with("name") def test_undeploy_model_not_found(self): client = self.tables_client({"list_models.return_value": []}, {}) with pytest.raises(exceptions.NotFound): client.undeploy_model(model_display_name="name") client.auto_ml_client.undeploy_model.assert_not_called() def test_create_model(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock = 
mock.Mock() column_spec_mock.configure_mock(name="column/2", display_name="column") client = self.tables_client( { "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [column_spec_mock], "location_path.return_value": LOCATION_PATH, }, {}, ) client.create_model( "my_model", dataset_name="my_dataset", train_budget_milli_node_hours=1000 ) client.auto_ml_client.create_model.assert_called_with( LOCATION_PATH, { "display_name": "my_model", "dataset_id": "my_dataset", "tables_model_metadata": {"train_budget_milli_node_hours": 1000}, }, ) def test_create_model_include_columns(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock1 = mock.Mock() column_spec_mock1.configure_mock(name="column/1", display_name="column1") column_spec_mock2 = mock.Mock() column_spec_mock2.configure_mock(name="column/2", display_name="column2") client = self.tables_client( { "list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [ column_spec_mock1, column_spec_mock2, ], "location_path.return_value": LOCATION_PATH, }, {}, ) client.create_model( "my_model", dataset_name="my_dataset", include_column_spec_names=["column1"], train_budget_milli_node_hours=1000, ) client.auto_ml_client.create_model.assert_called_with( LOCATION_PATH, { "display_name": "my_model", "dataset_id": "my_dataset", "tables_model_metadata": { "train_budget_milli_node_hours": 1000, "input_feature_column_specs": [column_spec_mock1], }, }, ) def test_create_model_exclude_columns(self): table_spec_mock = mock.Mock() # name is reserved in use of __init__, needs to be passed here table_spec_mock.configure_mock(name="table") column_spec_mock1 = mock.Mock() column_spec_mock1.configure_mock(name="column/1", display_name="column1") column_spec_mock2 = mock.Mock() column_spec_mock2.configure_mock(name="column/2", display_name="column2") client = self.tables_client( { 
"list_table_specs.return_value": [table_spec_mock], "list_column_specs.return_value": [ column_spec_mock1, column_spec_mock2, ], "location_path.return_value": LOCATION_PATH, }, {}, ) client.create_model( "my_model", dataset_name="my_dataset", exclude_column_spec_names=["column1"], train_budget_milli_node_hours=1000, ) client.auto_ml_client.create_model.assert_called_with( LOCATION_PATH, { "display_name": "my_model", "dataset_id": "my_dataset", "tables_model_metadata": { "train_budget_milli_node_hours": 1000, "input_feature_column_specs": [column_spec_mock2], }, }, ) def test_create_model_invalid_hours_small(self): client = self.tables_client({}, {}) with pytest.raises(ValueError): client.create_model( "my_model", dataset_name="my_dataset", train_budget_milli_node_hours=1 ) client.auto_ml_client.create_model.assert_not_called() def test_create_model_invalid_hours_large(self): client = self.tables_client({}, {}) with pytest.raises(ValueError): client.create_model( "my_model", dataset_name="my_dataset", train_budget_milli_node_hours=1000000, ) client.auto_ml_client.create_model.assert_not_called() def test_create_model_invalid_no_dataset(self): client = self.tables_client({}, {}) with pytest.raises(ValueError): client.create_model("my_model", train_budget_milli_node_hours=1000) client.auto_ml_client.get_dataset.assert_not_called() client.auto_ml_client.create_model.assert_not_called() def test_create_model_invalid_include_exclude(self): client = self.tables_client({}, {}) with pytest.raises(ValueError): client.create_model( "my_model", dataset_name="my_dataset", include_column_spec_names=["a"], exclude_column_spec_names=["b"], train_budget_milli_node_hours=1000, ) client.auto_ml_client.get_dataset.assert_not_called() client.auto_ml_client.create_model.assert_not_called() def test_predict_from_array(self): data_type = mock.Mock(type_code=data_types_pb2.CATEGORY) column_spec = mock.Mock(display_name="a", data_type=data_type) model_metadata = 
mock.Mock(input_feature_column_specs=[column_spec]) model = mock.Mock() model.configure_mock(tables_model_metadata=model_metadata, name="my_model") client = self.tables_client({"get_model.return_value": model}, {}) client.predict(["1"], model_name="my_model") client.prediction_client.predict.assert_called_with( "my_model", {"row": {"values": [{"string_value": "1"}]}}, None ) def test_predict_from_dict(self): data_type = mock.Mock(type_code=data_types_pb2.CATEGORY) column_spec_a = mock.Mock(display_name="a", data_type=data_type) column_spec_b = mock.Mock(display_name="b", data_type=data_type) model_metadata = mock.Mock( input_feature_column_specs=[column_spec_a, column_spec_b] ) model = mock.Mock() model.configure_mock(tables_model_metadata=model_metadata, name="my_model") client = self.tables_client({"get_model.return_value": model}, {}) client.predict({"a": "1", "b": "2"}, model_name="my_model") client.prediction_client.predict.assert_called_with( "my_model", {"row": {"values": [{"string_value": "1"}, {"string_value": "2"}]}}, None, ) def test_predict_from_dict_with_feature_importance(self): data_type = mock.Mock(type_code=data_types_pb2.CATEGORY) column_spec_a = mock.Mock(display_name="a", data_type=data_type) column_spec_b = mock.Mock(display_name="b", data_type=data_type) model_metadata = mock.Mock( input_feature_column_specs=[column_spec_a, column_spec_b] ) model = mock.Mock() model.configure_mock(tables_model_metadata=model_metadata, name="my_model") client = self.tables_client({"get_model.return_value": model}, {}) client.predict( {"a": "1", "b": "2"}, model_name="my_model", feature_importance=True ) client.prediction_client.predict.assert_called_with( "my_model", {"row": {"values": [{"string_value": "1"}, {"string_value": "2"}]}}, {"feature_importance": "true"}, ) def test_predict_from_dict_missing(self): data_type = mock.Mock(type_code=data_types_pb2.CATEGORY) column_spec_a = mock.Mock(display_name="a", data_type=data_type) column_spec_b = 
mock.Mock(display_name="b", data_type=data_type) model_metadata = mock.Mock( input_feature_column_specs=[column_spec_a, column_spec_b] ) model = mock.Mock() model.configure_mock(tables_model_metadata=model_metadata, name="my_model") client = self.tables_client({"get_model.return_value": model}, {}) client.predict({"a": "1"}, model_name="my_model") client.prediction_client.predict.assert_called_with( "my_model", {"row": {"values": [{"string_value": "1"}, {"null_value": 0}]}}, None, ) def test_predict_all_types(self): float_type = mock.Mock(type_code=data_types_pb2.FLOAT64) timestamp_type = mock.Mock(type_code=data_types_pb2.TIMESTAMP) string_type = mock.Mock(type_code=data_types_pb2.STRING) array_type = mock.Mock(type_code=data_types_pb2.ARRAY) struct_type = mock.Mock(type_code=data_types_pb2.STRUCT) category_type = mock.Mock(type_code=data_types_pb2.CATEGORY) column_spec_float = mock.Mock(display_name="float", data_type=float_type) column_spec_timestamp = mock.Mock( display_name="timestamp", data_type=timestamp_type ) column_spec_string = mock.Mock(display_name="string", data_type=string_type) column_spec_array = mock.Mock(display_name="array", data_type=array_type) column_spec_struct = mock.Mock(display_name="struct", data_type=struct_type) column_spec_category = mock.Mock( display_name="category", data_type=category_type ) column_spec_null = mock.Mock(display_name="null", data_type=category_type) model_metadata = mock.Mock( input_feature_column_specs=[ column_spec_float, column_spec_timestamp, column_spec_string, column_spec_array, column_spec_struct, column_spec_category, column_spec_null, ] ) model = mock.Mock() model.configure_mock(tables_model_metadata=model_metadata, name="my_model") client = self.tables_client({"get_model.return_value": model}, {}) client.predict( { "float": 1.0, "timestamp": "EST", "string": "text", "array": [1], "struct": {"a": "b"}, "category": "a", "null": None, }, model_name="my_model", ) 
client.prediction_client.predict.assert_called_with( "my_model", { "row": { "values": [ {"number_value": 1.0}, {"string_value": "EST"}, {"string_value": "text"}, {"list_value": [1]}, {"struct_value": {"a": "b"}}, {"string_value": "a"}, {"null_value": 0}, ] } }, None, ) def test_predict_from_array_missing(self): data_type = mock.Mock(type_code=data_types_pb2.CATEGORY) column_spec = mock.Mock(display_name="a", data_type=data_type) model_metadata = mock.Mock(input_feature_column_specs=[column_spec]) model = mock.Mock() model.configure_mock(tables_model_metadata=model_metadata, name="my_model") client = self.tables_client({"get_model.return_value": model}, {}) with pytest.raises(ValueError): client.predict([], model_name="my_model") client.prediction_client.predict.assert_not_called() def test_batch_predict_pandas_dataframe(self): client = self.tables_client( gcs_client_attrs={ "bucket_name": "my_bucket", "upload_pandas_dataframe.return_value": "gs://input", } ) dataframe = pandas.DataFrame({}) client.batch_predict( project=PROJECT, region=REGION, model_name="my_model", pandas_dataframe=dataframe, gcs_output_uri_prefix="gs://output", ) client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION) client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe) client.prediction_client.batch_predict.assert_called_with( "my_model", {"gcs_source": {"input_uris": ["gs://input"]}}, {"gcs_destination": {"output_uri_prefix": "gs://output"}}, ) def test_batch_predict_pandas_dataframe_init_gcs(self): client = automl_v1beta1.TablesClient( client=mock.Mock(), prediction_client=mock.Mock(), project=PROJECT, region=REGION, credentials=AnonymousCredentials(), ) dataframe = pandas.DataFrame({}) patch = mock.patch( "google.cloud.automl_v1beta1.tables.tables_client.gcs_client.GcsClient", bucket_name="my_bucket", ) with patch as MockGcsClient: mockInstance = MockGcsClient.return_value mockInstance.upload_pandas_dataframe.return_value = "gs://input" dataframe = 
pandas.DataFrame({}) client.batch_predict( model_name="my_model", pandas_dataframe=dataframe, gcs_output_uri_prefix="gs://output", ) client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION) client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe) client.prediction_client.batch_predict.assert_called_with( "my_model", {"gcs_source": {"input_uris": ["gs://input"]}}, {"gcs_destination": {"output_uri_prefix": "gs://output"}}, ) def test_batch_predict_gcs(self): client = self.tables_client({}, {}) client.batch_predict( model_name="my_model", gcs_input_uris="gs://input", gcs_output_uri_prefix="gs://output", ) client.prediction_client.batch_predict.assert_called_with( "my_model", {"gcs_source": {"input_uris": ["gs://input"]}}, {"gcs_destination": {"output_uri_prefix": "gs://output"}}, ) def test_batch_predict_bigquery(self): client = self.tables_client({}, {}) client.batch_predict( model_name="my_model", bigquery_input_uri="bq://input", bigquery_output_uri="bq://output", ) client.prediction_client.batch_predict.assert_called_with( "my_model", {"bigquery_source": {"input_uri": "bq://input"}}, {"bigquery_destination": {"output_uri": "bq://output"}}, ) def test_batch_predict_mixed(self): client = self.tables_client({}, {}) client.batch_predict( model_name="my_model", gcs_input_uris="gs://input", bigquery_output_uri="bq://output", ) client.prediction_client.batch_predict.assert_called_with( "my_model", {"gcs_source": {"input_uris": ["gs://input"]}}, {"bigquery_destination": {"output_uri": "bq://output"}}, ) def test_batch_predict_missing_input_gcs_uri(self): client = self.tables_client({}, {}) with pytest.raises(ValueError): client.batch_predict( model_name="my_model", gcs_input_uris=None, gcs_output_uri_prefix="gs://output", ) client.prediction_client.batch_predict.assert_not_called() def test_batch_predict_missing_input_bigquery_uri(self): client = self.tables_client({}, {}) with pytest.raises(ValueError): client.batch_predict( 
model_name="my_model", bigquery_input_uri=None, gcs_output_uri_prefix="gs://output", ) client.prediction_client.batch_predict.assert_not_called() def test_batch_predict_missing_output_gcs_uri(self): client = self.tables_client({}, {}) with pytest.raises(ValueError): client.batch_predict( model_name="my_model", gcs_input_uris="gs://input", gcs_output_uri_prefix=None, ) client.prediction_client.batch_predict.assert_not_called() def test_batch_predict_missing_output_bigquery_uri(self): client = self.tables_client({}, {}) with pytest.raises(ValueError): client.batch_predict( model_name="my_model", gcs_input_uris="gs://input", bigquery_output_uri=None, ) client.prediction_client.batch_predict.assert_not_called() def test_batch_predict_missing_model(self): client = self.tables_client({"list_models.return_value": []}, {}) with pytest.raises(exceptions.NotFound): client.batch_predict( model_display_name="my_model", gcs_input_uris="gs://input", gcs_output_uri_prefix="gs://output", ) client.prediction_client.batch_predict.assert_not_called() def test_batch_predict_no_model(self): client = self.tables_client({}, {}) with pytest.raises(ValueError): client.batch_predict( gcs_input_uris="gs://input", gcs_output_uri_prefix="gs://output" ) client.auto_ml_client.list_models.assert_not_called() client.prediction_client.batch_predict.assert_not_called() def test_auto_ml_client_credentials(self): credentials_mock = mock.Mock() patch_auto_ml_client = mock.patch( "google.cloud.automl_v1beta1.gapic.auto_ml_client.AutoMlClient" ) with patch_auto_ml_client as MockAutoMlClient: client = automl_v1beta1.TablesClient(credentials=credentials_mock) _, auto_ml_client_kwargs = MockAutoMlClient.call_args assert "credentials" in auto_ml_client_kwargs assert auto_ml_client_kwargs["credentials"] == credentials_mock def test_prediction_client_credentials(self): credentials_mock = mock.Mock() patch_prediction_client = mock.patch( 
"google.cloud.automl_v1beta1.gapic.prediction_service_client.PredictionServiceClient" ) with patch_prediction_client as MockPredictionClient: client = automl_v1beta1.TablesClient(credentials=credentials_mock) _, prediction_client_kwargs = MockPredictionClient.call_args assert "credentials" in prediction_client_kwargs assert prediction_client_kwargs["credentials"] == credentials_mock def test_prediction_client_client_info(self): client_info_mock = mock.Mock() patch_prediction_client = mock.patch( "google.cloud.automl_v1beta1.gapic.prediction_service_client.PredictionServiceClient" ) with patch_prediction_client as MockPredictionClient: client = automl_v1beta1.TablesClient(client_info=client_info_mock) _, prediction_client_kwargs = MockPredictionClient.call_args assert "client_info" in prediction_client_kwargs assert prediction_client_kwargs["client_info"] == client_info_mock
tswast/google-cloud-python
automl/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py
Python
apache-2.0
59,675
from rest_framework import status
from rest_framework.exceptions import APIException, ParseError


def json_api_exception_handler(exc, context):
    """
    Custom exception handler that returns errors object as an array
    """
    # Deferred import so the OSF can be loaded without Django being configured.
    from rest_framework.views import exception_handler

    response = exception_handler(exc, context)
    if not response:
        return response

    # Members allowed at the top level of an error object. "title" is
    # intentionally absent to avoid clashing with node "title" errors.
    top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']

    errors = []
    data = response.data
    if isinstance(data, dict):
        for key, description in data.iteritems():
            if key in top_level_error_keys:
                errors.append({key: description})
                continue
            # Field-level validation messages become source-pointer/detail pairs.
            if isinstance(description, basestring):
                description = [description]
            for reason in description:
                errors.append({'source': {'pointer': '/data/attributes/' + key}, 'detail': reason})
    else:
        if isinstance(data, basestring):
            data = [data]
        for error in data:
            errors.append({'detail': error})

    response.data = {'errors': errors}
    return response


# Custom Exceptions the Django Rest Framework does not support

class Gone(APIException):
    status_code = status.HTTP_410_GONE
    default_detail = ('The requested resource is no longer available.')


class InvalidFilterError(ParseError):
    """Raised when client passes an invalid filter in the querystring."""
    default_detail = 'Querystring contains an invalid filter.'
arpitar/osf.io
api/base/exceptions.py
Python
apache-2.0
1,838
/*
 * Code contributed to the Learning Layers project
 * http://www.learning-layers.eu
 * Development is partly funded by the FP7 Programme of the European
 * Commission under Grant Agreement FP7-ICT-318209.
 * Copyright (c) 2016, Karlsruhe University of Applied Sciences.
 * For a list of contributors see the AUTHORS file at the top-level directory
 * of this distribution.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package de.hska.ld.etherpad.service;

import de.hska.ld.etherpad.persistence.domain.UserEtherpadInfo;

/**
 * Service contract for persisting and querying {@link UserEtherpadInfo}
 * records, which associate users with their Etherpad session and author state.
 *
 * <p>Interface members are implicitly {@code public abstract}, so the
 * modifiers are omitted.</p>
 */
public interface UserEtherpadInfoService {

    /** Persists the given record and returns the saved instance. */
    UserEtherpadInfo save(UserEtherpadInfo userEtherpadInfo);

    /** Returns the record with the given database id. */
    UserEtherpadInfo findById(Long id);

    /**
     * Stores an Etherpad session (session id, group id, expiry timestamp)
     * on the given record.
     */
    void storeSessionForUser(String sessionId, String groupId, Long validUntil, UserEtherpadInfo userEtherpadInfo);

    /**
     * Stores the given Etherpad author id for the current user
     * (as resolved by the implementation — presumably the authenticated user;
     * confirm against the implementing class).
     */
    void storeAuthorIdForCurrentUser(String authorId);

    /** Returns the record belonging to the current user. */
    UserEtherpadInfo getUserEtherpadInfoForCurrentUser();

    /** Returns the record with the given Etherpad author id. */
    UserEtherpadInfo findByAuthorId(String authorId);

    /** Returns the record with the given Etherpad session id. */
    UserEtherpadInfo findBySessionId(String sessionId);
}
learning-layers/LivingDocumentsServer
ld-etherpad/src/main/java/de/hska/ld/etherpad/service/UserEtherpadInfoService.java
Java
apache-2.0
1,589
/*
 * Copyright 2017 StreamSets Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.streamsets.datacollector.config;

import com.streamsets.pipeline.api.impl.Utils;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Describes the model of a configuration option: its type, the class that
 * supplies selectable values, the value/label lists shown to the user, and
 * any nested configuration definitions.
 *
 * <p>Note on mutability: {@code values} and {@code labels} are mutable via
 * setters (used by the localization factories below); all other state is
 * fixed at construction time.</p>
 */
public class ModelDefinition {
  private final ModelType modelType;
  private final String valuesProviderClass;
  private final List<ConfigDefinition> configDefinitions;
  // Lookup index over configDefinitions, keyed by definition name.
  private final Map<String, ConfigDefinition> configDefinitionsAsMap;
  private List<String> values;
  private List<String> labels;
  private final Class listBeanClass;

  /**
   * Returns a copy of {@code model} with its values/labels replaced by the
   * given localized lists; all other fields are carried over.
   */
  public static ModelDefinition localizedValueChooser(ModelDefinition model, List<String> values,
      List<String> labels) {
    return new ModelDefinition(model.getModelType(), model.getValuesProviderClass(), values, labels,
      model.getListBeanClass(), model.getConfigDefinitions());
  }

  /**
   * Returns a copy of {@code model} with its nested config definitions
   * replaced by the given localized definitions; all other fields are carried over.
   */
  public static ModelDefinition localizedComplexField(ModelDefinition model, List<ConfigDefinition> configDefs) {
    return new ModelDefinition(model.getModelType(), model.getValuesProviderClass(), model.getValues(),
      model.getLabels(), model.getListBeanClass(), configDefs);
  }

  /**
   * Creates a model definition.
   *
   * @param modelType the kind of model (value chooser, list bean, etc.)
   * @param valuesProviderClass class name supplying selectable values, may be null
   * @param values selectable values, may be null
   * @param labels display labels parallel to {@code values}, may be null
   * @param listBeanClass bean class for list-bean models, may be null
   * @param configDefinitions nested config definitions, may be null
   */
  public ModelDefinition(ModelType modelType, String valuesProviderClass, List<String> values,
      List<String> labels, Class listBeanClass, List<ConfigDefinition> configDefinitions) {
    this.modelType = modelType;
    this.valuesProviderClass = valuesProviderClass;
    this.configDefinitions = configDefinitions;
    configDefinitionsAsMap = new HashMap<>();
    if (configDefinitions != null) {
      for (ConfigDefinition def : configDefinitions) {
        configDefinitionsAsMap.put(def.getName(), def);
      }
    }
    this.values = values;
    this.labels = labels;
    this.listBeanClass = listBeanClass;
  }

  public ModelType getModelType() {
    return modelType;
  }

  public List<String> getValues() {
    return values;
  }

  public List<String> getLabels() {
    return labels;
  }

  public String getValuesProviderClass() {
    return valuesProviderClass;
  }

  public void setValues(List<String> values) {
    this.values = values;
  }

  public void setLabels(List<String> labels) {
    this.labels = labels;
  }

  public Class getListBeanClass() {
    return listBeanClass;
  }

  public List<ConfigDefinition> getConfigDefinitions() {
    return configDefinitions;
  }

  /** Returns the nested definitions indexed by name (same instances as the list). */
  public Map<String, ConfigDefinition> getConfigDefinitionsAsMap() {
    return configDefinitionsAsMap;
  }

  @Override
  public String toString() {
    // BUG FIX: the original passed getValues() and getValuesProviderClass() in
    // swapped order, so the rendered string labeled each with the other's value.
    // Arguments must follow placeholder order: type, valuesProviderClass, values.
    return Utils.format("ModelDefinition[type='{}' valuesProviderClass='{}' values='{}']",
        getModelType(), getValuesProviderClass(), getValues());
  }

}
z123/datacollector
container/src/main/java/com/streamsets/datacollector/config/ModelDefinition.java
Java
apache-2.0
3,255