code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from tripleo_common._i18n import _
LOG = logging.getLogger(__name__)
class CloudConfigException(Exception):
    """Base tripleo-common exception.

    To correctly use this class, inherit from it and define a 'msg_fmt'
    class attribute. That format string will get %-interpolated with the
    keyword arguments provided to the constructor.
    """

    msg_fmt = _("An unknown exception occurred.")

    def __init__(self, message=None, **kwargs):
        self.kwargs = kwargs
        if not message:
            try:
                message = self.msg_fmt % kwargs
            except Exception:
                # kwargs doesn't match a variable in the message;
                # log the issue and the kwargs so the failure is diagnosable
                LOG.exception('Exception in string format operation')
                # dict.items() works on both Python 2 and 3
                # (the original iteritems() is Python-2-only)
                for name, value in kwargs.items():
                    LOG.error("%s: %s" % (name, value))
                # at least get the core message out if something happened
                message = self.msg_fmt
        super(CloudConfigException, self).__init__(message)
class MissingEnvironment(CloudConfigException):
    # Must be named msg_fmt: the base class formats self.msg_fmt, so the
    # previous 'message' attribute was silently ignored and the generic
    # "An unknown exception occurred." text was shown instead.
    msg_fmt = "Required environment variables are not set."
| jprovaznik/tripleo-common | tripleo_common/exception.py | Python | apache-2.0 | 1,750 |
#!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
__author__ = 'https://github.com/password123456/'
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import requests
import urllib
import urllib2
import json
import datetime
class bcolors:
    """ANSI escape sequences for coloring terminal output."""
    HEADER = '\033[95m'     # magenta
    OKBLUE = '\033[94m'     # blue
    OKGREEN = '\033[92m'    # green
    WARNING = '\033[93m'    # yellow
    FAIL = '\033[91m'       # red
    ENDC = '\033[0m'        # reset all attributes
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
def get_recent_lotto():
    """Fetch the most recent Korean lotto draw and return a formatted summary.

    Queries the official lottery JSON endpoint. On any network or parse
    error, prints a colored warning and exits the process with code 0
    (preserving the original script's behavior).

    The body uses ``except ... as`` and function-style ``print`` so it runs
    under both Python 2.6+ and Python 3 (the original syntax was py2-only).
    """
    url = 'http://www.nlotto.co.kr/common.do?method=getLottoNumber'
    try:
        r = requests.get(url)
        data = json.loads(r.text)
    except Exception as e:
        print('%s[-] Exception::%s%s' % (bcolors.WARNING, e, bcolors.ENDC))
        sys.exit(0)
    else:
        r.close()
        drwNoDate = data['drwNoDate']        # draw date
        drwNo = data['drwNo']                # draw (round) number
        firstWinamnt = data['firstWinamnt']  # first-prize amount
        drwtNo1 = data['drwtNo1']
        drwtNo2 = data['drwtNo2']
        drwtNo3 = data['drwtNo3']
        drwtNo4 = data['drwtNo4']
        drwtNo5 = data['drwtNo5']
        drwtNo6 = data['drwtNo6']
        bnusNo = data['bnusNo']              # bonus number
        # 당첨금 자리수 변환 (add thousands separators to the prize amount)
        firstWinamnt = format(firstWinamnt, ',')
        content = '** 최근 로또 조회 **\n'
        content = content + ' [+] 로또 : http://www.nlotto.co.kr\n [+] 추첨일자: %s\n [+] 회차: %d 회\n [+] 당첨번호: %d %d %d %d %d %d\n [+] 보너스: %d \n [*] 당첨금: %s 원\n' % (drwNoDate, drwNo, drwtNo1, drwtNo2, drwtNo3, drwtNo4, drwtNo5, drwtNo6, bnusNo, firstWinamnt )
        #print content
        return content
# NOTE(review): importing this module triggers a live HTTP request here,
# and the bound value is never used (main() fetches again). Kept as-is to
# preserve existing behavior; consider removing.
content = get_recent_lotto()
def main():
    """Entry point: fetch the latest draw (the return value is discarded)."""
    get_recent_lotto()
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Ctrl-C is a normal way to stop the script; exit quietly.
        sys.exit(0)
    except Exception as e:  # 'as' form is Python 2.6+/3 compatible
        print('%s[-] Exception::%s%s' % (bcolors.WARNING, e, bcolors.ENDC))
| password123456/lotto | get_recent_lotto.py | Python | apache-2.0 | 1,787 |
/*
* Licensed to the Sakai Foundation (SF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The SF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.sakaiproject.kernel.api;
/**
* A service to provide Classloaders to the kernel based on component
* specifications. These classloaders should be fully populate with the the
* class path elements in the component specification, and where appropriate
* packages should be exported and dependencies injected into the shared
* classloader. The resulting classloader should configured and ready for use.
*/
/**
 * Produces fully configured classloaders for kernel components.
 * <p>
 * Implementations populate the classloader with the classpath elements
 * named by the component specification, export packages and inject shared
 * dependencies where appropriate, and hand back a classloader that is
 * ready for immediate use.
 */
public interface ClassLoaderService {

    /**
     * Builds a component classloader for the supplied specification.
     *
     * @param spec the component specification to realize
     * @return a classloader configured from {@code spec}
     * @throws ComponentSpecificationException if the specification cannot be
     *         turned into a working classloader
     */
    ClassLoader getComponentClassLoader(ComponentSpecification spec)
            throws ComponentSpecificationException;
}
| sakai-mirror/k2 | agnostic/shared/src/main/java/org/sakaiproject/kernel/api/ClassLoaderService.java | Java | apache-2.0 | 1,593 |
/*
* Copyright (c) Created by Cody.yi on 2016/9/5.
*/
package com.cody.handler.framework.viewmodel;
/**
* Created by cody.yi on 2016/8/24.
* <p>
* 不包含头部的list view
*
* @param <ItemViewModel> ListView中的item ViewModel
*/
/**
 * List ViewModel variant that also carries a search-box ViewModel
 * (plain list, no header section).
 *
 * @param <ItemViewModel> ViewModel type of the individual list items
 */
public class ListWithSearchViewModel<ItemViewModel extends XItemViewModel> extends ListViewModel<ItemViewModel> implements IWithSearchViewModel {

    // default-configured search state shared for this list's lifetime
    private final SearchViewModel mSearch = SearchViewModel.createDefaultSearch();

    @Override
    public SearchViewModel getSearchViewModel() {
        return mSearch;
    }
}
| codyer/CleanFramework | handler/src/main/java/com/cody/handler/framework/viewmodel/ListWithSearchViewModel.java | Java | apache-2.0 | 589 |
// Copyright © 2013-2020 Andy Goryachev <andy@goryachev.com>
package goryachev.common.io;
/**
 * Shared constants for bit-oriented stream I/O: a lookup table of
 * low-order bit masks covering widths of 0 through 32 bits.
 */
public class BitStreamCommon
{
    protected static final int BITS_PER_BYTE = 8;

    /** MASK[n] has the lowest n bits set (n = 0..32). */
    protected static final int[] MASK =
    {
        0x00000000, 0x00000001, 0x00000003, 0x00000007,
        0x0000000f, 0x0000001f, 0x0000003f, 0x0000007f,
        0x000000ff, 0x000001ff, 0x000003ff, 0x000007ff,
        0x00000fff, 0x00001fff, 0x00003fff, 0x00007fff,
        0x0000ffff, 0x0001ffff, 0x0003ffff, 0x0007ffff,
        0x000fffff, 0x001fffff, 0x003fffff, 0x007fffff,
        0x00ffffff, 0x01ffffff, 0x03ffffff, 0x07ffffff,
        0x0fffffff, 0x1fffffff, 0x3fffffff, 0x7fffffff,
        0xffffffff
    };

    /**
     * Returns an int whose lowest {@code bits} bits are set.
     *
     * @param bits the mask width, 0..32 (out-of-range values throw
     *        {@link ArrayIndexOutOfBoundsException})
     * @return the mask value
     */
    public static int getMask(int bits)
    {
        return MASK[bits];
    }
}
| andy-goryachev/JsonPretty | src/goryachev/common/io/BitStreamCommon.java | Java | apache-2.0 | 828 |
package br.ufrn.Myeclone.model;
import javax.persistence.Entity;
@Entity
// Marker JPA entity: an "Acao" (action) persisted with the state and
// behavior inherited from Tarefa; it declares no members of its own.
public class Acao extends Tarefa{
}
| JorgePereiraUFRN/Myeclone | Myeclone/src/br/ufrn/Myeclone/model/Acao.java | Java | apache-2.0 | 123 |
// TypeScript-emitted helper: classic prototypal inheritance. Copies the
// base constructor's own (static) properties onto the derived constructor,
// then rewires d.prototype to an object whose prototype is b.prototype.
var __extends = (this && this.__extends) || function (d, b) {
    for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
    function __() { this.constructor = d; }
    d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
// TypeScript-emitted helper: applies an array of decorators to a class,
// method, or property, delegating to the native Reflect.decorate when it
// is available and otherwise folding the decorators right-to-left.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var ts = require("typescript");
var index_1 = require("../../models/index");
var components_1 = require("../components");
// Converter component (compiled from TypeScript) that recognizes type-alias
// references and emits ReferenceType nodes to be resolved later by name.
var AliasConverter = (function (_super) {
    __extends(AliasConverter, _super);
    function AliasConverter() {
        _super.apply(this, arguments);
        // run ahead of the other type converters
        this.priority = 100;
    }
    // A node is handled here when the name written in the source differs
    // from the fully qualified name of the symbol it resolves to, i.e. the
    // written name is an alias.
    AliasConverter.prototype.supportsNode = function (context, node, type) {
        if (!type || !node || !node.typeName)
            return false;
        if (!type.symbol)
            return true;
        var checker = context.checker;
        var symbolName = checker.getFullyQualifiedName(type.symbol).split('.');
        if (!symbolName.length)
            return false;
        // drop a leading quoted module name, e.g. "module".Foo
        if (symbolName[0].substr(0, 1) == '"')
            symbolName.shift();
        var nodeName = ts.getTextOfNode(node.typeName).split('.');
        if (!nodeName.length)
            return false;
        // compare only the trailing segments the two names have in common
        var common = Math.min(symbolName.length, nodeName.length);
        symbolName = symbolName.slice(-common);
        nodeName = nodeName.slice(-common);
        return nodeName.join('.') != symbolName.join('.');
    };
    // Emit a by-name reference so the alias target is resolved later.
    AliasConverter.prototype.convertNode = function (context, node) {
        var name = ts.getTextOfNode(node.typeName);
        return new index_1.ReferenceType(name, index_1.ReferenceType.SYMBOL_ID_RESOLVE_BY_NAME);
    };
    AliasConverter = __decorate([
        components_1.Component({ name: 'type:alias' })
    ], AliasConverter);
    return AliasConverter;
})(components_1.ConverterTypeComponent);
exports.AliasConverter = AliasConverter;
| aciccarello/typedoc | lib/converter/types/alias.js | JavaScript | apache-2.0 | 2,391 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.ivy.plugins.conflict;
import java.util.Collection;
import org.apache.ivy.core.module.descriptor.DependencyDescriptor;
import org.apache.ivy.core.module.id.ModuleRevisionId;
import org.apache.ivy.core.settings.IvySettings;
import org.apache.ivy.plugins.IvySettingsAware;
/**
 * Convenience base class for conflict managers: holds the manager's name
 * and the Ivy settings it was configured with.
 */
public abstract class AbstractConflictManager implements ConflictManager, IvySettingsAware {

    private String name;
    private IvySettings settings;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public IvySettings getSettings() {
        return settings;
    }

    public void setSettings(IvySettings settings) {
        this.settings = settings;
    }

    /** The manager's name doubles as its display form. */
    @Override
    public String toString() {
        return name;
    }

    /** Default behavior: ignore blacklisted revisions; subclasses may override. */
    public void handleAllBlacklistedRevisions(DependencyDescriptor dd,
            Collection<ModuleRevisionId> foundBlacklisted) {
        // intentionally a no-op
    }
}
| jaikiran/ant-ivy | src/java/org/apache/ivy/plugins/conflict/AbstractConflictManager.java | Java | apache-2.0 | 1,770 |
/* Copyright 2015 1060 Research Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package io.polestar.data.login;
import org.netkernel.layer0.nkf.*;
import org.netkernel.mod.hds.*;
import org.netkernel.module.standard.endpoint.StandardAccessorImpl;
import io.polestar.data.db.PersistenceFactory;
import io.polestar.data.util.MonitorUtils;
/**
 * NetKernel accessor over the stored authentication data.
 * SOURCE reads the persisted user table; SINK updates a user's password
 * hash from the primary request representation.
 */
public class AuthenticationDataAccessor extends StandardAccessorImpl
{
    public AuthenticationDataAccessor()
    {   this.declareThreadSafe();
        this.declareInhibitCheckForBadExpirationOnMutableResource();
    }

    /** Returns the authentication document, attached to the "gt:auth" golden thread. */
    public void onSource(INKFRequestContext aContext) throws Exception
    {
        IHDSDocument authData=PersistenceFactory.getPersistence(aContext).getAuthentication(aContext);
        INKFResponse resp=aContext.createResponseFrom(authData);
        MonitorUtils.attachGoldenThread(aContext, "gt:auth");
    }

    /**
     * Updates the stored password hash for the user named in the primary
     * representation; unknown usernames are silently ignored. Cuts the
     * golden thread so cached SOURCE responses are invalidated.
     */
    public void onSink(INKFRequestContext aContext) throws Exception
    {
        IHDSMutator m=aContext.source("res:/md/authentication",IHDSDocument.class).getMutableClone();
        IHDSReader primary=aContext.sourcePrimary(IHDSDocument.class).getReader();
        String username=(String)primary.getFirstValue("/*/username");
        String password=(String)primary.getFirstValue("/*/password");
        IHDSMutator user=m.getFirstNodeOrNull("/authentication/user[username='"+username+"']");
        if (user!=null)
        {
            // hash the plaintext before it is persisted
            INKFRequest req=aContext.createRequest("active:generatePasswordHash");
            req.addArgumentByValue("password",password);
            req.setRepresentationClass(String.class);
            String hash=(String)aContext.issueRequest(req);
            user.setCursor("password").setValue(hash);
            IHDSDocument state=m.toDocument(false);
            PersistenceFactory.getPersistence(aContext).setAuthentication(state, aContext);
            MonitorUtils.cutGoldenThread(aContext, "gt:auth");
        }
    }
}
| tonbut/polestar | module/urn.io.polestar/src/io/polestar/data/login/AuthenticationDataAccessor.java | Java | apache-2.0 | 2,279 |
using System.Collections.Generic;
namespace Angular_ASPNETCore_Seed.Models
{
/// <summary>
/// One page of query results together with the total number of records
/// available across the whole result set.
/// </summary>
/// <typeparam name="T">Element type of the page.</typeparam>
public struct PagingResult<T>
{
    /// <summary>The records contained in this page.</summary>
    public IEnumerable<T> Records { get; set; }

    /// <summary>Total record count across all pages.</summary>
    public int TotalRecords { get; set; }

    public PagingResult(IEnumerable<T> items, int totalRecords)
    {
        Records = items;
        TotalRecords = totalRecords;
    }
}
| HadwinJ/PerfectDemo | PerfectSolution/Angular_ASPNETCore_Seed/Models/PagingResult.cs | C# | apache-2.0 | 386 |
/*
* Copyright (C) 2015-2021 KeepSafe Software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.getkeepsafe.dexcount.plugin;
import com.android.repository.Revision;
import com.android.repository.Revision.PreviewComparison;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
/**
* Utility for obtaining an appropriate task applicator factory for a given
* Android Gradle Plugin revision.
*/
/**
 * Utility for obtaining an appropriate task applicator factory for a given
 * Android Gradle Plugin revision.
 */
public final class TaskApplicators {
    private TaskApplicators() {
        // static utility; never instantiated
    }

    /**
     * Picks the first factory whose minimum supported AGP revision is at
     * or below {@code revision} (preview status ignored).
     *
     * @param revision the Android Gradle Plugin revision in use
     * @return the matching factory, or empty when none supports the revision
     */
    public static Optional<TaskApplicator.Factory> getFactory(Revision revision) {
        // ordered newest-first so the most capable applicator wins
        List<TaskApplicator.Factory> candidates = Arrays.asList(
            new SevenOhApplicator.Factory(),
            new FourTwoApplicator.Factory(),
            new FourOneApplicator.Factory(),
            new ThreeSixApplicator.Factory(),
            new ThreeFourApplicator.Factory(),
            new JavaOnlyApplicator.Factory()
        );
        for (TaskApplicator.Factory candidate : candidates) {
            if (revision.compareTo(candidate.getMinimumRevision(), PreviewComparison.IGNORE) >= 0) {
                return Optional.of(candidate);
            }
        }
        return Optional.empty();
    }
}
| KeepSafe/dexcount-gradle-plugin | src/main/java/com/getkeepsafe/dexcount/plugin/TaskApplicators.java | Java | apache-2.0 | 1,649 |
/**
* @file
* Declares the array type.
*/
#pragma once
#include "tuple.hpp"
#include "data.hpp"
#include "../values/array.hpp"
#include "../../cast.hpp"
#include <boost/functional/hash.hpp>
#include <boost/variant.hpp>
#include <limits>
#include <ostream>
#include <functional>
namespace puppet { namespace runtime { namespace types {
/**
* Represents the Puppet Array type.
* @tparam Type The type of a runtime type.
*/
template <typename Type>
struct basic_array
{
/**
* Constructs an Array type.
* @param type The element type of the array. Defaults to the Data type.
* @param from The "from" type parameter.
* @param to The "to" type parameter.
*/
explicit basic_array(Type type = data(), int64_t from = std::numeric_limits<int64_t>::min(), int64_t to = std::numeric_limits<int64_t>::max()) :
_element_type(rvalue_cast(type)),
_from(from),
_to(to)
{
}
/**
* Gets the element type of the array.
* @return Returns the element type of the array.
*/
Type const& element_type() const
{
return _element_type;
}
/**
* Gets the "from" type parameter.
* @return Returns the "from" type parameter.
*/
int64_t from() const
{
return _from;
}
/**
* Gets the "to" type parameter.
* @return Returns the "to" type parameter.
*/
int64_t to() const
{
return _to;
}
/**
* Gets the name of the type.
* @return Returns the name of the type (i.e. Array).
*/
static const char* name()
{
return "Array";
}
/**
* Determines if the given value is an instance of this type.
* @tparam Value The type of the runtime value.
* @param value The value to determine if it is an instance of this type. This value will never be a variable.
* @return Returns true if the given value is an instance of this type or false if not.
*/
template <typename Value>
bool is_instance(Value const& value) const
{
// Forward declaration of unsafe_is_instance for recursion
bool unsafe_is_instance(void const*, void const*);
// Check for array
auto ptr = boost::get<values::basic_array<Value>>(&value);
if (!ptr) {
return false;
}
// Check for size is range
int64_t size = static_cast<int64_t>(ptr->size());
if (!(_to < _from ? (size >= _to && size <= _from) : (size >= _from && size <= _to))) {
return false;
}
// Check that each element is of the type
for (auto const& element : *ptr) {
if (!unsafe_is_instance(&element, &_element_type)) {
return false;
}
}
return true;
}
/**
* Determines if the given type is a specialization (i.e. more specific) of this type.
* @param other The other type to check for specialization.
* @return Returns true if the other type is a specialization or false if not.
*/
bool is_specialization(Type const& other) const
{
// For the other type to be a specialization, it must be an Array or Tuple
// For Tuple, the number of types must be 1
// The element types must match
// And the range of other needs to be inside of this type's range
int64_t from, to;
auto array = boost::get<basic_array<Type>>(&other);
if (!array) {
// Check for Array[ElementType]
if (array->element_type() != _element_type) {
return false;
}
from = array->from();
to = array->to();
} else {
// Check for a Tuple[ElementType]
auto tuple = boost::get<basic_tuple<Type>>(&other);
if (!tuple || tuple->types().size() != 1 || tuple->types().front() != _element_type) {
return false;
}
from = tuple->from();
to = tuple->to();
}
// Check for equality
if (from == _from && to == _to) {
return false;
}
return std::min(from, to) >= std::min(_from, _to) &&
std::max(from, to) <= std::max(_from, _to);
}
private:
Type _element_type;
int64_t _from;
int64_t _to;
};
/**
* Stream insertion operator for array type.
* @tparam Type The runtime "type" type.
* @param os The output stream to write the type to.
* @param type The type to write.
* @return Returns the given output stream.
*/
template <typename Type>
std::ostream& operator<<(std::ostream& os, basic_array<Type> const& type)
{
os << basic_array<Type>::name() << '[' << type.element_type();
bool from_default = type.from() == std::numeric_limits<int64_t>::min();
bool to_default = type.to() == std::numeric_limits<int64_t>::max();
if (from_default && to_default) {
// Only output the type
os << ']';
return os;
}
os << ", ";
if (from_default) {
os << "default";
} else {
os << type.from();
}
os << ", ";
if (to_default) {
os << "default";
} else {
os << type.to();
}
os << ']';
return os;
}
/**
* Equality operator for array.
* @tparam Type The "runtime type" type.
* @param left The left type to compare.
* @param right The right type to compare.
* @return Returns true if the two types are equal or false if not.
*/
template <typename Type>
bool operator==(basic_array<Type> const& left, basic_array<Type> const& right)
{
return left.from() == right.from() && left.to() == right.to() && left.element_type() == right.element_type();
}
}}} // puppet::runtime::types
namespace boost {
/**
* Hash specialization for Array type.
* @tparam Type The "runtime type" type.
*/
template <typename Type>
struct hash<puppet::runtime::types::basic_array<Type>>
{
/**
* Hashes the Array type.
* @param type The type to hash.
* @return Returns the hash value for the type.
*/
size_t operator()(puppet::runtime::types::basic_array<Type> const& type) const
{
size_t seed = 0;
hash_combine(seed, puppet::runtime::types::basic_array<Type>::name());
hash_combine(seed, type.element_type());
hash_combine(seed, type.from());
hash_combine(seed, type.to());
return seed;
}
};
}
| iankronquist/puppetcpp | lib/include/puppet/runtime/types/array.hpp | C++ | apache-2.0 | 7,145 |
// Demo driver: builds two products, wraps them in order items, places the
// orders, and pays for the first one through an Aliay payment.
public class Test5
{
    public static void main(String[] args)
    {
        // products
        Goods iphone = new Iphone();
        iphone.setName("iphone");
        iphone.setPrice(5000.0);
        Goods shoes =new Shoes();
        shoes.setName("鞋");
        shoes.setPrice(123.0);
        // order lines: product + quantity
        Item item =new Item();
        item.setGoods(iphone);
        item.setAmount(2);
        Item item1 =new Item();
        item1.setGoods(shoes);
        item1.setAmount(3);
        Order order = new Order();
        order.order(item);
        Order order1 = new Order();
        order1.order(item1);
        Customer customer = new Customer();
        customer.order(order);
        Customer customer2 = new Customer();
        customer2.order(order1);
        Payment aliay = new Aliay();
        customer.pay(aliay);
        // NOTE(review): with the original Order.order() being a no-op, the
        // items array stays null-filled and getTotalMoney() throws
        // NullPointerException here — verify against the Order class.
        aliay.pay(order.getTotalMoney());
    }
}
// A customer who can place orders and choose a payment method.
// NOTE(review): both methods ignore their parameter and print only a label
// ("order contents:" / "the payment method you chose:") — they look like
// unfinished stubs; confirm intended output before completing them.
class Customer
{
    public void order(Order order)
    {
        System.out.println("订单内容:");
    }

    public void pay(Payment payment)
    {
        System.out.println("你选择的支付方式是:");
    }
}
/**
 * An order holding up to two items.
 *
 * Fixes two defects in the original: order(Item) was an empty no-op (the
 * items array was never populated, so getTotalMoney() dereferenced null
 * slots and threw NullPointerException), and getTotalMoney() priced the
 * order from freshly constructed Goods whose price was always 0.0 instead
 * of the goods actually ordered.
 */
class Order
{
    private String id;
    // private date createDate;
    Item[] items = new Item[2];
    // number of slots in items[] that are actually filled
    private int itemCount = 0;

    /** Adds an item to this order; silently ignored when full or null. */
    public void order(Item item)
    {
        if (item != null && itemCount < items.length)
        {
            items[itemCount++] = item;
        }
    }

    /** @return the sum of unit price times quantity over all added items */
    public double getTotalMoney()
    {
        double total = 0.0;
        for (int i = 0; i < itemCount; i++)
        {
            total += items[i].getGoods().getPrice() * items[i].getAmount();
        }
        return total;
    }
}
/** One line of an order: a product plus the number of units ordered. */
class Item
{
    private Goods goods;
    private int amount;

    public Goods getGoods()
    {
        return goods;
    }

    public void setGoods(Goods goods)
    {
        this.goods = goods;
    }

    public int getAmount()
    {
        return amount;
    }

    public void setAmount(int amount)
    {
        this.amount = amount;
    }
}
/** A sellable product with a display name and a unit price. */
class Goods
{
    private String name;
    private double price;

    public String getName()
    {
        return name;
    }

    public void setName(String name)
    {
        this.name = name;
    }

    public double getPrice()
    {
        return price;
    }

    public void setPrice(double price)
    {
        this.price = price;
    }
}
// Concrete product type; inherits all state and behavior from Goods.
class Iphone extends Goods
{
}
// Concrete product type; inherits all state and behavior from Goods.
class Shoes extends Goods
{
}
// Payment strategy: implementations settle the given amount of money.
interface Payment
{
    void pay(double money);
}
// Alipay payment strategy: prints the amount paid.
class Aliay implements Payment
{
    public void pay(double money)
    {
        System.out.println("阿里支付"+money);
    }
}
// Apple Pay payment strategy: prints the amount paid.
class ApplePay implements Payment
{
    public void pay(double money)
    {
        System.out.println("苹果支付"+money);
    }
}
// WeChat payment strategy: prints the amount paid.
class WeixinPay implements Payment
{
    public void pay(double money)
    {
        System.out.println("微信支付"+money);
    }
} | JAVA201708/Homework | 201709/20170915/Team1/Malanlan/编程题/ex05/Test5.java | Java | apache-2.0 | 2,394 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.management.ManagementFactory;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeoutException;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.concurrent.JMXEnabledThreadPoolExecutor;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.db.filter.QueryFilter;
import org.apache.cassandra.db.filter.QueryPath;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.gms.ApplicationState;
import org.apache.cassandra.gms.FailureDetector;
import org.apache.cassandra.gms.Gossiper;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.service.*;
import org.apache.cassandra.thrift.*;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.WrappedRunnable;
import org.cliffc.high_scale_lib.NonBlockingHashSet;
/**
* For each endpoint for which we have hints, there is a row in the system hints CF.
* The key for this row is ByteBuffer.wrap(string), i.e. "127.0.0.1".
* (We have to use String keys for compatibility with OPP.)
* SuperColumns in these rows are the mutations to replay, with uuid names:
*
* <dest ip>: { // key
* <uuid>: { // supercolumn
* mutation: <mutation> // subcolumn
* version: <mutation serialization version>
* table: <table of hinted mutation>
* key: <key of hinted mutation>
* }
* }
*
* When FailureDetector signals that a node that was down is back up, we page through
* the hinted mutations and send them over one at a time, waiting for
* hinted_handoff_throttle_delay in between each.
*
* deliverHints is also exposed to JMX so it can be run manually if FD ever misses
* its cue somehow.
*/
public class HintedHandOffManager implements HintedHandOffManagerMBean
{
    // process-wide singleton; construction registers the MBean
    public static final HintedHandOffManager instance = new HintedHandOffManager();

    // system-table column family in which hints are stored
    public static final String HINTS_CF = "HintsColumnFamily";

    private static final Logger logger_ = LoggerFactory.getLogger(HintedHandOffManager.class);

    // number of hint columns fetched per slice while paging through a row
    private static final int PAGE_SIZE = 1024;
    private static final String SEPARATOR = "-";
    private static final int LARGE_NUMBER = 65536; // 64k nodes ought to be enough for anybody.

    // endpoints with a delivery already queued or running (avoids duplicates)
    private final NonBlockingHashSet<InetAddress> queuedDeliveries = new NonBlockingHashSet<InetAddress>();
    private final ExecutorService executor_ = new JMXEnabledThreadPoolExecutor("HintedHandoff");
    /**
     * Registers this manager as a JMX MBean; a registration failure is
     * fatal (wrapped in RuntimeException).
     */
    public HintedHandOffManager()
    {
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        try
        {
            mbs.registerMBean(this, new ObjectName("org.apache.cassandra.db:type=HintedHandoffManager"));
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }
    }
    // Logging-only hook: the actual MBean registration happens in the
    // constructor of the singleton; calling this merely forces class init.
    public void registerMBean()
    {
        logger_.debug("Created HHOM instance, registered MBean.");
    }
    /**
     * Replays a single hinted mutation to the target endpoint and blocks
     * for the acknowledgement.
     *
     * @return true when the endpoint acknowledged the write; false on timeout
     * @throws IOException on serialization/transport errors
     */
    private static boolean sendMutation(InetAddress endpoint, RowMutation mutation) throws IOException
    {
        IWriteResponseHandler responseHandler = WriteResponseHandler.create(endpoint);
        MessagingService.instance().sendRR(mutation, endpoint, responseHandler);

        try
        {
            responseHandler.get();
        }
        catch (TimeoutException e)
        {
            return false;
        }

        // throttle between hints so the recovering node is not overwhelmed
        try
        {
            Thread.sleep(DatabaseDescriptor.getHintedHandoffThrottleDelay());
        }
        catch (InterruptedException e)
        {
            throw new AssertionError(e);
        }
        return true;
    }
    // Tombstones one delivered hint (supercolumn hintId) in the hints row
    // keyed by the target endpoint's token.
    private static void deleteHint(ByteBuffer tokenBytes, ByteBuffer hintId, long timestamp) throws IOException
    {
        RowMutation rm = new RowMutation(Table.SYSTEM_TABLE, tokenBytes);
        rm.delete(new QueryPath(HINTS_CF, hintId), timestamp);
        rm.apply();
    }
public void deleteHintsForEndpoint(final String ipOrHostname)
{
try
{
InetAddress endpoint = InetAddress.getByName(ipOrHostname);
deleteHintsForEndpoint(endpoint);
}
catch (UnknownHostException e)
{
logger_.warn("Unable to find "+ipOrHostname+", not a hostname or ipaddr of a node?:");
e.printStackTrace();
throw new RuntimeException(e);
}
}
    /**
     * Deletes all stored hints for the given cluster member. The row-level
     * tombstone, flush, and cleanup compaction run asynchronously; failures
     * are logged, not propagated. Non-members are ignored.
     */
    public void deleteHintsForEndpoint(final InetAddress endpoint)
    {
        if (!StorageService.instance.getTokenMetadata().isMember(endpoint))
            return;
        Token<?> token = StorageService.instance.getTokenMetadata().getToken(endpoint);
        ByteBuffer tokenBytes = StorageService.getPartitioner().getTokenFactory().toByteArray(token);
        final ColumnFamilyStore hintStore = Table.open(Table.SYSTEM_TABLE).getColumnFamilyStore(HINTS_CF);
        final RowMutation rm = new RowMutation(Table.SYSTEM_TABLE, tokenBytes);
        rm.delete(new QueryPath(HINTS_CF), System.currentTimeMillis());

        // execute asynchronously to avoid blocking caller (which may be processing gossip)
        Runnable runnable = new Runnable()
        {
            public void run()
            {
                try
                {
                    logger_.info("Deleting any stored hints for " + endpoint);
                    rm.apply();
                    // flush + maximal compaction reclaim the tombstoned row
                    hintStore.forceFlush();
                    CompactionManager.instance.submitMaximal(hintStore, Integer.MAX_VALUE);
                }
                catch (Exception e)
                {
                    logger_.warn("Could not delete hints for " + endpoint + ": " + e);
                }
            }
        };
        StorageService.tasks.execute(runnable);
    }
private static boolean pagingFinished(ColumnFamily hintColumnFamily, ByteBuffer startColumn)
{
// done if no hints found or the start column (same as last column processed in previous iteration) is the only one
return hintColumnFamily == null
|| (hintColumnFamily.getSortedColumns().size() == 1 && hintColumnFamily.getColumn(startColumn) != null);
}
private int waitForSchemaAgreement(InetAddress endpoint) throws InterruptedException
{
Gossiper gossiper = Gossiper.instance;
int waited = 0;
// first, wait for schema to be gossiped.
while (gossiper.getEndpointStateForEndpoint(endpoint).getApplicationState(ApplicationState.SCHEMA) == null) {
Thread.sleep(1000);
waited += 1000;
if (waited > 2 * StorageService.RING_DELAY)
throw new RuntimeException("Didin't receive gossiped schema from " + endpoint + " in " + 2 * StorageService.RING_DELAY + "ms");
}
waited = 0;
// then wait for the correct schema version.
// usually we use DD.getDefsVersion, which checks the local schema uuid as stored in the system table.
// here we check the one in gossip instead; this serves as a canary to warn us if we introduce a bug that
// causes the two to diverge (see CASSANDRA-2946)
while (!gossiper.getEndpointStateForEndpoint(endpoint).getApplicationState(ApplicationState.SCHEMA).value.equals(
gossiper.getEndpointStateForEndpoint(FBUtilities.getBroadcastAddress()).getApplicationState(ApplicationState.SCHEMA).value))
{
Thread.sleep(1000);
waited += 1000;
if (waited > 2 * StorageService.RING_DELAY)
throw new RuntimeException("Could not reach schema agreement with " + endpoint + " in " + 2 * StorageService.RING_DELAY + "ms");
}
logger_.debug("schema for {} matches local schema", endpoint);
return waited;
}
    /**
     * Replays all stored hints to a node that has come back up: waits for
     * schema agreement, pages through the endpoint's hints row, sends each
     * mutation, deletes delivered hints, then flushes and compacts the
     * hints column family.
     */
    private void deliverHintsToEndpoint(InetAddress endpoint) throws IOException, DigestMismatchException, InvalidRequestException, TimeoutException, InterruptedException
    {
        try
        {
            logger_.debug("Checking remote({}) schema before delivering hints", endpoint);
            int waited = waitForSchemaAgreement(endpoint);
            // sleep a random amount to stagger handoff delivery from different replicas.
            // (if we had to wait, then gossiper randomness took care of that for us already.)
            if (waited == 0) {
                int sleep = new Random().nextInt(60000);
                logger_.debug("Sleeping {}ms to stagger hint delivery", sleep);
                Thread.sleep(sleep);
            }
            if (!FailureDetector.instance.isAlive(endpoint))
            {
                logger_.info("Endpoint {} died before hint delivery, aborting", endpoint);
                return;
            }
        }
        finally
        {
            // always release the in-flight marker so a later delivery can run
            queuedDeliveries.remove(endpoint);
        }

        // 1. Get the key of the endpoint we need to handoff
        // 2. For each column, deserialize the mutation and send it to the endpoint
        // 3. Delete the subcolumn if the write was successful
        // 4. Force a flush
        // 5. Do major compaction to clean up all deletes etc.

        // find the hints for the node using its token.
        Token<?> token = StorageService.instance.getTokenMetadata().getToken(endpoint);
        logger_.info("Started hinted handoff for token: {} with IP: {}", token, endpoint);
        ByteBuffer tokenBytes = StorageService.getPartitioner().getTokenFactory().toByteArray(token);
        DecoratedKey<?> epkey = StorageService.getPartitioner().decorateKey(tokenBytes);
        int rowsReplayed = 0;
        ColumnFamilyStore hintStore = Table.open(Table.SYSTEM_TABLE).getColumnFamilyStore(HINTS_CF);
        ByteBuffer startColumn = ByteBufferUtil.EMPTY_BYTE_BUFFER;

        delivery:
        while (true)
        {
            // page PAGE_SIZE hints at a time, resuming from the last column seen
            QueryFilter filter = QueryFilter.getSliceFilter(epkey, new QueryPath(HINTS_CF), startColumn, ByteBufferUtil.EMPTY_BYTE_BUFFER, false, PAGE_SIZE);
            ColumnFamily hintColumnFamily = ColumnFamilyStore.removeDeleted(hintStore.getColumnFamily(filter), Integer.MAX_VALUE);
            if (pagingFinished(hintColumnFamily, startColumn))
                break;
            for (IColumn hint : hintColumnFamily.getSortedColumns())
            {
                startColumn = hint.name();
                IColumn versionColumn = hint.getSubColumn(ByteBufferUtil.bytes("version"));
                IColumn tableColumn = hint.getSubColumn(ByteBufferUtil.bytes("table"));
                IColumn keyColumn = hint.getSubColumn(ByteBufferUtil.bytes("key"));
                IColumn mutationColumn = hint.getSubColumn(ByteBufferUtil.bytes("mutation"));
                assert versionColumn != null;
                assert tableColumn != null;
                assert keyColumn != null;
                assert mutationColumn != null;
                // deserialize using the serialization version recorded with the hint
                DataInputStream in = new DataInputStream(ByteBufferUtil.inputStream(mutationColumn.value()));
                RowMutation rm = RowMutation.serializer().deserialize(in, ByteBufferUtil.toInt(versionColumn.value()));
                if (sendMutation(endpoint, rm))
                {
                    deleteHint(tokenBytes, hint.name(), versionColumn.timestamp());
                    rowsReplayed++;
                }
                else
                {
                    // target stopped responding; stop and retry on the next trigger
                    logger_.info("Could not complete hinted handoff to " + endpoint);
                    break delivery;
                }
            }
        }

        if (rowsReplayed > 0)
        {
            hintStore.forceFlush();
            try
            {
                // maximal compaction purges the tombstoned (delivered) hints
                CompactionManager.instance.submitMaximal(hintStore, Integer.MAX_VALUE).get();
            }
            catch (Exception e)
            {
                throw new RuntimeException(e);
            }
        }

        logger_.info(String.format("Finished hinted handoff of %s rows to endpoint %s",
                                   rowsReplayed, endpoint));
    }
    /*
     * This method is used to deliver hints to a particular endpoint.
     * When we learn that some endpoint is back up we deliver the data
     * to him via an event driven mechanism.
     */
    public void deliverHints(final InetAddress to)
    {
        // Only one delivery task per endpoint may be queued at a time.
        if (!queuedDeliveries.add(to))
            return;
        Runnable r = new WrappedRunnable()
        {
            public void runMayThrow() throws Exception
            {
                deliverHintsToEndpoint(to);
            }
        };
        executor_.execute(r);
    }
    /** String overload (e.g. for JMX): resolves the host name/IP and delegates. */
    public void deliverHints(String to) throws UnknownHostException
    {
        deliverHints(InetAddress.getByName(to));
    }
public List<String> listEndpointsPendingHints()
{
List<Row> rows = getHintsSlice(1);
// Extract the keys as strings to be reported.
LinkedList<String> result = new LinkedList<String>();
for (Row r : rows)
{
if (r.cf != null) //ignore removed rows
result.addFirst(new String(r.key.key.array()));
}
return result;
}
public Map<String, Integer> countPendingHints()
{
List<Row> rows = getHintsSlice(Integer.MAX_VALUE);
Map<String, Integer> result = new HashMap<String, Integer>();
for (Row r : rows)
{
if (r.cf != null) //ignore removed rows
result.put(new String(r.key.key.array()), r.cf.getColumnCount());
}
return result;
}
    /**
     * Fetches up to {@code column_count} hint columns for every row in the
     * hints CF, scanning the entire token range at ConsistencyLevel.ONE.
     * Failures are logged and rethrown as RuntimeException.
     */
    private List<Row> getHintsSlice(int column_count)
    {
        // ColumnParent for HintsCF...
        ColumnParent parent = new ColumnParent(HINTS_CF);
        // Get count # of columns...
        SlicePredicate predicate = new SlicePredicate();
        SliceRange sliceRange = new SliceRange();
        sliceRange.setStart(new byte[0]).setFinish(new byte[0]);
        sliceRange.setCount(column_count);
        predicate.setSlice_range(sliceRange);
        // From keys "" to ""...
        IPartitioner<?> partitioner = StorageService.getPartitioner();
        ByteBuffer empty = ByteBufferUtil.EMPTY_BYTE_BUFFER;
        // Empty start == empty end covers the whole ring.
        Range range = new Range(partitioner.getToken(empty), partitioner.getToken(empty));
        // Get a bunch of rows!
        List<Row> rows;
        try
        {
            rows = StorageProxy.getRangeSlice(new RangeSliceCommand("system", parent, predicate, range, LARGE_NUMBER), ConsistencyLevel.ONE);
        }
        catch (Exception e)
        {
            logger_.info("HintsCF getEPPendingHints timed out.");
            throw new RuntimeException(e);
        }
        return rows;
    }
}
| segfault/apache_cassandra | src/java/org/apache/cassandra/db/HintedHandOffManager.java | Java | apache-2.0 | 15,835 |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
namespace NetFreeSwitch.Framework.FreeSwitch.Commands {
    /// <summary>
    ///     Issues the FreeSwitch "divert_events" command, switching event
    ///     diversion on or off.
    /// </summary>
    public class DivertEventsCommand : BaseCommand {
        // Whether diversion should be enabled ("on") or disabled ("off").
        private readonly bool _enabled;

        public DivertEventsCommand(bool flag) {
            _enabled = flag;
        }

        public override string Command {
            get { return "divert_events"; }
        }

        public override string Argument {
            get { return _enabled ? "on" : "off"; }
        }
    }
}
| Tochemey/NetFreeSwitch.Framework | NetFreeSwitch.Framework/FreeSwitch/Commands/DivertEventsCommand.cs | C# | apache-2.0 | 1,199 |
# Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from lxml import etree
from keystone.logic.types import fault
class User(object):
    """Serializable representation of a keystone user.

    Round-trips between XML/JSON request bodies and the attribute form used
    by the service logic (Python 2 codebase; `unicode` is intentional).
    """

    def __init__(self, password=None, id=None, name=None, tenant_id=None,
            email=None, enabled=None, tenant_roles=None):
        self.id = id
        self.name = name
        self.tenant_id = tenant_id
        self.password = password
        self.email = email
        # Normalize any truthy/falsy input to a real bool.
        self.enabled = enabled and True or False
        self.tenant_roles = tenant_roles

    @staticmethod
    def from_xml(xml_str):
        """Parse a <user> XML document; raises BadRequestFault on bad input."""
        try:
            dom = etree.Element("root")
            dom.append(etree.fromstring(xml_str))
            root = dom.find("{http://docs.openstack.org/identity/api/v2.0}" \
                            "user")
            if root == None:
                raise fault.BadRequestFault("Expecting User")
            # BUG FIX: the "id" attribute was never read, so the *builtin*
            # `id` function was silently passed to the constructor below.
            id = root.get("id")
            name = root.get("name")
            tenant_id = root.get("tenantId")
            email = root.get("email")
            password = root.get("password")
            enabled = root.get("enabled")
            if not name:
                raise fault.BadRequestFault("Expecting User")
            elif not password:
                raise fault.BadRequestFault("Expecting User password")
            elif not email:
                raise fault.BadRequestFault("Expecting User email")
            # Missing attribute defaults to enabled.
            enabled = enabled is None or enabled.lower() in ["true", "yes"]
            return User(password, id, name, tenant_id, email, enabled)
        except etree.LxmlError as e:
            raise fault.BadRequestFault("Cannot parse User", str(e))

    @staticmethod
    def from_json(json_str):
        """Parse a {"user": {...}} JSON body; raises BadRequestFault on bad input."""
        try:
            obj = json.loads(json_str)
            if not "user" in obj:
                raise fault.BadRequestFault("Expecting User")
            user = obj["user"]
            id = user.get('id', None)
            name = user.get('name', None)
            if not "password" in user:
                raise fault.BadRequestFault("Expecting User Password")
            password = user["password"]
            # Either a non-blank id or a non-blank name must be supplied.
            if (id == None or len(id.strip()) == 0) and (
                    name == None or len(name.strip()) == 0):
                raise fault.BadRequestFault("Expecting User")
            elif password == None or len(password.strip()) == 0:
                raise fault.BadRequestFault("Expecting User password")
            if "tenantId" in user:
                tenant_id = user["tenantId"]
            else:
                tenant_id = None
            if "email" not in user:
                raise fault.BadRequestFault("Expecting User Email")
            email = user["email"]
            if "enabled" in user:
                set_enabled = user["enabled"]
                if not isinstance(set_enabled, bool):
                    raise fault.BadRequestFault("Bad enabled attribute!")
            else:
                set_enabled = True
            return User(password, id, name, tenant_id, email, set_enabled)
        except (ValueError, TypeError) as e:
            raise fault.BadRequestFault("Cannot parse Tenant", str(e))

    def to_dom(self):
        """Build the <user> DOM element; unset attributes are omitted."""
        dom = etree.Element("user",
                        xmlns="http://docs.openstack.org/identity/api/v2.0")
        if self.email:
            dom.set("email", unicode(self.email))
        if self.tenant_id:
            dom.set("tenantId", unicode(self.tenant_id))
        if self.id:
            dom.set("id", unicode(self.id))
        if self.name:
            dom.set("name", unicode(self.name))
        if self.enabled:
            dom.set("enabled", unicode(self.enabled).lower())
        if self.password:
            dom.set("password", unicode(self.password))
        if self.tenant_roles:
            dom_roles = etree.Element("tenantRoles")
            for role in self.tenant_roles:
                dom_role = etree.Element("tenantRole")
                dom_role.text = role
                dom_roles.append(dom_role)
            dom.append(dom_roles)
        return dom

    def to_xml(self):
        return etree.tostring(self.to_dom())

    def to_dict(self):
        """Return the {"user": {...}} dict form; unset fields are omitted."""
        user = {}
        if self.id:
            user["id"] = unicode(self.id)
        if self.name:
            user["name"] = unicode(self.name)
        if self.tenant_id:
            user["tenantId"] = unicode(self.tenant_id)
        if self.password:
            user["password"] = unicode(self.password)
        user["email"] = unicode(self.email)
        user["enabled"] = self.enabled
        if self.tenant_roles:
            user["tenantRoles"] = list(self.tenant_roles)
        return {'user': user}

    def to_json(self):
        return json.dumps(self.to_dict())
class User_Update(object):
    """Partial-update form of a user: every field optional, and `enabled`
    is tri-state (None means "leave unchanged")."""

    def __init__(self, password=None, id=None, name=None, tenant_id=None,
            email=None, enabled=None):
        self.id = id
        self.name = name
        self.tenant_id = tenant_id
        self.password = password
        self.email = email
        # Preserve None (no change requested); otherwise coerce to bool.
        self.enabled = bool(enabled) if enabled is not None else None

    @staticmethod
    def from_xml(xml_str):
        """Parse a <user> update document; raises BadRequestFault on bad input."""
        try:
            dom = etree.Element("root")
            dom.append(etree.fromstring(xml_str))
            root = dom.find("{http://docs.openstack.org/identity/api/v2.0}" \
                            "user")
            if root == None:
                raise fault.BadRequestFault("Expecting User")
            id = root.get("id")
            name = root.get("name")
            tenant_id = root.get("tenantId")
            email = root.get("email")
            password = root.get("password")
            enabled = root.get("enabled")
            if enabled == None or enabled == "true" or enabled == "yes":
                set_enabled = True
            elif enabled == "false" or enabled == "no":
                set_enabled = False
            else:
                raise fault.BadRequestFault("Bad enabled attribute!")
            # TODO: WTF is this?!
            if password == '':
                password = id
            # BUG FIX: previously constructed a User, discarding the
            # tri-state `enabled` semantics of User_Update.
            return User_Update(password=password, id=id, name=name,
                        tenant_id=tenant_id, email=email, enabled=set_enabled)
        except etree.LxmlError as e:
            raise fault.BadRequestFault("Cannot parse User", str(e))

    @staticmethod
    def from_json(json_str):
        """Parse a {"user": {...}} update body; raises BadRequestFault on bad input."""
        try:
            obj = json.loads(json_str)
            if not "user" in obj:
                raise fault.BadRequestFault("Expecting User")
            user = obj["user"]
            id = user.get('id', None)
            name = user.get('name', None)
            password = user.get('password', None)
            tenant_id = user.get('tenantId', None)
            email = user.get('email', None)
            enabled = user.get('enabled', True)
            if not isinstance(enabled, bool):
                raise fault.BadRequestFault("Bad enabled attribute!")
            # TODO: WTF is this?!
            if password == '':
                password = id
            # BUG FIX: previously constructed a User instead of User_Update.
            return User_Update(password, id, name, tenant_id, email, enabled)
        except (ValueError, TypeError) as e:
            raise fault.BadRequestFault("Cannot parse Tenant", str(e))

    def to_dom(self):
        """Build the <user> DOM element; only set fields are emitted."""
        dom = etree.Element("user",
                        xmlns="http://docs.openstack.org/identity/api/v2.0")
        if self.email:
            dom.set("email", unicode(self.email))
        if self.tenant_id:
            dom.set("tenantId", unicode(self.tenant_id))
        if self.id:
            dom.set("id", unicode(self.id))
        if self.name:
            dom.set("name", unicode(self.name))
        if self.enabled is not None:
            dom.set("enabled", unicode(self.enabled).lower())
        if self.password:
            dom.set("password", unicode(self.password))
        return dom

    def to_xml(self):
        return etree.tostring(self.to_dom())

    def to_dict(self):
        """Return the {"user": {...}} dict form; only set fields are emitted."""
        user = {}
        if self.id:
            user["id"] = unicode(self.id)
        if self.name:
            user["name"] = unicode(self.name)
        if self.tenant_id:
            user["tenantId"] = unicode(self.tenant_id)
        if self.password:
            user["password"] = unicode(self.password)
        if self.email:
            user["email"] = unicode(self.email)
        if self.enabled is not None:
            user["enabled"] = self.enabled
        return {'user': user}

    def to_json(self):
        return json.dumps(self.to_dict())
class Users(object):
    """A page of User resources plus their pagination links."""

    def __init__(self, values, links):
        self.values = values
        self.links = links

    def to_xml(self):
        # Root <users> element in the identity-API namespace; users first,
        # then the pagination links, matching the JSON ordering.
        root = etree.Element("users")
        root.set(u"xmlns", "http://docs.openstack.org/identity/api/v2.0")
        for child in self.values:
            root.append(child.to_dom())
        for child in self.links:
            root.append(child.to_dom())
        return etree.tostring(root)

    def to_json(self):
        user_dicts = []
        for entry in self.values:
            user_dicts.append(entry.to_dict()["user"])
        link_dicts = []
        for entry in self.links:
            link_dicts.append(entry.to_dict()["links"])
        return json.dumps({"users": user_dicts, "users_links": link_dicts})
| genius1611/Keystone | keystone/logic/types/user.py | Python | apache-2.0 | 9,678 |
// Copyright 2022 Google LLC. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package storage
import (
	"context"
	"errors"

	"github.com/apigee/registry/server/registry/internal/storage/models"
	"github.com/apigee/registry/server/registry/names"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
	"gorm.io/gorm"
)
// GetProject returns the stored project with the given name.
// A missing row maps to codes.NotFound; other failures to codes.Internal.
func (c *Client) GetProject(ctx context.Context, name names.Project) (*models.Project, error) {
	v := new(models.Project)
	// errors.Is matches wrapped errors too, which a plain == comparison misses.
	if err := c.db.Take(v, "key = ?", name.String()).Error; errors.Is(err, gorm.ErrRecordNotFound) {
		return nil, status.Errorf(codes.NotFound, "%q not found in database", name)
	} else if err != nil {
		return nil, status.Error(codes.Internal, err.Error())
	}
	return v, nil
}
// GetApi returns the stored API with the given name.
// A missing row maps to codes.NotFound; other failures to codes.Internal.
func (c *Client) GetApi(ctx context.Context, name names.Api) (*models.Api, error) {
	v := new(models.Api)
	// errors.Is matches wrapped errors too, which a plain == comparison misses.
	if err := c.db.Take(v, "key = ?", name.String()).Error; errors.Is(err, gorm.ErrRecordNotFound) {
		return nil, status.Errorf(codes.NotFound, "%q not found in database", name)
	} else if err != nil {
		return nil, status.Error(codes.Internal, err.Error())
	}
	return v, nil
}
// GetVersion returns the stored API version with the given name.
// A missing row maps to codes.NotFound; other failures to codes.Internal.
func (c *Client) GetVersion(ctx context.Context, name names.Version) (*models.Version, error) {
	v := new(models.Version)
	// errors.Is matches wrapped errors too, which a plain == comparison misses.
	if err := c.db.Take(v, "key = ?", name.String()).Error; errors.Is(err, gorm.ErrRecordNotFound) {
		return nil, status.Errorf(codes.NotFound, "%q not found in database", name)
	} else if err != nil {
		return nil, status.Error(codes.Internal, err.Error())
	}
	return v, nil
}
// GetSpec returns the most recent revision of the named spec.
// A missing spec maps to codes.NotFound; other failures to codes.Internal.
func (c *Client) GetSpec(ctx context.Context, name names.Spec) (*models.Spec, error) {
	name = name.Normal()
	// Latest revision first, so First() picks the newest one.
	op := c.db.
		Where("project_id = ?", name.ProjectID).
		Where("api_id = ?", name.ApiID).
		Where("version_id = ?", name.VersionID).
		Where("spec_id = ?", name.SpecID).
		Order("revision_create_time desc")

	v := new(models.Spec)
	// errors.Is matches wrapped errors too, which a plain == comparison misses.
	if err := op.First(v).Error; errors.Is(err, gorm.ErrRecordNotFound) {
		return nil, status.Errorf(codes.NotFound, "%q not found in database", name)
	} else if err != nil {
		return nil, status.Error(codes.Internal, err.Error())
	}

	return v, nil
}
// GetSpecRevision returns the specific spec revision named, resolving any
// revision tag first. Missing rows map to codes.NotFound.
func (c *Client) GetSpecRevision(ctx context.Context, name names.SpecRevision) (*models.Spec, error) {
	name, err := c.unwrapSpecRevisionTag(ctx, name)
	if err != nil {
		return nil, err
	}

	v := new(models.Spec)
	// errors.Is matches wrapped errors too, which a plain == comparison misses.
	if err := c.db.Take(v, "key = ?", name.String()).Error; errors.Is(err, gorm.ErrRecordNotFound) {
		return nil, status.Errorf(codes.NotFound, "%q not found in database", name)
	} else if err != nil {
		return nil, status.Error(codes.Internal, err.Error())
	}
	return v, nil
}
// GetSpecRevisionContents returns the blob holding the named spec revision's
// contents, resolving any revision tag first. Missing rows map to codes.NotFound.
func (c *Client) GetSpecRevisionContents(ctx context.Context, name names.SpecRevision) (*models.Blob, error) {
	name, err := c.unwrapSpecRevisionTag(ctx, name)
	if err != nil {
		return nil, err
	}

	v := new(models.Blob)
	// errors.Is matches wrapped errors too, which a plain == comparison misses.
	if err := c.db.Take(v, "key = ?", name.String()).Error; errors.Is(err, gorm.ErrRecordNotFound) {
		return nil, status.Errorf(codes.NotFound, "%q not found in database", name)
	} else if err != nil {
		return nil, status.Error(codes.Internal, err.Error())
	}
	return v, nil
}
// GetDeployment returns the most recent revision of the named deployment.
// A missing deployment maps to codes.NotFound; other failures to codes.Internal.
func (c *Client) GetDeployment(ctx context.Context, name names.Deployment) (*models.Deployment, error) {
	name = name.Normal()
	// Latest revision first, so First() picks the newest one.
	op := c.db.
		Where("project_id = ?", name.ProjectID).
		Where("api_id = ?", name.ApiID).
		Where("deployment_id = ?", name.DeploymentID).
		Order("revision_create_time desc")

	v := new(models.Deployment)
	// errors.Is matches wrapped errors too, which a plain == comparison misses.
	if err := op.First(v).Error; errors.Is(err, gorm.ErrRecordNotFound) {
		return nil, status.Errorf(codes.NotFound, "%q not found in database", name)
	} else if err != nil {
		return nil, status.Error(codes.Internal, err.Error())
	}

	return v, nil
}
// GetDeploymentRevision returns the specific deployment revision named,
// resolving any revision tag first. Missing rows map to codes.NotFound.
func (c *Client) GetDeploymentRevision(ctx context.Context, name names.DeploymentRevision) (*models.Deployment, error) {
	name, err := c.unwrapDeploymentRevisionTag(ctx, name)
	if err != nil {
		return nil, err
	}

	v := new(models.Deployment)
	// errors.Is matches wrapped errors too, which a plain == comparison misses.
	if err := c.db.Take(v, "key = ?", name.String()).Error; errors.Is(err, gorm.ErrRecordNotFound) {
		return nil, status.Errorf(codes.NotFound, "%q not found in database", name)
	} else if err != nil {
		return nil, status.Error(codes.Internal, err.Error())
	}
	return v, nil
}
// GetArtifact returns the stored artifact with the given name.
// A missing row maps to codes.NotFound; other failures to codes.Internal.
func (c *Client) GetArtifact(ctx context.Context, name names.Artifact) (*models.Artifact, error) {
	v := new(models.Artifact)
	// errors.Is matches wrapped errors too, which a plain == comparison misses.
	if err := c.db.Take(v, "key = ?", name.String()).Error; errors.Is(err, gorm.ErrRecordNotFound) {
		return nil, status.Errorf(codes.NotFound, "%q not found in database", name)
	} else if err != nil {
		return nil, status.Error(codes.Internal, err.Error())
	}
	return v, nil
}
// GetArtifactContents returns the blob holding the named artifact's contents.
// A missing row maps to codes.NotFound; other failures to codes.Internal.
func (c *Client) GetArtifactContents(ctx context.Context, name names.Artifact) (*models.Blob, error) {
	v := new(models.Blob)
	// errors.Is matches wrapped errors too, which a plain == comparison misses.
	if err := c.db.Take(v, "key = ?", name.String()).Error; errors.Is(err, gorm.ErrRecordNotFound) {
		return nil, status.Errorf(codes.NotFound, "%q not found in database", name)
	} else if err != nil {
		return nil, status.Error(codes.Internal, err.Error())
	}
	return v, nil
}
| apigee/registry | server/registry/internal/storage/get.go | GO | apache-2.0 | 5,445 |
package com.cody.app.framework.hybrid.core;
import com.cody.xf.common.NotProguard;
/**
 * Created by cody.yi on 2017/4/13.
 *
 * Marker interface implemented by every JS-bridge handler class.
 * Methods invoked from JavaScript must follow a fixed signature or the
 * call will not take effect. Expected shape:
 *
 * public static void ***(WebView webView, JsonObject data, JsCallback callback){}
 */
@NotProguard
public interface JsHandler {
}
| codyer/CleanFramework | app/src/main/java/com/cody/app/framework/hybrid/core/JsHandler.java | Java | apache-2.0 | 380 |
var SPI = require('spi');
var extend = require('util')._extend;
// Open the default SPI device (bus 0, chip-select 0) and return the handle.
var openSpi = function() {
    return new SPI.Spi('/dev/spidev0.0', [], function(bus) {
        bus.open();
    });
};
// Shared base "class" for MCP ADC chips. Concrete constructors (Mcp3208,
// Mcp3008) call Adc.call(this, options) and then define this.maxValue and
// this.readSpi — assumed here but not set; confirm against subclasses.
var Adc = function(options) {
    var self = this;

    var settings = extend( {
        voltage: 3.3, //3.3V by default
        spi: null
    }, options);

    self.voltage = settings.voltage;
    // Use the caller-supplied SPI handle, or open the default device.
    self.spi = settings.spi || openSpi();

    /**
     * Read voltage (in a range 0-3.3V by default).
     * @param channel
     * @param callback
     */
    self.readVoltage = function(channel, callback) {
        self.readSpi(channel, function(value) {
            // Scale the raw reading by the reference voltage.
            var voltage = ((value * self.voltage) / self.maxValue);
            callback(voltage);
        });
    };

    /**
     * Read normalized value (in a range 0-1).
     * @param channel
     * @param callback
     */
    self.readNormalizedValue = function(channel, callback) {
        self.readSpi(channel, function(value) {
            var normalizedValue = ((value) / self.maxValue);
            callback(normalizedValue);
        });
    };

    /**
     * Read raw ADC value (the range depends on ADC resolution).
     * @param channel
     * @param callback
     */
    self.readRawValue = function(channel, callback) {
        self.readSpi(channel, callback);
    }
};
// MCP3208: 12-bit, 8-channel ADC.
module.exports.Mcp3208 = function(options) {
    var self = this;
    Adc.call(self, options);

    self.maxValue = 4095; // 2^12 - 1

    self.readSpi = function(channel, callback) {
        // Buffer.from/Buffer.alloc replace the deprecated, unsafe
        // `new Buffer(...)` constructor (same bytes, zero-filled rx).
        var tx = Buffer.from([4 | 2 | (channel >> 2), (channel & 3) << 6, 0]);
        var rx = Buffer.alloc(3);
        self.spi.transfer(tx, rx, function(dev, buffer) {
            // 12-bit result: low nibble of byte 1 plus byte 2.
            var value = ((buffer[1] & 15) << 8) + buffer[2];
            callback(value);
        })
    };
};
// MCP3008: 10-bit, 8-channel ADC.
module.exports.Mcp3008 = function(options) {
    var self = this;
    Adc.call(self, options);

    self.maxValue = 1023; // 2^10 - 1

    self.readSpi = function(channel, callback) {
        // Buffer.from/Buffer.alloc replace the deprecated, unsafe
        // `new Buffer(...)` constructor (same bytes, zero-filled rx).
        var tx = Buffer.from([1, (8 + channel) << 4, 0]);
        var rx = Buffer.alloc(3);
        self.spi.transfer(tx, rx, function(dev, buffer) {
            // 10-bit result: low two bits of byte 1 plus byte 2.
            var value = ((buffer[1] & 3) << 8) + buffer[2];
            callback(value);
        })
    };
};
| anha1/mcp-adc | mcp-adc.js | JavaScript | apache-2.0 | 2,219 |
package org.estatio.module.turnover.dom.entry;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.List;
import javax.inject.Inject;
import org.joda.time.LocalDate;
import org.apache.isis.applib.annotation.Action;
import org.apache.isis.applib.annotation.ActionLayout;
import org.apache.isis.applib.annotation.Contributed;
import org.apache.isis.applib.annotation.DomainObject;
import org.apache.isis.applib.annotation.Editing;
import org.apache.isis.applib.annotation.Nature;
import org.apache.isis.applib.annotation.Property;
import org.apache.isis.applib.annotation.SemanticsOf;
import org.apache.isis.applib.annotation.Where;
import org.estatio.module.asset.dom.Unit;
import org.estatio.module.lease.dom.Lease;
import org.estatio.module.turnover.dom.Frequency;
import org.estatio.module.turnover.dom.Turnover;
import org.estatio.module.turnover.dom.TurnoverReportingConfig;
import org.estatio.module.turnover.dom.TurnoverRepository;
import org.estatio.module.turnover.dom.Type;
import lombok.Getter;
import lombok.Setter;
/**
 * Apache Isis view model used as a worksheet row for entering turnover
 * figures against a reporting configuration for a given date.
 */
@DomainObject(nature = Nature.VIEW_MODEL, objectType = "org.estatio.module.turnover.dom.entry.TurnoverEntryRequest")
public class TurnoverEntryRequest {

    // No-arg constructor required for view-model recreation.
    public TurnoverEntryRequest(){}

    public TurnoverEntryRequest(
            final TurnoverReportingConfig config,
            final LocalDate date,
            final Type type,
            final Frequency frequency
    ){
        this.config = config;
        this.date = date;
        this.type = type;
        this.frequency = frequency;
    }

    // Backing config; hidden in the UI, reached via lease/unit below.
    @Getter @Setter
    @Property(hidden = Where.EVERYWHERE)
    private TurnoverReportingConfig config;

    // NOTE(review): name breaks the JavaBean convention (getlease vs getLease);
    // confirm whether the framework resolves it before renaming.
    public Lease getlease(){
        return getConfig().getOccupancy().getLease();
    }

    public Unit getUnit(){
        return getConfig().getOccupancy().getUnit();
    }

    // Reporting date this entry applies to.
    @Getter @Setter
    private LocalDate date;

    @Getter @Setter
    private Type type;

    @Getter @Setter
    private Frequency frequency;

    // User-editable turnover figures for this row.
    @Getter @Setter
    @Property(editing = Editing.ENABLED)
    private BigDecimal netAmount;

    @Getter @Setter
    @Property(editing = Editing.ENABLED)
    private BigDecimal grossAmount;

    @Getter @Setter
    @Property(editing = Editing.ENABLED)
    private BigInteger purchaseCount;

    @Getter @Setter
    @Property(editing = Editing.ENABLED)
    private boolean nonComparable;

    /** Earlier approved turnovers of the same config/type/frequency, for reference. */
    @Action(semantics = SemanticsOf.SAFE)
    @ActionLayout(contributed = Contributed.AS_ASSOCIATION)
    public List<Turnover> getPreviousTurnoverEntries(){
        return turnoverRepository.findApprovedByConfigAndTypeAndFrequencyBeforeDate(getConfig(), getType(), getFrequency(), getDate());
    }

    @Inject
    TurnoverRepository turnoverRepository;

}
| estatio/estatio | estatioapp/app/src/main/java/org/estatio/module/turnover/dom/entry/TurnoverEntryRequest.java | Java | apache-2.0 | 2,742 |
<?php
// Change-password endpoint.
// Requires a logged-in session ($_SESSION['username']) and POST fields
// changePasswordOldPassword / changePasswordNewPassword /
// changePasswordConfirmPassword. Responds with JSON: {"result": ...}.
@session_start();
@require_once("db.php");
@header("Content-Type: application/json; charset=utf8");

if(!empty($_SESSION['username'])) {
    $cn = connect();
    // SECURITY FIX: escape the session-provided username before it is
    // interpolated into SQL strings below.
    $username = mysql_real_escape_string($_SESSION['username'], $cn);
    $valid = true;
    $isPasswordCorrect = false;
    $message = "";

    // Checking form data.
    if(!empty($_POST['changePasswordNewPassword'])) {
        $newPassword = $_POST['changePasswordNewPassword'];
    } else {
        $valid = false;
        $message .= "No new password received.<br>";
    }
    if(!empty($_POST['changePasswordConfirmPassword'])) {
        $cpassword = $_POST['changePasswordConfirmPassword'];
    } else {
        $valid = false;
        $message .= "No confirm password received.<br>";
    }
    if((!empty($newPassword) && !empty($cpassword)) && ($newPassword != $cpassword)) {
        $valid = false;
        $message .= "New password and confirm password is mismatch.<br>";
    }
    if(!empty($_POST['changePasswordOldPassword'])) {
        $oldPassword = $_POST['changePasswordOldPassword'];
        // NOTE(review): md5() is not a safe password hash; migrating to
        // password_hash()/password_verify() needs a schema + login change.
        $rs = @mysql_query("SELECT COUNT(*) FROM `user` WHERE `username`='" . $username . "' AND `password`='" . md5($oldPassword) . "';", $cn);
        $n = (int) @mysql_result($rs, 0, 0);
        if($n == 1) {
            $isPasswordCorrect = true;
        }
    } else {
        $valid = false;
        $message .= "No old password received.<br>";
    }
    if($valid) {
        if($isPasswordCorrect) {
            // Changing password.
            $encNewPassword = md5($newPassword);
            $encOldPassword = md5($oldPassword);
            $query = <<<QUERY
UPDATE `user` SET `password`='$encNewPassword' WHERE `username`='$username' AND `password`='$encOldPassword';
QUERY;
            if(@mysql_query($query, $cn)) {
                $result = array(
                    "result" => "success"
                );
            } else {
                $result = array(
                    "result" => "fail"
                );
            }
        } else {
            $result = array(
                "result" => "incorrect_pass"
            );
        }
    } else {
        // Report the accumulated validation errors.
        $result = array(
            "result" => $message
        );
    }
    disconnect($cn);
} else {
    // No session: respond generically, revealing nothing.
    $result = array(
        "result" => "fail"
    );
}
echo(json_encode($result));
?>
# Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from sqlalchemy import Column, ForeignKey, CheckConstraint, \
PrimaryKeyConstraint, func, or_, and_, true, literal_column, \
select, cast, TEXT
from sqlalchemy.orm import relationship, backref, Query
from sqlalchemy.types import BigInteger, Enum, Integer
from pycroft.model.base import ModelBase
from pycroft.model.ddl import DDLManager, Function, Trigger, View
from pycroft.model.types import DateTimeTz
from pycroft.model.user import User
from pycroft.model.host import IP, Host, Interface
ddl = DDLManager()
class TrafficEvent:
    # Mixin carrying the columns shared by traffic-event tables.
    # timestamp: when the event was recorded; defaults to the DB server clock.
    timestamp = Column(DateTimeTz, server_default=func.current_timestamp(), nullable=False)
    # amount: traffic volume in bytes; constrained to be non-negative.
    amount = Column(BigInteger, CheckConstraint('amount >= 0'),
                    nullable=False)
class TrafficVolume(TrafficEvent, ModelBase):
    # One aggregated row per (ip, direction, day) — see the upsert functions
    # below, which truncate timestamps to day granularity.
    __table_args__ = (
        PrimaryKeyConstraint('ip_id', 'type', 'timestamp'),
    )
    # Direction of the counted traffic.
    type = Column(Enum("Ingress", "Egress", name="traffic_direction"),
                  nullable=False)
    # The IP the traffic was counted for; rows die with the IP.
    ip_id = Column(Integer, ForeignKey(IP.id, ondelete="CASCADE"),
                   nullable=False, index=True)
    ip = relationship(IP, backref=backref("traffic_volumes",
                                          cascade="all, delete-orphan",
                                          cascade_backrefs=False))
    # Denormalized owner of the IP's host at insert time (may be NULL).
    user_id = Column(Integer, ForeignKey(User.id, ondelete='CASCADE'),
                     nullable=True, index=True)
    user = relationship(User,
                        backref=backref("traffic_volumes",
                                        cascade="all, delete-orphan",
                                        cascade_backrefs=False),
                        uselist=False)
    # Packet count; constrained to be non-negative.
    packets = Column(Integer, CheckConstraint('packets >= 0'),
                     nullable=False)
TrafficVolume.__table__.add_is_dependent_on(IP.__table__)
pmacct_traffic_egress = View(
name='pmacct_traffic_egress',
query=(
Query([])
.add_columns(TrafficVolume.packets.label('packets'),
TrafficVolume.amount.label('bytes'),
TrafficVolume.timestamp.label('stamp_inserted'),
TrafficVolume.timestamp.label('stamp_updated'),
IP.address.label('ip_src'))
.select_from(TrafficVolume)
.filter_by(type='Egress')
.join(IP)
.statement # turns our `Selectable` into something compilable
),
)
ddl.add_view(TrafficVolume.__table__, pmacct_traffic_egress)
pmacct_expression_replacements = dict(
tv_tname=TrafficVolume.__tablename__,
tv_type=TrafficVolume.type.key,
tv_ip_id=TrafficVolume.ip_id.key,
tv_timestamp=TrafficVolume.timestamp.key,
tv_amount=TrafficVolume.amount.key,
tv_packets=TrafficVolume.packets.key,
tv_user_id=TrafficVolume.user_id.key,
ip_tname=IP.__tablename__,
ip_id=str(IP.id.expression),
ip_interface_id=str(IP.interface_id.expression),
ip_address=str(IP.address.expression),
host_tname=Host.__tablename__,
host_id=str(Host.id.expression),
host_owner_id=str(Host.owner_id.expression),
interface_tname=Interface.__tablename__,
interface_id=str(Interface.id.expression),
interface_host_id=str(Interface.host_id.expression),
)
pmacct_egress_upsert = Function(
name="pmacct_traffic_egress_insert", arguments=[], language="plpgsql", rtype="trigger",
definition="""BEGIN
INSERT INTO traffic_volume ({tv_type}, {tv_ip_id}, "{tv_timestamp}", {tv_amount}, {tv_packets}, {tv_user_id})
SELECT
'Egress',
{ip_id},
date_trunc('day', NEW.stamp_inserted),
NEW.bytes,
NEW.packets,
{host_owner_id}
FROM {ip_tname}
JOIN {interface_tname} ON {ip_interface_id} = {interface_id}
JOIN {host_tname} ON {interface_host_id} = {host_id}
WHERE NEW.ip_src = {ip_address}
ON CONFLICT ({tv_ip_id}, {tv_type}, "{tv_timestamp}")
DO UPDATE SET ({tv_amount}, {tv_packets}) = ({tv_tname}.{tv_amount} + NEW.bytes,
{tv_tname}.{tv_packets} + NEW.packets);
RETURN NULL;
END;""".format(**pmacct_expression_replacements),
)
pmacct_egress_upsert_trigger = Trigger(
name='pmacct_traffic_egress_insert_trigger', table=pmacct_traffic_egress.table,
events=["INSERT"], function_call="pmacct_traffic_egress_insert()", when="INSTEAD OF"
)
ddl.add_function(TrafficVolume.__table__, pmacct_egress_upsert)
ddl.add_trigger(TrafficVolume.__table__, pmacct_egress_upsert_trigger)
pmacct_traffic_ingress = View(
name='pmacct_traffic_ingress',
query=(
Query([])
.add_columns(TrafficVolume.packets.label('packets'),
TrafficVolume.amount.label('bytes'),
TrafficVolume.timestamp.label('stamp_inserted'),
TrafficVolume.timestamp.label('stamp_updated'),
IP.address.label('ip_dst'))
.select_from(TrafficVolume)
.filter_by(type='Ingress')
.join(IP)
.statement # turns our `Selectable` into something compilable
),
)
ddl.add_view(TrafficVolume.__table__, pmacct_traffic_ingress)
pmacct_ingress_upsert = Function(
name="pmacct_traffic_ingress_insert", arguments=[], language="plpgsql", rtype="trigger",
definition="""BEGIN
INSERT INTO traffic_volume ({tv_type}, {tv_ip_id}, "{tv_timestamp}", {tv_amount}, {tv_packets}, {tv_user_id})
SELECT
'Ingress',
{ip_id},
date_trunc('day', NEW.stamp_inserted),
NEW.bytes,
NEW.packets,
{host_owner_id}
FROM {ip_tname}
JOIN {interface_tname} ON {ip_interface_id} = {interface_id}
JOIN {host_tname} ON {interface_host_id} = {host_id}
WHERE NEW.ip_dst = {ip_address}
ON CONFLICT ({tv_ip_id}, {tv_type}, "{tv_timestamp}")
DO UPDATE SET ({tv_amount}, {tv_packets}) = ({tv_tname}.{tv_amount} + NEW.bytes,
{tv_tname}.{tv_packets} + NEW.packets);
RETURN NULL;
END;""".format(**pmacct_expression_replacements),
)
pmacct_ingress_upsert_trigger = Trigger(
name='pmacct_traffic_ingress_insert_trigger', table=pmacct_traffic_ingress.table,
events=["INSERT"], function_call="pmacct_traffic_ingress_insert()", when="INSTEAD OF"
)
ddl.add_function(TrafficVolume.__table__, pmacct_ingress_upsert)
ddl.add_trigger(TrafficVolume.__table__, pmacct_ingress_upsert_trigger)
def traffic_history_query():
    """Build the SELECT used as the body of the traffic_history() SQL function.

    References the function's parameters (arg_user_id, arg_start, arg_end)
    via literal_column, so the statement is only valid inside that function.
    Generates one row per day in [arg_start, arg_end] with the user's summed
    ingress/egress amounts (NULL for days without traffic).
    """
    # One row per (day, direction); the generate_series outer join keeps
    # days that have no recorded traffic.
    events = (select(func.sum(TrafficVolume.amount).label('amount'),
                     literal_column('day'),
                     cast(TrafficVolume.type, TEXT).label('type')
                     )
              .select_from(
        func.generate_series(
            func.date_trunc('day', literal_column('arg_start')),
            func.date_trunc('day', literal_column('arg_end')),
            '1 day'
        ).alias('day')
        .outerjoin(TrafficVolume.__table__, and_(
            func.date_trunc('day', TrafficVolume.timestamp) == literal_column('day'),
            TrafficVolume.user_id == literal_column('arg_user_id'))
                   )
    )
              .group_by(literal_column('day'), literal_column('type'))
              ).cte()

    # Split by direction; the NULL rows (no traffic) appear in both halves.
    events_ingress = select(events).where(or_(events.c.type == 'Ingress', events.c.type == None)).cte()
    events_egress = select(events).where(or_(events.c.type == 'Egress', events.c.type == None)).cte()

    # Full outer join stitches the two directions back into one row per day.
    hist = (select(func.coalesce(events_ingress.c.day, events_egress.c.day).label('timestamp'),
                   events_ingress.c.amount.label('ingress'),
                   events_egress.c.amount.label('egress'))
            .select_from(events_ingress.join(events_egress,
                                             events_ingress.c.day == events_egress.c.day,
                                             full=true))
            .order_by(literal_column('timestamp'))
            )

    return hist
# SQL function wrapping the query above; marked stable since it only reads.
traffic_history_function = Function(
    'traffic_history', ['arg_user_id int', 'arg_start timestamptz', 'arg_end timestamptz'],
    'TABLE ("timestamp" timestamptz, ingress numeric, egress numeric)',
    definition=traffic_history_query(),
    volatility='stable',
)

ddl.add_function(
    TrafficVolume.__table__,
    traffic_history_function
)
class TrafficHistoryEntry:
    """Plain row object for one day of a user's traffic history."""

    def __init__(self, timestamp, ingress, egress):
        self.timestamp = timestamp
        # Days without recorded traffic arrive as NULL/None; report them as 0.
        self.ingress = ingress if ingress else 0
        self.egress = egress if egress else 0

    def __repr__(self):
        return str(self.__dict__)
# Hook all collected views/functions/triggers into table create/drop events.
ddl.register()
| agdsn/pycroft | pycroft/model/traffic.py | Python | apache-2.0 | 8,996 |
# Copyright (c) 2013 MaestroDev. All rights reserved.
require 'maestro_plugin'
require 'maestro_shell'
require 'open-uri'
require 'xmlsimple'
module MaestroDev
  module Plugin
    # Maestro worker with two tasks: run a Maven build (#execute) and
    # download the latest snapshot/release artifact from a Maven repository
    # via wget (#wget_latest_snapshot).
    class MavenWorker < Maestro::MaestroWorker
      # Run the configured Maven goals in the configured path, streaming
      # build output back to Maestro.  Raises PluginError when Maven fails,
      # quoting any [ERROR]-prefixed lines from the build log.
      def execute
        validate_execute_parameters
        Maestro.log.info "Inputs: goals = #{@goals}"
        Maestro.log.debug "Using Maven version #{@mvn_version}" if !@mvn_version.empty?
        shell = Maestro::Util::Shell.new
        command = create_command
        shell.create_script(command)
        write_output("\nRunning command:\n----------\n#{command.chomp}\n----------\n")
        exit_code = shell.run_script_with_delegate(self, :on_output)
        if !exit_code.success?
          # Error executing maven.
          # Let's see if we can make the error a little more specific:
          # Maven thoughtfully prefixes lines that might be useful to us
          # with [ERROR]... so let's include them in the error.
          error_lines = shell.output.split(/\r\n|\n/).select { |line| line.start_with?('[ERROR]') }
          raise PluginError, "Maven failed executing goal list '#{@goals.empty? ? '[default]' : @goals}'\n" + error_lines.join("\n")
        end
      end
      # Download the newest artifact described by the repository's
      # maven-metadata.xml, saving it as <artifactId>.<packaging> under
      # @path (any previous copy is removed first).  Raises PluginError
      # when the download fails.
      def wget_latest_snapshot
        validate_snapshot_parameters
        Maestro.log.info "Inputs: path = #{@path}," \
          " url = #{@base_url}," \
          " packaging = #{@packaging}"
        maven_metadata_xml = get_maven_metadata_xml
        artifactId = maven_metadata_xml["artifactId"][0]
        unless maven_metadata_xml["versioning"].nil? or maven_metadata_xml["versioning"][0]["snapshot"][0]["timestamp"].nil?
          # Timestamped snapshot: compose the concrete file name from the
          # snapshot timestamp and build number.
          buildNumber = maven_metadata_xml["versioning"][0]["snapshot"][0]["buildNumber"][0]
          timestamp = maven_metadata_xml["versioning"][0]["snapshot"][0]["timestamp"][0]
          version = maven_metadata_xml["version"][0].gsub(/\-SNAPSHOT/,'')
          file = "#{artifactId}-#{version}-#{timestamp}-#{buildNumber}.#{@packaging}"
        else
          # No snapshot metadata: plain versioned artifact name.
          version = maven_metadata_xml["version"][0]
          file = "#{artifactId}-#{version}.#{@packaging}"
        end
        url = @base_url + "/#{file}"
        Maestro.log.debug "Removing Existing File At #{@path}/#{artifactId}.#{@packaging}"
        begin
          FileUtils.rm "#{@path}/#{artifactId}.#{@packaging}"
        rescue Exception
          # Best effort: the file may simply not exist yet.
        end
        write_output("\nDownloading File From #{url}", :buffer => true)
        wget = Maestro::Util::Shell.new
        command = "LANG=C #{@wget_executable} --progress=dot #{url} -O #{@path}/#{artifactId}.#{@packaging} --user=#{@username} --password=#{@password}"
        wget.create_script(command)
        write_output("\nRunning command:\n----------\n#{command.chomp}\n----------\n")
        exit_code = wget.run_script_with_delegate(self, :on_output)
        write_output("\nDownloaded File #{url} To #{@path}/#{artifactId}.#{@packaging}", :buffer => true) if File.exists? "#{@path}/#{artifactId}.#{@packaging}"
        raise PluginError, "Failed to download #{url} to #{@path}/#{artifactId}.#{@packaging}" unless exit_code.success?
      end
      # Shell-output delegate: forward subprocess output to the Maestro
      # output stream as it is produced.
      def on_output(text)
        write_output(text, :buffer => true)
      end
      ###########
      # PRIVATE #
      ###########
      private
      # Probe "<executable> --version" to check the tool is callable.
      # (Because we want to be able to string commands together with &&,
      # we can't really test the executable any other way.)
      def valid_executable?(executable)
        Maestro::Util::Shell.run_command("#{executable} --version")[0].success?
      end
      # Parse the Maven version token out of "mvn -version" output; nil when
      # the command fails.
      def get_version
        result = Maestro::Util::Shell.run_command("#{@mvn_executable} -version")
        result[1].split("\n")[0].split(' (')[0].split(' ')[2] if result[0].success?
      end
      # Read and validate every field used by #execute, populating the
      # instance variables.  Raises ConfigError listing all problems found.
      def validate_execute_parameters
        errors = []
        @mvn_executable = get_field('maven_executable', 'mvn')
        @mvn_version = get_field('maven_version', '')
        @path = get_field('path') || get_field('scm_path')
        @goals = get_field('goals', '')
        @settingsfile = get_field('settingsfile', '')
        @profiles = get_field('profiles', '')
        @properties = get_field('properties', '')
        @environment = get_field('environment', '')
        # Strip any trailing command separator so the env prefix composes
        # cleanly with the "&&" appended here.
        @env = @environment.empty? ? "" : "#{Maestro::Util::Shell::ENV_EXPORT_COMMAND} #{@environment.gsub(/(&&|[;&])\s*$/, '')} && "
        if valid_executable?(@mvn_executable)
          if !@mvn_version.empty?
            version = get_version
            errors << "maven is the wrong version: #{version}. Expected: #{@mvn_version}" if version != @mvn_version
          end
        else
          errors << 'maven not installed'
        end
        errors << 'missing field path' if @path.nil?
        errors << "not found path '#{@path}'" if !@path.nil? && !File.exist?(@path)
        if !@settingsfile.empty?
          if !File.exists?(@settingsfile)
            errors << "specified settings file (#{@settingsfile}) not found"
          end
        end
        process_goals_field
        process_profiles_field
        process_properties_field
        if !errors.empty?
          raise ConfigError, "Configuration errors: #{errors.join(', ')}"
        end
      end
      # Read and validate every field used by #wget_latest_snapshot.
      # Raises ConfigError listing all problems found.
      def validate_snapshot_parameters
        errors = []
        @base_url = get_field('url', '')
        @path = get_field('path', '')
        @username = get_field('username', '')
        @password = get_field('password', '')
        @packaging = get_field('packaging', '')
        @wget_executable = get_field('wget_executable', 'wget')
        errors << 'missing field url' if @base_url.empty?
        errors << 'missing field path' if @path.empty?
        errors << "path not found '#{@path}'" if !@path.empty? && !File.exist?(@path)
        errors << 'missing field username' if @username.empty?
        errors << 'missing field password' if @password.empty?
        errors << 'missing field packaging' if @packaging.empty?
        errors << 'wget not installed' unless valid_executable?(@wget_executable)
        if !errors.empty?
          raise ConfigError, "Configuration errors: #{errors.join(', ')}"
        end
      end
      # Normalise @goals: accept a JSON-encoded array or a Ruby Array and
      # collapse it to a single space-separated string.
      def process_goals_field
        begin
          if is_json(@goals)
            @goals = JSON.parse(@goals) if @goals.is_a? String
          end
        rescue Exception
        end
        if @goals.is_a? Array
          @goals = @goals.join(' ')
        end
      end
      # Normalise @profiles into a " -Pp1,p2,..." command-line fragment
      # (empty string when no profiles are configured).
      def process_profiles_field
        begin
          if is_json(@profiles)
            @profiles = JSON.parse(@profiles) if @profiles.is_a? String
          end
        rescue Exception
        end
        if @profiles.is_a? Array
          @profiles.delete_if{ |profile| profile.empty? }
          @profiles = @profiles.join(',')
        end
        Maestro.log.debug "Enabling Maven profiles: #{@profiles}"
        @profiles = " -P#{@profiles}" unless @profiles.empty?
      end
      # Normalise @properties into "-Dkey=value ..." command-line fragments.
      def process_properties_field
        begin
          if is_json(@properties)
            @properties = JSON.parse(@properties) if @properties.is_a? String
          end
        rescue Exception
        end
        if @properties.is_a? Array
          @properties.delete_if{ |property| property.empty? }
          @properties = @properties.map{|x| "-D#{x}"}.join(' ')
        end
        Maestro.log.debug "Using Maven properties: #{@properties}"
      end
      # Assemble the full shell command (env exports, cd, mvn invocation)
      # and record it in the 'command' output field for traceability.
      def create_command
        settings = "--settings #{@settingsfile} " if !@settingsfile.empty?
        shell_command = "#{@env}cd #{@path} && #{@mvn_executable} -B #{settings}#{@goals}#{@profiles} #{@properties}"
        set_field('command', shell_command)
        Maestro.log.debug("Running #{shell_command}")
        shell_command
      end
      # Fetch and parse maven-metadata.xml from @base_url using HTTP basic
      # auth.  Raises PluginError on any network/HTTP/content problem.
      # NOTE(review): the bare `rescue` below also catches the PluginErrors
      # raised inside the begin block, re-wording them as connection
      # failures — confirm this is intended.
      def get_maven_metadata_xml
        write_output "\nRequesting maven-metadata.xml With Username #{@username} And Password"
        begin
          response = open(@base_url + "/maven-metadata.xml", :http_basic_authentication => [@username, @password])
          raise PluginError, 'Failed To Retrieve maven-metadata.xml (no response from server)' unless response
          case response.status[0]
          when "200"
            maven_metadata_xml = XmlSimple.xml_in(response.read)
            Maestro.log.debug "\nResponse Received #{response.status[0]}\n#{maven_metadata_xml}"
          else
            raise PluginError, "Failed To Retrieve maven-metadata.xml #{response}"
          end
          raise PluginError, "Missing Version Or ArtifactID " if maven_metadata_xml["artifactId"].nil? or maven_metadata_xml["version"].nil?
        rescue Timeout::Error
          raise PluginError, 'Failed To Retrieve maven-metadata.xml (timeout)'
        rescue
          raise PluginError, 'Failed To Retrieve maven-metadata.xml (unable to connect to server)'
        end
        maven_metadata_xml
      end
    end
  end
end
| maestrodev/maestro-maven-plugin | src/maven_worker.rb | Ruby | apache-2.0 | 8,897 |
// Number of file-input rows currently shown; used to cap the form at 5 rows.
// NOTE(review): nbinput is never incremented anywhere in this file, so the
// `nbinput < 5` guard below always passes — confirm the cap actually works.
var nbinput = 1;
// jQuery handle on the Symfony form-collection container (#inputBlock).
var $collectionHolder;
$(function() {
    // Server-rendered flag: warn when some uploaded names already exist.
    var $alreadyUsed = $("#alreadyExistVar").html();
    if ($alreadyUsed) {
        showDialog("#some-exists-dialog");
    }
    // Get the ul that holds the collection of tags
    $collectionHolder = $('#inputBlock');
    // count the current form inputs we have (e.g. 2), use that as the new
    // index when inserting a new item (e.g. 2)
    $collectionHolder.data('index', $collectionHolder.find('.fileInput').length);
    $('#newInputButton').on('click', function(e) {
        if (nbinput < 5) {
            // prevent the link from creating a "#" on the URL
            e.preventDefault();
            // add a new tag form (see next code block)
            addTagForm($collectionHolder);
        } else {
            showDialog("#limitFileDialog");
        }
    });
});
// Re-submit the upload form once the user confirms overriding existing files.
$("#confirmOverride").click(function(){
    $("#upload-block form").submit();
});
/**
 * File-input change handler: derives the extension from the chosen file
 * name, writes it into the row's ".input-format" field, and colours the
 * custom file button green as feedback that a file was selected.
 * NOTE(review): a name without a dot yields the whole name as "format"
 * (lastIndexOf returns -1, substring(0)) — confirm that is acceptable.
 */
function changeInputFile(element) {
    // Walk up to the row container holding this input's sibling fields.
    var lineInput = $(element).parent().parent().parent();
    var filename = $(element).val();
    var n = filename.lastIndexOf('.');
    var format = filename.substring(n + 1);
    $('.input-format', lineInput).val(format);
    $(element).parent().addClass("bg-green");
    $(element).parent().removeClass("bg-gray");
}
/**
 * Clone the Symfony collection data-prototype into a new file-input row
 * and append it to #inputBlock.  Only a subset of the prototype's inputs
 * is kept; initials/date/code fields are pre-filled (date and code are
 * copied from the first row's values) and a remove button is attached.
 */
function addTagForm($collectionHolder) {
    // Get the data-prototype explained earlier
    var prototype = $collectionHolder.data('prototype');
    // get the new index
    var index = $collectionHolder.data('index');
    // Replace '__name__' in the prototype's HTML to
    // instead be a number based on how many items we have
    var newForm = prototype.replace(/__name__/g, index);
    // increase the index with one for the next item
    $collectionHolder.data('index', index + 1);
    var $newFormLi = $('<div></div>').append(newForm);
    var newBlock =$('<div id="lineInput_' + (index+1) + '" class="row cells12 fileInput"></div>');
    // Re-parent each prototype input into the Metro-UI grid layout.
    // NOTE: this callback's `index` parameter shadows the collection index above.
    $('input', $newFormLi).each(function(index, element){
        var name = element.name;
        var $block = $('<div class="input-control text full-size"></div>').append(element);
        // Fields handled server-side are not shown in the row.
        if (contains(name, 'category') || contains(name, 'creator') || contains(name, 'nameAlreadyUsed'))
            return;
        if (contains(name, 'initials') || contains(name, 'date')) {
            $block = $('<div class="cell colspan2"></div>').append($block);
            if(contains(name, 'initials')) {
                $('input', $block).val("AM");
            }
            if(contains(name, 'date')) {
                // Copy the date from the first document row.
                $('input', $block).val($('#sw_docmanagerbundle_uploadsession_documents_0_date').val());
            }
        } else if (contains(name, 'name')) {
            $block = $('<div class="cell colspan4"></div>').append($block);
        } else if (contains(name, 'format') || contains(name, 'code')) {
            $block = $('<div class="cell colspan1"></div>').append($block);
            if(contains(name, 'code')) {
                // Code is copied from the first row and made read-only.
                $('input', $block).val($('#sw_docmanagerbundle_uploadsession_documents_0_code').val());
                $('input', $block).attr("readonly","readonly");
            }
            if(contains(name, 'format')) {
                // Tag so changeInputFile() can fill it from the file name.
                $('input', $block).attr("class",'input-format');
            }
        } else if (contains(name, 'file')) {
            $('input', $block).attr("onchange",'changeInputFile(this)');
            $block = $('<div class="input-control full-size input-file-custom button full-size bg-gray fg-white">Datei</div>').append(element);
            $block = $('<div class="cell colspan1"></div>').append($block);
        } else {
            // Unknown inputs are skipped (continue iteration).
            return true;
        }
        newBlock.append($block);
    });
    // Per-row remove button wired to removeLine().
    var $blockRemove = '<div class="cell colspan1 v-align-middle padding10">';
    $blockRemove += '<button onclick="removeLine(lineInput_' + (index+1) + ')" class="removeInputButton button mini-button cycle-button">-</button>';
    $blockRemove += '</div>';
    newBlock.append($blockRemove);
    newBlock.hide();
    newBlock.appendTo($("#inputBlock")).show("slow");
    // Enable the "weiter" (continue) button only when at least one row exists.
    $("#sw_docmanagerbundle_uploadsession_weiter").prop(
        'disabled', ($("#inputBlock").children().length < 1));
}
/**
 * Whether `chartext` occurs anywhere within `text`.
 * @param {string} text     the string to search in
 * @param {string} chartext the substring to look for
 * @returns {boolean} true when found (an empty needle always matches)
 */
function contains(text, chartext) {
    return text.indexOf(chartext) >= 0;
}
/**
 * Remove a dynamically added file-input row (called from the row's remove
 * button) and disable the continue button if no rows remain.
 * @param id DOM element of the row (passed via inline onclick handler).
 */
function removeLine(id) {
    $(id).remove();
    $("#sw_docmanagerbundle_uploadsession_weiter").prop(
        'disabled', ($("#inputBlock").children().length < 1));
} | adrienManikon/docmanager | src/SW/DocManagerBundle/Resources/public/js/upload.js | JavaScript | apache-2.0 | 4586
package org.jfrog.hudson.pipeline.scripted.steps.conan;
import com.google.inject.Inject;
import hudson.EnvVars;
import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.model.Run;
import hudson.model.TaskListener;
import hudson.util.ArgumentListBuilder;
import org.apache.commons.lang.StringUtils;
import org.jenkinsci.plugins.workflow.steps.AbstractStepDescriptorImpl;
import org.jenkinsci.plugins.workflow.steps.AbstractStepImpl;
import org.jenkinsci.plugins.workflow.steps.AbstractSynchronousStepExecution;
import org.jenkinsci.plugins.workflow.steps.StepContextParameter;
import org.jfrog.hudson.pipeline.common.Utils;
import org.jfrog.hudson.pipeline.common.types.ConanClient;
import org.jfrog.hudson.util.ExtractorUtils;
import org.kohsuke.stapler.DataBoundConstructor;
import java.io.File;
import java.util.Calendar;
/**
 * Scripted-pipeline step "initConanClient": prepares a {@link ConanClient}
 * by resolving/creating its Conan home directory and pointing Conan's
 * trace log at a file inside it (via "conan config set").
 */
public class InitConanClientStep extends AbstractStepImpl {
    // Client configuration supplied from the pipeline script.
    private ConanClient client;
    @DataBoundConstructor
    public InitConanClientStep(ConanClient client) {
        this.client = client;
    }
    public ConanClient getClient() {
        return client;
    }
    /** Synchronous execution body for the step. */
    public static class Execution extends AbstractSynchronousStepExecution<Boolean> {
        private static final long serialVersionUID = 1L;
        @StepContextParameter
        private transient Run build;
        @StepContextParameter
        private transient TaskListener listener;
        @StepContextParameter
        private transient Launcher launcher;
        @Inject(optional = true)
        private transient InitConanClientStep step;
        @StepContextParameter
        private transient FilePath ws;
        @StepContextParameter
        private transient EnvVars env;
        /**
         * Resolve the client's home directory, then run
         * {@code conan config set log.trace_file="<home>/<log>"} so the
         * Conan trace log lands inside that home.
         */
        @Override
        protected Boolean run() throws Exception {
            ConanClient conanClient = getConanClient();
            EnvVars extendedEnv = new EnvVars(env);
            // Conan resolves its home from the CONAN_USER_HOME variable.
            extendedEnv.put(Utils.CONAN_USER_HOME, conanClient.getUserPath());
            ArgumentListBuilder args = new ArgumentListBuilder();
            String logFilePath = conanClient.getLogFilePath();
            args.addTokenized("conan config set");
            // We need to add quotation marks before we save the log file path
            args.add("log.trace_file=\"" + StringUtils.trim(logFilePath) + "\"");
            Utils.exeConan(args, ws, launcher, listener, build, extendedEnv);
            return true;
        }
        /**
         * Determine the Conan home directory: use the configured user path
         * (creating it if missing); otherwise fall back to CONAN_USER_HOME
         * from the environment, or a fresh temp dir under the workspace.
         * Also touches the Conan log file inside the chosen home.
         */
        private ConanClient getConanClient() throws Exception {
            ConanClient conanClient = step.getClient();
            conanClient.setUnixAgent(launcher.isUnix());
            FilePath conanHomeDirectory;
            if (StringUtils.isEmpty(conanClient.getUserPath())) {
                conanHomeDirectory = env.containsKey(Utils.CONAN_USER_HOME) ? new FilePath(new File(env.get(Utils.CONAN_USER_HOME))) : createConanTempHome();
            } else {
                conanHomeDirectory = new FilePath(launcher.getChannel(), conanClient.getUserPath());
                if (!conanHomeDirectory.exists()) {
                    conanHomeDirectory.mkdirs();
                }
            }
            conanClient.setUserPath(conanHomeDirectory.getRemote());
            conanHomeDirectory.child(ConanClient.CONAN_LOG_FILE).touch(Calendar.getInstance().getTimeInMillis());
            return conanClient;
        }
        /** Create a fresh Conan home under the workspace's @tmp directory. */
        private FilePath createConanTempHome() throws Exception {
            // Create the @tmp directory
            FilePath tempDir = ExtractorUtils.createAndGetTempDir(ws);
            // Create the conan directory
            return tempDir.createTempDir("conan", "");
        }
    }
    @Extension
    public static final class DescriptorImpl extends AbstractStepDescriptorImpl {
        public DescriptorImpl() {
            super(InitConanClientStep.Execution.class);
        }
        @Override
        public String getFunctionName() {
            return "initConanClient";
        }
        @Override
        public String getDisplayName() {
            return "Create Conan Client";
        }
        @Override
        public boolean isAdvanced() {
            return true;
        }
    }
} | AlexeiVainshtein/jenkins-artifactory-plugin | src/main/java/org/jfrog/hudson/pipeline/scripted/steps/conan/InitConanClientStep.java | Java | apache-2.0 | 4174
package it.unibz.inf.ontop.ontology.impl;
/*
* #%L
* ontop-obdalib-core
* %%
* Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import it.unibz.inf.ontop.ontology.DataPropertyExpression;
import it.unibz.inf.ontop.ontology.DataSomeValuesFrom;
import it.unibz.inf.ontop.ontology.Datatype;
/**
 * DataSomeValuesFrom in the OWL 2 QL specification:
 * <p>
 * DataSomeValuesFrom := 'DataSomeValuesFrom' '(' DataPropertyExpression DataRange ')'
 * <p>
 * Rule [D5] is partially implemented by {@link #isTop()} and
 * {@link #isBottom()}: the class expression is equivalent to top when the
 * property is top (the data range cannot be empty), and equivalent to
 * bottom when the property is bottom.
 *
 * @author Roman Kontchakov
 */
public class DataSomeValuesFromImpl implements DataSomeValuesFrom {

	private static final long serialVersionUID = 593821958539751283L;

	private final DataPropertyExpression property;
	private final Datatype filler;
	// Cached textual form "E <property>.<datatype>"; also backs hashCode().
	private final String string;

	DataSomeValuesFromImpl(DataPropertyExpression property, Datatype filler) {
		this.property = property;
		this.filler = filler;
		this.string = "E " + property.toString() + "." + filler.toString();
	}

	@Override
	public DataPropertyExpression getProperty() {
		return property;
	}

	@Override
	public Datatype getDatatype() {
		return filler;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (!(obj instanceof DataSomeValuesFromImpl))
			return false;
		DataSomeValuesFromImpl that = (DataSomeValuesFromImpl) obj;
		return property.equals(that.property) && filler.equals(that.filler);
	}

	@Override
	public int hashCode() {
		// Consistent with equals(): the string is derived from property + filler.
		return string.hashCode();
	}

	@Override
	public String toString() {
		return string;
	}

	@Override
	public boolean isBottom() {
		return property.isBottom();
	}

	@Override
	public boolean isTop() {
		return property.isTop();
	}
}
| srapisarda/ontop | obdalib-core/src/main/java/it/unibz/inf/ontop/ontology/impl/DataSomeValuesFromImpl.java | Java | apache-2.0 | 2,538 |
using MvcRouteTester.Assertions;
using NUnit.Framework;
namespace MvcRouteTester.AttributeRouting.Test
{
	/// <summary>
	/// NUnit-backed implementation of <see cref="IAssertEngine"/>.
	/// </summary>
	public class NunitAssertEngine : IAssertEngine
	{
		[System.Diagnostics.DebuggerNonUserCode]
		public void Fail(string message)
		{
			Assert.Fail(message);
		}

		/// <summary>
		/// Case-insensitive string equality assertion.  Null and empty
		/// strings are treated as equal to each other; otherwise the check
		/// defers to NUnit's StringAssert so its detailed error message is
		/// preserved (a plain Fail wrapper would lose those details).
		/// </summary>
		[System.Diagnostics.DebuggerNonUserCode]
		public void StringsEqualIgnoringCase(string s1, string s2, string message)
		{
			var bothEmpty = string.IsNullOrEmpty(s1) && string.IsNullOrEmpty(s2);
			if (!bothEmpty)
			{
				StringAssert.AreEqualIgnoringCase(s1, s2, message);
			}
		}
	}
}
| AnthonySteele/MvcRouteTester | src/MvcRouteTester.AttributeRouting.Test/NunitAssertEngine.cs | C# | apache-2.0 | 762 |
package org.deepjava.runtime.mpc555;
import org.deepjava.runtime.ppc32.Ippc32;
// Auto generated file (2021-02-17 13:57:50)
public interface Impc555 extends Ippc32 {
// System constants of CPU mpc555
public static final int SRR1init = 0x3802;
public static final int stackSize = 0x2000;
public static final int sysTabBaseAddr = 0x4000;
public static final int excpCodeSize = 0x4000;
public static final int excpCodeBase = 0x0;
public static final int CMFB_Size = 0x30000;
public static final int CMFB_BaseAddr = 0x40000;
public static final int CMFA_Size = 0x40000;
public static final int CMFA_BaseAddr = 0x0;
public static final int SRAMB_Size = 0x4000;
public static final int SRAMB_BaseAddr = 0x3fc000;
public static final int SRAMA_Size = 0x2800;
public static final int SRAMA_BaseAddr = 0x3f9800;
public static final int IMB = 0x0;
// Specific registers of CPU mpc555
public static final int SPR80 = 0x50;
public static final int EIE = 0x50;
public static final int SPR81 = 0x51;
public static final int EID = 0x51;
public static final int SPR82 = 0x52;
public static final int NRI = 0x52;
public static final int SPR144 = 0x90;
public static final int CMPA = 0x90;
public static final int SPR145 = 0x91;
public static final int CMPB = 0x91;
public static final int SPR146 = 0x92;
public static final int CMPC = 0x92;
public static final int SPR147 = 0x93;
public static final int CMPD = 0x93;
public static final int SPR148 = 0x94;
public static final int ECR = 0x94;
public static final int SPR149 = 0x95;
public static final int DER = 0x95;
public static final int SPR150 = 0x96;
public static final int COUNTA = 0x96;
public static final int SPR151 = 0x97;
public static final int COUNTB = 0x97;
public static final int SPR152 = 0x98;
public static final int CMPE = 0x98;
public static final int SPR153 = 0x99;
public static final int CMPF = 0x99;
public static final int SPR154 = 0x9a;
public static final int CMPG = 0x9a;
public static final int SPR155 = 0x9b;
public static final int CMPH = 0x9b;
public static final int SPR156 = 0x9c;
public static final int LCTRL1 = 0x9c;
public static final int SPR157 = 0x9d;
public static final int LCTRL2 = 0x9d;
public static final int SPR158 = 0x9e;
public static final int ICTRL = 0x9e;
public static final int SPR159 = 0x9f;
public static final int BAR = 0x9f;
public static final int PVR = 0x11f;
public static final int SPR528 = 0x210;
public static final int MI_GRA = 0x210;
public static final int SPR536 = 0x218;
public static final int L2U_GRA = 0x218;
public static final int SPR560 = 0x230;
public static final int BBCMCR = 0x230;
public static final int SPR568 = 0x238;
public static final int L2U_MCR = 0x238;
public static final int SPR638 = 0x27e;
public static final int IMMR = 0x27e;
public static final int SPR784 = 0x310;
public static final int MI_RBA0 = 0x310;
public static final int SPR785 = 0x311;
public static final int MI_RBA1 = 0x311;
public static final int SPR786 = 0x312;
public static final int MI_RBA2 = 0x312;
public static final int SPR787 = 0x313;
public static final int MI_RBA3 = 0x313;
public static final int SPR792 = 0x318;
public static final int L2U_RBA0 = 0x318;
public static final int SPR793 = 0x319;
public static final int L2U_RBA1 = 0x319;
public static final int SPR794 = 0x31a;
public static final int L2U_RBA2 = 0x31a;
public static final int SPR795 = 0x31b;
public static final int L2U_RBA3 = 0x31b;
public static final int SPR816 = 0x330;
public static final int MI_RA0 = 0x330;
public static final int SPR817 = 0x331;
public static final int MI_RA1 = 0x331;
public static final int SPR818 = 0x332;
public static final int MI_RA2 = 0x332;
public static final int SPR819 = 0x333;
public static final int MI_RA3 = 0x333;
public static final int SPR824 = 0x338;
public static final int L2U_RA0 = 0x338;
public static final int SPR825 = 0x339;
public static final int L2U_RA1 = 0x339;
public static final int SPR826 = 0x33a;
public static final int L2U_RA2 = 0x33a;
public static final int SPR827 = 0x33b;
public static final int L2U_RA3 = 0x33b;
public static final int SPR1022 = 0x3fe;
public static final int FPECR = 0x3fe;
public static final int SIUMCR = 0x2fc000;
public static final int SYPCR = 0x2fc004;
public static final int SWSR = 0x2fc00e;
public static final int SIPEND = 0x2fc010;
public static final int SIMASK = 0x2fc014;
public static final int SIEL = 0x2fc018;
public static final int SIVEC = 0x2fc01c;
public static final int TESR = 0x2fc020;
public static final int SGPIODT1 = 0x2fc024;
public static final int SGPIODT2 = 0x2fc028;
public static final int SGPIOCR = 0x2fc02c;
public static final int EMCR = 0x2fc030;
public static final int PDMCR = 0x2fc03c;
public static final int BR0 = 0x2fc100;
public static final int OR0 = 0x2fc104;
public static final int BR1 = 0x2fc108;
public static final int OR1 = 0x2fc10c;
public static final int BR2 = 0x2fc110;
public static final int OR2 = 0x2fc114;
public static final int BR3 = 0x2fc118;
public static final int OR3 = 0x2fc11c;
public static final int DMBR = 0x2fc140;
public static final int DMOR = 0x2fc144;
public static final int MSTAT = 0x2fc178;
public static final int TBSCR = 0x2fc200;
public static final int TBREF0 = 0x2fc204;
public static final int TBREF1 = 0x2fc208;
public static final int RTCSC = 0x2fc220;
public static final int RTC = 0x2fc224;
public static final int RTSEC = 0x2fc228;
public static final int RTCAL = 0x2fc22c;
public static final int PISCR = 0x2fc240;
public static final int PITC = 0x2fc244;
public static final int PITR = 0x2fc248;
public static final int SCCR = 0x2fc280;
public static final int PLPRCR = 0x2fc284;
public static final int RSR = 0x2fc288;
public static final int COLIR = 0x2fc28c;
public static final int VSRMCR = 0x2fc290;
public static final int TBSCRK = 0x2fc300;
public static final int TBREF0K = 0x2fc304;
public static final int TBREF1K = 0x2fc308;
public static final int TBK = 0x2fc30c;
public static final int RTCSCK = 0x2fc320;
public static final int RTCK = 0x2fc324;
public static final int RTSECK = 0x2fc328;
public static final int RTCALK = 0x2fc32c;
public static final int PISCRIK = 0x2fc340;
public static final int PITCK = 0x2fc344;
public static final int SCCRK = 0x2fc380;
public static final int PLPRCRK = 0x2fc384;
public static final int RSRK = 0x2fc388;
public static final int CMFMCR_A = 0x2fc800;
public static final int CMFTST_A = 0x2fc804;
public static final int CMFCTL_A = 0x2fc808;
public static final int CMFMCR_B = 0x2fc840;
public static final int CMFTST_B = 0x2fc844;
public static final int CMFCTL_B = 0x2fc848;
public static final int DPTMCR = 0x300000;
public static final int RAMBAR = 0x300004;
public static final int MISRH = 0x300006;
public static final int MISRL = 0x300008;
public static final int MISCNT = 0x30000a;
public static final int TPUMCR_A = 0x304000;
public static final int DSCR_A = 0x304004;
public static final int DSSR_A = 0x304006;
public static final int TICR_A = 0x304008;
public static final int CIER_A = 0x30400a;
public static final int CFSR0_A = 0x30400c;
public static final int CFSR1_A = 0x30400e;
public static final int CFSR2_A = 0x304010;
public static final int CFSR3_A = 0x304012;
public static final int HSQR0_A = 0x304014;
public static final int HSQR1_A = 0x304016;
public static final int HSRR0_A = 0x304018;
public static final int HSRR1_A = 0x30401a;
public static final int CPR0_A = 0x30401c;
public static final int CPR1_A = 0x30401e;
public static final int CISR_A = 0x304020;
public static final int TPUMCR2_A = 0x304028;
public static final int TPUMCR3_A = 0x30402a;
public static final int TPURAM0_A = 0x304100;
public static final int TPURAM1_A = 0x304110;
public static final int TPURAM2_A = 0x304120;
public static final int TPURAM3_A = 0x304130;
public static final int TPURAM4_A = 0x304140;
public static final int TPURAM5_A = 0x304150;
public static final int TPURAM6_A = 0x304160;
public static final int TPURAM7_A = 0x304170;
public static final int TPURAM8_A = 0x304180;
public static final int TPURAM9_A = 0x304190;
public static final int TPURAM10_A = 0x3041a0;
public static final int TPURAM11_A = 0x3041b0;
public static final int TPURAM12_A = 0x3041c0;
public static final int TPURAM13_A = 0x3041d0;
public static final int TPURAM14_A = 0x3041e0;
public static final int TPURAM15_A = 0x3041f0;
public static final int TPUMCR_B = 0x304400;
public static final int DSCR_B = 0x304404;
public static final int DSSR_B = 0x304406;
public static final int TICR_B = 0x304408;
public static final int CIER_B = 0x30440a;
public static final int CFSR0_B = 0x30440c;
public static final int CFSR1_B = 0x30440e;
public static final int CFSR2_B = 0x304410;
public static final int CFSR3_B = 0x304412;
public static final int HSQR0_B = 0x304414;
public static final int HSQR1_B = 0x304416;
public static final int HSRR0_B = 0x304418;
public static final int HSRR1_B = 0x30441a;
public static final int CPR0_B = 0x30441c;
public static final int CPR1_B = 0x30441e;
public static final int CISR_B = 0x304420;
public static final int TPUMCR2_B = 0x304428;
public static final int TPUMCR3_B = 0x30442a;
public static final int TPURAM0_B = 0x304500;
public static final int TPURAM1_B = 0x304510;
public static final int TPURAM2_B = 0x304520;
public static final int TPURAM3_B = 0x304530;
public static final int TPURAM4_B = 0x304540;
public static final int TPURAM5_B = 0x304550;
public static final int TPURAM6_B = 0x304560;
public static final int TPURAM7_B = 0x304570;
public static final int TPURAM8_B = 0x304580;
public static final int TPURAM9_B = 0x304590;
public static final int TPURAM10_B = 0x3045a0;
public static final int TPURAM11_B = 0x3045b0;
public static final int TPURAM12_B = 0x3045c0;
public static final int TPURAM13_B = 0x3045d0;
public static final int TPURAM14_B = 0x3045e0;
public static final int TPURAM15_B = 0x3045f0;
public static final int QADC64MCR_A = 0x304800;
public static final int QADC64INT_A = 0x304804;
public static final int PORTQA_A = 0x304806;
public static final int PORTQB_A = 0x304807;
public static final int DDRQA_A = 0x304808;
public static final int QACR0_A = 0x30480a;
public static final int QACR1_A = 0x30480c;
public static final int QACR2_A = 0x30480e;
public static final int QASR0_A = 0x304810;
public static final int QASR1_A = 0x304812;
public static final int CCW_A = 0x304a00;
public static final int RJURR_A = 0x304a80;
public static final int LJSRR_A = 0x304b00;
public static final int LJURR_A = 0x304b80;
public static final int QADC64MCR_B = 0x304c00;
public static final int QADC64INT_B = 0x304c04;
public static final int PORTQA_B = 0x304c06;
public static final int PORTQB_B = 0x304c07;
public static final int DDRQA_B = 0x304c08;
public static final int QACR0_B = 0x304c0a;
public static final int QACR1_B = 0x304c0c;
public static final int QACR2_B = 0x304c0e;
public static final int QASR0_B = 0x304c10;
public static final int QASR1_B = 0x304c12;
public static final int CCW_B = 0x304e00;
public static final int RJURR_B = 0x304e80;
public static final int LJSRR_B = 0x304f00;
public static final int LJURR_B = 0x304f80;
public static final int QSMCMMCR = 0x305000;
public static final int QDSCI_IL = 0x305004;
public static final int QSPI_IL = 0x305007;
public static final int SCC1R0 = 0x305008;
public static final int SCC1R1 = 0x30500a;
public static final int SC1SR = 0x30500c;
public static final int SC1DR = 0x30500e;
public static final int PORTQS = 0x305014;
public static final int PQSPAR = 0x305016;
public static final int DDRQS = 0x305017;
public static final int SPCR0 = 0x305018;
public static final int SPCR1 = 0x30501a;
public static final int SPCR2 = 0x30501c;
public static final int SPCR3 = 0x30501e;
public static final int SPSR = 0x30501f;
public static final int SCC2R0 = 0x305020;
public static final int SCC2R1 = 0x305022;
public static final int SC2SR = 0x305024;
public static final int SC2DR = 0x305026;
public static final int QSCI1CR = 0x305028;
public static final int QSCI1SR = 0x30502a;
public static final int SCTQ = 0x30502c;
public static final int SCRQ = 0x30504c;
public static final int RECRAM = 0x305140;
public static final int TRANRAM = 0x305180;
public static final int COMDRAM = 0x3051c0;
public static final int MPWMSM0PERR = 0x306000;
public static final int MPWMSM0PULR = 0x306002;
public static final int MPWMSM0CNTR = 0x306004;
public static final int MPWMSM0SCR = 0x306006;
public static final int MPWMSM1PERR = 0x306008;
public static final int MPWMSM1PULR = 0x30600a;
public static final int MPWMSM1CNTR = 0x30600c;
public static final int MPWMSM1SCR = 0x30600e;
public static final int MPWMSM2PERR = 0x306010;
public static final int MPWMSM2PULR = 0x306012;
public static final int MPWMSM2CNTR = 0x306014;
public static final int MPWMSM2SCR = 0x306016;
public static final int MPWMSM3PERR = 0x306018;
public static final int MPWMSM3PULR = 0x30601a;
public static final int MPWMSM3CNTR = 0x30601c;
public static final int MPWMSM3SCR = 0x30601e;
public static final int MMCSM6CNT = 0x306030;
public static final int MMCSM6ML = 0x306032;
public static final int MMCSM6SCRD = 0x306034;
public static final int MMCSM6SCR = 0x306036;
public static final int MDASM11AR = 0x306058;
public static final int MDASM11BR = 0x30605a;
public static final int MDASM11SCRD = 0x30605c;
public static final int MDASM11SCR = 0x30605e;
public static final int MDASM12AR = 0x306060;
public static final int MDASM12BR = 0x306062;
public static final int MDASM12SCRD = 0x306064;
public static final int MDASM12SCR = 0x306066;
public static final int MDASM13AR = 0x306068;
public static final int MDASM13BR = 0x30606a;
public static final int MDASM13SCRD = 0x30606c;
public static final int MDASM13SCR = 0x30606e;
public static final int MDASM14AR = 0x306070;
public static final int MDASM14BR = 0x306072;
public static final int MDASM14SCRD = 0x306074;
public static final int MDASM14SCR = 0x306076;
public static final int MDASM15AR = 0x306078;
public static final int MDASM15BR = 0x30607a;
public static final int MDASM15SCRD = 0x30607c;
public static final int MDASM15SCR = 0x30607e;
public static final int MPWMSM16PERR = 0x306080;
public static final int MPWMSM16PULR = 0x306082;
public static final int MPWMSM16CNTR = 0x306084;
public static final int MPWMSM16SCR = 0x306086;
public static final int MPWMSM17PERR = 0x306088;
public static final int MPWMSM17PULR = 0x30608a;
public static final int MPWMSM17CNTR = 0x30608c;
public static final int MPWMSM17SCR = 0x30608e;
public static final int MPWMSM18PERR = 0x306090;
public static final int MPWMSM18PULR = 0x306092;
public static final int MPWMSM18CNTR = 0x306094;
public static final int MPWMSM18SCR = 0x306096;
public static final int MPWMSM19PERR = 0x306098;
public static final int MPWMSM19PULR = 0x30609a;
public static final int MPWMSM19CNTR = 0x30609c;
public static final int MPWMSM19SCR = 0x30609e;
public static final int MMCSM22CNT = 0x3060b0;
public static final int MMCSM22ML = 0x3060b2;
public static final int MMCSM22SCRD = 0x3060b4;
public static final int MMCSM22SCR = 0x3060b6;
public static final int MDASM27AR = 0x3060d8;
public static final int MDASM27BR = 0x3060da;
public static final int MDASM27SCRD = 0x3060dc;
public static final int MDASM27SCR = 0x3060de;
public static final int MDASM28AR = 0x3060e0;
public static final int MDASM28BR = 0x3060e2;
public static final int MDASM28SCRD = 0x3060e4;
public static final int MDASM28SCR = 0x3060e6;
public static final int MDASM29AR = 0x3060e8;
public static final int MDASM29BR = 0x3060ea;
public static final int MDASM29SCRD = 0x3060ec;
public static final int MDASM29SCR = 0x3060ee;
public static final int MDASM30AR = 0x3060f0;
public static final int MDASM30BR = 0x3060f2;
public static final int MDASM30SCRD = 0x3060f4;
public static final int MDASM30SCR = 0x3060f6;
public static final int MDASM31AR = 0x3060f8;
public static final int MDASM31BR = 0x3060fa;
public static final int MDASM31SCRD = 0x3060fc;
public static final int MDASM31SCR = 0x3060fe;
public static final int MPIOSMDR = 0x306100;
public static final int MPIOSMDDR = 0x306102;
public static final int MIOS1TPCR = 0x306800;
public static final int MIOS1VNR = 0x306804;
public static final int MIOS1MCR = 0x306806;
public static final int MCPSMSCR = 0x306816;
public static final int MIOS1SR0 = 0x306c00;
public static final int MIOS1ER0 = 0x306c04;
public static final int MIOS1RPR0 = 0x306c06;
public static final int MIOS1LVL0 = 0x306c30;
public static final int MIOS1SR1 = 0x306c40;
public static final int MIOS1ER1 = 0x306c44;
public static final int MIOS1RPR1 = 0x306c46;
public static final int MIOS1LVL1 = 0x306c70;
public static final int UMCR = 0x307f80;
public static final int UTSTCREG = 0x307f90;
public static final int UIPEND = 0x307fa0;
public static final int SRAMMCR_A = 0x380000;
public static final int SRAMMCR_B = 0x380008;
} | deepjava/runtime-library | src/org/deepjava/runtime/mpc555/Impc555.java | Java | apache-2.0 | 17,248 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.CSharp
{
/// <summary>
/// This class stores several source parsing related options and offers access to their values.
/// </summary>
public sealed class CSharpParseOptions : ParseOptions, IEquatable<CSharpParseOptions>
{
    /// <summary>
    /// The default parse options.
    /// </summary>
    public static CSharpParseOptions Default { get; } = new CSharpParseOptions();

    // Experimental feature flags. Keyed case-insensitively when populated via
    // WithFeatures; see the copy constructor note below.
    private ImmutableDictionary<string, string> _features;

    /// <summary>
    /// Gets the language version.
    /// </summary>
    public LanguageVersion LanguageVersion { get; private set; }

    internal ImmutableArray<string> PreprocessorSymbols { get; private set; }

    /// <summary>
    /// Gets the names of defined preprocessor symbols.
    /// </summary>
    public override IEnumerable<string> PreprocessorSymbolNames
    {
        get { return PreprocessorSymbols; }
    }

    public CSharpParseOptions(
        LanguageVersion languageVersion = LanguageVersion.CSharp6,
        DocumentationMode documentationMode = DocumentationMode.Parse,
        SourceCodeKind kind = SourceCodeKind.Regular,
        IEnumerable<string> preprocessorSymbols = null)
        : this(languageVersion, documentationMode, kind, preprocessorSymbols.ToImmutableArrayOrEmpty())
    {
        // Validation runs after the chained constructor has populated the
        // instance; it only throws, it does not mutate state.
        if (!languageVersion.IsValid())
        {
            throw new ArgumentOutOfRangeException(nameof(languageVersion));
        }
        if (!kind.IsValid())
        {
            throw new ArgumentOutOfRangeException(nameof(kind));
        }
        if (preprocessorSymbols != null)
        {
            foreach (var preprocessorSymbol in preprocessorSymbols)
            {
                if (!SyntaxFacts.IsValidIdentifier(preprocessorSymbol))
                {
                    throw new ArgumentException("preprocessorSymbols");
                }
            }
        }
    }

    internal CSharpParseOptions(
        LanguageVersion languageVersion,
        DocumentationMode documentationMode,
        SourceCodeKind kind,
        IEnumerable<string> preprocessorSymbols,
        ImmutableDictionary<string, string> features)
        : this(languageVersion, documentationMode, kind, preprocessorSymbols)
    {
        if (features == null)
        {
            throw new ArgumentNullException(nameof(features));
        }
        _features = features;
    }

    private CSharpParseOptions(CSharpParseOptions other) : this(
        languageVersion: other.LanguageVersion,
        documentationMode: other.DocumentationMode,
        kind: other.Kind,
        preprocessorSymbols: other.PreprocessorSymbols,
        // FIX: share the existing dictionary instead of rebuilding it.
        // Rebuilding via other.Features.ToImmutableDictionary() dropped the
        // OrdinalIgnoreCase key comparer installed by WithFeatures, making
        // feature lookups case-sensitive after any With* call.
        features: other._features)
    {
    }

    // No validation
    internal CSharpParseOptions(
        LanguageVersion languageVersion,
        DocumentationMode documentationMode,
        SourceCodeKind kind,
        ImmutableArray<string> preprocessorSymbols)
        : base(kind, documentationMode)
    {
        Debug.Assert(!preprocessorSymbols.IsDefault);
        this.LanguageVersion = languageVersion;
        this.PreprocessorSymbols = preprocessorSymbols;
        _features = ImmutableDictionary<string, string>.Empty;
    }

    /// <summary>
    /// Returns options with the given <see cref="SourceCodeKind"/>, reusing
    /// this instance when the value is unchanged.
    /// </summary>
    public new CSharpParseOptions WithKind(SourceCodeKind kind)
    {
        if (kind == this.Kind)
        {
            return this;
        }
        if (!kind.IsValid())
        {
            throw new ArgumentOutOfRangeException(nameof(kind));
        }
        return new CSharpParseOptions(this) { Kind = kind };
    }

    /// <summary>
    /// Returns options with the given <see cref="LanguageVersion"/>, reusing
    /// this instance when the value is unchanged.
    /// </summary>
    public CSharpParseOptions WithLanguageVersion(LanguageVersion version)
    {
        if (version == this.LanguageVersion)
        {
            return this;
        }
        if (!version.IsValid())
        {
            throw new ArgumentOutOfRangeException(nameof(version));
        }
        return new CSharpParseOptions(this) { LanguageVersion = version };
    }

    public CSharpParseOptions WithPreprocessorSymbols(IEnumerable<string> preprocessorSymbols)
    {
        return WithPreprocessorSymbols(preprocessorSymbols.AsImmutableOrNull());
    }

    public CSharpParseOptions WithPreprocessorSymbols(params string[] preprocessorSymbols)
    {
        return WithPreprocessorSymbols(ImmutableArray.Create(preprocessorSymbols));
    }

    /// <summary>
    /// Returns options with the given preprocessor symbols; a default array is
    /// normalized to empty.
    /// </summary>
    public CSharpParseOptions WithPreprocessorSymbols(ImmutableArray<string> symbols)
    {
        if (symbols.IsDefault)
        {
            symbols = ImmutableArray<string>.Empty;
        }
        if (symbols.Equals(this.PreprocessorSymbols))
        {
            return this;
        }
        return new CSharpParseOptions(this) { PreprocessorSymbols = symbols };
    }

    /// <summary>
    /// Returns options with the given <see cref="DocumentationMode"/>, reusing
    /// this instance when the value is unchanged.
    /// </summary>
    public new CSharpParseOptions WithDocumentationMode(DocumentationMode documentationMode)
    {
        if (documentationMode == this.DocumentationMode)
        {
            return this;
        }
        if (!documentationMode.IsValid())
        {
            throw new ArgumentOutOfRangeException(nameof(documentationMode));
        }
        return new CSharpParseOptions(this) { DocumentationMode = documentationMode };
    }

    public override ParseOptions CommonWithKind(SourceCodeKind kind)
    {
        return WithKind(kind);
    }

    protected override ParseOptions CommonWithDocumentationMode(DocumentationMode documentationMode)
    {
        return WithDocumentationMode(documentationMode);
    }

    protected override ParseOptions CommonWithFeatures(IEnumerable<KeyValuePair<string, string>> features)
    {
        return WithFeatures(features);
    }

    /// <summary>
    /// Enable some experimental language features for testing.
    /// </summary>
    public new CSharpParseOptions WithFeatures(IEnumerable<KeyValuePair<string, string>> features)
    {
        if (features == null)
        {
            throw new ArgumentNullException(nameof(features));
        }
        // Feature names are matched case-insensitively.
        return new CSharpParseOptions(this) { _features = features.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase) };
    }

    public override IReadOnlyDictionary<string, string> Features
    {
        get
        {
            return _features;
        }
    }

    internal bool IsFeatureEnabled(MessageID feature)
    {
        switch (feature)
        {
            case MessageID.IDS_FeatureBinaryLiteral:
            case MessageID.IDS_FeatureDigitSeparator:
            case MessageID.IDS_FeatureLocalFunctions:
            case MessageID.IDS_FeatureRefLocalsReturns:
            case MessageID.IDS_FeaturePatternMatching:
            case MessageID.IDS_FeatureTuples:
            case MessageID.IDS_FeatureReplace:
                // in "demo" mode enable proposed new C# 7 language features.
                if (PreprocessorSymbols.Contains("__DEMO__"))
                {
                    return true;
                }
                break;
            default:
                break;
        }
        // A feature gated on an explicit flag is enabled only when the flag is
        // present; otherwise fall back to the language-version gate.
        string featureFlag = feature.RequiredFeature();
        if (featureFlag != null)
        {
            return Features.ContainsKey(featureFlag);
        }
        LanguageVersion availableVersion = LanguageVersion;
        LanguageVersion requiredVersion = feature.RequiredVersion();
        return availableVersion >= requiredVersion;
    }

    public override bool Equals(object obj)
    {
        return this.Equals(obj as CSharpParseOptions);
    }

    public bool Equals(CSharpParseOptions other)
    {
        if (object.ReferenceEquals(this, other))
        {
            return true;
        }
        // Base compares the options shared by all languages; only the
        // language version is C#-specific.
        if (!base.EqualsHelper(other))
        {
            return false;
        }
        return this.LanguageVersion == other.LanguageVersion;
    }

    public override int GetHashCode()
    {
        return
            Hash.Combine(base.GetHashCodeHelper(),
                Hash.Combine((int)this.LanguageVersion, 0));
    }
}
}
| ericfe-ms/roslyn | src/Compilers/CSharp/Portable/CSharpParseOptions.cs | C# | apache-2.0 | 8,998 |
package com.ctrip.xpipe.redis.integratedtest.keeper;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;
/**
 * Aggregated JUnit suite for the keeper integration tests.
 *
 * @author wenchao.meng
 *
 * May 17, 2016 2:09:41 PM
 */
@RunWith(Suite.class)
@SuiteClasses({
    // FIX: KeeperMultiDc was listed twice, so its tests ran twice per suite
    // execution; the duplicate entry has been removed.
    KeeperPsync2.class,
    KeeperPsync2Continue.class,
    KeeperSingleDc.class,
    KeeperMultiDc.class,
    KeeperMultiDcChangePrimary.class,
    KeeperSingleDcRestart.class,
    KeeperSingleDcSlaveof.class,
    KeeperSingleDcWipeOutData.class,
    KeeperSingleDcEof.class,
    KeeperSingleDcWaitForOffset.class,
    KeeperSingleDcVersionTest.class,
    XRedisXpipeCommandTest.class,
    XRedisPartialTest.class
})
public class AllKeeperTest {
    /*
     * before run test, you should
     * 1. start redis 2.8.19 at localhost, for testCase: KeeperSingleDcVersionTest
     */
}
| Yiiinsh/x-pipe | redis/redis-integration-test/src/test/java/com/ctrip/xpipe/redis/integratedtest/keeper/AllKeeperTest.java | Java | apache-2.0 | 833 |
// Copyright 2013 Marc-Antoine Ruel. All rights reserved.
// Use of this source code is governed under the Apache License, Version 2.0
// that can be found in the LICENSE file.
package fortuna
import (
"bytes"
"compress/flate"
"crypto/aes"
"crypto/cipher"
"crypto/md5"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"hash"
"io"
"io/ioutil"
"path/filepath"
"runtime"
"sync"
"testing"
)
// generatorLenTest describes one truncation scenario: how many bytes are
// requested from a generator backed by hash h, and how many must actually
// be returned by a single Read.
type generatorLenTest struct {
	h        hash.Hash
	request  int
	expected int
}
// generatorLenTestData covers both hash sizes: small requests are served in
// full, oversized ones are capped at a per-hash maximum per Read.
var generatorLenTestData = []generatorLenTest{
	// 64 bits of security (128/2).
	{md5.New(), 1024, 1024},
	// Maximum data is 512kb.
	{md5.New(), 4 * 1024 * 1024, 512 * 1024},
	// 128 bits of security (256/2).
	{sha256.New(), 1024, 1024},
	// Maximum data is 1Mb.
	{sha256.New(), 8 * 1024 * 1024, 1024 * 1024},
}
func init() {
	// These tests are highly CPU intensive and scale linearly with the
	// number of cores, so make sure the runtime may use all of them if
	// parallelism has not been enabled already.
	if procs := runtime.GOMAXPROCS(0); procs == 1 {
		runtime.GOMAXPROCS(runtime.NumCPU())
	}
}
func TestnewGeneratorDefault(t *testing.T) {
	t.Parallel()
	// Both a nil seed and an empty seed must leave the generator with its
	// default hash-sized (32 bytes for SHA-256) internal state.
	for _, seed := range [][]byte{nil, {}} {
		gen := newGenerator(sha256.New(), seed)
		if gen.h.Size() != 32 {
			t.Fatal("Unexpected default")
		}
	}
}
// compress deflates d and returns the compressed size, used as a crude
// incompressibility (entropy) measure by the tests.
func compress(t *testing.T, d []byte) int {
	var buf bytes.Buffer
	// It's a bit slow, flate.BestSpeed could be used but that would go
	// against the check here.
	w, err := flate.NewWriter(&buf, flate.BestCompression)
	if err != nil {
		t.Fatal(err)
	}
	n, err := w.Write(d)
	if err != nil {
		t.Fatal(err)
	}
	if n != len(d) {
		t.Fatal("Unexpected len")
	}
	_ = w.Flush()
	return buf.Len()
}
// read fills out from r and fails the test unless exactly expected bytes
// were returned by the single Read call.
func read(t *testing.T, r io.Reader, out []byte, expected int) {
	n, err := r.Read(out)
	if err != nil {
		t.Fatal(err)
	}
	if n != expected {
		t.Fatalf("Requested %d, expected %d, got %d", len(out), expected, n)
	}
}
// testGeneratorLen runs one truncation scenario: the generator must return
// exactly s.expected bytes and that output must not be compressible.
func testGeneratorLen(t *testing.T, i int, s generatorLenTest) {
	gen := NewGenerator(s.h, []byte{0})
	out := make([]byte, s.request)
	read(t, gen, out, s.expected)
	// Verify that the data is not compressible.
	// Note that the whole buffer is compressed, not just out[:expected]:
	// with only the filled prefix, flate would take a quick path and skip
	// compression, making the measurement useless.
	packed := compress(t, out)
	ratio := float64(packed) / float64(s.expected)
	// Data will be larger because of the flate header.
	if ratio < 1. {
		t.Fatalf("%d H:%d; data is too compressible: %.1f %d -> %d\n%v", i, s.h.Size(), ratio*100., s.expected, packed, out)
	}
	// Make sure the 0-filled block at the end is compressed.
	if packed > (s.expected+8192) || ratio > 1.1 {
		t.Fatalf("%d H:%d; data is not enough compressed: %.1f %d -> %d", i, s.h.Size(), ratio*100., s.expected, packed)
	}
}
func TestGeneratorCutShort(t *testing.T) {
	t.Parallel()
	// This test is CPU intensive so run every scenario concurrently.
	var wg sync.WaitGroup
	for i, tc := range generatorLenTestData {
		wg.Add(1)
		go func(idx int, scenario generatorLenTest) {
			defer wg.Done()
			testGeneratorLen(t, idx, scenario)
		}(i, tc)
	}
	wg.Wait()
}
// blockRead is one scripted Read call: the requested length and the exact
// bytes the generator must produce for it.
type blockRead struct {
	Len      int
	Expected []byte
}

// generatorTestData is one deterministic test vector: a seed (Input) and
// the sequence of reads that must follow from it.
type generatorTestData struct {
	Input    []byte
	Expected []blockRead
}
// loadGeneratorTestData decodes the JSON test vectors stored under
// testdata/<name>, failing the test on any I/O or decode error.
func loadGeneratorTestData(t *testing.T, name string) []generatorTestData {
	raw, err := ioutil.ReadFile(filepath.Join("testdata", name))
	if err != nil {
		t.Fatal(err)
	}
	var vectors []generatorTestData
	if err := json.Unmarshal(raw, &vectors); err != nil {
		t.Fatal(err)
	}
	return vectors
}
// Ensures Generator is completely deterministic and has the exact same output
// than the python implementation.
func TestGeneratorDeterminism(t *testing.T) {
t.Parallel()
for i, v := range loadGeneratorTestData(t, "generator.json") {
{
g1 := NewGenerator(nil, v.Input)
for j, e := range v.Expected {
d := make([]byte, e.Len)
read(t, g1, d, e.Len)
if 0 != bytes.Compare(e.Expected, d) {
t.Fatalf("Index %d,%d: Generator.Read(%d) -> %v != %v", i, j, e.Len, d, e.Expected)
}
}
}
// Late reseeding results in the same output and that the output data is
// properly overwritten.
{
g2 := NewGenerator(nil, nil)
_, _ = g2.Write(v.Input)
for j, e := range v.Expected {
d := make([]byte, e.Len)
read(t, g2, d, e.Len)
if 0 != bytes.Compare(e.Expected, d) {
t.Fatalf("Index %d,%d: Generator.Read(%d) -> %v != %v", i, j, e.Len, d, e.Expected)
}
}
}
}
}
// Benches large chunks throughput. Calculates the cost per byte.
func BenchmarkGeneratorLarge(b *testing.B) {
	g := NewGenerator(nil, []byte{0})
	data := make([]byte, b.N)
	count := 0
	// Exclude generator construction and buffer allocation from the timing.
	b.ResetTimer()
	for count != b.N {
		// For large values of b.N, the Read call will only return up to
		// maxBytesPerRequest bytes so a loop is needed. In theory it will increase
		// overhead, in practice maxBytesPerRequest is large enough that overhead
		// is minimal.
		remaining := b.N - count
		n, err := g.Read(data[:remaining])
		if err != nil {
			b.Fatal(err)
		}
		if n == 0 {
			b.Fatalf("Failed to read")
		}
		count += n
	}
}
// Reads 1 byte at a time to bench overhead. Calculates the cost per byte.
func BenchmarkGenerator1Byte(b *testing.B) {
	g := NewGenerator(nil, []byte{0})
	data := make([]byte, 1)
	// Exclude generator construction from the timing.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		n, err := g.Read(data)
		if err != nil {
			b.Fatal(err)
		}
		if n != 1 {
			b.Fatalf("Failed to read")
		}
	}
}
// Reads 16 bytes at a time to bench overhead. Calculates the cost per byte.
func BenchmarkGenerator16Bytes(b *testing.B) {
	g := NewGenerator(nil, []byte{0})
	data := make([]byte, 16)
	count := 0
	b.ResetTimer()
	for count != b.N {
		// Clamp the final chunk so exactly b.N bytes are read in total.
		chunk := 16
		if b.N-count < 16 {
			chunk = b.N - count
		}
		n, err := g.Read(data[:chunk])
		if err != nil {
			b.Fatal(err)
		}
		if n != chunk {
			b.Fatalf("Failed to read")
		}
		count += chunk
	}
}
// decodeString converts a hex string to its raw bytes, panicking on
// malformed input; it is only used to build package-level test fixtures.
func decodeString(str string) []byte {
	out, err := hex.DecodeString(str)
	if err != nil {
		panic("Invalid hex string")
	}
	return out
}
var key = decodeString("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f")
// Benches large chunks throughput. Calculates the cost per byte.
// Reference point to compare Generator against raw AES-CTR.
func BenchmarkAESCTRLarge(b *testing.B) {
	c, err := aes.NewCipher(key)
	if err != nil {
		b.Fatal(err)
	}
	e := cipher.NewCTR(c, make([]byte, aes.BlockSize))
	data := make([]byte, b.N)
	// Exclude cipher setup from the timing.
	b.ResetTimer()
	// Interestingly, Generator is faster than AES in CTR, because there is no
	// need to XOR the data.
	e.XORKeyStream(data, data)
}
// Reads 1 byte at a time to bench overhead. Calculates the cost per byte.
func BenchmarkAESCTR1Byte(b *testing.B) {
	c, err := aes.NewCipher(key)
	if err != nil {
		b.Fatal(err)
	}
	e := cipher.NewCTR(c, make([]byte, aes.BlockSize))
	data := make([]byte, 1)
	// Exclude cipher setup from the timing.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		e.XORKeyStream(data, data)
	}
}
// Reads 16 bytes at a time to bench overhead. Calculates the cost per byte.
func BenchmarkAESCTR16Bytes(b *testing.B) {
	c, err := aes.NewCipher(key)
	if err != nil {
		b.Fatal(err)
	}
	e := cipher.NewCTR(c, make([]byte, aes.BlockSize))
	data := make([]byte, 16)
	count := 0
	b.ResetTimer()
	for count != b.N {
		// Clamp the final chunk so exactly b.N bytes are processed in total.
		chunk := 16
		if b.N-count < 16 {
			chunk = b.N - count
		}
		e.XORKeyStream(data[:chunk], data[:chunk])
		count += chunk
	}
}
// Reseeds the generator. Calculates the cost per reseed.
func BenchmarkGeneratorReseed(b *testing.B) {
	g := NewGenerator(nil, []byte{0})
	data := make([]byte, 16)
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		// Write feeds new seed material into the generator (see
		// TestGeneratorDeterminism's late-reseeding case).
		_, _ = g.Write(data)
	}
}
| maruel/fortuna | generator_test.go | GO | apache-2.0 | 7,636 |
/* */
"format esm";
// TypeScript emit helper: applies a list of decorators to a class, method
// or property descriptor (used below to attach @CONST and param metadata).
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript emit helper: records design-time type metadata via the Reflect
// polyfill when it is available.
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
import { isPresent, isBlank, CONST } from 'angular2/src/facade/lang';
import { ListWrapper, StringMapWrapper } from 'angular2/src/facade/collection';
import { ViewType } from './view_type';
// Per-node static debug metadata compiled into a view: the DI provider
// tokens declared at the node, the component token (if the node hosts a
// component) and the local reference tokens.
export let StaticNodeDebugInfo = class StaticNodeDebugInfo {
    constructor(providerTokens, componentToken, refTokens) {
        this.providerTokens = providerTokens;
        this.componentToken = componentToken;
        this.refTokens = refTokens;
    }
};
// Re-attach the @CONST() decorator and constructor parameter metadata,
// mirroring the original TypeScript class declaration.
StaticNodeDebugInfo = __decorate([
    CONST(),
    __metadata('design:paramtypes', [Array, Object, Object])
], StaticNodeDebugInfo);
// Runtime debug view over one node of a view: combines the static debug
// metadata (StaticNodeDebugInfo) with live view state to expose the
// component, injector, render node, template source position and local
// variables — primarily for error messages and dev tooling.
export class DebugContext {
    constructor(_view, _nodeIndex, _tplRow, _tplCol) {
        this._view = _view;
        this._nodeIndex = _nodeIndex;
        this._tplRow = _tplRow; // template row of the node
        this._tplCol = _tplCol; // template column of the node
    }
    // Static debug info for this node, or null when no node index is set.
    get _staticNodeInfo() {
        return isPresent(this._nodeIndex) ? this._view.staticNodeDebugInfos[this._nodeIndex] : null;
    }
    get context() { return this._view.context; }
    // Component instance attached at this node, if the node declares a
    // component token; resolved through the node's injector.
    get component() {
        var staticNodeInfo = this._staticNodeInfo;
        if (isPresent(staticNodeInfo) && isPresent(staticNodeInfo.componentToken)) {
            return this.injector.get(staticNodeInfo.componentToken);
        }
        return null;
    }
    // Walks up the declaration chain to the nearest COMPONENT view and
    // returns its host element, or null if the chain ends first.
    get componentRenderElement() {
        var componentView = this._view;
        while (isPresent(componentView.declarationAppElement) &&
            componentView.type !== ViewType.COMPONENT) {
            componentView = componentView.declarationAppElement.parentView;
        }
        return isPresent(componentView.declarationAppElement) ?
            componentView.declarationAppElement.nativeElement :
            null;
    }
    get injector() { return this._view.injector(this._nodeIndex); }
    // Render node at this node index, when the view tracks its nodes.
    get renderNode() {
        if (isPresent(this._nodeIndex) && isPresent(this._view.allNodes)) {
            return this._view.allNodes[this._nodeIndex];
        }
        else {
            return null;
        }
    }
    get providerTokens() {
        var staticNodeInfo = this._staticNodeInfo;
        return isPresent(staticNodeInfo) ? staticNodeInfo.providerTokens : null;
    }
    // "templateUrl:row:col" position string used in error messages.
    get source() {
        return `${this._view.componentType.templateUrl}:${this._tplRow}:${this._tplCol}`;
    }
    get locals() {
        var varValues = {};
        // TODO(tbosch): right now, the semantics of debugNode.locals are
        // that it contains the variables of all elements, not just
        // the given one. We preserve this for now to not have a breaking
        // change, but should change this later!
        ListWrapper.forEachWithIndex(this._view.staticNodeDebugInfos, (staticNodeInfo, nodeIndex) => {
            var refs = staticNodeInfo.refTokens;
            StringMapWrapper.forEach(refs, (refToken, refName) => {
                var varValue;
                if (isBlank(refToken)) {
                    // A blank token resolves to the node itself.
                    varValue = isPresent(this._view.allNodes) ? this._view.allNodes[nodeIndex] : null;
                }
                else {
                    varValue = this._view.injectorGet(refToken, nodeIndex, null);
                }
                varValues[refName] = varValue;
            });
        });
        StringMapWrapper.forEach(this._view.locals, (localValue, localName) => { varValues[localName] = localValue; });
        return varValues;
    }
}
| tzerb/Learning | WebApplication3/src/WebApplication3/jspm_packages/npm/angular2@2.0.0-beta.17/es6/prod/src/core/linker/debug_context.js | JavaScript | apache-2.0 | 4,102 |
<?php
/**
 * Form item rendered as a group of radio buttons.
 *
 * Inherits its behavior from FormItem; only the option source and the HTML
 * input type are specialized here.
 */
class Radios_FormItem_Core extends FormItem {
    /**
     * The options to render.
     * NOTE(review): presumably $this->options is populated by FormItem from
     * the item configuration — confirm against the parent class.
     */
    protected function options() {
        return $this->options;
    }
    /** HTML input type used for each rendered option. */
    protected function inputType() {
        return "radio";
    }
}; | erichoglander/qf | core/form_item/radios_form_item.php | PHP | apache-2.0 | 193 |
// Copyright 2016-2022 The Libsacloud Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sim
import (
"context"
"testing"
"github.com/sacloud/libsacloud/v2/helper/cleanup"
"github.com/sacloud/libsacloud/v2/sacloud"
"github.com/sacloud/libsacloud/v2/sacloud/pointer"
"github.com/sacloud/libsacloud/v2/sacloud/testutil"
"github.com/sacloud/libsacloud/v2/sacloud/types"
"github.com/stretchr/testify/require"
)
// TestSIMService_convertUpdateRequest is an integration-style test: it
// creates a real SIM through the API caller, then verifies that
// UpdateRequest.ApplyRequest merges the requested changes with the current
// resource state (fields left nil must keep the created SIM's values).
func TestSIMService_convertUpdateRequest(t *testing.T) {
	ctx := context.Background()
	caller := testutil.SingletonAPICaller()
	name := testutil.ResourceName("sim-service")
	// setup: create a SIM so there is a live resource to convert against.
	simOp := sacloud.NewSIMOp(caller)
	sim, err := New(caller).CreateWithContext(ctx, &CreateRequest{
		Name:        name,
		Description: "desc",
		Tags:        types.Tags{"tag1", "tag2"},
		ICCID:       "aaaaaaaa",
		PassCode:    "bbbbbbbb",
		Activate:    true,
		IMEI:        "cccccccc",
		Carriers: []*sacloud.SIMNetworkOperatorConfig{
			{
				Allow: true,
				Name:  types.SIMOperators.Docomo.String(),
			},
		},
	})
	if err != nil {
		t.Fatal(err)
	}
	// Always remove the SIM created above, even when assertions fail.
	defer func() {
		cleanup.DeleteSIM(ctx, simOp, sim.ID) // nolint
	}()
	// test: Name/Activate/IMEI/Carriers are updated; Description, Tags,
	// IconID and ICCID are expected to carry over from the created SIM.
	cases := []struct {
		in     *UpdateRequest
		expect *ApplyRequest
	}{
		{
			in: &UpdateRequest{
				ID:       sim.ID,
				Name:     pointer.NewString(name + "-upd"),
				Activate: pointer.NewBool(false),
				IMEI:     pointer.NewString(""),
				Carriers: &[]*sacloud.SIMNetworkOperatorConfig{
					{Allow: true, Name: types.SIMOperators.SoftBank.String()},
				},
			},
			expect: &ApplyRequest{
				ID:          sim.ID,
				Name:        name + "-upd",
				Description: sim.Description,
				Tags:        sim.Tags,
				IconID:      sim.IconID,
				ICCID:       sim.ICCID,
				// NOTE(review): expected empty even though the SIM was created
				// with a passcode — presumably it is not read back. Confirm.
				PassCode: "",
				Activate: false,
				IMEI:     "",
				Carriers: []*sacloud.SIMNetworkOperatorConfig{
					{
						Allow: true,
						Name:  types.SIMOperators.SoftBank.String(),
					},
				},
			},
		},
	}
	for _, tc := range cases {
		req, err := tc.in.ApplyRequest(ctx, caller)
		require.NoError(t, err)
		require.EqualValues(t, tc.expect, req)
	}
}
| sacloud/libsacloud | v2/helper/service/sim/update_test.go | GO | apache-2.0 | 2,609 |
/*
Copyright 2015 Jenna Hatchard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.mycompany.assignment1;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Accumulates summary statistics (min/max/average/median) over the last 10,
 * the last 100 and all recorded samples. Zero values are treated as
 * placeholders and are excluded from the "all" statistics.
 *
 * Created by Me on 2015-10-03.
 */
public class SortStats {
    // All statistics default to 0 until enough samples are available.
    Long max10 = Long.valueOf(0);
    Long min10 = Long.valueOf(0);
    Long avg10 = Long.valueOf(0);
    Long med10 = Long.valueOf(0);
    Long max100 = Long.valueOf(0);
    Long min100 = Long.valueOf(0);
    Long avg100 = Long.valueOf(0);
    Long med100 = Long.valueOf(0);
    Long maxAll = Long.valueOf(0);
    Long minAll = Long.valueOf(0);
    Long avgAll = Long.valueOf(0);
    Long medAll = Long.valueOf(0);
    // Flattened result list, rebuilt on every sortIt() call; order:
    // [min10, max10, avg10, med10, min100, max100, avg100, med100,
    //  minAll, maxAll, avgAll, medAll]
    ArrayList<Long> results = new ArrayList<>();

    /**
     * Computes the statistics for the given samples and returns them as a
     * 12-element list (see the {@code results} field for the ordering).
     * The caller's list is not modified.
     *
     * @param stats samples to summarize (Long values)
     * @return the 12 computed statistics; windows without enough data are 0
     */
    public ArrayList sortIt(ArrayList stats) {
        // Work on a copy so sorting never reorders the caller's list.
        ArrayList statsCopy = (ArrayList) stats.clone();
        // FIX: clear previous results so repeated calls return exactly 12
        // entries instead of accumulating across invocations.
        results.clear();
        if (statsCopy.size() < 10) {
            // not enough data for windowed stats
            sortAll(statsCopy);
        } else if (statsCopy.size() < 100) {
            // FIX: was "<= 100", which skipped the 100-sample window when
            // exactly 100 samples were present.
            sort10(statsCopy);
            sortAll(statsCopy);
        } else {
            sort10(statsCopy);
            sort100(statsCopy);
            sortAll(statsCopy);
        }
        results.add(min10);
        results.add(max10);
        results.add(avg10);
        results.add(med10);
        results.add(min100);
        results.add(max100);
        results.add(avg100);
        results.add(med100);
        results.add(minAll);
        results.add(maxAll);
        results.add(avgAll);
        results.add(medAll);
        return results;
    }

    /**
     * Computes the "all samples" statistics, ignoring zero placeholders.
     * Sorts {@code all} in place (always a private copy in this class).
     */
    public void sortAll(ArrayList all) {
        Long zeros = Long.valueOf(0);
        Collections.sort(all);
        ArrayList subAll = new ArrayList();
        // Drop the zero place holders so they do not skew the stats.
        int zeroFrequency = Collections.frequency(all, zeros);
        if (zeroFrequency != 0) {
            for (Object i : all) {
                if (!i.equals(zeros)) {
                    subAll.add(i);
                }
            }
            if (subAll.isEmpty()) {
                // the entire list was zeros
                maxAll = zeros;
                minAll = zeros;
                medAll = zeros;
                avgAll = zeros;
            } else {
                int midIndex = (subAll.size() / 2);
                maxAll = (Long) Collections.max(subAll);
                minAll = (Long) Collections.min(subAll);
                // FIX: the median must come from the zero-filtered list;
                // indexing the unfiltered list with subAll's midpoint
                // returned a value shifted left by the zero count.
                medAll = (Long) subAll.get(midIndex);
                avgAll = calculateAverage(subAll);
            }
        } else {
            int midIndex = (all.size() / 2);
            maxAll = (Long) Collections.max(all);
            minAll = (Long) Collections.min(all);
            medAll = (Long) all.get(midIndex); // get middle number
            avgAll = calculateAverage(all);
        }
    }

    /** Computes the statistics of the most recent 100 samples. */
    public void sort100(ArrayList last100) {
        int end = last100.size();
        int start = end - 100;
        List miniList = last100.subList(start, end);
        Collections.sort(miniList);
        max100 = (Long) Collections.max(miniList);
        min100 = (Long) Collections.min(miniList);
        med100 = (Long) miniList.get(49); // get middle number
        avg100 = calculateAverage(miniList);
    }

    /** Computes the statistics of the most recent 10 samples. */
    public void sort10(ArrayList last10) {
        int end = last10.size();
        int start = end - 10;
        List miniList = last10.subList(start, end);
        Collections.sort(miniList);
        max10 = (Long) Collections.max(miniList);
        min10 = (Long) Collections.min(miniList);
        med10 = (Long) miniList.get(4); // get middle number
        avg10 = calculateAverage(miniList);
    }

    // http://stackoverflow.com/questions/10791568/calculating-average-of-an-array-list Jesherun 10-03-2015
    /** Integer average of the list, or 0 for an empty list. */
    public Long calculateAverage(List<Long> list) {
        Long sum = Long.valueOf(0); // Long.valueOf instead of deprecated new Long(0)
        if (!list.isEmpty()) {
            for (Long i : list) {
                sum += i;
            }
            return sum / list.size();
        }
        return sum;
    }
}
| hatchard/Assignment1-CMPUT301 | app/src/main/java/com/mycompany/assignment1/SortStats.java | Java | apache-2.0 | 4,882 |
/**
 * Authorization-related security types for OpenNTF Red.
 *
 * @author Vladimir Kornienko
 */
package org.openntf.red.security.authorization; | hyarthi/project-red | src/java/org.openntf.red.main/src/org/openntf/red/security/authorization/package-info.java | Java | apache-2.0 | 100 |
package io.fabric8.kubernetes.api.model;
import java.util.HashMap;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import io.sundr.builder.annotations.Buildable;
import lombok.EqualsAndHashCode;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;
/**
 * Generated-style POJO for a Kubernetes ServiceAccountTokenProjection.
 * Jackson maps the "audience", "expirationSeconds" and "path" JSON
 * properties; any unrecognized JSON fields are preserved round-trip in
 * {@code additionalProperties}.
 */
@JsonDeserialize(using = com.fasterxml.jackson.databind.JsonDeserializer.None.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder({
    "apiVersion",
    "kind",
    "metadata",
    "audience",
    "expirationSeconds",
    "path"
})
@ToString
@EqualsAndHashCode
@Setter
@Accessors(prefix = {
    "_",
    ""
})
@Buildable(editableEnabled = false, validationEnabled = false, generateBuilderPackage = true, lazyCollectionInitEnabled = false, builderPackage = "io.fabric8.kubernetes.api.builder")
public class ServiceAccountTokenProjection implements KubernetesResource
{

    // Maps to JSON "audience".
    @JsonProperty("audience")
    private String audience;
    // Maps to JSON "expirationSeconds".
    @JsonProperty("expirationSeconds")
    private Long expirationSeconds;
    // Maps to JSON "path".
    @JsonProperty("path")
    private String path;
    // Catch-all for JSON fields not declared above.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<String, Object>();

    /**
     * No args constructor for use in serialization
     *
     */
    public ServiceAccountTokenProjection() {
    }

    /**
     * All-args constructor.
     *
     * @param audience value for the JSON "audience" property
     * @param expirationSeconds value for the JSON "expirationSeconds" property
     * @param path value for the JSON "path" property
     */
    public ServiceAccountTokenProjection(String audience, Long expirationSeconds, String path) {
        super();
        this.audience = audience;
        this.expirationSeconds = expirationSeconds;
        this.path = path;
    }

    @JsonProperty("audience")
    public String getAudience() {
        return audience;
    }

    @JsonProperty("audience")
    public void setAudience(String audience) {
        this.audience = audience;
    }

    @JsonProperty("expirationSeconds")
    public Long getExpirationSeconds() {
        return expirationSeconds;
    }

    @JsonProperty("expirationSeconds")
    public void setExpirationSeconds(Long expirationSeconds) {
        this.expirationSeconds = expirationSeconds;
    }

    @JsonProperty("path")
    public String getPath() {
        return path;
    }

    @JsonProperty("path")
    public void setPath(String path) {
        this.path = path;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

}
| fabric8io/kubernetes-client | kubernetes-model-generator/kubernetes-model-core/src/generated/java/io/fabric8/kubernetes/api/model/ServiceAccountTokenProjection.java | Java | apache-2.0 | 2,936 |
"use strict";
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript emit helper: records design-time type metadata via
// Reflect.metadata when the Reflect API is available (no-op otherwise).
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var di_1 = require('angular2/src/core/di');
var di_2 = require('angular2/src/core/di');
var collection_1 = require('angular2/src/facade/collection');
var lang_1 = require('angular2/src/facade/lang');
var exceptions_1 = require('angular2/src/facade/exceptions');
var metadata_1 = require('../core/metadata');
var view_resolver_1 = require('angular2/src/compiler/view_resolver');
// Test double for `ViewResolver`: lets tests override a component's view,
// inline template, or individual view directives before the component is
// compiled. Resolved views are cached, and once cached the component's
// configuration can no longer be changed (see `_checkOverrideable`).
var MockViewResolver = (function (_super) {
    __extends(MockViewResolver, _super);
    function MockViewResolver() {
        _super.call(this);
        /** @internal */
        this._views = new collection_1.Map();
        /** @internal */
        this._inlineTemplates = new collection_1.Map();
        /** @internal */
        this._viewCache = new collection_1.Map();
        /** @internal */
        this._directiveOverrides = new collection_1.Map();
    }
    /**
     * Overrides the {@link ViewMetadata} for a component.
     *
     * @param {Type} component
     * @param {ViewDefinition} view
     */
    MockViewResolver.prototype.setView = function (component, view) {
        this._checkOverrideable(component);
        this._views.set(component, view);
    };
    /**
     * Overrides the inline template for a component - other configuration remains unchanged.
     *
     * @param {Type} component
     * @param {string} template
     */
    MockViewResolver.prototype.setInlineTemplate = function (component, template) {
        this._checkOverrideable(component);
        this._inlineTemplates.set(component, template);
    };
    /**
     * Overrides a directive from the component {@link ViewMetadata}.
     *
     * @param {Type} component
     * @param {Type} from
     * @param {Type} to
     */
    MockViewResolver.prototype.overrideViewDirective = function (component, from, to) {
        this._checkOverrideable(component);
        var overrides = this._directiveOverrides.get(component);
        if (lang_1.isBlank(overrides)) {
            // Lazily allocate the per-component override map.
            overrides = new collection_1.Map();
            this._directiveOverrides.set(component, overrides);
        }
        overrides.set(from, to);
    };
    /**
     * Returns the {@link ViewMetadata} for a component:
     * - Set the {@link ViewMetadata} to the overridden view when it exists or fallback to the default
     * `ViewResolver`,
     * see `setView`.
     * - Override the directives, see `overrideViewDirective`.
     * - Override the @View definition, see `setInlineTemplate`.
     *
     * @param component
     * @returns {ViewDefinition}
     */
    MockViewResolver.prototype.resolve = function (component) {
        // Serve from the cache when this component was resolved before.
        var view = this._viewCache.get(component);
        if (lang_1.isPresent(view))
            return view;
        view = this._views.get(component);
        if (lang_1.isBlank(view)) {
            view = _super.prototype.resolve.call(this, component);
        }
        var directives = [];
        var overrides = this._directiveOverrides.get(component);
        if (lang_1.isPresent(overrides) && lang_1.isPresent(view.directives)) {
            // Replace each overridden directive in the (flattened) list.
            flattenArray(view.directives, directives);
            overrides.forEach(function (to, from) {
                var srcIndex = directives.indexOf(from);
                if (srcIndex == -1) {
                    throw new exceptions_1.BaseException("Overriden directive " + lang_1.stringify(from) + " not found in the template of " + lang_1.stringify(component));
                }
                directives[srcIndex] = to;
            });
            view = new metadata_1.ViewMetadata({ template: view.template, templateUrl: view.templateUrl, directives: directives });
        }
        var inlineTemplate = this._inlineTemplates.get(component);
        if (lang_1.isPresent(inlineTemplate)) {
            view = new metadata_1.ViewMetadata({ template: inlineTemplate, templateUrl: null, directives: view.directives });
        }
        this._viewCache.set(component, view);
        return view;
    };
    /**
     * @internal
     *
     * Once a component has been compiled, the AppProtoView is stored in the compiler cache.
     *
     * Then it should not be possible to override the component configuration after the component
     * has been compiled.
     *
     * @param {Type} component
     */
    MockViewResolver.prototype._checkOverrideable = function (component) {
        var cached = this._viewCache.get(component);
        if (lang_1.isPresent(cached)) {
            throw new exceptions_1.BaseException("The component " + lang_1.stringify(component) + " has already been compiled, its configuration can not be changed");
        }
    };
    MockViewResolver = __decorate([
        di_2.Injectable(),
        __metadata('design:paramtypes', [])
    ], MockViewResolver);
    return MockViewResolver;
}(view_resolver_1.ViewResolver));
exports.MockViewResolver = MockViewResolver;
// Recursively copies `tree` into `out`, resolving forward references and
// expanding nested arrays in place.
function flattenArray(tree, out) {
    tree.forEach(function (node) {
        var resolved = di_1.resolveForwardRef(node);
        if (lang_1.isArray(resolved)) {
            flattenArray(resolved, out);
        } else {
            out.push(resolved);
        }
    });
}
//# sourceMappingURL=view_resolver_mock.js.map | tzerb/Learning | WebApplication3/src/WebApplication3/jspm_packages/npm/dist/js/cjs/src/mock/view_resolver_mock.js | JavaScript | apache-2.0 | 6,175 |
#!/usr/bin/env python
# Copyright 2020 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Run xDS integration tests on GCP using Traffic Director."""
import argparse
import datetime
import json
import logging
import os
import random
import shlex
import socket
import subprocess
import sys
import tempfile
import time
import uuid
from google.protobuf import json_format
import googleapiclient.discovery
import grpc
from oauth2client.client import GoogleCredentials
import python_utils.jobset as jobset
import python_utils.report_utils as report_utils
from src.proto.grpc.health.v1 import health_pb2
from src.proto.grpc.health.v1 import health_pb2_grpc
from src.proto.grpc.testing import empty_pb2
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
# Envoy protos provided by PyPI package xds-protos
# Needs to import the generated Python file to load descriptors
try:
from envoy.extensions.filters.common.fault.v3 import fault_pb2
from envoy.extensions.filters.http.fault.v3 import fault_pb2
from envoy.extensions.filters.http.router.v3 import router_pb2
from envoy.extensions.filters.network.http_connection_manager.v3 import \
http_connection_manager_pb2
from envoy.service.status.v3 import csds_pb2
from envoy.service.status.v3 import csds_pb2_grpc
except ImportError:
# These protos are required by CSDS test. We should not fail the entire
# script for one test case.
pass
# Root logger: timestamped messages to stderr; WARNING by default (raised to
# DEBUG by the --verbose flag parsed below).
logger = logging.getLogger()
console_handler = logging.StreamHandler()
formatter = logging.Formatter(fmt='%(asctime)s: %(levelname)-8s %(message)s')
console_handler.setFormatter(formatter)
logger.handlers = []
logger.addHandler(console_handler)
logger.setLevel(logging.WARNING)
# Suppress excessive logs for gRPC Python
original_grpc_trace = os.environ.pop('GRPC_TRACE', None)
original_grpc_verbosity = os.environ.pop('GRPC_VERBOSITY', None)
# Suppress not-essential logs for GCP clients
logging.getLogger('google_auth_httplib2').setLevel(logging.WARNING)
logging.getLogger('googleapiclient.discovery').setLevel(logging.WARNING)
# Test cases included when --test_case=all.
_TEST_CASES = [
    'backends_restart',
    'change_backend_service',
    'gentle_failover',
    'load_report_based_failover',
    'ping_pong',
    'remove_instance_group',
    'round_robin',
    'secondary_locality_gets_no_requests_on_partial_primary_failure',
    'secondary_locality_gets_requests_on_primary_failure',
    'traffic_splitting',
    'path_matching',
    'header_matching',
    'api_listener',
    'forwarding_rule_port_match',
    'forwarding_rule_default_port',
    'metadata_filter',
]
# Valid test cases, but not in all. So the tests can only run manually, and
# aren't enabled automatically for all languages.
#
# TODO: Move them into _TEST_CASES when support is ready in all languages.
_ADDITIONAL_TEST_CASES = [
    'circuit_breaking',
    'timeout',
    'fault_injection',
    'csds',
]
# Test cases that require the V3 API. Skipped in older runs.
_V3_TEST_CASES = frozenset(['timeout', 'fault_injection', 'csds'])
# Test cases that require the alpha API. Skipped for stable API runs.
_ALPHA_TEST_CASES = frozenset(['timeout'])
def parse_test_cases(arg):
    """Parse a comma-separated test case list into an ordered list.

    The token 'all' expands to every test case in _TEST_CASES (but not the
    manual-only _ADDITIONAL_TEST_CASES, which must be named explicitly).

    Args:
        arg: Raw command-line value, e.g. 'ping_pong,round_robin' or 'all'.

    Returns:
        The selected test case names in the canonical order of
        _TEST_CASES + _ADDITIONAL_TEST_CASES (duplicates removed).

    Raises:
        Exception: if any token is not a known test case.
    """
    if arg == '':
        return []
    all_test_cases = _TEST_CASES + _ADDITIONAL_TEST_CASES
    test_cases = set()
    # Don't shadow `arg` here: the error message below must report the full
    # original argument, not just the last token.
    for name in arg.split(','):
        if name == "all":
            test_cases.update(_TEST_CASES)
        else:
            test_cases.add(name)
    if not all(test_case in all_test_cases for test_case in test_cases):
        raise Exception('Failed to parse test cases %s' % arg)
    # Preserve the canonical ordering of the test cases.
    return [x for x in all_test_cases if x in test_cases]
def parse_port_range(port_arg):
    """Parse a port argument into an inclusive list of candidate ports.

    Args:
        port_arg: Either a single port ('8080') or a 'min:max' range
            ('8080:8280').

    Returns:
        A list of ints: [port] for a single port, or all ports from min to
        max inclusive for a range.
    """
    try:
        port = int(port_arg)
        return list(range(port, port + 1))
    # Only fall back to range parsing on a genuine conversion failure; the
    # original bare `except:` also swallowed KeyboardInterrupt/SystemExit.
    except ValueError:
        port_min, port_max = port_arg.split(':')
        return list(range(int(port_min), int(port_max) + 1))
# Command-line interface. `args` is consumed as a module-level global by the
# helpers and test cases below.
argp = argparse.ArgumentParser(description='Run xDS interop tests on GCP')
# TODO(zdapeng): remove default value of project_id and project_num
argp.add_argument('--project_id', default='grpc-testing', help='GCP project id')
argp.add_argument('--project_num',
                  default='830293263384',
                  help='GCP project number')
argp.add_argument(
    '--gcp_suffix',
    default='',
    help='Optional suffix for all generated GCP resource names. Useful to '
    'ensure distinct names across test runs.')
argp.add_argument(
    '--test_case',
    default='ping_pong',
    type=parse_test_cases,
    help='Comma-separated list of test cases to run. Available tests: %s, '
    '(or \'all\' to run every test). '
    'Alternative tests not included in \'all\': %s' %
    (','.join(_TEST_CASES), ','.join(_ADDITIONAL_TEST_CASES)))
argp.add_argument(
    '--bootstrap_file',
    default='',
    help='File to reference via GRPC_XDS_BOOTSTRAP. Disables built-in '
    'bootstrap generation')
argp.add_argument(
    '--xds_v3_support',
    default=False,
    action='store_true',
    help='Support xDS v3 via GRPC_XDS_EXPERIMENTAL_V3_SUPPORT. '
    'If a pre-created bootstrap file is provided via the --bootstrap_file '
    'parameter, it should include xds_v3 in its server_features field.')
argp.add_argument(
    '--client_cmd',
    default=None,
    help='Command to launch xDS test client. {server_uri}, {stats_port} and '
    '{qps} references will be replaced using str.format(). GRPC_XDS_BOOTSTRAP '
    'will be set for the command')
argp.add_argument(
    '--client_hosts',
    default=None,
    help='Comma-separated list of hosts running client processes. If set, '
    '--client_cmd is ignored and client processes are assumed to be running on '
    'the specified hosts.')
argp.add_argument('--zone', default='us-central1-a')
argp.add_argument('--secondary_zone',
                  default='us-west1-b',
                  help='Zone to use for secondary TD locality tests')
argp.add_argument('--qps', default=100, type=int, help='Client QPS')
argp.add_argument(
    '--wait_for_backend_sec',
    default=1200,
    type=int,
    help='Time limit for waiting for created backend services to report '
    'healthy when launching or updated GCP resources')
argp.add_argument(
    '--use_existing_gcp_resources',
    default=False,
    action='store_true',
    help=
    'If set, find and use already created GCP resources instead of creating new'
    ' ones.')
argp.add_argument(
    '--keep_gcp_resources',
    default=False,
    action='store_true',
    help=
    'Leave GCP VMs and configuration running after test. Default behavior is '
    'to delete when tests complete.')
argp.add_argument('--halt_after_fail',
                  action='store_true',
                  help='Halt and save the resources when test failed.')
argp.add_argument(
    '--compute_discovery_document',
    default=None,
    type=str,
    help=
    'If provided, uses this file instead of retrieving via the GCP discovery '
    'API')
argp.add_argument(
    '--alpha_compute_discovery_document',
    default=None,
    type=str,
    help='If provided, uses this file instead of retrieving via the alpha GCP '
    'discovery API')
argp.add_argument('--network',
                  default='global/networks/default',
                  help='GCP network to use')
_DEFAULT_PORT_RANGE = '8080:8280'
argp.add_argument('--service_port_range',
                  default=_DEFAULT_PORT_RANGE,
                  type=parse_port_range,
                  help='Listening port for created gRPC backends. Specified as '
                  'either a single int or as a range in the format min:max, in '
                  'which case an available port p will be chosen s.t. min <= p '
                  '<= max')
argp.add_argument(
    '--stats_port',
    default=8079,
    type=int,
    help='Local port for the client process to expose the LB stats service')
argp.add_argument('--xds_server',
                  default='trafficdirector.googleapis.com:443',
                  help='xDS server')
argp.add_argument('--source_image',
                  default='projects/debian-cloud/global/images/family/debian-9',
                  help='Source image for VMs created during the test')
argp.add_argument('--path_to_server_binary',
                  default=None,
                  type=str,
                  help='If set, the server binary must already be pre-built on '
                  'the specified source image')
argp.add_argument('--machine_type',
                  default='e2-standard-2',
                  help='Machine type for VMs created during the test')
argp.add_argument(
    '--instance_group_size',
    default=2,
    type=int,
    help='Number of VMs to create per instance group. Certain test cases (e.g., '
    'round_robin) may not give meaningful results if this is set to a value '
    'less than 2.')
argp.add_argument('--verbose',
                  help='verbose log output',
                  default=False,
                  action='store_true')
# TODO(ericgribkoff) Remove this param once the sponge-formatted log files are
# visible in all test environments.
argp.add_argument('--log_client_output',
                  help='Log captured client output',
                  default=False,
                  action='store_true')
# TODO(ericgribkoff) Remove this flag once all test environments are verified to
# have access to the alpha compute APIs.
argp.add_argument('--only_stable_gcp_apis',
                  help='Do not use alpha compute APIs. Some tests may be '
                  'incompatible with this option (gRPC health checks are '
                  'currently alpha and required for simulating server failure',
                  default=False,
                  action='store_true')
args = argp.parse_args()
if args.verbose:
    logger.setLevel(logging.DEBUG)
# Hosts running the test clients; empty means a single local client.
CLIENT_HOSTS = []
if args.client_hosts:
    CLIENT_HOSTS = args.client_hosts.split(',')
# Each of the config propagation in the control plane should finish within 600s.
# Otherwise, it indicates a bug in the control plane. The config propagation
# includes all kinds of traffic config update, like updating urlMap, creating
# the resources for the first time, updating BackendService, and changing the
# status of endpoints in BackendService.
_WAIT_FOR_URL_MAP_PATCH_SEC = 600
# In general, fetching load balancing stats only takes ~10s. However, slow
# config update could lead to empty EDS or similar symptoms causing the
# connection to hang for a long period of time. So, we want to extend the stats
# wait time to be the same as urlMap patch time.
_WAIT_FOR_STATS_SEC = _WAIT_FOR_URL_MAP_PATCH_SEC
_DEFAULT_SERVICE_PORT = 80
_WAIT_FOR_BACKEND_SEC = args.wait_for_backend_sec
_WAIT_FOR_OPERATION_SEC = 1200
_INSTANCE_GROUP_SIZE = args.instance_group_size
_NUM_TEST_RPCS = 10 * args.qps
_CONNECTION_TIMEOUT_SEC = 60
_GCP_API_RETRIES = 5
_BOOTSTRAP_TEMPLATE = """
{{
"node": {{
"id": "{node_id}",
"metadata": {{
"TRAFFICDIRECTOR_NETWORK_NAME": "%s",
"com.googleapis.trafficdirector.config_time_trace": "TRUE"
}},
"locality": {{
"zone": "%s"
}}
}},
"xds_servers": [{{
"server_uri": "%s",
"channel_creds": [
{{
"type": "google_default",
"config": {{}}
}}
],
"server_features": {server_features}
}}]
}}""" % (args.network.split('/')[-1], args.zone, args.xds_server)
# TODO(ericgribkoff) Add change_backend_service to this list once TD no longer
# sends an update with no localities when adding the MIG to the backend service
# can race with the URL map patch.
_TESTS_TO_FAIL_ON_RPC_FAILURE = ['ping_pong', 'round_robin']
# Tests that run UnaryCall and EmptyCall.
_TESTS_TO_RUN_MULTIPLE_RPCS = ['path_matching', 'header_matching']
# Tests that make UnaryCall with test metadata.
_TESTS_TO_SEND_METADATA = ['header_matching']
_TEST_METADATA_KEY = 'xds_md'
_TEST_METADATA_VALUE_UNARY = 'unary_yranu'
_TEST_METADATA_VALUE_EMPTY = 'empty_ytpme'
# Extra RPC metadata whose value is a number, sent with UnaryCall only.
_TEST_METADATA_NUMERIC_KEY = 'xds_md_numeric'
_TEST_METADATA_NUMERIC_VALUE = '159'
_PATH_MATCHER_NAME = 'path-matcher'
_BASE_TEMPLATE_NAME = 'test-template'
_BASE_INSTANCE_GROUP_NAME = 'test-ig'
_BASE_HEALTH_CHECK_NAME = 'test-hc'
_BASE_FIREWALL_RULE_NAME = 'test-fw-rule'
_BASE_BACKEND_SERVICE_NAME = 'test-backend-service'
_BASE_URL_MAP_NAME = 'test-map'
_BASE_SERVICE_HOST = 'grpc-test'
_BASE_TARGET_PROXY_NAME = 'test-target-proxy'
_BASE_FORWARDING_RULE_NAME = 'test-forwarding-rule'
_TEST_LOG_BASE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'../../reports')
_SPONGE_LOG_NAME = 'sponge_log.log'
_SPONGE_XML_NAME = 'sponge_log.xml'
def get_client_stats(num_rpcs, timeout_sec):
    """Fetch LoadBalancerStats from the test client's stats service.

    Args:
        num_rpcs: Number of RPCs the client should sample for the stats.
        timeout_sec: Server-side stats collection timeout; the RPC deadline
            adds _CONNECTION_TIMEOUT_SEC on top of this.

    Returns:
        The LoadBalancerStatsResponse from the queried client.
    """
    if CLIENT_HOSTS:
        hosts = CLIENT_HOSTS
    else:
        hosts = ['localhost']
    # NOTE(review): the body returns on the first iteration, so only the
    # first host is ever queried.
    for host in hosts:
        with grpc.insecure_channel('%s:%d' %
                                   (host, args.stats_port)) as channel:
            stub = test_pb2_grpc.LoadBalancerStatsServiceStub(channel)
            request = messages_pb2.LoadBalancerStatsRequest()
            request.num_rpcs = num_rpcs
            request.timeout_sec = timeout_sec
            rpc_timeout = timeout_sec + _CONNECTION_TIMEOUT_SEC
            logger.debug('Invoking GetClientStats RPC to %s:%d:', host,
                         args.stats_port)
            response = stub.GetClientStats(request,
                                           wait_for_ready=True,
                                           timeout=rpc_timeout)
            logger.debug('Invoked GetClientStats RPC to %s: %s', host,
                         json_format.MessageToJson(response))
            return response
def get_client_accumulated_stats():
    """Fetch LoadBalancerAccumulatedStats from the test client.

    Returns:
        The LoadBalancerAccumulatedStatsResponse from the queried client.
    """
    if CLIENT_HOSTS:
        hosts = CLIENT_HOSTS
    else:
        hosts = ['localhost']
    # NOTE(review): the body returns on the first iteration, so only the
    # first host is ever queried.
    for host in hosts:
        with grpc.insecure_channel('%s:%d' %
                                   (host, args.stats_port)) as channel:
            stub = test_pb2_grpc.LoadBalancerStatsServiceStub(channel)
            request = messages_pb2.LoadBalancerAccumulatedStatsRequest()
            logger.debug('Invoking GetClientAccumulatedStats RPC to %s:%d:',
                         host, args.stats_port)
            response = stub.GetClientAccumulatedStats(
                request, wait_for_ready=True, timeout=_CONNECTION_TIMEOUT_SEC)
            logger.debug('Invoked GetClientAccumulatedStats RPC to %s: %s',
                         host, response)
            return response
def get_client_xds_config_dump():
    """Fetch the client's xDS config dump via the CSDS service.

    Returns:
        The single ClientConfig converted to a JSON-like dict (with proto
        field names preserved), or None if the response did not contain
        exactly one ClientConfig.
    """
    if CLIENT_HOSTS:
        hosts = CLIENT_HOSTS
    else:
        hosts = ['localhost']
    # NOTE(review): the body returns on the first iteration, so only the
    # first host is ever queried.
    for host in hosts:
        server_address = '%s:%d' % (host, args.stats_port)
        with grpc.insecure_channel(server_address) as channel:
            stub = csds_pb2_grpc.ClientStatusDiscoveryServiceStub(channel)
            logger.debug('Fetching xDS config dump from %s', server_address)
            response = stub.FetchClientStatus(csds_pb2.ClientStatusRequest(),
                                              wait_for_ready=True,
                                              timeout=_CONNECTION_TIMEOUT_SEC)
            logger.debug('Fetched xDS config dump from %s', server_address)
            if len(response.config) != 1:
                logger.error('Unexpected number of ClientConfigs %d: %s',
                             len(response.config), response)
                return None
            else:
                # Converting the ClientStatusResponse into JSON, because many
                # fields are packed in google.protobuf.Any. It will require many
                # duplicated code to unpack proto message and inspect values.
                return json_format.MessageToDict(
                    response.config[0], preserving_proto_field_name=True)
def configure_client(rpc_types, metadata=(), timeout_sec=None):
    """Reconfigure the test client's RPC behavior at runtime.

    Sends an XdsUpdateClientConfigureService.Configure RPC to every
    configured client host (or localhost) on args.stats_port.

    Args:
        rpc_types: Iterable of ClientConfigureRequest.RpcType values the
            client should send from now on.
        metadata: Iterable of (rpc_type, key, value) tuples to attach to the
            matching RPC type. Defaults to an immutable empty tuple; the
            original mutable-list default ([]) is a Python anti-pattern.
        timeout_sec: Optional per-RPC timeout to configure on the client.
    """
    if CLIENT_HOSTS:
        hosts = CLIENT_HOSTS
    else:
        hosts = ['localhost']
    for host in hosts:
        with grpc.insecure_channel('%s:%d' %
                                   (host, args.stats_port)) as channel:
            stub = test_pb2_grpc.XdsUpdateClientConfigureServiceStub(channel)
            request = messages_pb2.ClientConfigureRequest()
            request.types.extend(rpc_types)
            for rpc_type, md_key, md_value in metadata:
                md = request.metadata.add()
                md.type = rpc_type
                md.key = md_key
                md.value = md_value
            if timeout_sec:
                request.timeout_sec = timeout_sec
            logger.debug(
                'Invoking XdsUpdateClientConfigureService RPC to %s:%d: %s',
                host, args.stats_port, request)
            stub.Configure(request,
                           wait_for_ready=True,
                           timeout=_CONNECTION_TIMEOUT_SEC)
            logger.debug('Invoked XdsUpdateClientConfigureService RPC to %s',
                         host)
class RpcDistributionError(Exception):
    """Raised when RPCs are not distributed across backends as expected."""
    pass
def _verify_rpcs_to_given_backends(backends, timeout_sec, num_rpcs,
                                   allow_failures):
    """Poll client stats until RPCs go to exactly the given backends.

    A sample passes when every backend in `backends` received load, no other
    backend did, and (unless allow_failures) no RPCs failed.

    Args:
        backends: Backend instance names expected to receive all the load.
        timeout_sec: Maximum seconds to keep polling.
        num_rpcs: Number of RPCs sampled per stats request.
        allow_failures: If False, any failed RPC in a sample fails it.

    Raises:
        RpcDistributionError: with the last sample's error if no sample
            passed within timeout_sec.
    """
    start_time = time.time()
    error_msg = None
    logger.debug('Waiting for %d sec until backends %s receive load' %
                 (timeout_sec, backends))
    while time.time() - start_time <= timeout_sec:
        error_msg = None
        stats = get_client_stats(num_rpcs, timeout_sec)
        rpcs_by_peer = stats.rpcs_by_peer
        for backend in backends:
            if backend not in rpcs_by_peer:
                error_msg = 'Backend %s did not receive load' % backend
                break
        if not error_msg and len(rpcs_by_peer) > len(backends):
            error_msg = 'Unexpected backend received load: %s' % rpcs_by_peer
        if not allow_failures and stats.num_failures > 0:
            error_msg = '%d RPCs failed' % stats.num_failures
        if not error_msg:
            return
    raise RpcDistributionError(error_msg)
def wait_until_all_rpcs_go_to_given_backends_or_fail(backends,
                                                     timeout_sec,
                                                     num_rpcs=_NUM_TEST_RPCS):
    """Wait until load goes only to `backends`, tolerating failed RPCs."""
    _verify_rpcs_to_given_backends(backends,
                                   timeout_sec,
                                   num_rpcs,
                                   allow_failures=True)
def wait_until_all_rpcs_go_to_given_backends(backends,
                                             timeout_sec,
                                             num_rpcs=_NUM_TEST_RPCS):
    """Wait until load goes only to `backends`; any failed RPC is an error."""
    _verify_rpcs_to_given_backends(backends,
                                   timeout_sec,
                                   num_rpcs,
                                   allow_failures=False)
def wait_until_no_rpcs_go_to_given_backends(backends, timeout_sec):
    """Poll client stats until none of `backends` receives any RPCs.

    Args:
        backends: Backend instance names that must not receive load.
        timeout_sec: Maximum seconds to keep polling.

    Raises:
        Exception: if every sample within timeout_sec showed one of the
            given backends receiving load; the message names the offending
            backend from the last sample.
    """
    start_time = time.time()
    error_msg = None
    while time.time() - start_time <= timeout_sec:
        stats = get_client_stats(_NUM_TEST_RPCS, timeout_sec)
        error_msg = None
        rpcs_by_peer = stats.rpcs_by_peer
        for backend in backends:
            if backend in rpcs_by_peer:
                error_msg = 'Unexpected backend %s receives load' % backend
                break
        if not error_msg:
            return
    # Include the diagnostic detail that the original version discarded.
    raise Exception(
        'Unexpected RPCs going to given backends: %s' % error_msg)
def wait_until_rpcs_in_flight(rpc_type, timeout_sec, num_rpcs, threshold):
    '''Block until the test client reaches the state with the given number
    of RPCs being outstanding stably.

    Args:
      rpc_type: A string indicating the RPC method to check for. Either
        'UnaryCall' or 'EmptyCall'.
      timeout_sec: Maximum number of seconds to wait until the desired state
        is reached.
      num_rpcs: Expected number of RPCs to be in-flight.
      threshold: Number within [0,100], the tolerable percentage by which
        the actual number of RPCs in-flight can differ from the expected number.

    Raises:
      ValueError: if threshold is outside [0, 100].
      Exception: if the in-flight count never stabilizes within tolerance.
    '''
    if threshold < 0 or threshold > 100:
        raise ValueError('Value error: Threshold should be between 0 to 100')
    threshold_fraction = threshold / 100.0
    start_time = time.time()
    error_msg = None
    logger.debug(
        'Waiting for %d sec until %d %s RPCs (with %d%% tolerance) in-flight' %
        (timeout_sec, num_rpcs, rpc_type, threshold))
    while time.time() - start_time <= timeout_sec:
        error_msg = _check_rpcs_in_flight(rpc_type, num_rpcs, threshold,
                                          threshold_fraction)
        if error_msg:
            logger.debug('Progress: %s', error_msg)
            time.sleep(2)
        else:
            break
    # Ensure the number of outstanding RPCs is stable.
    if not error_msg:
        time.sleep(5)
        error_msg = _check_rpcs_in_flight(rpc_type, num_rpcs, threshold,
                                          threshold_fraction)
    if error_msg:
        raise Exception("Wrong number of %s RPCs in-flight: %s" %
                        (rpc_type, error_msg))
def _check_rpcs_in_flight(rpc_type, num_rpcs, threshold, threshold_fraction):
    """Compare the client's current in-flight RPC count against num_rpcs.

    Returns an error string when the observed in-flight count for rpc_type
    falls outside num_rpcs +/- threshold percent, or None when it is within
    tolerance.
    """
    stats = get_client_accumulated_stats()
    started = stats.num_rpcs_started_by_method[rpc_type]
    succeeded = stats.num_rpcs_succeeded_by_method[rpc_type]
    failed = stats.num_rpcs_failed_by_method[rpc_type]
    # In-flight = started but neither finished successfully nor failed.
    outstanding = started - succeeded - failed
    if outstanding < num_rpcs * (1 - threshold_fraction):
        return ('actual(%d) < expected(%d - %d%%)' %
                (outstanding, num_rpcs, threshold))
    if outstanding > num_rpcs * (1 + threshold_fraction):
        return ('actual(%d) > expected(%d + %d%%)' %
                (outstanding, num_rpcs, threshold))
    return None
def compare_distributions(actual_distribution, expected_distribution,
                          threshold):
    """Check that two distributions agree within a percentage tolerance.

    Args:
      actual_distribution: A list of floats, contains the actual distribution.
      expected_distribution: A list of floats, contains the expected distribution.
      threshold: Number within [0,100], the threshold percentage by which the
        actual distribution can differ from the expected distribution.

    Returns:
      True when every actual value lies within threshold percent of the
      corresponding expected value.

    Raises:
      ValueError: if threshold is not within [0,100].
      Exception: with a detailed message on any size or value mismatch.
    """
    if len(expected_distribution) != len(actual_distribution):
        raise Exception(
            'Error: expected and actual distributions have different size (%d vs %d)'
            % (len(expected_distribution), len(actual_distribution)))
    if threshold < 0 or threshold > 100:
        raise ValueError('Value error: Threshold should be between 0 to 100')
    tolerance = threshold / 100.0
    for want, got in zip(expected_distribution, actual_distribution):
        if got < want * (1 - tolerance):
            raise Exception("actual(%f) < expected(%f-%d%%)" %
                            (got, want, threshold))
        if got > want * (1 + tolerance):
            raise Exception("actual(%f) > expected(%f+%d%%)" %
                            (got, want, threshold))
    return True
def compare_expected_instances(stats, expected_instances):
    """Check whether stats report exactly the expected peers per RPC type.

    Args:
      stats: LoadBalancerStatsResponse reported by interop client.
      expected_instances: a dict with key as the RPC type (string), value as
        the expected backend instances (list of strings).

    Returns:
      True when, for every RPC type, the set of peers that received RPCs
      equals the expected set; False otherwise.
    """
    for rpc_type, expected_peers in list(expected_instances.items()):
        by_method = stats.rpcs_by_method[rpc_type]
        rpcs_by_peer = by_method.rpcs_by_peer if by_method else None
        logger.debug('rpc: %s, by_peer: %s', rpc_type, rpcs_by_peer)
        actual_peers = list(rpcs_by_peer.keys())
        if set(actual_peers) != set(expected_peers):
            logger.info('unexpected peers for %s, got %s, want %s', rpc_type,
                        actual_peers, expected_peers)
            return False
    return True
def test_backends_restart(gcp, backend_service, instance_group):
    """Shrink the instance group to zero, restore it, and verify that
    traffic returns to the (re-created) backends.

    The group is resized back to its original size even if the test fails.
    """
    logger.info('Running test_backends_restart')
    instance_names = get_instance_names(gcp, instance_group)
    num_instances = len(instance_names)
    start_time = time.time()
    wait_until_all_rpcs_go_to_given_backends(instance_names,
                                             _WAIT_FOR_STATS_SEC)
    try:
        resize_instance_group(gcp, instance_group, 0)
        wait_until_all_rpcs_go_to_given_backends_or_fail([],
                                                         _WAIT_FOR_BACKEND_SEC)
    finally:
        resize_instance_group(gcp, instance_group, num_instances)
        wait_for_healthy_backends(gcp, backend_service, instance_group)
        # Resizing creates fresh VMs, so the instance names must be re-read.
        new_instance_names = get_instance_names(gcp, instance_group)
        wait_until_all_rpcs_go_to_given_backends(new_instance_names,
                                                 _WAIT_FOR_BACKEND_SEC)
def test_change_backend_service(gcp, original_backend_service, instance_group,
                                alternate_backend_service,
                                same_zone_instance_group):
    """Repoint the URL map from the original backend service to an alternate
    one and verify all traffic moves to the alternate backends.

    On success (or when --halt_after_fail is unset), the URL map is restored
    and the alternate backend service is emptied.
    """
    logger.info('Running test_change_backend_service')
    original_backend_instances = get_instance_names(gcp, instance_group)
    alternate_backend_instances = get_instance_names(gcp,
                                                     same_zone_instance_group)
    patch_backend_service(gcp, alternate_backend_service,
                          [same_zone_instance_group])
    wait_for_healthy_backends(gcp, original_backend_service, instance_group)
    wait_for_healthy_backends(gcp, alternate_backend_service,
                              same_zone_instance_group)
    wait_until_all_rpcs_go_to_given_backends(original_backend_instances,
                                             _WAIT_FOR_STATS_SEC)
    passed = True
    try:
        patch_url_map_backend_service(gcp, alternate_backend_service)
        wait_until_all_rpcs_go_to_given_backends(alternate_backend_instances,
                                                 _WAIT_FOR_URL_MAP_PATCH_SEC)
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_url_map_backend_service(gcp, original_backend_service)
            patch_backend_service(gcp, alternate_backend_service, [])
def test_gentle_failover(gcp,
                         backend_service,
                         primary_instance_group,
                         secondary_instance_group,
                         swapped_primary_and_secondary=False):
    """Stop serving on most primary instances and verify traffic spills over
    to the remaining primary instance plus the secondary locality.

    If the initial distribution suggests the localities are swapped (TD
    considers `secondary_instance_group` primary), the test retries once
    with the groups exchanged.
    """
    logger.info('Running test_gentle_failover')
    num_primary_instances = len(get_instance_names(gcp, primary_instance_group))
    min_instances_for_gentle_failover = 3  # Need >50% failure to start failover
    passed = True
    try:
        if num_primary_instances < min_instances_for_gentle_failover:
            resize_instance_group(gcp, primary_instance_group,
                                  min_instances_for_gentle_failover)
        patch_backend_service(
            gcp, backend_service,
            [primary_instance_group, secondary_instance_group])
        primary_instance_names = get_instance_names(gcp, primary_instance_group)
        secondary_instance_names = get_instance_names(gcp,
                                                      secondary_instance_group)
        wait_for_healthy_backends(gcp, backend_service, primary_instance_group)
        wait_for_healthy_backends(gcp, backend_service,
                                  secondary_instance_group)
        wait_until_all_rpcs_go_to_given_backends(primary_instance_names,
                                                 _WAIT_FOR_STATS_SEC)
        # Stop serving on all primary instances but the last one.
        instances_to_stop = primary_instance_names[:-1]
        remaining_instances = primary_instance_names[-1:]
        try:
            set_serving_status(instances_to_stop,
                               gcp.service_port,
                               serving=False)
            wait_until_all_rpcs_go_to_given_backends(
                remaining_instances + secondary_instance_names,
                _WAIT_FOR_BACKEND_SEC)
        finally:
            set_serving_status(primary_instance_names,
                               gcp.service_port,
                               serving=True)
    except RpcDistributionError as e:
        if not swapped_primary_and_secondary and is_primary_instance_group(
                gcp, secondary_instance_group):
            # Swap expectation of primary and secondary instance groups.
            test_gentle_failover(gcp,
                                 backend_service,
                                 secondary_instance_group,
                                 primary_instance_group,
                                 swapped_primary_and_secondary=True)
        else:
            passed = False
            raise e
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_backend_service(gcp, backend_service,
                                  [primary_instance_group])
            resize_instance_group(gcp, primary_instance_group,
                                  num_primary_instances)
            instance_names = get_instance_names(gcp, primary_instance_group)
            wait_until_all_rpcs_go_to_given_backends(instance_names,
                                                     _WAIT_FOR_BACKEND_SEC)
def test_load_report_based_failover(gcp, backend_service,
                                    primary_instance_group,
                                    secondary_instance_group):
    """Drive failover via RATE balancing-mode capacity instead of health.

    With the primary locality capped below the client QPS the secondary
    locality must absorb the overflow; with the cap above the client QPS
    all traffic must return to the primary locality.
    """
    logger.info('Running test_load_report_based_failover')
    passed = True
    try:
        patch_backend_service(
            gcp, backend_service,
            [primary_instance_group, secondary_instance_group])
        primary_instance_names = get_instance_names(gcp, primary_instance_group)
        secondary_instance_names = get_instance_names(gcp,
                                                      secondary_instance_group)
        wait_for_healthy_backends(gcp, backend_service, primary_instance_group)
        wait_for_healthy_backends(gcp, backend_service,
                                  secondary_instance_group)
        wait_until_all_rpcs_go_to_given_backends(primary_instance_names,
                                                 _WAIT_FOR_STATS_SEC)
        # Set primary locality's balance mode to RATE, and RPS to 20% of the
        # client's QPS. The secondary locality will be used.
        max_rate = int(args.qps * 1 / 5)
        logger.info('Patching backend service to RATE with %d max_rate',
                    max_rate)
        patch_backend_service(
            gcp,
            backend_service, [primary_instance_group, secondary_instance_group],
            balancing_mode='RATE',
            max_rate=max_rate)
        wait_until_all_rpcs_go_to_given_backends(
            primary_instance_names + secondary_instance_names,
            _WAIT_FOR_BACKEND_SEC)
        # Set primary locality's balance mode to RATE, and RPS to 120% of the
        # client's QPS. Only the primary locality will be used.
        max_rate = int(args.qps * 6 / 5)
        logger.info('Patching backend service to RATE with %d max_rate',
                    max_rate)
        patch_backend_service(
            gcp,
            backend_service, [primary_instance_group, secondary_instance_group],
            balancing_mode='RATE',
            max_rate=max_rate)
        wait_until_all_rpcs_go_to_given_backends(primary_instance_names,
                                                 _WAIT_FOR_BACKEND_SEC)
        logger.info("success")
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_backend_service(gcp, backend_service,
                                  [primary_instance_group])
            instance_names = get_instance_names(gcp, primary_instance_group)
            wait_until_all_rpcs_go_to_given_backends(instance_names,
                                                     _WAIT_FOR_BACKEND_SEC)
def test_ping_pong(gcp, backend_service, instance_group):
    """Smoke test: once the backends are healthy, all RPCs must reach the
    instance group's backends with zero failures."""
    logger.info('Running test_ping_pong')
    wait_for_healthy_backends(gcp, backend_service, instance_group)
    instance_names = get_instance_names(gcp, instance_group)
    wait_until_all_rpcs_go_to_given_backends(instance_names,
                                             _WAIT_FOR_STATS_SEC)
def test_remove_instance_group(gcp, backend_service, instance_group,
                               same_zone_instance_group):
    """Verify that removing an instance group from the backend service
    drains traffic onto the remaining group.

    Attaches both groups (RATE balancing), determines which group(s)
    currently receive traffic, then patches the service down to a single
    group and waits for all RPCs to land there. The finally block restores
    the original single-group config unless the test failed and
    --halt_after_fail was requested.
    """
    logger.info('Running test_remove_instance_group')
    passed = True
    try:
        patch_backend_service(gcp,
                              backend_service,
                              [instance_group, same_zone_instance_group],
                              balancing_mode='RATE')
        wait_for_healthy_backends(gcp, backend_service, instance_group)
        wait_for_healthy_backends(gcp, backend_service,
                                  same_zone_instance_group)
        instance_names = get_instance_names(gcp, instance_group)
        same_zone_instance_names = get_instance_names(gcp,
                                                      same_zone_instance_group)
        try:
            # Expected case: traffic reaches both groups; removing
            # instance_group should leave same_zone_instance_group serving.
            wait_until_all_rpcs_go_to_given_backends(
                instance_names + same_zone_instance_names,
                _WAIT_FOR_OPERATION_SEC)
            remaining_instance_group = same_zone_instance_group
            remaining_instance_names = same_zone_instance_names
        except RpcDistributionError as e:
            # If connected to TD in a different zone, we may route traffic to
            # only one instance group. Determine which group that is to continue
            # with the remainder of the test case.
            try:
                wait_until_all_rpcs_go_to_given_backends(
                    instance_names, _WAIT_FOR_STATS_SEC)
                remaining_instance_group = same_zone_instance_group
                remaining_instance_names = same_zone_instance_names
            except RpcDistributionError as e:
                wait_until_all_rpcs_go_to_given_backends(
                    same_zone_instance_names, _WAIT_FOR_STATS_SEC)
                remaining_instance_group = instance_group
                remaining_instance_names = instance_names
        # Drop down to the single remaining group and confirm all traffic
        # moves to it.
        patch_backend_service(gcp,
                              backend_service, [remaining_instance_group],
                              balancing_mode='RATE')
        wait_until_all_rpcs_go_to_given_backends(remaining_instance_names,
                                                 _WAIT_FOR_BACKEND_SEC)
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_backend_service(gcp, backend_service, [instance_group])
            wait_until_all_rpcs_go_to_given_backends(instance_names,
                                                     _WAIT_FOR_BACKEND_SEC)
def test_round_robin(gcp, backend_service, instance_group):
    """Verify round-robin distribution: each instance must receive the
    total RPC count divided evenly, within `threshold` RPCs.

    Raises on an uneven distribution, or after `max_attempts` samples in
    which some RPCs failed outright.
    """
    logger.info('Running test_round_robin')
    wait_for_healthy_backends(gcp, backend_service, instance_group)
    instance_names = get_instance_names(gcp, instance_group)
    threshold = 1  # max allowed deviation (in RPCs) from an even split
    wait_until_all_rpcs_go_to_given_backends(instance_names,
                                             _WAIT_FOR_STATS_SEC)
    # TODO(ericgribkoff) Delayed config propagation from earlier tests
    # may result in briefly receiving an empty EDS update, resulting in failed
    # RPCs. Retry distribution validation if this occurs; long-term fix is
    # creating new backend resources for each individual test case.
    # Each attempt takes 10 seconds. Config propagation can take several
    # minutes.
    max_attempts = 40
    for i in range(max_attempts):
        stats = get_client_stats(_NUM_TEST_RPCS, _WAIT_FOR_STATS_SEC)
        requests_received = [stats.rpcs_by_peer[x] for x in stats.rpcs_by_peer]
        total_requests_received = sum(requests_received)
        if total_requests_received != _NUM_TEST_RPCS:
            # Some RPCs failed; re-sample rather than validating a partial run.
            logger.info('Unexpected RPC failures, retrying: %s', stats)
            continue
        expected_requests = total_requests_received / len(instance_names)
        for instance in instance_names:
            if abs(stats.rpcs_by_peer[instance] -
                   expected_requests) > threshold:
                raise Exception(
                    'RPC peer distribution differs from expected by more than %d '
                    'for instance %s (%s)' % (threshold, instance, stats))
        return
    raise Exception('RPC failures persisted through %d retries' % max_attempts)
def test_secondary_locality_gets_no_requests_on_partial_primary_failure(
        gcp,
        backend_service,
        primary_instance_group,
        secondary_instance_group,
        swapped_primary_and_secondary=False):
    """With one primary instance marked not-serving, traffic must remain
    within the primary locality (only the remaining primary instances),
    never spilling into the secondary locality.

    If the distribution check fails and TD actually treats the "secondary"
    group as primary, the test recurses once with the roles swapped
    (guarded by swapped_primary_and_secondary to avoid infinite recursion).
    """
    logger.info(
        'Running secondary_locality_gets_no_requests_on_partial_primary_failure'
    )
    passed = True
    try:
        patch_backend_service(
            gcp, backend_service,
            [primary_instance_group, secondary_instance_group])
        wait_for_healthy_backends(gcp, backend_service, primary_instance_group)
        wait_for_healthy_backends(gcp, backend_service,
                                  secondary_instance_group)
        primary_instance_names = get_instance_names(gcp, primary_instance_group)
        wait_until_all_rpcs_go_to_given_backends(primary_instance_names,
                                                 _WAIT_FOR_STATS_SEC)
        # Stop serving on exactly one primary instance.
        instances_to_stop = primary_instance_names[:1]
        remaining_instances = primary_instance_names[1:]
        try:
            set_serving_status(instances_to_stop,
                               gcp.service_port,
                               serving=False)
            wait_until_all_rpcs_go_to_given_backends(remaining_instances,
                                                     _WAIT_FOR_BACKEND_SEC)
        finally:
            # Always restore serving status, even if the wait failed.
            set_serving_status(primary_instance_names,
                               gcp.service_port,
                               serving=True)
    except RpcDistributionError as e:
        if not swapped_primary_and_secondary and is_primary_instance_group(
                gcp, secondary_instance_group):
            # Swap expectation of primary and secondary instance groups.
            test_secondary_locality_gets_no_requests_on_partial_primary_failure(
                gcp,
                backend_service,
                secondary_instance_group,
                primary_instance_group,
                swapped_primary_and_secondary=True)
        else:
            passed = False
            raise e
    finally:
        if passed or not args.halt_after_fail:
            patch_backend_service(gcp, backend_service,
                                  [primary_instance_group])
def test_secondary_locality_gets_requests_on_primary_failure(
        gcp,
        backend_service,
        primary_instance_group,
        secondary_instance_group,
        swapped_primary_and_secondary=False):
    """With the entire primary locality marked not-serving, traffic must
    fail over to the secondary locality's instances.

    Mirrors the partial-failure test above: if TD actually treats the
    "secondary" group as primary, recurse once with the roles swapped
    (guarded by swapped_primary_and_secondary).
    """
    logger.info('Running secondary_locality_gets_requests_on_primary_failure')
    passed = True
    try:
        patch_backend_service(
            gcp, backend_service,
            [primary_instance_group, secondary_instance_group])
        wait_for_healthy_backends(gcp, backend_service, primary_instance_group)
        wait_for_healthy_backends(gcp, backend_service,
                                  secondary_instance_group)
        primary_instance_names = get_instance_names(gcp, primary_instance_group)
        secondary_instance_names = get_instance_names(gcp,
                                                      secondary_instance_group)
        wait_until_all_rpcs_go_to_given_backends(primary_instance_names,
                                                 _WAIT_FOR_STATS_SEC)
        try:
            # Take down all primary instances; expect failover to secondary.
            set_serving_status(primary_instance_names,
                               gcp.service_port,
                               serving=False)
            wait_until_all_rpcs_go_to_given_backends(secondary_instance_names,
                                                     _WAIT_FOR_BACKEND_SEC)
        finally:
            # Always restore serving status, even if the wait failed.
            set_serving_status(primary_instance_names,
                               gcp.service_port,
                               serving=True)
    except RpcDistributionError as e:
        if not swapped_primary_and_secondary and is_primary_instance_group(
                gcp, secondary_instance_group):
            # Swap expectation of primary and secondary instance groups.
            test_secondary_locality_gets_requests_on_primary_failure(
                gcp,
                backend_service,
                secondary_instance_group,
                primary_instance_group,
                swapped_primary_and_secondary=True)
        else:
            passed = False
            raise e
    finally:
        if passed or not args.halt_after_fail:
            patch_backend_service(gcp, backend_service,
                                  [primary_instance_group])
def prepare_services_for_urlmap_tests(gcp, original_backend_service,
                                      instance_group, alternate_backend_service,
                                      same_zone_instance_group):
    """Bring both backend services to the standard starting state for
    URL-map tests: original backends healthy and receiving all traffic,
    alternate backends healthy but idle.

    Returns:
      A pair (original, alternate) of backend instance-name lists.
    """
    logger.info('waiting for original backends to become healthy')
    wait_for_healthy_backends(gcp, original_backend_service, instance_group)

    # Attach the alternate service to its group and wait for health.
    patch_backend_service(gcp, alternate_backend_service,
                          [same_zone_instance_group])
    logger.info('waiting for alternate to become healthy')
    wait_for_healthy_backends(gcp, alternate_backend_service,
                              same_zone_instance_group)

    orig_instances = get_instance_names(gcp, instance_group)
    logger.info('original backends instances: %s', orig_instances)

    alt_instances = get_instance_names(gcp, same_zone_instance_group)
    logger.info('alternate backends instances: %s', alt_instances)

    # Start with all traffic going to original_backend_service.
    logger.info('waiting for traffic to all go to original backends')
    wait_until_all_rpcs_go_to_given_backends(orig_instances,
                                             _WAIT_FOR_STATS_SEC)
    return orig_instances, alt_instances
def test_metadata_filter(gcp, original_backend_service, instance_group,
                         alternate_backend_service, same_zone_instance_group):
    """Verify route-rule metadataFilters (MATCH_ALL / MATCH_ANY) against the
    client's xDS node metadata.

    Builds match_labels from the node metadata in the bootstrap file
    (global `bootstrap_path`), plus a deliberately non-matching label set,
    then for each rule set checks that traffic moves entirely from the
    original backends to the alternate backends.
    """
    logger.info("Running test_metadata_filter")
    wait_for_healthy_backends(gcp, original_backend_service, instance_group)
    original_backend_instances = get_instance_names(gcp, instance_group)
    alternate_backend_instances = get_instance_names(gcp,
                                                     same_zone_instance_group)
    patch_backend_service(gcp, alternate_backend_service,
                          [same_zone_instance_group])
    wait_for_healthy_backends(gcp, alternate_backend_service,
                              same_zone_instance_group)
    passed = True
    try:
        # Read the node metadata the client actually advertises to TD.
        with open(bootstrap_path) as f:
            md = json.load(f)['node']['metadata']
            match_labels = []
            for k, v in list(md.items()):
                match_labels.append({'name': k, 'value': v})
        not_match_labels = [{'name': 'fake', 'value': 'fail'}]
        # Each entry routes '/' to the alternate service iff the filters
        # match the client's metadata; the original service rule must lose.
        test_route_rules = [
            # test MATCH_ALL
            [
                {
                    'priority': 0,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ALL',
                            'filterLabels': not_match_labels
                        }]
                    }],
                    'service': original_backend_service.url
                },
                {
                    'priority': 1,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ALL',
                            'filterLabels': match_labels
                        }]
                    }],
                    'service': alternate_backend_service.url
                },
            ],
            # test mixing MATCH_ALL and MATCH_ANY
            # test MATCH_ALL: super set labels won't match
            [
                {
                    'priority': 0,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ALL',
                            'filterLabels': not_match_labels + match_labels
                        }]
                    }],
                    'service': original_backend_service.url
                },
                {
                    'priority': 1,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ANY',
                            'filterLabels': not_match_labels + match_labels
                        }]
                    }],
                    'service': alternate_backend_service.url
                },
            ],
            # test MATCH_ANY
            [
                {
                    'priority': 0,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ANY',
                            'filterLabels': not_match_labels
                        }]
                    }],
                    'service': original_backend_service.url
                },
                {
                    'priority': 1,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ANY',
                            'filterLabels': not_match_labels + match_labels
                        }]
                    }],
                    'service': alternate_backend_service.url
                },
            ],
            # test match multiple route rules
            [
                {
                    'priority': 0,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ANY',
                            'filterLabels': match_labels
                        }]
                    }],
                    'service': alternate_backend_service.url
                },
                {
                    'priority': 1,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ALL',
                            'filterLabels': match_labels
                        }]
                    }],
                    'service': original_backend_service.url
                },
            ]
        ]
        for route_rules in test_route_rules:
            # Re-establish the baseline (all traffic on original) before
            # applying the next rule set.
            wait_until_all_rpcs_go_to_given_backends(original_backend_instances,
                                                     _WAIT_FOR_STATS_SEC)
            patch_url_map_backend_service(gcp,
                                          original_backend_service,
                                          route_rules=route_rules)
            wait_until_no_rpcs_go_to_given_backends(original_backend_instances,
                                                    _WAIT_FOR_STATS_SEC)
            wait_until_all_rpcs_go_to_given_backends(
                alternate_backend_instances, _WAIT_FOR_STATS_SEC)
            patch_url_map_backend_service(gcp, original_backend_service)
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_backend_service(gcp, alternate_backend_service, [])
def test_api_listener(gcp, backend_service, instance_group,
                      alternate_backend_service):
    """Verify that two url-map/target-proxy/forwarding-rule suites can share
    the same host rule hostname.

    Creates a second suite ('2' suffix), deletes the original suite and
    checks traffic keeps flowing through the new one, then removes the host
    rule and checks traffic stops. The finally block deletes the second
    suite, recreates the original resources, and returns the server URI the
    client should use afterwards.
    """
    logger.info("Running api_listener")
    passed = True
    new_config_suffix = ''
    try:
        wait_for_healthy_backends(gcp, backend_service, instance_group)
        backend_instances = get_instance_names(gcp, instance_group)
        wait_until_all_rpcs_go_to_given_backends(backend_instances,
                                                 _WAIT_FOR_STATS_SEC)
        # create a second suite of map+tp+fr with the same host name in host rule
        # and we have to disable proxyless validation because it needs `0.0.0.0`
        # ip address in fr for proxyless and also we violate ip:port uniqueness
        # for test purpose. See https://github.com/grpc/grpc-java/issues/8009
        new_config_suffix = '2'
        create_url_map(gcp, url_map_name + new_config_suffix, backend_service,
                       service_host_name)
        create_target_proxy(gcp, target_proxy_name + new_config_suffix, False)
        if not gcp.service_port:
            # Fixed typo in the error message ('Faied' -> 'Failed').
            raise Exception(
                'Failed to find a valid port for the forwarding rule')
        potential_ip_addresses = []
        max_attempts = 10
        for i in range(max_attempts):
            potential_ip_addresses.append('10.10.10.%d' %
                                          (random.randint(0, 255)))
        create_global_forwarding_rule(gcp,
                                      forwarding_rule_name + new_config_suffix,
                                      [gcp.service_port],
                                      potential_ip_addresses)
        if gcp.service_port != _DEFAULT_SERVICE_PORT:
            patch_url_map_host_rule_with_port(gcp,
                                              url_map_name + new_config_suffix,
                                              backend_service,
                                              service_host_name)
        wait_until_all_rpcs_go_to_given_backends(backend_instances,
                                                 _WAIT_FOR_STATS_SEC)
        # Delete the original suite; traffic should keep flowing through the
        # second suite for the entire verification window.
        delete_global_forwarding_rule(gcp, forwarding_rule_name)
        delete_target_proxy(gcp, target_proxy_name)
        delete_url_map(gcp, url_map_name)
        verify_attempts = int(_WAIT_FOR_URL_MAP_PATCH_SEC / _NUM_TEST_RPCS *
                              args.qps)
        for i in range(verify_attempts):
            wait_until_all_rpcs_go_to_given_backends(backend_instances,
                                                     _WAIT_FOR_STATS_SEC)
        # delete host rule for the original host name
        patch_url_map_backend_service(gcp, alternate_backend_service)
        wait_until_no_rpcs_go_to_given_backends(backend_instances,
                                                _WAIT_FOR_STATS_SEC)
    except Exception:
        passed = False
        raise
    finally:
        # NOTE(review): `return` inside finally suppresses any in-flight
        # exception whenever this restore branch runs — confirm intended.
        if passed or not args.halt_after_fail:
            delete_global_forwarding_rule(
                gcp, forwarding_rule_name + new_config_suffix)
            delete_target_proxy(gcp, target_proxy_name + new_config_suffix)
            delete_url_map(gcp, url_map_name + new_config_suffix)
            create_url_map(gcp, url_map_name, backend_service,
                           service_host_name)
            create_target_proxy(gcp, target_proxy_name)
            create_global_forwarding_rule(gcp, forwarding_rule_name,
                                          potential_service_ports)
            if gcp.service_port != _DEFAULT_SERVICE_PORT:
                patch_url_map_host_rule_with_port(gcp, url_map_name,
                                                  backend_service,
                                                  service_host_name)
                server_uri = service_host_name + ':' + str(gcp.service_port)
            else:
                server_uri = service_host_name
            return server_uri
def test_forwarding_rule_port_match(gcp, backend_service, instance_group):
    """Verify the client only uses a forwarding rule whose port matches its
    target port: after recreating the rule on every default-range port
    EXCEPT the serving port, no RPCs should reach the backends.

    Returns the server URI the client should use after the original
    forwarding rule is restored in the finally block.
    """
    logger.info("Running test_forwarding_rule_port_match")
    passed = True
    try:
        wait_for_healthy_backends(gcp, backend_service, instance_group)
        backend_instances = get_instance_names(gcp, instance_group)
        wait_until_all_rpcs_go_to_given_backends(backend_instances,
                                                 _WAIT_FOR_STATS_SEC)
        # Recreate the rule on all ports except the one actually served.
        delete_global_forwarding_rule(gcp)
        create_global_forwarding_rule(gcp, forwarding_rule_name, [
            x for x in parse_port_range(_DEFAULT_PORT_RANGE)
            if x != gcp.service_port
        ])
        wait_until_no_rpcs_go_to_given_backends(backend_instances,
                                                _WAIT_FOR_STATS_SEC)
    except Exception:
        passed = False
        raise
    finally:
        # NOTE(review): `return` inside finally suppresses any in-flight
        # exception whenever this restore branch runs — confirm intended.
        if passed or not args.halt_after_fail:
            delete_global_forwarding_rule(gcp)
            create_global_forwarding_rule(gcp, forwarding_rule_name,
                                          potential_service_ports)
            if gcp.service_port != _DEFAULT_SERVICE_PORT:
                patch_url_map_host_rule_with_port(gcp, url_map_name,
                                                  backend_service,
                                                  service_host_name)
                server_uri = service_host_name + ':' + str(gcp.service_port)
            else:
                server_uri = service_host_name
            return server_uri
def test_forwarding_rule_default_port(gcp, backend_service, instance_group):
    """Verify default-port semantics of forwarding rules and url-map host
    rules when the client's target URI carries no port.

    Checks that a port-80 rule with a portless host rule routes traffic,
    while a port in the url-map host rule (with no port in the client URI)
    does not. The finally block recreates the original url-map, target
    proxy and forwarding rule, and returns the server URI the client
    should use afterwards.
    """
    logger.info("Running test_forwarding_rule_default_port")
    passed = True
    try:
        wait_for_healthy_backends(gcp, backend_service, instance_group)
        backend_instances = get_instance_names(gcp, instance_group)
        if gcp.service_port == _DEFAULT_SERVICE_PORT:
            wait_until_all_rpcs_go_to_given_backends(backend_instances,
                                                     _WAIT_FOR_STATS_SEC)
            # Rule covers the whole default range but host rule pins a port
            # the client does not send: expect no traffic.
            delete_global_forwarding_rule(gcp)
            create_global_forwarding_rule(gcp, forwarding_rule_name,
                                          parse_port_range(_DEFAULT_PORT_RANGE))
            patch_url_map_host_rule_with_port(gcp, url_map_name,
                                              backend_service,
                                              service_host_name)
            wait_until_no_rpcs_go_to_given_backends(backend_instances,
                                                    _WAIT_FOR_STATS_SEC)
        # expect success when no port in client request service uri, and no port in url-map
        delete_global_forwarding_rule(gcp)
        delete_target_proxy(gcp)
        delete_url_map(gcp)
        create_url_map(gcp, url_map_name, backend_service, service_host_name)
        # Use the module-level target_proxy_name instead of the previous
        # gcp.target_proxy.name: the proxy was just deleted above, so reading
        # the stale handle is fragile, and the finally block below already
        # recreates the proxy under target_proxy_name.
        create_target_proxy(gcp, target_proxy_name, False)
        potential_ip_addresses = []
        max_attempts = 10
        for i in range(max_attempts):
            potential_ip_addresses.append('10.10.10.%d' %
                                          (random.randint(0, 255)))
        create_global_forwarding_rule(gcp, forwarding_rule_name, [80],
                                      potential_ip_addresses)
        wait_until_all_rpcs_go_to_given_backends(backend_instances,
                                                 _WAIT_FOR_STATS_SEC)
        # expect failure when no port in client request uri, but specify port in url-map
        patch_url_map_host_rule_with_port(gcp, url_map_name, backend_service,
                                          service_host_name)
        wait_until_no_rpcs_go_to_given_backends(backend_instances,
                                                _WAIT_FOR_STATS_SEC)
    except Exception:
        passed = False
        raise
    finally:
        # NOTE(review): `return` inside finally suppresses any in-flight
        # exception whenever this restore branch runs — confirm intended.
        if passed or not args.halt_after_fail:
            delete_global_forwarding_rule(gcp)
            delete_target_proxy(gcp)
            delete_url_map(gcp)
            create_url_map(gcp, url_map_name, backend_service,
                           service_host_name)
            create_target_proxy(gcp, target_proxy_name)
            create_global_forwarding_rule(gcp, forwarding_rule_name,
                                          potential_service_ports)
            if gcp.service_port != _DEFAULT_SERVICE_PORT:
                patch_url_map_host_rule_with_port(gcp, url_map_name,
                                                  backend_service,
                                                  service_host_name)
                server_uri = service_host_name + ':' + str(gcp.service_port)
            else:
                server_uri = service_host_name
            return server_uri
def test_traffic_splitting(gcp, original_backend_service, instance_group,
                           alternate_backend_service, same_zone_instance_group):
    """Verify weighted traffic splitting (20%/80%) between two backend
    services via the url-map default route action."""
    # This test start with all traffic going to original_backend_service. Then
    # it updates URL-map to set default action to traffic splitting between
    # original and alternate. It waits for all backends in both services to
    # receive traffic, then verifies that weights are expected.
    logger.info('Running test_traffic_splitting')
    original_backend_instances, alternate_backend_instances = prepare_services_for_urlmap_tests(
        gcp, original_backend_service, instance_group,
        alternate_backend_service, same_zone_instance_group)
    passed = True
    try:
        # Patch urlmap, change route action to traffic splitting between
        # original and alternate.
        logger.info('patching url map with traffic splitting')
        original_service_percentage, alternate_service_percentage = 20, 80
        patch_url_map_backend_service(
            gcp,
            services_with_weights={
                original_backend_service: original_service_percentage,
                alternate_backend_service: alternate_service_percentage,
            })
        # Split percentage between instances: [20,80] -> [10,10,40,40].
        expected_instance_percentage = [
            original_service_percentage * 1.0 / len(original_backend_instances)
        ] * len(original_backend_instances) + [
            alternate_service_percentage * 1.0 /
            len(alternate_backend_instances)
        ] * len(alternate_backend_instances)
        # Wait for traffic to go to both services.
        logger.info(
            'waiting for traffic to go to all backends (including alternate)')
        wait_until_all_rpcs_go_to_given_backends(
            original_backend_instances + alternate_backend_instances,
            _WAIT_FOR_STATS_SEC)
        # Verify that weights between two services are expected.
        retry_count = 10
        # Each attempt takes about 10 seconds, 10 retries is equivalent to 100
        # seconds timeout.
        for i in range(retry_count):
            stats = get_client_stats(_NUM_TEST_RPCS, _WAIT_FOR_STATS_SEC)
            # Note: the comprehension variable `i` below has its own scope
            # and does not clobber the retry counter `i`.
            got_instance_count = [
                stats.rpcs_by_peer[i] for i in original_backend_instances
            ] + [stats.rpcs_by_peer[i] for i in alternate_backend_instances]
            total_count = sum(got_instance_count)
            got_instance_percentage = [
                x * 100.0 / total_count for x in got_instance_count
            ]
            try:
                # Allow up to 5 percentage points of deviation per instance.
                compare_distributions(got_instance_percentage,
                                      expected_instance_percentage, 5)
            except Exception as e:
                logger.info('attempt %d', i)
                logger.info('got percentage: %s', got_instance_percentage)
                logger.info('expected percentage: %s',
                            expected_instance_percentage)
                logger.info(e)
                if i == retry_count - 1:
                    raise Exception(
                        'RPC distribution (%s) differs from expected (%s)' %
                        (got_instance_percentage, expected_instance_percentage))
            else:
                logger.info("success")
                break
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_url_map_backend_service(gcp, original_backend_service)
            patch_backend_service(gcp, alternate_backend_service, [])
def test_path_matching(gcp, original_backend_service, instance_group,
                       alternate_backend_service, same_zone_instance_group):
    """Verify path-based routing (fullPathMatch, prefixMatch, regexMatch,
    ignoreCase) sends UnaryCall and EmptyCall to the expected services."""
    # This test start with all traffic (UnaryCall and EmptyCall) going to
    # original_backend_service.
    #
    # Then it updates URL-map to add routes, to make UnaryCall and EmptyCall to
    # go different backends. It waits for all backends in both services to
    # receive traffic, then verifies that traffic goes to the expected
    # backends.
    logger.info('Running test_path_matching')
    original_backend_instances, alternate_backend_instances = prepare_services_for_urlmap_tests(
        gcp, original_backend_service, instance_group,
        alternate_backend_service, same_zone_instance_group)
    passed = True
    try:
        # A list of tuples (route_rules, expected_instances).
        test_cases = [
            (
                [{
                    'priority': 0,
                    # FullPath EmptyCall -> alternate_backend_service.
                    'matchRules': [{
                        'fullPathMatch': '/grpc.testing.TestService/EmptyCall'
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": alternate_backend_instances,
                    "UnaryCall": original_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Prefix UnaryCall -> alternate_backend_service.
                    'matchRules': [{
                        'prefixMatch': '/grpc.testing.TestService/Unary'
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "UnaryCall": alternate_backend_instances,
                    "EmptyCall": original_backend_instances
                }),
            (
                # This test case is similar to the one above (but with route
                # services swapped). This test has two routes (full_path and
                # the default) to match EmptyCall, and both routes set
                # alternative_backend_service as the action. This forces the
                # client to handle duplicate Clusters in the RDS response.
                [
                    {
                        'priority': 0,
                        # Prefix UnaryCall -> original_backend_service.
                        'matchRules': [{
                            'prefixMatch': '/grpc.testing.TestService/Unary'
                        }],
                        'service': original_backend_service.url
                    },
                    {
                        'priority': 1,
                        # FullPath EmptyCall -> alternate_backend_service.
                        'matchRules': [{
                            'fullPathMatch':
                                '/grpc.testing.TestService/EmptyCall'
                        }],
                        'service': alternate_backend_service.url
                    }
                ],
                {
                    "UnaryCall": original_backend_instances,
                    "EmptyCall": alternate_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Regex UnaryCall -> alternate_backend_service.
                    # Raw string: '\/' is not a valid Python escape; the raw
                    # prefix keeps the bytes sent to the API unchanged while
                    # avoiding the invalid-escape SyntaxWarning.
                    'matchRules': [{
                        'regexMatch':
                            r'^\/.*\/UnaryCall$'  # Unary methods with any services.
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "UnaryCall": alternate_backend_instances,
                    "EmptyCall": original_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # ignoreCase EmptyCall -> alternate_backend_service.
                    'matchRules': [{
                        # Case insensitive matching.
                        'fullPathMatch': '/gRpC.tEsTinG.tEstseRvice/empTycaLl',
                        'ignoreCase': True,
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "UnaryCall": original_backend_instances,
                    "EmptyCall": alternate_backend_instances
                }),
        ]

        for (route_rules, expected_instances) in test_cases:
            logger.info('patching url map with %s', route_rules)
            patch_url_map_backend_service(gcp,
                                          original_backend_service,
                                          route_rules=route_rules)

            # Wait for traffic to go to both services.
            logger.info(
                'waiting for traffic to go to all backends (including alternate)'
            )
            wait_until_all_rpcs_go_to_given_backends(
                original_backend_instances + alternate_backend_instances,
                _WAIT_FOR_STATS_SEC)

            retry_count = 80
            # Each attempt takes about 5 seconds, 80 retries is equivalent to 400
            # seconds timeout.
            for i in range(retry_count):
                stats = get_client_stats(_NUM_TEST_RPCS, _WAIT_FOR_STATS_SEC)
                if not stats.rpcs_by_method:
                    raise ValueError(
                        'stats.rpcs_by_method is None, the interop client stats service does not support this test case'
                    )
                logger.info('attempt %d', i)
                if compare_expected_instances(stats, expected_instances):
                    logger.info("success")
                    break
                elif i == retry_count - 1:
                    raise Exception(
                        'timeout waiting for RPCs to the expected instances: %s'
                        % expected_instances)
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_url_map_backend_service(gcp, original_backend_service)
            patch_backend_service(gcp, alternate_backend_service, [])
def test_header_matching(gcp, original_backend_service, instance_group,
                         alternate_backend_service, same_zone_instance_group):
    """Verify header-based routing (exact/prefix/suffix/present/invert/
    range/regex header matchers) sends RPCs carrying the test metadata to
    the expected backend service."""
    # This test start with all traffic (UnaryCall and EmptyCall) going to
    # original_backend_service.
    #
    # Then it updates URL-map to add routes, to make RPCs with test headers to
    # go to different backends. It waits for all backends in both services to
    # receive traffic, then verifies that traffic goes to the expected
    # backends.
    logger.info('Running test_header_matching')
    original_backend_instances, alternate_backend_instances = prepare_services_for_urlmap_tests(
        gcp, original_backend_service, instance_group,
        alternate_backend_service, same_zone_instance_group)
    passed = True
    try:
        # A list of tuples (route_rules, expected_instances).
        test_cases = [
            (
                [{
                    'priority': 0,
                    # Header ExactMatch -> alternate_backend_service.
                    # EmptyCall is sent with the metadata.
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'headerMatches': [{
                            'headerName': _TEST_METADATA_KEY,
                            'exactMatch': _TEST_METADATA_VALUE_EMPTY
                        }]
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": alternate_backend_instances,
                    "UnaryCall": original_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Header PrefixMatch -> alternate_backend_service.
                    # UnaryCall is sent with the metadata.
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'headerMatches': [{
                            'headerName': _TEST_METADATA_KEY,
                            'prefixMatch': _TEST_METADATA_VALUE_UNARY[:2]
                        }]
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": original_backend_instances,
                    "UnaryCall": alternate_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Header SuffixMatch -> alternate_backend_service.
                    # EmptyCall is sent with the metadata.
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'headerMatches': [{
                            'headerName': _TEST_METADATA_KEY,
                            'suffixMatch': _TEST_METADATA_VALUE_EMPTY[-2:]
                        }]
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": alternate_backend_instances,
                    "UnaryCall": original_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Header 'xds_md_numeric' present -> alternate_backend_service.
                    # UnaryCall is sent with the metadata, so will be sent to alternative.
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'headerMatches': [{
                            'headerName': _TEST_METADATA_NUMERIC_KEY,
                            'presentMatch': True
                        }]
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": original_backend_instances,
                    "UnaryCall": alternate_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Header invert ExactMatch -> alternate_backend_service.
                    # UnaryCall is sent with the metadata, so will be sent to
                    # original. EmptyCall will be sent to alternative.
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'headerMatches': [{
                            'headerName': _TEST_METADATA_KEY,
                            'exactMatch': _TEST_METADATA_VALUE_UNARY,
                            'invertMatch': True
                        }]
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": alternate_backend_instances,
                    "UnaryCall": original_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Header 'xds_md_numeric' range [100,200] -> alternate_backend_service.
                    # UnaryCall is sent with the metadata in range.
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'headerMatches': [{
                            'headerName': _TEST_METADATA_NUMERIC_KEY,
                            'rangeMatch': {
                                'rangeStart': '100',
                                'rangeEnd': '200'
                            }
                        }]
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": original_backend_instances,
                    "UnaryCall": alternate_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Header RegexMatch -> alternate_backend_service.
                    # EmptyCall is sent with the metadata.
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'headerMatches': [{
                            'headerName':
                                _TEST_METADATA_KEY,
                            'regexMatch':
                                "^%s.*%s$" % (_TEST_METADATA_VALUE_EMPTY[:2],
                                              _TEST_METADATA_VALUE_EMPTY[-2:])
                        }]
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": alternate_backend_instances,
                    "UnaryCall": original_backend_instances
                }),
        ]

        for (route_rules, expected_instances) in test_cases:
            logger.info('patching url map with %s -> alternative',
                        route_rules[0]['matchRules'])
            patch_url_map_backend_service(gcp,
                                          original_backend_service,
                                          route_rules=route_rules)

            # Wait for traffic to go to both services.
            logger.info(
                'waiting for traffic to go to all backends (including alternate)'
            )
            wait_until_all_rpcs_go_to_given_backends(
                original_backend_instances + alternate_backend_instances,
                _WAIT_FOR_STATS_SEC)

            retry_count = 80
            # Each attempt takes about 5 seconds, 80 retries is equivalent to 400
            # seconds timeout.
            for i in range(retry_count):
                stats = get_client_stats(_NUM_TEST_RPCS, _WAIT_FOR_STATS_SEC)
                if not stats.rpcs_by_method:
                    raise ValueError(
                        'stats.rpcs_by_method is None, the interop client stats service does not support this test case'
                    )
                logger.info('attempt %d', i)
                if compare_expected_instances(stats, expected_instances):
                    logger.info("success")
                    break
                elif i == retry_count - 1:
                    raise Exception(
                        'timeout waiting for RPCs to the expected instances: %s'
                        % expected_instances)
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_url_map_backend_service(gcp, original_backend_service)
            patch_backend_service(gcp, alternate_backend_service, [])
def test_circuit_breaking(gcp, original_backend_service, instance_group,
                          same_zone_instance_group):
    '''
    Verify circuit-breaker maxRequests limits: with all RPCs kept open,
    in-flight counts must plateau at each service's configured threshold,
    and rise accordingly after the threshold is raised.

    Because backend service circuit_breakers configuration cannot be unset
    (which causes trouble for restoring the validate_for_proxyless flag in
    the target proxy/global forwarding rule), this test uses dedicated
    backend services. The url_map and backend services undergo the
    following state changes:

    Before test:
       original_backend_service -> [instance_group]
       extra_backend_service -> []
       more_extra_backend_service -> []

       url_map -> [original_backend_service]

    In test:
       extra_backend_service (with circuit_breakers) -> [instance_group]
       more_extra_backend_service (with circuit_breakers) -> [same_zone_instance_group]

       url_map -> [extra_backend_service, more_extra_backend_service]

    After test:
       original_backend_service -> [instance_group]
       extra_backend_service (with circuit_breakers) -> []
       more_extra_backend_service (with circuit_breakers) -> []

       url_map -> [original_backend_service]
    '''
    logger.info('Running test_circuit_breaking')
    additional_backend_services = []
    passed = True
    try:
        # TODO(chengyuanzhang): Dedicated backend services created for circuit
        # breaking test. Once the issue for unsetting backend service circuit
        # breakers is resolved or configuring backend service circuit breakers is
        # enabled for config validation, these dedicated backend services can be
        # eliminated.
        extra_backend_service_name = _BASE_BACKEND_SERVICE_NAME + '-extra' + gcp_suffix
        more_extra_backend_service_name = _BASE_BACKEND_SERVICE_NAME + '-more-extra' + gcp_suffix
        extra_backend_service = add_backend_service(gcp,
                                                    extra_backend_service_name)
        additional_backend_services.append(extra_backend_service)
        more_extra_backend_service = add_backend_service(
            gcp, more_extra_backend_service_name)
        additional_backend_services.append(more_extra_backend_service)
        # The config validation for proxyless doesn't allow setting
        # circuit_breakers. Disable validate validate_for_proxyless
        # for this test. This can be removed when validation
        # accepts circuit_breakers.
        logger.info('disabling validate_for_proxyless in target proxy')
        set_validate_for_proxyless(gcp, False)
        extra_backend_service_max_requests = 500
        more_extra_backend_service_max_requests = 1000
        patch_backend_service(gcp,
                              extra_backend_service, [instance_group],
                              circuit_breakers={
                                  'maxRequests':
                                      extra_backend_service_max_requests
                              })
        logger.info('Waiting for extra backends to become healthy')
        wait_for_healthy_backends(gcp, extra_backend_service, instance_group)
        patch_backend_service(gcp,
                              more_extra_backend_service,
                              [same_zone_instance_group],
                              circuit_breakers={
                                  'maxRequests':
                                      more_extra_backend_service_max_requests
                              })
        logger.info('Waiting for more extra backend to become healthy')
        wait_for_healthy_backends(gcp, more_extra_backend_service,
                                  same_zone_instance_group)
        extra_backend_instances = get_instance_names(gcp, instance_group)
        more_extra_backend_instances = get_instance_names(
            gcp, same_zone_instance_group)
        route_rules = [
            {
                'priority': 0,
                # UnaryCall -> extra_backend_service
                'matchRules': [{
                    'fullPathMatch': '/grpc.testing.TestService/UnaryCall'
                }],
                'service': extra_backend_service.url
            },
            {
                'priority': 1,
                # EmptyCall -> more_extra_backend_service
                'matchRules': [{
                    'fullPathMatch': '/grpc.testing.TestService/EmptyCall'
                }],
                'service': more_extra_backend_service.url
            },
        ]
        # Make client send UNARY_CALL and EMPTY_CALL.
        configure_client([
            messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
            messages_pb2.ClientConfigureRequest.RpcType.EMPTY_CALL
        ])
        logger.info('Patching url map with %s', route_rules)
        patch_url_map_backend_service(gcp,
                                      extra_backend_service,
                                      route_rules=route_rules)
        logger.info('Waiting for traffic to go to all backends')
        wait_until_all_rpcs_go_to_given_backends(
            extra_backend_instances + more_extra_backend_instances,
            _WAIT_FOR_STATS_SEC)
        # Make all calls keep-open.
        configure_client([
            messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
            messages_pb2.ClientConfigureRequest.RpcType.EMPTY_CALL
        ], [(messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
             'rpc-behavior', 'keep-open'),
            (messages_pb2.ClientConfigureRequest.RpcType.EMPTY_CALL,
             'rpc-behavior', 'keep-open')])
        # In-flight RPCs should plateau at each service's maxRequests.
        wait_until_rpcs_in_flight(
            'UNARY_CALL', (_WAIT_FOR_BACKEND_SEC +
                           int(extra_backend_service_max_requests / args.qps)),
            extra_backend_service_max_requests, 1)
        logger.info('UNARY_CALL reached stable state (%d)',
                    extra_backend_service_max_requests)
        wait_until_rpcs_in_flight(
            'EMPTY_CALL',
            (_WAIT_FOR_BACKEND_SEC +
             int(more_extra_backend_service_max_requests / args.qps)),
            more_extra_backend_service_max_requests, 1)
        logger.info('EMPTY_CALL reached stable state (%d)',
                    more_extra_backend_service_max_requests)
        # Increment circuit breakers max_requests threshold.
        extra_backend_service_max_requests = 800
        patch_backend_service(gcp,
                              extra_backend_service, [instance_group],
                              circuit_breakers={
                                  'maxRequests':
                                      extra_backend_service_max_requests
                              })
        wait_until_rpcs_in_flight(
            'UNARY_CALL', (_WAIT_FOR_BACKEND_SEC +
                           int(extra_backend_service_max_requests / args.qps)),
            extra_backend_service_max_requests, 1)
        logger.info('UNARY_CALL reached stable state after increase (%d)',
                    extra_backend_service_max_requests)
        logger.info('success')
        # Avoid new RPCs being outstanding (some test clients create threads
        # for sending RPCs) after restoring backend services.
        configure_client(
            [messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL])
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_url_map_backend_service(gcp, original_backend_service)
            patch_backend_service(gcp, original_backend_service,
                                  [instance_group])
            for backend_service in additional_backend_services:
                delete_backend_service(gcp, backend_service)
            set_validate_for_proxyless(gcp, True)
def test_timeout(gcp, original_backend_service, instance_group):
    """Verify xDS-configured stream timeouts are enforced by the test client.

    Installs a route rule setting maxStreamDuration=3s for UnaryCall only,
    then runs three scenarios against the client's accumulated per-method
    status counts: (1) UnaryCall and EmptyCall both sleep 4s server-side, so
    UnaryCall must end DEADLINE_EXCEEDED while EmptyCall (not matched by the
    rule) succeeds; (2) an application deadline of 1s with a 2s server sleep
    also yields DEADLINE_EXCEEDED; (3) a plain UnaryCall succeeds.
    The original URL map is restored in the finally block.
    """
    logger.info('Running test_timeout')
    logger.info('waiting for original backends to become healthy')
    wait_for_healthy_backends(gcp, original_backend_service, instance_group)
    # UnaryCall -> maxStreamDuration:3s
    route_rules = [{
        'priority': 0,
        'matchRules': [{
            'fullPathMatch': '/grpc.testing.TestService/UnaryCall'
        }],
        'service': original_backend_service.url,
        'routeAction': {
            'maxStreamDuration': {
                'seconds': 3,
            },
        },
    }]
    patch_url_map_backend_service(gcp,
                                  original_backend_service,
                                  route_rules=route_rules)
    # A list of tuples (testcase_name, {client_config}, {expected_results})
    # where expected_results maps RPC method name -> expected gRPC status
    # code (0 = OK, 4 = DEADLINE_EXCEEDED).
    test_cases = [
        (
            'timeout_exceeded (UNARY_CALL), timeout_different_route (EMPTY_CALL)',
            # UnaryCall and EmptyCall both sleep-4.
            # UnaryCall timeouts, EmptyCall succeeds.
            {
                'rpc_types': [
                    messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
                    messages_pb2.ClientConfigureRequest.RpcType.EMPTY_CALL,
                ],
                'metadata': [
                    (messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
                     'rpc-behavior', 'sleep-4'),
                    (messages_pb2.ClientConfigureRequest.RpcType.EMPTY_CALL,
                     'rpc-behavior', 'sleep-4'),
                ],
            },
            {
                'UNARY_CALL': 4,  # DEADLINE_EXCEEDED
                'EMPTY_CALL': 0,
            },
        ),
        (
            'app_timeout_exceeded',
            # UnaryCall only with sleep-2; timeout=1s; calls timeout.
            {
                'rpc_types': [
                    messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
                ],
                'metadata': [
                    (messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
                     'rpc-behavior', 'sleep-2'),
                ],
                'timeout_sec': 1,
            },
            {
                'UNARY_CALL': 4,  # DEADLINE_EXCEEDED
            },
        ),
        (
            'timeout_not_exceeded',
            # UnaryCall only with no sleep; calls succeed.
            {
                'rpc_types': [
                    messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
                ],
            },
            {
                'UNARY_CALL': 0,
            },
        )
    ]
    passed = True
    try:
        first_case = True
        for (testcase_name, client_config, expected_results) in test_cases:
            logger.info('starting case %s', testcase_name)
            configure_client(**client_config)
            # wait a second to help ensure the client stops sending RPCs with
            # the old config. We will make multiple attempts if it is failing,
            # but this improves confidence that the test is valid if the
            # previous client_config would lead to the same results.
            time.sleep(1)
            # Each attempt takes 10 seconds; 20 attempts is equivalent to 200
            # second timeout.
            attempt_count = 20
            if first_case:
                attempt_count = 120
                first_case = False
            before_stats = get_client_accumulated_stats()
            if not before_stats.stats_per_method:
                raise ValueError(
                    'stats.stats_per_method is None, the interop client stats service does not support this test case'
                )
            for i in range(attempt_count):
                logger.info('%s: attempt %d', testcase_name, i)
                test_runtime_secs = 10
                time.sleep(test_runtime_secs)
                after_stats = get_client_accumulated_stats()
                success = True
                for rpc, status in list(expected_results.items()):
                    # Delta of RPCs finishing with `status` during this
                    # attempt's 10-second window.
                    qty = (after_stats.stats_per_method[rpc].result[status] -
                           before_stats.stats_per_method[rpc].result[status])
                    want = test_runtime_secs * args.qps
                    # Allow 10% deviation from expectation to reduce flakiness
                    if qty < (want * .9) or qty > (want * 1.1):
                        logger.info('%s: failed due to %s[%s]: got %d want ~%d',
                                    testcase_name, rpc, status, qty, want)
                        success = False
                if success:
                    logger.info('success')
                    break
                logger.info('%s attempt %d failed', testcase_name, i)
                before_stats = after_stats
            else:
                # for/else: every attempt failed within attempt_count.
                raise Exception(
                    '%s: timeout waiting for expected results: %s; got %s' %
                    (testcase_name, expected_results,
                     after_stats.stats_per_method))
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_url_map_backend_service(gcp, original_backend_service)
def test_fault_injection(gcp, original_backend_service, instance_group):
    """Verify xDS fault-injection policies (abort and delay) are honored.

    Installs route rules keyed on the 'fi_testcase' request header:
    0%/100%/50% abort (HTTP 401 -> gRPC UNAUTHENTICATED, code 16) and
    0%/100%/50% fixed 20s delay (combined with a 2s RPC timeout ->
    DEADLINE_EXCEEDED, code 4), plus a default route. Each case sends
    UNARY_CALLs tagged with its header value and checks the observed status
    distribution stays within 10% of the configured percentage. URL map and
    proxyless validation are restored in the finally block.
    """
    logger.info('Running test_fault_injection')
    logger.info('waiting for original backends to become healthy')
    wait_for_healthy_backends(gcp, original_backend_service, instance_group)
    testcase_header = 'fi_testcase'

    def _route(pri, name, fi_policy):
        # Route rule matching any path whose fi_testcase header equals
        # `name`, applying the given faultInjectionPolicy.
        return {
            'priority': pri,
            'matchRules': [{
                'prefixMatch':
                    '/',
                'headerMatches': [{
                    'headerName': testcase_header,
                    'exactMatch': name,
                }],
            }],
            'service': original_backend_service.url,
            'routeAction': {
                'faultInjectionPolicy': fi_policy
            },
        }

    def _abort(pct):
        # Abort `pct` percent of requests with HTTP 401 (UNAUTHENTICATED).
        return {
            'abort': {
                'httpStatus': 401,
                'percentage': pct,
            }
        }

    def _delay(pct):
        # Delay `pct` percent of requests by a fixed 20 seconds.
        return {
            'delay': {
                'fixedDelay': {
                    'seconds': '20'
                },
                'percentage': pct,
            }
        }

    # A policy carrying BOTH abort and delay at 0% — exercises the
    # zero-percentage code path in the client.
    zero_route = _abort(0)
    zero_route.update(_delay(0))
    route_rules = [
        _route(0, 'zero_percent_fault_injection', zero_route),
        _route(1, 'always_delay', _delay(100)),
        _route(2, 'always_abort', _abort(100)),
        _route(3, 'delay_half', _delay(50)),
        _route(4, 'abort_half', _abort(50)),
        {
            'priority': 5,
            'matchRules': [{
                'prefixMatch': '/'
            }],
            'service': original_backend_service.url,
        },
    ]
    set_validate_for_proxyless(gcp, False)
    patch_url_map_backend_service(gcp,
                                  original_backend_service,
                                  route_rules=route_rules)
    # A list of tuples (testcase_name, {client_config}, {code: percent}). Each
    # test case will set the testcase_header with the testcase_name for routing
    # to the appropriate config for the case, defined above.
    test_cases = [
        (
            'zero_percent_fault_injection',
            {},
            {
                0: 1
            },  # OK
        ),
        (
            'non_matching_fault_injection',  # Not in route_rules, above.
            {},
            {
                0: 1
            },  # OK
        ),
        (
            'always_delay',
            {
                'timeout_sec': 2
            },
            {
                4: 1
            },  # DEADLINE_EXCEEDED
        ),
        (
            'always_abort',
            {},
            {
                16: 1
            },  # UNAUTHENTICATED
        ),
        (
            'delay_half',
            {
                'timeout_sec': 2
            },
            {
                4: .5,
                0: .5
            },  # DEADLINE_EXCEEDED / OK: 50% / 50%
        ),
        (
            'abort_half',
            {},
            {
                16: .5,
                0: .5
            },  # UNAUTHENTICATED / OK: 50% / 50%
        )
    ]
    passed = True
    try:
        first_case = True
        for (testcase_name, client_config, expected_results) in test_cases:
            logger.info('starting case %s', testcase_name)
            # Tag every UNARY_CALL with this case's routing header.
            client_config['metadata'] = [
                (messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
                 testcase_header, testcase_name)
            ]
            client_config['rpc_types'] = [
                messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
            ]
            configure_client(**client_config)
            # wait a second to help ensure the client stops sending RPCs with
            # the old config. We will make multiple attempts if it is failing,
            # but this improves confidence that the test is valid if the
            # previous client_config would lead to the same results.
            time.sleep(1)
            # Each attempt takes 10 seconds; 20 attempts is equivalent to 200
            # second timeout.
            attempt_count = 20
            if first_case:
                attempt_count = 120
                first_case = False
            before_stats = get_client_accumulated_stats()
            if not before_stats.stats_per_method:
                raise ValueError(
                    'stats.stats_per_method is None, the interop client stats service does not support this test case'
                )
            for i in range(attempt_count):
                logger.info('%s: attempt %d', testcase_name, i)
                test_runtime_secs = 10
                time.sleep(test_runtime_secs)
                after_stats = get_client_accumulated_stats()
                success = True
                for status, pct in list(expected_results.items()):
                    rpc = 'UNARY_CALL'
                    # Delta of UNARY_CALLs finishing with `status` during
                    # this attempt window.
                    qty = (after_stats.stats_per_method[rpc].result[status] -
                           before_stats.stats_per_method[rpc].result[status])
                    want = pct * args.qps * test_runtime_secs
                    # Allow 10% deviation from expectation to reduce flakiness
                    VARIANCE_ALLOWED = 0.1
                    if abs(qty - want) > want * VARIANCE_ALLOWED:
                        logger.info('%s: failed due to %s[%s]: got %d want ~%d',
                                    testcase_name, rpc, status, qty, want)
                        success = False
                if success:
                    logger.info('success')
                    break
                logger.info('%s attempt %d failed', testcase_name, i)
                before_stats = after_stats
            else:
                # for/else: every attempt failed within attempt_count.
                raise Exception(
                    '%s: timeout waiting for expected results: %s; got %s' %
                    (testcase_name, expected_results,
                     after_stats.stats_per_method))
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_url_map_backend_service(gcp, original_backend_service)
            set_validate_for_proxyless(gcp, True)
def test_csds(gcp, original_backend_service, instance_group, server_uri):
    """Validate the client's CSDS (xDS config dump) against expected resources.

    Polls get_client_xds_config_dump() for up to 5 minutes, checking that the
    dump reports the expected node zone and contains consistent LDS (listener
    named server_uri), RDS (at least one virtual host), CDS (cluster type
    'EDS') and EDS (endpoint sub_zone containing args.zone) entries.

    Raises:
        RuntimeError: if no valid config dump is observed within the timeout.
    """
    test_csds_timeout_s = datetime.timedelta(minutes=5).total_seconds()
    sleep_interval_between_attempts_s = datetime.timedelta(
        seconds=2).total_seconds()
    logger.info('Running test_csds')
    logger.info('waiting for original backends to become healthy')
    wait_for_healthy_backends(gcp, original_backend_service, instance_group)
    # Test case timeout: 5 minutes
    deadline = time.time() + test_csds_timeout_s
    cnt = 0
    while time.time() <= deadline:
        client_config = get_client_xds_config_dump()
        logger.info('test_csds attempt %d: received xDS config %s', cnt,
                    json.dumps(client_config, indent=2))
        if client_config is not None:
            # Got the xDS config dump, now validate it
            ok = True
            try:
                if client_config['node']['locality']['zone'] != args.zone:
                    logger.info('Invalid zone %s != %s',
                                client_config['node']['locality']['zone'],
                                args.zone)
                    ok = False
                seen = set()
                for xds_config in client_config['xds_config']:
                    if 'listener_config' in xds_config:
                        listener_name = xds_config['listener_config'][
                            'dynamic_listeners'][0]['active_state']['listener'][
                                'name']
                        if listener_name != server_uri:
                            logger.info('Invalid Listener name %s != %s',
                                        listener_name, server_uri)
                            ok = False
                        else:
                            seen.add('lds')
                    elif 'route_config' in xds_config:
                        num_vh = len(
                            xds_config['route_config']['dynamic_route_configs']
                            [0]['route_config']['virtual_hosts'])
                        if num_vh <= 0:
                            logger.info('Invalid number of VirtualHosts %s',
                                        num_vh)
                            ok = False
                        else:
                            seen.add('rds')
                    elif 'cluster_config' in xds_config:
                        cluster_type = xds_config['cluster_config'][
                            'dynamic_active_clusters'][0]['cluster']['type']
                        if cluster_type != 'EDS':
                            logger.info('Invalid cluster type %s != EDS',
                                        cluster_type)
                            ok = False
                        else:
                            seen.add('cds')
                    elif 'endpoint_config' in xds_config:
                        sub_zone = xds_config["endpoint_config"][
                            "dynamic_endpoint_configs"][0]["endpoint_config"][
                                "endpoints"][0]["locality"]["sub_zone"]
                        if args.zone not in sub_zone:
                            logger.info('Invalid endpoint sub_zone %s',
                                        sub_zone)
                            ok = False
                        else:
                            seen.add('eds')
                want = {'lds', 'rds', 'cds', 'eds'}
                if seen != want:
                    logger.info('Incomplete xDS config dump, seen=%s', seen)
                    ok = False
            except Exception:
                # A malformed dump raises KeyError/IndexError/TypeError while
                # probing; treat it as invalid and retry. Catch Exception
                # rather than the original bare `except:` so that
                # KeyboardInterrupt/SystemExit still propagate.
                logger.exception('Error in xDS config dump:')
                ok = False
            finally:
                if ok:
                    # Successfully fetched xDS config, and they looks good.
                    logger.info('success')
                    return
        logger.info('test_csds attempt %d failed', cnt)
        # Give the client some time to fetch xDS resources
        time.sleep(sleep_interval_between_attempts_s)
        cnt += 1
    raise RuntimeError('failed to receive a valid xDS config in %s seconds' %
                       test_csds_timeout_s)
def set_validate_for_proxyless(gcp, validate_for_proxyless):
    """Recreate the target proxy with the given validateForProxyless setting.

    Patching a target gRPC proxy is not supported, so the existing global
    forwarding rule and target proxy are deleted and rebuilt under the same
    names. No-op unless the alpha compute API is enabled.
    """
    if gcp.alpha_compute:
        delete_global_forwarding_rule(gcp)
        delete_target_proxy(gcp)
        create_target_proxy(gcp, gcp.target_proxy.name, validate_for_proxyless)
        create_global_forwarding_rule(gcp, gcp.global_forwarding_rule.name,
                                      [gcp.service_port])
    else:
        logger.debug(
            'Not setting validateForProxy because alpha is not enabled')
def get_serving_status(instance, service_port):
    """Query the standard gRPC health service on the instance and return its response."""
    target = '%s:%d' % (instance, service_port)
    with grpc.insecure_channel(target) as channel:
        stub = health_pb2_grpc.HealthStub(channel)
        return stub.Check(health_pb2.HealthCheckRequest())
def set_serving_status(instances, service_port, serving):
    """Flip each xDS test server's serving status and verify it took effect.

    For every instance, calls SetServing/SetNotServing on the
    XdsUpdateHealthService, then checks the reported health status via the
    standard health service, retrying up to retry_count times.

    Args:
        instances: iterable of instance hostnames/IPs.
        service_port: port the test server listens on.
        serving: True to mark SERVING, False to mark NOT_SERVING.

    Raises:
        Exception: if an instance's status does not match after all retries.
    """
    logger.info('setting %s serving status to %s', instances, serving)
    for instance in instances:
        with grpc.insecure_channel('%s:%d' %
                                   (instance, service_port)) as channel:
            logger.info('setting %s serving status to %s', instance, serving)
            stub = test_pb2_grpc.XdsUpdateHealthServiceStub(channel)
            retry_count = 5
            # Iterate over retry_count — the original hard-coded range(5)
            # silently ignored the retry_count variable.
            for i in range(retry_count):
                if serving:
                    stub.SetServing(empty_pb2.Empty())
                else:
                    stub.SetNotServing(empty_pb2.Empty())
                serving_status = get_serving_status(instance, service_port)
                logger.info('got instance service status %s', serving_status)
                want_status = health_pb2.HealthCheckResponse.SERVING if serving else health_pb2.HealthCheckResponse.NOT_SERVING
                if serving_status.status == want_status:
                    break
                if i == retry_count - 1:
                    raise Exception(
                        'failed to set instance service status after %d retries'
                        % retry_count)
def is_primary_instance_group(gcp, instance_group):
    """Return True iff every RPC peer seen by the client belongs to instance_group."""
    # Clients may connect to a TD instance in a different region than the
    # client, in which case primary/secondary assignments may not be based on
    # the client's actual locality.
    members = set(get_instance_names(gcp, instance_group))
    stats = get_client_stats(_NUM_TEST_RPCS, _WAIT_FOR_STATS_SEC)
    return set(stats.rpcs_by_peer.keys()).issubset(members)
def get_startup_script(path_to_server_binary, service_port):
    """Return the VM startup script used to launch the xDS test server.

    When a prebuilt server binary path is supplied, the script simply runs it
    in the background on service_port; otherwise it clones and builds the
    Java interop test server from source before launching it.
    """
    if not path_to_server_binary:
        return """#!/bin/bash
sudo apt update
sudo apt install -y git default-jdk
mkdir java_server
pushd java_server
git clone https://github.com/grpc/grpc-java.git
pushd grpc-java
pushd interop-testing
../gradlew installDist -x test -PskipCodegen=true -PskipAndroid=true
nohup build/install/grpc-interop-testing/bin/xds-test-server \
    --port=%d 1>/dev/null &""" % service_port
    return 'nohup %s --port=%d 1>/dev/null &' % (path_to_server_binary,
                                                 service_port)
def create_instance_template(gcp, name, network, source_image, machine_type,
                             startup_script):
    """Create a GCE instance template and record it on gcp.instance_template.

    The template tags instances for health-check firewall access, grants the
    default service account the cloud-platform scope, and injects
    startup_script via instance metadata.
    """
    properties = {
        'tags': {
            'items': ['allow-health-checks']
        },
        'machineType': machine_type,
        'serviceAccounts': [{
            'email': 'default',
            'scopes': ['https://www.googleapis.com/auth/cloud-platform',]
        }],
        'networkInterfaces': [{
            'accessConfigs': [{
                'type': 'ONE_TO_ONE_NAT'
            }],
            'network': network
        }],
        'disks': [{
            'boot': True,
            'initializeParams': {
                'sourceImage': source_image
            },
            'autoDelete': True
        }],
        'metadata': {
            'items': [{
                'key': 'startup-script',
                'value': startup_script
            }]
        }
    }
    config = {'name': name, 'properties': properties}
    logger.debug('Sending GCP request with body=%s', config)
    insert_op = gcp.compute.instanceTemplates().insert(
        project=gcp.project, body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, insert_op['name'])
    gcp.instance_template = GcpResource(config['name'], insert_op['targetLink'])
def add_instance_group(gcp, zone, name, size):
    """Create a managed instance group from gcp.instance_template.

    Registers the named port 'grpc' on gcp.service_port, waits until the
    group reaches `size` instances, appends it to gcp.instance_groups, and
    returns the new InstanceGroup.
    """
    config = {
        'name': name,
        'instanceTemplate': gcp.instance_template.url,
        'targetSize': size,
        'namedPorts': [{
            'name': 'grpc',
            'port': gcp.service_port
        }]
    }
    logger.debug('Sending GCP request with body=%s', config)
    insert_op = gcp.compute.instanceGroupManagers().insert(
        project=gcp.project, zone=zone,
        body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_zone_operation(gcp, zone, insert_op['name'])
    manager = gcp.compute.instanceGroupManagers().get(
        project=gcp.project, zone=zone,
        instanceGroupManager=config['name']).execute(
            num_retries=_GCP_API_RETRIES)
    instance_group = InstanceGroup(config['name'], manager['instanceGroup'],
                                   zone)
    gcp.instance_groups.append(instance_group)
    wait_for_instance_group_to_reach_expected_size(gcp, instance_group, size,
                                                   _WAIT_FOR_OPERATION_SEC)
    return instance_group
def create_health_check(gcp, name):
    """Create a health check and store it on gcp.health_check.

    Uses a native GRPC health check when the alpha API is available,
    otherwise falls back to a TCP check against the named port 'grpc'.
    """
    if gcp.alpha_compute:
        compute_to_use = gcp.alpha_compute
        config = {
            'name': name,
            'type': 'GRPC',
            'grpcHealthCheck': {
                'portSpecification': 'USE_SERVING_PORT'
            }
        }
    else:
        compute_to_use = gcp.compute
        config = {
            'name': name,
            'type': 'TCP',
            'tcpHealthCheck': {
                'portName': 'grpc'
            }
        }
    logger.debug('Sending GCP request with body=%s', config)
    insert_op = compute_to_use.healthChecks().insert(
        project=gcp.project, body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, insert_op['name'])
    gcp.health_check = GcpResource(config['name'], insert_op['targetLink'])
def create_health_check_firewall_rule(gcp, name):
    """Allow ingress TCP from GCP health-check source ranges to tagged VMs."""
    config = {
        'name': name,
        'direction': 'INGRESS',
        'allowed': [{
            'IPProtocol': 'tcp'
        }],
        # GCP's documented health-checking source ranges.
        'sourceRanges': ['35.191.0.0/16', '130.211.0.0/22'],
        'targetTags': ['allow-health-checks'],
    }
    logger.debug('Sending GCP request with body=%s', config)
    insert_op = gcp.compute.firewalls().insert(
        project=gcp.project, body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, insert_op['name'])
    gcp.health_check_firewall_rule = GcpResource(config['name'],
                                                 insert_op['targetLink'])
def add_backend_service(gcp, name):
    """Create a backend service (GRPC on alpha, HTTP2 otherwise), track and return it."""
    if gcp.alpha_compute:
        compute_to_use, protocol = gcp.alpha_compute, 'GRPC'
    else:
        compute_to_use, protocol = gcp.compute, 'HTTP2'
    config = {
        'name': name,
        'loadBalancingScheme': 'INTERNAL_SELF_MANAGED',
        'healthChecks': [gcp.health_check.url],
        'portName': 'grpc',
        'protocol': protocol
    }
    logger.debug('Sending GCP request with body=%s', config)
    insert_op = compute_to_use.backendServices().insert(
        project=gcp.project, body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, insert_op['name'])
    backend_service = GcpResource(config['name'], insert_op['targetLink'])
    gcp.backend_services.append(backend_service)
    return backend_service
def create_url_map(gcp, name, backend_service, host_name):
    """Create a URL map routing host_name through _PATH_MATCHER_NAME to backend_service."""
    path_matcher = {
        'name': _PATH_MATCHER_NAME,
        'defaultService': backend_service.url,
    }
    config = {
        'name': name,
        'defaultService': backend_service.url,
        'pathMatchers': [path_matcher],
        'hostRules': [{
            'hosts': [host_name],
            'pathMatcher': _PATH_MATCHER_NAME
        }]
    }
    logger.debug('Sending GCP request with body=%s', config)
    insert_op = gcp.compute.urlMaps().insert(
        project=gcp.project, body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, insert_op['name'])
    gcp.url_map = GcpResource(config['name'], insert_op['targetLink'])
def patch_url_map_host_rule_with_port(gcp, name, backend_service, host_name):
    """Rewrite the URL map's host rule to match 'host_name:gcp.service_port'.

    Note: backend_service is accepted for signature compatibility but unused.
    """
    host_with_port = '%s:%d' % (host_name, gcp.service_port)
    config = {
        'hostRules': [{
            'hosts': [host_with_port],
            'pathMatcher': _PATH_MATCHER_NAME
        }]
    }
    logger.debug('Sending GCP request with body=%s', config)
    patch_op = gcp.compute.urlMaps().patch(
        project=gcp.project, urlMap=name,
        body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, patch_op['name'])
def create_target_proxy(gcp, name, validate_for_proxyless=True):
    """Create the target proxy and record it on gcp.target_proxy.

    Uses a target gRPC proxy (with the validate_for_proxyless flag) when the
    alpha API is available; otherwise a plain target HTTP proxy.
    """
    config = {
        'name': name,
        'url_map': gcp.url_map.url,
    }
    if gcp.alpha_compute:
        # validate_for_proxyless is only supported by the alpha API.
        config['validate_for_proxyless'] = validate_for_proxyless
        logger.debug('Sending GCP request with body=%s', config)
        insert_op = gcp.alpha_compute.targetGrpcProxies().insert(
            project=gcp.project,
            body=config).execute(num_retries=_GCP_API_RETRIES)
    else:
        logger.debug('Sending GCP request with body=%s', config)
        insert_op = gcp.compute.targetHttpProxies().insert(
            project=gcp.project,
            body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, insert_op['name'])
    gcp.target_proxy = GcpResource(config['name'], insert_op['targetLink'])
def create_global_forwarding_rule(gcp,
                                  name,
                                  potential_ports,
                                  potential_ip_addresses=['0.0.0.0']):
    """Create a forwarding rule, trying (port, ip) combinations until one succeeds.

    On success records the rule on gcp.global_forwarding_rule and the chosen
    port on gcp.service_port, then returns. NOTE(review): if every
    combination fails the function returns silently and gcp.service_port is
    left unset — confirm callers tolerate that.
    """
    compute_to_use = gcp.alpha_compute if gcp.alpha_compute else gcp.compute
    for port in potential_ports:
        for ip_address in potential_ip_addresses:
            config = {
                'name': name,
                'loadBalancingScheme': 'INTERNAL_SELF_MANAGED',
                'portRange': str(port),
                'IPAddress': ip_address,
                'network': args.network,
                'target': gcp.target_proxy.url,
            }
            try:
                logger.debug('Sending GCP request with body=%s', config)
                insert_op = compute_to_use.globalForwardingRules().insert(
                    project=gcp.project,
                    body=config).execute(num_retries=_GCP_API_RETRIES)
                wait_for_global_operation(gcp, insert_op['name'])
            except googleapiclient.errors.HttpError as http_error:
                logger.warning(
                    'Got error %s when attempting to create forwarding rule to '
                    '%s:%d. Retrying with another port.' %
                    (http_error, ip_address, port))
            else:
                gcp.global_forwarding_rule = GcpResource(
                    config['name'], insert_op['targetLink'])
                gcp.service_port = port
                return
def get_health_check(gcp, health_check_name):
    """Look up an existing health check; record a placeholder (url=None) on failure."""
    url = None
    try:
        result = gcp.compute.healthChecks().get(
            project=gcp.project, healthCheck=health_check_name).execute()
        url = result['selfLink']
    except Exception as e:
        gcp.errors.append(e)
    gcp.health_check = GcpResource(health_check_name, url)
def get_health_check_firewall_rule(gcp, firewall_name):
    """Look up an existing firewall rule; record a placeholder (url=None) on failure."""
    url = None
    try:
        result = gcp.compute.firewalls().get(project=gcp.project,
                                             firewall=firewall_name).execute()
        url = result['selfLink']
    except Exception as e:
        gcp.errors.append(e)
    gcp.health_check_firewall_rule = GcpResource(firewall_name, url)
def get_backend_service(gcp, backend_service_name, record_error=True):
    """Look up an existing backend service.

    Always appends a GcpResource (url=None on failure) to
    gcp.backend_services and returns it; the lookup error is recorded on
    gcp.errors only when record_error is True.
    """
    url = None
    try:
        result = gcp.compute.backendServices().get(
            project=gcp.project, backendService=backend_service_name).execute()
        url = result['selfLink']
    except Exception as e:
        if record_error:
            gcp.errors.append(e)
    backend_service = GcpResource(backend_service_name, url)
    gcp.backend_services.append(backend_service)
    return backend_service
def get_url_map(gcp, url_map_name):
    """Look up an existing URL map; record a placeholder (url=None) on failure."""
    url = None
    try:
        result = gcp.compute.urlMaps().get(project=gcp.project,
                                           urlMap=url_map_name).execute()
        url = result['selfLink']
    except Exception as e:
        gcp.errors.append(e)
    gcp.url_map = GcpResource(url_map_name, url)
def get_target_proxy(gcp, target_proxy_name):
    """Look up the target proxy (gRPC on alpha, HTTP otherwise); url=None on failure."""
    url = None
    try:
        if gcp.alpha_compute:
            result = gcp.alpha_compute.targetGrpcProxies().get(
                project=gcp.project,
                targetGrpcProxy=target_proxy_name).execute()
        else:
            result = gcp.compute.targetHttpProxies().get(
                project=gcp.project,
                targetHttpProxy=target_proxy_name).execute()
        url = result['selfLink']
    except Exception as e:
        gcp.errors.append(e)
    gcp.target_proxy = GcpResource(target_proxy_name, url)
def get_global_forwarding_rule(gcp, forwarding_rule_name):
    """Look up an existing global forwarding rule; url=None on failure."""
    url = None
    try:
        result = gcp.compute.globalForwardingRules().get(
            project=gcp.project, forwardingRule=forwarding_rule_name).execute()
        url = result['selfLink']
    except Exception as e:
        gcp.errors.append(e)
    gcp.global_forwarding_rule = GcpResource(forwarding_rule_name, url)
def get_instance_template(gcp, template_name):
    """Look up an existing instance template; url=None on failure."""
    url = None
    try:
        result = gcp.compute.instanceTemplates().get(
            project=gcp.project, instanceTemplate=template_name).execute()
        url = result['selfLink']
    except Exception as e:
        gcp.errors.append(e)
    gcp.instance_template = GcpResource(template_name, url)
def get_instance_group(gcp, zone, instance_group_name):
    """Look up an existing instance group and capture its named 'grpc' port.

    On failure the error is recorded and a placeholder InstanceGroup with
    url=None is used. The group is appended to gcp.instance_groups and
    returned either way.
    """
    url = None
    try:
        result = gcp.compute.instanceGroups().get(
            project=gcp.project, zone=zone,
            instanceGroup=instance_group_name).execute()
        gcp.service_port = result['namedPorts'][0]['port']
        url = result['selfLink']
    except Exception as e:
        gcp.errors.append(e)
    instance_group = InstanceGroup(instance_group_name, url, zone)
    gcp.instance_groups.append(instance_group)
    return instance_group
def delete_global_forwarding_rule(gcp, name=None):
    """Best-effort deletion of a forwarding rule (default: the one tracked on gcp)."""
    rule_name = name if name else gcp.global_forwarding_rule.name
    try:
        logger.debug('Deleting forwarding rule %s', rule_name)
        delete_op = gcp.compute.globalForwardingRules().delete(
            project=gcp.project,
            forwardingRule=rule_name).execute(
                num_retries=_GCP_API_RETRIES)
        wait_for_global_operation(gcp, delete_op['name'])
    except googleapiclient.errors.HttpError as http_error:
        logger.info('Delete failed: %s', http_error)
def delete_target_proxy(gcp, name=None):
    """Best-effort deletion of a target proxy (gRPC on alpha, HTTP otherwise)."""
    proxy_name = name if name else gcp.target_proxy.name
    try:
        if gcp.alpha_compute:
            logger.debug('Deleting grpc proxy %s', proxy_name)
            delete_op = gcp.alpha_compute.targetGrpcProxies().delete(
                project=gcp.project, targetGrpcProxy=proxy_name).execute(
                    num_retries=_GCP_API_RETRIES)
        else:
            logger.debug('Deleting http proxy %s', proxy_name)
            delete_op = gcp.compute.targetHttpProxies().delete(
                project=gcp.project, targetHttpProxy=proxy_name).execute(
                    num_retries=_GCP_API_RETRIES)
        wait_for_global_operation(gcp, delete_op['name'])
    except googleapiclient.errors.HttpError as http_error:
        logger.info('Delete failed: %s', http_error)
def delete_url_map(gcp, name=None):
    """Best-effort deletion of a URL map (default: the one tracked on gcp)."""
    map_name = name if name else gcp.url_map.name
    try:
        logger.debug('Deleting url map %s', map_name)
        delete_op = gcp.compute.urlMaps().delete(
            project=gcp.project,
            urlMap=map_name).execute(num_retries=_GCP_API_RETRIES)
        wait_for_global_operation(gcp, delete_op['name'])
    except googleapiclient.errors.HttpError as http_error:
        logger.info('Delete failed: %s', http_error)
def delete_backend_service(gcp, backend_service):
    """Best-effort deletion of a single backend service."""
    try:
        logger.debug('Deleting backend service %s', backend_service.name)
        delete_op = gcp.compute.backendServices().delete(
            project=gcp.project, backendService=backend_service.name).execute(
                num_retries=_GCP_API_RETRIES)
        wait_for_global_operation(gcp, delete_op['name'])
    except googleapiclient.errors.HttpError as http_error:
        logger.info('Delete failed: %s', http_error)
def delete_backend_services(gcp):
    """Best-effort deletion of every backend service tracked on gcp."""
    for backend_service in gcp.backend_services:
        delete_backend_service(gcp, backend_service)
def delete_firewall(gcp):
    """Best-effort deletion of the health-check firewall rule tracked on gcp."""
    firewall_name = gcp.health_check_firewall_rule.name
    try:
        logger.debug('Deleting firewall %s',
                     firewall_name)
        delete_op = gcp.compute.firewalls().delete(
            project=gcp.project,
            firewall=firewall_name).execute(
                num_retries=_GCP_API_RETRIES)
        wait_for_global_operation(gcp, delete_op['name'])
    except googleapiclient.errors.HttpError as http_error:
        logger.info('Delete failed: %s', http_error)
def delete_health_check(gcp):
    """Best-effort deletion of the health check tracked on gcp."""
    check_name = gcp.health_check.name
    try:
        logger.debug('Deleting health check %s', check_name)
        delete_op = gcp.compute.healthChecks().delete(
            project=gcp.project, healthCheck=check_name).execute(
                num_retries=_GCP_API_RETRIES)
        wait_for_global_operation(gcp, delete_op['name'])
    except googleapiclient.errors.HttpError as http_error:
        logger.info('Delete failed: %s', http_error)
def delete_instance_groups(gcp):
    """Best-effort deletion of every instance group manager tracked on gcp."""
    for instance_group in gcp.instance_groups:
        try:
            logger.debug('Deleting instance group %s %s', instance_group.name,
                         instance_group.zone)
            delete_op = gcp.compute.instanceGroupManagers().delete(
                project=gcp.project,
                zone=instance_group.zone,
                instanceGroupManager=instance_group.name).execute(
                    num_retries=_GCP_API_RETRIES)
            wait_for_zone_operation(gcp,
                                    instance_group.zone,
                                    delete_op['name'],
                                    timeout_sec=_WAIT_FOR_BACKEND_SEC)
        except googleapiclient.errors.HttpError as http_error:
            logger.info('Delete failed: %s', http_error)
def delete_instance_template(gcp):
    """Best-effort deletion of the instance template tracked on gcp."""
    template_name = gcp.instance_template.name
    try:
        logger.debug('Deleting instance template %s',
                     template_name)
        delete_op = gcp.compute.instanceTemplates().delete(
            project=gcp.project,
            instanceTemplate=template_name).execute(
                num_retries=_GCP_API_RETRIES)
        wait_for_global_operation(gcp, delete_op['name'])
    except googleapiclient.errors.HttpError as http_error:
        logger.info('Delete failed: %s', http_error)
def patch_backend_service(gcp,
                          backend_service,
                          instance_groups,
                          balancing_mode='UTILIZATION',
                          max_rate=1,
                          circuit_breakers=None):
    """Point backend_service at instance_groups, optionally with circuit breakers.

    maxRate is only meaningful for RATE balancing mode and is sent as None
    otherwise; circuit_breakers (dict or None) is passed through verbatim.
    """
    compute_to_use = gcp.alpha_compute if gcp.alpha_compute else gcp.compute
    backends = []
    for instance_group in instance_groups:
        backends.append({
            'group': instance_group.url,
            'balancingMode': balancing_mode,
            'maxRate': max_rate if balancing_mode == 'RATE' else None
        })
    config = {
        'backends': backends,
        'circuitBreakers': circuit_breakers,
    }
    logger.debug('Sending GCP request with body=%s', config)
    patch_op = compute_to_use.backendServices().patch(
        project=gcp.project, backendService=backend_service.name,
        body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp,
                              patch_op['name'],
                              timeout_sec=_WAIT_FOR_BACKEND_SEC)
def resize_instance_group(gcp,
                          instance_group,
                          new_size,
                          timeout_sec=_WAIT_FOR_OPERATION_SEC):
    """Resize a managed instance group and wait until the new size is reached.

    timeout_sec bounds the wait for the group to actually reach new_size.
    NOTE(review): the zone-operation wait below uses a fixed 360s rather than
    timeout_sec — looks deliberate, but confirm.
    """
    result = gcp.compute.instanceGroupManagers().resize(
        project=gcp.project,
        zone=instance_group.zone,
        instanceGroupManager=instance_group.name,
        size=new_size).execute(num_retries=_GCP_API_RETRIES)
    wait_for_zone_operation(gcp,
                            instance_group.zone,
                            result['name'],
                            timeout_sec=360)
    wait_for_instance_group_to_reach_expected_size(gcp, instance_group,
                                                   new_size, timeout_sec)
def patch_url_map_backend_service(gcp,
                                  backend_service=None,
                                  services_with_weights=None,
                                  route_rules=None):
    '''Change the URL map's path matcher to route to the given service(s).

    backend_service (single default service) and services_with_weights
    ({GcpResource: int weight} for weighted routing) are mutually exclusive;
    route_rules may be combined with either.

    Raises:
        ValueError: if both backend_service and services_with_weights are set.
    '''
    if gcp.alpha_compute:
        compute_to_use = gcp.alpha_compute
    else:
        compute_to_use = gcp.compute
    if backend_service and services_with_weights:
        # Message fixed: it previously referred to a nonexistent
        # 'service_with_weights' parameter.
        raise ValueError(
            'both backend_service and services_with_weights are not None.')
    default_service = backend_service.url if backend_service else None
    default_route_action = {
        'weightedBackendServices': [{
            'backendService': service.url,
            'weight': w,
        } for service, w in list(services_with_weights.items())]
    } if services_with_weights else None
    config = {
        'pathMatchers': [{
            'name': _PATH_MATCHER_NAME,
            'defaultService': default_service,
            'defaultRouteAction': default_route_action,
            'routeRules': route_rules,
        }]
    }
    logger.debug('Sending GCP request with body=%s', config)
    result = compute_to_use.urlMaps().patch(
        project=gcp.project, urlMap=gcp.url_map.name,
        body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, result['name'])
def wait_for_instance_group_to_reach_expected_size(gcp, instance_group,
                                                   expected_size, timeout_sec):
    """Poll (every 2s) until instance_group reports expected_size instances.

    Raises Exception if the size is still wrong after timeout_sec seconds.
    """
    deadline = time.time() + timeout_sec
    while True:
        current_size = len(get_instance_names(gcp, instance_group))
        if current_size == expected_size:
            return
        if time.time() > deadline:
            raise Exception(
                'Instance group had expected size %d but actual size %d' %
                (expected_size, current_size))
        time.sleep(2)
def wait_for_global_operation(gcp,
                              operation,
                              timeout_sec=_WAIT_FOR_OPERATION_SEC):
    """Block until the named global compute operation reaches DONE.

    Raises Exception if the operation reports an error, or if it does not
    complete within timeout_sec seconds (polled every 2 seconds).
    """
    deadline = time.time() + timeout_sec
    while time.time() <= deadline:
        op = gcp.compute.globalOperations().get(
            project=gcp.project,
            operation=operation).execute(num_retries=_GCP_API_RETRIES)
        if op['status'] == 'DONE':
            if 'error' in op:
                raise Exception(op['error'])
            return
        time.sleep(2)
    raise Exception('Operation %s did not complete within %d' %
                    (operation, timeout_sec))
def wait_for_zone_operation(gcp,
                            zone,
                            operation,
                            timeout_sec=_WAIT_FOR_OPERATION_SEC):
    """Block until the named zonal compute operation reaches DONE.

    Raises Exception if the operation reports an error, or if it does not
    complete within timeout_sec seconds (polled every 2 seconds).
    """
    deadline = time.time() + timeout_sec
    while time.time() <= deadline:
        op = gcp.compute.zoneOperations().get(
            project=gcp.project, zone=zone,
            operation=operation).execute(num_retries=_GCP_API_RETRIES)
        if op['status'] == 'DONE':
            if 'error' in op:
                raise Exception(op['error'])
            return
        time.sleep(2)
    raise Exception('Operation %s did not complete within %d' %
                    (operation, timeout_sec))
def wait_for_healthy_backends(gcp,
                              backend_service,
                              instance_group,
                              timeout_sec=_WAIT_FOR_BACKEND_SEC):
    """Poll GCP until every backend in instance_group reports HEALTHY.

    Each iteration also probes the gRPC health service on every instance
    directly (best effort; RpcErrors are only logged), then queries
    backendServices().getHealth() and returns once all reported backends are
    HEALTHY and their count matches the instance-group size.

    Raises:
        Exception: if the backends are not all healthy within timeout_sec.
    """
    start_time = time.time()
    config = {'group': instance_group.url}
    instance_names = get_instance_names(gcp, instance_group)
    expected_size = len(instance_names)
    # Initialized so the timeout message below is well-defined even if the
    # polling loop never runs (e.g. timeout_sec <= 0).
    result = None
    while time.time() - start_time <= timeout_sec:
        for instance_name in instance_names:
            try:
                status = get_serving_status(instance_name, gcp.service_port)
                logger.info('serving status response from %s: %s',
                            instance_name, status)
            except grpc.RpcError as rpc_error:
                logger.info('checking serving status of %s failed: %s',
                            instance_name, rpc_error)
        result = gcp.compute.backendServices().getHealth(
            project=gcp.project,
            backendService=backend_service.name,
            body=config).execute(num_retries=_GCP_API_RETRIES)
        if 'healthStatus' in result:
            logger.info('received GCP healthStatus: %s', result['healthStatus'])
            healthy = True
            for instance in result['healthStatus']:
                if instance['healthState'] != 'HEALTHY':
                    healthy = False
                    break
            if healthy and expected_size == len(result['healthStatus']):
                return
        else:
            logger.info('no healthStatus received from GCP')
        time.sleep(5)
    raise Exception('Not all backends became healthy within %d seconds: %s' %
                    (timeout_sec, result))
def get_instance_names(gcp, instance_group):
    """Return the bare instance names (not URLs) of all instances in the group."""
    listing = gcp.compute.instanceGroups().listInstances(
        project=gcp.project,
        zone=instance_group.zone,
        instanceGroup=instance_group.name,
        body={
            'instanceState': 'ALL'
        }).execute(num_retries=_GCP_API_RETRIES)
    if 'items' not in listing:
        return []
    # listInstances() returns the full URL of the instance, which ends with
    # the instance name. compute.instances().get() requires using the
    # instance name (not the full URL) to look up instance details, so we
    # just extract the name manually.
    instance_names = [
        item['instance'].split('/')[-1] for item in listing['items']
    ]
    logger.info('retrieved instance names: %s', instance_names)
    return instance_names
def clean_up(gcp):
    """Best-effort teardown of all GCP resources tracked on `gcp`.

    Deletion order matters: the forwarding rule goes before the target proxy
    it points at, which goes before the URL map; backend services go before
    the health check they reference; instance groups before their template.
    """
    if gcp.global_forwarding_rule:
        delete_global_forwarding_rule(gcp)
    if gcp.target_proxy:
        delete_target_proxy(gcp)
    if gcp.url_map:
        delete_url_map(gcp)
    delete_backend_services(gcp)
    if gcp.health_check_firewall_rule:
        delete_firewall(gcp)
    if gcp.health_check:
        delete_health_check(gcp)
    delete_instance_groups(gcp)
    if gcp.instance_template:
        delete_instance_template(gcp)
class InstanceGroup(object):
    """Lightweight record describing one GCE instance group."""

    def __init__(self, name, url, zone):
        # Group name, full resource URL, and the zone it lives in.
        self.name, self.url, self.zone = name, url, zone
class GcpResource(object):
    """Lightweight (name, url) record for a generic GCP resource."""

    def __init__(self, name, url):
        self.name, self.url = name, url
class GcpState(object):
    """Mutable container tracking the GCP resources used by a test run."""

    def __init__(self, compute, alpha_compute, project, project_num):
        # API clients and project identity.
        self.compute = compute
        self.alpha_compute = alpha_compute
        self.project = project
        self.project_num = project_num
        # Singular resources start unset and are filled in as they are
        # created or discovered.
        for attribute in ('health_check', 'health_check_firewall_rule',
                          'url_map', 'target_proxy', 'global_forwarding_rule',
                          'service_port', 'instance_template'):
            setattr(self, attribute, None)
        # Collections of resources / accumulated lookup errors.
        self.backend_services = []
        self.instance_groups = []
        self.errors = []
# Record the script start time (UTC) and the local timezone so logs produced
# on different machines can be correlated.
logging.debug(
    "script start time: %s",
    datetime.datetime.now(
        datetime.timezone.utc).astimezone().strftime("%Y-%m-%dT%H:%M:%S %Z"))
logging.debug("logging local timezone: %s",
              datetime.datetime.now(datetime.timezone.utc).astimezone().tzinfo)
alpha_compute = None
# Build the compute API clients: either from local discovery documents or via
# the live discovery service. The alpha client is only built when non-stable
# GCP APIs are permitted.
if args.compute_discovery_document:
    with open(args.compute_discovery_document, 'r') as discovery_doc:
        compute = googleapiclient.discovery.build_from_document(
            discovery_doc.read())
    if not args.only_stable_gcp_apis and args.alpha_compute_discovery_document:
        with open(args.alpha_compute_discovery_document, 'r') as discovery_doc:
            alpha_compute = googleapiclient.discovery.build_from_document(
                discovery_doc.read())
else:
    compute = googleapiclient.discovery.build('compute', 'v1')
    if not args.only_stable_gcp_apis:
        alpha_compute = googleapiclient.discovery.build('compute', 'alpha')
# Per-test JobResults (for the XML report) and the names of failed cases.
test_results = {}
failed_tests = []
try:
    gcp = GcpState(compute, alpha_compute, args.project_id, args.project_num)
    gcp_suffix = args.gcp_suffix
    health_check_name = _BASE_HEALTH_CHECK_NAME + gcp_suffix
    # Create the shared health check first; on an HttpError (e.g. name
    # collision) retry with a randomized suffix, unless resources are kept.
    if not args.use_existing_gcp_resources:
        if args.keep_gcp_resources:
            # Auto-generating a unique suffix in case of conflict should not be
            # combined with --keep_gcp_resources, as the suffix actually used
            # for GCP resources will not match the provided --gcp_suffix value.
            num_attempts = 1
        else:
            num_attempts = 5
        for i in range(num_attempts):
            try:
                logger.info('Using GCP suffix %s', gcp_suffix)
                create_health_check(gcp, health_check_name)
                break
            except googleapiclient.errors.HttpError as http_error:
                gcp_suffix = '%s-%04d' % (gcp_suffix, random.randint(0, 9999))
                health_check_name = _BASE_HEALTH_CHECK_NAME + gcp_suffix
                logger.exception('HttpError when creating health check')
        if gcp.health_check is None:
            raise Exception('Failed to create health check name after %d '
                            'attempts' % num_attempts)
    # All remaining resource names derive from the (possibly regenerated)
    # suffix so a whole run's resources share one identifier.
    firewall_name = _BASE_FIREWALL_RULE_NAME + gcp_suffix
    backend_service_name = _BASE_BACKEND_SERVICE_NAME + gcp_suffix
    alternate_backend_service_name = _BASE_BACKEND_SERVICE_NAME + '-alternate' + gcp_suffix
    extra_backend_service_name = _BASE_BACKEND_SERVICE_NAME + '-extra' + gcp_suffix
    more_extra_backend_service_name = _BASE_BACKEND_SERVICE_NAME + '-more-extra' + gcp_suffix
    url_map_name = _BASE_URL_MAP_NAME + gcp_suffix
    service_host_name = _BASE_SERVICE_HOST + gcp_suffix
    target_proxy_name = _BASE_TARGET_PROXY_NAME + gcp_suffix
    forwarding_rule_name = _BASE_FORWARDING_RULE_NAME + gcp_suffix
    template_name = _BASE_TEMPLATE_NAME + gcp_suffix
    instance_group_name = _BASE_INSTANCE_GROUP_NAME + gcp_suffix
    same_zone_instance_group_name = _BASE_INSTANCE_GROUP_NAME + '-same-zone' + gcp_suffix
    secondary_zone_instance_group_name = _BASE_INSTANCE_GROUP_NAME + '-secondary-zone' + gcp_suffix
    potential_service_ports = list(args.service_port_range)
    random.shuffle(potential_service_ports)
    # Either adopt a pre-existing resource stack by name, or build the full
    # stack (firewall -> backend services -> URL map -> proxy -> forwarding
    # rule -> instance template -> instance groups).
    if args.use_existing_gcp_resources:
        logger.info('Reusing existing GCP resources')
        get_health_check(gcp, health_check_name)
        get_health_check_firewall_rule(gcp, firewall_name)
        backend_service = get_backend_service(gcp, backend_service_name)
        alternate_backend_service = get_backend_service(
            gcp, alternate_backend_service_name)
        extra_backend_service = get_backend_service(gcp,
                                                    extra_backend_service_name,
                                                    record_error=False)
        more_extra_backend_service = get_backend_service(
            gcp, more_extra_backend_service_name, record_error=False)
        get_url_map(gcp, url_map_name)
        get_target_proxy(gcp, target_proxy_name)
        get_global_forwarding_rule(gcp, forwarding_rule_name)
        get_instance_template(gcp, template_name)
        instance_group = get_instance_group(gcp, args.zone, instance_group_name)
        same_zone_instance_group = get_instance_group(
            gcp, args.zone, same_zone_instance_group_name)
        secondary_zone_instance_group = get_instance_group(
            gcp, args.secondary_zone, secondary_zone_instance_group_name)
        if gcp.errors:
            raise Exception(gcp.errors)
    else:
        create_health_check_firewall_rule(gcp, firewall_name)
        backend_service = add_backend_service(gcp, backend_service_name)
        alternate_backend_service = add_backend_service(
            gcp, alternate_backend_service_name)
        create_url_map(gcp, url_map_name, backend_service, service_host_name)
        create_target_proxy(gcp, target_proxy_name)
        create_global_forwarding_rule(gcp, forwarding_rule_name,
                                      potential_service_ports)
        if not gcp.service_port:
            raise Exception(
                'Failed to find a valid ip:port for the forwarding rule')
        if gcp.service_port != _DEFAULT_SERVICE_PORT:
            patch_url_map_host_rule_with_port(gcp, url_map_name,
                                              backend_service,
                                              service_host_name)
        startup_script = get_startup_script(args.path_to_server_binary,
                                            gcp.service_port)
        create_instance_template(gcp, template_name, args.network,
                                 args.source_image, args.machine_type,
                                 startup_script)
        instance_group = add_instance_group(gcp, args.zone, instance_group_name,
                                            _INSTANCE_GROUP_SIZE)
        patch_backend_service(gcp, backend_service, [instance_group])
        same_zone_instance_group = add_instance_group(
            gcp, args.zone, same_zone_instance_group_name, _INSTANCE_GROUP_SIZE)
        secondary_zone_instance_group = add_instance_group(
            gcp, args.secondary_zone, secondary_zone_instance_group_name,
            _INSTANCE_GROUP_SIZE)
    wait_for_healthy_backends(gcp, backend_service, instance_group)
    # Prepare the client environment: restore caller-provided GRPC tracing,
    # compute the xDS server URI, and write the xDS bootstrap file whose path
    # is handed to the client via GRPC_XDS_BOOTSTRAP.
    if args.test_case:
        client_env = dict(os.environ)
        if original_grpc_trace:
            client_env['GRPC_TRACE'] = original_grpc_trace
        if original_grpc_verbosity:
            client_env['GRPC_VERBOSITY'] = original_grpc_verbosity
        bootstrap_server_features = []
        if gcp.service_port == _DEFAULT_SERVICE_PORT:
            server_uri = service_host_name
        else:
            server_uri = service_host_name + ':' + str(gcp.service_port)
        if args.xds_v3_support:
            client_env['GRPC_XDS_EXPERIMENTAL_V3_SUPPORT'] = 'true'
            bootstrap_server_features.append('xds_v3')
        if args.bootstrap_file:
            bootstrap_path = os.path.abspath(args.bootstrap_file)
        else:
            with tempfile.NamedTemporaryFile(delete=False) as bootstrap_file:
                bootstrap_file.write(
                    _BOOTSTRAP_TEMPLATE.format(
                        node_id='projects/%s/networks/%s/nodes/%s' %
                        (gcp.project_num, args.network.split('/')[-1],
                         uuid.uuid1()),
                        server_features=json.dumps(
                            bootstrap_server_features)).encode('utf-8'))
                bootstrap_path = bootstrap_file.name
        client_env['GRPC_XDS_BOOTSTRAP'] = bootstrap_path
        client_env['GRPC_XDS_EXPERIMENTAL_CIRCUIT_BREAKING'] = 'true'
        client_env['GRPC_XDS_EXPERIMENTAL_ENABLE_TIMEOUT'] = 'true'
        client_env['GRPC_XDS_EXPERIMENTAL_FAULT_INJECTION'] = 'true'
        # Run each requested test case; every case gets its own sponge log
        # file and a JobResult recorded for the final XML report.
        for test_case in args.test_case:
            if test_case in _V3_TEST_CASES and not args.xds_v3_support:
                logger.info('skipping test %s due to missing v3 support',
                            test_case)
                continue
            if test_case in _ALPHA_TEST_CASES and not gcp.alpha_compute:
                logger.info('skipping test %s due to missing alpha support',
                            test_case)
                continue
            if test_case in [
                    'api_listener', 'forwarding_rule_port_match',
                    'forwarding_rule_default_port'
            ] and CLIENT_HOSTS:
                logger.info(
                    'skipping test %s because test configuration is'
                    'not compatible with client processes on existing'
                    'client hosts', test_case)
                continue
            if test_case == 'forwarding_rule_default_port':
                server_uri = service_host_name
            result = jobset.JobResult()
            log_dir = os.path.join(_TEST_LOG_BASE_DIR, test_case)
            if not os.path.exists(log_dir):
                os.makedirs(log_dir)
            test_log_filename = os.path.join(log_dir, _SPONGE_LOG_NAME)
            test_log_file = open(test_log_filename, 'w+')
            client_process = None
            # Build the per-case client flags (which RPCs to send, and any
            # metadata required by metadata-sensitive cases).
            if test_case in _TESTS_TO_RUN_MULTIPLE_RPCS:
                rpcs_to_send = '--rpc="UnaryCall,EmptyCall"'
            else:
                rpcs_to_send = '--rpc="UnaryCall"'
            if test_case in _TESTS_TO_SEND_METADATA:
                metadata_to_send = '--metadata="EmptyCall:{keyE}:{valueE},UnaryCall:{keyU}:{valueU},UnaryCall:{keyNU}:{valueNU}"'.format(
                    keyE=_TEST_METADATA_KEY,
                    valueE=_TEST_METADATA_VALUE_EMPTY,
                    keyU=_TEST_METADATA_KEY,
                    valueU=_TEST_METADATA_VALUE_UNARY,
                    keyNU=_TEST_METADATA_NUMERIC_KEY,
                    valueNU=_TEST_METADATA_NUMERIC_VALUE)
            else:
                # Setting the arg explicitly to empty with '--metadata=""'
                # makes C# client fail
                # (see https://github.com/commandlineparser/commandline/issues/412),
                # so instead we just rely on clients using the default when
                # metadata arg is not specified.
                metadata_to_send = ''
            # TODO(ericgribkoff) Temporarily disable fail_on_failed_rpc checks
            # in the client. This means we will ignore intermittent RPC
            # failures (but this framework still checks that the final result
            # is as expected).
            #
            # Reason for disabling this is, the resources are shared by
            # multiple tests, and a change in previous test could be delayed
            # until the second test starts. The second test may see
            # intermittent failures because of that.
            #
            # A fix is to not share resources between tests (though that does
            # mean the tests will be significantly slower due to creating new
            # resources).
            fail_on_failed_rpc = ''
            # Launch the client (local runs only), dispatch to the matching
            # test implementation, then record PASSED/FAILED on the result.
            try:
                if not CLIENT_HOSTS:
                    client_cmd_formatted = args.client_cmd.format(
                        server_uri=server_uri,
                        stats_port=args.stats_port,
                        qps=args.qps,
                        fail_on_failed_rpc=fail_on_failed_rpc,
                        rpcs_to_send=rpcs_to_send,
                        metadata_to_send=metadata_to_send)
                    logger.debug('running client: %s', client_cmd_formatted)
                    client_cmd = shlex.split(client_cmd_formatted)
                    client_process = subprocess.Popen(client_cmd,
                                                      env=client_env,
                                                      stderr=subprocess.STDOUT,
                                                      stdout=test_log_file)
                if test_case == 'backends_restart':
                    test_backends_restart(gcp, backend_service, instance_group)
                elif test_case == 'change_backend_service':
                    test_change_backend_service(gcp, backend_service,
                                                instance_group,
                                                alternate_backend_service,
                                                same_zone_instance_group)
                elif test_case == 'gentle_failover':
                    test_gentle_failover(gcp, backend_service, instance_group,
                                         secondary_zone_instance_group)
                elif test_case == 'load_report_based_failover':
                    test_load_report_based_failover(
                        gcp, backend_service, instance_group,
                        secondary_zone_instance_group)
                elif test_case == 'ping_pong':
                    test_ping_pong(gcp, backend_service, instance_group)
                elif test_case == 'remove_instance_group':
                    test_remove_instance_group(gcp, backend_service,
                                               instance_group,
                                               same_zone_instance_group)
                elif test_case == 'round_robin':
                    test_round_robin(gcp, backend_service, instance_group)
                elif test_case == 'secondary_locality_gets_no_requests_on_partial_primary_failure':
                    test_secondary_locality_gets_no_requests_on_partial_primary_failure(
                        gcp, backend_service, instance_group,
                        secondary_zone_instance_group)
                elif test_case == 'secondary_locality_gets_requests_on_primary_failure':
                    test_secondary_locality_gets_requests_on_primary_failure(
                        gcp, backend_service, instance_group,
                        secondary_zone_instance_group)
                elif test_case == 'traffic_splitting':
                    test_traffic_splitting(gcp, backend_service, instance_group,
                                           alternate_backend_service,
                                           same_zone_instance_group)
                elif test_case == 'path_matching':
                    test_path_matching(gcp, backend_service, instance_group,
                                       alternate_backend_service,
                                       same_zone_instance_group)
                elif test_case == 'header_matching':
                    test_header_matching(gcp, backend_service, instance_group,
                                         alternate_backend_service,
                                         same_zone_instance_group)
                elif test_case == 'circuit_breaking':
                    test_circuit_breaking(gcp, backend_service, instance_group,
                                          same_zone_instance_group)
                elif test_case == 'timeout':
                    test_timeout(gcp, backend_service, instance_group)
                elif test_case == 'fault_injection':
                    test_fault_injection(gcp, backend_service, instance_group)
                elif test_case == 'api_listener':
                    server_uri = test_api_listener(gcp, backend_service,
                                                   instance_group,
                                                   alternate_backend_service)
                elif test_case == 'forwarding_rule_port_match':
                    server_uri = test_forwarding_rule_port_match(
                        gcp, backend_service, instance_group)
                elif test_case == 'forwarding_rule_default_port':
                    server_uri = test_forwarding_rule_default_port(
                        gcp, backend_service, instance_group)
                elif test_case == 'metadata_filter':
                    test_metadata_filter(gcp, backend_service, instance_group,
                                         alternate_backend_service,
                                         same_zone_instance_group)
                elif test_case == 'csds':
                    test_csds(gcp, backend_service, instance_group, server_uri)
                else:
                    logger.error('Unknown test case: %s', test_case)
                    sys.exit(1)
                # A client that died mid-test is a failure even if the test
                # logic itself raised nothing.
                if client_process and client_process.poll() is not None:
                    raise Exception(
                        'Client process exited prematurely with exit code %d' %
                        client_process.returncode)
                result.state = 'PASSED'
                result.returncode = 0
            except Exception as e:
                logger.exception('Test case %s failed', test_case)
                failed_tests.append(test_case)
                result.state = 'FAILED'
                result.message = str(e)
                if args.halt_after_fail:
                    # Stop the test suite if one case failed.
                    raise
            finally:
                # Tear down the client and record the result regardless of
                # the test outcome.
                if client_process:
                    if client_process.returncode:
                        logger.info('Client exited with code %d' %
                                    client_process.returncode)
                    else:
                        client_process.terminate()
                test_log_file.close()
                # Workaround for Python 3, as report_utils will invoke decode() on
                # result.message, which has a default value of ''.
                result.message = result.message.encode('UTF-8')
                test_results[test_case] = [result]
                if args.log_client_output:
                    logger.info('Client output:')
                    with open(test_log_filename, 'r') as client_output:
                        logger.info(client_output.read())
    # Emit the JUnit/sponge XML report covering every executed case.
    if not os.path.exists(_TEST_LOG_BASE_DIR):
        os.makedirs(_TEST_LOG_BASE_DIR)
    report_utils.render_junit_xml_report(test_results,
                                         os.path.join(
                                             _TEST_LOG_BASE_DIR,
                                             _SPONGE_XML_NAME),
                                         suite_name='xds_tests',
                                         multi_target=True)
    if failed_tests:
        logger.error('Test case(s) %s failed', failed_tests)
        sys.exit(1)
finally:
    # Clean up GCP resources unless the caller asked to keep them, or a
    # failure occurred under --halt_after_fail (kept for debugging).
    keep_resources = args.keep_gcp_resources
    if args.halt_after_fail and failed_tests:
        logger.info(
            'Halt after fail triggered, exiting without cleaning up resources')
        keep_resources = True
    if not keep_resources:
        logger.info('Cleaning up GCP resources. This may take some time.')
        clean_up(gcp)
| nicolasnoble/grpc | tools/run_tests/run_xds_tests.py | Python | apache-2.0 | 148,710 |
// ===========================================================================================================
//
// Class/Library: ListBox Control - Main Script
// Author: Michael Marzilli ( http://www.linkedin.com/in/michaelmarzilli , http://www.develteam.com/Developer/Rowell/Portfolio )
// Created: Jun 10, 2016
//
// VERS 1.0.000 : Jun 10, 2016 : Original File Created. Released for Unity 3D.
// 1.0.001 : Jun 11, 2016 : Added a SubText field/element to the ListBox Control.
// The SubText field is a right justified field that can add additional information.
// Such as displaying a price for an item in and item list for a shop.
// 1.0.002 : May 04, 2017 : Added delegates for AddListItem and RemoveListItem.
//
// ===========================================================================================================
#if UNITY_EDITOR
#define IS_DEBUGGING
#else
#undef IS_DEBUGGING
#endif
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
using System.Collections.Generic;
// Delegate type for selection-change callbacks (intSelected = newly selected index).
// NOTE(review): raise site is not in this file section — confirm against the line-item script.
public delegate void OnListBoxSelectChanged( GameObject go, int intSelected);
// Delegate type for double-click callbacks.
// NOTE(review): raise site is not in this file section — presumably fired only when AllowDoubleClick is enabled.
public delegate void OnListBoxDoubleClick( GameObject go, int intSelected);
// Delegate type for the OnAddListItem callback, invoked from PrivAddItem()
// with the freshly configured line-item GameObject and its index.
public delegate void OnAddListItemEvent( GameObject go, int intAddedIndex);
// Delegate type for item-removal callbacks.
// NOTE(review): raise site is not in this file section — confirm before relying on it.
public delegate void OnRemoveListItemEvent( GameObject go, int intRemovedIndex);
[System.Serializable]
public class ListBoxControl : MonoBehaviour
{
#region "PRIVATE CONSTANTS"
// IF DROPDOWN LIST SELECTION IS NOT BEING PROPERLY SCROLLED TO WHEN THE DROPDOWN LIST IS SELECTED,
// TRY INCREASING THE CONSTANT BELOW UNTIL THE SELECTED ITEM SCROLLS INTO VIEW PROPERLY
// (THIS CONSTANT IS USED IN THE SetScroll(float fValue) IENUMERATOR
private const float SCROLL_DELAY = 0.002f; // BEST DEFAULT: 0.12f ??
#endregion
#region "STARTING LIST ITEM CLASS"
	[System.Serializable]
	public class StartingListItem
	{
		// One inspector-configured entry; converted into a live line item in Start().
		public string Value = "";		// internal value (what the Selected* accessors report)
		public string Text = "";		// main display text
		public string SubText = "";		// right-justified secondary text (e.g. a price)
		public Sprite Icon = null;		// optional icon sprite
		public int Index = -1;		// position within StartArray; -1 until assigned
		public StartingListItem(string strValue, string strText, Sprite imgSprite = null, string strSub = "")
		{
			Value = strValue;
			Text = strText;
			SubText = strSub;
			Icon = imgSprite;
		}
	}
#endregion
	#region "PRIVATE VARIABLES"
	// SERIALIZED FIELDS
	[SerializeField]
	protected List<StartingListItem> _startArray = new List<StartingListItem>();	// design-time items, inserted in Start()
	[SerializeField]
	private string _strTitle = "";			// backing field for Title
	[SerializeField]
	private bool _blnBestFit = false;		// backing field for TitleBestFit
	[SerializeField]
	private bool _blnAllowDblClick = false;		// backing field for AllowDoubleClick
	[SerializeField]
	private bool _blnPartOfDDL = false;		// backing field for PartOfDDL
	private ListBoxModes _lbMode = ListBoxModes.ListBox;	// ListBox vs DropDownList behavior switch
	private List<ListBoxLineItem> _items = new List<ListBoxLineItem>();	// live line items (see Items)
	private RectTransform _rtContainer = null;	// cache backing ContainerRect
	private RectTransform _rtScrollRect = null;	// cache backing ScrollRect
	private int _intItemCount = 0;		// running count of created items
	private int _intSelectedItem = -1;	// primary/anchor selection index (-1 = none)
	private List<int> _intSelectedList = new List<int>();	// all selected indexes (multi-select)
	protected bool _blnInitialized = false;	// set true at the end of Start()
	#endregion
	#region "PRIVATE PROPERTIES"
	// Lazily caches the RectTransform of the scroll container object.
	private RectTransform ContainerRect
	{
		get
		{
			if (_rtContainer == null)
				if (ScrollContainerObject != null)
					_rtContainer = ScrollContainerObject.GetComponent<RectTransform>();
			return _rtContainer;
		}
	}
	// Lazily caches the RectTransform of the scroll rect object.
	private RectTransform ScrollRect
	{
		get
		{
			if (_rtScrollRect == null)
				if (ScrollRectObject != null)
					_rtScrollRect = ScrollRectObject.GetComponent<RectTransform>();
			return _rtScrollRect;
		}
	}
	#endregion
	#region "PUBLIC EDITOR PROPERTIES"
	public GameObject ScrollBarObject;		// scrollbar reflecting/driving the list scroll position
	public GameObject ScrollRectObject;		// the ScrollRect viewport object
	public GameObject ScrollContainerObject;	// parent that line items are instantiated under
	public Text ListBoxTitle;			// optional title text (ListBox mode only)
	public GameObject ListBoxLineItemPrefabObject;	// prefab carrying a ListBoxLineItem component
	[SerializeField]
	public Color ItemNormalColor;		// per-state line item colors, copied onto each item
	[SerializeField]
	public Color ItemHighlightColor;
	[SerializeField]
	public Color ItemSelectedColor;
	[SerializeField]
	public Color ItemDisabledColor;
	[SerializeField]
	public bool CanMultiSelect = false;
	[SerializeField]
	public bool AutoMultiSelect = false;
	[SerializeField]
	public float Height = 36;		// line item height; <= 0 means inherit from the prefab
	[SerializeField]
	public float Spacing = 4;		// vertical gap between line items
	[SerializeField]
	public char SeparatorChar = '|';	// delimiter for multi-part item Values
	#endregion
#region "PUBLIC PROPERTIES"
	public enum ListBoxModes : int { ListBox = 0, DropDownList = 1 }
	// Operating mode: a stand-alone ListBox or the list portion of a DropDownList.
	// Title handling, selection and scrolling behave differently in DropDownList mode.
	public ListBoxModes ListBoxMode
	{
		get
		{
			return _lbMode;
		}
		set
		{
			_lbMode = value;
		}
	}
	// HANDLE LISTBOX TITLE
	// Title text shown above the list; an empty (trimmed) string hides the title,
	// and the title is always hidden in DropDownList mode.
	public string Title
	{
		get
		{
			return _strTitle;
		}
		set
		{
			_strTitle = value.Trim();
			if (ListBoxMode == ListBoxModes.ListBox && ListBoxTitle != null)
			{
				ListBoxTitle.gameObject.SetActive(_strTitle != "");
				ListBoxTitle.text = _strTitle;
			} else
			if (ListBoxTitle != null)
				ListBoxTitle.gameObject.SetActive(false);
		}
	}
	// Mirrors Text.resizeTextForBestFit on the title label (ListBox mode only).
	public bool TitleBestFit
	{
		get
		{
			return _blnBestFit;
		}
		set
		{
			_blnBestFit = value;
			if (ListBoxMode == ListBoxModes.ListBox && ListBoxTitle != null)
				ListBoxTitle.resizeTextForBestFit = _blnBestFit;
		}
	}
	// Double-click support; reads as enabled only for a plain ListBox that is
	// not backing a DropDownList.
	public bool AllowDoubleClick
	{
		get
		{
			return _blnAllowDblClick && !_blnPartOfDDL && ListBoxMode == ListBoxModes.ListBox;
		}
		set
		{
			_blnAllowDblClick = value;
		}
	}
	// True when this ListBox is the list portion of a DropDownList control.
	public bool PartOfDDL
	{
		get
		{
			return _blnPartOfDDL;
		}
		set
		{
			_blnPartOfDDL = value;
		}
	}
	// HANDLE STARTING LIST ITEMS
	// Design-time items queued for insertion when the control starts.
	public List<StartingListItem> StartArray
	{
		get
		{
			return _startArray;
		}
	}
	// HANDLE SELECTION (GET)
	// Live line items currently in the list (backing list created on demand).
	public virtual List<ListBoxLineItem> Items
	{
		get
		{
			if (_items == null)
				_items = new List<ListBoxLineItem>();
			return _items;
		}
	}
public virtual List<int> SelectedIndexes
{
get
{
if (_intSelectedList == null)
_intSelectedList = new List<int>();
return _intSelectedList;
}
}
public virtual List<string> SelectedValues
{
get
{
if (_intSelectedItem < 0 || _intSelectedList == null || _intSelectedList.Count < 0)
return null;
List<string> st = new List<string>();
for (int i = 0; i < _intSelectedList.Count; i++)
st.Add(Items[_intSelectedList[i]].Value);
return st;
}
}
public virtual string SelectedValuesString
{
get
{
List<string> st = SelectedValues;
if (st == null || st.Count < 1)
return "";
string strOut = "";
for (int i = 0; i < st.Count; i++)
{
if (st[i].Trim() != "")
strOut += SeparatorChar + st[i];
}
if (strOut.Length > 1)
strOut = strOut.Substring(1);
return strOut;
}
}
public virtual string SelectedValue
{
get
{
if (_intSelectedItem < 0 || _intSelectedList == null || _intSelectedList.Count < 0)
return null;
return Items[_intSelectedList[0]].Value;
}
}
public virtual string SelectedArrayValue(int intIndex)
{
if (intIndex > Items[_intSelectedList[0]].Value.Split(SeparatorChar).Length - 1)
return "";
return Items[_intSelectedList[0]].Value.Split(SeparatorChar)[intIndex];
}
public virtual int SelectedValueInt
{
get
{
if (_intSelectedItem < 0 || _intSelectedList == null || _intSelectedList.Count < 0)
return -1;
return Util.ConvertToInt(Items[_intSelectedList[0]].Value);
}
}
public virtual int SelectedArrayValueInt(int intIndex)
{
return Util.ConvertToInt(SelectedArrayValue(intIndex));
}
public virtual int SelectedIndex
{
get
{
return _intSelectedItem;
}
set
{
_intSelectedItem = value;
}
}
public virtual string SelectedText
{
get
{
if (_intSelectedItem < 0 || _intSelectedList == null || _intSelectedList.Count < 0)
return "";
return Items[_intSelectedList[0]].Text;
}
}
public bool IsInitialized
{
get
{
return _blnInitialized;
}
}
#endregion
#region "PRIVATE FUNCTIONS"
	// Unity lifecycle: resets selection state; in ListBox mode also matches the
	// scroll container's width to the control and destroys design-time children.
	private void Awake()
	{
		// INITIALIZE THE ITEM LIST
		_intSelectedItem = -1;
		_items = new List<ListBoxLineItem>();
		_intSelectedList = new List<int>();
		// EXIT IF THIS IS A DROPDOWN LIST
		if (ListBoxMode == ListBoxModes.DropDownList)
			return;
		// RE-SIZE THE SCROLL CONTAINER
		// REMOVE ANY GAMEOBJECTS IN THE CONTAINER
		if (ScrollContainerObject != null)
		{
			// RESIZE THE WIDTH OF THE CONTAINER TO MATCH THE CONTROL
			Vector2 v2 = ScrollContainerObject.GetComponent<RectTransform>().sizeDelta;
			v2.x = ScrollContainerObject.transform.parent.GetComponent<RectTransform>().sizeDelta.x;
			ScrollContainerObject.GetComponent<RectTransform>().sizeDelta = v2;
			// REMOVE GAMEOBJECTS IN THE CONTAINER
			if (ScrollContainerObject.transform.childCount > 0)
			{
				for (int i = ScrollContainerObject.transform.childCount - 1; i >= 0; i--)
					Destroy(ScrollContainerObject.transform.GetChild(i).gameObject);
			}
		}
	}
	// One-time setup: sizes the container, configures scroll sensitivity, applies
	// the title, validates the line-item prefab and inserts the StartArray items.
	private void Start()
	{
		if (!gameObject.activeInHierarchy) // && !_blnInitialized)
			return;
		// RESIZE THE ITEM CONTAINER TO THE WIDTH OF THE SCROLL RECT
		if (ContainerRect != null)
			ContainerRect.sizeDelta = new Vector2(ScrollRect.rect.width, ContainerRect.rect.height);
		// SET SCROLLBAR SENSITIVITY
		if (ScrollRectObject != null)
			ScrollRectObject.GetComponent<ScrollRect>().scrollSensitivity = Height - Spacing;
		if (ScrollBarObject != null)
			ScrollBarObject.GetComponent<Scrollbar>().numberOfSteps = 1;
		// EXIT IF THIS IS A DROPDOWN LIST
		if (ListBoxMode == ListBoxModes.DropDownList)
			return;
		// SET TITLE
		if (ListBoxTitle != null)
			Title = _strTitle;
		// CHECK FOR LINE ITEM PREFAB
		if (ListBoxLineItemPrefabObject == null)
			Debug.LogError(gameObject.name + " is Missing the Line Item Prefab. Please add the Prefab.");
		else if (ListBoxLineItemPrefabObject.GetComponent<ListBoxLineItem>() == null)
			Debug.LogError(gameObject.name + " is Missing the Line Item Prefab. Please add the Prefab.");
		// ADD INITIAL LIST ITEMS (IF THERE ARE ANY)
		// NOTE(review): StartArray[i].SubText is not passed through here — possible oversight.
		if (StartArray.Count > 0)
		{
			for (int i = 0; i < StartArray.Count; i++)
			{
				AddItem(StartArray[i].Value, StartArray[i].Text, StartArray[i].Icon);
			}
		}
		// MARK CONTROL AS INITIALIZED
		_blnInitialized = true;
	}
	// Runs Start() late when the control is enabled before initialization completed,
	// then re-validates the container height in ListBox mode.
	private void OnEnable()
	{
		if (!_blnInitialized && gameObject.activeInHierarchy)
			Start();
		// MAKE SURE THAT THE LIST BOX ITEM CONTAINER IS PROPERLY SIZED (HEIGHT)
		if (ListBoxMode == ListBoxModes.ListBox)
			UpdateListBoxContainerSize();
	}
	// Grows/shrinks the scroll container to fit all items, then restores the
	// previous scrollbar position via the delayed SetScroll coroutine.
	private void ResizeContainer()
	{
		if (!Application.isPlaying || ListBoxMode == ListBoxModes.DropDownList)
			return;
		float fScroll = 1;
		if (ScrollBarObject != null)
			fScroll = ScrollBarObject.GetComponent<Scrollbar>().value;
		Vector2 v2 = ContainerRect.sizeDelta;
		v2.y = ((this.Height + this.Spacing) * Items.Count) + this.Spacing;
		ContainerRect.sizeDelta = v2;
		try
		{
			if (gameObject.activeInHierarchy)
				StartCoroutine(SetScroll(fScroll));
		} catch { }
	}
	// Selects every enabled, shown item between the current anchor
	// (_intSelectedItem) and intEnd inclusive, walking in either direction,
	// appending each selected index to the multi-select list.
	private void SelectByRange( int intEnd)
	{
		// SELECTS A RANGE OF ITEMS STARTING AT _intSelectedItem, EXTENDING TO intEnd
		int s = (int)Mathf.Sign(intEnd - _intSelectedItem);	// walk direction: +1 or -1
		int i = _intSelectedItem;
		int e = intEnd;
		while (e >= 0 && i >= 0 && i < Items.Count &&
			((s > 0 && i <= e) || (s < 0 && i >= e)))
		{
			if (Items[i].Enabled && Items[i].Shown)
			{
				Items[i].Select();
				_intSelectedList.Add(i);
			}
			i += s;
		}
	}
	// Un-selects the item at intIndex and removes it from the selection list.
	// When the anchor item itself is un-selected, the anchor moves to the first
	// remaining selection (or -1 when nothing is left).
	private void UnSelectItem( int intIndex)
	{
		if (ListBoxMode == ListBoxModes.DropDownList)
			return;
		// UNSELECT SINGLE ITEM
		if (intIndex >= 0 && intIndex == _intSelectedItem && Items[intIndex] != null)
		{
			Items[_intSelectedItem].UnSelect();
			int i = _intSelectedList.FindIndex(x => x == intIndex);
			_intSelectedList.RemoveAt(i);
			if (_intSelectedList.Count > 0)
				_intSelectedItem = _intSelectedList[0];
			else
				_intSelectedItem = -1;
		} else
		// UNSELECT THE ITEM FROM THE LIST
		if (_intSelectedList.Count > 0)
		{
			// _intSelectedList[i] == intIndex here, so this un-selects Items[intIndex].
			int i = _intSelectedList.FindIndex(x => x == intIndex);
			if (i >= 0)
			{
				Items[_intSelectedList[i]].UnSelect();
				_intSelectedList.RemoveAt(i);
			}
		}
	}
private void UnSelectByRange(int intEnd)
{
if (ListBoxMode == ListBoxModes.DropDownList)
return;
int s = (int)Mathf.Sign(intEnd - _intSelectedItem);
int i = _intSelectedItem;
int e = intEnd;
while (e >= 0 && i >= 0 && i < Items.Count &&
((s > 0 && i <= e) || (s < 0 && i >= e)))
{
Items[_intSelectedList[i]].UnSelect();
_intSelectedList.RemoveAt(i);
i += s;
}
}
	// Clears the entire selection: the anchor item plus every multi-selected item.
	private void UnSelectAllItems()
	{
		if (ListBoxMode == ListBoxModes.DropDownList)
			return;
		// UNSELECT SINGLE ITEM
		if (_intSelectedItem >= 0 && Items[_intSelectedItem] != null)
			Items[_intSelectedItem].UnSelect();
		// UNSELECT MULTIPLY SELECTED ITEMS
		if (_intSelectedList.Count > 0)
		{
			for (int i = _intSelectedList.Count - 1; i >= 0; i--)
			{
				Items[_intSelectedList[i]].UnSelect();
				_intSelectedList.RemoveAt(i);
			}
		}
		// NOTE(review): _intSelectedItem is not reset to -1 here; confirm callers
		// reset it, otherwise SelectedIndex can go stale after a full un-select.
	}
	// Coroutine: re-applies the scrollbar value after the UI has laid out.
	// The zero-then-value double assignment plus SCROLL_DELAY works around
	// Unity UI scroll timing (see the SCROLL_DELAY comment at the top of the file).
	private IEnumerator SetScroll(float fValue)
	{
		yield return new WaitForSeconds(0.001f);
		if (gameObject.activeInHierarchy && ScrollBarObject != null && ScrollBarObject.activeSelf && ListBoxMode == ListBoxModes.ListBox)
		{
			yield return new WaitForSeconds(SCROLL_DELAY);
			ScrollBarObject.GetComponent<Scrollbar>().value = 0;
			yield return new WaitForSeconds(0.0001f);
			ScrollBarObject.GetComponent<Scrollbar>().value = fValue;
		}
	}
private void PrivAddItem( string strValue, string strText, string strIcon = "", string strSub = "")
{
// CHECK IF LINE ITEM PREFAB EXISTS
if (ListBoxLineItemPrefabObject == null)
{
Debug.LogError(gameObject.name + " is Missing the Line Item Prefab. Please add the Prefab.");
return;
} else if (ListBoxLineItemPrefabObject.GetComponent<ListBoxLineItem>() == null) {
Debug.LogError(gameObject.name + " is Missing the Line Item Prefab. Please add the Prefab.");
return;
}
// CALCULATE ICON SPRITE
Sprite sprIcon = null;
if (strIcon != "")
{
sprIcon = Resources.Load<Sprite>(strIcon);
}
int i = Items.FindIndex(x => x.Value.ToLower() == strValue.ToLower() || x.Text.ToLower() == strText.ToLower());
if (i >= 0)
{
// ITEM ALREADY EXISTS -- UPDATE IT
Items[i].Value = strValue;
Items[i].Text = strText;
Items[i].SubText = strSub;
Items[i].SetIcon(sprIcon);
} else {
// ITEM DOES NOT EXIST -- CREATE IT
_intItemCount++;
i = Items.Count;
GameObject go = (GameObject)Instantiate(ListBoxLineItemPrefabObject);
PrivAddItem(go, i, strValue, strText, sprIcon, strSub);
}
}
	// Adds a new line item, or updates the existing one whose Value or Text
	// already matches (case-insensitive).
	private void PrivAddItem( string strValue, string strText, Sprite sprIcon, string strSub = "")
	{
		// CHECK IF LINE ITEM PREFAB EXISTS
		if (ListBoxLineItemPrefabObject == null)
		{
			Debug.LogError(gameObject.name + " is Missing the Line Item Prefab. Please add the Prefab.");
			return;
		} else if (ListBoxLineItemPrefabObject.GetComponent<ListBoxLineItem>() == null) {
			Debug.LogError(gameObject.name + " is Missing the Line Item Prefab. Please add the Prefab.");
			return;
		}
		int i = Items.FindIndex(x => x.Value.ToLower() == strValue.ToLower() || x.Text.ToLower() == strText.ToLower());
		if (i >= 0)
		{
			// ITEM ALREADY EXISTS -- UPDATE IT
			Items[i].Value = strValue;
			Items[i].Text = strText;
			Items[i].SubText = strSub;
			Items[i].SetIcon(sprIcon);
		} else {
			// ITEM DOES NOT EXIST -- CREATE IT
			_intItemCount++;
			i = Items.Count;
			GameObject go = (GameObject)Instantiate(ListBoxLineItemPrefabObject);
			PrivAddItem(go, i, strValue, strText, sprIcon, strSub);
		}
	}
	// Configures a freshly instantiated line-item GameObject (parent, scale,
	// index, size, colors, content), fires the OnAddListItem callback,
	// registers the item in Items and resizes the scroll container.
	private void PrivAddItem(GameObject go, int intIndex, string strValue, string strText, Sprite sprIcon, string strSub = "")
	{
		go.transform.SetParent(ScrollContainerObject.transform, false);
		CanvasScaler scaler = go.transform.GetComponentInParent<CanvasScaler>();
		if (scaler != null && scaler.uiScaleMode == CanvasScaler.ScaleMode.ScaleWithScreenSize)
		{
			// If the parent Canvas Scaler has UI Scale Mode set to "ScaleWithScreenSize", it messes up the item's scale, so we reset it to 1
			go.GetComponent<RectTransform>().localScale = Vector3.one;
			go.transform.localScale = Vector3.one;
		}
		go.GetComponent<ListBoxLineItem>().ListBoxControlObject = this.gameObject;
		go.GetComponent<ListBoxLineItem>().Index = intIndex;
		go.GetComponent<ListBoxLineItem>().Spacing = this.Spacing;
		go.GetComponent<ListBoxLineItem>().Width = ContainerRect.sizeDelta.x - (this.Spacing * 2);
		// Height <= 0 means "inherit the prefab's height" and cache it on the control.
		if (this.Height > 0)
			go.GetComponent<ListBoxLineItem>().Height = this.Height;
		else
			this.Height = go.GetComponent<ListBoxLineItem>().Height;
		go.GetComponent<ListBoxLineItem>().ItemNormalColor = ItemNormalColor;
		go.GetComponent<ListBoxLineItem>().ItemHighlightColor = ItemHighlightColor;
		go.GetComponent<ListBoxLineItem>().ItemSelectedColor = ItemSelectedColor;
		go.GetComponent<ListBoxLineItem>().ItemDisabledColor = ItemDisabledColor;
		go.GetComponent<ListBoxLineItem>().Value = strValue;
		go.GetComponent<ListBoxLineItem>().Text = strText;
		go.GetComponent<ListBoxLineItem>().SubText = strSub;
		go.GetComponent<ListBoxLineItem>().SetIcon(sprIcon);
		// Notify listeners that a line item was added.
		if (OnAddListItem != null)
			OnAddListItem(go, intIndex);
		go.GetComponent<ListBoxLineItem>().AutoSize();
		Items.Add(go.GetComponent<ListBoxLineItem>());
		ResizeContainer();
	}
#endregion
#region "PUBLIC FUNCTIONS"
#region "LIST BOX STARTING ITEMS"
// -- CLEAR STARTING LIST
	// Discards all design-time starting items (does not touch live Items).
	public void ClearStartItems()
	{
		_startArray = new List<StartingListItem>();
	}
public void InitStartItems(List<StartingListItem> sli)
{
ClearStartItems();
foreach (StartingListItem s in sli)
{
_startArray.Add(s);
}
}
// -- ADD ITEM TO STARTING LIST
	// Adds a starting item, or overwrites the existing entry whose Value or
	// Text matches (case-insensitive). The Index field always tracks the
	// item's position within StartArray.
	public virtual void AddStartItem(string strValue, string strText, Sprite sprIcon = null, string strSub = "")
	{
		int i = StartArray.FindIndex(x => x.Value.ToLower() == strValue.ToLower() || x.Text.ToLower() == strText.ToLower());
		if (i >= 0)
		{
			// OVERWRITE EXISTING ITEM
			StartArray[i].Value = strValue;
			StartArray[i].Text = strText;
			StartArray[i].Icon = sprIcon;
			StartArray[i].SubText = strSub;
			StartArray[i].Index = i;
		} else {
			StartArray.Add(new StartingListItem(strValue, strText, sprIcon, strSub));
			StartArray[StartArray.Count - 1].Index = StartArray.Count - 1;
		}
	}
// -- REMOVE ITEM FROM STARTING LIST
public virtual void RemoveStartItemByIndex(int intIndex)
{
if (intIndex < 0 || intIndex >= StartArray.Count)
return;
for (int i = StartArray.Count - 1; i >= intIndex; i--)
{
if (i > intIndex)
{
// MOVE THE ITEM UP ONE SLOT
StartArray[i].Index = i - 1;
} else {
// REMOVE THE ITEM
StartArray.RemoveAt(i);
}
}
}
public virtual void RemoveStartItemByValue(string strValue)
{
int i = StartArray.FindIndex(x => x.Value.ToLower() == strValue.ToLower());
if (i >= 0)
RemoveStartItemByIndex(i);
}
public virtual void RemoveStartItemByText( string strText)
{
int i = StartArray.FindIndex(x => x.Text.ToLower() == strText.ToLower());
if (i >= 0)
RemoveStartItemByIndex(i);
}
// -- SORT ITEMS IN STARTING LIST
public virtual void SortStartByValue()
{
StartArray.Sort((p1, p2) => p1.Text.CompareTo(p2.Value));
for (int i = 0; i < StartArray.Count; i++)
{
StartArray[i].Index = i;
}
}
	// Sorts the starting items by display Text, then renumbers Index fields.
	public virtual void SortStartByText()
	{
		StartArray.Sort((p1, p2) => p1.Text.CompareTo(p2.Text));
		for (int i = 0; i < StartArray.Count; i++)
		{
			StartArray[i].Index = i;
		}
	}
	// Sorts the starting items by SubText, then renumbers Index fields.
	public virtual void SortStartBySub()
	{
		StartArray.Sort((p1, p2) => p1.SubText.CompareTo(p2.SubText));
		for (int i = 0; i < StartArray.Count; i++)
		{
			StartArray[i].Index = i;
		}
	}
#endregion
#region "LIST BOX ITEMS"
    // HANDLE LISTBOX ITEMS
    /// <summary>
    /// Empties the list box: resets the item count and all selection state, then
    /// destroys any item GameObjects still parented to the scroll container.
    /// </summary>
    public virtual void Clear()
    {
        // INITIALIZE THE ITEM LIST
        _intItemCount = 0;
        _intSelectedItem = -1;   // -1 == nothing selected
        _items = new List<ListBoxLineItem>();
        _intSelectedList = new List<int>();
        // REMOVE ANY GAMEOBJECTS IN THE CONTAINER
        // (iterate backwards so Destroy() calls do not disturb the remaining child indexes)
        if (ScrollContainerObject.transform.childCount > 0)
        {
            for (int i = ScrollContainerObject.transform.childCount - 1; i >= 0; i--)
                Destroy(ScrollContainerObject.transform.GetChild(i).gameObject);
        }
    }
    // -- ADD ITEM TO LISTBOX
    // The first two overloads forward to PrivAddItem (the real implementation);
    // the remaining overloads convert numeric sub-text values to strings and delegate.
    public virtual void AddItem(string strValue, string strText, string strIcon = "", string strSub = "")
    {
        PrivAddItem(strValue, strText, strIcon, strSub);
    }
    public virtual void AddItem(string strValue, string strText, Sprite sprIcon, string strSub = "")
    {
        PrivAddItem(strValue, strText, sprIcon, strSub);
    }
    public virtual void AddItem(string strValue, string strText, string strIcon, int intSub)
    {
        AddItem(strValue, strText, strIcon, intSub.ToString());
    }
    public virtual void AddItem(string strValue, string strText, string strIcon, float fSub)
    {
        AddItem(strValue, strText, strIcon, fSub.ToString());
    }
    public virtual void AddItem(string strValue, string strText, Sprite sprIcon, int intSub)
    {
        AddItem(strValue, strText, sprIcon, intSub.ToString());
    }
    public virtual void AddItem(string strValue, string strText, Sprite sprIcon, float fSub)
    {
        AddItem(strValue, strText, sprIcon, fSub.ToString());
    }
public virtual void AddItem(string[] strValue, string strText)
{
if (strValue != null && strValue.Length > 0 && strText.Trim() != "")
{
string strNewVal = "";
for (int i = 0; i < strValue.Length; i++)
strNewVal += SeparatorChar + strValue[i];
strNewVal = strNewVal.Substring(1);
AddItem(strNewVal, strText);
}
}
public virtual void AddItem(string[] strValue, string strText, string strIcon)
{
if (strValue != null && strValue.Length > 0 && strText.Trim() != "")
{
string strNewVal = "";
for (int i = 0; i < strValue.Length; i++)
strNewVal += SeparatorChar + strValue[i];
strNewVal = strNewVal.Substring(1);
AddItem(strNewVal, strText, strIcon);
}
}
public virtual void AddItem(string[] strValue, string strText, string strIcon, string strSub)
{
if (strValue != null && strValue.Length > 0 && strText.Trim() != "")
{
string strNewVal = "";
for (int i = 0; i < strValue.Length; i++)
strNewVal += SeparatorChar + strValue[i];
strNewVal = strNewVal.Substring(1);
AddItem(strNewVal, strText, strIcon, strSub);
}
}
public virtual void AddItem(string[] strValue, string strText, string strIcon, int intSub)
{
if (strValue != null && strValue.Length > 0 && strText.Trim() != "")
{
string strNewVal = "";
for (int i = 0; i < strValue.Length; i++)
strNewVal += SeparatorChar + strValue[i];
strNewVal = strNewVal.Substring(1);
AddItem(strNewVal, strText, strIcon, intSub.ToString());
}
}
public virtual void AddItem(string[] strValue, string strText, string strIcon, float fSub)
{
if (strValue != null && strValue.Length > 0 && strText.Trim() != "")
{
string strNewVal = "";
for (int i = 0; i < strValue.Length; i++)
strNewVal += SeparatorChar + strValue[i];
strNewVal = strNewVal.Substring(1);
AddItem(strNewVal, strText, strIcon, fSub.ToString());
}
}
public virtual void AddItem(string[] strValue, string strText, Sprite sprIcon)
{
if (strValue != null && strValue.Length > 0 && strText.Trim() != "")
{
string strNewVal = "";
for (int i = 0; i < strValue.Length; i++)
strNewVal += SeparatorChar + strValue[i];
strNewVal = strNewVal.Substring(1);
AddItem(strNewVal, strText, sprIcon);
}
}
public virtual void AddItem(string[] strValue, string strText, Sprite sprIcon, string strSub)
{
if (strValue != null && strValue.Length > 0 && strText.Trim() != "")
{
string strNewVal = "";
for (int i = 0; i < strValue.Length; i++)
strNewVal += SeparatorChar + strValue[i];
strNewVal = strNewVal.Substring(1);
AddItem(strNewVal, strText, sprIcon, strSub);
}
}
public virtual void AddItem(string[] strValue, string strText, Sprite sprIcon, int intSub)
{
if (strValue != null && strValue.Length > 0 && strText.Trim() != "")
{
string strNewVal = "";
for (int i = 0; i < strValue.Length; i++)
strNewVal += SeparatorChar + strValue[i];
strNewVal = strNewVal.Substring(1);
AddItem(strNewVal, strText, sprIcon, intSub.ToString());
}
}
public virtual void AddItem(string[] strValue, string strText, Sprite sprIcon, float fSub)
{
if (strValue != null && strValue.Length > 0 && strText.Trim() != "")
{
string strNewVal = "";
for (int i = 0; i < strValue.Length; i++)
strNewVal += SeparatorChar + strValue[i];
strNewVal = strNewVal.Substring(1);
AddItem(strNewVal, strText, sprIcon, fSub.ToString());
}
}
    // Integer-valued convenience overloads: each converts intValue (and any
    // numeric sub-text) to a string and delegates to the string-valued AddItem.
    public virtual void AddItem(int intValue, string strText)
    {
        AddItem(intValue.ToString(), strText);
    }
    public virtual void AddItem(int intValue, string strText, string strIcon)
    {
        AddItem(intValue.ToString(), strText, strIcon);
    }
    public virtual void AddItem(int intValue, string strText, string strIcon, string strSub)
    {
        AddItem(intValue.ToString(), strText, strIcon, strSub);
    }
    public virtual void AddItem(int intValue, string strText, string strIcon, int intSub)
    {
        AddItem(intValue.ToString(), strText, strIcon, intSub.ToString());
    }
    public virtual void AddItem(int intValue, string strText, string strIcon, float fSub)
    {
        AddItem(intValue.ToString(), strText, strIcon, fSub.ToString());
    }
    public virtual void AddItem(int intValue, string strText, Sprite sprIcon)
    {
        AddItem(intValue.ToString(), strText, sprIcon);
    }
    public virtual void AddItem(int intValue, string strText, Sprite sprIcon, string strSub)
    {
        AddItem(intValue.ToString(), strText, sprIcon, strSub);
    }
    public virtual void AddItem(int intValue, string strText, Sprite sprIcon, int intSub)
    {
        AddItem(intValue.ToString(), strText, sprIcon, intSub.ToString());
    }
    public virtual void AddItem(int intValue, string strText, Sprite sprIcon, float fSub)
    {
        AddItem(intValue.ToString(), strText, sprIcon, fSub.ToString());
    }
    // -- REMOVE ITEM FROM LISTBOX
    /// <summary>
    /// Removes the visible item at <paramref name="intIndex"/>: fires the
    /// OnRemoveListItem event, destroys the item's GameObject, renumbers the
    /// items that followed it, clears the selection, and resizes the container.
    /// </summary>
    public virtual void RemoveItemByIndex(int intIndex)
    {
        if (intIndex < 0 || intIndex >= Items.Count)
            return;
        // NOTIFY LISTENERS BEFORE THE GAMEOBJECT IS DESTROYED
        if (OnRemoveListItem != null)
            OnRemoveListItem(Items[intIndex].gameObject, intIndex);
        // WALK BACKWARDS: shift later indexes down, then destroy/remove the target
        for (int i = Items.Count - 1; i >= intIndex; i--)
        {
            if (i > intIndex)
            {
                // MOVE THE ITEM UP ONE SLOT
                Items[i].Index = i - 1;
                Items[i].AutoSize();
            } else {
                // REMOVE THE ITEM
                Items[i].Destroy();
                Items.RemoveAt(i);
            }
        }
        // ANY REMOVAL INVALIDATES THE CURRENT SELECTION
        _intItemCount--;
        _intSelectedItem = -1;
        _intSelectedList = new List<int>();
        ResizeContainer();
    }
public virtual void RemoveItemByValue(string strValue)
{
int i = Items.FindIndex(x => x.Value.ToLower() == strValue.ToLower());
if (i >= 0)
RemoveItemByIndex(i);
}
public virtual void RemoveItemByText( string strText)
{
int i = Items.FindIndex(x => x.Text.ToLower() == strText.ToLower());
if (i >= 0)
RemoveItemByIndex(i);
}
    // -- SORT LISTBOX ITEMS
    /// <summary>Default sort order: alphabetical by display text.</summary>
    public virtual void Sort()
    {
        SortByText();
    }
public virtual void SortByText()
{
Items.Sort((p1, p2) => p1.Text.CompareTo(p2.Text));
for (int i = 0; i < Items.Count; i++)
{
Items[i].Index = i;
Items[i].AutoSize();
}
}
public virtual void SortByValue()
{
Items.Sort((p1, p2) => p1.Value.CompareTo(p2.Value));
for (int i = 0; i < Items.Count; i++)
{
Items[i].Index = i;
Items[i].AutoSize();
}
}
public virtual void SortBySubText()
{
Items.Sort((p1, p2) => p1.SubText.CompareTo(p2.SubText));
for (int i = 0; i < Items.Count; i++)
{
Items[i].Index = i;
Items[i].AutoSize();
}
}
    // -- SET LISTBOX SCROLLBAR POSITION
    // Scroll value 1 == top of the list, 0 == bottom (see SetToIndex, which
    // clamps to 1 for index 0). Coroutines can only run on an active object,
    // hence the activeInHierarchy guard.
    /// <summary>Scrolls the list box to the very top.</summary>
    public virtual void SetToTop()
    {
        if (gameObject.activeInHierarchy)
            StartCoroutine(SetScroll(1));
    }
    /// <summary>Scrolls the list box to the very bottom.</summary>
    public virtual void SetToBottom()
    {
        if (gameObject.activeInHierarchy)
            StartCoroutine(SetScroll(0));
    }
    /// <summary>
    /// Scrolls the list so the item at <paramref name="intIndex"/> becomes visible
    /// inside the clipped container (1 == top, 0 == bottom).
    /// </summary>
    public virtual void SetToIndex(int intIndex)
    {
        // SET THE SCROLLBAR TO MAKE THE SELECTED INDEX (intIndex) VISIBLE IN THE SCROLL CONTAINER
        float c = ContainerRect.rect.height; // THE HEIGHT OF THE LISTBOX CONTAINER (VISIBLE TO THE USER)
        float h = Height + Spacing; // THE HEIGHT OF AN INDIVIDUAL LIST ITEM
        // NOTE(review): total height is computed from (Count - 2) rows rather than
        // Count; looks deliberate but verify against UpdateListBoxContainerSize's math.
        float t = ((((float) _items.Count - 2) * h) + Spacing); // THE TOTAL HEIGHT OF CONTAINER OF ALL LIST ITEMS
        float p = (((float) intIndex) * h); // THE Y-POS OF AN INDIVIDUAL LIST ITEM
        float f = 1.00f; // resulting scrollbar value (1 == top)
        if (p >= t - c )
            f = 1.00f - (p / t);
        else
            f = 1.00f - (p / (t - c));
        // INDEX 0 (OR NEGATIVE) ALWAYS MEANS "SCROLL TO TOP"
        if (intIndex < 1)
            f = 1;
        if (gameObject.activeInHierarchy)
            StartCoroutine(SetScroll(f));
    }
// -- CHECK FOR LISTBOX ITEM WITH VALUE
public virtual bool HasItemWithValue(string strValue)
{
return Items.FindIndex(x => x.Value.Trim().ToLower() == strValue.Trim().ToLower()) >= 0;
}
public virtual bool HasItemWithValue(int intValue)
{
return HasItemWithValue(intValue.ToString());
}
public virtual bool HasItemWithValue(float fValue)
{
return HasItemWithValue(fValue.ToString());
}
// -- ENABLE ONCLICK FOR LISTBOX ITEM (ALSO ADJUSTS ITEM STYLE)
public virtual void EnableByIndex(int intIndex)
{
if (intIndex >= 0 && intIndex < Items.Count)
Items[intIndex].Enabled = true;
}
public virtual void EnableByValue(string strValue)
{
EnableByIndex(Items.FindIndex(x => x.Value.ToLower() == strValue.ToLower()));
}
public virtual void EnableByValue(int intValue)
{
EnableByIndex(Items.FindIndex(x => x.Value.ToLower() == intValue.ToString().ToLower()));
}
public virtual void EnableByText(string strText)
{
EnableByIndex(Items.FindIndex(x => x.Text.ToLower() == strText.ToLower()));
}
// -- DISABLE ONCLICK FOR LISTBOX ITEM (ALSO ADJUSTS ITEM STYLE)
public virtual void DisableByIndex(int intIndex)
{
if (intIndex >= 0 && intIndex < Items.Count)
Items[intIndex].Enabled = false;
}
public virtual void DisableByValue(string strValue)
{
DisableByIndex(Items.FindIndex(x => x.Value.ToLower() == strValue.ToLower()));
}
public virtual void DisableByValue(int intValue)
{
DisableByIndex(Items.FindIndex(x => x.Value.ToLower() == intValue.ToString().ToLower()));
}
public virtual void DisableByText(string strText)
{
DisableByIndex(Items.FindIndex(x => x.Text.ToLower() == strText.ToLower()));
}
// -- SET LISTBOX ITEM TEXT
public virtual void SetItemTextByIndex(int intIndex, string strNewText)
{
Items[intIndex].Text = strNewText;
}
public virtual void SetItemTextByValue(string strValue, string strNewText)
{
int i = Items.FindIndex(x => x.Value == strValue);
if (i >= 0)
SetItemTextByIndex(i, strNewText);
}
public virtual void SetItemTextByValue(int intValue, string strNewText)
{
SetItemTextByValue(intValue.ToString(), strNewText);
}
// -- SET LISTBOX ITEM SUBTEXT
public virtual void SetItemSubTextByIndex(int intIndex, string strNewText)
{
Items[intIndex].SubText = strNewText;
}
public virtual void SetItemSubTextByValue(string strValue, string strNewText)
{
int i = Items.FindIndex(x => x.Value == strValue);
if (i >= 0)
SetItemSubTextByIndex(i, strNewText);
}
public virtual void SetItemSubTextByValue(int intValue, string strNewText)
{
SetItemSubTextByValue(intValue.ToString(), strNewText);
}
// -- CHANGE ITEM ORDER
public virtual bool MoveItemUp( int intIndex)
{
if (intIndex < 1)
return false;
// UNSELECT BOTH ITEMS
bool blnOrig = IsSelectedByIndex(intIndex);
bool blnTrgt = IsSelectedByIndex(intIndex - 1);
UnSelectItem(intIndex);
UnSelectItem(intIndex - 1);
// MOVE THE ITEM
ListBoxLineItem liOrig = _items[intIndex];
ListBoxLineItem liTrgt = _items[intIndex - 1];
liOrig.Index--;
liTrgt.Index++;
_items[intIndex] = liTrgt;
_items[intIndex - 1] = liOrig;
_items[intIndex].AutoSize();
_items[intIndex - 1].AutoSize();
// UPDATE SELECTION
if (blnTrgt)
SelectByIndex(intIndex);
if (blnOrig)
SelectByIndex(intIndex - 1);
if (_intSelectedItem == intIndex)
_intSelectedItem--;
return true;
}
    /// <summary>
    /// Swaps the item at <paramref name="intIndex"/> with the one directly below it,
    /// preserving each slot's selection state. Returns false when the item cannot move.
    /// </summary>
    public virtual bool MoveItemDown( int intIndex)
    {
        if (intIndex < 0 || intIndex >= _items.Count - 1)
            return false;
        // UNSELECT BOTH ITEMS (remember their prior selection state)
        bool blnOrig = IsSelectedByIndex(intIndex);
        bool blnTrgt = IsSelectedByIndex(intIndex + 1);
        UnSelectItem(intIndex);
        UnSelectItem(intIndex + 1);
        // MOVE THE ITEM (swap the list slots and swap the Index fields)
        ListBoxLineItem liOrig = _items[intIndex];
        ListBoxLineItem liTrgt = _items[intIndex + 1];
        liOrig.Index++;
        liTrgt.Index--;
        _items[intIndex] = liTrgt;
        _items[intIndex + 1] = liOrig;
        _items[intIndex].AutoSize();
        _items[intIndex + 1].AutoSize();
        // UPDATE SELECTION (restore each slot's previous selection state)
        if (blnTrgt)
            SelectByIndex(intIndex);
        if (blnOrig)
            SelectByIndex(intIndex + 1);
        if (_intSelectedItem == intIndex)
            _intSelectedItem++;
        return true;
    }
// -- GET LISTBOX ITEM VALUE
public virtual string GetValueByText(string strText)
{
int i = Items.FindIndex(x => x.Text.ToLower() == strText.Trim().ToLower());
if (i < 0)
return "";
else
return Items[i].Value;
}
public virtual string GetValueByIndex(int intIndex)
{
if (intIndex < 0 || intIndex >= Items.Count)
return "";
return Items[intIndex].Value;
}
public virtual int GetIntValueByIndex(int intIndex)
{
if (intIndex < 0 || intIndex >= Items.Count)
return -1;
return Util.ConvertToInt(Items[intIndex].Value);
}
// -- GET LISTBOX ITEM TEXT
public virtual string GetTextByValue(string strvalue)
{
int i = Items.FindIndex(x => x.Value.ToLower() == strvalue.Trim().ToLower());
if (i < 0)
return "";
else
return Items[i].Text;
}
public virtual string GetTextByValue(int intValue)
{
return GetTextByValue(intValue.ToString());
}
public virtual string GetTextByValue(float fValue)
{
return GetTextByValue(fValue.ToString());
}
public virtual string GetTextByIndex(int intIndex)
{
if (intIndex < 0 || intIndex >= Items.Count)
return "";
return Items[intIndex].Text;
}
// -- GET LISTBOX ITEM SUBTEXT
public virtual string GetSubTextByValue(string strvalue)
{
int i = Items.FindIndex(x => x.Value.ToLower() == strvalue.Trim().ToLower());
if (i < 0)
return "";
else
return Items[i].SubText;
}
public virtual string GetSubTextByValue(int intValue)
{
return GetSubTextByValue(intValue.ToString());
}
public virtual string GetSubTextByValue(float fValue)
{
return GetSubTextByValue(fValue.ToString());
}
public virtual string GetSubTextByIndex(int intIndex)
{
if (intIndex < 0 || intIndex >= Items.Count)
return "";
return Items[intIndex].SubText;
}
    // -- HANDLE SELECTION (SET LISTBOX ITEM SELECTED)
    /// <summary>
    /// Core selection routine. Plain click replaces the selection; ctrl-click
    /// toggles the clicked item in the multi-select list; shift-click selects a
    /// range. Fires OnChange after any of the three paths.
    /// </summary>
    public virtual void SelectByIndex(int intIndex, bool blnShifted = false, bool blnCtrled = false)
    {
        // DATA INTEGRITY CHECK (-1 is allowed and means "clear the selection")
        if (intIndex < -1 || intIndex >= Items.Count)
            return;
        // MULTI-SELECT OVERRIDE (modifier keys are ignored unless multi-select is on)
        blnShifted = blnShifted && CanMultiSelect;
        blnCtrled = blnCtrled && CanMultiSelect;
        // UNSHIFTED/UNCONTROLLED/UNSELECTED CLICK -- (CLICKING FOR THE FIRST TIME)
        if ((!blnShifted && !blnCtrled) || _intSelectedItem < 0)
        {
            UnSelectAllItems();
            _intSelectedItem = intIndex;
            // disabled items become the anchor but are not visually selected
            if (_intSelectedItem >= 0 && Items[_intSelectedItem].Enabled)
            {
                Items[_intSelectedItem].Select();
                _intSelectedList.Add(intIndex);
            }
        // CONTROLLED CLICK -- (TOGGLE ITEM IN THE SELECTED LIST)
        } else if (blnCtrled) {
            if (intIndex >= 0 && intIndex < Items.Count && Items[intIndex].Enabled)
            {
                if (IsSelectedByIndex(intIndex))
                    UnSelectItem( intIndex);
                else
                {
                    Items[intIndex].Select();
                    _intSelectedList.Add(intIndex);
                }
            }
        // SHIFT-CLICK -- (ADD RANGE OF ITEMS TO SELECTED LIST)
        } else if (blnShifted) {
            UnSelectAllItems();
            SelectByRange(intIndex);
        }
        // NOTIFY LISTENERS (>= -1 is always true here; fires even on cleared selection)
        if (_intSelectedItem >= -1)
        {
            if (this.OnChange != null)
                OnChange(this.gameObject, _intSelectedItem);
        }
    }
public virtual void SelectByValue(string strValue)
{
int i = Items.FindIndex(x => x.Value.ToLower() == strValue.ToLower());
SelectByIndex(i);
}
public virtual void SelectByText( string strText)
{
int i = Items.FindIndex(x => x.Text.ToLower() == strText.ToLower());
SelectByIndex(i);
}
public virtual void Unselect()
{
UnSelectAllItems();
_intSelectedItem = -1;
_intSelectedList = new List<int>();
}
    /// <summary>
    /// Selects the double-clicked item (replacing any prior selection) and then
    /// raises the OnDoubleClick event. No-op when AllowDoubleClick is off.
    /// </summary>
    public virtual void HandleDoubleClick(int intIndex)
    {
        // DATA INTEGRITY CHECK
        if (!AllowDoubleClick)
            return;
        if (intIndex < -1 || intIndex >= Items.Count)
            return;
        // SELECT THE ITEM (disabled items become the anchor but are not highlighted)
        UnSelectAllItems();
        _intSelectedItem = intIndex;
        if (_intSelectedItem >= 0 && Items[_intSelectedItem].Enabled)
        {
            Items[_intSelectedItem].Select();
            _intSelectedList.Add(intIndex);
        }
        // PASS THE DOUBLE-CLICK EVENT TO THE OnDoubleClick EVENT HANDLER
        if (_intSelectedItem >= 0)
        {
            if (this.OnDoubleClick != null)
                OnDoubleClick(this.gameObject, _intSelectedItem);
        }
    }
// -- HANDLE SELECTED INDEXES
public virtual bool IsSelectedByIndex(int intIndex)
{
return (_intSelectedItem == intIndex || _intSelectedList.FindIndex(x => x == intIndex) >= 0);
}
    // -- RESIZE THE CONTAINER (IF NECESSARY)
    /// <summary>
    /// Recomputes the scroll container's height from the item count
    /// ((Height + Spacing) per row, plus one trailing Spacing), then resizes.
    /// </summary>
    public virtual void UpdateListBoxContainerSize()
    {
        Vector2 v2 = ContainerRect.sizeDelta;
        v2.y = ((this.Height + this.Spacing) * Items.Count) + this.Spacing;
        ContainerRect.sizeDelta = v2;
        ResizeContainer();
    }
    // -- SHOW/HIDE THE LISTBOX CONTROL
    /// <summary>
    /// Hides the list box visuals (background image, scrollbar, scroll rect, title)
    /// while keeping the GameObject itself active.
    /// </summary>
    public virtual void Hide()
    {
        // NOTE(review): Hide() deliberately calls SetActive(true) and only disables
        // the visual components below; in non-ListBox modes nothing is hidden at
        // all -- confirm that is the intended behavior.
        gameObject.SetActive(true);
        if (ListBoxMode == ListBoxModes.ListBox)
        {
            GetComponent<Image>().enabled = false;
            if (ScrollBarObject != null)
                ScrollBarObject.SetActive(false);
            if (ScrollRectObject != null)
                ScrollRectObject.SetActive(false);
            if (ListBoxTitle != null)
                ListBoxTitle.gameObject.SetActive(false);
        }
    }
    /// <summary>
    /// Re-displays the list box visuals hidden by Hide(); the null checks tolerate
    /// scroll objects that were never assigned in the inspector.
    /// </summary>
    public virtual void Show()
    {
        gameObject.SetActive(true);
        if (ListBoxMode == ListBoxModes.ListBox)
        {
            GetComponent<Image>().enabled = true;
            if (ScrollBarObject != null)
                ScrollBarObject.SetActive(true);
            if (ScrollRectObject != null)
                ScrollRectObject.SetActive(true);
            if (ListBoxTitle != null)
                ListBoxTitle.gameObject.SetActive(true);
        }
    }
public virtual bool IsShown
{
get
{
if (ListBoxMode == ListBoxModes.ListBox)
{
return GetComponent<Image>().enabled && ScrollBarObject.activeSelf && ScrollRectObject.activeSelf;
}
return false;
}
}
#endregion
#endregion
#region "EVENT FUNCTIONS"
public event OnListBoxSelectChanged OnChange;
public event OnListBoxDoubleClick OnDoubleClick;
public event OnAddListItemEvent OnAddListItem;
public event OnRemoveListItemEvent OnRemoveListItem;
#endregion
}
| RowellKataan/Unity-List-Box | Assets/Unity UI Controls/Scripts/ListBox Scripts/ListBoxControl.cs | C# | apache-2.0 | 40,922 |
package org.docksidestage.dockside.dbflute.whitebox.cbean.bigartist.innerjoin;
import org.dbflute.cbean.result.ListResultBean;
import org.docksidestage.dockside.dbflute.bsentity.dbmeta.MemberAddressDbm;
import org.docksidestage.dockside.dbflute.bsentity.dbmeta.MemberDbm;
import org.docksidestage.dockside.dbflute.bsentity.dbmeta.MemberServiceDbm;
import org.docksidestage.dockside.dbflute.bsentity.dbmeta.MemberStatusDbm;
import org.docksidestage.dockside.dbflute.bsentity.dbmeta.MemberWithdrawalDbm;
import org.docksidestage.dockside.dbflute.bsentity.dbmeta.ProductDbm;
import org.docksidestage.dockside.dbflute.bsentity.dbmeta.ProductStatusDbm;
import org.docksidestage.dockside.dbflute.bsentity.dbmeta.ServiceRankDbm;
import org.docksidestage.dockside.dbflute.bsentity.dbmeta.SummaryProductDbm;
import org.docksidestage.dockside.dbflute.bsentity.dbmeta.WithdrawalReasonDbm;
import org.docksidestage.dockside.dbflute.exbhv.PurchaseBhv;
import org.docksidestage.dockside.dbflute.exentity.Member;
import org.docksidestage.dockside.dbflute.exentity.Product;
import org.docksidestage.dockside.dbflute.exentity.ProductStatus;
import org.docksidestage.dockside.dbflute.exentity.Purchase;
import org.docksidestage.dockside.unit.UnitContainerTestCase;
/**
* @author jflute
* @since 0.6.0 (2008/01/16 Wednesday)
*/
public class WxCBInnerJoinStructuralPossibleTest extends UnitContainerTestCase {
    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    // Behavior under test; injected by the unit test container.
    private PurchaseBhv purchaseBhv;
    // ===================================================================================
    //                                                                               Basic
    //                                                                               =====
    /**
     * With inner-join auto-detect enabled and no query conditions, setup-select
     * joins to not-null FK relations (member, product and their statuses) should
     * become inner joins while nullable relations stay outer joins.
     */
    public void test_StructuralPossible_without_Query() {
        // ## Arrange ##
        ListResultBean<Purchase> purchaseList = purchaseBhv.selectList(cb -> {
            /* ## Act ## */
            cb.enableInnerJoinAutoDetect();
            cb.setupSelect_Member().withMemberStatus();
            cb.setupSelect_Member().withMemberAddressAsValid(currentLocalDate());
            cb.setupSelect_Member().withMemberServiceAsOne().withServiceRank();
            cb.setupSelect_Member().withMemberWithdrawalAsOne().withWithdrawalReason();
            cb.setupSelect_Product().withProductStatus();
            cb.setupSelect_SummaryProduct();
            cb.query().addOrderBy_MemberId_Asc().addOrderBy_PurchaseDatetime_Desc();
            pushCB(cb); // keep the CB so the generated SQL can be inspected below
        });
        // ## Assert ##
        assertNotSame(0, purchaseList.size());
        for (Purchase purchase : purchaseList) {
            Product product = purchase.getProduct().get();
            ProductStatus productStatus = product.getProductStatus().get();
            assertNotNull(product);
            assertNotNull(productStatus);
            log("[PURCHASE]: " + purchase.getPurchaseId() + ", " + product.getProductName() + ", " + productStatus);
            Member member = purchase.getMember().get();
            assertNotNull(member);
            assertNotNull(member.getMemberStatus());
            member.getMemberWithdrawalAsOne().ifPresent(withdrawal -> {
                withdrawal.getWithdrawalReason().ifPresent(reason -> {
                    String reasonText = reason.getWithdrawalReasonText();
                    log(" [WDL-MEMBER]: " + member.getMemberId() + ", " + member.getMemberName() + ", " + reasonText);
                    assertNotNull(reasonText);
                    markHere("existsWithdrawal"); // prove the outer-joined data actually loaded
                });
            });
        }
        // verify which joins the auto-detect turned into inner joins
        String sql = popCB().toDisplaySql();
        assertTrue(sql.contains("inner join " + MemberDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("inner join " + MemberStatusDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("left outer join " + MemberAddressDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("left outer join " + MemberServiceDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("left outer join " + ServiceRankDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("left outer join " + MemberWithdrawalDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("left outer join " + WithdrawalReasonDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("inner join " + ProductDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("inner join " + ProductStatusDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("left outer join " + SummaryProductDbm.getInstance().getTableDbName()));
        assertMarked("existsWithdrawal");
    }
    /**
     * An explicit innerJoin() on member service should propagate: the relation
     * itself AND its child (service rank) become inner joins.
     */
    public void test_StructuralPossible_trace_is_ManualInnerJoin() {
        // ## Arrange ##
        ListResultBean<Purchase> purchaseList = purchaseBhv.selectList(cb -> {
            /* ## Act ## */
            cb.enableInnerJoinAutoDetect();
            cb.setupSelect_Member().withMemberStatus();
            cb.setupSelect_Member().withMemberAddressAsValid(currentLocalDate());
            cb.setupSelect_Member().withMemberServiceAsOne().withServiceRank();
            cb.setupSelect_Member().withMemberWithdrawalAsOne().withWithdrawalReason();
            cb.setupSelect_Product().withProductStatus();
            cb.setupSelect_SummaryProduct();
            cb.query().queryMember().queryMemberServiceAsOne().innerJoin(); // manual inner join
            cb.query().addOrderBy_MemberId_Asc().addOrderBy_PurchaseDatetime_Desc();
            pushCB(cb);
        });
        // ## Assert ##
        assertNotSame(0, purchaseList.size());
        for (Purchase purchase : purchaseList) {
            Product product = purchase.getProduct().get();
            ProductStatus productStatus = product.getProductStatus().get();
            assertNotNull(product);
            assertNotNull(productStatus);
            log("[PURCHASE]: " + purchase.getPurchaseId() + ", " + product.getProductName() + ", " + productStatus);
            Member member = purchase.getMember().get();
            assertNotNull(member);
            assertNotNull(member.getMemberStatus());
            member.getMemberWithdrawalAsOne().ifPresent(withdrawal -> {
                withdrawal.getWithdrawalReason().ifPresent(reason -> {
                    String reasonText = reason.getWithdrawalReasonText();
                    log(" [WDL-MEMBER]: " + member.getMemberId() + ", " + member.getMemberName() + ", " + reasonText);
                    assertNotNull(reasonText);
                    markHere("existsWithdrawal");
                });
            });
        }
        String sql = popCB().toDisplaySql();
        assertTrue(sql.contains("inner join " + MemberDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("inner join " + MemberStatusDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("left outer join " + MemberAddressDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("inner join " + MemberServiceDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("inner join " + ServiceRankDbm.getInstance().getTableDbName())); // point
        assertTrue(sql.contains("left outer join " + MemberWithdrawalDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("left outer join " + WithdrawalReasonDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("inner join " + ProductDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("inner join " + ProductStatusDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("left outer join " + SummaryProductDbm.getInstance().getTableDbName()));
        assertMarked("existsWithdrawal");
    }
    /**
     * A where-clause condition on member service should have the same effect as a
     * manual innerJoin(): the relation and its child become inner joins.
     */
    public void test_StructuralPossible_trace_is_WhereUsedInnerJoin() {
        // ## Arrange ##
        ListResultBean<Purchase> purchaseList = purchaseBhv.selectList(cb -> {
            /* ## Act ## */
            cb.enableInnerJoinAutoDetect();
            cb.setupSelect_Member().withMemberStatus();
            cb.setupSelect_Member().withMemberAddressAsValid(currentLocalDate());
            cb.setupSelect_Member().withMemberServiceAsOne().withServiceRank();
            cb.setupSelect_Member().withMemberWithdrawalAsOne().withWithdrawalReason();
            cb.setupSelect_Product().withProductStatus();
            cb.setupSelect_SummaryProduct();
            cb.query().queryMember().queryMemberServiceAsOne().setServiceRankCode_Equal_Gold(); // where-clause trigger
            cb.query().addOrderBy_MemberId_Asc().addOrderBy_PurchaseDatetime_Desc();
            pushCB(cb);
        });
        // ## Assert ##
        assertHasAnyElement(purchaseList);
        for (Purchase purchase : purchaseList) {
            purchase.getProduct().alwaysPresent(product -> {
                assertTrue(product.getProductStatus().isPresent());
                log("[PURCHASE]: " + purchase.getPurchaseId() + ", " + product.getProductName() + ", " + product.getProductStatus());
            });
            purchase.getMember().alwaysPresent(member -> {
                assertTrue(member.getMemberStatus().isPresent());
                member.getMemberWithdrawalAsOne().ifPresent(withdrawal -> {
                    withdrawal.getWithdrawalReason().ifPresent(reason -> {
                        String reasonText = reason.getWithdrawalReasonText();
                        log(" [WDL-MEMBER]: " + member.getMemberId() + ", " + member.getMemberName() + ", " + reasonText);
                        assertNotNull(reasonText);
                        markHere("existsWithdrawal");
                    });
                });
            });
        }
        String sql = popCB().toDisplaySql();
        assertTrue(sql.contains("inner join " + MemberDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("inner join " + MemberStatusDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("left outer join " + MemberAddressDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("inner join " + MemberServiceDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("inner join " + ServiceRankDbm.getInstance().getTableDbName())); // point
        assertTrue(sql.contains("left outer join " + MemberWithdrawalDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("left outer join " + WithdrawalReasonDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("inner join " + ProductDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("inner join " + ProductStatusDbm.getInstance().getTableDbName()));
        assertTrue(sql.contains("left outer join " + SummaryProductDbm.getInstance().getTableDbName()));
        assertMarked("existsWithdrawal");
    }
}
| dbflute-test/dbflute-test-active-dockside | src/test/java/org/docksidestage/dockside/dbflute/whitebox/cbean/bigartist/innerjoin/WxCBInnerJoinStructuralPossibleTest.java | Java | apache-2.0 | 10,990 |
package com.mx.dic.holder;
import javax.sql.DataSource;
/**
 * Static holder exposing a single shared {@link DataSource} to code that has no
 * access to the dependency-injection container.
 */
public class DataSourceHolder {
    // Populated once via the instance setter below; read statically everywhere else.
    public static DataSource dataSource=null;
    public static DataSource getDataSource() {
        return dataSource;
    }
    // NOTE(review): an instance setter writing a static field is the usual
    // DI-container (e.g. Spring bean) wiring idiom, but it is not thread-safe and
    // the field is publicly mutable -- confirm only one bean ever sets it.
    public void setDataSource(DataSource dataSource) {
        DataSourceHolder.dataSource = dataSource;
    }
}
| minxinjituan/micro | dicServiceBean/src/main/java/com/mx/dic/holder/DataSourceHolder.java | Java | apache-2.0 | 314 |
/*
* Copyright (c) 2017 by E. A. Graham, Jr.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thevortex.lighting.jinks.devices.hubs;
import org.thevortex.lighting.jinks.devices.ConnectionReading;
import org.thevortex.lighting.jinks.devices.FirmwareReading;
import org.thevortex.lighting.jinks.devices.Reading;
/**
 * Hub last reading states: typed accessors over the raw field map supplied by
 * the {@code Reading} superclass.
 *
 * @author E. A. Graham Jr.
 */
public class HubLastReading extends Reading implements FirmwareReading, ConnectionReading {
    // Raw reading field names, exactly as reported by the hub.
    private static final String AGENT_SESSION_ID = "agent_session_id";
    private static final String DESIRED_KIDDE_RADIO_CODE = "desired_kidde_radio_code";
    private static final String DESIRED_PAIRING_MODE = "desired_pairing_mode";
    private static final String DESIRED_PAIRING_MODE_DURATION = "desired_pairing_mode_duration";
    private static final String DESIRED_PAIRING_MODE_PREFIX = "desired_pairing_mode_prefix";
    private static final String HUB_VERSION = "hub_version";
    private static final String IP_ADDRESS = "ip_address";
    private static final String KIDDE_RADIO_CODE = "kidde_radio_code";
    private static final String LOCAL_CONTROL_ID = "local_control_id";
    private static final String LOCAL_CONTROL_PUBLIC_KEY = "local_control_public_key";
    private static final String LOCAL_CONTROL_PUBLIC_KEY_HASH = "local_control_public_key_hash";
    private static final String MAC_ADDRESS = "mac_address";
    private static final String PAIRING_MODE = "pairing_mode";
    private static final String PAIRING_MODE_DURATION = "pairing_mode_duration";
    private static final String PAIRING_PREFIX = "pairing_prefix";
    private static final String REMOTE_PAIRABLE = "remote_pairable";
    private static final String UPDATE_NEEDED = "update_needed";
    private static final String UPDATING_FIRMWARE = "updating_firmware";

    public Value<String> getAgentSessionId() {
        return getFor(AGENT_SESSION_ID);
    }
    public Value<Long> getDesiredKiddeRadioCode() {
        return getFor(DESIRED_KIDDE_RADIO_CODE);
    }
    public Value<String> getDesiredPairingMode() {
        return getFor(DESIRED_PAIRING_MODE);
    }
    public Value<String> getDesiredPairingModeDuration() {
        return getFor(DESIRED_PAIRING_MODE_DURATION);
    }
    public Value<String> getDesiredPairingPrefix() {
        return getFor(DESIRED_PAIRING_MODE_PREFIX);
    }
    public Value<String> getHubVersion() {
        return getFor(HUB_VERSION);
    }
    public Value<String> getIpAddress() {
        return getFor(IP_ADDRESS);
    }
    public Value<Long> getKiddeRadioCode() {
        return getFor(KIDDE_RADIO_CODE);
    }
    public Value<String> getLocalControlId() {
        return getFor(LOCAL_CONTROL_ID);
    }
    public Value<String> getLocalControlPublicKey() {
        return getFor(LOCAL_CONTROL_PUBLIC_KEY);
    }
    public Value<String> getLocalControlPublicKeyHash() {
        return getFor(LOCAL_CONTROL_PUBLIC_KEY_HASH);
    }
    public Value<String> getMacAddress() {
        return getFor(MAC_ADDRESS);
    }
    public Value<String> getPairingMode() {
        return getFor(PAIRING_MODE);
    }
    public Value<Integer> getPairingModeDuration() {
        return getFor(PAIRING_MODE_DURATION);
    }
    public Value<String> getPairingPrefix() {
        return getFor(PAIRING_PREFIX);
    }
    public Value<Boolean> getRemotePairable() {
        return getFor(REMOTE_PAIRABLE);
    }
    public Value<Boolean> getUpdateNeeded() {
        return getFor(UPDATE_NEEDED);
    }
    public Value<Boolean> getUpdatingFirmware() {
        return getFor(UPDATING_FIRMWARE);
    }
}
| EAGrahamJr/jinks | src/main/java/org/thevortex/lighting/jinks/devices/hubs/HubLastReading.java | Java | apache-2.0 | 2,817 |
package com.planet_ink.coffee_mud.Items.Basic;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2014 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * A basic wooden chair: a single-occupant Rideable that is sat upon
 * (RIDEABLE_SIT) rather than mounted.
 */
public class Chair extends StdRideable
{
    @Override public String ID(){ return "Chair";}
    // NOTE(review): not referenced anywhere in this class; presumably read by
    // superclass/engine machinery -- confirm before removing.
    protected String readableText="";
    public Chair()
    {
        super();
        // Identity, appearance and economy values.
        setName("a chair");
        basePhyStats.setWeight(150);
        setDisplayText("a chair is here.");
        setDescription("Looks like a nice comfortable wooden chair");
        material=RawMaterial.RESOURCE_OAK;
        baseGoldValue=5;
        basePhyStats().setLevel(1);
        // One occupant, sitting (not mounted or driven).
        setRideBasis(Rideable.RIDEABLE_SIT);
        setRiderCapacity(1);
        // Called last so it sees all of the base values configured above.
        recoverPhyStats();
    }
}
| vjanmey/EpicMudfia | com/planet_ink/coffee_mud/Items/Basic/Chair.java | Java | apache-2.0 | 1,893 |
/*
* Copyright 2017 Young Digital Planet S.A.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.ydp.empiria.player.client.module.sourcelist.view;
import static org.fest.assertions.api.Assertions.*;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.*;
import com.google.gwt.event.dom.client.DragDropEventBase;
import com.google.gwt.junit.GWTMockUtilities;
import com.google.inject.Provider;
import eu.ydp.empiria.player.client.controller.body.InlineBodyGeneratorSocket;
import eu.ydp.empiria.player.client.gin.factory.TouchReservationFactory;
import eu.ydp.empiria.player.client.module.dragdrop.*;
import eu.ydp.empiria.player.client.module.sourcelist.presenter.SourceListPresenter;
import eu.ydp.empiria.player.client.ui.drop.FlowPanelWithDropZone;
import eu.ydp.empiria.player.client.util.dom.drag.DragDataObject;
import eu.ydp.empiria.player.client.util.events.internal.dragdrop.DragDropEventTypes;
import eu.ydp.gwtutil.client.util.geom.HasDimensions;
import org.junit.*;
import org.junit.runner.RunWith;
import org.mockito.*;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
@RunWith(MockitoJUnitRunner.class)
public class SourceListViewImplTest {

	/** Object under test; Mockito injects the mocked collaborators below. */
	@InjectMocks
	private SourceListViewImpl testObj;
	@Mock
	private SourceListPresenter sourceListPresenter;
	@Mock
	private TouchReservationFactory touchReservationFactory;
	@Mock
	private Provider<SourceListViewItem> sourceListViewItemProvider;
	@Mock
	private SourceListViewItem viewItem;
	@Mock
	private InlineBodyGeneratorSocket inlineBodyGeneratorSocket;
	@Mock
	private FlowPanelWithDropZone items;

	// Dimensions reported by the stubbed view item; asserted in shouldGetMaxItemSize().
	private final int sourceListViewItemHeight = 11;
	private final int sourceListViewItemWidth = 12;

	// GWT widgets cannot be instantiated in a plain JVM test; disarm GWT.create()
	// for the whole class and restore it afterwards.
	@BeforeClass
	public static void disarm() {
		GWTMockUtilities.disarm();
	}

	@AfterClass
	public static void rearm() {
		GWTMockUtilities.restore();
	}

	@Before
	public void before() {
		// Every item the provider hands out is the same mock, pre-stubbed with
		// fixed width/height so size-related assertions are deterministic.
		when(sourceListViewItemProvider.get()).then(new Answer<SourceListViewItem>() {
			@Override
			public SourceListViewItem answer(InvocationOnMock invocation) throws Throwable {
				doReturn(sourceListViewItemHeight).when(viewItem).getHeight();
				doReturn(sourceListViewItemWidth).when(viewItem).getWidth();
				return viewItem;
			}
		});
		testObj.items = items;
	}

	@Test
	public void testOnDragEventDragStart() throws Exception {
		// given
		String itemContent = "itemContent";
		String itemId = "item";
		SourceListPresenter sourceListPresenter = mock(SourceListPresenter.class);
		String json = "{}";
		DragDataObject dataObject = mock(DragDataObject.class);
		doReturn(json).when(dataObject).toJSON();
		when(sourceListPresenter.getDragDataObject(anyString())).thenReturn(dataObject);
		DragDropEventBase event = mock(DragDropEventBase.class);
		// when
		testObj.createItem(new SourcelistItemValue(SourcelistItemType.TEXT, itemContent, itemId), inlineBodyGeneratorSocket);
		testObj.setSourceListPresenter(sourceListPresenter);
		testObj.onDragEvent(DragDropEventTypes.DRAG_START, viewItem, event);
		// then: drag-start must serialize the drag payload and notify the presenter
		verify(event).setData(eq("json"), eq(json));
		verify(sourceListPresenter).onDragEvent(eq(DragDropEventTypes.DRAG_START), eq(itemId));
	}

	@Test
	public void shouldNotSetData_whenNotDragStartEvent() throws Exception {
		// given
		String itemContent = "itemContent";
		String itemId = "item";
		DragDropEventBase event = mock(DragDropEventBase.class);
		// when
		testObj.createItem(new SourcelistItemValue(SourcelistItemType.TEXT, itemContent, itemId), inlineBodyGeneratorSocket);
		testObj.setSourceListPresenter(sourceListPresenter);
		testObj.onDragEvent(DragDropEventTypes.DRAG_CANCEL, viewItem, event);
		// then: only DRAG_START carries payload data, but the presenter is always notified
		verify(event, never()).setData(eq("json"), anyString());
		verify(sourceListPresenter).onDragEvent(eq(DragDropEventTypes.DRAG_CANCEL), eq(itemId));
	}

	@Test
	public void shouldGetItemValue() {
		// given
		testObj.createItem(new SourcelistItemValue(SourcelistItemType.TEXT, "a", "a"), inlineBodyGeneratorSocket);
		// when
		testObj.getItemValue("a");
		// then
		verify(viewItem).getItemContent();
	}

	@Test
	public void shouldCreateItem() {
		// given
		String itemContent = "itemContent";
		String itemId = "item";
		doReturn(viewItem).when(sourceListViewItemProvider).get();
		// when
		// Argument order is (type, content, id) — kept consistent with the
		// other tests in this class (the original test had content/id swapped).
		SourcelistItemValue sourcelistItemValue = new SourcelistItemValue(SourcelistItemType.TEXT, itemContent, itemId);
		testObj.createItem(sourcelistItemValue, inlineBodyGeneratorSocket);
		// then
		verify(sourceListViewItemProvider).get();
		verify(items).add(eq(sourceListViewItemProvider.get()));
		verify(sourceListViewItemProvider.get()).setSourceListView(eq(testObj));
		verify(sourceListViewItemProvider.get()).createAndBindUi(eq(sourcelistItemValue), eq(inlineBodyGeneratorSocket));
	}

	@Test
	public void shouldHideItem() {
		// given
		testObj.createItem(new SourcelistItemValue(SourcelistItemType.TEXT, "a", "a"), inlineBodyGeneratorSocket);
		// when
		testObj.hideItem("a");
		// then
		verify(viewItem).hide();
	}

	@Test
	public void shouldNotHideItem_whenIdNotPresent() {
		// given
		testObj.createItem(new SourcelistItemValue(SourcelistItemType.TEXT, "a", "a"), inlineBodyGeneratorSocket);
		// when: id "aa" was never registered, so nothing should change
		testObj.hideItem("aa");
		// then
		verify(viewItem, never()).show();
	}

	@Test
	public void shouldShowItem() {
		// given
		testObj.createItem(new SourcelistItemValue(SourcelistItemType.TEXT, "a", "a"), inlineBodyGeneratorSocket);
		// when
		testObj.showItem("a");
		// then
		verify(viewItem).show();
	}

	@Test
	public void shouldNotShowItem_whenIdNotPresent() {
		// given
		testObj.createItem(new SourcelistItemValue(SourcelistItemType.TEXT, "a", "a"), inlineBodyGeneratorSocket);
		// when: id "aa" was never registered, so nothing should change
		testObj.showItem("aa");
		// then
		verify(viewItem, never()).show();
	}

	@Test
	public void shouldLockItemForDrag() {
		// given
		testObj.createItem(new SourcelistItemValue(SourcelistItemType.TEXT, "a", "a"), inlineBodyGeneratorSocket);
		// when
		testObj.lockItemForDragDrop("a");
		// then
		verify(viewItem).lockForDragDrop();
	}

	@Test
	public void shouldUnlockItemForDrag() {
		// given
		testObj.createItem(new SourcelistItemValue(SourcelistItemType.TEXT, "a", "a"), inlineBodyGeneratorSocket);
		// when
		testObj.unlockItemForDragDrop("a");
		// then
		verify(viewItem).unlockForDragDrop();
	}

	@Test
	public void shouldGetMaxItemSize() {
		// given
		testObj.createItem(new SourcelistItemValue(SourcelistItemType.TEXT, "a", "a"), inlineBodyGeneratorSocket);
		// when
		HasDimensions maxItemSize = testObj.getMaxItemSize();
		// then: must reflect the dimensions stubbed on the single created item
		assertThat(maxItemSize.getHeight()).isEqualTo(sourceListViewItemHeight);
		assertThat(maxItemSize.getWidth()).isEqualTo(sourceListViewItemWidth);
	}
}
| YoungDigitalPlanet/empiria.player | src/test/java/eu/ydp/empiria/player/client/module/sourcelist/view/SourceListViewImplTest.java | Java | apache-2.0 | 8,356 |
// Copyright (c) Martin Costello, 2017. All rights reserved.
// Licensed under the Apache 2.0 license. See the LICENSE file in the project root for full license information.
using Alexa.NET.Request;
using Alexa.NET.Request.Type;
using Alexa.NET.Response;
using Amazon.Lambda.Core;
namespace MartinCostello.LondonTravel.Skill;
/// <summary>
/// Tests for the Alexa skill's Lambda entry point covering skill Id
/// validation, locale fallback and system-failure requests.
/// </summary>
public class AlexaFunctionTests : FunctionTests
{
    public AlexaFunctionTests(ITestOutputHelper outputHelper)
        : base(outputHelper)
    {
    }

    [Fact]
    public async Task Cannot_Invoke_Function_If_Application_Id_Incorrect()
    {
        // Arrange - send a request whose application Id does not match the configured skill Id
        var function = await CreateFunctionAsync();
        var context = CreateContext();

        var request = CreateIntentRequest("AMAZON.HelpIntent");
        request.Session.Application.ApplicationId = "not-my-skill-id";

        // Act
        var exception = await Assert.ThrowsAsync<InvalidOperationException>(
            () => function.HandlerAsync(request, context));

        // Assert
        exception.Message.ShouldBe("Request application Id 'not-my-skill-id' and configured skill Id 'my-skill-id' mismatch.");
    }

    [Theory]
    [InlineData(null)]
    [InlineData("")]
    [InlineData(" ")]
    [InlineData("____")]
    [InlineData("qps-Ploc")]
    public async Task Can_Invoke_Function_If_Locale_Is_Invalid(string locale)
    {
        // Arrange - invalid or unsupported locales must not make the handler fail
        var function = await CreateFunctionAsync();
        var context = CreateContext();

        var request = CreateIntentRequest("AMAZON.HelpIntent");
        request.Request.Locale = locale;

        // Act
        var skillResponse = await function.HandlerAsync(request, context);

        // Assert - a normal SSML response is still produced and the session stays open
        var body = AssertResponse(skillResponse, shouldEndSession: false);

        body.OutputSpeech.ShouldNotBeNull();
        body.OutputSpeech.Type.ShouldBe("SSML");
    }

    [Fact]
    public async Task Cannot_Invoke_Function_With_System_Failure()
    {
        // Arrange - simulate Alexa reporting an internal error for an earlier request
        var function = await CreateFunctionAsync();
        var context = CreateContext();

        var error = new SystemExceptionRequest()
        {
            Error = new Error()
            {
                Message = "Internal error.",
                Type = ErrorType.InternalError,
            },
            ErrorCause = new ErrorCause()
            {
                requestId = "my-request-id",
            },
        };

        var request = CreateRequest(error);

        // Act
        var skillResponse = await function.HandlerAsync(request, context);

        // Assert - the skill apologizes via SSML with no card or reprompt
        var body = AssertResponse(skillResponse);

        body.Card.ShouldBeNull();
        body.Reprompt.ShouldBeNull();

        body.OutputSpeech.ShouldNotBeNull();
        body.OutputSpeech.Type.ShouldBe("SSML");

        var ssml = body.OutputSpeech.ShouldBeOfType<SsmlOutputSpeech>();
        ssml.Ssml.ShouldBe("<speak>Sorry, something went wrong.</speak>");
    }
}
| martincostello/alexa-london-travel | test/LondonTravel.Skill.Tests/AlexaFunctionTests.cs | C# | apache-2.0 | 3,076 |
package com.cloudera.sa.hcu.io.utils;
import java.io.File;
import java.util.ArrayList;
public class LocalFileUtils
{
public static File[] createFileArray(String[] filePathArray)
{
ArrayList<File> fileList = new ArrayList<File>();
File[] tempFileArray;
for (String filePath: filePathArray)
{
File origFile = new File(filePath);
if (origFile.isDirectory())
{
tempFileArray = origFile.listFiles();
for (File file: tempFileArray)
{
fileList.add(file);
}
}else
{
fileList.add(origFile);
}
}
return fileList.toArray(new File[0]);
}
public static String[] createStringArrayOfFiles(String[] filePathArray)
{
ArrayList<String> filePathList = new ArrayList<String>();
File[] tempFileArray;
for (String filePath: filePathArray)
{
File origFile = new File(filePath);
if (origFile.isDirectory())
{
tempFileArray = origFile.listFiles();
for (File file: tempFileArray)
{
filePathList.add(file.getAbsolutePath());
}
}else
{
filePathList.add(origFile.getAbsolutePath());
}
}
return filePathList.toArray(new String[0]);
}
}
| tmalaska/hadcom.utils | src/main/java/com/cloudera/sa/hcu/io/utils/LocalFileUtils.java | Java | apache-2.0 | 1,160 |
using System.Reflection;
using System.Runtime.InteropServices;

// Assembly metadata for the DequeNet.Tests test assembly.
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("DequeNet.Tests")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("DequeNet.Tests")]
[assembly: AssemblyCopyright("Copyright © 2013")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM.
[assembly: Guid("8e1adf9b-2cb7-4477-9c01-69ea8b221522")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
require 'ffi'
require_relative '../sodium/utils'
require_relative '../random_bytes'
require_relative '../sodium/secret_buffer'
require_relative '../sodium'
module Crypto
  # FFI binding to libsodium's crypto_secretbox API: authenticated
  # secret-key encryption, in both combined (MAC + ciphertext in one
  # buffer) and detached (separate MAC) modes, each with a copying and
  # an in-place ("bang") variant.
  module SecretBox
    extend FFI::Library
    extend Sodium::Utils

    ffi_lib :libsodium

    # Size/name query functions exposed by libsodium.
    attach_function :primitive, :crypto_secretbox_primitive, [], :string
    attach_function :keybytes, :crypto_secretbox_keybytes, [], :size_t
    attach_function :noncebytes, :crypto_secretbox_noncebytes, [], :size_t
    attach_function :macbytes, :crypto_secretbox_macbytes, [], :size_t

    # Snapshot the library's constants once at load time.
    PRIMITIVE = primitive.freeze
    KEYBYTES = keybytes.freeze
    NONCEBYTES = noncebytes.freeze
    MACBYTES = macbytes.freeze

    attach_function :crypto_secretbox_easy, [:buffer_out, :buffer_in, :ulong_long, :buffer_in, :buffer_in], :int
    attach_function :crypto_secretbox_open_easy, [:buffer_out, :buffer_in, :ulong_long, :buffer_in, :buffer_in], :int
    attach_function :crypto_secretbox_detached, [:buffer_out, :buffer_out, :buffer_in, :ulong_long, :buffer_in, :buffer_in], :int
    attach_function :crypto_secretbox_open_detached, [:buffer_out, :buffer_out, :buffer_in, :ulong_long, :buffer_in, :buffer_in], :int

    module_function

    # Returns a fresh random nonce of NONCEBYTES bytes.
    def nonce
      RandomBytes.buf(NONCEBYTES)
    end

    # Encrypts +message+ with +key+ under +nonce+ and returns a new
    # ciphertext string MACBYTES longer than the message.
    # Keys held in a Sodium::SecretBuffer are unlocked for the call and
    # re-sealed in the ensure clause.
    def secretbox(message, nonce, key)
      message_len = get_size(message)
      check_length(nonce, NONCEBYTES, :Nonce)
      check_length(key, KEYBYTES, :SecretKey)

      ciphertext = zeros(message_len + MACBYTES)
      key.readonly if key.is_a?(Sodium::SecretBuffer)
      crypto_secretbox_easy(ciphertext, message, message_len, nonce, key)

      ciphertext
    ensure
      key.noaccess if key.is_a?(Sodium::SecretBuffer)
    end

    # Decrypts +ciphertext+ (combined MAC + ciphertext) and returns the
    # plaintext (MACBYTES shorter). Raises Sodium::CryptoError if the
    # MAC does not verify. +encoding+, when given, is forced onto the
    # returned string.
    def open(ciphertext, nonce, key, encoding = nil)
      ciphertext_len = get_size(ciphertext)
      check_length(nonce, NONCEBYTES, :Nonce)
      check_length(key, KEYBYTES, :SecretKey)

      decrypted = zeros(ciphertext_len - MACBYTES)
      key.readonly if key.is_a?(Sodium::SecretBuffer)
      if crypto_secretbox_open_easy(decrypted, ciphertext, ciphertext_len, nonce, key) == -1
        raise Sodium::CryptoError, "Message forged", caller
      end

      if encoding
        decrypted.force_encoding(encoding)
      end

      decrypted
    ensure
      key.noaccess if key.is_a?(Sodium::SecretBuffer)
    end

    # In-place variant of #secretbox: grows +data+ by MACBYTES and
    # encrypts into the same buffer. Returns the mutated string with
    # binary (ASCII-8BIT) encoding.
    def secretbox!(data, nonce, key)
      message = String(data)
      check_length(nonce, NONCEBYTES, :Nonce)
      check_length(key, KEYBYTES, :SecretKey)

      message_len = message.bytesize
      # Reserve room for the MAC at the end of the buffer before the
      # in-place encryption.
      message << zeros(MACBYTES)
      key.readonly if key.is_a?(Sodium::SecretBuffer)
      crypto_secretbox_easy(message, message, message_len, nonce, key)

      message.force_encoding(Encoding::ASCII_8BIT)
    ensure
      key.noaccess if key.is_a?(Sodium::SecretBuffer)
    end

    # In-place variant of #open: decrypts +data+ into itself and trims
    # the MAC bytes from the end. Raises Sodium::LengthError when the
    # input is shorter than a MAC, and Sodium::CryptoError on forgery.
    def open!(data, nonce, key, encoding = nil)
      ciphertext = String(data)
      if (message_len = (ciphertext_len = ciphertext.bytesize) - MACBYTES) < 0
        fail Sodium::LengthError, "Ciphertext is too short", caller
      end

      check_length(nonce, NONCEBYTES, :Nonce)
      check_length(key, KEYBYTES, :SecretKey)

      key.readonly if key.is_a?(Sodium::SecretBuffer)
      if crypto_secretbox_open_easy(ciphertext, ciphertext, ciphertext_len, nonce, key) == -1
        raise Sodium::CryptoError, "Message forged", caller
      end
      # Drop the now-meaningless trailing MAC bytes.
      ciphertext.slice!(message_len..-1)

      if encoding
        ciphertext.force_encoding(encoding)
      end

      ciphertext
    ensure
      key.noaccess if key.is_a?(Sodium::SecretBuffer)
    end

    # Detached-mode encryption: returns [ciphertext, mac] where the
    # ciphertext is the same length as the message and the MAC is
    # MACBYTES long.
    def detached(message, nonce, key)
      message_len = get_size(message)
      check_length(nonce, NONCEBYTES, :Nonce)
      check_length(key, KEYBYTES, :SecretKey)

      ciphertext = zeros(message_len)
      mac = zeros(MACBYTES)
      key.readonly if key.is_a?(Sodium::SecretBuffer)
      crypto_secretbox_detached(ciphertext, mac, message, message_len, nonce, key)

      [ciphertext, mac]
    ensure
      key.noaccess if key.is_a?(Sodium::SecretBuffer)
    end

    # Detached-mode decryption: verifies +mac+ against +ciphertext+ and
    # returns the plaintext. Raises Sodium::CryptoError on forgery.
    def open_detached(ciphertext, mac, nonce, key, encoding = nil)
      ciphertext_len = get_size(ciphertext)
      check_length(mac, MACBYTES, :Mac)
      check_length(nonce, NONCEBYTES, :Nonce)
      check_length(key, KEYBYTES, :SecretKey)

      message = zeros(ciphertext_len)
      key.readonly if key.is_a?(Sodium::SecretBuffer)
      if crypto_secretbox_open_detached(message, ciphertext, mac, ciphertext_len, nonce, key) == -1
        raise Sodium::CryptoError, "Message forged", caller
      end

      if encoding
        message.force_encoding(encoding)
      end

      message
    ensure
      key.noaccess if key.is_a?(Sodium::SecretBuffer)
    end

    # In-place detached encryption: encrypts +message+ into itself and
    # returns [message, mac].
    def detached!(message, nonce, key)
      check_length(nonce, NONCEBYTES, :Nonce)
      check_length(key, KEYBYTES, :SecretKey)

      mac = zeros(MACBYTES)
      key.readonly if key.is_a?(Sodium::SecretBuffer)
      crypto_secretbox_detached(message, mac, message, get_size(message), nonce, key)

      [message, mac]
    ensure
      key.noaccess if key.is_a?(Sodium::SecretBuffer)
    end

    # In-place detached decryption: verifies +mac+ and decrypts
    # +ciphertext+ into itself. Raises Sodium::CryptoError on forgery.
    def open_detached!(ciphertext, mac, nonce, key, encoding = nil)
      check_length(mac, MACBYTES, :Mac)
      check_length(nonce, NONCEBYTES, :Nonce)
      check_length(key, KEYBYTES, :SecretKey)

      key.readonly if key.is_a?(Sodium::SecretBuffer)
      if crypto_secretbox_open_detached(ciphertext, ciphertext, mac, get_size(ciphertext), nonce, key) == -1
        raise Sodium::CryptoError, "Message forged", caller
      end

      # Unlike the other variants, +ciphertext+ here may be a non-String
      # buffer (e.g. an FFI pointer), so guard the encoding call.
      if encoding && ciphertext.respond_to?(:force_encoding)
        ciphertext.force_encoding(encoding)
      end

      ciphertext
    ensure
      key.noaccess if key.is_a?(Sodium::SecretBuffer)
    end
  end

  SecretBox.freeze

  module_function

  # Convenience forwarders so callers can use Crypto.secretbox(...)
  # without naming the nested module.
  def secretbox(*args)
    SecretBox.secretbox(*args)
  end

  def secretbox!(*args)
    SecretBox.secretbox!(*args)
  end
end
| Asmod4n/ruby-ffi-libsodium | lib/crypto/secret_box.rb | Ruby | apache-2.0 | 5,944 |
/*
* Copyright 2014 Massachusetts General Hospital
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.mindinformatics.gwt.domeo.plugins.persistence.annotopia.serializers;
import java.util.List;
import org.mindinformatics.gwt.domeo.model.MAnnotation;
import org.mindinformatics.gwt.domeo.model.MAnnotationSet;
import org.mindinformatics.gwt.domeo.model.persistence.ontologies.IDomeoOntology;
import org.mindinformatics.gwt.domeo.model.persistence.ontologies.IPavOntology;
import org.mindinformatics.gwt.domeo.model.persistence.ontologies.IRdfsOntology;
import org.mindinformatics.gwt.domeo.plugins.persistence.annotopia.model.IAnnotopia;
import org.mindinformatics.gwt.framework.src.Utils;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.json.client.JSONObject;
import com.google.gwt.json.client.JSONString;
/**
* This class serializes the Annotation Set to Annotopia JSON format.
*
* @author Paolo Ciccarese <paolo.ciccarese@gmail.com>
*/
public class SAnnotationSetSerializer extends AAnnotopiaSerializer implements IAnnotopiaSerializer {

	/**
	 * Serializes an {@link MAnnotationSet} into the Annotopia JSON format.
	 * Provenance (creation and import metadata) is emitted when present,
	 * and every annotation in the set is serialized through the manager.
	 *
	 * @param manager the serializer manager used to serialize agents and annotations
	 * @param obj     the annotation set to serialize; must be an MAnnotationSet
	 * @return the JSON representation of the annotation set
	 */
	@Override
	public JSONObject serialize(AnnotopiaSerializerManager manager, Object obj) {
		MAnnotationSet annotationSet = (MAnnotationSet) obj;
		JSONObject annotationSetJson = new JSONObject();
		annotationSetJson.put(IRdfsOntology.type, new JSONString(IAnnotopia.ANNOTATION_SET));

		// Mandatory identity fields.
		// These have to exist and defined
		// TODO HIGH track exception when any of these is null or blank
		annotationSetJson.put("@context",new JSONString(Utils.getAnnotopiaContextPath( )));
		annotationSetJson.put(IRdfsOntology.id, new JSONString(annotationSet.getIndividualUri()));
		annotationSetJson.put(IDomeoOntology.transientLocalId, new JSONString(Long.toString(annotationSet.getLocalId())));
		annotationSetJson.put("serializedBy",new JSONString("urn:application:domeo"));
		//annotationSetJson.put(IDomeoOntology.annotates, new JSONString(annotationSet.getTargetResource().getUrl()));
		//manager.addResourceToSerialize(annotationSet.getTargetResource());

		// Creation provenance: author, timestamp and creating software agent.
		// Creation
		// --------------------------------------------------------------------
		if(annotationSet.getCreatedBy()!=null) {
			//manager.addAgentToSerialize(annotationSet.getCreatedBy());
			annotationSetJson.put("createdBy", manager.serialize(annotationSet.getCreatedBy()));
			//annotationSetJson.put(IPavOntology.createdBy, serializeAgent(manager, annotationSet.getCreatedBy()));
		} else {
			// Warning/Exception?
		}
		if(annotationSet.getCreatedOn()!=null) {
			annotationSetJson.put("createdAt", new JSONString(Utils.fullfmt2.format(annotationSet.getCreatedOn())));
		} else {
			// Warning/Exception?
		}
		if(annotationSet.getCreatedWith()!=null) {
			//manager.addAgentToSerialize(annotationSet.getCreatedWith());
			annotationSetJson.put("createdWith", manager.serialize(annotationSet.getCreatedWith()));
			//annotationSetJson.put(IPavOntology.createdWith, serializeAgent(manager, annotationSet.getCreatedWith()));
		} else {
			// Warning/Exception?
		}
		if(annotationSet.getLastSavedOn()!=null) {
			annotationSetJson.put("lastUpdateOn", new JSONString(Utils.fullfmt2.format(annotationSet.getLastSavedOn())));
		}

		// Import provenance: agents are referenced by URI here (not serialized inline).
		// Imports
		// --------------------------------------------------------------------
		if(annotationSet.getImportedFrom()!=null) {
			//manager.addAgentToSerialize(annotationSet.getImportedFrom());
			annotationSetJson.put(IPavOntology.importedFrom, new JSONString(annotationSet.getImportedFrom().getUri()));
			//annotationSetJson.put(IPavOntology.importedFrom, serializeAgent(manager, annotationSet.getImportedFrom()));
		}
		if(annotationSet.getImportedBy()!=null) {
			//manager.addAgentToSerialize(annotationSet.getImportedBy());
			annotationSetJson.put(IPavOntology.importedBy, new JSONString(annotationSet.getImportedBy().getUri()));
			//annotationSetJson.put(IPavOntology.importedBy, serializeAgent(manager, annotationSet.getImportedBy()));
		}
		if(annotationSet.getImportedOn()!=null) {
			annotationSetJson.put(IPavOntology.importedOn, new JSONString(Utils.fullfmt2.format(annotationSet.getImportedOn())));
		}

		// Optional descriptive/versioning fields; the nullable() helpers
		// (inherited from the base serializer) translate null values into blank strings.
		// These translate null values into blank strings
		annotationSetJson.put("label", nullable(annotationSet.getLabel()));
		annotationSetJson.put("description", nullable(annotationSet.getDescription()));
		annotationSetJson.put("lineageUri", nullable(annotationSet.getLineageUri()));
		annotationSetJson.put("version", nullable(annotationSet.getVersionNumber()));
		annotationSetJson.put("previousVersion", nullable(annotationSet.getPreviousVersion()));
		annotationSetJson.put("deleted", nullableBoolean(annotationSet.getIsDeleted()));

		// Serialization of the annotation items that have changed
		// NOTE(review): despite the comment above, ALL annotations are
		// currently serialized — the getHasChanged() filter is commented out.
		JSONArray annotations = new JSONArray();
		List<MAnnotation> annotationsList = annotationSet.getAnnotations();
		for(int i=0; i<annotationsList.size(); i++) {
			//if(annotationsList.get(i).getHasChanged())
			annotations.set(i, manager.serialize(annotationsList.get(i)));
		}
		annotationSetJson.put("annotations", annotations);
		return annotationSetJson;
	}
}
| domeo/DomeoClient | src/org/mindinformatics/gwt/domeo/plugins/persistence/annotopia/serializers/SAnnotationSetSerializer.java | Java | apache-2.0 | 5,833 |
# Copyright © 2012 The Pennsylvania State University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Resque job that applies a batch of metadata/permission updates to all
# GenericFiles in a Batch, then notifies the requesting user of which
# files were saved and which were denied.
class BatchUpdateJob
  include Hydra::AccessControlsEnforcement
  include GenericFileHelper
  include Rails.application.routes.url_helpers

  # Resque queue this job is enqueued on.
  def self.queue
    :batch_update
  end

  # Resque entry point; all the work happens in the constructor.
  def self.perform(*args)
    new(*args)
  end

  # login  - login of the user who requested the batch update
  # params - batch id, per-file titles and generic_file attributes
  # perms  - permissions payload (currently unused; kept for interface compatibility)
  def initialize(login, params, perms)
    params = HashWithIndifferentAccess.new(params)
    perms = HashWithIndifferentAccess.new(perms)
    batch = Batch.find_or_create(params[:id])
    user = User.find_by_login(login)
    saved = []
    denied = []
    batch.generic_files.each do |gf|
      # Skip (and record) files the requesting user cannot edit.
      unless user.can? :edit, get_permissions_solr_response_for_doc_id(gf.pid)[1]
        logger.error "User #{user.login} DEEEENIED access to #{gf.pid}!"
        denied << gf
        next
      end
      # Only override the title when one was supplied for this file.
      # (Replaces the original `... if params[:title][gf.pid] rescue gf.label`
      # construct, which raised an unrescued NoMethodError when params[:title]
      # was nil: the rescue modifier guarded only the condition, not the
      # assignment.)
      new_title = params[:title] && params[:title][gf.pid]
      gf.title = new_title if new_title
      gf.update_attributes(params[:generic_file])
      gf.set_visibility(params)
      # Solr can transiently refuse the save; retry a couple of times with
      # a short pause before giving up for good.
      save_tries = 0
      begin
        gf.save
      rescue RSolr::Error::Http => error
        save_tries += 1
        logger.warn "BatchUpdateJob caught RSOLR error on #{gf.pid}: #{error.inspect}"
        # fail for good if the tries is greater than 3
        rescue_action_without_handler(error) if save_tries >= 3
        sleep 0.01
        retry
      end
      # Event publication is best-effort: a Redis outage must not fail the batch.
      begin
        Resque.enqueue(ContentUpdateEventJob, gf.pid, login)
      rescue Redis::CannotConnectError
        logger.error "Redis is down!"
      end
      saved << gf
    end
    batch.update_attributes({status:["Complete"]})

    # Notify the requester via the shared "batchuser" system account.
    job_user = User.where(login:"batchuser").first
    job_user = User.create(login:"batchuser", email:"batchmail") unless job_user

    message = '<a class="batchid ui-helper-hidden">ss-'+batch.noid+'</a>The file(s) '+ file_list(saved)+ " have been saved." unless saved.empty?
    job_user.send_message(user, message, 'Batch upload complete') unless saved.empty?

    message = '<a class="batchid ui-helper-hidden">'+batch.noid+'</a>The file(s) '+ file_list(denied)+" could not be updated.  You do not have sufficient privileges to edit it." unless denied.empty?
    job_user.send_message(user, message, 'Batch upload permission denied') unless denied.empty?
  end

  # Builds a comma-separated list of HTML links to the given generic files.
  def file_list(files)
    files.map { |gf| '<a href="'+generic_files_path+'/'+gf.noid+'">'+display_title(gf)+'</a>' }.join(', ')
  end
end
| ndlib/scholarsphere | app/models/batch_update_job.rb | Ruby | apache-2.0 | 2,932 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Core.Impl.Transactions
{
using System;
using System.Globalization;
using System.Threading;
using System.Threading.Tasks;
using Apache.Ignite.Core.Impl.Common;
using Apache.Ignite.Core.Transactions;
/// <summary>
/// Grid cache transaction implementation.
/// </summary>
internal sealed class TransactionImpl : IDisposable
{
/** Metadatas. */
private object[] _metas;
/** Unique transaction ID.*/
private readonly long _id;
/** Cache. */
private readonly TransactionsImpl _txs;
/** TX concurrency. */
private readonly TransactionConcurrency _concurrency;
/** TX isolation. */
private readonly TransactionIsolation _isolation;
/** Timeout. */
private readonly TimeSpan _timeout;
/** Start time. */
private readonly DateTime _startTime;
/** Owning thread ID. */
private readonly int _threadId;
/** Originating node ID. */
private readonly Guid _nodeId;
/** State holder. */
private StateHolder _state;
// ReSharper disable once InconsistentNaming
/** Transaction for this thread. */
[ThreadStatic]
private static TransactionImpl THREAD_TX;
/// <summary>
/// Constructor.
/// </summary>
/// <param name="id">ID.</param>
/// <param name="txs">Transactions.</param>
/// <param name="concurrency">TX concurrency.</param>
/// <param name="isolation">TX isolation.</param>
/// <param name="timeout">Timeout.</param>
/// <param name="nodeId">The originating node identifier.</param>
public TransactionImpl(long id, TransactionsImpl txs, TransactionConcurrency concurrency,
TransactionIsolation isolation, TimeSpan timeout, Guid nodeId) {
_id = id;
_txs = txs;
_concurrency = concurrency;
_isolation = isolation;
_timeout = timeout;
_nodeId = nodeId;
_startTime = DateTime.Now;
_threadId = Thread.CurrentThread.ManagedThreadId;
THREAD_TX = this;
}
/// <summary>
/// Transaction assigned to this thread.
/// </summary>
public static Transaction Current
{
get
{
var tx = THREAD_TX;
if (tx == null)
return null;
if (tx.IsClosed)
{
THREAD_TX = null;
return null;
}
return new Transaction(tx);
}
}
/// <summary>
/// Executes prepare step of the two phase commit.
/// </summary>
public void Prepare()
{
lock (this)
{
ThrowIfClosed();
_txs.TxPrepare(this);
}
}
/// <summary>
/// Commits this tx and closes it.
/// </summary>
public void Commit()
{
lock (this)
{
ThrowIfClosed();
_state = new StateHolder(_txs.TxCommit(this));
}
}
/// <summary>
/// Rolls this tx back and closes it.
/// </summary>
public void Rollback()
{
lock (this)
{
ThrowIfClosed();
_state = new StateHolder(_txs.TxRollback(this));
}
}
/// <summary>
/// Sets the rollback only flag.
/// </summary>
public bool SetRollbackOnly()
{
lock (this)
{
ThrowIfClosed();
return _txs.TxSetRollbackOnly(this);
}
}
/// <summary>
/// Gets a value indicating whether this instance is rollback only.
/// </summary>
public bool IsRollbackOnly
{
get
{
lock (this)
{
var state0 = _state == null ? State : _state.State;
return state0 == TransactionState.MarkedRollback ||
state0 == TransactionState.RollingBack ||
state0 == TransactionState.RolledBack;
}
}
}
/// <summary>
/// Gets the state.
/// </summary>
public TransactionState State
{
get
{
lock (this)
{
return _state != null ? _state.State : _txs.TxState(this);
}
}
}
/// <summary>
/// Gets the isolation.
/// </summary>
public TransactionIsolation Isolation
{
get { return _isolation; }
}
/// <summary>
/// Gets the concurrency.
/// </summary>
public TransactionConcurrency Concurrency
{
get { return _concurrency; }
}
/// <summary>
/// Gets the timeout.
/// </summary>
public TimeSpan Timeout
{
get { return _timeout; }
}
/// <summary>
/// Gets the start time.
/// </summary>
public DateTime StartTime
{
get { return _startTime; }
}
/// <summary>
/// Gets the node identifier.
/// </summary>
public Guid NodeId
{
get { return _nodeId; }
}
/// <summary>
/// Gets the thread identifier.
/// </summary>
public long ThreadId
{
get { return _threadId; }
}
/// <summary>
/// Adds a new metadata.
/// </summary>
public void AddMeta<TV>(string name, TV val)
{
if (name == null)
throw new ArgumentException("Meta name cannot be null.");
lock (this)
{
if (_metas != null)
{
int putIdx = -1;
for (int i = 0; i < _metas.Length; i += 2)
{
if (name.Equals(_metas[i]))
{
_metas[i + 1] = val;
return;
}
if (_metas[i] == null && putIdx == -1)
// Preserve empty space index.
putIdx = i;
}
// No meta with the given name found.
if (putIdx == -1)
{
// Extend array.
putIdx = _metas.Length;
object[] metas0 = new object[putIdx + 2];
Array.Copy(_metas, metas0, putIdx);
_metas = metas0;
}
_metas[putIdx] = name;
_metas[putIdx + 1] = val;
}
else
_metas = new object[] { name, val };
}
}
/// <summary>
/// Gets metadata by name.
/// </summary>
public TV Meta<TV>(string name)
{
if (name == null)
throw new ArgumentException("Meta name cannot be null.");
lock (this)
{
if (_metas != null)
{
for (int i = 0; i < _metas.Length; i += 2)
{
if (name.Equals(_metas[i]))
return (TV)_metas[i + 1];
}
}
return default(TV);
}
}
/// <summary>
/// Removes metadata by name.
/// </summary>
public TV RemoveMeta<TV>(string name)
{
if (name == null)
throw new ArgumentException("Meta name cannot be null.");
lock (this)
{
if (_metas != null)
{
for (int i = 0; i < _metas.Length; i += 2)
{
if (name.Equals(_metas[i]))
{
TV val = (TV)_metas[i + 1];
_metas[i] = null;
_metas[i + 1] = null;
return val;
}
}
}
return default(TV);
}
}
/// <summary>
/// Commits tx in async mode.
/// </summary>
internal Task CommitAsync()
{
lock (this)
{
ThrowIfClosed();
return CloseWhenComplete(_txs.CommitAsync(this));
}
}
/// <summary>
/// Rolls tx back in async mode.
/// </summary>
internal Task RollbackAsync()
{
lock (this)
{
ThrowIfClosed();
return CloseWhenComplete(_txs.RollbackAsync(this));
}
}
/// <summary>
/// Transaction ID.
/// </summary>
internal long Id
{
get { return _id; }
}
/** <inheritdoc /> */
public void Dispose()
{
try
{
Close();
}
finally
{
GC.SuppressFinalize(this);
}
}
/// <summary>
/// Gets a value indicating whether this transaction is closed.
/// </summary>
private bool IsClosed
{
get { return _state != null; }
}
/// <summary>
/// Gets the closed exception.
/// </summary>
private InvalidOperationException GetClosedException()
{
return new InvalidOperationException(string.Format(CultureInfo.InvariantCulture,
"Transaction {0} is closed, state is {1}", Id, State));
}
/// <summary>
/// Creates a task via provided factory if IsClosed is false; otherwise, return a task with an error.
/// </summary>
internal Task GetTask(Func<Task> operationFactory)
{
lock (this)
{
return IsClosed ? GetExceptionTask() : operationFactory();
}
}
/// <summary>
/// Gets the task that throws an exception.
/// </summary>
private Task GetExceptionTask()
{
var tcs = new TaskCompletionSource<object>();
tcs.SetException(GetClosedException());
return tcs.Task;
}
/// <summary>
/// Closes the transaction and releases unmanaged resources.
/// </summary>
private void Close()
{
lock (this)
{
_state = _state ?? new StateHolder((TransactionState) _txs.TxClose(this));
}
}
/// <summary>
/// Throws and exception if transaction is closed.
/// </summary>
private void ThrowIfClosed()
{
if (IsClosed)
throw GetClosedException();
}
/// <summary>
/// Closes this transaction upon task completion.
/// </summary>
private Task CloseWhenComplete(Task task)
{
return task.ContWith(x => Close());
}
/** <inheritdoc /> */
~TransactionImpl()
{
Dispose();
}
/// <summary>
/// State holder.
/// </summary>
private class StateHolder
{
/** Current state. */
private readonly TransactionState _state;
/// <summary>
/// Constructor.
/// </summary>
/// <param name="state">State.</param>
public StateHolder(TransactionState state)
{
_state = state;
}
/// <summary>
/// Current state.
/// </summary>
public TransactionState State
{
get { return _state; }
}
}
}
}
| endian675/ignite | modules/platforms/dotnet/Apache.Ignite.Core/Impl/Transactions/TransactionImpl.cs | C# | apache-2.0 | 13,246 |
package com.nwea.dao;
import com.nwea.bean.Blogpost;
import java.sql.SQLException;
/**
* This interface will be used to invoke the database related operations.
*
*
* Created by tahmidchowdhury on 2/9/17.
*/
public interface BlogpostDAO {
//This method returns all the Blogposts from the database
public Blogpost[] getAllPosts() throws SQLException;
//This method inserts data into database table
public boolean insertBlogpost(String title, String body) throws SQLException;
}
| tahmid29/blogpostapi | src/main/java/com/nwea/dao/BlogpostDAO.java | Java | apache-2.0 | 502 |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/api/quota.proto
require 'google/protobuf'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/api/quota.proto", :syntax => :proto3) do
add_message "google.api.Quota" do
repeated :limits, :message, 3, "google.api.QuotaLimit"
repeated :metric_rules, :message, 4, "google.api.MetricRule"
end
add_message "google.api.MetricRule" do
optional :selector, :string, 1
map :metric_costs, :string, :int64, 2
end
add_message "google.api.QuotaLimit" do
optional :name, :string, 6
optional :description, :string, 2
optional :default_limit, :int64, 3
optional :max_limit, :int64, 4
optional :free_tier, :int64, 7
optional :duration, :string, 5
optional :metric, :string, 8
optional :unit, :string, 9
map :values, :string, :int64, 10
optional :display_name, :string, 12
end
end
end
module Google
module Api
Quota = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.api.Quota").msgclass
MetricRule = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.api.MetricRule").msgclass
QuotaLimit = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.api.QuotaLimit").msgclass
end
end
| googleapis/common-protos-ruby | googleapis-common-protos-types/lib/google/api/quota_pb.rb | Ruby | apache-2.0 | 1,330 |
/*
* (c) Copyright 2018 Palantir Technologies Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.util;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.when;
import com.palantir.common.concurrent.PTExecutors;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
public class CachedComposedSupplierTest {
private long counter;
private long supplierCounter;
private Supplier<VersionedType<Long>> mockVersionedSupplier;
private Supplier<Long> testSupplier;
@Before
public void setup() {
counter = 0;
supplierCounter = 0;
mockVersionedSupplier = Mockito.mock(Supplier.class);
testSupplier = new CachedComposedSupplier<>(this::countingFunction, mockVersionedSupplier);
}
@Test
public void appliesFunctionToNullValue() {
when(mockVersionedSupplier.get()).thenReturn(VersionedType.of(null, 0));
assertThat(counter).isEqualTo(0);
assertThat(testSupplier.get()).isNull();
assertThat(counter).isEqualTo(1);
}
@Test
public void appliesFunctionOnlyOnceWhenUnderlyingSupplierIsConstant() {
when(mockVersionedSupplier.get()).thenReturn(VersionedType.of(3L, 0));
assertThat(counter).isEqualTo(0);
assertThat(testSupplier.get()).isEqualTo(6);
assertThat(testSupplier.get()).isEqualTo(6);
assertThat(testSupplier.get()).isEqualTo(6);
assertThat(counter).isEqualTo(1);
}
@Test
public void appliesFunctionEachTimeGetIsInvokedAndSuppliedVersionChanged() {
when(mockVersionedSupplier.get()).thenReturn(VersionedType.of(null, 0));
assertThat(testSupplier.get()).isNull();
assertThat(counter).isEqualTo(1);
when(mockVersionedSupplier.get()).thenReturn(VersionedType.of(3L, 1));
assertThat(testSupplier.get()).isEqualTo(6);
assertThat(counter).isEqualTo(2);
when(mockVersionedSupplier.get()).thenReturn(VersionedType.of(3L, 2));
assertThat(testSupplier.get()).isEqualTo(6);
assertThat(testSupplier.get()).isEqualTo(6);
assertThat(counter).isEqualTo(3);
when(mockVersionedSupplier.get()).thenReturn(VersionedType.of(8L, 3));
assertThat(testSupplier.get()).isEqualTo(16);
assertThat(counter).isEqualTo(4);
}
@Test
public void doesNotApplyFunctionIfGetIsInvokedAndSuppliedVersionConstant() {
when(mockVersionedSupplier.get()).thenReturn(VersionedType.of(null, 0));
assertThat(testSupplier.get()).isNull();
assertThat(counter).isEqualTo(1);
when(mockVersionedSupplier.get()).thenReturn(VersionedType.of(3L, 0));
assertThat(testSupplier.get()).isNull();
assertThat(counter).isEqualTo(1);
when(mockVersionedSupplier.get()).thenReturn(VersionedType.of(8L, 0));
assertThat(testSupplier.get()).isNull();
assertThat(counter).isEqualTo(1);
when(mockVersionedSupplier.get()).thenReturn(VersionedType.of(3L, 1));
assertThat(testSupplier.get()).isEqualTo(6);
assertThat(counter).isEqualTo(2);
}
@Test
public void appliesFunctionExactlyOncePerSuppliedValueChange() throws InterruptedException {
testSupplier = new CachedComposedSupplier<>(this::countingFunction, this::increasingNumber);
ExecutorService executorService = PTExecutors.newFixedThreadPool(16);
for (int i = 0; i < 100_000; i++) {
executorService.submit(testSupplier::get);
}
executorService.shutdown();
executorService.awaitTermination(1, TimeUnit.MINUTES);
assertThat(supplierCounter).isGreaterThan(100_000);
assertThat(counter).isEqualTo(1 + supplierCounter / 100);
}
@Test
@SuppressWarnings("ReturnValueIgnored") // Test relating to properties of a Supplier
public void recomputesIfSupplierHasNotUpdatedForTooLong() {
AtomicLong clockCounter = new AtomicLong();
testSupplier = new CachedComposedSupplier<>(this::countingFunction, this::constantNumber, 5, clockCounter::get);
for (int i = 0; i < 25; i++) {
clockCounter.incrementAndGet();
testSupplier.get();
}
assertThat(counter).isEqualTo(5);
}
private Long countingFunction(Long input) {
counter++;
if (input == null) {
return null;
}
return input * 2;
}
private synchronized VersionedType<Long> increasingNumber() {
supplierCounter++;
return VersionedType.of(supplierCounter, supplierCounter / 100);
}
private VersionedType<Long> constantNumber() {
return VersionedType.of(1L, 0);
}
}
| palantir/atlasdb | atlasdb-commons/src/test/java/com/palantir/util/CachedComposedSupplierTest.java | Java | apache-2.0 | 5,427 |
<?php
/**
* Copyright 2015 yannick.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
*/
/**
* Create HTML list of nav menu items.
*
* @since 3.0.0
* @uses Walker
*/
class Custom_walker_Nav_Menu extends Walker_Nav_Menu {
/**
* What the class handles.
*
* @see Walker::$tree_type
* @since 3.0.0
* @var string
*/
public $tree_type = array('post_type', 'taxonomy', 'custom');
/**
* Database fields to use.
*
* @see Walker::$db_fields
* @since 3.0.0
* @todo Decouple this.
* @var array
*/
public $db_fields = array('parent' => 'menu_item_parent', 'id' => 'db_id');
/**
* Starts the list before the elements are added.
*
* @see Walker::start_lvl()
*
* @since 3.0.0
*
* @param string $output Passed by reference. Used to append additional content.
* @param int $depth Depth of menu item. Used for padding.
* @param array $args An array of arguments. @see wp_nav_menu()
*/
public function start_lvl(&$output, $depth = 0, $args = array()) {
$indent = str_repeat("\t", $depth);
$output .= "\n$indent<ul class=\"dropdown-menu\" role=\"menu\">\n";
}
/**
* Ends the list of after the elements are added.
*
* @see Walker::end_lvl()
*
* @since 3.0.0
*
* @param string $output Passed by reference. Used to append additional content.
* @param int $depth Depth of menu item. Used for padding.
* @param array $args An array of arguments. @see wp_nav_menu()
*/
public function end_lvl(&$output, $depth = 0, $args = array()) {
$indent = str_repeat("\t", $depth);
$output .= "$indent</ul>\n";
}
/**
* Start the element output.
*
* @see Walker::start_el()
*
* @since 3.0.0
*
* @param string $output Passed by reference. Used to append additional content.
* @param object $item Menu item data object.
* @param int $depth Depth of menu item. Used for padding.
* @param array $args An array of arguments. @see wp_nav_menu()
* @param int $id Current item ID.
*/
public function start_el(&$output, $item, $depth = 0, $args = array(), $id = 0) {
$indent = ( $depth ) ? str_repeat("\t", $depth) : '';
/**
* Filter the ID applied to a menu item's list item element.
*
* @since 3.0.1
* @since 4.1.0 The `$depth` parameter was added.
*
* @param string $menu_id The ID that is applied to the menu item's `<li>` element.
* @param object $item The current menu item.
* @param array $args An array of {@see wp_nav_menu()} arguments.
* @param int $depth Depth of menu item. Used for padding.
*/
$id = apply_filters('nav_menu_item_id', 'menu-item-' . $item->ID, $item, $args, $depth);
$id = $id ? ' id="' . esc_attr($id) . '"' : '';
$atts = array();
$atts['title'] = !empty($item->attr_title) ? $item->attr_title : '';
$atts['target'] = !empty($item->target) ? $item->target : '';
$atts['rel'] = !empty($item->xfn) ? $item->xfn : '';
$atts['href'] = !empty($item->url) ? $item->url : '';
/**
* Detect sub menu condition.
*/
if (!("$this->has_children" == "1")) {
$output .= $indent . '<li>';
if(is_object($args)) $args->link_after = '';
} else {
$output .= $indent . "<li class=\"dropdown\">";
$atts['class'] = 'dropdown-toggle';
$atts['data-toggle'] = 'dropdown';
$atts['role'] = 'button';
$atts['aria-expanded'] = 'false';
$args->link_after = ' <span class="caret"></span>';
}
/**
* Filter the HTML attributes applied to a menu item's anchor element.
*
* @since 3.6.0
* @since 4.1.0 The `$depth` parameter was added.
*
* @param array $atts {
* The HTML attributes applied to the menu item's `<a>` element, empty strings are ignored.
*
* @type string $title Title attribute.
* @type string $target Target attribute.
* @type string $rel The rel attribute.
* @type string $href The href attribute.
* }
* @param object $item The current menu item.
* @param array $args An array of {@see wp_nav_menu()} arguments.
* @param int $depth Depth of menu item. Used for padding.
*/
$atts = apply_filters('nav_menu_link_attributes', $atts, $item, $args, $depth);
$attributes = '';
foreach ($atts as $attr => $value) {
if (!empty($value)) {
$value = ( 'href' === $attr ) ? esc_url($value) : esc_attr($value);
$attributes .= ' ' . $attr . '="' . $value . '"';
}
}
$item_output = $args->before;
$item_output .= '<a' . $attributes . '>';
/** This filter is documented in wp-includes/post-template.php */
$item_output .= $args->link_before . apply_filters('the_title', $item->title, $item->ID) . $args->link_after;
$item_output .= '</a>';
$item_output .= $args->after;
/**
* Filter a menu item's starting output.
*
* The menu item's starting output only includes `$args->before`, the opening `<a>`,
* the menu item's title, the closing `</a>`, and `$args->after`. Currently, there is
* no filter for modifying the opening and closing `<li>` for a menu item.
*
* @since 3.0.0
*
* @param string $item_output The menu item's starting HTML output.
* @param object $item Menu item data object.
* @param int $depth Depth of menu item. Used for padding.
* @param array $args An array of {@see wp_nav_menu()} arguments.
*/
$output .= apply_filters('walker_nav_menu_start_el', $item_output, $item, $depth, $args);
}
/**
* Ends the element output, if needed.
*
* @see Walker::end_el()
*
* @since 3.0.0
*
* @param string $output Passed by reference. Used to append additional content.
* @param object $item Page data object. Not used.
* @param int $depth Depth of page. Not Used.
* @param array $args An array of arguments. @see wp_nav_menu()
*/
public function end_el(&$output, $item, $depth = 0, $args = array()) {
/**
* Detect sub menu condition.
*/
if (!("$this->has_children" == "1")) {
$output .= "</li>\n";
}
}
}
// Walker_Nav_Menu
/**
* Description of boostrap
*
* @author yannick
*/
class bootstrap {
/**
* build menu
*/
public static function menu() {
$defaults = array(
'theme_location' => '',
'menu' => '',
'container' => 'div',
'container_class' => 'collapse navbar-collapse',
'container_id' => 'bs-example-navbar-collapse-1',
'menu_class' => '',
'menu_id' => '',
'echo' => true,
'fallback_cb' => 'wp_page_menu',
'items_wrap' => '<ul id="%1$s" class="nav navbar-nav">%3$s</ul>',
'depth' => 0,
'walker' => new Custom_walker_Nav_Menu
);
?>
<nav class="navbar navbar-default">
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#bs-example-navbar-collapse-1">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="#"><?php bloginfo('name') ?></a>
</div>
<!-- Collect the nav links, forms, and other content for toggling -->
<div class="container-fluid">
<?php
wp_nav_menu($defaults);
?>
</div>
</nav>
<?php
}
/**
* build menu
*/
public static function carousel() {
?>
<div id="carousel-example-generic" class="carousel slide" data-ride="carousel">
<!-- Indicators -->
<ol class="carousel-indicators">
<li data-target="#carousel-example-generic" data-slide-to="0" class="active"></li>
<li data-target="#carousel-example-generic" data-slide-to="1"></li>
<li data-target="#carousel-example-generic" data-slide-to="2"></li>
</ol>
<!-- Wrapper for slides -->
<div class="carousel-inner" role="listbox">
<?php $category_id = get_theme_mod('illeetzick_set_carousel', '0'); ?>
<?php $url_img = get_theme_mod('illeetzick_url_carousel', '0'); ?>
<?php $query = new WP_Query( 'cat='.$category_id ); $count = 0; ?>
<?php if ( $query->have_posts() ) : while ( $query->have_posts() ) : $query->the_post(); $active = ($count == 0)?>
<div class="item <?php if($active) {echo "active";} ?> fix-carousel">
<img class="img-responsive img-rounded" src="<?php echo $url_img; ?>" alt="Responsive image" />
<div class="carousel-caption">
<h3><?php the_title(); ?></h3>
<?php the_content(); ?>
</div>
</div>
<?php $count=$count+1; endwhile; endif; wp_reset_postdata();
?>
<!-- Controls -->
<a class="left carousel-control" href="#carousel-example-generic" role="button" data-slide="prev">
<span class="glyphicon glyphicon-chevron-left" aria-hidden="true"></span>
<span class="sr-only">Previous</span>
</a>
<a class="right carousel-control" href="#carousel-example-generic" role="button" data-slide="next">
<span class="glyphicon glyphicon-chevron-right" aria-hidden="true"></span>
<span class="sr-only">Next</span>
</a>
</div>
</div>
<?php
}
/**
* build menu
*/
public static function post() {
?>
<br>
<blockquote>
<em>Posted on:</em> <?php the_time('F jS, Y') ?>
<em>by</em> <?php the_author() ?>
<a href="<?php the_permalink() ?>"><?php the_title(); ?></a>
<?php the_tags('Tags: ', ', ', '<br />'); ?>
<p>Categories: <?php the_category( ' ' ); ?></p>
</blockquote>
<!-- Post <?php the_ID() ?> -->
<div class="panel-group" id="panel-the-post">
<div class="panel panel-default">
<div class="panel-heading">
<a class="panel-title" data-toggle="collapse" data-parent="#panel-the-post" href="#panel-element-content">
<?php the_title(); ?>
</a>
</div>
<div id="panel-element-content" class="panel-collapse in" style="height: auto;">
<div class="panel-body">
<!-- Post <?php the_ID() ?> content begin -->
<?php the_content(); ?>
<!-- Post <?php the_ID() ?> content end -->
</div>
</div>
</div>
<div class="panel panel-default">
<div class="panel-heading">
<a class="panel-title collapsed" data-toggle="collapse" data-parent="#panel-the-post" href="#panel-element-comments">
Comments
</a>
</div>
<div id="panel-element-comments" class="panel-collapse collapse" style="height: 0px;">
<div class="panel-body">
<?php comments_template(); ?>
</div>
</div>
</div>
</div>
<?php
}
}
| yroffin/wordpress | themes/yrosimple/inc/bootstrap.php | PHP | apache-2.0 | 12,965 |
<?php
/**
Copyright 2013 Nikhil Patil
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**/
// Current version
define ( "VERSION", "0.1" );
// Configuration file path
define ( "CONF_PATH", "./conf/solr.php" );
// Error messages
define ( "ERROR_MISSING_CORE_INFO", "Cores could not be loaded" );
define ( "ERROR_UPDATING_CONF_FILE", "Error updating configuration file at " . CONF_PATH . " Please check file permissions" );
define ( "ERROR_MISSING_SYSTEM_INFO", "System info could not be retrieved. Solr older than 1.4?" );
define ( "ERROR_MISSING_CORE_STATS", "Stats for this core could not be retrieved" );
define ( "ERROR_NO_ACTIVE_HANDLERS", "There are no active handlers on this core" );
define ( "ERROR_NO_CACHE_INFO", "Details about cache unavailable" );
// Autoloader
function __autoload($class) {
require_once strtolower ( str_replace ( '_', DIRECTORY_SEPARATOR, $class ) ) . '.php';
}
| nikhilapatil/solr-monitor | lib/basic.php | PHP | apache-2.0 | 1,380 |
package io.github.tramchamploo.bufferslayer;
import io.github.tramchamploo.bufferslayer.Message.MessageKey;
import io.github.tramchamploo.bufferslayer.OverflowStrategy.Strategy;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
@Measurement(iterations = 5, time = 1)
@Warmup(iterations = 10, time = 1)
@Fork(3)
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
@State(Scope.Group)
public class ConcurrentSizeBoundedQueueBenchmark extends AbstractSizeBoundedQueueBenchmark {
@Override
protected AbstractSizeBoundedQueue newQueue(int maxSize, Strategy strategy, MessageKey key) {
return new ConcurrentSizeBoundedQueue(maxSize, strategy, key);
}
public static void main(String[] args) throws RunnerException {
Options opt = new OptionsBuilder()
.include(".*" + ConcurrentSizeBoundedQueueBenchmark.class.getSimpleName() + ".*")
.build();
new Runner(opt).run();
}
}
| tramchamploo/buffer-slayer | benchmark/src/main/java/io/github/tramchamploo/bufferslayer/ConcurrentSizeBoundedQueueBenchmark.java | Java | apache-2.0 | 1,452 |
/*
* Copyright 2014 Red Hat, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rhq.simone.system;
import static jnr.ffi.Platform.getNativePlatform;
import jnr.ffi.Platform;
import jnr.posix.POSIX;
import jnr.posix.POSIXFactory;
import org.rhq.simone.system.linux.LinuxSystemService;
import org.rhq.simone.util.log.Log;
import org.rhq.simone.util.log.LogFactory;
/**
* @author Thomas Segismont
*/
public class SystemServiceFactory {
private static final Log LOG = LogFactory.getLog(SystemServiceFactory.class);
private SystemServiceFactory() {
// Factory
}
public static SystemService createSystemService() {
return createSystemService(POSIXFactory.getPOSIX());
}
public static SystemService createSystemService(POSIX posix) {
try {
Platform.OS os = getNativePlatform().getOS();
switch (os) {
case LINUX:
return createLinuxSystemService(posix);
default:
return null;
}
} catch (Throwable t) {
LOG.error("Failed to create SystemService", t);
return null;
}
}
public static SystemService createLinuxSystemService(POSIX posix) {
return new LinuxSystemService(posix);
}
}
| tsegismont/simone | src/main/java/org/rhq/simone/system/SystemServiceFactory.java | Java | apache-2.0 | 1,715 |
// app/routes/form.js
import Ember from 'ember';
export default Ember.Route.extend({
actions: {
willTransition: function(transition) {
// 如果是使用this.get('key')获取不了页面输入值,因为不是通过action提交表单的
var v = this.controller.get('firstName');
// 任意获取一个作为判断表单输入值
if (v && !confirm("你确定要离开这个页面吗??")) {
transition.abort();
} else {
return true;
}
}
}
});
| ubuntuvim/my_emberjs_code | chapter3_routes/app/routes/form.js | JavaScript | apache-2.0 | 532 |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi;
import com.intellij.model.SymbolResolveResult;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import java.util.Collection;
/**
* Inherit this interface if you want the reference to resolve to more than one element,
* or if you want to provide resolve result(s) for a superset of valid resolve cases.
* e.g. in Java references in static context are resolved to non-static methods in case
* there is no valid candidate. isValidResult() in this case should return false
* for later analysis by highlighting pass.
*
* @see PsiPolyVariantReferenceBase
*/
public interface PsiPolyVariantReference extends PsiReference {
/**
* Returns the results of resolving the reference.
*
* @param incompleteCode if true, the code in the context of which the reference is
* being resolved is considered incomplete, and the method may return additional
* invalid results.
*
* @return the array of results for resolving the reference.
*/
ResolveResult @NotNull [] multiResolve(boolean incompleteCode);
@NotNull
@Override
default Collection<? extends SymbolResolveResult> resolveReference() {
ResolveResult[] results = multiResolve(false);
return ContainerUtil.filter(results, it -> it.getElement() != null);
}
}
| leafclick/intellij-community | platform/core-api/src/com/intellij/psi/PsiPolyVariantReference.java | Java | apache-2.0 | 1,460 |
/* General Loops - Abstract Case - Free variable */
function f1() { return 1; }
function f2() { return 2; }
function f3() { return 3; }
var b = !Date.now(); // non-deterministic boolean value.
var o1 = {x: f1, y: f2, z: f3};
var o2 = {};
var arr = [];
if (b) {
arr.push("x");
arr.push("y");
} else {
arr.push("z");
arr.push("y");
arr.push("x");
}
// the array ’arr’ is not concrete when we assume the
// path-insensitive analysis.
var i = arr.length;
while (i--) { // the analysis cannot enumerate all the concrete iterations.
var t = arr[i];
var v = o1[t];
o2[t] = (function(v) {
return function() {
return v;
};
}(v));
}
var result = o2.x() !== f2;
TAJS_assert(result);
// TAJS-determinacy: BoolTop
// LSA: BoolTop
// CompAbs: true | cs-au-dk/TAJS | test-resources/src/micro-different-loop-kinds/AGHeap.js | JavaScript | apache-2.0 | 813 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace ISAAR.MSolve.FEM.Entities
{
public class Cluster
{
private readonly IList<Subdomain> subdomains = new List<Subdomain>();
public IList<Subdomain> Subdomains
{
get { return subdomains; }
}
public int ID { get; set; }
}
}
| GoatOfRafin/MSolve | ISAAR.MSolve.FEM/Entities/Cluster.cs | C# | apache-2.0 | 385 |
<?php
namespace Atos\MissionRecensementBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
* Mission
*
* @ORM\Table()
* @ORM\Entity(repositoryClass="Atos\MissionRecensementBundle\Entity\MissionRepository")
*/
class Mission
{
/**
* @var integer
*
* @ORM\Column(name="id", type="integer")
* @ORM\Id
* @ORM\GeneratedValue(strategy="AUTO")
*/
private $id;
/**
* @var string
*
* @ORM\Column(name="nom", type="string", length=100)
*/
private $nom;
/**
* @var string
*
* @ORM\Column(name="descriptionProcessus", type="string", length=255, nullable=true)
*/
private $descriptionProcessus;
/**
* @var string
*
* @ORM\Column(name="descriptionTechnique", type="string", length=255, nullable=true)
*/
private $descriptionTechnique;
/**
* @var string
*
* @ORM\Column(name="descriptionFonction", type="string", length=255, nullable=true)
*/
private $descriptionFonction;
/**
* @var \Date
*
* @ORM\Column(name="dateDebut", type="date")
*/
private $dateDebut;
/**
* @var \Date
*
* @ORM\Column(name="dateFin", type="date", nullable=true)
*/
private $dateFin;
/**
* @ORM\ManyToOne(targetEntity="Atos\MissionRecensementBundle\Entity\Employe")
*/
private $employe;
/**
* @ORM\ManyToOne(targetEntity="Atos\MissionRecensementBundle\Entity\Client")
*/
private $client;
/**
* @ORM\ManyToOne(targetEntity="Atos\MissionRecensementBundle\Entity\Metier")
*/
private $metier;
/**
* @ORM\ManyToOne(targetEntity="Atos\MissionRecensementBundle\Entity\TypePrestation")
*/
private $typePrestation;
/**
* @ORM\ManyToOne(targetEntity="Atos\MissionRecensementBundle\Entity\Niveau")
*/
private $niveau;
/**
* @ORM\ManyToOne(targetEntity="Atos\MissionRecensementBundle\Entity\Type")
*/
private $type;
/**
* @ORM\ManyToOne(targetEntity="Atos\MissionRecensementBundle\Entity\Domaine")
*/
private $domaine;
/**
* @ORM\ManyToOne(targetEntity="Atos\MissionRecensementBundle\Entity\Domaine")
*/
private $sousDomaine;
/**
* Get id
*
* @return integer
*/
public function getId()
{
return $this->id;
}
/**
* Set descriptionProcessus
*
* @param string $descriptionProcessus
* @return Mission
*/
public function setDescriptionProcessus($descriptionProcessus)
{
$this->descriptionProcessus = $descriptionProcessus;
return $this;
}
/**
* Get descriptionProcessus
*
* @return string
*/
public function getDescriptionProcessus()
{
return $this->descriptionProcessus;
}
/**
* Set descriptionTechnique
*
* @param string $descriptionTechnique
* @return Mission
*/
public function setDescriptionTechnique($descriptionTechnique)
{
$this->descriptionTechnique = $descriptionTechnique;
return $this;
}
/**
* Get descriptionTechnique
*
* @return string
*/
public function getDescriptionTechnique()
{
return $this->descriptionTechnique;
}
/**
* Set descriptionFonction
*
* @param string $descriptionFonction
* @return Mission
*/
public function setDescriptionFonction($descriptionFonction)
{
$this->descriptionFonction = $descriptionFonction;
return $this;
}
/**
* Get descriptionFonction
*
* @return string
*/
public function getDescriptionFonction()
{
return $this->descriptionFonction;
}
/**
* Set dateDebut
*
* @param \Date $dateDebut
* @return Mission
*/
public function setDateDebut($dateDebut)
{
$this->dateDebut = $dateDebut;
return $this;
}
/**
* Get dateDebut
*
* @return \Date
*/
public function getDateDebut()
{
return $this->dateDebut;
}
/**
* Set dateFin
*
* @param \Date $dateFin
* @return Mission
*/
public function setDateFin($dateFin)
{
$this->dateFin = $dateFin;
return $this;
}
/**
* Get dateFin
*
* @return \Date
*/
public function getDateFin()
{
return $this->dateFin;
}
/**
* Set employe
*
* @param \Atos\MissionRecensementBundle\Entity\Employe $employe
* @return Mission
*/
public function setEmploye(\Atos\MissionRecensementBundle\Entity\Employe $employe = null)
{
$this->employe = $employe;
return $this;
}
/**
* Get employe
*
* @return \Atos\MissionRecensementBundle\Entity\Employe
*/
public function getEmploye()
{
return $this->employe;
}
/**
* Set client
*
* @param \Atos\MissionRecensementBundle\Entity\Client $client
* @return Mission
*/
public function setClient(\Atos\MissionRecensementBundle\Entity\Client $client = null)
{
$this->client = $client;
return $this;
}
/**
* Get client
*
* @return \Atos\MissionRecensementBundle\Entity\Client
*/
public function getClient()
{
return $this->client;
}
/**
* Set metier
*
* @param \Atos\MissionRecensementBundle\Entity\Metier $metier
* @return Mission
*/
public function setMetier(\Atos\MissionRecensementBundle\Entity\Metier $metier = null)
{
$this->metier = $metier;
return $this;
}
/**
* Get metier
*
* @return \Atos\MissionRecensementBundle\Entity\Metier
*/
public function getMetier()
{
return $this->metier;
}
/**
* Set typePrestation
*
* @param \Atos\MissionRecensementBundle\Entity\TypePrestation $typePrestation
* @return Mission
*/
public function setTypePrestation(\Atos\MissionRecensementBundle\Entity\TypePrestation $typePrestation = null)
{
$this->typePrestation = $typePrestation;
return $this;
}
/**
* Get typePrestation
*
* @return \Atos\MissionRecensementBundle\Entity\TypePrestation
*/
public function getTypePrestation()
{
return $this->typePrestation;
}
/**
* Set niveau
*
* @param \Atos\MissionRecensementBundle\Entity\Niveau $niveau
* @return Mission
*/
public function setNiveau(\Atos\MissionRecensementBundle\Entity\Niveau $niveau = null)
{
$this->niveau = $niveau;
return $this;
}
/**
* Get niveau
*
* @return \Atos\MissionRecensementBundle\Entity\Niveau
*/
public function getNiveau()
{
return $this->niveau;
}
/**
* Set type
*
* @param \Atos\MissionRecensementBundle\Entity\Type $type
* @return Mission
*/
public function setType(\Atos\MissionRecensementBundle\Entity\Type $type = null)
{
$this->type = $type;
return $this;
}
/**
* Get type
*
* @return \Atos\MissionRecensementBundle\Entity\Type
*/
public function getType()
{
return $this->type;
}
/**
* Set nom
*
* @param string $nom
* @return Mission
*/
public function setNom($nom)
{
$this->nom = $nom;
return $this;
}
/**
* Get nom
*
* @return string
*/
public function getNom()
{
return $this->nom;
}
/**
* Get date interval.
*
* @return DateInterval
*/
public function getDateInterval()
{
$start = $this->getDateDebut();
$end = $this->getDateFin();
if($end)
{
return $start->diff($end);
}
return null;
}
/**
* Get duree annees.
*
* @return integer
*/
public function getDureeAnnees()
{
$interval = $this->getDateInterval();
if($interval)
{
return $interval->y;
}
}
/**
* Get duree mois.
*
* @return integer
*/
public function getDureeMois()
{
$interval = $this->getDateInterval();
if($interval)
{
return $interval->m;
}
}
/**
* Set domaine
*
* @param \Atos\MissionRecensementBundle\Entity\Domaine $domaine
* @return Mission
*/
public function setDomaine(\Atos\MissionRecensementBundle\Entity\Domaine $domaine = null)
{
$this->domaine = $domaine;
return $this;
}
/**
* Get domaine
*
* @return \Atos\MissionRecensementBundle\Entity\Domaine
*/
public function getDomaine()
{
return $this->domaine;
}
/**
* Set sousDomaine
*
* @param \Atos\MissionRecensementBundle\Entity\Domaine $sousDomaine
* @return Mission
*/
public function setSousDomaine(\Atos\MissionRecensementBundle\Entity\Domaine $sousDomaine = null)
{
$this->sousDomaine = $sousDomaine;
return $this;
}
/**
* Get sousDomaine
*
* @return \Atos\MissionRecensementBundle\Entity\Domaine
*/
public function getSousDomaine()
{
return $this->sousDomaine;
}
}
| kfrapin/mission-recensement | src/Atos/MissionRecensementBundle/Entity/Mission.php | PHP | apache-2.0 | 9,462 |
function f() { return 42}
TAJS_assert(f.call.call.call(f) === 42)
TAJS_assert(f.apply.apply.apply(f) === 42)
TAJS_assert(Function.prototype.apply.apply(f) == 42)
TAJS_dumpValue(f.apply.apply.apply(f))
function g(x) { return x + 1}
TAJS_assert(Function.prototype.call(g,45) === undefined)
TAJS_assert(isNaN(g.call.call(g,45)))
TAJS_dumpValue(g.call.call(g,45))
TAJS_assert(Function.prototype.call.call(g, null, 87), 'isMaybeNumUInt||isMaybeNaN');
TAJS_dumpValue(Function.prototype.call.call(g,null,87)); | cs-au-dk/TAJS | test-resources/src/micro/test109.js | JavaScript | apache-2.0 | 514 |
#-*- coding: utf-8 -*-
'''
Created on Jul 4, 2013
@author: jin
'''
from django.contrib import admin
from apps.agent.models import Client, RecommendRecord
class ClientAdmin(admin.ModelAdmin):
search_fields = ('username','user_username','IDCard')
class RecommendRecordAdmin(admin.ModelAdmin):
search_fields = ('user_username','client_username')
admin.site.register(Client,ClientAdmin)
admin.site.register(RecommendRecord,RecommendRecordAdmin)
| SnailJin/house | apps/agent/admin.py | Python | apache-2.0 | 461 |
package com.ibuildapp.romanblack.CustomFormPlugin.xmlparser;
/**
* Entity class that form submit button.
*/
public class FormButton {
private String label = "";
/**
* Returns the button label.
* @return the label
*/
public String getLabel() {
return label;
}
/**
* Sets the button label.
* @param label the label to set
*/
public void setLabel(String label) {
this.label = label;
}
}
| iBuildApp/android_module_CustomForm | src/main/java/com/ibuildapp/romanblack/CustomFormPlugin/xmlparser/FormButton.java | Java | apache-2.0 | 464 |
/*
* Copyright (c) Created by Cody.yi on 2016/8/26.
*/
package com.cody.app.framework.fragment;
import android.databinding.ViewDataBinding;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.cody.handler.framework.presenter.Presenter;
import com.cody.handler.framework.viewmodel.HeaderViewModel;
import com.cody.handler.framework.viewmodel.IWithHeaderViewModel;
/**
* A simple {@link Fragment} subclass.
*/
public abstract class WithHeaderFragment<P extends Presenter<VM>, VM extends IWithHeaderViewModel, B extends
ViewDataBinding> extends BaseBindingFragment<P, VM, B> {
/**
* 创建标题
* 返回空或者默认的HeaderViewModel不会显示头部,必须设置头部的visible
*
* @see HeaderViewModel#setVisible
*/
protected abstract void initHeader(HeaderViewModel header);
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
View view = super.onCreateView(inflater, container, savedInstanceState);
initHeader(getViewModel().getHeaderViewModel());
return view;
}
}
| codyer/CleanFramework | app/src/main/java/com/cody/app/framework/fragment/WithHeaderFragment.java | Java | apache-2.0 | 1,317 |
require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
Bundler.require(*Rails.groups)
require "elements"
module TestApp
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
end
end
| josetonyp/elements | spec/test_app/config/application.rb | Ruby | artistic-2.0 | 1,226 |
<?php
/*
mod_siokara : Pixmicat! siokara management subset (Alpha)
by: scribe
*/
class mod_siokara extends ModuleHelper {
private $mypage;
private $LANGUAGE=array(
'zh_TW' => array(
'siokara_admin_fsage' => '強制sage',
'siokara_admin_ufsage' => '解除強制sage',
'siokara_admin_htmb' => '替換縮圖',
'siokara_admin_uhtmb' => '解除替換縮圖',
'siokara_admin_agif' => '替換縮圖為靜態縮圖',
'siokara_admin_uagif' => '解除替換靜態縮圖',
'siokara_extra_opt' => '附加選項',
'siokara_anigif' => '動態GIF',
'siokara_warn_sage' => '此討論串已被強制sage。',
'siokara_warn_hidethumb' => '縮圖已被替換。'
),
'ja_JP' => array(
'siokara_admin_fsage' => '強制sage',
'siokara_admin_ufsage' => '強制sage解除',
'siokara_admin_htmb' => 'サムネイル差替',
'siokara_admin_uhtmb' => 'サムネイル差替解除',
'siokara_admin_agif' => 'GIFをサムネイル化する',
'siokara_admin_uagif' => 'GIFサムネイル化解除',
'siokara_extra_opt' => '余分なオプション',
'siokara_anigif' => 'GIFアニメ',
'siokara_warn_sage' => 'このスレは管理者によりsage指定されています。理由はお察しください。',
'siokara_warn_hidethumb' => 'この記事の画像は管理者によりサムネイルが差し替えられています。理由はお察しください。<br/>サムネイルをクリックすると元の画像を表示します。'
),
'en_US' => array(
'siokara_admin_fsage' => 'Force sage',
'siokara_admin_ufsage' => 'unForce sage',
'siokara_admin_htmb' => 'Replace thumbnail with nothumb image',
'siokara_admin_uhtmb' => 'Use orginal thumbnail',
'siokara_admin_agif' => 'Use still image of GIF image',
'siokara_admin_uagif' => 'Use Animated GIF',
'siokara_extra_opt' => 'Extra Options',
'siokara_anigif' => 'Animated GIF',
'siokara_warn_sage' => 'This thread was forced sage by administrator.',
'siokara_warn_hidethumb' => 'The thumbnail was replaced by administrator.'
)
);
public function __construct($PMS) {
parent::__construct($PMS);
$this->mypage = $this->getModulePageURL();
$this->attachLanguage( $this->LANGUAGE); // 載入語言檔
}
/* Get the name of module */
public function getModuleName(){
return 'mod_siokara : しおから式管理擴充套件';
}
/* Get the module version infomation */
public function getModuleVersionInfo(){
return 'v140531';
}
public function autoHookAdminList(&$modFunc, $post, $isres){
$FileIO = PMCLibrary::getFileIOInstance();
extract($post);
$fh=new FlagHelper($status);
if(!$isres) $modFunc .= '[<a href="'.$this->mypage.'&no='.$no.'&action=sage"'.($fh->value('asage')?' title="'.$this->_T('siokara_admin_ufsage').'">s':' title="'.$this->_T('siokara_admin_fsage').'">S').'</a>]';
if($ext && $FileIO->imageExists($tim.$ext)) {
$modFunc .= '[<a href="'.$this->mypage.'&no='.$no.'&action=thumb"'.($fh->value('htmb')?' title="'.$this->_T('siokara_admin_uhtmb').'">t':' title="'.$this->_T('siokara_admin_htmb').'">T').'</a>]';
if($ext == '.gif') $modFunc .= '[<a href="'.$this->mypage.'&no='.$no.'&action=agif"'.($fh->value('agif')?' title="'.$this->_T('siokara_admin_agif').'">g':' title="'.$this->_T('siokara_admin_uagif').'">G').'</a>]';
}
}
public function autoHookPostForm(&$form){
$form .= '<tr><td class="Form_bg"><b>'.$this->_T('siokara_extra_opt').'</b></td><td>[<input type="checkbox" name="anigif" id="anigif" value="on" /><label for="anigif">'.$this->_T('siokara_anigif').'</label>]</td></tr>';
}
public function autoHookThreadPost(&$arrLabels, $post, $isReply){
$PIO = PMCLibrary::getPIOInstance();
$FileIO = PMCLibrary::getFileIOInstance();
$fh = new FlagHelper($post['status']);
if($fh->value('asage')) { // 強制sage
if($arrLabels['{$COM}']) $arrLabels['{$WARN_ENDREPLY}'].='<br/><span class="warn_txt"><small>'.$this->_T('siokara_warn_sage').'<br/></small></span>';
else $arrLabels['{$WARN_ENDREPLY}'] = '<span class="warn_txt"><small>'.$this->_T('siokara_warn_sage').'<br/></small></span>';
}
if($FileIO->imageExists($post['tim'].$post['ext'])) {
if($fh->value('agif')) { // 動態GIF
$imgURL = $FileIO->getImageURL($post['tim'].$post['ext']);
$arrLabels['{$IMG_SRC}']=preg_replace('/<img src=".*"/U','<img src="'.$imgURL.'"',$arrLabels['{$IMG_SRC}']);
$arrLabels['{$IMG_BAR}'].='<small>['.$this->_T('siokara_anigif').']</small>';
}
if($fh->value('htmb')) { // 替換縮圖
$arrLabels['{$IMG_SRC}']=preg_replace('/<img src=".*" style="width: \d+px; height: \d+px;"/U','<img src="nothumb.gif"',$arrLabels['{$IMG_SRC}']);
$arrLabels['{$COM}'].='<br/><br/><span class="warn_txt"><small>'.$this->_T('siokara_warn_hidethumb').'<br/></small></span>';
}
}
}
public function autoHookThreadReply(&$arrLabels, $post, $isReply){
$this->autoHookThreadPost($arrLabels, $post, $isReply);
}
public function autoHookRegistBeforeCommit(&$name, &$email, &$sub, &$com, &$category, &$age, $dest, $isReply, $imgWH, &$status){
$PIO = PMCLibrary::getPIOInstance();
$fh = new FlagHelper($status);
$size = @getimagesize($dest);
if(isset($_POST['anigif']) && ($size[2] == 1)) { // 動態GIF
$fh->toggle('agif');
$status = $fh->toString();
}
if($isReply) {
$rpost = $PIO->fetchPosts($isReply); // 強制sage
$rfh = new FlagHelper($rpost[0]['status']);
if($rfh->value('asage')) $age = false;
}
}
/*
function _loadLanguage() {
global $language;
if(PIXMICAT_LANGUAGE != 'zh_TW' && PIXMICAT_LANGUAGE != 'ja_JP' && PIXMICAT_LANGUAGE != 'en_US') $lang = 'en_US';
else $lang = PIXMICAT_LANGUAGE;
// builtin language strings
if($lang == 'zh_TW') {
$language['siokara_admin_fsage'] = '強制sage';
$language['siokara_admin_ufsage'] = '解除強制sage';
$language['siokara_admin_htmb'] = '替換縮圖';
$language['siokara_admin_uhtmb'] = '解除替換縮圖';
$language['siokara_admin_agif'] = '替換縮圖為靜態縮圖';
$language['siokara_admin_uagif'] = '解除替換靜態縮圖';
$language['siokara_extra_opt'] = '附加選項';
$language['siokara_anigif'] = '動態GIF';
$language['siokara_warn_sage'] = '此討論串已被強制sage。';
$language['siokara_warn_hidethumb'] = '縮圖已被替換。';
} else if($lang == 'ja_JP'){
$language['siokara_admin_fsage'] = '強制sage';
$language['siokara_admin_ufsage'] = '強制sage解除';
$language['siokara_admin_htmb'] = 'サムネイル差替';
$language['siokara_admin_uhtmb'] = 'サムネイル差替解除';
$language['siokara_admin_agif'] = 'GIFをサムネイル化する';
$language['siokara_admin_uagif'] = 'GIFサムネイル化解除';
$language['siokara_extra_opt'] = '余分なオプション';
$language['siokara_anigif'] = 'GIFアニメ';
$language['siokara_warn_sage'] = 'このスレは管理者によりsage指定されています。理由はお察しください。';
$language['siokara_warn_hidethumb'] = 'この記事の画像は管理者によりサムネイルが差し替えられています。理由はお察しください。<br/>サムネイルをクリックすると元の画像を表示します。';
} else if($lang == 'en_US'){
$language['siokara_admin_fsage'] = 'Force sage';
$language['siokara_admin_ufsage'] = 'unForce sage';
$language['siokara_admin_htmb'] = 'Replace thumbnail with nothumb image';
$language['siokara_admin_uhtmb'] = 'Use orginal thumbnail';
$language['siokara_admin_agif'] = 'Use still image of GIF image';
$language['siokara_admin_uagif'] = 'Use Animated GIF';
$language['siokara_extra_opt'] = 'Extra Options';
$language['siokara_anigif'] = 'Animated GIF';
$language['siokara_warn_sage'] = 'This thread was forced sage by administrator.';
$language['siokara_warn_hidethumb'] = 'The thumbnail was replaced by administrator.';
}
// external language file
if(file_exists($langfile=str_replace('.php','.lang.php',__FILE__))) include_once($langfile);
}
*/
public function ModulePage(){
$PIO = PMCLibrary::getPIOInstance();
$FileIO = PMCLibrary::getFileIOInstance();
if(!adminAuthenticate('check')) die('403 Access denied');
$act=isset($_GET['action'])?$_GET['action']:'';
switch($act) {
case 'sage'; // 強制sage
if($PIO->isThread($_GET['no'])) {
$post = $PIO->fetchPosts($_GET['no']);
if(!count($post)) die('[Error] Post does not exist.');
$flgh = $PIO->getPostStatus($post[0]['status']);
$flgh->toggle('asage');
$PIO->setPostStatus($post[0]['no'], $flgh->toString());
$PIO->dbCommit();
//die('Done. Please go back.');
die('Done. Please go <script type="text/javascript"> document.write("<"+"a href=\""+document.referrer+"\">back</a>."); </script><noscript>back.</noscript> ');
} else die('[Error] Thread does not exist.');
break;
case 'thumb'; // 替換縮圖
$post = $PIO->fetchPosts($_GET['no']);
if(!count($post)) die('[Error] Post does not exist.');
if($post[0]['ext']) {
if(!$FileIO->imageExists($post[0]['tim'].$post[0]['ext'])) die('[Error] attachment does not exist.');
$flgh = $PIO->getPostStatus($post[0]['status']);
$flgh->toggle('htmb');
$PIO->setPostStatus($post[0]['no'], $flgh->toString());
$PIO->dbCommit();
//die('Done. Please go back.');
die('Done. Please go <script type="text/javascript"> document.write("<"+"a href=\""+document.referrer+"\">back</a>."); </script><noscript>back.</noscript> ');
} else die('[Error] Post does not have attechment.');
break;
case 'agif'; // 動態GIF
$post = $PIO->fetchPosts($_GET['no']);
if(!count($post)) die('[Error] Post does not exist.');
if($post[0]['ext'] && $post[0]['ext'] == '.gif') {
if(!$FileIO->imageExists($post[0]['tim'].$post[0]['ext'])) die('[Error] attachment does not exist.');
$flgh = $PIO->getPostStatus($post[0]['status']);
$flgh->toggle('agif');
$PIO->setPostStatus($post[0]['no'], $flgh->toString());
$PIO->dbCommit();
die('Done. Please go <script type="text/javascript"> document.write("<"+"a href=\""+document.referrer+"\">back</a>."); </script><noscript>back.</noscript> ');
} else die('[Error] Post does not have attechment.');
break;
}
}
} | miyorineko/pixmicat-materialize | module/mod_siokara.php | PHP | artistic-2.0 | 10,573 |
<?php
class view_conf extends admin_common
{
public $message = false;
private function set_tinymce_vars($section=array())
{
$tmp = 'var all_tinymce=" ';
//$tmpA = array('tinymce');
$tmp .='";';
$tmp .= 'var content_css_set="'.make_path("css","admin","css",false,true).'";';
global $conf;
$tmp .= 'var main_base_path="'.$conf->main_base_path.'"';
$this->set_js_code($tmp);
}
private function js_for_page($conf_data)
{
$js = "";
global $conf;
$this->set_tinymce_vars();
$js .= <<<EOF
jQuery(document).ready(function(){
EOF;
foreach($conf_data as $key=>$val)
{
if ($val['type'] == 'tinymce')
{
$js.= 'jQuery("#'. $val['key'] .'").editInPlace({
url: "conf",
show_buttons: true,
value_required: true,
field_type: "textarea",
params: "action=update",
delegate: delegateeip,
use_html: true
}); '."\n";
} else {
$js .= 'jQuery("#'.$val['key'].'").editInPlace({
url: "conf",
show_buttons: true,
value_required: true,
field_type: "'.$val['type'].'",
params: "action=update",
delegate: delegatebutton,
use_html: true
});'."\n";
}
$js.= 'jQuery("#desc_'. $val['key'] .'").editInPlace({
url: "conf",
show_buttons: true,
value_required: true,
field_type: "textarea",
params: "action=update_description",
delegate: delegateeip
}); '."\n";
}
$js.='});';
return $js;
}
public function display_result($result)
{
echo $result;
}
public function run($conf_data)
{
$this->set_title(_("Configuration"));
$title=$this->req;
$this->set_css_code('table { border-collapse: collapse;}');
$this->set_css(array('admin',"jquery.growl",$this->req,'jquery-ui.min'));
$this->set_js(array('tinymce_var',"jquery.growl",'admin','jquery','jquery-ui.min','jquery-migrate-1.2.1','tinymce/tinymce.min','jquery.editinplace','tinymce/jquery.tinymce.min','validation/jquery.validate.min','validation/additional-methods.min',$this->req,));
$this->set_js_code($this->js_for_page($conf_data));
$this->set_extra_render('conf_data', $conf_data);
global $lang,$conf;
$tmpLang = substr($lang,0,2);
if ($tmpLang != "en")
{
if (is_file(make_path('js', "validation/localization/messages_".$lang, "js")))
{
$this->set_js('validation/localization/messages_'.$lang.'.js');
} elseif (is_file(make_path('js', "validation/localization/messages_".$tmpLang, "js")))
{
$this->set_js('validation/localization/messages_'.$tmpLang.'.js');
} else {
$tmpLang = substr($conf->default_lang,0,2);
if (is_file(make_path('js', "validation/localization/messages_".$conf->default_lang, "js")))
{
$this->set_js('validation/localization/messages_'.$conf->default_lang.'.js');
} else {
$this->set_js('validation/localization/messages_'.$tmpLang.'.js');
}
}
}
$this->set_js(array (
'tinymce/themes/modern/theme.min',
'tinymce/plugins/pagebreak/plugin.min',
'tinymce/plugins/layer/plugin.min',
'tinymce/plugins/table/plugin.min',
'tinymce/plugins/save/plugin.min',
'tinymce/plugins/emoticons/plugin.min',
'tinymce/plugins/insertdatetime/plugin.min',
'tinymce/plugins/preview/plugin.min',
'tinymce/plugins/media/plugin.min',
'tinymce/plugins/searchreplace/plugin.min',
'tinymce/plugins/print/plugin.min',
'tinymce/plugins/contextmenu/plugin.min',
'tinymce/plugins/paste/plugin.min',
'tinymce/plugins/directionality/plugin.min',
'tinymce/plugins/fullscreen/plugin.min',
'tinymce/plugins/hr/plugin.min',
'tinymce/plugins/wordcount/plugin.min',
'tinymce/plugins/noneditable/plugin.min',
'tinymce/plugins/visualchars/plugin.min',
'tinymce/plugins/nonbreaking/plugin.min',
'tinymce/plugins/template/plugin.min',
'tinymce/plugins/image/plugin.min',
'tinymce/plugins/code/plugin.min'));
global $lang,$conf;
$tmpLang = substr($lang,0,2);
if (is_file(make_path('js', "tinymce/langs/".$lang, "js")))
{
$this->set_js('tinymce/langs/'.$lang.'.js');
} elseif (is_file(make_path('js', "tinymce/langs/".$tmpLang, "js")))
{
$this->set_js('tinymce/langs/'.$tmpLang.'.js');
} else {
$tmpLang = substr($conf->default_lang,0,2);
if (is_file(make_path('js', "tinymce/langs/".$conf->default_lang, "js")))
{
$this->set_js('tinymce/langs/'.$conf->default_lang.'.js');
} else {
$this->set_js('tinymce/langs/'.$tmpLang.'.js');
}
}
if ($this->message)
{
$this->set_js_code('
jQuery(document).ready(function(){
jQuery.growl({
title: "'._("Résultat").'",
message: "'.$this->message.'",
location: "tr",
duration: 3200
});
});
');
}
echo $this->gen_page();
}
} | eos-13/YaCMS | main/admin/view/conf.php | PHP | artistic-2.0 | 6,240 |
#!/usr/bin/env python
"""
Created by _UserName_
11/28/2013
"""
import Connect, DB
import uuid
def Register():
# Generate A Unique Idenfifier
ident = uuid.uuid4().hex
# Allow For Database Manipulation
database = DB.DB()
# Create Database
database.create()
# Add Self To Database
externalIP = "127.0.0.1"
database.personalInfo(ident, externalIP)
| Us3rNam/F2F-Network | Register.py | Python | artistic-2.0 | 398 |
/*
* Copyright (c) 2011 Tamas Cservenak. All rights reserved.
*
* <tamas@cservenak.com>
* http://www.cservenak.com/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.speakeasy.skyengine.resources.io.lzma.streams.cs;
import java.io.IOException;
import java.io.OutputStream;
public class CoderOutputStream
extends OutputStream {
private final CoderThread ct;
private OutputStream out;
protected CoderOutputStream(final OutputStream out, final Coder coder)
throws IOException {
this.ct = new CoderThread(coder, out);
this.out = ct.getOutputStreamSink();
this.ct.start();
}
public void write(int b)
throws IOException {
out.write(b);
}
public void write(byte b[])
throws IOException {
write(b, 0, b.length);
}
public void write(byte b[], int off, int len)
throws IOException {
if ((off | len | (b.length - (len + off)) | (off + len)) < 0) {
throw new IndexOutOfBoundsException();
}
out.write(b, off, len);
}
public void flush()
throws IOException {
out.flush();
}
public void close()
throws IOException {
try {
flush();
} catch (IOException ignored) {
// why do we swallow exception here?!
}
out.close();
try {
ct.join();
} catch (InterruptedException e) {
throw new IOException(e);
}
ct.checkForException();
}
}
| speakeasy/SKYEngine | src/com/speakeasy/skyengine/resources/io/lzma/streams/cs/CoderOutputStream.java | Java | artistic-2.0 | 2,095 |
"""
hmmer module
"""
from __future__ import print_function
from mungo.mungoCore import *
import blast, sequence
from mungo.useful import smartopen, extractRootName, ClassFromDict, warnDeprecated
import sys, re, warnings
hmmer2frame = {0: 1, 1: 2, 2: 3, 3: -1, 4: -2, 5: -3}
frame2hmmer = dict([(v,k) for k,v in hmmer2frame.iteritems()])
def HmmerFile(iFileHandle, **kw):
"Factory function returning a HmmerFileReader"
return HmmerReader(iFileHandle, **kw)
class HmmerReader(AbstractDataReader):
def __init__(self, iFileHandle, seqType=None, eValueCutoff=None, scoreCutoff=None):
super(HmmerReader, self).__init__(iFileHandle)
self.seqType = seqType
self.eValueCutoff = eValueCutoff
self.scoreCutoff = scoreCutoff
def _generator(self):
"""Return an iterator to a HMMer file."""
if self.seqType in [Domain, SixFrameDomain, BlockSixFrameDomain, OrfDomain, OrfDomain2]:
_Domain = self.seqType
elif self.seqType=='SixFrame':
_Domain = SixFrameDomain
elif self.seqType=='BlockSixFrame':
_Domain = BlockSixFrameDomain
elif self.seqType=='ORFs':
_Domain = OrfDomain
else:
_Domain = Domain
startToken = '^Parsed for domains'
endToken = '^Alignments of top-scoring domains'
abortToken = '\[no hits above thresholds\]'
startRegex = re.compile(startToken)
if not jumpToMatch(self.iFile, startRegex):
raise Exception('No match found. File may be empty.')
# 3. Parse domain details
line = self.iFile.next()
line = self.iFile.next()
endRegex = re.compile(endToken)
abortRegex = re.compile(abortToken)
domains = []
for line in self.iFile:
line = line.strip()
if endRegex.match(line) or abortRegex.match(line):
break
elif not line:
continue
tokens = line.split()
d = _Domain(dict(zip(Domain.attributes[1:], tokens)))
if (self.eValueCutoff and d.eValue>self.eValueCutoff) or \
(self.scoreCutoff and d.score<self.scoreCutoff): continue
yield d
class PfamReader(AbstractDataReader):
def __init__(self, iFileHandle, eValueCutoff=None, scoreCutoff=None):
super(PfamReader, self).__init__(iFileHandle)
self.eValueCutoff = eValueCutoff
self.scoreCutoff = scoreCutoff
def _generator(self):
pass
class Domain(AbstractFeature):
"""Domain feature class"""
attributes = ['domain', 'accession', 'count', 'sStart', 'sEnd',
'sCode', 'qStart', 'qEnd', 'qCode', 'score', 'eValue']
converters = zip(
['qStart','qEnd','sStart','sEnd','score','eValue'],
[int,int,int,int,float,float])
format = attributesToFormat(attributes)
def __init__(self, *args, **kw):
"""Constructor:
@param args: HMMer field values
@type args: list, dict, Domain
Optional keywords:
@keyword domain: Domain name
@keyword accession: Subject name
@keyword count: Id/total hits on subject
@keyword sStart:
@keyword sEnd:
@keyword sCode:
@keyword qStart:
@keyword qEnd:
@keyword qCode:
@keyword score: Bit score
@keyword eValue:
"""
super(Domain, self).__init__(*args, **kw)
self.genomic = False
def __repr__(self):
d = {}
for k,v in self.__dict__.iteritems():
d[k] = v
return self.format % d
def getTokens(self):
return [self.__dict__[key] for key in self.attributes]
def addAttribute(self, attribute, default=None):
self.attributes.append(attribute)
self.format = self.format + '\t%%(%s)s' % attribute
self.__dict__[attribute] = default
def addStrandAttribute(self, strand=None):
self.addAttribute('strand', strand)
def swapStartEnd(self):
if self.sStart>self.sEnd:
self.sStart,self.sEnd = self.sEnd,self.sStart
def getSequence(self, blastdb, getAll=False, convertAccession=lambda x: x):
if getAll:
start = 0
end = 0
else:
start = self.sStart
end = self.sEnd
accession = convertAccession(self.accession)
h,s = blast.getSequence(blastdb, accession, start, end)
return h,s
@staticmethod
def fromGenomic(tokens):
strand = tokens[-1]
d = Domain(tokens[0:-1])
d.genomic = True
d.addStrandAttribute(strand)
return d
class OrfDomain(Domain):
def toGenomic(self, orfStart, orfStrand, doSwapStartEnd=True):
"""Convert from ORF to genomic coordinates."""
self.genomic = True
self.sStart,self.sEnd = convertOrfToGenomic(
self.sStart, self.sEnd, orfStrand, orfStart)
self.addStrandAttribute(orfStrand)
if doSwapStartEnd:
self.swapStartEnd()
class OrfDomain2(Domain):
"ORF domain class for use with my ORF files"
def toGenomic(self, doSwapStartEnd=True):
"""Convert from ORF to genomic coordinates."""
self.genomic = True
o = parseOrfHeader(self.accession)
self.sStart,self.sEnd = convertOrfToGenomic(
self.sStart, self.sEnd, o.strand, o.start)
self.addStrandAttribute(o.strand)
if doSwapStartEnd:
self.swapStartEnd()
class SixFrameDomain(Domain):
def toGenomic(self, L, doSwapStartEnd=True):
"""Convert from 6 frame to genomic coordinates.
@param L: Length of DNA sequence.
"""
self.genomic = True
o = parseSixFrameHeader(self.accession)
self.sStart,self.sEnd = convertSixFrameToGenomic(
self.sStart, self.sEnd, o.frame, L)
self.accession = o.name
self.strand = o.strand
self.addStrandAttribute(o.strand)
if doSwapStartEnd:
self.swapStartEnd()
def toBlockCoords(self, L=1e99, blockSize=5000000, delimiter='.'):
self.accession, self.sStart, self.sEnd = \
blast.genomeToBlock(
self.accession, self.sStart, self.sEnd, L=L,
blockSize=blockSize, delimiter=delimiter)
def getSequenceFromString(self, seq):
s = seq[self.sStart-1:self.sEnd]
if self.strand=='-':
s = sequence.reverseComplement(s)
return s
def getSequence(self, blastDb, padFivePrime=0, padThreePrime=0):
if self.genomic:
start = max(1,self.sStart-padFivePrime)
end = self.sEnd+padThreePrime
h,s = blast.getSequence(blastDb, self.accession, start, end, self.strand)
else:
raise Exception('You must call the toGenomic method first.')
return h,s
class BlockSixFrameDomain(Domain):
def toGenomic(self, relative=False, doSwapStartEnd=True, relDelimiter=':'):
"""Convert from 6 frame to genomic coordinates."""
self.genomic = True
chrom,blockStart,blockEnd,gStart,gEnd,strand = \
convertBlockSixFrameToGenomic(
self.accession, self.sStart, self.sEnd)
if relative:
self.accession = '%s%s%i-%i' % (chrom,relDelimiter,blockStart,blockEnd)
self.sStart = gStart
self.sEnd = gEnd
else:
self.accession = chrom
self.sStart = blockStart + gStart - 1
self.sEnd = blockStart + gEnd - 1
self.addStrandAttribute(strand)
if doSwapStartEnd:
self.swapStartEnd()
class GenomicDomain(AbstractFeature):
"""GenomicDomain feature class"""
attributes = ['domain', 'accession', 'count', 'sStart', 'sEnd',
'sCode', 'qStart', 'qEnd', 'qCode', 'score', 'eValue']
converters = zip(
['qStart','qEnd','sStart','sEnd','score','eValue'],
[int,int,int,int,float,float])
format = attributesToFormat(attributes)
def __init__(self, *args, **kw):
"""Constructor:
@param args: HMMer field values
@type args: list, dict, Domain
Optional keywords:
@keyword domain: Domain name
@keyword accession: Subject name
@keyword count: Id/total hits on subject
@keyword sStart:
@keyword sEnd:
@keyword sCode:
@keyword qStart:
@keyword qEnd:
@keyword qCode:
@keyword score: Bit score
@keyword eValue:
@keyword strand:
"""
super(GenomicDomain, self).__init__(*args, **kw)
def toDict(self):
return self.__dict__
def toList(self):
return self.__dict__.items()
def __repr__(self):
try:
d = {}
for k,v in self.__dict__.iteritems():
d[k] = v
return self.format % d
except:
return str(self.__dict__)
def toBlockCoords(self, L=1e99, blockSize=5000000, delimiter='.'):
self.accession, self.sStart, self.sEnd = \
blast.genomeToBlock(
self.accession, self.sStart, self.sEnd, L=L,
blockSize=blockSize, delimiter=delimiter)
def getSequence(self, blastDb, padFivePrime=0, padThreePrime=0):
start = max(1,self.sStart-padFivePrime)
end = self.sEnd+padThreePrime
h,s = blast.getSequence(blastDb, self.accession, start, end, self.strand)
return h,s
def loadGenomicDomains(filename):
data = []
gene = []
for line in open(filename):
line = line.strip()
if not line:
continue
elif line[0] in ['#', '>']:
if gene:
data.append(gene)
gene = []
else:
tokens = line.split('\t')
d = GenomicDomain(tokens)
gene.append(d)
data.append(gene)
return data
def jumpToMatch(iFile, regex):
"""Jump to regex match in file.
@param iFile: File object
@param regex: Compiled regex object
@return: True if successful, False otherwise
"""
for line in iFile:
if regex.match(line):
return True
return False
def extractUptoMatch(iFile, regex):
"""Extract up to regex match from file.
@param iFile: File object
@param regex: Compiled regex object
@return: string
"""
block = []
for line in iFile:
if regex.match(line):
break
else:
block.append(line.rstrip())
return block
def parseSixFrameHeader(header):
"""Parse a 6 frame header (from translate or python).
@param header: Six frame header "<name>:<frame>" or "<name>.<start>-<end>:<frame>"
(assumes input frame is hmmer frame (0-5)).
@return: a simple class with attributes name, start, end, strand and frame.
"""
header = header.strip()
regex = re.compile(
'(?P<name>\w+)([\.|:](?P<start>\d+)[-|,](?P<end>\d+))?:(?P<frame>[0-5])')
rs = regex.search(header)
d = rs.groupdict()
d['frame'] = hmmer2frame[int(d['frame'])]
if d['frame']>0:
d['strand'] = '+'
else:
d['strand'] = '-'
try:
d['start'] = int(d['start'])
d['end'] = int(d['end'])
except:
pass
return ClassFromDict(d)
def parseOrfHeader(header):
"""Parse an ORF header (from extractORFs.py).
@param header: ORF header "<name>.<orfId>.<start>-<end> Length=<length>"
(Length optional).
@return: a simple class with attributes name, start, end, strand and length.
"""
regex = re.compile(
'(?P<name>\w+)\.(?P<orfId>\d+)\.(?P<start>\d+)-(?P<end>\d+)(\SLength=(?P<length>\d+))?')
rs = regex.match(header.strip())
d = rs.groupdict()
try:
d['start'] = int(d['start'])
d['end'] = int(d['end'])
d['length'] = int(d['length'])
except:
pass
if d['start']>d['end']:
d['strand'] = '-'
else:
d['strand'] = '+'
return ClassFromDict(d)
def convertSixFrameToGenomic(start, end, frame, L):
"""Convert 6 frame coords to genomic.
@param start: Amino acid start coord
@param end: Amino acid end coord
@param frame: Frame
@param L: Nucleotide seq length
@return: (gStart, gEnd, strand)
"""
if frame>=0:
gStart = 3*(start-1)+(frame-1)+1
gEnd = 3*(end-1)+(frame-1)+3
else:
gStart = L-(3*(start-1)+abs(frame)-1)
gEnd = L-(3*(end-1)+abs(frame)+1)
return gStart,gEnd
def convertBlockSixFrameToGenomic(block, start, end):
    """Convenience wrapper: extract the block start/end and frame from a
    six-frame block accession and convert domain coords to genomic coords.

    Accepted accession forms:
        <chrom>.<blockStart>-<blockEnd>:<frame>
        <chrom>:<blockStart>-<blockEnd>:<frame>

    @param block: Block accession ("<name>.<blockStart>-<blockEnd>:<frame>")
    @param start: Domain start (amino acids)
    @param end: Domain end (amino acids)
    @return: (chrom, blockStart, blockEnd, gStart, gEnd, strand)
    @raise Exception: if the accession matches neither form.
    """
    # (Removed a large block of commented-out regex experiments that
    # duplicated the token-splitting logic below.)
    tokens = block.split(":")
    if len(tokens) == 2:
        # <chrom>.<blockStart>-<blockEnd>:<frame>
        hmmerFrame = tokens[1]
        tokens = tokens[0].split(".")
        chrom = tokens[0]
        blockStart, blockEnd = tokens[1].split("-")
    elif len(tokens) == 3:
        # <chrom>:<blockStart>-<blockEnd>:<frame>
        chrom = tokens[0]
        blockStart, blockEnd = tokens[1].split("-")
        hmmerFrame = tokens[2]
    else:
        print(tokens, file=sys.stderr)
        raise Exception("Don't know what to do")
    blockStart = int(blockStart)
    blockEnd = int(blockEnd)
    # Block length in nucleotides, needed for reverse-strand arithmetic.
    L = blockEnd - blockStart + 1
    hmmerFrame = int(hmmerFrame)
    frame = hmmer2frame[hmmerFrame]
    strand = '+' if frame > 0 else '-'
    gStart, gEnd = convertSixFrameToGenomic(start, end, frame, L)
    return chrom, blockStart, blockEnd, gStart, gEnd, strand
def convertGenomicToBlockCoords(domain, chrLen, blockSize=5000000, delimiter='.'):
    """Convert a domain's genomic coordinates into block coordinates.

    Mutates *domain* in place (accession, sStart, sEnd) and returns it.

    @param domain: object with accession/sStart/sEnd attributes
    @param chrLen: chromosome length in nucleotides
    @param blockSize: block size in nucleotides (default 5 Mb)
    @param delimiter: separator used when building the block accession
    @return: the same (mutated) domain object
    """
    # Delegates to blast.genomeToBlock -- presumably rewrites the accession
    # into "<chrom><delimiter><blockStart>-<blockEnd>" form; confirm there.
    domain.accession, domain.sStart, domain.sEnd = \
        blast.genomeToBlock(
            domain.accession, domain.sStart, domain.sEnd,
            L=chrLen, blockSize=blockSize, delimiter=delimiter)
    return domain
def convertOrfToGenomic(start, end, strand, orfStart):
    """Convert domain coordinates in ORF to genomic.

    @param start: Domain start coord (amino acids)
    @param end: Domain end coord (amino acids)
    @param strand: Strand ('+' or '-')
    @param orfStart: ORF start coord (genomic)
    @return: (gStart, gEnd)
    """
    # Nucleotide offsets relative to the ORF start; the end offset spans
    # the whole final codon (+2).
    offset_start = 3 * (start - 1)
    offset_end = 3 * (end - 1) + 2
    if strand == '+':
        return orfStart + offset_start, orfStart + offset_end
    return orfStart - offset_start, orfStart - offset_end
def loadDomains(iFileHandle):
    """Load hmmer domain results.

    @param iFileHandle: Input file or filename
    @return: list of domains
    """
    # The old docstring advertised seqType/eValueCutoff/scoreCutoff
    # parameters that this function never accepted; filtering is the
    # caller's job.  Materialise the HmmerFile iterator directly instead
    # of an append loop.
    return list(HmmerFile(iFileHandle))
| PapenfussLab/Mungo | mungo/hmmer.py | Python | artistic-2.0 | 15,959 |
package ttftcuts.atg.api;
import java.util.List;
import ttftcuts.atg.api.events.*;
import ttftcuts.atg.api.events.listenable.ATGBiomeGroupAssignmentEvent.ATGGroupActivationEvent;
import net.minecraft.world.World;
import net.minecraft.world.biome.BiomeGenBase;
import net.minecraftforge.common.MinecraftForge;
import com.google.common.base.Optional;
/**
*
* @author TTFTCUTS
*
* Biome related API things! Biome groups, adding biomes to those groups and more.
*
*/
public abstract class ATGBiomes {

    /** Broad terrain categories a biome group can belong to. */
    public static enum BiomeType { LAND, COAST, SEA }

    /**
     * Gets an ATG biome by name.
     *
     * @param biomeName The name of the biome you want to get.
     * @return the corresponding biome, or null if the request event was not answered.
     */
    public static BiomeGenBase getBiome(String biomeName) {
        final ATGBiomeRequestEvent event = new ATGBiomeRequestEvent(biomeName);
        MinecraftForge.EVENT_BUS.post(event);
        if ( !event.biome.isPresent() ) {
            return null;
        }
        return event.biome.get();
    }

    /**
     * Gets a list of names corresponding to the Biome Groups which contain a specified biome.
     *
     * @param biome The biome you want to find groups for.
     * @return a list of names of containing Biome Groups.
     */
    public static List<String> getGroupFromBiome(BiomeGenBase biome) {
        final ATGBiomeGroupRequestEvent event = new ATGBiomeGroupRequestEvent(biome);
        MinecraftForge.EVENT_BUS.post(event);
        return event.groups;
    }

    /**
     * Gets the raw height, temperature and moisture values from the generator for a specific pair of x/z coordinates.
     *
     * WARNING: This is a VERY expensive calculation and the result is NOT cached, so please use as little as possible!
     *
     * @param world The world that you want to get the information for.
     * @param x X coordinate of the point to query.
     * @param z Z coordinate of the point to query.
     * @return an array of three doubles corresponding to the height, temperature and moisture at the specified point in the ranges 0.0-1.0.
     */
    public static double[] getGeneratorInfo(World world, double x, double z) {
        final ATGGeneratorInfoEvent event = new ATGGeneratorInfoEvent(world,x,z);
        MinecraftForge.EVENT_BUS.post(event);
        return event.info;
    }

    /**
     * Adds a new biome GROUP to ATG. Not something that would usually need to be used.
     *
     * @param type The biome type that this group belongs to. LAND, COAST or SEA.
     * @param name The name of this group.
     * @param temp Temperature value for this group. Same range as biome temperatures.
     * @param moisture Moisture value for this group. Same range as biome rainfall.
     * @param height Average height value for this group. Same range as biome heights.
     * @param minHeight Minimum height to generate this group. Above this value, it will be skipped.
     * @param maxHeight Maximum height to generate this group. Below this value, it will be skipped.
     * @param salt Biome blob generation salt. Used to offset biome boundaries from other groups to avoid strange artifacts.
     * @param generate Set to false to prevent this group generating in the default manner. Primarily for use with the biome group assignment events.
     */
    public static void addBiomeGroup(BiomeType type, String name, double temp, double moisture, double height, double minHeight, double maxHeight, long salt, boolean generate) {
        ATGBiomeGroupAddEvent event = new ATGBiomeGroupAddEvent(type, name, temp, moisture, height, minHeight, maxHeight, salt, generate);
        MinecraftForge.EVENT_BUS.post(event);
        if ( event.response == ATGBiomeGroupAddEvent.ResponseType.FAILED ) {
            // FAILED!
            // NOTE(review): failure is silently ignored -- callers get no
            // feedback when the group could not be added; confirm intended.
        }
    }

    /** Convenience overload: group that generates in the default manner (generate=true). */
    public static void addBiomeGroup(BiomeType type, String name, double temp, double moisture, double height, double minHeight, double maxHeight, long salt) {
        addBiomeGroup(type, name, temp, moisture, height, minHeight, maxHeight, salt, true);
    }

    /** Convenience overload: group spanning the full height range 0.0-1.0. */
    public static void addBiomeGroup(BiomeType type, String name, double temp, double moisture, double height, long salt) {
        addBiomeGroup(type, name, temp, moisture, height, 0.0, 1.0, salt);
    }

    /** Convenience overload: full height range and salt 0. */
    public static void addBiomeGroup(BiomeType type, String name, double temp, double moisture, double height) {
        addBiomeGroup(type, name, temp, moisture, height, 0);
    }

    /**
     * Modifies a biome group to make it more or less likely to be chosen by the generator.
     * Best used to ensure a height-constrained biome group generates in favour of an otherwise identically ranged group.
     *
     * @param type Group type for the second parameter. LAND, COAST or SEA.
     * @param name Name of the group to modify.
     * @param modifier Modifier value. Positive makes the group more likely to be picked. Very small values can have a large effect.
     */
    public static void modGroupSuitability(BiomeType type, String name, double modifier) {
        ATGBiomeGroupEvent event = new ATGBiomeGroupEvent( ATGBiomeGroupEvent.EventType.SUITABILITY, type, name, modifier );
        MinecraftForge.EVENT_BUS.post(event);
        if ( event.response == ATGBiomeGroupEvent.ResponseType.FAILED ) {
            // FAILED!
            // NOTE(review): same silent-failure pattern as addBiomeGroup.
        }
    }

    /**
     * Register a biome with ATG.
     *
     * @param type Type of the biome group this biome will inhabit. LAND, COAST or SEA.
     * @param group Name of the biome group this biome will inhabit.
     * @param biome The biome to be registered.
     * @param weight Generation weight for this biome. All vanilla biomes are weighted 1.0 except mushroom island.
     */
    public static void addBiome(BiomeType type, String group, BiomeGenBase biome, double weight) {
        ATGBiomeEvent event = new ATGBiomeEvent( type, group, biome, null, weight);
        MinecraftForge.EVENT_BUS.post(event);
    }

    /**
     * Replace a biome in a group with a different biome.
     *
     * @param type Type of the target biome group. LAND, COAST or SEA.
     * @param group Name of the target biome group.
     * @param toReplace Biome to replace in the specified group.
     * @param replacement Biome which will replace toReplace in the group.
     * @param weight Generation weight for the replacement biome.
     */
    public static void replaceBiome(BiomeType type, String group, BiomeGenBase toReplace, BiomeGenBase replacement, double weight) {
        ATGBiomeEvent event = new ATGBiomeEvent( type, group, replacement, toReplace, weight );
        MinecraftForge.EVENT_BUS.post(event);
    }

    /**
     * Add a sub-biome to a biome. Sub-biomes appear as smaller patches within their parent biome.
     *
     * @param biome Parent biome.
     * @param subBiome Biome that will appear as a sub-biome.
     * @param weight Generation weight for the sub-biome. The parent biome is always weighted at 1.0, so a 1.0 weight here with a single sub-biome would be a 50/50 split.
     */
    public static void addSubBiome(BiomeGenBase biome, BiomeGenBase subBiome, double weight) {
        ATGBiomeModEvent event = new ATGBiomeModEvent(ATGBiomeModEvent.EventType.SUBBIOME, biome, null, subBiome, weight);
        MinecraftForge.EVENT_BUS.post(event);
    }

    /**
     * Add an IGenMod to a biome to modify how it generates.
     *
     * @param biome Biome to attach the mod to.
     * @param mod IGenMod object that will modify the biome.
     */
    public static void addGenMod(BiomeGenBase biome, IGenMod mod) {
        ATGBiomeModEvent event = new ATGBiomeModEvent(ATGBiomeModEvent.EventType.GENMOD, biome, mod, null, 0);
        MinecraftForge.EVENT_BUS.post(event);
    }

    /**
     * Get the IGenMod assigned to a biome, or Optional.absent if there isn't one.
     *
     * @param biome The biome to get the IGenMod for.
     * @return an Optional corresponding to the IGenMod for the biome, or Optional.absent.
     */
    public static Optional<IGenMod> getGenMod(BiomeGenBase biome) {
        ATGBiomeModRequestEvent event = new ATGBiomeModRequestEvent(biome);
        MinecraftForge.EVENT_BUS.post(event);
        return event.mod;
    }

    /**
     * Sets the rock parameters for a biome, to modify how ATG boulders generate there.
     *
     * @param biome The biome to set rock properties for.
     * @param rockChance 1 in rockChance chunks will contain a rock.
     * @param bigRockChance 1 in bigRockChance rocks will be large.
     * @param rocksPerChunk rockChance will be checked rocksPerChunk times per chunk.
     */
    public static void setBiomeRocks(BiomeGenBase biome, int rockChance, int bigRockChance, int rocksPerChunk) {
        ATGBiomeRocksEvent event = new ATGBiomeRocksEvent(biome, rockChance, bigRockChance, rocksPerChunk);
        MinecraftForge.EVENT_BUS.post(event);
    }

    /**
     * Use this to enable the posting of "ATGBiomeGroupAssignmentEvent"s at generation, to allow custom biome group overrides.
     * If this is not called at least once, none of those events will be sent.
     *
     * Listening for ATGBiomeGroupAssignmentEvent allows direct replacement of the biome group at every x/z coordinate pair.
     * When enabled, it slows generation by about 10% due to event volume, so it's off by default.
     *
     * Only call this if you intend to listen for those events.
     */
    public static void enableBiomeGroupAssignmentEvent() {
        ATGGroupActivationEvent event = new ATGGroupActivationEvent();
        MinecraftForge.EVENT_BUS.post(event);
    }
}
| reteo/CustomOreGen | src/api/java/ttftcuts/atg/api/ATGBiomes.java | Java | artistic-2.0 | 9,346 |
#! /usr/bin/env python
"""
stats -- Prints some channel information.
disconnect -- Disconnect the bot. The bot will try to reconnect
after 60 seconds.
die -- Let the bot cease to exist.
"""
import liblo
import irc.bot
import irc.strings
from irc.client import ip_numstr_to_quad, ip_quad_to_numstr
class TestBot(irc.bot.SingleServerIRCBot):
    """IRC bot that bridges channel traffic to an OSC target (Python 2).

    Every command received (via private message, or a public message
    addressed as "<nick>: <cmd>") is forwarded character-by-character,
    as ASCII codes, in one OSC message whose path is the bot's nickname.
    """

    def __init__(self, channel, nickname, server, port=6667, OSCport=57120):
        irc.bot.SingleServerIRCBot.__init__(self, [(server, port)], nickname, nickname)
        self.server = server
        self.channel = channel
        self.nickname = nickname
        try:
            # OSC destination; liblo.Address accepts a bare port number.
            self.target = liblo.Address(OSCport)
        except liblo.AddressError, err:
            print str(err)
            # NOTE(review): `sys` is only imported inside main(), so this
            # line would raise NameError if ever reached -- confirm and add
            # a module-level `import sys`.
            sys.exit("OSC address error")

    def on_nicknameinuse(self, c, e):
        # Nick collision: retry with a trailing underscore appended.
        c.nick(c.get_nickname() + "_")

    def on_welcome(self, c, e):
        c.join(self.channel)
        print "connected to:\t" + self.server

    def on_privmsg(self, c, e):
        # Private messages are treated as commands directly.
        self.do_command(e, e.arguments[0])

    def on_pubmsg(self, c, e):
        # Only react to public messages addressed as "<ournick>: <cmd>".
        a = e.arguments[0].split(":", 1)
        if len(a) > 1 and irc.strings.lower(a[0]) == irc.strings.lower(self.connection.get_nickname()):
            self.do_command(e, a[1].strip())
        return

    def do_command(self, e, cmd):
        """Forward *cmd* over OSC and handle the built-in bot commands."""
        nick = e.source.nick
        c = self.connection
        target = self.target
        # NOTE(review): this notice is sent for *every* command, not just
        # "stats" -- looks like leftover debugging; confirm.
        c.notice(nick, "--- Channel statistics ---")
        msg = liblo.Message(self.nickname) # nickname is osc tag...
        #msg.add(nick)
        #~ if nick == "iow":
        #~ for i in cmd:
        #~ #msg.add(ord(i)) #ord: char's ascii number
        #~ msg.add(i)
        #~ liblo.send(target, msg)
        #~ for i in cmd:
        #~ msg.add(ord(i))
        #~ liblo.send(target, msg)
        #~ print msg
        # Forward the raw command, one ASCII code per OSC argument.
        for i in cmd:
            msg.add(ord(i))
        liblo.send(target, msg)
        if cmd == "disconnect":
            self.disconnect()
        elif cmd == "die":
            self.die()
        elif cmd == "stats":
            print 'stats?'
            for chname, chobj in self.channels.items():
                c.notice(nick, "--- Channel statistics ---")
                c.notice(nick, "Channel: " + chname)
                users = chobj.users()
                users.sort()
                c.notice(nick, "Users: " + ", ".join(users))
                opers = chobj.opers()
                opers.sort()
                c.notice(nick, "Opers: " + ", ".join(opers))
                voiced = chobj.voiced()
                voiced.sort()
                c.notice(nick, "Voiced: " + ", ".join(voiced))
        else:
            c.notice(nick, "Not understood: " + cmd)
def main():
    """Parse command-line arguments and start the bot.

    Usage: ircbot.py <server[:port]> <channel> <nickname> <oscport>
    """
    import sys
    nickname = "p1n1"
    #channel = "#mode+v"
    server = "127.0.0.1"
    IRCport = 6667
    OSCport = 57120
    print len(sys.argv)
    if len(sys.argv) != 5:
        print("Usage: Dtestbot <server[:port]> <channel> <nickname> <oscport>")
        print("$ ./ircbot.py 127.0.0.1 \"mode+v\" jk 57124")
        sys.exit(1)
    # Split an optional ":port" suffix off the server argument.
    s = sys.argv[1].split(":", 1)
    server = s[0]
    if len(s) == 2:
        try:
            port = int(s[1])
        except ValueError:
            print("Error: Erroneous port.")
            sys.exit(1)
    else:
        port = 6667
    channel = sys.argv[2]
    nickname = sys.argv[3]
    OSCport = sys.argv[4]
    #print nickname
    #bot = TestBot(channel, nickname, server, port)
    # NOTE(review): the parsed server/channel/port are ignored below -- the
    # bot is hard-wired to 127.0.0.1:6667 and "#mode+v"; only nickname and
    # OSCport are honoured. Confirm whether this is intentional.
    bot = TestBot("#mode+v", nickname, "127.0.0.1", 6667, OSCport)
    #bot = TestBot(channel, nickname, server, IRCport, OSCport)
    bot.start()
    print 'started...'

# Script entry point.
if __name__ == "__main__":
    main()
| sonoprob/0x56 | bot/py/ircoscbot.py | Python | artistic-2.0 | 3,668 |
// Machine generated IDispatch wrapper class(es) created by Microsoft Visual C++
// NOTE: Do not modify the contents of this file. If this class is regenerated by
// Microsoft Visual C++, your modifications will be overwritten.
#include "stdafx.h"
#include "printer.h"
/////////////////////////////////////////////////////////////////////////////
// CPrinter properties
/////////////////////////////////////////////////////////////////////////////
// CPrinter operations
// NOTE(review): these are machine-generated IDispatch wrappers (see the
// "Machine generated" banner above) -- regenerating the wrapper class will
// overwrite hand edits, so only comments have been added here.

// Detail property (dispid 0x18).
long CPrinter::GetDetail()
{
    long result;
    InvokeHelper(0x18, DISPATCH_PROPERTYGET, VT_I4, (void*)&result, NULL);
    return result;
}

void CPrinter::SetDetail(long nNewValue)
{
    static BYTE parms[] =
        VTS_I4;
    InvokeHelper(0x18, DISPATCH_PROPERTYPUT, VT_EMPTY, NULL, parms,
         nNewValue);
}

// Page margin properties, dispids 0x2-0x5 (bottom, left, top, right).
long CPrinter::GetMarginBottom()
{
    long result;
    InvokeHelper(0x2, DISPATCH_PROPERTYGET, VT_I4, (void*)&result, NULL);
    return result;
}

void CPrinter::SetMarginBottom(long nNewValue)
{
    static BYTE parms[] =
        VTS_I4;
    InvokeHelper(0x2, DISPATCH_PROPERTYPUT, VT_EMPTY, NULL, parms,
         nNewValue);
}

long CPrinter::GetMarginLeft()
{
    long result;
    InvokeHelper(0x3, DISPATCH_PROPERTYGET, VT_I4, (void*)&result, NULL);
    return result;
}

void CPrinter::SetMarginLeft(long nNewValue)
{
    static BYTE parms[] =
        VTS_I4;
    InvokeHelper(0x3, DISPATCH_PROPERTYPUT, VT_EMPTY, NULL, parms,
         nNewValue);
}

long CPrinter::GetMarginTop()
{
    long result;
    InvokeHelper(0x4, DISPATCH_PROPERTYGET, VT_I4, (void*)&result, NULL);
    return result;
}

void CPrinter::SetMarginTop(long nNewValue)
{
    static BYTE parms[] =
        VTS_I4;
    InvokeHelper(0x4, DISPATCH_PROPERTYPUT, VT_EMPTY, NULL, parms,
         nNewValue);
}

long CPrinter::GetMarginRight()
{
    long result;
    InvokeHelper(0x5, DISPATCH_PROPERTYGET, VT_I4, (void*)&result, NULL);
    return result;
}

void CPrinter::SetMarginRight(long nNewValue)
{
    static BYTE parms[] =
        VTS_I4;
    InvokeHelper(0x5, DISPATCH_PROPERTYPUT, VT_EMPTY, NULL, parms,
         nNewValue);
}

// Page orientation property (dispid 0x1).
long CPrinter::GetOrientation()
{
    long result;
    InvokeHelper(0x1, DISPATCH_PROPERTYGET, VT_I4, (void*)&result, NULL);
    return result;
}

void CPrinter::SetOrientation(long nNewValue)
{
    static BYTE parms[] =
        VTS_I4;
    InvokeHelper(0x1, DISPATCH_PROPERTYPUT, VT_EMPTY, NULL, parms,
         nNewValue);
}

// Print actions (dispids 0x40, 0x41, 0x43) and preview (0x1c).
void CPrinter::Print()
{
    InvokeHelper(0x40, DISPATCH_METHOD, VT_EMPTY, NULL, NULL);
}

void CPrinter::PrintLandscape()
{
    InvokeHelper(0x41, DISPATCH_METHOD, VT_EMPTY, NULL, NULL);
}

void CPrinter::PrintPortrait()
{
    InvokeHelper(0x43, DISPATCH_METHOD, VT_EMPTY, NULL, NULL);
}

void CPrinter::ShowPreview()
{
    InvokeHelper(0x1c, DISPATCH_METHOD, VT_EMPTY, NULL, NULL);
}

// Prints the region bounded by the given coordinates (dispid 0x6).
void CPrinter::PrintPartial(long Left, long Top, long Right, long Bottom)
{
    static BYTE parms[] =
        VTS_I4 VTS_I4 VTS_I4 VTS_I4;
    InvokeHelper(0x6, DISPATCH_METHOD, VT_EMPTY, NULL, parms,
         Left, Top, Right, Bottom);
}

// Print-job bracketing (dispids 0x7 / 0x8).
void CPrinter::BeginDoc()
{
    InvokeHelper(0x7, DISPATCH_METHOD, VT_EMPTY, NULL, NULL);
}

void CPrinter::EndDoc()
{
    InvokeHelper(0x8, DISPATCH_METHOD, VT_EMPTY, NULL, NULL);
}

// Printer-selection properties (dispids 0x9-0xb).
long CPrinter::GetPrinterIndex()
{
    long result;
    InvokeHelper(0x9, DISPATCH_PROPERTYGET, VT_I4, (void*)&result, NULL);
    return result;
}

void CPrinter::SetPrinterIndex(long nNewValue)
{
    static BYTE parms[] =
        VTS_I4;
    InvokeHelper(0x9, DISPATCH_PROPERTYPUT, VT_EMPTY, NULL, parms,
         nNewValue);
}

long CPrinter::GetPrinterCount()
{
    long result;
    InvokeHelper(0xa, DISPATCH_PROPERTYGET, VT_I4, (void*)&result, NULL);
    return result;
}

CString CPrinter::GetPrinterDescription(long Index)
{
    CString result;
    static BYTE parms[] =
        VTS_I4;
    InvokeHelper(0xb, DISPATCH_PROPERTYGET, VT_BSTR, (void*)&result, parms,
        Index);
    return result;
}

void CPrinter::PrintChart()
{
    InvokeHelper(0xc, DISPATCH_METHOD, VT_EMPTY, NULL, NULL);
}

// Read-only page-dimension properties (dispids 0xd / 0xe).
long CPrinter::GetPageHeight()
{
    long result;
    InvokeHelper(0xd, DISPATCH_PROPERTYGET, VT_I4, (void*)&result, NULL);
    return result;
}

long CPrinter::GetPageWidth()
{
    long result;
    InvokeHelper(0xe, DISPATCH_PROPERTYGET, VT_I4, (void*)&result, NULL);
    return result;
}

// Spooler job title property (dispid 0xf).
CString CPrinter::GetJobTitle()
{
    CString result;
    InvokeHelper(0xf, DISPATCH_PROPERTYGET, VT_BSTR, (void*)&result, NULL);
    return result;
}

void CPrinter::SetJobTitle(LPCTSTR lpszNewValue)
{
    static BYTE parms[] =
        VTS_BSTR;
    InvokeHelper(0xf, DISPATCH_PROPERTYPUT, VT_EMPTY, NULL, parms,
         lpszNewValue);
}

// Proportional-scaling flag (dispid 0x11).
BOOL CPrinter::GetPrintProportional()
{
    BOOL result;
    InvokeHelper(0x11, DISPATCH_PROPERTYGET, VT_BOOL, (void*)&result, NULL);
    return result;
}

void CPrinter::SetPrintProportional(BOOL bNewValue)
{
    static BYTE parms[] =
        VTS_BOOL;
    InvokeHelper(0x11, DISPATCH_PROPERTYPUT, VT_EMPTY, NULL, parms,
         bNewValue);
}

// Prints a region into a caller-supplied device context (dispid 0x12).
void CPrinter::PrintPartialHandle(const VARIANT& DC, long Left, long Top, long Right, long Bottom)
{
    static BYTE parms[] =
        VTS_VARIANT VTS_I4 VTS_I4 VTS_I4 VTS_I4;
    InvokeHelper(0x12, DISPATCH_METHOD, VT_EMPTY, NULL, parms,
         &DC, Left, Top, Right, Bottom);
}

// Prints an inclusive page range (dispid 0x10).
void CPrinter::PrintPages(long FromPage, long ToPage)
{
    static BYTE parms[] =
        VTS_I4 VTS_I4;
    InvokeHelper(0x10, DISPATCH_METHOD, VT_EMPTY, NULL, parms,
         FromPage, ToPage);
}
| ChIna-king-Arthur/MFC | cow2/TeeChartAPI/printer.cpp | C++ | artistic-2.0 | 5,140 |
package com.work.pm25;
import android.text.TextUtils;
import bean.City;
import bean.County;
import bean.Province;
/**
* Created by KalinaRain on 2015/5/20.
*/
public class Utility {

    // NOTE(review): only the province handler is synchronized; the city and
    // county handlers are not. Left as-is to preserve locking behaviour --
    // confirm whether the asymmetry is intentional.

    /**
     * 解析和处理服务器返回的省级数据.
     *
     * Parses a "code|name,code|name,..." payload and stores each entry in
     * the Province table.
     *
     * @param pm25DB   database wrapper used to persist the parsed rows
     * @param response raw comma/pipe separated payload from the server
     * @return true if the response was non-empty and was parsed
     */
    public synchronized static boolean handleProvincesResponse(PM25DB pm25DB, String response) {
        if (TextUtils.isEmpty(response)) {
            return false;
        }
        // String.split never returns null, and on a non-empty input the
        // result always has at least one element, so no further guards
        // are needed (the old null/length checks were unreachable).
        for (String p : response.split(",")) {
            String[] array = p.split("\\|");
            Province province = new Province();
            province.setProvinceCode(array[0]);
            province.setProvinceName(array[1]);
            // 将解析出来的数据存储到Province表
            pm25DB.saveProvince(province);
        }
        return true;
    }

    /**
     * 解析和处理服务器返回的市级数据.
     *
     * @param pm25DB     database wrapper used to persist the parsed rows
     * @param response   raw comma/pipe separated payload from the server
     * @param provinceId id of the province the cities belong to
     * @return true if the response was non-empty and was parsed
     */
    public static boolean handleCitiesResponse(PM25DB pm25DB,
                                               String response, int provinceId) {
        if (TextUtils.isEmpty(response)) {
            return false;
        }
        for (String c : response.split(",")) {
            String[] array = c.split("\\|");
            City city = new City();
            city.setCityCode(array[0]);
            city.setCityName(array[1]);
            city.setProvinceId(provinceId);
            // 将解析出来的数据存储到City表
            pm25DB.saveCity(city);
        }
        return true;
    }

    /**
     * 解析和处理服务器返回的县级数据.
     *
     * @param pm25DB   database wrapper used to persist the parsed rows
     * @param response raw comma/pipe separated payload from the server
     * @param cityId   id of the city the counties belong to
     * @return true if the response was non-empty and was parsed
     */
    public static boolean handleCountiesResponse(PM25DB pm25DB,
                                                 String response, int cityId) {
        if (TextUtils.isEmpty(response)) {
            return false;
        }
        for (String c : response.split(",")) {
            String[] array = c.split("\\|");
            County county = new County();
            county.setCountyCode(array[0]);
            county.setCountyName(array[1]);
            county.setCityId(cityId);
            // 将解析出来的数据存储到County表
            pm25DB.saveCounty(county);
        }
        return true;
    }
}
| KalinaRain/MyWorkSpace | pmdetection/src/main/java/com/work/pm25/Utility.java | Java | artistic-2.0 | 2,771 |
package com.germi.indrob.utility;
import javax.swing.filechooser.FileFilter;
import javax.swing.filechooser.FileNameExtensionFilter;
public class UtilityExtension {

    // Display name shown for this file type.
    protected String name;
    // Accepted extensions (without the dot); index 0 is the preferred one.
    protected String[] defs;
    // Swing file filter built from the name and extensions.
    protected FileFilter filt;

    /**
     * Creates a file-type descriptor wrapping a Swing extension filter.
     *
     * @param displayName human-readable name of the file type
     * @param extensions  one or more file extensions, without the dot
     */
    public UtilityExtension(String displayName, String... extensions) {
        name = displayName;
        defs = extensions;
        filt = new FileNameExtensionFilter(displayName, extensions);
    }

    /** @return the display name of this file type */
    public String getName() {
        return name;
    }

    /** @return the preferred (first) extension */
    public String getPreferedExtension() {
        return defs[0];
    }

    /** @return the Swing filter matching these extensions */
    public FileFilter getFilter() {
        return filt;
    }

    /** @return all accepted extensions */
    public String[] getDefinitions() {
        return defs;
    }
}
| el-germi/industrial-robot | src/main/java/com/germi/indrob/utility/UtilityExtension.java | Java | artistic-2.0 | 619 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the ``location`` and ``link_url``
    fields to ``calendar.Event``.

    Auto-generated by South's schemamigration command; the ``models``
    dict below is a frozen ORM snapshot and should not be hand-edited.
    """

    def forwards(self, orm):
        # Adding field 'Event.location'
        db.add_column(u'calendar_event', 'location',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)

        # Adding field 'Event.link_url'
        db.add_column(u'calendar_event', 'link_url',
                      self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Event.location'
        db.delete_column(u'calendar_event', 'location')

        # Deleting field 'Event.link_url'
        db.delete_column(u'calendar_event', 'link_url')

    # Frozen ORM snapshot used by South while running this migration.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'calendar.dummytable': {
            'Meta': {'object_name': 'DummyTable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        u'calendar.event': {
            'Meta': {'object_name': 'Event'},
            '_meta_title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
            'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'events'", 'to': u"orm['sfpirgapp.Category']"}),
            'content': ('mezzanine.core.fields.RichTextField', [], {}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'featured_image': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'gen_description': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'in_menus': ('mezzanine.pages.fields.MenusField', [], {'default': '(1, 2, 3, 4, 5)', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'in_sitemap': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'keywords': ('mezzanine.generic.fields.KeywordsField', [], {'object_id_field': "'object_pk'", 'to': u"orm['generic.AssignedKeyword']", 'frozen_by_south': 'True'}),
            'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
            'link_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'location': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
            'start': ('django.db.models.fields.DateTimeField', [], {}),
            'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'theme_color': ('django.db.models.fields.CharField', [], {'default': "'grey'", 'max_length': '255'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['calendar.EventType']", 'null': 'True', 'blank': 'True'}),
            'zip_import': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
        },
        u'calendar.eventimage': {
            'Meta': {'ordering': "('_order',)", 'object_name': 'EventImage'},
            '_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
            'file': ('mezzanine.core.fields.FileField', [], {'max_length': '200'}),
            'gallery': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'images'", 'to': u"orm['calendar.Event']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        u'calendar.eventtype': {
            'Meta': {'ordering': "['name']", 'object_name': 'EventType'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'generic.assignedkeyword': {
            'Meta': {'ordering': "('_order',)", 'object_name': 'AssignedKeyword'},
            '_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'keyword': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'assignments'", 'to': u"orm['generic.Keyword']"}),
            'object_pk': ('django.db.models.fields.IntegerField', [], {})
        },
        u'generic.keyword': {
            'Meta': {'object_name': 'Keyword'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '500'})
        },
        u'sfpirgapp.category': {
            'Meta': {'ordering': "('_order',)", 'object_name': 'Category'},
            '_meta_title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
            '_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'content': ('mezzanine.core.fields.RichTextField', [], {}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'featured_image': ('sfpirgapp.fields.MyImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'gen_description': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'in_sitemap': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'keywords': ('mezzanine.generic.fields.KeywordsField', [], {'object_id_field': "'object_pk'", 'to': u"orm['generic.AssignedKeyword']", 'frozen_by_south': 'True'}),
            'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
            'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
            'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'theme_color': ('django.db.models.fields.CharField', [], {'default': "'grey'", 'max_length': '255'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'titles': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'categorys'", 'to': u"orm['auth.User']"})
        },
        u'sites.site': {
            'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        }
    }

    complete_apps = ['calendar']
from django.conf import settings
from django.db.models.loading import get_model
from models import MasterSetting, SettingTypes
def get(name, default=None):
    """Return the typed value of the named setting, or ``default``.

    The stored string is coerced according to the setting's declared
    type; FOREIGN settings are resolved to the referenced model
    instance. ``default`` is returned when the setting does not exist
    or a FOREIGN target row is missing.
    """
    try:
        setting = MasterSetting.objects.get(name=name)
    except MasterSetting.DoesNotExist:
        return default

    if setting.type == SettingTypes.INT:
        return int(setting.value)
    if setting.type == SettingTypes.FLOAT:
        return float(setting.value)
    if setting.type == SettingTypes.FOREIGN:
        model = get_model(*setting.foreign_model.split("."))
        try:
            return model.objects.get(id=int(setting.value))
        except model.DoesNotExist:
            return default
    # CHOICES and plain settings are stored as their raw string value.
    return setting.value
def set(name, value):
    """Store ``value`` into the named setting, coercing/validating it
    according to the setting's declared type.

    Raises MasterSetting.DoesNotExist for unknown names and ValueError
    when a CHOICES value is not among the configured options.
    """
    setting = MasterSetting.objects.get(name=name)
    if setting.type == SettingTypes.INT:
        # Bug fix: previously this converted the already-stored value
        # (str(int(setting.value))), silently discarding the new one.
        setting.value = str(int(value))
    elif setting.type == SettingTypes.FLOAT:
        # Bug fix: same as above -- coerce the incoming value.
        setting.value = str(float(value))
    elif setting.type == SettingTypes.FOREIGN:
        model = get_model(*setting.foreign_model.split("."))
        try:
            object_ = model.objects.get(id=int(value.id))
            setting.value = str(object_.id)
        except model.DoesNotExist:
            # Preserve original behavior: a missing target row aborts
            # the update without saving.
            return None
    elif setting.type == SettingTypes.CHOICES:
        options_ = settings.MASTER_SETTINGS[setting.name]['options']
        if value in options_:
            setting.value = value
        else:
            raise ValueError("Available options are: %s " % str(options_))
    else:
        setting.value = value
    setting.save()
def exists(name):
    """Return True when a setting with the given name is stored."""
    try:
        MasterSetting.objects.get(name=name)
    except MasterSetting.DoesNotExist:
        return False
    return True
# coding: utf-8
"""
MINDBODY Public API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.get_session_types_request import GetSessionTypesRequest # noqa: E501
from swagger_client.rest import ApiException
class TestGetSessionTypesRequest(unittest.TestCase):
    """GetSessionTypesRequest unit test stubs"""

    def setUp(self):
        # No fixtures are needed yet for this generated stub.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testGetSessionTypesRequest(self):
        """Test GetSessionTypesRequest"""
        # FIXME: construct object with mandatory attributes with example values
        # model = swagger_client.models.get_session_types_request.GetSessionTypesRequest()  # noqa: E501
        pass
# Allow running these stubs directly: python test_get_session_types_request.py
if __name__ == '__main__':
    unittest.main()
| mindbody/API-Examples | SDKs/Python/test/test_get_session_types_request.py | Python | bsd-2-clause | 1,006 |
package battleship
// ShipType encodes the content of a board cell: either the result of a
// shot (Missed/Hit) or the class of ship occupying the cell.
type ShipType uint8

const (
	Missed ShipType = iota // shot that struck open water
	Hit                    // shot that struck a ship
	AircraftCarrier
	Battleship
	Submarine
	Destroyer
	Cruiser
	PatrolBoat
)
// Mark returns the board glyph or label used to render this cell type:
// "\u2573" for a miss, "\u29BF" for a hit, and a short text banner for
// each ship class.
func (st ShipType) Mark() string {
	switch st {
	case Missed:
		return "\u2573"
	case Hit:
		return "\u29BF"
	case AircraftCarrier:
		return "A*CARRIER"
	case Destroyer:
		return "DESTR"
	case Submarine:
		return "*SUB*"
	case Battleship:
		return "B*SHIP*"
	case PatrolBoat:
		return "P*B"
	case Cruiser:
		return "CRUIS"
	default:
		return ""
	}
}
// String returns the human-readable name of the ship type, or the
// empty string for values outside the defined range.
func (st ShipType) String() string {
	names := [...]string{
		Missed:          "missed",
		Hit:             "hit",
		AircraftCarrier: "aircraft carrier",
		Battleship:      "battleship",
		Submarine:       "submarine",
		Destroyer:       "destroyer",
		Cruiser:         "cruiser",
		PatrolBoat:      "patrol boat",
	}
	if int(st) < len(names) {
		return names[st]
	}
	return ""
}
// Size returns how many board cells the type occupies; shot markers
// (Missed/Hit) take a single cell, unknown values take none.
func (st ShipType) Size() int {
	switch st {
	case AircraftCarrier:
		return 5
	case Battleship:
		return 4
	case Destroyer, Submarine, Cruiser:
		return 3
	case PatrolBoat:
		return 2
	case Missed, Hit:
		return 1
	default:
		return 0
	}
}
// Score returns the point value awarded for sinking a ship of this
// type; non-ship values score zero.
func (st ShipType) Score() int {
	switch st {
	case AircraftCarrier:
		return 20
	case Battleship:
		return 12
	case Destroyer, Submarine, Cruiser:
		return 6
	case PatrolBoat:
		return 2
	default:
		return 0
	}
}
| gdey/go-challange-battleship | battleship/ship_type.go | GO | bsd-2-clause | 1,552 |
package utwil
import (
"encoding/json"
"fmt"
"testing"
)
// This test calls ToPhoneNumber and also forwards the call to ToPhoneNumber.
// ToPhoneNumber should expect two calls.
// TestCall places a call from FromPhoneNumber to ToPhoneNumber via a
// twimlet that forwards back to ToPhoneNumber, so the destination
// number should expect two calls. The created call is logged as JSON.
func TestCall(t *testing.T) {
	forwardURL := fmt.Sprintf("http://twimlets.com/forward?PhoneNumber=%s", ToPhoneNumber)

	call, err := TestClient.Call(FromPhoneNumber, ToPhoneNumber, forwardURL)
	if err != nil {
		t.Fatalf("Failed: %s", err.Error())
	}

	encoded, err := json.MarshalIndent(call, "", "  ")
	if err != nil {
		t.Fatalf("Failed: %s", err.Error())
	}
	t.Logf("Call:\n%s\n", string(encoded))
}
| wyc/utwil | call_test.go | GO | bsd-2-clause | 584 |
/*********************************************************************
* Maxeler Technologies: BrainNetwork *
* *
* Version: 1.2 *
* Date: 05 July 2013 *
* *
* GUI code source file *
* *
*********************************************************************/
package com.maxeler.brainnetwork.gui;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JToggleButton;
//Toggle button which starts and stops the demo computation
public class ComputeButton extends JToggleButton implements ActionListener, Runnable {

	//UID generated for component serialization
	private static final long serialVersionUID = -145094948834407005L;

	//Correlation kernels driven by this button, and the speed-up display
	private final LinearCorrelation[] kernel;
	private final SpeedUpTextField speedup;

	//Constructor: a "Compute" toggle button, initially unselected
	public ComputeButton(LinearCorrelation[] kernel, SpeedUpTextField speedup) {
		super("Compute", false);
		this.kernel = kernel;
		this.speedup = speedup;
		//Button size
		setSize(150, 40);
		//React to clicks on the button
		addActionListener(this);
	}

	//Handle a click on a background thread so the event dispatch
	//thread stays responsive
	@Override
	public void actionPerformed(ActionEvent actionEvent) {
		new Thread(this).start();
	}

	//Worker applying the new toggle state
	@Override
	public void run() {
		//Deactivate the button until the state change is finished
		setEnabled(false);
		//Propagate the toggle state to the global running flag
		BrainNetwork.running = isSelected();
		if (BrainNetwork.running) {
			speedup.resetToZero();
		} else {
			//When stopping, restart every correlation kernel
			for (LinearCorrelation k : kernel) {
				k.restart();
			}
		}
		//Finally, reactivate the button
		setEnabled(true);
	}
}
| maxeler/Brain-Network | APP/CPUCode/gui/src/com/maxeler/brainnetwork/gui/ComputeButton.java | Java | bsd-2-clause | 2,215 |
<?php
/**
* This file is part of the PHP Generics package.
*
* @package Generics
*/
namespace Generics\Streams;
use Generics\Streams\Interceptor\StreamInterceptor;
use Countable;
/**
 * This class provides a stream for standard output
 *
 * @author Maik Greubel <greubel@nkey.de>
 */
class StandardOutputStream implements OutputStream
{

    /**
     * Interceptor attached via setInterceptor(), if any
     *
     * @var StreamInterceptor
     */
    private $interceptor;

    /**
     * Handle to the php://stdout channel
     *
     * @var resource
     */
    private $stdout;

    /**
     * Create a new instance of StandardOutputStream
     */
    public function __construct()
    {
        $this->open();
    }

    /**
     * Opens a new standard output channel
     */
    private function open()
    {
        $this->stdout = fopen("php://stdout", "w");
    }

    /**
     *
     * {@inheritdoc}
     * @see \Generics\Streams\Stream::isOpen()
     */
    public function isOpen(): bool
    {
        return is_resource($this->stdout);
    }

    /**
     *
     * {@inheritdoc}
     * @see \Generics\Streams\OutputStream::flush()
     */
    public function flush()
    {
        if ($this->isOpen()) {
            fflush($this->stdout);
        }

        if ($this->interceptor instanceof StreamInterceptor) {
            $this->interceptor->reset();
        }
    }

    /**
     * A standard output stream is ready whenever it is open.
     *
     * {@inheritdoc}
     * @see \Generics\Streams\Stream::ready()
     */
    public function ready(): bool
    {
        return $this->isOpen();
    }

    /**
     * Nothing is buffered for reading back, so the count is always zero.
     *
     * {@inheritdoc}
     * @see Countable::count()
     */
    public function count()
    {
        return 0;
    }

    /**
     * Re-opens the channel and re-applies a previously set interceptor.
     *
     * {@inheritdoc}
     * @see \Generics\Resettable::reset()
     */
    public function reset()
    {
        $this->close();
        $this->open();

        if ($this->interceptor instanceof StreamInterceptor) {
            $this->interceptor->reset();
            $this->setInterceptor($this->interceptor);
        }
    }

    /**
     *
     * {@inheritdoc}
     * @see \Generics\Streams\OutputStream::write()
     */
    public function write($buffer)
    {
        if (! $this->isWriteable()) {
            return;
        }
        fwrite($this->stdout, $buffer);
    }

    /**
     *
     * {@inheritdoc}
     * @see \Generics\Streams\Stream::close()
     */
    public function close()
    {
        if (! $this->isOpen()) {
            return;
        }
        fclose($this->stdout);
        $this->stdout = null;
    }

    /**
     *
     * {@inheritdoc}
     * @see \Generics\Streams\OutputStream::isWriteable()
     */
    public function isWriteable(): bool
    {
        return $this->isOpen();
    }

    /**
     * Apply a stream interceptor
     *
     * @param StreamInterceptor $interceptor
     */
    public function setInterceptor(StreamInterceptor $interceptor)
    {
        $this->interceptor = $interceptor;
        stream_filter_append($this->stdout, $interceptor->getFilterName());
    }
}
| maikgreubel/phpgenerics | src/Generics/Streams/StandardOutputStream.php | PHP | bsd-2-clause | 2,960 |
# Copyright (c) 2003-present, Jodd Team (http://jodd.org)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Regenerates the typed accessor methods of ConvertBean.java in place:
# everything after the '@@generated' marker in the file is replaced by
# methods expanded from the templates below.

with open('ConvertBean.java', 'r') as f:
    java = f.read()

# Keep the hand-written part, up to and including the '@@generated' marker.
genStart = java.find('@@generated')
java = java[0:genStart + 11]

### -----------------------------------------------------------------

# [typeConverters index, wrapper type, primitive type, common default value]
types = [
    [0, 'Boolean', 'boolean', 'false'],
    [2, 'Integer', 'int', '0'],
    [4, 'Long', 'long', '0'],
    [6, 'Float', 'float', '0'],
    [8, 'Double', 'double', '0'],
    [10, 'Short', 'short', '(short) 0'],
    [12, 'Character', 'char', '(char) 0'],
    [14, 'Byte', 'byte', '(byte) 0'],
]

template = '''
	/**
	 * Converts value to <code>$T</code>.
	 */
	public $T to$T(Object value) {
		return ($T) typeConverters[#].convert(value);
	}

	/**
	 * Converts value to <code>$T</code>. Returns default value
	 * when conversion result is <code>null</code>
	 */
	public $T to$T(Object value, $T defaultValue) {
		$T result = ($T) typeConverters[#].convert(value);
		if (result == null) {
			return defaultValue;
		}
		return result;
	}

	/**
	 * Converts value to <code>$t</code>. Returns default value
	 * when conversion result is <code>null</code>.
	 */
	public $t to$PValue(Object value, $t defaultValue) {
		$T result = ($T) typeConverters[#++].convert(value);
		if (result == null) {
			return defaultValue;
		}
		return result.$tValue();
	}

	/**
	 * Converts value to <code>$t</code> with common default value.
	 */
	public $t to$PValue(Object value) {
		return to$PValue(value, $D);
	}
'''

for spec in types:
    data = template
    # '#++' must be replaced before '#', otherwise the plain '#'
    # substitution would also consume the first character of '#++'.
    data = data.replace('#++', str(spec[0] + 1))
    data = data.replace('#', str(spec[0]))
    data = data.replace('$T', spec[1])
    data = data.replace('$t', spec[2])
    data = data.replace('$P', spec[2].title())
    data = data.replace('$D', spec[3])
    java += data

### -----------------------------------------------------------------

# [typeConverters index, target type, method name suffix,
#  1 when a (value, defaultValue) overload should be generated]
types = [
    [16, 'boolean[]', 'BooleanArray', 0],
    [17, 'int[]', 'IntegerArray', 0],
    [18, 'long[]', 'LongArray', 0],
    [19, 'float[]', 'FloatArray', 0],
    [20, 'double[]', 'DoubleArray', 0],
    [21, 'short[]', 'ShortArray', 0],
    [22, 'char[]', 'CharacterArray', 0],
    [23, 'String', 'String', 1],
    [24, 'String[]', 'StringArray', 0],
    [25, 'Class', 'Class', 0],
    [26, 'Class[]', 'ClassArray', 0],
    [27, 'JDateTime', 'JDateTime', 1],
    [28, 'Date', 'Date', 1],
    [29, 'Calendar', 'Calendar', 1],
    [30, 'BigInteger', 'BigInteger', 1],
    [31, 'BigDecimal', 'BigDecimal', 1],
]

template = '''
	/**
	 * Converts value to <code>$T</code>.
	 */
	public $T to$N(Object value) {
		return ($T) typeConverters[#].convert(value);
	}
'''

template2 = '''
	/**
	 * Converts value to <code>$T</code>. Returns default value
	 * when conversion result is <code>null</code>
	 */
	public $T to$N(Object value, $T defaultValue) {
		$T result = ($T) typeConverters[#].convert(value);
		if (result == null) {
			return defaultValue;
		}
		return result;
	}
'''

for spec in types:
    data = template
    data = data.replace('#', str(spec[0]))
    data = data.replace('$T', spec[1])
    data = data.replace('$N', spec[2])
    java += data

    if spec[3] == 1:
        data = template2
        data = data.replace('#', str(spec[0]))
        data = data.replace('$T', spec[1])
        data = data.replace('$N', spec[2])
        java += data

### -----------------------------------------------------------------

java += '}'

with open('ConvertBean.java', 'w') as f:
    f.write(java)
package com.kbsriram.keypan.core;
import java.io.IOException;
import java.io.Reader;
import java.util.regex.Pattern;
/**
 * Collection of small static helpers: hex encoding, reader slurping,
 * fingerprint formatting and console/status logging.
 */
public final class CUtils
{
    /**
     * Hex-encodes the given bytes, two lowercase hex digits per byte.
     */
    public final static String byte2hex(byte[] b)
    {
        StringBuilder sb = new StringBuilder();
        for (int i=0; i<b.length; i++) {
            int v = b[i] & 0xff;
            sb.append(s_byte2hex[v >> 4]);
            sb.append(s_byte2hex[v & 0xf]);
        }
        return sb.toString();
    }

    /**
     * Reads the given reader to exhaustion and returns its content as a
     * single string. The reader is not closed here.
     */
    public final static String asString(Reader r)
        throws IOException
    {
        char[] buf = new char[2048];
        int nread;
        StringBuilder sb = new StringBuilder();
        while ((nread = r.read(buf)) > 0) {
            sb.append(buf, 0, nread);
        }
        return sb.toString();
    }

    /**
     * Formats a 40-character fingerprint as lowercase groups of four
     * characters separated by single spaces.
     */
    public final static String groupedFingerprint(String fp)
    {
        if (fp.length() != 40) {
            throw new IllegalArgumentException("bad fp");
        }
        StringBuilder sb = new StringBuilder();
        for (int i=0; i<40; i+=4) {
            if (i > 0) { sb.append(" "); }
            sb.append(fp.substring(i, i+4));
        }
        return sb.toString().toLowerCase();
    }

    /**
     * Builds a case-insensitive pattern that matches the 40-character
     * fingerprint with arbitrary whitespace/separator runs allowed
     * between each group of four characters.
     */
    public final static Pattern asPattern(String fp)
    {
        if (fp.length() != 40) {
            throw new IllegalArgumentException("bad fp");
        }
        StringBuilder sb = new StringBuilder();
        for (int i=0; i<40; i+=4) {
            // Allow any run of space-class or whitespace chars between groups.
            if (i > 0) { sb.append("[\\p{Z}\\s]*"); }
            sb.append(fp.substring(i, i+4));
        }
        return Pattern.compile(sb.toString(), Pattern.CASE_INSENSITIVE);
    }

    /**
     * Prints a one-line status note. In debug mode the note is printed
     * verbatim on its own line; otherwise it is truncated/padded to
     * MAX_NOTE_LEN columns and, in CLI mode, terminated with a carriage
     * return so the next note overwrites it in place.
     */
    public final static <T> void lognote(Class<T> cls, String msg)
    {
        if (s_debug) {
            System.out.println("NOTE: "+cls.getSimpleName()+": "+msg);
            return;
        }

        int mlen = msg.length();
        if (mlen > MAX_NOTE_LEN) {
            msg = msg.substring(0, MAX_NOTE_LEN-3)+"...";
            mlen = MAX_NOTE_LEN;
        }
        System.out.print(msg);
        // Pad with spaces so a shorter note fully overwrites a longer one.
        int delta = (MAX_NOTE_LEN - msg.length());
        while (delta > 0) {
            System.out.print(" ");
            delta--;
        }
        if (s_cli) { System.out.print("\r"); }
        else { System.out.println(); }
        System.out.flush();
    }

    /**
     * Blanks out the in-place status line written by lognote().
     * No-op in debug mode, where notes are full lines.
     */
    public final static void clear()
    {
        if (s_debug) { return; }
        for (int i=0; i<MAX_NOTE_LEN; i++) {
            System.out.print(" ");
        }
        System.out.print("\r");
        System.out.flush();
    }

    /**
     * Returns null for a null or empty string, the string itself otherwise.
     */
    public final static String nullIfEmpty(String s)
    {
        if ((s == null) || (s.length() == 0)) {
            return null;
        }
        else {
            return s;
        }
    }

    /** Enables or disables debug logging. */
    public final static void setDebug(boolean v)
    { s_debug = v; }

    /** Enables or disables CLI mode (in-place status lines). */
    public final static void setCLI(boolean v)
    { s_cli = v; }

    /** Returns whether debug logging is enabled. */
    public final static boolean isDebug()
    { return s_debug; }

    /** Logs a warning without an associated throwable. */
    public final static <T> void logw(Class<T> cls, String msg)
    { logw(cls, msg, null); }

    /** Logs a warning to stderr, printing the stack trace when given. */
    public final static <T> void logw(Class<T> cls, String msg, Throwable th)
    {
        System.err.println("WARN: "+cls.getSimpleName()+": "+msg);
        if (th != null) { th.printStackTrace(); }
    }

    /** Logs a debug message without an associated throwable. */
    public final static <T> void logd(Class<T> cls, String msg)
    { logd(cls, msg, null); }

    /** Logs a debug message to stderr; no-op unless debug mode is on. */
    public final static <T> void logd(Class<T> cls, String msg, Throwable th)
    {
        if (s_debug) {
            System.err.println("DEBUG: "+cls.getSimpleName()+": "+msg);
            if (th != null) { th.printStackTrace(); }
        }
    }

    // Logging mode flags toggled via setDebug()/setCLI().
    private static boolean s_debug = false;
    private static boolean s_cli = true;

    // Width of the in-place status line maintained by lognote()/clear().
    private final static int MAX_NOTE_LEN = 35;

    // Nibble-to-hex-digit lookup table used by byte2hex().
    private final static char[] s_byte2hex = new char[] {
        '0', '1', '2', '3', '4', '5', '6', '7',
        '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'
    };
}
| kbsriram/keypan | src/core/com/kbsriram/keypan/core/CUtils.java | Java | bsd-2-clause | 3,841 |
package cs499blue.models;
import cs499blue.algorithms.Distance;
/**
* author: vwilson
* date: 4/1/14
*/
/**
 * An undirected edge between two vertices; its weight is the distance
 * between its endpoints.
 */
public class Edge {
    // Endpoints of this edge
    private final Vertex a, b;

    public Edge(Vertex a, Vertex b) {
        this.a = a;
        this.b = b;
    }

    public Vertex getA() {
        return a;
    }

    public Vertex getB() {
        return b;
    }

    /** The weight is computed on demand from the endpoint distance. */
    public Double getWeight() {
        return Distance.distanceFrom(a, b);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Edge other = (Edge) o;
        return a.equals(other.a) && b.equals(other.b);
    }

    @Override
    public int hashCode() {
        // Same 31-based combination as before: 31 * a.hashCode() + b.hashCode()
        return 31 * a.hashCode() + b.hashCode();
    }
}
| vwilson/CS499-Shortest-Path | CS499/src/main/java/cs499blue/models/Edge.java | Java | bsd-2-clause | 887 |
var express = require('express');
var router = express.Router();

/* GET home page. */
router.get('/', function (req, res, next) {
  res.render('index', { title: 'Express' });
});

/* GET socket.io test page. */
router.get('/socket', function (req, res, next) {
  res.render('socket', { title: 'socket.io test' });
});

module.exports = router;
#!/usr/bin/env python
#
# :copyright: (c) 2013 by Mike Taylor
# :author: Mike Taylor
# :license: BSD 2-Clause
#
# See LICENSE file for details
#
import os
import time
import json
import argparse
import pyrax
_data_centers = [ 'DFW', 'ORD' ]
_commands = [ 'list' ]
_config_file = '~/.rackspace.cfg'
_username = 'ops'
_server_info_keys = ( 'accessIPv4', 'status', 'name' )
_marker = '##### auto-generated for rsinfo #####'
_usage = """
list list details for the servers
If a server name is specified, the list will
only contain that server
If a datacenter has been given, the list will only
contain those servers
hosts generate output that can be used in /etc/hosts
ssh generate output that can be used in ~/.ssh/config
Config File Format:
[rackspace_cloud]
username = USERNAME
api_key = KEY
"""
def loadConfig():
    """Parse the command line and return the argparse namespace.

    Exposes -c/--config, -d/--datacenter, -s/--server and a positional
    command ('list', 'hosts' or 'ssh'); _usage is shown as the epilog.
    """
    parser = argparse.ArgumentParser(epilog=_usage, formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('-c', '--config', default=_config_file, help='where to retrieve configuration items and the rackspace API keys (default: %(default)s)')
    parser.add_argument('-d', '--datacenter', default='ALL', help='datacenter to work within (default: %(default)s)', choices=_data_centers)
    parser.add_argument('-s', '--server', help='limit output to the named server')
    parser.add_argument('command', choices=['list', 'hosts', 'ssh'])
    return parser.parse_args()
def initCredentials(datacenter):
    """Point pyrax at Rackspace identity and load API credentials for the
    given datacenter from the configured credential file.

    NOTE(review): relies on the module-level ``cfg`` set in __main__.
    """
    pyrax.set_setting("identity_type", "rackspace")
    pyrax.set_credential_file(os.path.expanduser(cfg.config), datacenter)
def loadServers(datacenters):
    """Return a dict mapping server name -> novaclient server object for
    every cloud server found in the given datacenters.

    A name seen in more than one datacenter keeps the last server found.
    """
    # Shape of the novaclient server objects being collected:
    # {'OS-EXT-STS:task_state': None,
    #  'addresses': { u'public': [],
    #                 u'private': []
    #               },
    #  'links': [],
    #  'image': { u'id': u'GUID',
    #             u'links': []
    #           },
    #  'manager': <novaclient.v1_1.servers.ServerManager object at 0x101abb450>,
    #  'OS-EXT-STS:vm_state': u'active',
    #  'flavor': { u'id': u'2',
    #              u'links': []
    #            },
    #  'id': u'',
    #  'user_id': u'NNN',
    #  'OS-DCF:diskConfig': u'AUTO',
    #  'accessIPv4': u'',
    #  'accessIPv6': u'',
    #  'progress': 100,
    #  'OS-EXT-STS:power_state': 1,
    #  'metadata': {},
    #  'status': u'ACTIVE',
    #  'updated': u'2013-04-25T05:11:09Z',
    #  'hostId': u'',
    #  'key_name': None,
    #  'name': u'sssss',
    #  'created': u'2013-02-11T19:33:31Z',
    #  'tenant_id': u'NNN',
    #  '_info': {},
    #  'config_drive': u'',
    #  '_loaded': True
    # }
    result = {}
    for dc in datacenters:
        initCredentials(dc)
        print 'searching for servers in', dc
        cs = pyrax.cloudservers
        for s in cs.servers.list(detailed=True):
            if s.name not in result:
                result[s.name] = None
            result[s.name] = s
    print len(result), 'servers processed'
    return result
def loadFileWithoutAutoGeneratedItems(filename, marker):
print 'loading', filename
result = []
f = False
for line in open(filename, 'r').readlines():
if line.startswith(marker):
f = not f
else:
if not f:
result.append(line)
return result
def saveFile(filepath, filename, cleandata, newdata, marker, allowAlt=False):
fullname = os.path.join(filepath, filename)
print 'saving', fullname
try:
h = open(fullname, 'w+')
h.write(''.join(cleandata))
h.write('\n%s\n' % marker)
h.write(''.join(newdata))
h.write('\n%s\n' % marker)
except IOError:
print 'unable to write to', fullname
if allowAlt:
print 'attempting alternate location'
saveFile('/tmp', filename, cleandata, newdata, marker)
def getServerInfo(serverName, serverList):
result = None
if cfg.datacenter == 'ALL':
s = ' in server list'
else:
s = ' in datacenter %s' % cfg.datacenter
if serverName not in serverList:
print '%s not found %s' % (serverName, s)
else:
item = serverList[serverName]
result = {}
for key in _server_info_keys:
result[key] = item.__getattr__(key)
return result
_hosts_config = """%(accessIPv4)s\t%(name)s\n"""
def generateHostsFile(servers):
    """Rewrite /etc/hosts, replacing the previously auto-generated block
    with one 'IP<tab>name' line per known server."""
    clean = loadFileWithoutAutoGeneratedItems('/etc/hosts', _marker)
    new = []
    for s in servers:
        r = getServerInfo(s, servers)
        new.append(_hosts_config % r)
    # allowAlt: fall back to /tmp when /etc is not writable.
    saveFile('/etc', 'hosts', clean, new, _marker, allowAlt=True)
_ssh_config = """Host %(name)s
User %(username)s
StrictHostKeyChecking no
IdentityFile ~/.ssh/id_rsa
"""
def generateConfigFile(servers):
    """Rewrite ~/.ssh/config, replacing the previously auto-generated
    block with a Host stanza per known server (user taken from _username)."""
    clean = loadFileWithoutAutoGeneratedItems('~/.ssh/config', _marker)
    new = []
    for s in servers:
        r = getServerInfo(s, servers)
        r['username'] = _username
        new.append(_ssh_config % r)
    # allowAlt: fall back to /tmp when ~/.ssh is not writable.
    saveFile('~/.ssh', 'config', clean, new, _marker, allowAlt=True)
def getCommandParam(cmdText, commands):
    """Return the list item following cmdText in commands, or '' when
    cmdText is absent or is the last item."""
    try:
        p = commands.index(cmdText)
        result = commands[p + 1]
    except (ValueError, IndexError):
        # Fix: the bare 'except' also hid unrelated errors. ValueError:
        # cmdText not present; IndexError: cmdText was the last item.
        result = ''
    return result
if __name__ == '__main__':
cfg = loadConfig()
if cfg.datacenter == 'ALL':
datacenters = _data_centers
else:
datacenters = [ datacenter ]
servers = loadServers(datacenters)
if cfg.command == 'list':
results = []
if cfg.server is None:
for s in servers:
r = getServerInfo(s, servers)
if r is not None:
results.append(r)
else:
r = getServerInfo(cfg.server, servers)
if r is not None:
results.append(r)
print json.dumps(results)
elif cfg.command == 'hosts':
generateHostsFile(servers)
elif cfg.command == 'ssh':
generateConfigFile(servers)
| bear/rsinfo | rsinfo.py | Python | bsd-2-clause | 6,094 |
# Versioned, keg-only Homebrew formula for Percona Server 5.6, a
# drop-in MySQL replacement built from source with CMake.
class PerconaServerAT56 < Formula
  desc "Drop-in MySQL replacement"
  homepage "https://www.percona.com"
  # NOTE: Please keep these values in sync with git.rb when updating.
  url "https://www.percona.com/downloads/Percona-Server-5.6/Percona-Server-5.6.37-82.2/source/tarball/percona-server-5.6.37-82.2.tar.gz"
  version "5.6.37-82.2"
  sha256 "3cf04b64c8bf5b9cc1ea1a68c54ba77a4709d9c9051314e70a4cbd4c904da702"

  bottle do
    sha256 "2eca6a7f69f893abeadf61df5eccc97510099686275140f1934e5ee8122c202f" => :high_sierra
    sha256 "a5dc522f52d0c853b2ff04291884156111729f67379b24fa7d7022bcd3347632" => :sierra
    sha256 "a9ba8cad6f5237e783ad3495206f3ba63365fff78048e34008f23cdd500a2c90" => :el_capitan
    sha256 "ef56b2abe4ad121257e62cc4178c459dc034407b18ffc06bea6cf39c60d7286b" => :yosemite
  end

  keg_only :versioned_formula

  option "with-test", "Build with unit tests"
  option "with-embedded", "Build the embedded server"
  option "with-memcached", "Build with InnoDB Memcached plugin"
  option "with-local-infile", "Build with local infile loading support"

  depends_on "cmake" => :build
  depends_on "pidof" unless MacOS.version >= :mountain_lion
  depends_on "openssl"

  # Where the database files should be located. Existing installs have them
  # under var/percona, but going forward they will be under var/mysql to be
  # shared with the mysql and mariadb formulae.
  def datadir
    # Memoized so the directory existence check runs only once.
    @datadir ||= (var/"percona").directory? ? var/"percona" : var/"mysql"
  end

  pour_bottle? do
    reason "The bottle needs a var/mysql datadir (yours is var/percona)."
    satisfy { datadir == var/"mysql" }
  end

  def install
    # Don't hard-code the libtool path. See:
    # https://github.com/Homebrew/homebrew/issues/20185
    inreplace "cmake/libutils.cmake",
      "COMMAND /usr/bin/libtool -static -o ${TARGET_LOCATION}",
      "COMMAND libtool -static -o ${TARGET_LOCATION}"

    args = std_cmake_args + %W[
      -DMYSQL_DATADIR=#{datadir}
      -DINSTALL_PLUGINDIR=lib/plugin
      -DSYSCONFDIR=#{etc}
      -DINSTALL_MANDIR=#{man}
      -DINSTALL_DOCDIR=#{doc}
      -DINSTALL_INFODIR=#{info}
      -DINSTALL_INCLUDEDIR=include/mysql
      -DINSTALL_MYSQLSHAREDIR=#{share.basename}/mysql
      -DWITH_SSL=yes
      -DDEFAULT_CHARSET=utf8
      -DDEFAULT_COLLATION=utf8_general_ci
      -DCOMPILATION_COMMENT=Homebrew
      -DWITH_EDITLINE=system
      -DCMAKE_FIND_FRAMEWORK=LAST
      -DCMAKE_VERBOSE_MAKEFILE=ON
    ]

    # PAM plugin is Linux-only at the moment
    args.concat %w[
      -DWITHOUT_AUTH_PAM=1
      -DWITHOUT_AUTH_PAM_COMPAT=1
      -DWITHOUT_DIALOG=1
    ]

    # TokuDB is broken on MacOsX
    # https://bugs.launchpad.net/percona-server/+bug/1531446
    args.concat %w[-DWITHOUT_TOKUDB=1]

    # To enable unit testing at build, we need to download the unit testing suite
    if build.with? "test"
      args << "-DENABLE_DOWNLOADS=ON"
    else
      args << "-DWITH_UNIT_TESTS=OFF"
    end

    # Build the embedded server
    args << "-DWITH_EMBEDDED_SERVER=ON" if build.with? "embedded"

    # Build with InnoDB Memcached plugin
    args << "-DWITH_INNODB_MEMCACHED=ON" if build.with? "memcached"

    # Build with local infile loading support
    args << "-DENABLED_LOCAL_INFILE=1" if build.with? "local-infile"

    # NOTE(review): args already begins with std_cmake_args (see above), so
    # passing *std_cmake_args here repeats those flags. Harmless (the later
    # occurrence wins for CMake cache variables) but redundant.
    system "cmake", ".", *std_cmake_args, *args
    system "make"
    system "make", "install"

    # Don't create databases inside of the prefix!
    # See: https://github.com/Homebrew/homebrew/issues/4975
    rm_rf prefix+"data"

    # Link the setup script into bin
    bin.install_symlink prefix/"scripts/mysql_install_db"
    # Fix up the control script and link into bin
    inreplace "#{prefix}/support-files/mysql.server",
              /^(PATH=".*)(")/, "\\1:#{HOMEBREW_PREFIX}/bin\\2"
    bin.install_symlink prefix/"support-files/mysql.server"

    # mysqlaccess deprecated on 5.6.17, and removed in 5.7.4.
    # See: https://bugs.mysql.com/bug.php?id=69012
    # Move mysqlaccess to libexec
    libexec.mkpath
    mv "#{bin}/mysqlaccess", libexec
    mv "#{bin}/mysqlaccess.conf", libexec

    # Install my.cnf that binds to 127.0.0.1 by default
    (buildpath/"my.cnf").write <<-EOS.undent
      # Default Homebrew MySQL server config
      [mysqld]
      # Only allow connections from localhost
      bind-address = 127.0.0.1
    EOS
    etc.install "my.cnf"
  end

  def post_install
    # Make sure that data directory exists
    datadir.mkpath
    # Initialize the system tables only on first install.
    unless File.exist? "#{datadir}/mysql/user.frm"
      ENV["TMPDIR"] = nil
      system "#{bin}/mysql_install_db", "--verbose", "--user=#{ENV["USER"]}",
        "--basedir=#{prefix}", "--datadir=#{datadir}", "--tmpdir=/tmp"
    end
  end

  def caveats; <<-EOS.undent
    A "/etc/my.cnf" from another install may interfere with a Homebrew-built
    server starting up correctly.

    MySQL is configured to only allow connections from localhost by default

    To connect:
        mysql -uroot
    EOS
  end

  plist_options :manual => "mysql.server start"

  # launchd job definition keeping mysqld_safe running from #{var}.
  def plist; <<-EOS.undent
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
      <key>KeepAlive</key>
      <true/>
      <key>Label</key>
      <string>#{plist_name}</string>
      <key>Program</key>
      <string>#{opt_bin}/mysqld_safe</string>
      <key>RunAtLoad</key>
      <true/>
      <key>WorkingDirectory</key>
      <string>#{var}</string>
    </dict>
    </plist>
    EOS
  end

  test do
    # Syntax-check the launcher script, then run the bundled status test.
    system "/bin/sh", "-n", "#{bin}/mysqld_safe"
    (prefix/"mysql-test").cd do
      system "./mysql-test-run.pl", "status", "--vardir=#{testpath}"
    end
  end
end
| bfontaine/homebrew-core | Formula/percona-server@5.6.rb | Ruby | bsd-2-clause | 5,617 |
// This file was procedurally generated from the following sources:
// - src/dstr-assignment/obj-id-identifier-resolution-last.case
// - src/dstr-assignment/default/for-of.template
/*---
description: Evaluation of DestructuringAssignmentTarget (last of many) (For..of statement)
esid: sec-for-in-and-for-of-statements-runtime-semantics-labelledevaluation
es6id: 13.7.5.11
features: [destructuring-binding]
flags: [generated]
info: |
IterationStatement :
for ( LeftHandSideExpression of AssignmentExpression ) Statement
1. Let keyResult be the result of performing ? ForIn/OfHeadEvaluation(« »,
AssignmentExpression, iterate).
2. Return ? ForIn/OfBodyEvaluation(LeftHandSideExpression, Statement,
keyResult, assignment, labelSet).
13.7.5.13 Runtime Semantics: ForIn/OfBodyEvaluation
[...]
4. If destructuring is true and if lhsKind is assignment, then
a. Assert: lhs is a LeftHandSideExpression.
b. Let assignmentPattern be the parse of the source text corresponding to
lhs using AssignmentPattern as the goal symbol.
[...]
---*/
var x = null;
var w;
var counter = 0;

// Each iterated value is destructured by the `{ w, x }` assignment
// pattern; `x` (last target in the pattern) must resolve to the
// binding declared above and receive the value 4.
for ({ w, x } of [{ x: 4 }]) {
  assert.sameValue(x, 4);
  counter += 1;
}

// The loop body must have executed exactly once.
assert.sameValue(counter, 1);
| sebastienros/jint | Jint.Tests.Test262/test/language/statements/for-of/dstr-obj-id-identifier-resolution-last.js | JavaScript | bsd-2-clause | 1,252 |
/*
* Copyright (c) 2014 Abdul Hannan Ahsan <abdulhannan.ahsan@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package aha.datastructure;
/**
 * Minimal doubly linked list node. Each node carries a value and links
 * to its previous and next neighbours (null at the list ends).
 */
public class DoubleLinkedList<T>
{
    // Payload stored in this node
    private T value;
    // Neighbouring nodes; null marks an end of the list
    private DoubleLinkedList<T> previous;
    private DoubleLinkedList<T> next;

    /** Creates a detached node holding the given value. */
    public DoubleLinkedList( T value )
    {
        this( value, null, null );
    }

    /** Creates a node holding the given value, linked to the given neighbours. */
    public DoubleLinkedList( T value, DoubleLinkedList<T> previous, DoubleLinkedList<T> next )
    {
        this.value = value;
        this.previous = previous;
        this.next = next;
    }

    /** Renders the list from this node onwards as "[v1,v2,...]". */
    @Override
    public String toString()
    {
        StringBuilder text = new StringBuilder( "[" );
        text.append( value );
        for( DoubleLinkedList<T> node = next; node != null; node = node.getNext() )
        {
            text.append( ',' ).append( node.getValue() );
        }
        return text.append( ']' ).toString();
    }

    public T getValue()
    {
        return value;
    }

    public DoubleLinkedList<T> getPrevious()
    {
        return previous;
    }

    public DoubleLinkedList<T> getNext()
    {
        return next;
    }

    public void setValue( final T value )
    {
        this.value = value;
    }

    public void setPrevious( final DoubleLinkedList<T> previous )
    {
        this.previous = previous;
    }

    public void setNext( final DoubleLinkedList<T> next )
    {
        this.next = next;
    }

    /**
     * Builds a doubly linked list from the given array and returns its
     * head node, or null for an empty array.
     */
    public static<T>
    DoubleLinkedList<T> createList( final T[] values )
    {
        if( values.length == 0 )
        {
            return null;
        }

        DoubleLinkedList<T> head = new DoubleLinkedList<T>( values[0] );
        DoubleLinkedList<T> tail = head;
        for( int idx = 1; idx < values.length; ++idx )
        {
            // Link the fresh node behind the current tail, then advance.
            DoubleLinkedList<T> node = new DoubleLinkedList<T>( values[idx], tail, null );
            tail.setNext( node );
            tail = node;
        }
        return head;
    }

    /** Tiny smoke test printing a sample list. */
    public static void main( String[] args )
    {
        Integer[] values = { 34, -29, 45, 342, 99, 7 };
        DoubleLinkedList<Integer> dlist = DoubleLinkedList.createList( values );
        System.out.println( dlist );
    }
}
| aha0x0x/snippets | java/aha/datastructure/DoubleLinkedList.java | Java | bsd-2-clause | 3,509 |
# Example: How to prepare a new refund with the Mollie API.
#
import os
from mollie.api.client import Client
from mollie.api.error import Error
def main():
    """Refund EUR 2.00 of the first refundable payment on the account.

    Returns an HTML snippet describing what happened, or an error message
    string if the Mollie API call failed.
    """
    try:
        #
        # Initialize the Mollie API library with your API key.
        #
        # See: https://www.mollie.com/dashboard/settings/profiles
        #
        api_key = os.environ.get("MOLLIE_API_KEY", "test_test")
        mollie_client = Client()
        mollie_client.set_api_key(api_key)

        body = "<p>Attempting to retrieve the first page of payments and grabbing the first.</p>"

        payments = mollie_client.payments.list()
        if not len(payments):
            body += "<p>You have no payments. You can create one from the examples.</p>"
            return body

        payment = next(payments)
        # BUG FIX: payment_id was previously initialized to "" and never
        # assigned, so the refund was created against no payment at all.
        # Use the id of the payment we just fetched.
        payment_id = payment.id

        if (
            payment.can_be_refunded()
            and payment.amount_remaining["currency"] == "EUR"
            and float(payment.amount_remaining["value"]) >= 2.0
        ):
            data = {"amount": {"value": "2.00", "currency": "EUR"}}
            refund = mollie_client.payment_refunds.with_parent_id(payment_id).create(data)
            body += f'<p>{refund.amount["currency"]} {refund.amount["value"]} of payment {payment_id} refunded</p>'
        else:
            body += f"<p>Payment {payment_id} can not be refunded</p>"
        return body

    except Error as err:
        return f"API call failed: {err}"
# Allow running this example directly; the HTML body (or the error
# message) produced by main() is printed to stdout.
if __name__ == "__main__":
    print(main())
| mollie/mollie-api-python | examples/11-refund-payment.py | Python | bsd-2-clause | 1,522 |
# Homebrew formula building git-gui and gitk, the Tcl/Tk user interfaces
# shipped inside the upstream git source tree, against Homebrew's tcl-tk
# instead of the system Tcl/Tk framework.
class GitGui < Formula
  desc "Tcl/Tk UI for the git revision control system"
  homepage "https://git-scm.com"
  # NOTE: Please keep these values in sync with git.rb when updating.
  url "https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.31.1.tar.xz"
  sha256 "9f61417a44d5b954a5012b6f34e526a3336dcf5dd720e2bb7ada92ad8b3d6680"
  license "GPL-2.0-only"
  head "https://github.com/git/git.git", shallow: false
  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "f6941b6984b73e4f89c9635a6113a0401461821febd431116dd06cf4971ec469"
    sha256 cellar: :any_skip_relocation, big_sur: "430d3e73a3d07bb170c1bd3eb3db3a5cbb283e1fd72671e9607f00cce23982eb"
    sha256 cellar: :any_skip_relocation, catalina: "603bf37ef37e46f4ec88855d44bd61a227299638dda5690c367b55c733ba3cf7"
    sha256 cellar: :any_skip_relocation, mojave: "59df29678a0fa2c9546ee91ec1c8d4061ad3ae7436ecb4beef92fecd6c62de41"
  end
  # Only Tcl/Tk is needed beyond the git tarball itself.
  depends_on "tcl-tk"
  # Patch to fix Homebrew/homebrew-core#68798.
  # Remove when the following PR has been merged
  # and included in a release:
  # https://github.com/git/git/pull/944
  patch do
    url "https://github.com/git/git/commit/1db62e44b7ec93b6654271ef34065b31496cd02e.patch?full_index=1"
    sha256 "0c7816ee9c8ddd7aa38aa29541c9138997650713bce67bdef501b1de0b50f539"
  end
  def install
    # build verbosely
    ENV["V"] = "1"
    # By setting TKFRAMEWORK to a non-existent directory we ensure that
    # the git makefiles don't install a .app for git-gui
    # We also tell git to use the homebrew-installed wish binary from tcl-tk.
    # See https://github.com/Homebrew/homebrew-core/issues/36390
    tcl_bin = Formula["tcl-tk"].opt_bin
    args = %W[
      TKFRAMEWORK=/dev/null
      prefix=#{prefix}
      gitexecdir=#{bin}
      sysconfdir=#{etc}
      CC=#{ENV.cc}
      CFLAGS=#{ENV.cflags}
      LDFLAGS=#{ENV.ldflags}
      TCL_PATH=#{tcl_bin}/tclsh
      TCLTK_PATH=#{tcl_bin}/wish
    ]
    # git-gui and gitk each live in their own subdirectory of the git tree
    # with a standalone Makefile; build and install both.
    system "make", "-C", "git-gui", "install", *args
    system "make", "-C", "gitk-git", "install", *args
  end
  test do
    # Lightweight smoke test that the installed git-gui script runs.
    system bin/"git-gui", "--version"
  end
end
| cblecker/homebrew-core | Formula/git-gui.rb | Ruby | bsd-2-clause | 2,119 |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
namespace Free.Controls.TreeView.Tree
{
/// <summary>
/// Provides a simple ready to use implementation of <see cref="ITreeModel"/>. Warning: this class is not optimized
/// to work with big amount of data. In this case create you own implementation of <c>ITreeModel</c>, and pay attention
/// on GetChildren and IsLeaf methods.
/// </summary>
public class TreeModel : ITreeModel
{
private Node _root;
public Node Root
{
get { return _root; }
}
public Collection<Node> Nodes
{
get { return _root.Nodes; }
}
public TreeModel()
{
_root=new Node();
_root.Model=this;
}
public TreePath GetPath(Node node)
{
if(node==_root)
return TreePath.Empty;
else
{
Stack<object> stack=new Stack<object>();
while(node!=_root)
{
stack.Push(node);
node=node.Parent;
}
return new TreePath(stack.ToArray());
}
}
public Node FindNode(TreePath path)
{
if(path.IsEmpty())
return _root;
else
return FindNode(_root, path, 0);
}
private Node FindNode(Node root, TreePath path, int level)
{
foreach(Node node in root.Nodes)
if(node==path.FullPath[level])
{
if(level==path.FullPath.Length-1)
return node;
else
return FindNode(node, path, level+1);
}
return null;
}
#region ITreeModel Members
public System.Collections.IEnumerable GetChildren(TreePath treePath)
{
Node node=FindNode(treePath);
if(node!=null)
foreach(Node n in node.Nodes)
yield return n;
else
yield break;
}
public bool IsLeaf(TreePath treePath)
{
Node node=FindNode(treePath);
if(node!=null)
return node.IsLeaf;
else
throw new ArgumentException("treePath");
}
public event EventHandler<TreeModelEventArgs> NodesChanged;
internal void OnNodesChanged(TreeModelEventArgs args)
{
if(NodesChanged!=null)
NodesChanged(this, args);
}
public event EventHandler<TreePathEventArgs> StructureChanged;
public void OnStructureChanged(TreePathEventArgs args)
{
if(StructureChanged!=null)
StructureChanged(this, args);
}
public event EventHandler<TreeModelEventArgs> NodesInserted;
internal void OnNodeInserted(Node parent, int index, Node node)
{
if(NodesInserted!=null)
{
TreeModelEventArgs args=new TreeModelEventArgs(GetPath(parent), new int[] { index }, new object[] { node });
NodesInserted(this, args);
}
}
public event EventHandler<TreeModelEventArgs> NodesRemoved;
internal void OnNodeRemoved(Node parent, int index, Node node)
{
if(NodesRemoved!=null)
{
TreeModelEventArgs args=new TreeModelEventArgs(GetPath(parent), new int[] { index }, new object[] { node });
NodesRemoved(this, args);
}
}
#endregion
}
}
| shintadono/Free.Controls.TreeView | Tree/TreeModel.cs | C# | bsd-2-clause | 2,834 |
/*
* Copyright (c) 2012, JInterval Project.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.java.jinterval.field;
import java.math.RoundingMode;
import net.java.jinterval.rational.BinaryValueSet;
import net.java.jinterval.rational.ExtendedRational;
import net.java.jinterval.rational.ExtendedRationalContexts;
import net.java.jinterval.rational.ExtendedRationalContext;
import org.apache.commons.math3.Field;
/**
*
*/
public class RationalField implements Field<RationalFieldElement> {
private ExtendedRationalContext context;
private final RationalFieldElement zero;
private final RationalFieldElement one;
public RationalField(RoundingMode rm) {
this(BinaryValueSet.BINARY64, rm);
}
public RationalField(BinaryValueSet valueSet, RoundingMode rm) {
this(ExtendedRationalContexts.valueOf(valueSet, rm));
}
public RationalField(ExtendedRationalContext context) {
this.context = context;
zero = get(ExtendedRational.zero());
one = get(ExtendedRational.one());
}
public RationalFieldElement getZero() {
return zero;
}
public RationalFieldElement getOne() {
return one;
}
public RationalFieldElement get(Number value) {
return new RationalFieldElement(this, context.rnd(value));
}
public RationalFieldElement get(ExtendedRational value) {
return new RationalFieldElement(this, context.rnd(value));
}
public Class<RationalFieldElement> getRuntimeClass() {
return RationalFieldElement.class;
}
ExtendedRationalContext getContext() {
return context;
}
}
| jinterval/jinterval | jinterval-field/src/main/java/net/java/jinterval/field/RationalField.java | Java | bsd-2-clause | 2,936 |
# -*- coding: utf-8 -*-
# This code is distributed under the two-clause BSD license.
# Copyright (c) 2012-2013 Raphaël Barrois
from __future__ import absolute_import, unicode_literals
import logging
from django.core import exceptions
from django.db import models
from . import conf
# Permission levels used by the auth-groupe-x protocol; the string values
# mirror what the remote service sends back.
PERM_USER = 'user'
PERM_GROUP_MEMBER = 'grpmember'
PERM_GROUP_ADMIN = 'grpadmin'
PERM_ADMIN = 'admin'
# Tuple of all known permission levels.
PERM_LEVELS = (
    PERM_USER,
    PERM_GROUP_MEMBER,
    PERM_GROUP_ADMIN,
    PERM_ADMIN,
)
# Module-level logger for authentication events.
logger = logging.getLogger(__name__)
def get_model(model_name):
    """Retrieve a django model.

    This handles:
    - Explicit models.Model subclass
    - Absolute dotted path to import the model

    Raises:
        ValueError: if ``model_name`` is a string without a module part.
    """
    # BUG FIX: the issubclass() arguments were swapped
    # (``issubclass(models.Model, model_name)``), so an explicit model class
    # was never recognised and fell through to the dotted-path branch below,
    # which fails on a class object.
    if isinstance(model_name, type) and issubclass(model_name, models.Model):
        return model_name

    # Not a Model subclass, must be a dotted class path
    if '.' not in model_name:
        raise ValueError("Invalid model name %s: should include module name."
                         % model_name)
    app, cls = model_name.rsplit('.', 1)
    return models.get_model(app, cls)
class AuthResult(object):
    """Result of an authentication query.

    Wraps the raw dict returned by the auth-groupe-x service and exposes
    its fields as read-only properties, plus the derived permission set.
    """

    def __init__(self, data):
        self.data = data or {}
        self.perms = self._setup_perms(self.data)

    def __repr__(self):
        return '<AuthResult: [%s / %s]>' % (self.data, self.perms)

    def _setup_perms(self, data):
        """Derive the set of permission levels from the raw auth data."""
        # Everybody who authenticated successfully has at least 'user'.
        perms = set([PERM_USER])
        if 'perms' in data:
            perms.add(data['perms'])
        group_auth = data.get('grpauth')
        if group_auth == 'admin':
            # Group admins implicitly count as group members as well.
            perms.update((PERM_GROUP_ADMIN, PERM_GROUP_MEMBER))
        elif group_auth == 'membre':
            perms.add(PERM_GROUP_MEMBER)
        return perms

    def _field(self, key):
        """Return a field of the raw data, defaulting to an empty string."""
        return self.data.get(key, '')

    @property
    def username(self):
        return self._field('username')

    @property
    def firstname(self):
        return self._field('firstname')

    @property
    def lastname(self):
        return self._field('lastname')

    @property
    def promo(self):
        return self._field('promo')

    @property
    def email(self):
        return self._field('email')

    @property
    def is_dead(self):
        # A non-empty 'deathdate' field marks the account holder as deceased.
        return bool(self._field('deathdate'))

    @property
    def is_admin(self):
        return PERM_ADMIN in self.perms

    def has_perm(self, perm):
        """Tell whether this result grants the given permission level."""
        return perm in self.perms
class AuthGroupeXMixin(object):
    """Shared workflow for auth-groupe-x authentication backends.

    Subclasses supply user storage through the required extension points;
    this mixin drives the authenticate / create / update sequence.
    """

    def __init__(self, config=None, *args, **kwargs):
        super(AuthGroupeXMixin, self).__init__(*args, **kwargs)
        self.config = config or conf.AuthGroupeXConf()

    # Public API
    # ==========

    def authenticate(self, **kwargs):
        """Create a user if the authgroupex data has been passed.

        This data should be present in the 'authgroupex' keyword argument.
        """
        if 'authgroupex' not in kwargs:
            logger.info('Trying to authenticate, no authgroupex in data.')
            return None
        auth_data = kwargs['authgroupex']

        if not auth_data.username:
            logger.error('Received a AuthResult object without a username.')
            return None

        try:
            user = self._fetch_user(auth_data.username)
        except exceptions.ObjectDoesNotExist:
            # Unknown user: try to create one from the remote data.
            try:
                user = self._create_user_from_auth_data(auth_data)
            except ValueError:
                logger.warning('Received authgroupex with invalid name %s',
                               auth_data.username)
                return None

        self._update_user(user, auth_data)
        return user

    # Required extension points
    # =========================

    def get_user(self, user_id):
        raise NotImplementedError()

    def _fetch_user(self, username):
        raise NotImplementedError()

    def _create_user(self, username):
        raise NotImplementedError()

    # Optional extension points
    # =========================

    def _set_staff(self, user, is_staff):
        # User models without an is_staff attribute are left untouched.
        if not hasattr(user, 'is_staff'):
            return
        user.is_staff = is_staff

    def _set_superuser(self, user, is_superuser):
        if not hasattr(user, 'is_superuser'):
            return
        user.is_superuser = is_superuser

    def _set_active(self, user, is_active):
        if not hasattr(user, 'is_active'):
            return
        user.is_active = is_active

    def _update_profile(self, user, auth_data):
        """Update fields of the profile according to auth-groupe-x data."""
        pass

    def _update_groups(self, user, auth_data):
        pass

    # Internals
    # =========

    def _update_perms(self, user, auth_data):
        # Handle staff status
        staff_perms = self.config.STAFF_PERMS
        if staff_perms:
            is_staff = any(auth_data.has_perm(perm) for perm in staff_perms)
            self._set_staff(user, is_staff)

        # Handle superadmins
        superadmin_perms = self.config.SUPERADMIN_PERMS
        if superadmin_perms:
            is_superuser = any(
                auth_data.has_perm(perm) for perm in superadmin_perms)
            self._set_superuser(user, is_superuser)
            if is_superuser:
                # A superadmin is always staff as well.
                self._set_staff(user, True)

        # Handle active status
        if auth_data.is_dead and self.config.DISABLE_DEADS:
            self._set_active(user, False)

    def _update_user(self, user, auth_data):
        """Update various fields of the user according to auth-groupe-x data."""
        self._update_profile(user, auth_data)
        self._update_perms(user, auth_data)
        self._update_groups(user, auth_data)
        # Persist all accumulated changes at once.
        user.save()
        logger.info('Updated user %s', user.get_username())

    def _create_user_from_auth_data(self, auth_data):
        """Create a new Django user from AuthGroupeX data.

        This only sets the basic username field;
        groups and other data are handled by the update_user method.
        """
        new_user = self._create_user(auth_data.username)
        new_user.set_unusable_password()
        logger.info('Created a new user with username %s', auth_data.username)
        return new_user
class AuthGroupeXBackend(AuthGroupeXMixin):
    """Authentication backend for auth-groupe-x"""

    supports_anonymous_user = False
    supports_object_permissions = False

    def __init__(self, config=None, *args, **kwargs):
        super(AuthGroupeXBackend, self).__init__(config=config, *args, **kwargs)
        self.user_model = get_model(self.config.USER_MODEL)

    def get_user(self, user_id):
        """Retrieve a user by ID.

        Args:
            user_id: int, the ID of the user

        Returns:
            Either an instance of self.config.USER_MODEL or None
        """
        try:
            return self.user_model.objects.get(pk=user_id)
        except self.user_model.DoesNotExist:
            return None

    def _fetch_user(self, username):
        # May raise self.user_model.DoesNotExist (an ObjectDoesNotExist
        # subclass), which the mixin's authenticate() catches.
        return self.user_model.objects.get(username=username)

    def _create_user(self, username):
        return self.user_model.objects.create(username=username, is_active=True)

    def _update_profile(self, user, auth_data):
        """Update fields of the profile according to auth-groupe-x data."""
        # Update basic profile data; empty values coming from the remote
        # service never overwrite existing fields.
        if auth_data.firstname:
            user.first_name = auth_data.firstname
        if auth_data.lastname:
            user.last_name = auth_data.lastname
        if auth_data.email:
            user.email = auth_data.email

        if getattr(self.config, 'PROFILE_CLASS', ''):
            profile_model = get_model(self.config.PROFILE_CLASS)
            try:
                profile = user.get_profile()
            except profile_model.DoesNotExist:
                profile = profile_model.objects.create(user=user)
            if auth_data.promo:
                profile.promo = auth_data.promo
            profile.save()

    def _update_groups(self, user, auth_data):
        """Update django groups of the user according to auth-groupe-x data"""
        if not self.config.MAP_GROUPS:
            return

        # Gather names of django groups by mapping perms using MAP_GROUPS
        new_group_names = set()
        old_group_names = set()
        for perm in PERM_LEVELS:
            if auth_data.has_perm(perm):
                new_group_names |= set(self.config.MAP_GROUPS.get(perm, []))
            else:
                old_group_names |= set(self.config.MAP_GROUPS.get(perm, []))

        # Find django group objects
        group_model = get_model(self.config.GROUP_MODEL)
        new_groups = list(group_model.objects.filter(name__in=new_group_names))
        old_groups = list(group_model.objects.filter(name__in=old_group_names))

        if old_groups:
            # BUG FIX: this log line previously interpolated new_groups while
            # the code actually removes old_groups.
            logger.info(u"Removing user %s from groups %s", user, old_groups)
            user.groups.remove(*old_groups)
        if new_groups:
            logger.info(u"Adding user %s to groups %s", user, new_groups)
            user.groups.add(*new_groups)
| Polytechnique-org/django-authgroupex | django_authgroupex/auth.py | Python | bsd-2-clause | 8,966 |
// Copyright (C) 2017 Robin Templeton. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
description: Octal BigInt literal containing an invalid digit
esid: prod-NumericLiteral
info: |
  NumericLiteral ::
    NumericLiteralBase NumericLiteralSuffix
  NumericLiteralBase ::
    DecimalLiteral
    BinaryIntegerLiteral
    OctalIntegerLiteral
    HexIntegerLiteral
  NumericLiteralSuffix :: n
negative:
  phase: parse
  type: SyntaxError
features: [BigInt]
---*/
// If this file unexpectedly parsed, evaluation would stop at this throw
// before reaching the literal below.
throw "Test262: This statement should not be evaluated.";
// "9" is not a valid octal digit, so this BigInt literal must be rejected
// during the parse phase (see the "negative" frontmatter above).
0o9n;
| sebastienros/jint | Jint.Tests.Test262/test/language/literals/bigint/octal-invalid-digit.js | JavaScript | bsd-2-clause | 578 |