repo_name
stringlengths
4
116
path
stringlengths
4
379
size
stringlengths
1
7
content
stringlengths
3
1.05M
license
stringclasses
15 values
yvolk/calendar-widget
app/src/main/java/org/andstatus/todoagenda/DateUtil.java
4347
package org.andstatus.todoagenda;

import android.support.annotation.NonNull;
import android.text.TextUtils;
import android.text.format.DateUtils;
import android.util.Log;

import org.andstatus.todoagenda.prefs.InstanceSettings;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

import java.util.Formatter;
import java.util.Locale;

/**
 * Date formatting helpers for the widget, plus a mockable "now" used by tests.
 */
public class DateUtil {
    private static final String COMMA_SPACE = ", ";
    // When non-null, overrides the real clock (see setNow/now); volatile for cross-thread visibility.
    private static volatile DateTime mNow = null;
    // Real wall-clock moment at which mNow was last set; used to advance the mocked time.
    private static volatile DateTime mNowSetAt = DateTime.now();

    /** Returns true when the given date-time is exactly at the start of its day. */
    public static boolean isMidnight(DateTime date) {
        return date.isEqual(date.withTimeAtStartOfDay());
    }

    /** Formats a day-header title, substituting "Today"/"Tomorrow" where applicable. */
    public static String createDayHeaderTitle(InstanceSettings settings, DateTime dateTime) {
        return createDateString(settings, dateTime, true);
    }

    /** Formats a plain date string (no "Today"/"Tomorrow" substitution). */
    public static String createDateString(InstanceSettings settings, DateTime dateTime) {
        return createDateString(settings, dateTime, false);
    }

    private static String createDateString(InstanceSettings settings, DateTime dateTime, boolean forDayHeader) {
        if (settings.getAbbreviateDates()) {
            return formatDateTime(settings, dateTime,
                    DateUtils.FORMAT_ABBREV_ALL | DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_SHOW_WEEKDAY);
        }
        if (forDayHeader) {
            // NOTE(review): uses the real clock (DateTime.now()) rather than DateUtil.now();
            // if the test override set via setNow() is expected to affect day headers,
            // this is inconsistent — confirm intended behavior.
            DateTime timeAtStartOfToday = DateTime.now().withTimeAtStartOfDay();
            if (dateTime.withTimeAtStartOfDay().isEqual(timeAtStartOfToday)) {
                return createDateString(settings, dateTime, settings.getContext().getString(R.string.today));
            } else if (dateTime.withTimeAtStartOfDay().isEqual(timeAtStartOfToday.plusDays(1))) {
                return createDateString(settings, dateTime, settings.getContext().getString(R.string.tomorrow));
            }
        }
        return formatDateTime(settings, dateTime, DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_SHOW_WEEKDAY);
    }

    // Prefixes the formatted date with e.g. "Today" and ", ".
    private static String createDateString(InstanceSettings settings, DateTime dateTime, String prefix) {
        return prefix + COMMA_SPACE + formatDateTime(settings, dateTime, DateUtils.FORMAT_SHOW_DATE);
    }

    /**
     * Formats the date-time using Android's DateUtils flags, honoring a locked
     * time zone from settings when one is configured.
     */
    public static String formatDateTime(InstanceSettings settings, DateTime dateTime, int flags) {
        return settings.isTimeZoneLocked() ?
                formatDateTimeAtTimeZone(settings, dateTime, flags, settings.getLockedTimeZoneId()) :
                DateUtils.formatDateTime(settings.getContext(), dateTime.getMillis(), flags);
    }

    private static String formatDateTimeAtTimeZone(InstanceSettings settings, DateTime dateTime,
            int flags, String timeZoneId) {
        // formatDateRange is used (with a zero-length range) because, unlike formatDateTime,
        // it accepts an explicit time zone id.
        return DateUtils.formatDateRange(settings.getContext(),
                new Formatter(new StringBuilder(50), Locale.getDefault()),
                dateTime.getMillis(), dateTime.getMillis(), flags,
                timeZoneId).toString();
    }

    /** Overrides "now" for testing; records the real moment of the override so elapsed time can be tracked. */
    public static void setNow(DateTime now) {
        mNowSetAt = DateTime.now();
        mNow = now;
    }

    /**
     * Usually returns real "now", but may be #setNow to some other time for testing purposes
     */
    public static DateTime now(DateTimeZone zone) {
        DateTime nowSetAt;
        DateTime now;
        do {
            // Re-read until both fields come from the same setNow() call.
            nowSetAt = mNowSetAt;
            now = mNow;
        } while (nowSetAt != mNowSetAt); // Ensure concurrent consistency
        if (now == null) {
            return DateTime.now(zone);
        } else {
            // Advance the mocked "now" by the real time elapsed since it was set.
            long diffL = DateTime.now().getMillis() - nowSetAt.getMillis();
            int diff = 0;
            if (diffL > 0 && diffL < Integer.MAX_VALUE) {
                diff = (int) diffL;
            }
            return new DateTime(now, zone).plusMillis(diff);
        }
    }

    /**
     * Returns an empty string in a case supplied ID is not a valid Time Zone ID
     */
    @NonNull
    public static String validatedTimeZoneId(String timeZoneId) {
        if (!TextUtils.isEmpty(timeZoneId)) {
            try {
                return DateTimeZone.forID(timeZoneId).getID();
            } catch (IllegalArgumentException e) {
                Log.w("validatedTimeZoneId", "The time zone is not recognized: '" + timeZoneId + "'");
            }
        }
        return "";
    }
}
apache-2.0
jitsi/jitsi-meet
react/features/security/components/security-dialog/native/styles.js
2167
// @flow import BaseTheme from '../../../../base/ui/components/BaseTheme.native'; /** * The styles of the feature security. */ export default { securityDialogContainer: { flex: 1, marginTop: BaseTheme.spacing[4] }, headerCloseButton: { marginLeft: 12 }, lobbyModeContainer: { borderBottomColor: BaseTheme.palette.border01, borderBottomWidth: 1 }, lobbyModeContent: { marginHorizontal: BaseTheme.spacing[3], marginBottom: BaseTheme.spacing[4] }, lobbyModeLabel: { fontWeight: 'bold', marginTop: BaseTheme.spacing[2] }, lobbyModeSection: { alignItems: 'center', flexDirection: 'row', justifyContent: 'space-between', marginTop: BaseTheme.spacing[1] }, passwordContainer: { marginHorizontal: BaseTheme.spacing[3], marginTop: BaseTheme.spacing[4] }, passwordContainerControls: { alignItems: 'center', flexDirection: 'row', justifyContent: 'space-between' }, savedPasswordContainer: { flexDirection: 'row', marginTop: 20, width: 208 }, savedPasswordLabel: { fontWeight: 'bold' }, savedPassword: { color: BaseTheme.palette.text06 }, passwordInput: { borderColor: BaseTheme.palette.action03Active, borderRadius: BaseTheme.spacing[1], borderWidth: 2, height: BaseTheme.spacing[6], marginTop: BaseTheme.spacing[2], paddingLeft: BaseTheme.spacing[1], width: 208 }, passwordSetupButton: { ...BaseTheme.typography.heading7, color: BaseTheme.palette.screen01Header, marginTop: BaseTheme.spacing[4], textTransform: 'uppercase' }, passwordSetRemotelyContainer: { alignItems: 'center', flexDirection: 'row', justifyContent: 'space-between' }, passwordSetRemotelyText: { color: BaseTheme.palette.text06, marginTop: 22 }, passwordSetRemotelyTextDisabled: { color: BaseTheme.palette.text03, marginTop: 22 } };
apache-2.0
swjjxyxty/Study
studyspring/boot-autoconfig/src/main/java/com/bestxty/sutdy/boot/config/Application.java
378
package com.bestxty.sutdy.boot.config;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * Spring Boot entry point for the auto-config study module.
 *
 * @author xty
 *         Created by xty on 2017/7/2.
 */
@SpringBootApplication
public class Application {

    public static void main(String[] args) {
        // Fix: forward command-line args to Spring so externalized configuration
        // (e.g. --server.port=8081, --spring.profiles.active=dev) is honored;
        // previously they were silently dropped.
        SpringApplication.run(Application.class, args);
    }
}
apache-2.0
xiazdong/EasyAccount
templates_c/c7c6463daf126f1f4aabc04220f75ea6d4c9c80d.file.foot.html.php
1331
<?php /* Smarty version Smarty-3.1.12, created on 2014-01-05 01:25:09 compiled from "..\templates\foot.html" */ ?> <?php /*%%SmartyHeaderCode:3205352a08ae3e2afa0-18263060%%*/if(!defined('SMARTY_DIR')) exit('no direct access allowed'); $_valid = $_smarty_tpl->decodeProperties(array ( 'file_dependency' => array ( 'c7c6463daf126f1f4aabc04220f75ea6d4c9c80d' => array ( 0 => '..\\templates\\foot.html', 1 => 1388850830, 2 => 'file', ), ), 'nocache_hash' => '3205352a08ae3e2afa0-18263060', 'function' => array ( ), 'version' => 'Smarty-3.1.12', 'unifunc' => 'content_52a08ae3e33a61_19655601', 'has_nocache_code' => false, ),false); /*/%%SmartyHeaderCode%%*/?> <?php if ($_valid && !is_callable('content_52a08ae3e33a61_19655601')) {function content_52a08ae3e33a61_19655601($_smarty_tpl) {?> <footer class="footer"> <div class="container"> <p> Designed and built by <a href="http://weibo.com/u/2002073744" target="_blank">@buka</a><a href="http://weibo.com/u/1620966475" target="_blank">@xiazdong</a><a href="http://weibo.com/u/2108827503" target="_blank">@maxeric</a>. </p> <p> Code licensed under <a href="http://www.apache.org/licenses/LICENSE-2.0" target="_blank">Apache License v2.0</a>. </p> </div> </footer><?php }} ?>
apache-2.0
WaltChen/NDatabase
tests/NDatabase.Old.UnitTests/NDatabase/Odb/Test/VO/Human/Animal.cs
968
namespace Test.NDatabase.Odb.Test.VO.Human
{
    /// <summary>
    ///     Simple value object used by the NDatabase test suite.
    ///     Fields are protected so subclasses (e.g. Human types) can access them directly;
    ///     the specie/sex setters are protected, so only name is publicly mutable.
    /// </summary>
    public class Animal
    {
        protected string name;

        protected string sex;

        protected string specie;

        /// <summary>Creates an animal with the given specie, sex and name.</summary>
        public Animal(string specie, string sex, string name)
        {
            this.specie = specie;
            this.sex = sex;
            this.name = name;
        }

        public virtual string GetSpecie()
        {
            return specie;
        }

        protected virtual void SetSpecie(string specie)
        {
            this.specie = specie;
        }

        public virtual string GetSex()
        {
            return sex;
        }

        protected virtual void SetSex(string sex)
        {
            this.sex = sex;
        }

        public virtual string GetName()
        {
            return name;
        }

        public virtual void SetName(string name)
        {
            this.name = name;
        }
    }
}
apache-2.0
google-code/ant-util
src/main/java/com/newmainsoftech/ant/types/ArgumentExtAspect.java
3643
/*
 * Copyright (C) 2012-2013 NewMain Softech
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language
 * governing permissions and limitations under the License.
 */
package com.newmainsoftech.ant.types;

import java.lang.reflect.Method;
import java.util.Arrays;

import org.apache.tools.ant.types.Commandline.Argument;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.AfterReturning;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.newmainsoftech.ant.types.ArgumentContract.MethodSetPartsField;

/**
 * AspectJ aspect that, after any ArgumentContract setter method declared in
 * {@link MethodSetPartsField} executes on an ArgumentExt instance, nullifies the
 * instance's cached argList so it will be rebuilt from the updated parts.
 */
@Aspect
public class ArgumentExtAspect {
    Logger logger = LoggerFactory.getLogger( this.getClass());
    protected Logger getLogger() {
        return logger;
    }

    /**
     * Runtime pointcut guard (the if() form): matches execution of public
     * ArgumentContract methods within ArgumentExt, but only when the executing
     * method's name and parameter types equal one of the MethodSetPartsField
     * contract methods.
     */
    @Pointcut(
            value="execution( public * com.newmainsoftech.ant.types.ArgumentContract.*(..)) "
                    + "&& within( com.newmainsoftech.ant.types.ArgumentExt) && if()")
    public static boolean pointcutAtExecutionOfArgumentContractMethods( JoinPoint joinPoint) {
        MethodSignature methodSignature = (MethodSignature)(joinPoint.getSignature());
        Method method = methodSignature.getMethod();
        String methodName = method.getName();
        Class<?>[] argTypeArray = method.getParameterTypes();
        // Accept only methods whose name AND parameter types match a contract method.
        for( MethodSetPartsField methodSetPartsField : MethodSetPartsField.values()) {
            if ( !methodName.equals( methodSetPartsField.getArgumentContractMethod().getName())) continue; // for
            if ( !Arrays.equals(
                    argTypeArray, methodSetPartsField.getArgumentContractMethod().getParameterTypes())
                    ) {
                continue; // for
            }
            return true;
        } // for
        return false;
    }

    /**
     * After a matched contract method returns normally, clears the target
     * ArgumentExt's argList cache (and logs entry/exit at debug level).
     */
    @AfterReturning( pointcut="pointcutAtExecutionOfArgumentContractMethods( joinPoint)")
    public void afterReturningAdvisedExecutionOfArgumentContractMethods( JoinPoint joinPoint) {
        Logger logger = getLogger();
        if ( logger.isDebugEnabled()) {
            MethodSignature methodSignature = (MethodSignature)(joinPoint.getSignature());
            logger.debug( String.format( "Entering %1$s method of %2$s.",
                    methodSignature.getMethod().getName(), joinPoint.getTarget().toString()
                    )
            );
            // NOTE(review): "Thead" is a typo in this log message ("Thread"); left
            // untouched here since it is a runtime string.
            logger.debug( String.format(
                    "Thead ID: %1$d%nStack-trace to here: %n%2$s",
                    Thread.currentThread().getId(),
                    Arrays.toString( Arrays.copyOf( Thread.currentThread().getStackTrace(), 7))
                    )
            );
        }
        // Invalidate the cached argList so subsequent reads rebuild it.
        ArgumentExt argumentExt = (ArgumentExt)(joinPoint.getTarget());
        argumentExt.setArgList( null);
        if ( logger.isDebugEnabled()) {
            Object targetObj = joinPoint.getTarget();
            logger.debug( String.format( "Nullified argList member field of %1$s.",
                    targetObj.toString()
                    )
            );
            MethodSignature methodSignature = (MethodSignature)(joinPoint.getSignature());
            logger.debug( String.format( "Exited %1$s method of %2$s",
                    methodSignature.getMethod().getName(), targetObj.toString()
                    )
            );
        }
    }
}
apache-2.0
rssvihla/cassandra-commons
cassandra-commons-maven/src/test/java/pro/foundev/cassandra/commons/maven/loader/MojoMigrationLoaderImplTest.java
3611
/*
 * Copyright 2015 Ryan Svihla
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package pro.foundev.cassandra.commons.maven.loader;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestExecutionListeners;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import pro.foundev.cassandra.commons.test.CassandraTestDB;
import pro.foundev.cassandra.commons.test.spring.CassandraTableCleanupListener;
import pro.foundev.cassandra.commons.test.spring.CassandraTestContext;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URISyntaxException;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

/**
 * Integration tests for MojoMigrationLoaderImpl: argument validation,
 * end-to-end CQL migration runs against a test Cassandra instance, and
 * logging of the run parameters.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = CassandraTestContext.class)
@TestExecutionListeners(listeners = CassandraTableCleanupListener.class)
public class MojoMigrationLoaderImplTest {

    @Autowired
    private CassandraTestDB cassandraTestDB;

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    // An empty/missing cassandra.yaml path must be rejected up front.
    @Test
    public void itRequiresCassandraYaml() throws IOException {
        expectedException.expect(FileNotFoundException.class);
        new MojoMigrationLoaderImpl().exec("", null);
    }

    // A missing CQL script directory must be rejected up front.
    @Test
    public void itRequiresScriptDirectory() throws IOException {
        expectedException.expect(FileNotFoundException.class);
        new MojoMigrationLoaderImpl().exec(null, "");
    }

    // Runs the bundled /cql scripts against the test cluster; the keyspace is
    // dropped before and after so the test is repeatable.
    @Test
    public void itRunsMigrations() throws URISyntaxException, IOException {
        cassandraTestDB.deleteTestKeyspaceIfExists("my_test_keyspace");
        try {
            String cassandraYaml = getClass().getResource("/cassandra.yaml").toURI().toURL().getFile();
            String cqlDir = getClass().getResource("/cql").toURI().toURL().getFile();
            new MojoMigrationLoaderImpl().exec(cqlDir, cassandraYaml);
        }finally {
            cassandraTestDB.deleteTestKeyspaceIfExists("my_test_keyspace");
        }
    }

    // Verifies the loader logs the cql directory and yaml path it was given.
    @Test
    public void itLogsParameters() throws URISyntaxException, IOException {
        String keyspace = "my_test_keyspace";
        cassandraTestDB.deleteTestKeyspaceIfExists(keyspace);
        try {
            String cassandraYaml = getClass().getResource("/cassandra.yaml").toURI().toURL().getFile();
            String cqlDir = getClass().getResource("/cql").toURI().toURL().getFile();
            MojoMigrationLoaderImpl mojoMigration = new MojoMigrationLoaderImpl();
            Logger logger = mock(Logger.class);
            mojoMigration.setLogger(logger);
            mojoMigration.exec(cqlDir, cassandraYaml);
            verify(logger).info("Running migrations - cql directory: " + cqlDir
                    + "; cassandra yaml: " + cassandraYaml);
        }finally {
            cassandraTestDB.deleteTestKeyspaceIfExists(keyspace);
        }
    }
}
apache-2.0
0x0mar/arachni
spec/support/servers/plugins/uncommon_headers.rb
206
# Minimal Sinatra fixture server for the uncommon-headers plugin spec:
# the index links to two pages, each responding with a non-standard HTTP header.
require 'sinatra'

get '/' do
<<EOHTML
<a href="/1">1</a>
<a href="/2">2</a>
EOHTML
end

# Responds with the uncommon header "Weird".
get '/1' do
    headers['Weird'] = 'Value'
end

# Responds with the uncommon header "Weird2".
get '/2' do
    headers['Weird2'] = 'Value2'
end
apache-2.0
abbbystevens/csc207-hw6
src/Problem1/WrappingQueuesTest.java
2386
package Problem1;

import static org.junit.Assert.*;

import org.junit.Test;

/**
 * Scripted test for WrappingQueues, a fixed-capacity circular-buffer queue.
 * Exercises fill/drain cycles that force the front and back indices to wrap
 * around the end of the backing array; the ASCII diagrams show the expected
 * buffer contents and index positions after each step.
 */
public class WrappingQueuesTest {

    @Test
    public void test() throws Exception {
        ReportingLinearStructure<String> expt =
                new ReportingLinearStructure<String>(new WrappingQueues<String>(5), "expt.");
        // Put in a few values, then remove two
        assertEquals(expt.isEmpty(), true);
        expt.put("alpha");
        expt.put("beta");
        expt.put("gamma");
        expt.put("gamma");
        expt.put("alpha");
        // looks like
        // [alpha, beta, gamma, gamma, alpha]
        //  ^front                     ^back
        assertEquals(expt.get(), "alpha");
        assertEquals(expt.get(), "beta");
        // looks like
        // [_____, _____, gamma, gamma, alpha]
        //                ^front        ^back
        // should need to wrap around here
        expt.put("delta");
        expt.put("sigma");
        // looks like
        // [delta, sigma, gamma, gamma, alpha]
        //         ^back  ^front
        assertEquals(expt.get(), "gamma");
        assertEquals(expt.get(), "gamma");
        assertEquals(expt.get(), "alpha");
        assertEquals(expt.get(), "delta");
        assertEquals(expt.get(), "sigma");
        // looks like
        // [_____, _____, _____, _____, _____]
        //                ^front
        //                ^back

        // Second structure: interleave puts and gets so both indices wrap repeatedly.
        ReportingLinearStructure<String> wrap =
                new ReportingLinearStructure<String>(new WrappingQueues<String>(5), "wrap.");
        wrap.put("d");
        // looks like
        // [d, ___, ___, ___, ___, ___]
        //  ^front
        //  ^back
        assertEquals(wrap.peek(), "d");
        wrap.put("e");
        wrap.put("f");
        wrap.put("g");
        wrap.put("h");
        // [d, e, f, g, h]
        //  ^front      ^back
        assertEquals(wrap.get(), "d");
        // [__, e, f, g, h]
        //      ^front   ^back
        wrap.put("i");
        // [i, e, f, g, h]
        //  ^back^front
        assertEquals(wrap.get(), "e");
        wrap.put("j");
        // [i, j, f, g, h]
        //     ^back^front
        assertEquals(wrap.get(), "f");
        // [i, j, __, g, h]
        //     ^back  ^front
        wrap.put("k");
        // [i, j, k, g, h]
        //        ^bac^front
        assertEquals(wrap.get(), "g");
        wrap.put("i");
        // [i, j, k, i, h]
        //           ^bac^front
        assertEquals(wrap.peek(), "h");
        assertEquals(wrap.get(), "h");
        assertEquals(wrap.get(), "i");
        assertEquals(wrap.get(), "j");
        assertEquals(wrap.get(), "k");
        assertEquals(wrap.get(), "i");
        assertEquals(wrap.isEmpty(), true);
    }
}
apache-2.0
heia-fr/telecom-tower
sprite/sprite.go
1719
// Copyright 2015 Jacques Supcik, HEIA-FR
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package sprite provides small RGB bitmaps for the telecom-tower LED matrix.
package sprite

import (
	"github.com/heia-fr/telecom-tower/ledmatrix"
	"image"
	_ "image/jpeg" // register JPEG decoder for image.Decode
	_ "image/png"  // register PNG decoder for image.Decode
	"log"
	"os"
)

// Sprite is a Width x Height bitmap of packed RGB pixel values,
// indexed as Bitmap[y][x].
type Sprite struct {
	Width, Height int
	Bitmap        [][]uint32
}

// NewSprite allocates a zeroed (black) sprite of the given dimensions.
func NewSprite(width, height int) *Sprite {
	s := new(Sprite)
	s.Width = width
	s.Height = height
	s.Bitmap = make([][]uint32, height)
	for i := 0; i < height; i++ {
		s.Bitmap[i] = make([]uint32, width)
	}
	return s
}

// NewSpriteFromImage decodes a PNG or JPEG file into a sprite.
// NOTE(review): the image must be exactly 8 pixels high — presumably the LED
// matrix row count; confirm. Any failure (open, decode, wrong size) calls
// log.Fatal and terminates the process, which is harsh for library code.
func NewSpriteFromImage(fileName string) *Sprite {
	reader, err := os.Open(fileName)
	if err != nil {
		log.Fatal(err)
	}
	defer reader.Close()
	m, _, err := image.Decode(reader)
	if err != nil {
		log.Fatal(err)
	}
	bounds := m.Bounds()
	if bounds.Max.Y-bounds.Min.Y != 8 {
		log.Fatal("Invalid image size")
	}
	s := NewSprite(bounds.Max.X-bounds.Min.X, bounds.Max.Y-bounds.Min.Y)
	for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
		for x := bounds.Min.X; x < bounds.Max.X; x++ {
			// RGBA returns 16-bit channels; shift down to 8 bits per channel.
			r, g, b, _ := m.At(x, y).RGBA()
			s.SetPixel(x-bounds.Min.X, y-bounds.Min.Y,
				ledmatrix.RGB(int(r>>8), int(g>>8), int(b>>8)))
		}
	}
	return s
}

// SetPixel writes packed RGB color c at position (x, y); no bounds checking.
func (s *Sprite) SetPixel(x, y int, c uint32) {
	s.Bitmap[y][x] = c
}
apache-2.0
EMC-CMD/libstorage
api/types/types_store.go
2347
package types

// Store is a key/value store with case-insensitive keys.
type Store interface {
	// Map returns the contents of the store as a map[string]interface{}.
	Map() map[string]interface{}

	// Keys returns a list of all the keys in the store.
	Keys() []string

	// IsSet returns true if a key exists.
	IsSet(k string) bool

	// Get returns a value for the key; a nil value if the key does not exist.
	Get(k string) interface{}

	// GetString returns a string value for a key; an empty string if the key
	// does not exist.
	GetString(k string) string

	// GetStringPtr returns a pointer to a string value for a key; nil if
	// the key does not exist.
	GetStringPtr(k string) *string

	// GetBool returns a boolean value for the key; false if the key does not
	// exist.
	GetBool(k string) bool

	// GetBoolPtr returns a pointer to a boolean value for the key; nil if the
	// key does not exist.
	GetBoolPtr(k string) *bool

	// GetInt returns an int value for the key; 0 if the key does not exist.
	GetInt(k string) int

	// GetIntPtr returns a pointer to an int value for the key; nil if the key
	// does not exist.
	GetIntPtr(k string) *int

	// GetInt64 returns an int64 value for the key; 0 if the key does not exist.
	GetInt64(k string) int64

	// GetInt64Ptr returns a pointer to an int64 value for the key; nil if the
	// key does not exist.
	GetInt64Ptr(k string) *int64

	// GetIntSlice returns an int slice value for a key; a nil value if
	// the key does not exist.
	GetIntSlice(k string) []int

	// GetStringSlice returns a string slice value for a key; a nil value if
	// the key does not exist.
	GetStringSlice(k string) []string

	// GetBoolSlice returns a bool slice value for a key; a nil value if
	// the key does not exist.
	GetBoolSlice(k string) []bool

	// GetInstanceID returns an instance ID for a key; a nil value if the key
	// does not exist.
	GetInstanceID(k string) *InstanceID

	// GetMap returns a map value for a key; a nil value if the key does not
	// exist.
	GetMap(k string) map[string]interface{}

	// GetStore returns a Store value for a key; a nil value if the key does
	// not exist.
	GetStore(k string) Store

	// Set sets a key/value in the store.
	Set(k string, v interface{})

	// Delete removes a key/value from the store. If the value exists in the
	// map it is returned.
	Delete(k string) interface{}
}
apache-2.0
spring-projects/spring-net
src/Spring/Spring.Data.NHibernate5/Data/NHibernate/HibernateOptimisticLockingFailureException.cs
3339
/*
 * Copyright © 2002-2011 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Runtime.Serialization;
using NHibernate;
using Spring.Dao;

namespace Spring.Data.NHibernate
{
    /// <summary>
    /// Hibernate-specific subclass of ObjectOptimisticLockingFailureException.
    /// </summary>
    /// <remarks>
    /// Converts Hibernate's StaleObjectStateException.
    /// </remarks>
    /// <author>Mark Pollack (.NET)</author>
    /// <version>$Id: HibernateOptimisticLockingFailureException.cs,v 1.2 2008/04/23 11:41:41 lahma Exp $</version>
    ///
    [Serializable]
    public class HibernateOptimisticLockingFailureException : ObjectOptimisticLockingFailureException
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="HibernateOptimisticLockingFailureException"/> class.
        /// </summary>
        public HibernateOptimisticLockingFailureException()
        {
        }

        /// <summary>
        /// Creates a new instance of the
        /// <see cref="HibernateOptimisticLockingFailureException"/> class.
        /// </summary>
        /// <param name="message">
        /// A message about the exception.
        /// </param>
        public HibernateOptimisticLockingFailureException(string message) : base(message)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="HibernateOptimisticLockingFailureException"/> class,
        /// extracting the entity name and identifier from the NHibernate exception.
        /// </summary>
        /// <param name="ex">The StaleObjectStateException to convert and wrap.</param>
        public HibernateOptimisticLockingFailureException(StaleObjectStateException ex)
            : base(ex.EntityName, ex.Identifier, ex.Message, ex)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="HibernateOptimisticLockingFailureException"/> class.
        /// </summary>
        /// <param name="ex">The StaleStateException to convert and wrap.</param>
        public HibernateOptimisticLockingFailureException(StaleStateException ex)
            : base(ex.Message, ex)
        {
        }

        /// <summary>
        /// Creates a new instance of the HibernateOptimisticLockingFailureException class with the specified message
        /// and root cause.
        /// </summary>
        /// <param name="message">
        /// A message about the exception.
        /// </param>
        /// <param name="rootCause">
        /// The root exception that is being wrapped.
        /// </param>
        public HibernateOptimisticLockingFailureException(string message, Exception rootCause)
            : base(message, rootCause)
        {
        }

        /// <inheritdoc />
        protected HibernateOptimisticLockingFailureException(SerializationInfo info, StreamingContext context)
            : base( info, context)
        {
        }
    }
}
apache-2.0
quarkusio/quarkus
extensions/elasticsearch-rest-client/runtime/src/main/java/io/quarkus/elasticsearch/restclient/lowlevel/runtime/ElasticsearchConfig.java
2710
package io.quarkus.elasticsearch.restclient.lowlevel.runtime;

import java.net.InetSocketAddress;
import java.time.Duration;
import java.util.List;
import java.util.Optional;

import io.quarkus.runtime.annotations.ConfigGroup;
import io.quarkus.runtime.annotations.ConfigItem;
import io.quarkus.runtime.annotations.ConfigPhase;
import io.quarkus.runtime.annotations.ConfigRoot;

/**
 * Run-time configuration for the low-level Elasticsearch REST client.
 */
@ConfigRoot(phase = ConfigPhase.RUN_TIME)
public class ElasticsearchConfig {

    /**
     * The list of hosts of the Elasticsearch servers.
     */
    @ConfigItem(defaultValue = "localhost:9200")
    public List<InetSocketAddress> hosts;

    /**
     * The protocol to use when contacting Elasticsearch servers.
     * Set to "https" to enable SSL/TLS.
     */
    @ConfigItem(defaultValue = "http")
    public String protocol;

    /**
     * The username for basic HTTP authentication.
     */
    @ConfigItem
    public Optional<String> username;

    /**
     * The password for basic HTTP authentication.
     */
    @ConfigItem
    public Optional<String> password;

    /**
     * The connection timeout.
     */
    @ConfigItem(defaultValue = "1S")
    public Duration connectionTimeout;

    /**
     * The socket timeout.
     */
    @ConfigItem(defaultValue = "30S")
    public Duration socketTimeout;

    /**
     * The maximum number of connections to all the Elasticsearch servers.
     */
    @ConfigItem(defaultValue = "20")
    public int maxConnections;

    /**
     * The maximum number of connections per Elasticsearch server.
     */
    @ConfigItem(defaultValue = "10")
    public int maxConnectionsPerRoute;

    /**
     * The number of IO threads.
     * By default, this is the number of locally detected processors.
     * <p>
     * Thread counts higher than the number of processors should not be necessary because the I/O threads rely on non-blocking
     * operations, but you may want to use a thread count lower than the number of processors.
     *
     * @see <a href="https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/_number_of_threads.html">number of
     *      threads</a>
     */
    @ConfigItem
    public Optional<Integer> ioThreadCounts;

    /**
     * Configuration for the automatic discovery of new Elasticsearch nodes.
     */
    @ConfigItem
    public DiscoveryConfig discovery;

    @ConfigGroup
    public static class DiscoveryConfig {

        /**
         * Defines if automatic discovery is enabled.
         */
        @ConfigItem(defaultValue = "false")
        public boolean enabled;

        /**
         * Refresh interval of the node list.
         */
        @ConfigItem(defaultValue = "5M")
        public Duration refreshInterval;
    }
}
apache-2.0
herickson/terremark-api
src/main/java/com/terremark/api/LoginBanner.java
1906
package com.terremark.api;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;

/**
 * <p>Java class for LoginBanner complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="LoginBanner">
 *   &lt;complexContent>
 *     &lt;extension base="{}Resource">
 *       &lt;sequence>
 *         &lt;element name="Display" type="{http://www.w3.org/2001/XMLSchema}boolean"/>
 *         &lt;element name="Text" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/extension>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 * NOTE(review): this looks like a JAXB-generated schema binding — prefer
 * regenerating from the schema over hand-editing; confirm before modifying.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "LoginBanner", propOrder = {
    "display",
    "text"
})
@javax.xml.bind.annotation.XmlRootElement(name = "LoginBanner")
public class LoginBanner extends Resource {

    @XmlElement(name = "Display")
    protected boolean display;
    @XmlElement(name = "Text", nillable = true)
    protected String text;

    /**
     * Gets the value of the display property.
     *
     */
    public boolean isDisplay() {
        return display;
    }

    /**
     * Sets the value of the display property.
     *
     */
    public void setDisplay(boolean value) {
        this.display = value;
    }

    /**
     * Gets the value of the text property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getText() {
        return text;
    }

    /**
     * Sets the value of the text property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setText(String value) {
        this.text = value;
    }
}
apache-2.0
p2p-sync/sync
src/test/java/org/rmatil/sync/test/messaging/sharingexchange/unshare/UnshareExchangeHandlerResultTest.java
355
package org.rmatil.sync.test.messaging.sharingexchange.unshare;

import org.junit.Test;
import org.rmatil.sync.core.messaging.sharingexchange.unshare.UnshareExchangeHandlerResult;

/**
 * Smoke test: an UnshareExchangeHandlerResult must be constructible
 * via its no-arg constructor.
 */
public class UnshareExchangeHandlerResultTest {

    @Test
    public void test() {
        UnshareExchangeHandlerResult result = new UnshareExchangeHandlerResult();
        // Fix: the test previously constructed the object but asserted nothing,
        // so it could never fail meaningfully. Make the intent explicit.
        // (Fully qualified to avoid adding an import.)
        org.junit.Assert.assertNotNull(result);
    }
}
apache-2.0
fkeglevich/Raw-Dumper
app/src/main/java/com/fkeglevich/rawdumper/camera/feature/ManualTemperatureFeature.java
3848
/*
 * Copyright 2018, Flávio Keglevich
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.fkeglevich.rawdumper.camera.feature;

import android.content.SharedPreferences;

import com.fkeglevich.rawdumper.camera.async.direct.AsyncParameterSender;
import com.fkeglevich.rawdumper.camera.data.DataRange;
import com.fkeglevich.rawdumper.camera.data.ManualTemperature;
import com.fkeglevich.rawdumper.camera.data.ManualTemperatureRange;
import com.fkeglevich.rawdumper.camera.extension.AsusParameters;
import com.fkeglevich.rawdumper.camera.parameter.ParameterCollection;
import com.fkeglevich.rawdumper.camera.parameter.value.RangeValidator;
import com.fkeglevich.rawdumper.camera.parameter.value.ValueValidator;
import com.fkeglevich.rawdumper.raw.info.ColorInfo;
import com.fkeglevich.rawdumper.util.MathUtil;

import androidx.annotation.NonNull;

/**
 * Camera feature for manual white-balance temperature. Maps a 0..1 slider
 * proportion onto the sensor's four-point temperature calibration range and
 * persists the chosen value to SharedPreferences.
 * Created by Flávio Keglevich on 06/05/18.
 */
public class ManualTemperatureFeature extends RangeFeature<ManualTemperature>
{
    // Four calibration temperatures from ColorInfo; used as breakpoints of a
    // piecewise-linear slider mapping (see calculateTemp).
    private final int[] temperatureRange;

    @NonNull
    private static ValueValidator<ManualTemperature, DataRange<ManualTemperature>> createRangeValidator(ColorInfo colorInfo)
    {
        return RangeValidator.create(ManualTemperatureRange.getFrom(colorInfo));
    }

    ManualTemperatureFeature(ColorInfo colorInfo, AsyncParameterSender asyncParameterSender, ParameterCollection parameterCollection)
    {
        super(asyncParameterSender, AsusParameters.MANUAL_TEMPERATURE, parameterCollection, createRangeValidator(colorInfo));
        temperatureRange = colorInfo.getTemperatureRange();
    }

    /** Converts a 0..1 slider proportion to a temperature, clamped to the valid range. */
    @Override
    public void setValueAsProportion(double proportion)
    {
        int lower = getAvailableValues().getLower().getNumericValue();
        int upper = getAvailableValues().getUpper().getNumericValue();
        double numericValue = calculateTemp(proportion, lower, upper);
        int finalValue = MathUtil.clamp((int) Math.round(numericValue), lower, upper);

        setValueAsync(ManualTemperature.create(finalValue));
    }

    // Persists the current temperature; DISABLED is intentionally not stored.
    @Override
    void storeValue(SharedPreferences.Editor editor)
    {
        if (!isAvailable()) return;

        ManualTemperature value = getValue();
        if (!ManualTemperature.DISABLED.equals(value))
            editor.putInt(parameter.getKey(), value.getNumericValue());
    }

    // Restores a previously stored temperature; 0 (the default) means "nothing stored".
    @Override
    void loadValue(SharedPreferences preferences)
    {
        if (!isAvailable()) return;

        int numValue = preferences.getInt(parameter.getKey(), 0);
        if (numValue != 0)
            setValue(ManualTemperature.create(numValue));
    }

    /*
     * Piecewise-linear mapping of the slider proportion onto the calibration
     * breakpoints: [0, 0.25) -> [range[0], range[1]], [0.25, 0.75) ->
     * [range[1], range[2]], [0.75, 1] -> [range[2], range[3]]. This gives the
     * middle segment (presumably the most useful temperatures) half the
     * slider travel. The lower/upper arguments are unused here — the result is
     * clamped by the caller instead.
     */
    private double calculateTemp(double proportion, int lower, int upper)
    {
        if (proportion < 0.25)
            return lin(inverseLin(proportion, 0, 0.25), temperatureRange[0], temperatureRange[1]);
        else if (proportion < 0.75)
            return lin(inverseLin(proportion, 0.25, 0.75), temperatureRange[1], temperatureRange[2]);
        else
            return lin(inverseLin(proportion, 0.75, 1), temperatureRange[2], temperatureRange[3]);
    }

    // Normalizes x from [min, max] to [0, 1].
    private double inverseLin(double x, double min, double max)
    {
        return (x - min) / (max - min);
    }

    // Linearly interpolates x in [0, 1] onto [min, max].
    private double lin(double x, double min, double max)
    {
        return (max - min) * x + min;
    }
}
apache-2.0
oVirt/ovirt-web-ui
scripts/intl/extract-messages.js
1484
import fs from 'fs'
import path from 'path'
import chalk from 'chalk'
import mkdirp from 'mkdirp'

import messages from '../../src/intl/messages.js'
import timeDurations from '../../src/intl/time-durations.js'

/**
 * Convert an `id -> message` map into an array of react-intl message
 * descriptors ({ id, defaultMessage, description? }).
 *
 * Fix: entries that cannot be converted (object values without a `message`
 * field make `toReactIntlMessageDescriptor` return `undefined`) are now
 * skipped with a warning instead of being kept — `JSON.stringify` serializes
 * `undefined` array elements as `null`, which corrupted the output JSON.
 */
function normalizeMessages (messages) {
  const descriptors = []

  Object.keys(messages).forEach(key => {
    const descriptor = toReactIntlMessageDescriptor(key, messages[key])
    if (descriptor === undefined) {
      console.warn(chalk.yellow(`> [extract-messages.js] skipped message "${key}": no 'message' field ⚠️`))
    } else {
      descriptors.push(descriptor)
    }
  })

  return descriptors
}

/**
 * Build a react-intl message descriptor for a single message.
 *
 * @param messageId    unique message id
 * @param messageValue either a plain string (the default message) or an
 *                     object with `message` and optional `description`
 * @returns a descriptor object, or `undefined` when `messageValue` is an
 *          object without a `message` field (callers must filter these out)
 */
function toReactIntlMessageDescriptor (messageId, messageValue) {
  if (typeof messageValue === 'string' || messageValue instanceof String) {
    return {
      id: messageId,
      defaultMessage: messageValue,
    }
  }

  if ('message' in messageValue) {
    const messageDescriptor = {
      id: messageId,
      defaultMessage: messageValue.message,
    }
    if ('description' in messageValue) {
      messageDescriptor.description = messageValue.description
    }
    return messageDescriptor
  }

  // Unrecognized shape -> implicit undefined; normalizeMessages skips it.
}

/**
 * Normalize `messages` and write them as pretty-printed JSON to `destFile`,
 * creating `destDir` first if needed.
 */
function extractMessages (messages, destDir, destFile) {
  console.log(chalk.green(`> [extract-messages.js] write file -> ${destFile} ✔️`))

  const json2poMessages = normalizeMessages(messages)

  mkdirp.sync(destDir)
  fs.writeFileSync(destFile, JSON.stringify(json2poMessages, null, 4))
  console.log()
}

extractMessages(
  messages.messages,
  path.join('extra', 'to-zanata'),
  path.join('extra', 'to-zanata', 'messages.json')
)

extractMessages(
  timeDurations.timeDurations,
  path.join('extra', 'to-zanata'),
  path.join('extra', 'to-zanata', 'time-durations.json')
)
apache-2.0
oehme/analysing-gradle-performance
my-app/src/main/java/org/gradle/test/performance/mediummonolithicjavaproject/p337/Production6755.java
1891
package org.gradle.test.performance.mediummonolithicjavaproject.p337;

/**
 * Plain bean holding ten independent String properties, each exposed through
 * a conventional getter/setter pair. Fields are grouped together and the
 * accessors follow, rather than interleaving field and accessors per property.
 */
public class Production6755 {

    private String property0;
    private String property1;
    private String property2;
    private String property3;
    private String property4;
    private String property5;
    private String property6;
    private String property7;
    private String property8;
    private String property9;

    public String getProperty0() {
        return property0;
    }

    public void setProperty0(String value) {
        property0 = value;
    }

    public String getProperty1() {
        return property1;
    }

    public void setProperty1(String value) {
        property1 = value;
    }

    public String getProperty2() {
        return property2;
    }

    public void setProperty2(String value) {
        property2 = value;
    }

    public String getProperty3() {
        return property3;
    }

    public void setProperty3(String value) {
        property3 = value;
    }

    public String getProperty4() {
        return property4;
    }

    public void setProperty4(String value) {
        property4 = value;
    }

    public String getProperty5() {
        return property5;
    }

    public void setProperty5(String value) {
        property5 = value;
    }

    public String getProperty6() {
        return property6;
    }

    public void setProperty6(String value) {
        property6 = value;
    }

    public String getProperty7() {
        return property7;
    }

    public void setProperty7(String value) {
        property7 = value;
    }

    public String getProperty8() {
        return property8;
    }

    public void setProperty8(String value) {
        property8 = value;
    }

    public String getProperty9() {
        return property9;
    }

    public void setProperty9(String value) {
        property9 = value;
    }
}
apache-2.0
jorgevillaverde/ai-planning
ai-planning-model/src/main/java/ar/edu/utn/frre/ia/planning/model/data/strips/AND.java
7636
/************************************************************************
 * Strathclyde Planning Group,
 * Department of Computer and Information Sciences,
 * University of Strathclyde, Glasgow, UK
 * http://planning.cis.strath.ac.uk/
 *
 * Copyright 2007, Keith Halsey
 * Copyright 2008, Andrew Coles and Amanda Smith
 *
 * (Questions/bug reports now to be sent to Andrew Coles)
 *
 * This file is part of ar.edu.utn.frre.ia.planning.model.
 *
 * JavaFF is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * JavaFF is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with ar.edu.utn.frre.ia.planning.model. If not, see <http://www.gnu.org/licenses/>.
 *
 ************************************************************************/

package ar.edu.utn.frre.ia.planning.model.data.strips;

import ar.edu.utn.frre.ia.planning.model.data.Literal;
import ar.edu.utn.frre.ia.planning.model.data.GroundCondition;
import ar.edu.utn.frre.ia.planning.model.data.GroundEffect;
import ar.edu.utn.frre.ia.planning.model.data.Condition;
import ar.edu.utn.frre.ia.planning.model.data.UngroundCondition;
import ar.edu.utn.frre.ia.planning.model.data.UngroundEffect;
import ar.edu.utn.frre.ia.planning.model.data.CompoundLiteral;
import ar.edu.utn.frre.ia.planning.model.data.PDDLPrinter;
import ar.edu.utn.frre.ia.planning.model.planning.State;

import java.util.Set;
import java.util.HashSet;
import java.util.Map;
import java.util.Iterator;
import java.io.PrintStream;

/**
 * Conjunction ("and") of literals in the STRIPS planning model.
 *
 * An AND node acts simultaneously as a condition (true iff every conjunct is
 * true) and as an effect (applies every conjunct's effect). It implements both
 * the ground and unground variants of those interfaces, delegating each
 * operation to all of its conjuncts.
 */
public class AND implements CompoundLiteral, GroundCondition, GroundEffect, UngroundCondition, UngroundEffect
{
	// The conjuncts. Raw Set of Literal-like objects (conditions/effects);
	// mutated in place by the staticify* methods below.
	@SuppressWarnings("rawtypes")
	protected Set literals = new HashSet(); // set of Literals

	/**
	 * Adds a conjunct. Nested AND nodes are flattened: their conjuncts are
	 * added individually (recursively) rather than the AND itself.
	 */
	@SuppressWarnings("unchecked")
	public void add(Object o)
	{
		if (o instanceof AND)
		{
			AND a = (AND) o;
			@SuppressWarnings("rawtypes")
			Iterator ait = a.literals.iterator();
			while (ait.hasNext())
			{
				add(ait.next());
			}
		}
		else literals.add(o);
	}

	/** A conjunction is static iff every conjunct is a static condition. */
	public boolean isStatic()
	{
		@SuppressWarnings("rawtypes")
		Iterator it = literals.iterator();
		while (it.hasNext())
		{
			Condition c = (Condition) it.next();
			if (!c.isStatic()) return false;
		}
		return true;
	}

	/**
	 * Simplifies this condition against the given function values, dropping
	 * conjuncts that reduce to TrueCondition. Mutates {@code literals} in
	 * place; returns TrueCondition when nothing remains, otherwise this.
	 */
	public GroundCondition staticifyCondition(Map fValues)
	{
		Set newlit = new HashSet(literals.size());
		Iterator it = literals.iterator();
		while (it.hasNext())
		{
			GroundCondition c = (GroundCondition) it.next();
			if (!(c instanceof TrueCondition)) newlit.add(c.staticifyCondition(fValues));
		}
		literals = newlit;
		if (literals.isEmpty()) return TrueCondition.getInstance();
		else return this;
	}

	/**
	 * Effect counterpart of staticifyCondition: drops NullEffect conjuncts and
	 * simplifies the rest. Mutates {@code literals}; returns NullEffect when
	 * nothing remains, otherwise this.
	 */
	public GroundEffect staticifyEffect(Map fValues)
	{
		Set newlit = new HashSet(literals.size());
		Iterator it = literals.iterator();
		while (it.hasNext())
		{
			GroundEffect e = (GroundEffect) it.next();
			if (!(e instanceof NullEffect)) newlit.add(e.staticifyEffect(fValues));
		}
		literals = newlit;
		if (literals.isEmpty()) return NullEffect.getInstance();
		else return this;
	}

	/** Union of the static predicates of all conjuncts. */
	public Set<Predicate> getStaticPredicates()
	{
		Set<Predicate> rSet = new HashSet<Predicate>();
		Iterator it = literals.iterator();
		while (it.hasNext())
		{
			UngroundCondition c = (UngroundCondition) it.next();
			rSet.addAll(c.getStaticPredicates());
		}
		return rSet;
	}

	/** True iff at least one conjunct affects the given predicate symbol. */
	public boolean effects(PredicateSymbol ps)
	{
		boolean rEff = false;
		Iterator lit = literals.iterator();
		// Stops scanning as soon as one conjunct reports an effect.
		while (lit.hasNext() && !(rEff))
		{
			UngroundEffect ue = (UngroundEffect) lit.next();
			rEff = ue.effects(ps);
		}
		return rEff;
	}

	/** Conjunction of each conjunct minus the given effect. */
	public UngroundCondition minus(UngroundEffect effect)
	{
		AND a = new AND();
		Iterator lit = literals.iterator();
		while (lit.hasNext())
		{
			UngroundCondition p = (UngroundCondition) lit.next();
			a.add(p.minus(effect));
		}
		return a;
	}

	/**
	 * Asks each conjunct (as an effect) whether it adds {@code cond}; returns
	 * the transformed condition produced by a conjunct that changed it, or
	 * {@code cond} unchanged when no conjunct did.
	 * NOTE(review): if several conjuncts transform cond, the last one seen
	 * wins (set iteration order) — presumably at most one applies; confirm.
	 */
	public UngroundCondition effectsAdd(UngroundCondition cond)
	{
		Iterator lit = literals.iterator();
		UngroundCondition c = null;
		while (lit.hasNext())
		{
			UngroundEffect p = (UngroundEffect) lit.next();
			UngroundCondition d = p.effectsAdd(cond);
			if (!d.equals(cond)) c = d;
		}
		if (c == null) return cond;
		else return c;
	}

	/** Grounds every conjunct as an effect using the variable substitution. */
	public GroundEffect groundEffect(Map varMap)
	{
		AND a = new AND();
		Iterator lit = literals.iterator();
		while (lit.hasNext())
		{
			UngroundEffect p = (UngroundEffect) lit.next();
			a.add(p.groundEffect(varMap));
		}
		return a;
	}

	/** Grounds every conjunct as a condition using the variable substitution. */
	public GroundCondition groundCondition(Map varMap)
	{
		AND a = new AND();
		Iterator lit = literals.iterator();
		while (lit.hasNext())
		{
			UngroundCondition p = (UngroundCondition) lit.next();
			a.add(p.groundCondition(varMap));
		}
		return a;
	}

	/** True in state s iff every conjunct holds in s. */
	public boolean isTrue(State s)
	{
		@SuppressWarnings("rawtypes")
		Iterator cit = literals.iterator();
		while (cit.hasNext())
		{
			GroundCondition c = (GroundCondition) cit.next();
			if (!c.isTrue(s)) return false;
		}
		return true;
	}

	/** Applies all conjuncts to s: deletes first, then adds. */
	public void apply(State s)
	{
		applyDels(s);
		applyAdds(s);
	}

	/** Applies only the add part of every conjunct's effect. */
	public void applyAdds(State s)
	{
		Iterator eit = literals.iterator();
		while (eit.hasNext())
		{
			GroundEffect e = (GroundEffect) eit.next();
			e.applyAdds(s);
		}
	}

	/** Applies only the delete part of every conjunct's effect. */
	public void applyDels(State s)
	{
		Iterator eit = literals.iterator();
		while (eit.hasNext())
		{
			GroundEffect e = (GroundEffect) eit.next();
			e.applyDels(s);
		}
	}

	/** Union of the conditional propositions of all conjuncts. */
	public Set getConditionalPropositions()
	{
		Set rSet = new HashSet();
		Iterator eit = literals.iterator();
		while (eit.hasNext())
		{
			GroundCondition e = (GroundCondition) eit.next();
			rSet.addAll(e.getConditionalPropositions());
		}
		return rSet;
	}

	/** Union of the propositions added by all conjuncts. */
	public Set getAddPropositions()
	{
		Set rSet = new HashSet();
		Iterator eit = literals.iterator();
		while (eit.hasNext())
		{
			GroundEffect e = (GroundEffect) eit.next();
			rSet.addAll(e.getAddPropositions());
		}
		return rSet;
	}

	/** Union of the propositions deleted by all conjuncts. */
	public Set getDeletePropositions()
	{
		Set rSet = new HashSet();
		Iterator eit = literals.iterator();
		while (eit.hasNext())
		{
			GroundEffect e = (GroundEffect) eit.next();
			rSet.addAll(e.getDeletePropositions());
		}
		return rSet;
	}

	/** Union of the operators of all conjuncts. */
	public Set getOperators()
	{
		Set rSet = new HashSet();
		Iterator eit = literals.iterator();
		while (eit.hasNext())
		{
			GroundEffect e = (GroundEffect) eit.next();
			rSet.addAll(e.getOperators());
		}
		return rSet;
	}

	/** Union of the comparators of all conjuncts. */
	public Set getComparators()
	{
		Set rSet = new HashSet();
		Iterator eit = literals.iterator();
		while (eit.hasNext())
		{
			GroundCondition e = (GroundCondition) eit.next();
			rSet.addAll(e.getComparators());
		}
		return rSet;
	}

	/** Two AND nodes are equal iff they hold equal conjunct sets. */
	public boolean equals(Object obj)
	{
		if (obj instanceof AND)
		{
			AND a = (AND) obj;
			return (literals.equals(a.literals));
		}
		else return false;
	}

	/** Consistent with equals: delegates to the conjunct set. */
	public int hashCode()
	{
		return literals.hashCode();
	}

	/** Pretty-prints this conjunction as a PDDL "(and ...)" block. */
	public void PDDLPrint(PrintStream p, int indent)
	{
		PDDLPrinter.printToString(literals, "and", p, false, true, indent);
	}

	/** Untyped PDDL-style rendering, e.g. "(and a b c)". */
	public String toString()
	{
		String str = "(and";
		Iterator it = literals.iterator();
		while (it.hasNext())
		{
			str += " " + it.next();
		}
		str += ")";
		return str;
	}

	/** Typed PDDL-style rendering using each conjunct's toStringTyped(). */
	public String toStringTyped()
	{
		String str = "(and";
		Iterator it = literals.iterator();
		while (it.hasNext())
		{
			Literal l = (Literal) it.next();
			str += " " + l.toStringTyped();
		}
		str += ")";
		return str;
	}
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-mediapackagevod/src/main/java/com/amazonaws/services/mediapackagevod/model/ListAssetsRequest.java
6668
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.mediapackagevod.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for listing MediaPackage VOD assets.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediapackage-vod-2018-11-07/ListAssets" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListAssetsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** Upper bound on number of records to return. */
    private Integer maxResults;
    /** A token used to resume pagination from the end of a previous request. */
    private String nextToken;
    /** Returns Assets associated with the specified PackagingGroup. */
    private String packagingGroupId;

    /** @param maxResults Upper bound on number of records to return. */
    public void setMaxResults(Integer maxResults) {
        this.maxResults = maxResults;
    }

    /** @return Upper bound on number of records to return. */
    public Integer getMaxResults() {
        return this.maxResults;
    }

    /**
     * Fluent variant of {@link #setMaxResults(Integer)}.
     *
     * @param maxResults Upper bound on number of records to return.
     * @return this request, for call chaining.
     */
    public ListAssetsRequest withMaxResults(Integer maxResults) {
        setMaxResults(maxResults);
        return this;
    }

    /** @param nextToken A token used to resume pagination from the end of a previous request. */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /** @return A token used to resume pagination from the end of a previous request. */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken A token used to resume pagination from the end of a previous request.
     * @return this request, for call chaining.
     */
    public ListAssetsRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /** @param packagingGroupId Returns Assets associated with the specified PackagingGroup. */
    public void setPackagingGroupId(String packagingGroupId) {
        this.packagingGroupId = packagingGroupId;
    }

    /** @return Returns Assets associated with the specified PackagingGroup. */
    public String getPackagingGroupId() {
        return this.packagingGroupId;
    }

    /**
     * Fluent variant of {@link #setPackagingGroupId(String)}.
     *
     * @param packagingGroupId Returns Assets associated with the specified PackagingGroup.
     * @return this request, for call chaining.
     */
    public ListAssetsRequest withPackagingGroupId(String packagingGroupId) {
        setPackagingGroupId(packagingGroupId);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Only non-null fields are rendered; a trailing comma follows every
        // field except the last one, matching the generated-SDK format.
        StringBuilder builder = new StringBuilder();
        builder.append("{");
        if (getMaxResults() != null)
            builder.append("MaxResults: ").append(getMaxResults()).append(",");
        if (getNextToken() != null)
            builder.append("NextToken: ").append(getNextToken()).append(",");
        if (getPackagingGroupId() != null)
            builder.append("PackagingGroupId: ").append(getPackagingGroupId());
        builder.append("}");
        return builder.toString();
    }

    /** Null-safe equality check for a single field pair. */
    private static boolean fieldEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof ListAssetsRequest))
            return false;

        ListAssetsRequest other = (ListAssetsRequest) obj;
        return fieldEquals(getMaxResults(), other.getMaxResults())
                && fieldEquals(getNextToken(), other.getNextToken())
                && fieldEquals(getPackagingGroupId(), other.getPackagingGroupId());
    }

    @Override
    public int hashCode() {
        // Standard 31-based accumulation over the three fields, null -> 0;
        // produces the same value as the unrolled generated version.
        final int prime = 31;
        int hashCode = 1;
        for (Object field : new Object[] { getMaxResults(), getNextToken(), getPackagingGroupId() }) {
            hashCode = prime * hashCode + ((field == null) ? 0 : field.hashCode());
        }
        return hashCode;
    }

    @Override
    public ListAssetsRequest clone() {
        return (ListAssetsRequest) super.clone();
    }
}
apache-2.0
zhenglaizhang/CppSnippets
Unix/svr.cc
594
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include <fstream>
#include <sstream>
#include <iostream>
#include <vector>

using namespace std;

/*
 * TDOA server stub.
 *
 * Currently it only sets up the base-station coordinate tables and verifies
 * that the OTDOA measurement file can be opened; the socket handling implied
 * by the networking includes above is not implemented yet.
 *
 * Returns 0 on success, -1 (exit status 255) when otdoa.txt cannot be opened.
 */
int main(int argc, char const *argv[])
{
    // Command-line arguments are not used yet.
    (void) argc;
    (void) argv;

    // Base-station coordinates (units unspecified here — TODO confirm).
    double bs_x[] = { 35, -3, 76, 76, -3 };
    double bs_y[] = { 35, 77, 77, -3, -3 };
    // Buffer for the TDOA measurements to be parsed from otdoa.txt
    // (presumably 384 samples x 4 station pairs — confirm with the file format).
    double tdoa[384][4];

    // Placeholders until the parsing/positioning logic is implemented;
    // the casts silence unused-variable warnings.
    (void) bs_x;
    (void) bs_y;
    (void) tdoa;

    ifstream infile("otdoa.txt");
    if (!infile)
    {
        // Fix: report the failure on stderr instead of stdout so error text
        // is not mixed with (future) normal program output.
        cerr << "Cannot open otdoa.txt" << endl;
        return -1;
    }

    return 0;
}
apache-2.0
spark0001/spark2.1.1
core/src/main/scala/org/apache/spark/SparkConf.scala
30333
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark import java.util.concurrent.ConcurrentHashMap import scala.collection.JavaConverters._ import scala.collection.mutable.LinkedHashSet import org.apache.avro.{Schema, SchemaNormalization} import org.apache.spark.internal.Logging import org.apache.spark.internal.config._ import org.apache.spark.serializer.KryoSerializer import org.apache.spark.util.Utils /** * Configuration for a Spark application. Used to set various Spark parameters as key-value pairs. * * Most of the time, you would create a SparkConf object with `new SparkConf()`, which will load * values from any `spark.*` Java system properties set in your application as well. In this case, * parameters you set directly on the `SparkConf` object take priority over system properties. * * For unit tests, you can also call `new SparkConf(false)` to skip loading external settings and * get the same configuration no matter what the system properties are. * * All setter methods in this class support chaining. For example, you can write * `new SparkConf().setMaster("local").setAppName("My app")`. 
* * @param loadDefaults whether to also load values from Java system properties * * @note Once a SparkConf object is passed to Spark, it is cloned and can no longer be modified * by the user. Spark does not support modifying the configuration at runtime. */ // Spark应用程序的配置。 用于将各种Spark参数设置为键值对. // 大多数情况下,您将使用new SparkConf()创建一个SparkConf对象, // 该对象将加载任何spark.*的值,Java系统属性也设置在您的应用程序中。 在这种情况下,直接在SparkConf对象上设置的参数优先于系统属性(默认设置)。 // 对于单元测试,您也可以调用new SparkConf(false)来跳过加载外部设置,并获得相同的配置,无论系统属性是什么。 // 此类中的所有setter方法都支持链式调用。 例如,您可以编写new SparkConf().setMaster("local").setAppName("My app")。 // 注意:一旦SparkConf对象传递到Spark,它被克隆,不能再被用户修改。 Spark不支持在运行时修改配置。 class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Serializable { import SparkConf._ /** Create a SparkConf that loads defaults from system properties and the classpath */ def this() = this(true) private val settings = new ConcurrentHashMap[String, String]() @transient private lazy val reader: ConfigReader = { val _reader = new ConfigReader(new SparkConfigProvider(settings)) _reader.bindEnv(new ConfigProvider { override def get(key: String): Option[String] = Option(getenv(key)) }) _reader } if (loadDefaults) { loadFromSystemProperties(false) } private[spark] def loadFromSystemProperties(silent: Boolean): SparkConf = { // Load any spark.* system properties for ((key, value) <- Utils.getSystemProperties if key.startsWith("spark.")) { set(key, value, silent) } this } /** Set a configuration variable. 
*/ def set(key: String, value: String): SparkConf = { set(key, value, false) } private[spark] def set(key: String, value: String, silent: Boolean): SparkConf = { if (key == null) { throw new NullPointerException("null key") } if (value == null) { throw new NullPointerException("null value for " + key) } if (!silent) { logDeprecationWarning(key) } settings.put(key, value) this } private[spark] def set[T](entry: ConfigEntry[T], value: T): SparkConf = { set(entry.key, entry.stringConverter(value)) this } private[spark] def set[T](entry: OptionalConfigEntry[T], value: T): SparkConf = { set(entry.key, entry.rawStringConverter(value)) this } /** * The master URL to connect to, such as "local" to run locally with one thread, "local[4]" to * run locally with 4 cores, or "spark://master:7077" to run on a Spark standalone cluster. */ def setMaster(master: String): SparkConf = { set("spark.master", master) } /** Set a name for your application. Shown in the Spark web UI. */ def setAppName(name: String): SparkConf = { set("spark.app.name", name) } /** Set JAR files to distribute to the cluster. */ def setJars(jars: Seq[String]): SparkConf = { for (jar <- jars if (jar == null)) logWarning("null jar passed to SparkContext constructor") set("spark.jars", jars.filter(_ != null).mkString(",")) } /** Set JAR files to distribute to the cluster. (Java-friendly version.) */ def setJars(jars: Array[String]): SparkConf = { setJars(jars.toSeq) } /** * Set an environment variable to be used when launching executors for this application. * These variables are stored as properties of the form spark.executorEnv.VAR_NAME * (for example spark.executorEnv.PATH) but this method makes them easier to set. */ def setExecutorEnv(variable: String, value: String): SparkConf = { set("spark.executorEnv." + variable, value) } /** * Set multiple environment variables to be used when launching executors. 
* These variables are stored as properties of the form spark.executorEnv.VAR_NAME * (for example spark.executorEnv.PATH) but this method makes them easier to set. */ def setExecutorEnv(variables: Seq[(String, String)]): SparkConf = { for ((k, v) <- variables) { setExecutorEnv(k, v) } this } /** * Set multiple environment variables to be used when launching executors. * (Java-friendly version.) */ def setExecutorEnv(variables: Array[(String, String)]): SparkConf = { setExecutorEnv(variables.toSeq) } /** * Set the location where Spark is installed on worker nodes. */ def setSparkHome(home: String): SparkConf = { set("spark.home", home) } /** Set multiple parameters together */ def setAll(settings: Traversable[(String, String)]): SparkConf = { settings.foreach { case (k, v) => set(k, v) } this } /** Set a parameter if it isn't already configured */ def setIfMissing(key: String, value: String): SparkConf = { if (settings.putIfAbsent(key, value) == null) { logDeprecationWarning(key) } this } private[spark] def setIfMissing[T](entry: ConfigEntry[T], value: T): SparkConf = { if (settings.putIfAbsent(entry.key, entry.stringConverter(value)) == null) { logDeprecationWarning(entry.key) } this } private[spark] def setIfMissing[T](entry: OptionalConfigEntry[T], value: T): SparkConf = { if (settings.putIfAbsent(entry.key, entry.rawStringConverter(value)) == null) { logDeprecationWarning(entry.key) } this } /** * Use Kryo serialization and register the given set of classes with Kryo. * If called multiple times, this will append the classes from all calls together. 
*/ def registerKryoClasses(classes: Array[Class[_]]): SparkConf = { val allClassNames = new LinkedHashSet[String]() allClassNames ++= get("spark.kryo.classesToRegister", "").split(',').map(_.trim) .filter(!_.isEmpty) allClassNames ++= classes.map(_.getName) set("spark.kryo.classesToRegister", allClassNames.mkString(",")) set("spark.serializer", classOf[KryoSerializer].getName) this } private final val avroNamespace = "avro.schema." /** * Use Kryo serialization and register the given set of Avro schemas so that the generic * record serializer can decrease network IO */ def registerAvroSchemas(schemas: Schema*): SparkConf = { for (schema <- schemas) { set(avroNamespace + SchemaNormalization.parsingFingerprint64(schema), schema.toString) } this } /** Gets all the avro schemas in the configuration used in the generic Avro record serializer */ def getAvroSchema: Map[Long, String] = { getAll.filter { case (k, v) => k.startsWith(avroNamespace) } .map { case (k, v) => (k.substring(avroNamespace.length).toLong, v) } .toMap } /** Remove a parameter from the configuration */ def remove(key: String): SparkConf = { settings.remove(key) this } private[spark] def remove(entry: ConfigEntry[_]): SparkConf = { remove(entry.key) } /** Get a parameter; throws a NoSuchElementException if it's not set */ def get(key: String): String = { getOption(key).getOrElse(throw new NoSuchElementException(key)) } /** Get a parameter, falling back to a default if not set */ def get(key: String, defaultValue: String): String = { getOption(key).getOrElse(defaultValue) } /** * Retrieves the value of a pre-defined configuration entry. * * - This is an internal Spark API. * - The return type if defined by the configuration entry. * - This will throw an exception is the config is not optional and the value is not set. */ private[spark] def get[T](entry: ConfigEntry[T]): T = { entry.readFrom(reader) } /** * Get a time parameter as seconds; throws a NoSuchElementException if it's not set. 
If no * suffix is provided then seconds are assumed. * @throws java.util.NoSuchElementException */ def getTimeAsSeconds(key: String): Long = { Utils.timeStringAsSeconds(get(key)) } /** * Get a time parameter as seconds, falling back to a default if not set. If no * suffix is provided then seconds are assumed. */ def getTimeAsSeconds(key: String, defaultValue: String): Long = { Utils.timeStringAsSeconds(get(key, defaultValue)) } /** * Get a time parameter as milliseconds; throws a NoSuchElementException if it's not set. If no * suffix is provided then milliseconds are assumed. * @throws java.util.NoSuchElementException */ def getTimeAsMs(key: String): Long = { Utils.timeStringAsMs(get(key)) } /** * Get a time parameter as milliseconds, falling back to a default if not set. If no * suffix is provided then milliseconds are assumed. */ def getTimeAsMs(key: String, defaultValue: String): Long = { Utils.timeStringAsMs(get(key, defaultValue)) } /** * Get a size parameter as bytes; throws a NoSuchElementException if it's not set. If no * suffix is provided then bytes are assumed. * @throws java.util.NoSuchElementException */ def getSizeAsBytes(key: String): Long = { Utils.byteStringAsBytes(get(key)) } /** * Get a size parameter as bytes, falling back to a default if not set. If no * suffix is provided then bytes are assumed. */ def getSizeAsBytes(key: String, defaultValue: String): Long = { Utils.byteStringAsBytes(get(key, defaultValue)) } /** * Get a size parameter as bytes, falling back to a default if not set. */ def getSizeAsBytes(key: String, defaultValue: Long): Long = { Utils.byteStringAsBytes(get(key, defaultValue + "B")) } /** * Get a size parameter as Kibibytes; throws a NoSuchElementException if it's not set. If no * suffix is provided then Kibibytes are assumed. * @throws java.util.NoSuchElementException */ def getSizeAsKb(key: String): Long = { Utils.byteStringAsKb(get(key)) } /** * Get a size parameter as Kibibytes, falling back to a default if not set. 
If no * suffix is provided then Kibibytes are assumed. */ def getSizeAsKb(key: String, defaultValue: String): Long = { Utils.byteStringAsKb(get(key, defaultValue)) } /** * Get a size parameter as Mebibytes; throws a NoSuchElementException if it's not set. If no * suffix is provided then Mebibytes are assumed. * @throws java.util.NoSuchElementException */ def getSizeAsMb(key: String): Long = { Utils.byteStringAsMb(get(key)) } /** * Get a size parameter as Mebibytes, falling back to a default if not set. If no * suffix is provided then Mebibytes are assumed. */ def getSizeAsMb(key: String, defaultValue: String): Long = { Utils.byteStringAsMb(get(key, defaultValue)) } /** * Get a size parameter as Gibibytes; throws a NoSuchElementException if it's not set. If no * suffix is provided then Gibibytes are assumed. * @throws java.util.NoSuchElementException */ def getSizeAsGb(key: String): Long = { Utils.byteStringAsGb(get(key)) } /** * Get a size parameter as Gibibytes, falling back to a default if not set. If no * suffix is provided then Gibibytes are assumed. 
*/ def getSizeAsGb(key: String, defaultValue: String): Long = { Utils.byteStringAsGb(get(key, defaultValue)) } /** Get a parameter as an Option */ def getOption(key: String): Option[String] = { Option(settings.get(key)).orElse(getDeprecatedConfig(key, this)) } /** Get all parameters as a list of pairs */ def getAll: Array[(String, String)] = { settings.entrySet().asScala.map(x => (x.getKey, x.getValue)).toArray } /** * Get all parameters that start with `prefix` */ def getAllWithPrefix(prefix: String): Array[(String, String)] = { getAll.filter { case (k, v) => k.startsWith(prefix) } .map { case (k, v) => (k.substring(prefix.length), v) } } /** Get a parameter as an integer, falling back to a default if not set */ def getInt(key: String, defaultValue: Int): Int = { getOption(key).map(_.toInt).getOrElse(defaultValue) } /** Get a parameter as a long, falling back to a default if not set */ def getLong(key: String, defaultValue: Long): Long = { getOption(key).map(_.toLong).getOrElse(defaultValue) } /** Get a parameter as a double, falling back to a default if not set */ def getDouble(key: String, defaultValue: Double): Double = { getOption(key).map(_.toDouble).getOrElse(defaultValue) } /** Get a parameter as a boolean, falling back to a default if not set */ def getBoolean(key: String, defaultValue: Boolean): Boolean = { getOption(key).map(_.toBoolean).getOrElse(defaultValue) } /** Get all executor environment variables set on this SparkConf */ def getExecutorEnv: Seq[(String, String)] = { getAllWithPrefix("spark.executorEnv.") } /** * Returns the Spark application id, valid in the Driver after TaskScheduler registration and * from the start in the Executor. */ def getAppId: String = get("spark.app.id") /** Does the configuration contain a given parameter? 
*/ def contains(key: String): Boolean = { settings.containsKey(key) || configsWithAlternatives.get(key).toSeq.flatten.exists { alt => contains(alt.key) } } private[spark] def contains(entry: ConfigEntry[_]): Boolean = contains(entry.key) /** Copy this object */ override def clone: SparkConf = { val cloned = new SparkConf(false) settings.entrySet().asScala.foreach { e => cloned.set(e.getKey(), e.getValue(), true) } cloned } /** * By using this instead of System.getenv(), environment variables can be mocked * in unit tests. */ private[spark] def getenv(name: String): String = System.getenv(name) /** * Checks for illegal or deprecated config settings. Throws an exception for the former. Not * idempotent - may mutate this conf object to convert deprecated settings to supported ones. */ private[spark] def validateSettings() { if (contains("spark.local.dir")) { val msg = "In Spark 1.0 and later spark.local.dir will be overridden by the value set by " + "the cluster manager (via SPARK_LOCAL_DIRS in mesos/standalone and LOCAL_DIRS in YARN)." logWarning(msg) } val executorOptsKey = "spark.executor.extraJavaOptions" val executorClasspathKey = "spark.executor.extraClassPath" val driverOptsKey = "spark.driver.extraJavaOptions" val driverClassPathKey = "spark.driver.extraClassPath" val driverLibraryPathKey = "spark.driver.extraLibraryPath" val sparkExecutorInstances = "spark.executor.instances" // Used by Yarn in 1.1 and before sys.props.get("spark.driver.libraryPath").foreach { value => val warning = s""" |spark.driver.libraryPath was detected (set to '$value'). |This is deprecated in Spark 1.2+. | |Please instead use: $driverLibraryPathKey """.stripMargin logWarning(warning) } // Validate spark.executor.extraJavaOptions getOption(executorOptsKey).foreach { javaOpts => if (javaOpts.contains("-Dspark")) { val msg = s"$executorOptsKey is not allowed to set Spark options (was '$javaOpts'). 
" + "Set them directly on a SparkConf or in a properties file when using ./bin/spark-submit." throw new Exception(msg) } if (javaOpts.contains("-Xmx")) { val msg = s"$executorOptsKey is not allowed to specify max heap memory settings " + s"(was '$javaOpts'). Use spark.executor.memory instead." throw new Exception(msg) } } // Validate memory fractions val deprecatedMemoryKeys = Seq( "spark.storage.memoryFraction", "spark.shuffle.memoryFraction", "spark.shuffle.safetyFraction", "spark.storage.unrollFraction", "spark.storage.safetyFraction") val memoryKeys = Seq( "spark.memory.fraction", "spark.memory.storageFraction") ++ deprecatedMemoryKeys for (key <- memoryKeys) { val value = getDouble(key, 0.5) if (value > 1 || value < 0) { throw new IllegalArgumentException(s"$key should be between 0 and 1 (was '$value').") } } // Warn against deprecated memory fractions (unless legacy memory management mode is enabled) val legacyMemoryManagementKey = "spark.memory.useLegacyMode" val legacyMemoryManagement = getBoolean(legacyMemoryManagementKey, false) if (!legacyMemoryManagement) { val keyset = deprecatedMemoryKeys.toSet val detected = settings.keys().asScala.filter(keyset.contains) if (detected.nonEmpty) { logWarning("Detected deprecated memory fraction settings: " + detected.mkString("[", ", ", "]") + ". As of Spark 1.6, execution and storage " + "memory management are unified. All memory fractions used in the old model are " + "now deprecated and no longer read. If you wish to use the old memory management, " + s"you may explicitly enable `$legacyMemoryManagementKey` (not recommended).") } } // Check for legacy configs sys.env.get("SPARK_JAVA_OPTS").foreach { value => val warning = s""" |SPARK_JAVA_OPTS was detected (set to '$value'). |This is deprecated in Spark 1.0+. 
| |Please instead use: | - ./spark-submit with conf/spark-defaults.conf to set defaults for an application | - ./spark-submit with --driver-java-options to set -X options for a driver | - spark.executor.extraJavaOptions to set -X options for executors | - SPARK_DAEMON_JAVA_OPTS to set java options for standalone daemons (master or worker) """.stripMargin logWarning(warning) for (key <- Seq(executorOptsKey, driverOptsKey)) { if (getOption(key).isDefined) { throw new SparkException(s"Found both $key and SPARK_JAVA_OPTS. Use only the former.") } else { logWarning(s"Setting '$key' to '$value' as a work-around.") set(key, value) } } } sys.env.get("SPARK_CLASSPATH").foreach { value => val warning = s""" |SPARK_CLASSPATH was detected (set to '$value'). |This is deprecated in Spark 1.0+. | |Please instead use: | - ./spark-submit with --driver-class-path to augment the driver classpath | - spark.executor.extraClassPath to augment the executor classpath """.stripMargin logWarning(warning) for (key <- Seq(executorClasspathKey, driverClassPathKey)) { if (getOption(key).isDefined) { throw new SparkException(s"Found both $key and SPARK_CLASSPATH. Use only the former.") } else { logWarning(s"Setting '$key' to '$value' as a work-around.") set(key, value) } } } if (!contains(sparkExecutorInstances)) { sys.env.get("SPARK_WORKER_INSTANCES").foreach { value => val warning = s""" |SPARK_WORKER_INSTANCES was detected (set to '$value'). |This is deprecated in Spark 1.0+. | |Please instead use: | - ./spark-submit with --num-executors to specify the number of executors | - Or set SPARK_EXECUTOR_INSTANCES | - spark.executor.instances to configure the number of instances in the spark config. """.stripMargin logWarning(warning) set("spark.executor.instances", value) } } if (contains("spark.master") && get("spark.master").startsWith("yarn-")) { val warning = s"spark.master ${get("spark.master")} is deprecated in Spark 2.0+, please " + "instead use \"yarn\" with specified deploy mode." 
get("spark.master") match { case "yarn-cluster" => logWarning(warning) set("spark.master", "yarn") set("spark.submit.deployMode", "cluster") case "yarn-client" => logWarning(warning) set("spark.master", "yarn") set("spark.submit.deployMode", "client") case _ => // Any other unexpected master will be checked when creating scheduler backend. } } if (contains("spark.submit.deployMode")) { get("spark.submit.deployMode") match { case "cluster" | "client" => case e => throw new SparkException("spark.submit.deployMode can only be \"cluster\" or " + "\"client\".") } } } /** * Return a string listing all keys and values, one per line. This is useful to print the * configuration out for debugging. */ def toDebugString: String = { getAll.sorted.map{case (k, v) => k + "=" + v}.mkString("\n") } } private[spark] object SparkConf extends Logging { /** * Maps deprecated config keys to information about the deprecation. * * The extra information is logged as a warning when the config is present in the user's * configuration. */ private val deprecatedConfigs: Map[String, DeprecatedConfig] = { val configs = Seq( DeprecatedConfig("spark.cache.class", "0.8", "The spark.cache.class property is no longer being used! Specify storage levels using " + "the RDD.persist() method instead."), DeprecatedConfig("spark.yarn.user.classpath.first", "1.3", "Please use spark.{driver,executor}.userClassPathFirst instead."), DeprecatedConfig("spark.kryoserializer.buffer.mb", "1.4", "Please use spark.kryoserializer.buffer instead. The default value for " + "spark.kryoserializer.buffer.mb was previously specified as '0.064'. Fractional values " + "are no longer accepted. 
To specify the equivalent now, one may use '64k'."), DeprecatedConfig("spark.rpc", "2.0", "Not used any more."), DeprecatedConfig("spark.scheduler.executorTaskBlacklistTime", "2.1.0", "Please use the new blacklisting options, spark.blacklist.*") ) Map(configs.map { cfg => (cfg.key -> cfg) } : _*) } /** * Maps a current config key to alternate keys that were used in previous version of Spark. * * The alternates are used in the order defined in this map. If deprecated configs are * present in the user's configuration, a warning is logged. */ private val configsWithAlternatives = Map[String, Seq[AlternateConfig]]( "spark.executor.userClassPathFirst" -> Seq( AlternateConfig("spark.files.userClassPathFirst", "1.3")), "spark.history.fs.update.interval" -> Seq( AlternateConfig("spark.history.fs.update.interval.seconds", "1.4"), AlternateConfig("spark.history.fs.updateInterval", "1.3"), AlternateConfig("spark.history.updateInterval", "1.3")), "spark.history.fs.cleaner.interval" -> Seq( AlternateConfig("spark.history.fs.cleaner.interval.seconds", "1.4")), "spark.history.fs.cleaner.maxAge" -> Seq( AlternateConfig("spark.history.fs.cleaner.maxAge.seconds", "1.4")), "spark.yarn.am.waitTime" -> Seq( AlternateConfig("spark.yarn.applicationMaster.waitTries", "1.3", // Translate old value to a duration, with 10s wait time per try. 
translation = s => s"${s.toLong * 10}s")), "spark.reducer.maxSizeInFlight" -> Seq( AlternateConfig("spark.reducer.maxMbInFlight", "1.4")), "spark.kryoserializer.buffer" -> Seq(AlternateConfig("spark.kryoserializer.buffer.mb", "1.4", translation = s => s"${(s.toDouble * 1000).toInt}k")), "spark.kryoserializer.buffer.max" -> Seq( AlternateConfig("spark.kryoserializer.buffer.max.mb", "1.4")), "spark.shuffle.file.buffer" -> Seq( AlternateConfig("spark.shuffle.file.buffer.kb", "1.4")), "spark.executor.logs.rolling.maxSize" -> Seq( AlternateConfig("spark.executor.logs.rolling.size.maxBytes", "1.4")), "spark.io.compression.snappy.blockSize" -> Seq( AlternateConfig("spark.io.compression.snappy.block.size", "1.4")), "spark.io.compression.lz4.blockSize" -> Seq( AlternateConfig("spark.io.compression.lz4.block.size", "1.4")), "spark.rpc.numRetries" -> Seq( AlternateConfig("spark.akka.num.retries", "1.4")), "spark.rpc.retry.wait" -> Seq( AlternateConfig("spark.akka.retry.wait", "1.4")), "spark.rpc.askTimeout" -> Seq( AlternateConfig("spark.akka.askTimeout", "1.4")), "spark.rpc.lookupTimeout" -> Seq( AlternateConfig("spark.akka.lookupTimeout", "1.4")), "spark.streaming.fileStream.minRememberDuration" -> Seq( AlternateConfig("spark.streaming.minRememberDuration", "1.5")), "spark.yarn.max.executor.failures" -> Seq( AlternateConfig("spark.yarn.max.worker.failures", "1.5")), "spark.memory.offHeap.enabled" -> Seq( AlternateConfig("spark.unsafe.offHeap", "1.6")), "spark.rpc.message.maxSize" -> Seq( AlternateConfig("spark.akka.frameSize", "1.6")), "spark.yarn.jars" -> Seq( AlternateConfig("spark.yarn.jar", "2.0")) ) /** * A view of `configsWithAlternatives` that makes it more efficient to look up deprecated * config keys. * * Maps the deprecated config name to a 2-tuple (new config name, alternate config info). 
*/ private val allAlternatives: Map[String, (String, AlternateConfig)] = { configsWithAlternatives.keys.flatMap { key => configsWithAlternatives(key).map { cfg => (cfg.key -> (key -> cfg)) } }.toMap } /** * Return whether the given config should be passed to an executor on start-up. * * Certain authentication configs are required from the executor when it connects to * the scheduler, while the rest of the spark configs can be inherited from the driver later. */ def isExecutorStartupConf(name: String): Boolean = { (name.startsWith("spark.auth") && name != SecurityManager.SPARK_AUTH_SECRET_CONF) || name.startsWith("spark.ssl") || name.startsWith("spark.rpc") || isSparkPortConf(name) } /** * Return true if the given config matches either `spark.*.port` or `spark.port.*`. */ def isSparkPortConf(name: String): Boolean = { (name.startsWith("spark.") && name.endsWith(".port")) || name.startsWith("spark.port.") } /** * Looks for available deprecated keys for the given config option, and return the first * value available. */ def getDeprecatedConfig(key: String, conf: SparkConf): Option[String] = { configsWithAlternatives.get(key).flatMap { alts => alts.collectFirst { case alt if conf.contains(alt.key) => val value = conf.get(alt.key) if (alt.translation != null) alt.translation(value) else value } } } /** * Logs a warning message if the given config key is deprecated. */ def logDeprecationWarning(key: String): Unit = { deprecatedConfigs.get(key).foreach { cfg => logWarning( s"The configuration key '$key' has been deprecated as of Spark ${cfg.version} and " + s"may be removed in the future. ${cfg.deprecationMessage}") return } allAlternatives.get(key).foreach { case (newKey, cfg) => logWarning( s"The configuration key '$key' has been deprecated as of Spark ${cfg.version} and " + s"may be removed in the future. 
Please use the new key '$newKey' instead.") return } if (key.startsWith("spark.akka") || key.startsWith("spark.ssl.akka")) { logWarning( s"The configuration key $key is not supported any more " + s"because Spark doesn't use Akka since 2.0") } } /** * Holds information about keys that have been deprecated and do not have a replacement. * * @param key The deprecated key. * @param version Version of Spark where key was deprecated. * @param deprecationMessage Message to include in the deprecation warning. */ private case class DeprecatedConfig( key: String, version: String, deprecationMessage: String) /** * Information about an alternate configuration key that has been deprecated. * * @param key The deprecated config key. * @param version The Spark version in which the key was deprecated. * @param translation A translation function for converting old config values into new ones. */ private case class AlternateConfig( key: String, version: String, translation: String => String = null) }
apache-2.0
lakshani/carbon-mobile-appmgt
components/extensions/org.wso2.carbon.appmgt.extension.is500/src/main/java/org/wso2/carbon/appmgt/impl/idp/sso/is500/internal/IS500AdapterComponent.java
2210
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.appmgt.impl.idp.sso.is500.internal; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceRegistration; import org.osgi.service.component.ComponentContext; import org.wso2.carbon.appmgt.api.IdentityApplicationManagementFactory; import org.wso2.carbon.appmgt.impl.idp.sso.is500.IdentityApplicationManagementFactoryImpl; /** * @scr.component name="org.wso2.appm.extension.identity.adapter.is500" immediate="true" */ public class IS500AdapterComponent { private static final Log log = LogFactory.getLog(IS500AdapterComponent.class); private ServiceRegistration registration; protected void activate(ComponentContext componentContext) throws Exception { if (log.isDebugEnabled()) { log.debug("AppM WSO2 IS 500 component activated"); } BundleContext bundleContext = componentContext.getBundleContext(); registration = bundleContext.registerService(IdentityApplicationManagementFactory.class, new IdentityApplicationManagementFactoryImpl(), null); } protected void deactivate(ComponentContext componentContext) { if (registration != null) { BundleContext bundleContext = componentContext.getBundleContext(); bundleContext.ungetService(registration.getReference()); } if (log.isDebugEnabled()) { log.debug("AppM WSO2 IS 500 
component de-activated"); } } }
apache-2.0
cboling/xos
xos/api/utility/onboarding.py
3438
import json

from django.http import HttpResponse
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework import serializers
from rest_framework import generics
from rest_framework import status
from core.models import *
from xos.apibase import XOSListCreateAPIView, XOSRetrieveUpdateDestroyAPIView, XOSPermissionDenied
from api.xosapi_helpers import PlusModelSerializer, XOSViewSet, ReadOnlyField


class OnboardingViewSet(XOSViewSet):
    """REST endpoints reporting XOS onboarding/readiness state.

    Exposes readiness of the singleton XOS object and of each of its service
    controllers, plus a rebuild trigger.
    """

    base_name = "onboarding"
    method_name = "onboarding"
    method_kind = "viewset"

    @classmethod
    def get_urlpatterns(self, api_path="^"):
        # Custom (non-model) routes only; the model-backed patterns from the
        # superclass are intentionally not included.
        patterns = []
        patterns.append(self.list_url("xos/ready/$", {"get": "get_xos_ready"}, "xos_ready"))
        patterns.append(self.list_url("xos/rebuild/$", {"post": "post_rebuild"}, "xos_rebuild"))
        patterns.append(self.list_url("summary/$", {"get": "get_summary"}, "summary"))
        patterns.append(self.list_url("services/$", {"get": "get_service_list"}, "service_list"))
        # Raw string so the regex escape (\-) is passed through unmangled.
        patterns.append(self.list_url(r"services/(?P<service>[a-zA-Z0-9\-_]+)/ready/$",
                                      {"get": "get_service_ready"}, "service_ready"))
        return patterns

    def is_ready(self, obj):
        # An object is "ready" once the backend has enacted it at least as
        # recently as its last update and reports a success ("1...") status.
        return (obj.enacted is not None) and (obj.updated is not None) and \
               (obj.enacted >= obj.updated) and (obj.backend_status.startswith("1"))

    def get_xos_ready(self, request):
        """Return JSON true/false readiness of the singleton XOS object."""
        xos = XOS.objects.all()
        if not xos:
            # Bug fix: the original returned the undefined name `false`
            # (a NameError at runtime); Python's literal is False.
            return Response(False)
        xos = xos[0]
        # Reuse is_ready() instead of duplicating the readiness expression inline.
        result = self.is_ready(xos)
        return HttpResponse(json.dumps(result), content_type="application/javascript")

    def post_rebuild(self, request):
        """Trigger a rebuild of the XOS object; raises if none exists."""
        xos = XOS.objects.all()
        if not xos:
            raise Exception("There is no XOS object")
        xos = xos[0]
        xos.rebuild()
        return Response(True)

    def get_summary(self, request):
        """Return a plain-text 'name: ready' line per component."""
        result = []
        xos = XOS.objects.all()
        if not xos:
            # Bug fix: `false` -> False (the original raised NameError here).
            result.append(("XOS", False))
        else:
            xos = xos[0]
            result.append(("XOS", self.is_ready(xos)))
            for sc in xos.service_controllers.all():
                result.append((sc.name, self.is_ready(sc)))
        result = "\n".join(["%s: %s" % (x[0], x[1]) for x in result])
        if result:
            result = result + "\n"
        return HttpResponse(result, content_type="text/ascii")

    def get_service_list(self, request):
        """Return a JSON list of service controller names."""
        xos = XOS.objects.all()
        if not xos:
            return Response([])
        xos = xos[0]
        result = []
        for sc in xos.service_controllers.all():
            result.append(sc.name)
        return HttpResponse(json.dumps(result), content_type="application/javascript")

    def get_service_ready(self, request, service):
        """Return JSON true/false readiness of a named service controller."""
        xos = XOS.objects.all()
        if not xos:
            return Response([])
        xos = xos[0]
        sc = xos.service_controllers.filter(name=service)
        if not sc:
            # Bug fix: HttpResponse has no `status_code` keyword argument;
            # the HTTP status is set via `status=`.
            return HttpResponse("Not Found", status=404)
        sc = sc[0]
        result = self.is_ready(sc)
        return HttpResponse(json.dumps(result), content_type="application/javascript")
apache-2.0
rhauch/modeshape
modeshape-jcr/src/main/java/org/modeshape/jcr/federation/ConnectorChangeSetImpl.java
19269
/* * ModeShape (http://www.modeshape.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.modeshape.jcr.federation; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; import org.modeshape.common.annotation.NotThreadSafe; import org.modeshape.jcr.Connectors; import org.modeshape.jcr.Connectors.PathMappings; import org.modeshape.jcr.api.value.DateTime; import org.modeshape.jcr.bus.ChangeBus; import org.modeshape.jcr.cache.NodeKey; import org.modeshape.jcr.cache.change.RecordingChanges; import org.modeshape.jcr.spi.federation.ConnectorChangeSet; import org.modeshape.jcr.value.DateTimeFactory; import org.modeshape.jcr.value.Name; import org.modeshape.jcr.value.Path; import org.modeshape.jcr.value.PathFactory; import org.modeshape.jcr.value.Property; import org.modeshape.jcr.value.WorkspaceAndPath; @NotThreadSafe public class ConnectorChangeSetImpl implements ConnectorChangeSet { private final Connectors connectors; private final String connectorSourceName; private final Connectors.PathMappings pathMappings; private final String processId; private final String repositoryKey; private final ChangeBus bus; private final Map<String, RecordingChanges> changesByWorkspace = new HashMap<String, RecordingChanges>(); private final DateTimeFactory timeFactory; private final String journalId; public ConnectorChangeSetImpl( Connectors connectors, PathMappings mappings, String 
processId, String repositoryKey, ChangeBus bus, DateTimeFactory timeFactory, String journalId ) { this.connectors = connectors; this.connectorSourceName = mappings.getConnectorSourceName(); this.timeFactory = timeFactory; this.pathMappings = mappings; this.processId = processId; this.repositoryKey = repositoryKey; this.bus = bus; this.journalId = journalId; assert this.connectors != null; assert this.connectorSourceName != null; assert this.pathMappings != null; assert this.processId != null; assert this.repositoryKey != null; assert this.bus != null; assert this.timeFactory != null; } protected final RecordingChanges changesFor( WorkspaceAndPath workspaceAndPath ) { return changesFor(workspaceAndPath.getWorkspaceName()); } protected final RecordingChanges changesFor( String workspaceName ) { RecordingChanges changes = changesByWorkspace.get(workspaceName); if (changes == null) { changes = new RecordingChanges(processId, repositoryKey, workspaceName, journalId); changesByWorkspace.put(workspaceName, changes); } return changes; } @Override public void nodeCreated( String docId, String parentDocId, String path, Name primaryType, Set<Name> mixinTypes, Map<Name, Property> properties ) { NodeKey key = nodeKey(docId); NodeKey parentKey = nodeKey(parentDocId); Path externalPath = pathMappings.getPathFactory().create(path); // This external path in the connector may be projected into *multiple* nodes in the same or different workspaces ... 
for (WorkspaceAndPath wsAndPath : pathMappings.resolveExternalPathToInternal(externalPath)) { changesFor(wsAndPath).nodeCreated(key, parentKey, wsAndPath.getPath(), primaryType, mixinTypes, properties); } } @Override public void nodeRemoved( String docId, String parentDocId, String path, Name primaryType, Set<Name> mixinTypes ) { NodeKey key = nodeKey(docId); NodeKey parentKey = nodeKey(parentDocId); Path externalPath = pathMappings.getPathFactory().create(path); // This external path in the connector may be projected into *multiple* nodes in the same or different workspaces ... for (WorkspaceAndPath wsAndPath : pathMappings.resolveExternalPathToInternal(externalPath)) { changesFor(wsAndPath).nodeRemoved(key, parentKey, wsAndPath.getPath(), primaryType, mixinTypes); } // Signal to the manager of the Connector instances that an external node was removed. If this external // node is used in a projection, that projection will be removed... connectors.externalNodeRemoved(docId); } @Override public void nodeMoved( String docId, Name primaryType, Set<Name> mixinTypes, String newParentDocId, String oldParentDocId, String newPath, String oldPath ) { NodeKey key = nodeKey(docId); NodeKey newParentKey = nodeKey(newParentDocId); NodeKey oldParentKey = nodeKey(oldParentDocId); Path newExternalPath = pathMappings.getPathFactory().create(newPath); Path oldExternalPath = pathMappings.getPathFactory().create(oldPath); Collection<WorkspaceAndPath> newWsAndPaths = pathMappings.resolveExternalPathToInternal(newExternalPath); Collection<WorkspaceAndPath> oldWsAndPaths = pathMappings.resolveExternalPathToInternal(oldExternalPath); // This method is unfortunately quite complicated because, while a single node can be moved within a connector's // single tree of content, different projections might mean the node's old location is in one projection while // the new location is in a different projection (especially considering that a projection projects a single // external node into a 
single internal node within a given workspace). Also, multiple projections can apply to a single // external node. // // Therefore, a single move within a connector's content tree might need to be mapped as a combination of // NODE_MOVED, NODE_CREATED, and NODE_REMOVED events. And because the general algorithm is a bit more complicated, // there are a few special cases where the logic (and overhead) can be much simpler. These special cases are // also quite common, so it's worth it to have the separate logic. int numNew = newWsAndPaths.size(); int numOld = oldWsAndPaths.size(); if (numNew == 0) { // The node was moved to a location that is not in a projection ... if (numOld == 0) { // this is an edge case, because the old location was not in a projection, either return; } // There are only old locations, so treat as NODE_REMOVED. for (WorkspaceAndPath wsAndOldPath : oldWsAndPaths) { changesFor(wsAndOldPath.getWorkspaceName()).nodeRemoved(key, oldParentKey, wsAndOldPath.getPath(), primaryType, mixinTypes); } return; } else if (numOld == 0) { // There are just new nodes, so treat as NODE_CREATED. // Note that we do not know the properties ... Map<Name, Property> properties = Collections.emptyMap(); for (WorkspaceAndPath wsAndNewPath : newWsAndPaths) { changesFor(wsAndNewPath.getWorkspaceName()).nodeCreated(key, newParentKey, wsAndNewPath.getPath(), primaryType, mixinTypes, properties); } return; } assert numNew >= 1; assert numOld >= 1; // Check for the most common case (just one new location and one old location) and use a more optimal algorithm ... 
if (numNew == 1 && numOld == 1) { WorkspaceAndPath newWsAndPath = newWsAndPaths.iterator().next(); WorkspaceAndPath oldWsAndPath = newWsAndPaths.iterator().next(); String newWorkspace = newWsAndPath.getWorkspaceName(); String oldWorkspace = oldWsAndPath.getWorkspaceName(); if (newWorkspace.equals(oldWorkspace)) { // The workspaces are the same, so this is the case of a simple move changesFor(newWorkspace).nodeMoved(key, primaryType, mixinTypes, newParentKey, oldParentKey, newWsAndPath.getPath(), oldWsAndPath.getPath()); return; } // The workspace names don't match, so treat the old as a NODE_REMOVED ... changesFor(oldWsAndPath.getWorkspaceName()).nodeRemoved(key, oldParentKey, oldWsAndPath.getPath(), primaryType, mixinTypes); // And the new as NODE_CREATED (in a separate workspace) ... // Note that we do not know the properties ... Map<Name, Property> properties = Collections.emptyMap(); changesFor(newWsAndPath.getWorkspaceName()).nodeCreated(key, newParentKey, newWsAndPath.getPath(), primaryType, mixinTypes, properties); return; } assert numNew > 1 || numOld > 1; // Finally the general case. Here, we need to make sure that we don't lose any old locations that did not correspond // to at least new location. Since this is the last algorithm, we're actually going to remove all elements from // the 'oldWsAndPaths' collection as soon as we move them. (If multiple new locations map to a single old location, // then we'll have only one NODE_MOVED and one or more NODE_CREATED.) for (WorkspaceAndPath wsAndNewPath : newWsAndPaths) { // Look for the projections of the old external path in the same workspace ... 
boolean found = false; Iterator<WorkspaceAndPath> oldWsAndPathsIter = oldWsAndPaths.iterator(); while (oldWsAndPathsIter.hasNext()) { WorkspaceAndPath wsAndOldPath = oldWsAndPathsIter.next(); String newWorkspace = wsAndNewPath.getWorkspaceName(); String oldWorkspace = wsAndOldPath.getWorkspaceName(); if (newWorkspace.equals(oldWorkspace)) { found = true; changesFor(newWorkspace).nodeMoved(key, primaryType, mixinTypes, newParentKey, oldParentKey, wsAndNewPath.getPath(), wsAndOldPath.getPath()); oldWsAndPathsIter.remove(); // we don't want to deal with this WorkspaceAndPath as the 'from' of another move } } if (!found) { // The node appeared in one workspace, but it was moved from a node that projected into a different workspace, // so treat it as a NODE_CREATED in the new workspace. // Note that we do not know the properties ... Map<Name, Property> properties = Collections.emptyMap(); changesFor(wsAndNewPath).nodeCreated(key, newParentKey, wsAndNewPath.getPath(), primaryType, mixinTypes, properties); } } // If there are any old paths left, we need to treat them as NODE_REMOVED ... for (WorkspaceAndPath oldWsAndPath : oldWsAndPaths) { changesFor(oldWsAndPath).nodeRemoved(key, oldParentKey, oldWsAndPath.getPath(), primaryType, mixinTypes); } } @Override public void nodeReordered( String docId, Name primaryType, Set<Name> mixinTypes, String parentDocId, String newPath, String oldNameSegment, String reorderedBeforeNameSegment ) { NodeKey key = nodeKey(docId); NodeKey parentKey = nodeKey(parentDocId); PathFactory pathFactory = pathMappings.getPathFactory(); Path newExternalPath = pathFactory.create(newPath); Path parentPath = newExternalPath.getParent(); Path oldExternalPath = pathFactory.create(parentPath, pathFactory.createSegment(oldNameSegment)); Path reorderedBeforePath = reorderedBeforeNameSegment == null ? 
null : pathFactory.create(parentPath, pathFactory.createSegment(reorderedBeforeNameSegment)); // This external path in the connector may be projected into *multiple* nodes in the same or different workspaces ... for (WorkspaceAndPath wsAndPath : pathMappings.resolveExternalPathToInternal(newExternalPath)) { changesFor(wsAndPath).nodeReordered(key, primaryType, mixinTypes, parentKey, wsAndPath.getPath(), oldExternalPath, reorderedBeforePath); } } @Override public void propertyAdded( String docId, Name nodePrimaryType, Set<Name> nodeMixinTypes, String nodePath, Property property ) { NodeKey key = nodeKey(docId); Path externalPath = pathMappings.getPathFactory().create(nodePath); // This external path in the connector may be projected into *multiple* nodes in the same or different workspaces ... for (WorkspaceAndPath wsAndPath : pathMappings.resolveExternalPathToInternal(externalPath)) { changesFor(wsAndPath).propertyAdded(key, nodePrimaryType, nodeMixinTypes, wsAndPath.getPath(), property); } } @Override public void propertyRemoved( String docId, Name nodePrimaryType, Set<Name> nodeMixinTypes, String nodePath, Property property ) { NodeKey key = nodeKey(docId); Path externalPath = pathMappings.getPathFactory().create(nodePath); // This external path in the connector may be projected into *multiple* nodes in the same or different workspaces ... for (WorkspaceAndPath wsAndPath : pathMappings.resolveExternalPathToInternal(externalPath)) { changesFor(wsAndPath).propertyRemoved(key, nodePrimaryType, nodeMixinTypes, wsAndPath.getPath(), property); } } @Override public void propertyChanged( String docId, Name nodePrimaryType, Set<Name> nodeMixinTypes, String nodePath, Property oldProperty, Property newProperty ) { NodeKey key = nodeKey(docId); Path externalPath = pathMappings.getPathFactory().create(nodePath); // This external path in the connector may be projected into *multiple* nodes in the same or different workspaces ... 
for (WorkspaceAndPath wsAndPath : pathMappings.resolveExternalPathToInternal(externalPath)) { changesFor(wsAndPath).propertyChanged(key, nodePrimaryType, nodeMixinTypes, wsAndPath.getPath(), newProperty, oldProperty); } } @Override public void publish( Map<String, String> data ) { DateTime now = timeFactory.create(); if (data == null) data = Collections.emptyMap(); // Freeze and then notify the bus of each change set of a given workspace ... for (RecordingChanges changes : changesByWorkspace.values()) { changes.freeze(connectorSourceName, data, now); bus.notify(changes); } changesByWorkspace.clear(); } private NodeKey nodeKey( String documentId ) { return FederatedDocumentStore.documentIdToNodeKey(connectorSourceName, documentId); } @Override public String toString() { return "Change set for connector '" + connectorSourceName + "': " + changesByWorkspace; } }
apache-2.0
alienwithin/0x88
admin/user_manager.php
3152
<?php
/************************/
/* */
/*Dezended By Martian */
/*munir.skilledsoft.com */
/* */
/* */
/************************/
// Admin page: create a new tp_users row from a posted username/password pair.
include( "include/inc.header.php" );

$message = "";

// Bug fix: the original called check_input($_POST[...]) unconditionally, before the
// isset() guard below, raising undefined-index notices on the first (GET) page load.
$username = isset( $_POST['UserName'] ) ? check_input( $_POST['UserName'] ) : "";
$pass     = isset( $_POST['Pass'] )     ? check_input( $_POST['Pass'] )     : "";
$confirm  = isset( $_POST['Re'] )       ? check_input( $_POST['Re'] )       : "";

if ( isset( $_POST['UserName'], $_POST['Pass'], $_POST['Re'] ) )
{
    // Escape the user-supplied login before embedding it in SQL (injection fix).
    $safeUsername = mysql_real_escape_string( $username );
    $sql = "SELECT * FROM tp_users WHERE login = '$safeUsername'";
    $r = mysql_query( $sql );
    $num = mysql_num_rows( $r );
    if ( 0 < $num )
    {
        $message = "This user already exists, choose another login";
    }
    else if ( trim( $username ) == "" )
    {
        $message = "Enter your user name";
    }
    else if ( $pass != $confirm )
    {
        $message = "Password incorrectly confirmed";
    }
    else
    {
        // Bug fix: the original statement was VALUES('$username', 'md5($pass') — the md5()
        // call sat inside the string literal (so it was never executed) and the quotes were
        // unbalanced, making the INSERT a SQL syntax error. Hash the password in PHP and
        // escape the login before interpolation.
        $sql = "INSERT INTO `tp_users`(login, password) VALUES('" . $safeUsername . "', '" . md5( $pass ) . "')";
        $r = mysql_query( $sql );
        $message = "user '" . $username . "' created";
        $_POST['UserName'] = "";
        $_POST['Pass'] = "";
        $_POST['Re'] = "";
    }
}

echo "\r\n\r\r\n<br>\r\r\n<table cellpadding=0 cellspacing=0 width='600' border=1 align=center bordercolor=\"#7AA0B8\" bordercolorlight=black bordercolordark=white>\r\r\n<form method=\"post\">\r\r\n <tr>\r\r\n\t\t<td align=\"center\" class=\"pagetitle\" colspan=\"5\" bgcolor=\"#103056\"><b>Add User</b></td>\r\r\n\t</tr>\r\r\n\t<tr>\r\r\n\t\t<td align=\"center\" class=\"pagetitle2\">Username:</td>\r\r\n\t\t<td align=\"center\" class=\"pagetitle2\">password:</td>\r\r\n\t\t<td align=\"center\" class=\"pagetitle2\">Confirm Password:</td>\r\r\n\t\t<td align=\"center\" class=\"pagetitle2\">&nbsp;</td>\r\r\n\t</tr>\r\r\n\t<tr>\r\r\n\t\t<td align=\"center\"><input type=\"text\" size=\"20\" name=\"UserName\" class=inputbox3 value=\"";
// XSS fix: HTML-escape posted values before echoing them back into the form attributes.
echo isset( $_POST['UserName'] ) ? htmlspecialchars( $_POST['UserName'], ENT_QUOTES ) : "";
echo "\"></td>\r\r\n\t\t<td align=\"center\"><input type=\"password\" size=\"20\" name=\"Pass\" class=inputbox3 value=\"";
echo isset( $_POST['Pass'] ) ? htmlspecialchars( $_POST['Pass'], ENT_QUOTES ) : "";
echo "\"></td>\r\r\n\t\t<td align=\"center\"><input type=\"password\" size=\"20\" name=\"Re\" class=inputbox3 value=\"";
echo isset( $_POST['Re'] ) ? htmlspecialchars( $_POST['Re'], ENT_QUOTES ) : "";
echo "\"></td>\r\r\n\t\t<td align=\"center\"><input type=\"submit\" value=\"Create\" class=button1 onmouseover=\"this.style.backgroundColor = '#3E3E3E'\" onmouseout=\"this.style.backgroundColor = '#4B4B4B'\"></td>\r\r\n\t</tr>\r\r\n</form>\r\r\n</table>\r\r\n\r\r\n<br>\r\r\n\r\r\n";

if ( $message != "" )
{
    echo "\r\n<table cellpadding=0 cellspacing=0 width='600' border=1 align=center bordercolor='#7AA0B8' bordercolorlight=black bordercolordark=white>\r\r\n\t<tr><td align=center class=pagetitle bgcolor=#F9FBFB><font color=#186C1D><b>";
    echo $message;
    echo "</b></font></td></tr>\r\r\n</table>\r\r\n";
}

echo "\r\n\r\r\n<br>\r</td></tr><tr><td align=\"center\" class=\"pagetitle\" bgcolor=\"#000000\"><a href=\"http://munir.skilledsoft.com\"><font color=\"snow\">Recoded By by&nbsp;<b>Alienwithin</b>&nbsp; || Original Coder's(ICQ 92777755)</a></font></td></tr></table></body></html>\r\r\n";
?>
apache-2.0
lorislab/appky
appky-application/src/main/java/org/lorislab/appky/application/model/Version.java
5966
/*
 * Copyright 2014 lorislab.org.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.lorislab.appky.application.model;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.OrderColumn;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import org.lorislab.appky.application.model.enums.VersionStatus;
import org.lorislab.jel.jpa.model.TraceablePersistent;

/**
 * The platform version entity, mapped to table {@code AY_VERSION}.
 *
 * @author Andrej Petras &lt;andrej@ajka-andrej.com&gt;
 */
@Entity
@Table(name = "AY_VERSION")
public class Version extends TraceablePersistent {

    /**
     * The UID for this class.
     */
    private static final long serialVersionUID = -7531756282617808791L;
    /**
     * The version name.
     */
    @Column(name = "C_NAME")
    private String name;
    /**
     * The released flag.
     */
    @Column(name = "C_RELEASED")
    private boolean released;
    /**
     * The release date (timestamp).
     */
    @Column(name = "C_RELEASEDDATE")
    @Temporal(TemporalType.TIMESTAMP)
    private Date releasedDate;
    /**
     * The deleted flag (soft delete).
     */
    @Column(name = "C_DELETED")
    private boolean deleted;
    /**
     * The version status.
     */
    @Column(name = "C_STATUS")
    @Enumerated(EnumType.STRING)
    private VersionStatus status;
    /**
     * The version data document. (The original comment said "largeIcon",
     * which was a copy-paste leftover; the field holds the C_DATA document.)
     */
    @JoinColumn(name = "C_DATA")
    @OneToOne(cascade = CascadeType.ALL, fetch = FetchType.EAGER, orphanRemoval = true)
    private Document data;
    /**
     * The localized descriptions, ordered by C_INDEX.
     */
    @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
    @OrderColumn(name = "C_INDEX")
    @JoinTable(name = "AY_VERSION_DESC",
            joinColumns = {
                @JoinColumn(name = "C_VERSION")},
            inverseJoinColumns = {
                @JoinColumn(name = "C_DESCRIPTION")})
    private List<Description> descriptions;
    /**
     * The platform owning this version.
     */
    @ManyToOne(cascade = CascadeType.REFRESH, fetch = FetchType.EAGER)
    @JoinColumn(name = "C_PLATFORM_GUID", insertable = true, updatable = true)
    private Platform platform;

    /**
     * The default constructor. Initializes the description list so callers
     * never see a {@code null} collection.
     */
    public Version() {
        descriptions = new ArrayList<>();
    }

    /**
     * Gets the release date.
     *
     * @return the release date.
     */
    public Date getReleasedDate() {
        return releasedDate;
    }

    /**
     * Sets the release date.
     *
     * @param releasedDate the release date.
     */
    public void setReleasedDate(Date releasedDate) {
        this.releasedDate = releasedDate;
    }

    /**
     * Returns <code>true</code> if the version is deleted.
     *
     * @return <code>true</code> if the version is deleted.
     */
    public boolean isDeleted() {
        return deleted;
    }

    /**
     * Sets the deleted flag.
     *
     * @param deleted the deleted flag.
     */
    public void setDeleted(boolean deleted) {
        this.deleted = deleted;
    }

    /**
     * Gets the name.
     *
     * @return the name.
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the name.
     *
     * @param name the name to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Gets the list of descriptions.
     *
     * @return the list of descriptions.
     */
    public List<Description> getDescriptions() {
        return descriptions;
    }

    /**
     * Sets the list of descriptions.
     *
     * @param descriptions the list of descriptions.
     */
    public void setDescriptions(List<Description> descriptions) {
        this.descriptions = descriptions;
    }

    /**
     * Gets the version status.
     *
     * @return the version status.
     */
    public VersionStatus getStatus() {
        return status;
    }

    /**
     * Sets the version status.
     * (Original Javadoc said "list of description" — copy-paste fix.)
     *
     * @param status the version status.
     */
    public void setStatus(VersionStatus status) {
        this.status = status;
    }

    /**
     * Gets the data document.
     *
     * @return the data document.
     */
    public Document getData() {
        return data;
    }

    /**
     * Sets the data document.
     *
     * @param data the data document.
     */
    public void setData(Document data) {
        this.data = data;
    }

    /**
     * Returns <code>true</code> if the version is released.
     *
     * @return <code>true</code> if the version is released.
     */
    public boolean isReleased() {
        return released;
    }

    /**
     * Sets the released flag.
     *
     * @param released the released flag.
     */
    public void setReleased(boolean released) {
        this.released = released;
    }

    /**
     * Gets the platform.
     *
     * @return the platform.
     */
    public Platform getPlatform() {
        return platform;
    }

    /**
     * Sets the platform.
     *
     * @param platform the platform.
     */
    public void setPlatform(Platform platform) {
        this.platform = platform;
    }
}
apache-2.0
texastribune/gspreadsheet
gspreadsheet/utils.py
851
#http://code.google.com/apis/spreadsheets/data/1.0/developers_guide_python.html def PrintFeed(feed): """Example function from Google to print a feed""" import gdata for i, entry in enumerate(feed.entry): if isinstance(feed, gdata.spreadsheet.SpreadsheetsCellsFeed): print '%s %s\n' % (entry.title.text, entry.content.text) elif isinstance(feed, gdata.spreadsheet.SpreadsheetsListFeed): print '%s %s %s' % (i, entry.title.text, entry.content.text) # Print this row's value for each column (the custom dictionary is # built from the gsx: elements in the entry.) See the description of # gsx elements in the protocol guide. print 'Contents:' for key in entry.custom: print ' %s: %s' % (key, entry.custom[key].text) print '\n', else: print '%s %s\n' % (i, entry.title.text)
apache-2.0
cristiani/encuestame
enme-utils/src/main/java/org/encuestame/utils/social/SocialProvider.java
6051
/* ************************************************************************************ * Copyright (C) 2001-2011 encuestame: system online surveys Copyright (C) 2011 * encuestame Development Team. * Licensed under the Apache Software License version 2.0 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software distributed * under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. ************************************************************************************ */ package org.encuestame.utils.social; /** * Social Provider. * @author Picado, Juan juanATencuestame.org * @since Feb 27, 2011 */ public enum SocialProvider { /** * Twitter provider. */ TWITTER, /** * Facebook provider. */ FACEBOOK, /** * Identica provider. */ @Deprecated IDENTICA, /** * The new name of identi.ca */ PUMPIO, /** * Linked In provider. */ LINKEDIN, /** * Google Buzz provider. */ @Deprecated GOOGLE_BUZZ, /** Google +. **/ GOOGLE_PLUS, /** * Yahoo provider. */ YAHOO, /** * My Space. */ @Deprecated MYSPACE, //TODO: In the future we can add more API's Tumblr, Plurk, Jaiku. YOUTUBE, /** * Use full */ PICASA, /** * Google orkut social. */ ORKUT, /** * To access to google contact support. */ GOOGLE_CONTACTS, /** * Blog support to publish on blogger accounts. */ BLOGGER, /** * */ PLURK, /** * */ TUMBLR, /** * All social providers. */ ALL, /** * Constructor. */ SocialProvider(){ //Constructor. }; /** * To String. 
*/ public String toString() { String provider = ""; if (this == TWITTER) { provider = "TWITTER"; } else if (this == FACEBOOK) { provider = "FACEBOOK"; } else if (this == PLURK) { provider = "PLURK"; } else if (this == TUMBLR) { provider = "TUMBLR"; } else if (this == IDENTICA) { provider = "IDENTICA"; } else if (this == LINKEDIN) { provider = "LINKEDIN"; } else if (this == PUMPIO) { provider = "PUMPIO"; } else if (this == GOOGLE_BUZZ) { provider = "GOOGLEBUZZ"; } else if (this == GOOGLE_PLUS) { provider = "GOOGLEPLUS"; } else if (this == YAHOO) { provider = "YAHOO"; } else if (this == MYSPACE) { provider = "MYSPACE"; } else if (this == ALL) { provider = "ALL"; } return provider; } /** * Some cases its necessary return the same provider name because the OAuth2 handler is the same. * Google manage all API keys with the same callback url on the same console. * http://wiki.encuestame.org/display/DOC/Set+Up+or+Customize+Google+OAuth+credentials+on+Encuestame * @return social callback string. */ public String getBackUrlProviderName() { String provider = ""; if (this == TWITTER) { provider = "TWITTER"; } else if (this == FACEBOOK) { provider = "FACEBOOK"; } else if (this == PLURK) { provider = "PLURK"; } else if (this == TUMBLR) { provider = "TUMBLR"; } else if (this == IDENTICA) { provider = "IDENTICA"; } else if (this == PUMPIO) { provider = "PUMPIO"; } else if (this == LINKEDIN) { provider = "LINKEDIN"; } else if (this == GOOGLE_PLUS) { provider = "GOOGLEPLUS"; } else if (this == GOOGLE_BUZZ) { provider = "GOOGLEBUZZ"; } else if (this == YAHOO) { provider = "YAHOO"; } else if (this == MYSPACE) { provider = "MYSPACE"; } return provider.toLowerCase(); } /** * Get Provider by String. 
* @param socialProvider period * @return provider enum */ public static SocialProvider getProvider(final String socialProvider) { if (null == socialProvider) { return null; } else if (socialProvider.equalsIgnoreCase("TWITTER")) { return TWITTER; } else if (socialProvider.equalsIgnoreCase("TUMBLR")) { return TUMBLR; } else if (socialProvider.equalsIgnoreCase("PLURK")) { return PLURK; } else if (socialProvider.equalsIgnoreCase("ALL")) { return ALL; } else if (socialProvider.equalsIgnoreCase("FACEBOOK")) { return FACEBOOK; } else if (socialProvider.equalsIgnoreCase("IDENTICA")) { return IDENTICA; } else if (socialProvider.equalsIgnoreCase("PUMPIO")) { return PUMPIO; } else if (socialProvider.equalsIgnoreCase("LINKEDIN")) { return LINKEDIN; } else if (socialProvider.equalsIgnoreCase("GOOGLEPLUS")) { return GOOGLE_PLUS; } else if (socialProvider.equalsIgnoreCase("GOOGLE_BUZZ")) { return GOOGLE_BUZZ; } else if (socialProvider.equalsIgnoreCase("YAHOO")) { return YAHOO; } else if (socialProvider.equalsIgnoreCase("MYSPACE")) { return MYSPACE; } else return null; } /** * Provide OAuth protocol. * @param provider {@link SocialProvider}. * @return */ public static TypeAuth getTypeAuth(final SocialProvider provider) { if(provider.equals(TWITTER) || provider.equals(IDENTICA) || provider.equals(ALL) || provider.equals(TUMBLR) || provider.equals(PLURK) || provider.equals(LINKEDIN) || provider.equals(PUMPIO) || provider.equals(MYSPACE) || provider.equals(YAHOO)){ return TypeAuth.OAUTH1; } else if ( provider.equals(GOOGLE_PLUS) || provider.equals(GOOGLE_BUZZ) || provider.equals(FACEBOOK)){ return TypeAuth.OAUTH2; } else { return null; } } }
apache-2.0
ebourg/flamingo-svg-transcoder
core/src/test/java/org/pushingpixels/flamingo/api/svg/SvgTranscoderTest.java
5698
package org.pushingpixels.flamingo.api.svg; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.LineNumberReader; import java.io.PrintWriter; import java.util.Arrays; import javax.tools.JavaCompiler; import javax.tools.JavaFileObject; import javax.tools.StandardJavaFileManager; import javax.tools.ToolProvider; import junit.framework.TestCase; /** * @author Emmanuel Bourg * @version $Revision$, $Date$ */ public class SvgTranscoderTest extends TestCase { public void testTranscode() throws Exception { File svg = new File("target/test-classes/svg/edit-copy.svg"); File transcoded = new File(svg.getParentFile(), "edit_copy.java"); PrintWriter out = new PrintWriter(transcoded); SvgTranscoder transcoder = new SvgTranscoder(svg.toURI().toURL(), "edit_copy"); transcoder.setJavaPackageName("test.svg.transcoded"); transcoder.setTemplate(new Template("resizable.template")); transcoder.setPrintWriter(out); transcoder.transcode(); out.flush(); out.close(); assertTrue(transcoded.exists()); assertCompile(transcoded); assertEquals(new File("src/test/java/transcoded/edit_copy.java"), transcoded); } private void assertEquals(File expectedFile, File actualFile) throws IOException { LineNumberReader in1 = new LineNumberReader(new FileReader(expectedFile)); LineNumberReader in2 = new LineNumberReader(new FileReader(actualFile)); String line1; String line2; while ((line1 = in1.readLine()) != null | (line2 = in2.readLine()) != null) { assertEquals("Line " + in1.getLineNumber(), line1, line2); } } public void testTranscodeEmpty() throws Exception { File svg = new File("target/test-classes/svg/empty.svg"); File transcoded = new File(svg.getParentFile(), "empty.java"); PrintWriter out = new PrintWriter(transcoded); SvgTranscoder transcoder = new SvgTranscoder(svg.toURI().toURL(), "empty"); transcoder.setJavaPackageName("test.svg.transcoded"); transcoder.setTemplate(new Template("icon.template")); transcoder.setPrintWriter(out); transcoder.transcode(); 
out.flush(); out.close(); assertTrue(transcoded.exists()); assertCompile(transcoded); } public void testTranscodeLarge() throws Exception { File svg = new File("target/test-classes/svg/apache-feather.svg"); File transcoded = new File(svg.getParentFile(), "apache_feather.java"); PrintWriter out = new PrintWriter(transcoded); SvgTranscoder transcoder = new SvgTranscoder(svg.toURI().toURL(), "apache_feather"); transcoder.setJavaPackageName("test.svg.transcoded"); transcoder.setTemplate(new Template("icon.template")); transcoder.setPrintWriter(out); transcoder.transcode(); out.flush(); out.close(); assertTrue(transcoded.exists()); assertCompile(transcoded); } public void testTranscodeCompressed() throws Exception { File svg = new File("target/test-classes/svg/apache-feather.svgz"); File transcoded = new File(svg.getParentFile(), "apache_feather2.java"); PrintWriter out = new PrintWriter(transcoded); SvgTranscoder transcoder = new SvgTranscoder(svg.toURI().toURL(), "apache_feather2"); transcoder.setJavaPackageName("test.svg.transcoded"); transcoder.setTemplate(new Template("icon.template")); transcoder.setPrintWriter(out); transcoder.transcode(); out.flush(); out.close(); assertTrue(transcoded.exists()); assertCompile(transcoded); } public void testTranscodeText() throws Exception { File svg = new File("target/test-classes/svg/text.svg"); File transcoded = new File(svg.getParentFile(), "text.java"); PrintWriter out = new PrintWriter(transcoded); SvgTranscoder transcoder = new SvgTranscoder(svg.toURI().toURL(), "text"); transcoder.setJavaPackageName("test.svg.transcoded"); transcoder.setTemplate(new Template("icon.template")); transcoder.setPrintWriter(out); transcoder.transcode(); out.flush(); out.close(); assertTrue(transcoded.exists()); assertCompile(transcoded); } public void testTranscodeMarkers() throws Exception { File svg = new File("target/test-classes/svg/markers.svg"); File transcoded = new File(svg.getParentFile(), "markers.java"); PrintWriter out = new 
PrintWriter(transcoded); SvgTranscoder transcoder = new SvgTranscoder(svg.toURI().toURL(), "markers"); transcoder.setJavaPackageName("test.svg.transcoded"); transcoder.setTemplate(new Template("icon.template")); transcoder.setPrintWriter(out); transcoder.transcode(); out.flush(); out.close(); assertTrue(transcoded.exists()); assertCompile(transcoded); } private void assertCompile(File file) throws IOException { JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null, null); Iterable<? extends JavaFileObject> unit = fileManager.getJavaFileObjectsFromFiles(Arrays.asList(file)); JavaCompiler.CompilationTask task = compiler.getTask(null, fileManager, null, null, null, unit); assertTrue("Compilation failed", task.call()); fileManager.close(); } }
apache-2.0
yeeunshim/tajo_test
tajo-algebra/src/main/java/org/apache/tajo/algebra/RelationList.java
2376
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tajo.algebra; import com.google.common.base.Objects; import com.google.common.base.Preconditions; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; import org.apache.tajo.util.TUtil; import java.util.Set; public class RelationList extends Expr { @Expose @SerializedName("Relations") private Expr[] relations; public RelationList(Expr[] relations) { super(OpType.RelationList); checkRelations(relations); this.relations = relations; } private void checkRelations(Expr[] relations) { for (Expr rel : relations) { Preconditions.checkArgument( rel.getType() == OpType.Relation || rel.getType() == OpType.Join || rel.getType() == OpType.TablePrimaryTableSubQuery, "Only Relation, Join, or TablePrimarySubQuery can be given to RelationList, but this expr " + " is " + rel.getType()); } } public Expr[] getRelations() { return this.relations; } public int size() { return this.relations.length; } @Override public String toString() { return toJson(); } @Override public int hashCode() { return Objects.hashCode(relations); } @Override boolean equalsTo(Expr expr) { Set<Expr> thisSet = TUtil.newHashSet(relations); RelationList another = (RelationList) expr; Set<Expr> anotherSet = TUtil.newHashSet(another.relations); return thisSet.equals(anotherSet); } @Override public Object clone() throws CloneNotSupportedException { RelationList relationList = (RelationList) super.clone(); 
relationList.relations = new Expr[relations.length]; for (int i = 0; i < relations.length; i++) { relationList.relations[i] = (Expr) relations[i].clone(); } return relationList; } }
apache-2.0
SAP/openui5
src/sap.ui.core/test/sap/ui/core/qunit/component/ComponentSupport.qunit.js
10404
sap.ui.define([
	'sap/base/Log',
	'sap/base/strings/hyphenate',
	'sap/ui/core/UIComponent',
	'sap/ui/core/ComponentContainer',
	'sap/ui/core/ComponentSupport',
	'sap/ui/core/library'
], function(Log, hyphenate, UIComponent, ComponentContainer, ComponentSupport, library) {

	"use strict";
	/*global QUnit, sinon, Promise*/

	var ComponentLifecycle = library.ComponentLifecycle;

	// helper functionality to create component container DIVs
	function createComponentDIV(sId, mAttributes) {
		var oDIV = document.createElement("div");
		if (mAttributes) {
			Object.keys(mAttributes).forEach(function(sKey) {
				oDIV.setAttribute(sKey, mAttributes[sKey]);
			});
		}
		oDIV.setAttribute("id", sId);
		return oDIV;
	}

	var oContentElement = createComponentDIV("content");
	document.body.appendChild(oContentElement);

	// settings for the four declaratively created components
	var mSettings = {
		"div1": {
			id: "container1",
			name: "sap.ui.test.v2empty",
			settings: {
				id: "component1"
			},
			componentCreated: "componentCreated"
		},
		"div2": {
			id: "container2",
			name: "sap.ui.test.v2empty",
			settings: {
				id: "component2"
			},
			componentCreated: "componentCreated",
			async: false
		},
		"div3": {
			id: "container3",
			name: "sap.ui.test.v2empty",
			settings: {
				id: "component3"
			},
			componentCreated: "componentCreated",
			manifest: "true"
		},
		"div4": {
			id: "container4",
			name: "sap.ui.test.v2empty",
			settings: {
				id: "component4"
			},
			componentCreated: "componentCreated",
			manifest: "false"
		}
	};

	// convert the settings and create the component container div elements
	var mContainers = {};
	Object.keys(mSettings).forEach(function(sId) {
		// create the component configuration for the div element from the settings
		var mContainer = mContainers[sId] = {
			"data-sap-ui-component": ""
		};
		Object.keys(mSettings[sId]).forEach(function(sKey) {
			mContainer["data-" + hyphenate(sKey)] = sKey === "settings" ? JSON.stringify(mSettings[sId][sKey]) : mSettings[sId][sKey];
		});
		// create the div element for the component container
		oContentElement.appendChild(createComponentDIV(sId, mContainer));
	});

	// Promise which resolves once the component instances are created
	function runComponentSupport() {
		return new Promise(function(resolve, reject) {
			// create a global function to count the component instances
			var iComponentCount = 0;
			window.componentCreated = function() {
				iComponentCount++;
				// start the test once all four component instances are created
				if (iComponentCount == 4) {
					resolve();
				}
			};
			// execute the ComponentSupport
			ComponentSupport.run();
		});
	}

	QUnit.module("Component Support");

	QUnit.test("Finder, Parser and Default Settings", function(assert) {
		// check the finder
		var aElements = ComponentSupport._find();
		assert.equal(aElements.length, 4, "Found 4 declarative components!");
		for (var i = 0, l = aElements.length; i < l; i++) {
			var oElement = aElements[i];
			var mExpectedSettings = mSettings[oElement.id];
			// check the parser
			window.componentCreated = function() {};
			var mComponentSettings = ComponentSupport._parse(oElement);
			mComponentSettings.componentCreated = "componentCreated"; // reset function for comparison!
			assert.deepEqual(mComponentSettings, mExpectedSettings, "Component settings parsed correctly for component " + oElement.id + "!");
			// check the default settings
			ComponentSupport._applyDefaultSettings(mComponentSettings);
			mExpectedSettings.async = true;
			mExpectedSettings.lifecycle = ComponentLifecycle.Container;
			mExpectedSettings.manifest = true;
			mExpectedSettings.autoPrefixId = true;
			assert.deepEqual(mComponentSettings, mExpectedSettings, "Component settings defaults applied properly for component " + oElement.id + "!");
		}
	});

	QUnit.test("Parser with unknown property/event", function(assert) {
		this.spy(Log, "warning");
		var oElement = document.createElement("div");
		oElement.setAttribute("data-unkown", "foo");
		ComponentSupport._parse(oElement);
		assert.ok(
			Log.warning.calledWithMatch("Property or event \"unkown\" will be ignored as it does not exist in sap.ui.core.ComponentContainer"),
			"should log a warning with the expected message");
	});

	QUnit.test("Parser with invalid event callback", function(assert) {
		var oElement = document.createElement("div");
		oElement.setAttribute("data-component-created", "does.not.exist");
		assert.throws(function() {
			ComponentSupport._parse(oElement);
		}, new Error("Callback handler for event \"componentCreated\" not found"));
	});

	QUnit.test("ComponentContainer Factory", function(assert) {
		var oComponentContainerApplySettingsSpy = this.spy(ComponentContainer.prototype, "applySettings");

		var aComponentElements = document.querySelectorAll("[data-sap-ui-component]");
		assert.equal(aComponentElements.length, 4, "There should be four declarative elements");
		assert.ok(aComponentElements[0].hasAttribute("data-sap-ui-component"), "First element should have the indicator attribute");
		assert.ok(aComponentElements[1].hasAttribute("data-sap-ui-component"), "Second element should have the indicator attribute");
		assert.ok(aComponentElements[2].hasAttribute("data-sap-ui-component"), "Third element should have the indicator attribute");
		assert.ok(aComponentElements[3].hasAttribute("data-sap-ui-component"), "Fourth element should have the indicator attribute");

		var pComponentSupport = runComponentSupport();

		// Four ComponentContainers should have been created
		sinon.assert.callCount(oComponentContainerApplySettingsSpy, 4);

		assert.notOk(aComponentElements[0].hasAttribute("data-sap-ui-component"), "First element should not have the indicator attribute anymore");
		assert.ok(aComponentElements[0].parentNode, "First element should still be part of the DOM");
		assert.notOk(aComponentElements[1].hasAttribute("data-sap-ui-component"), "Second element should not have the indicator attribute anymore");
		assert.ok(aComponentElements[1].parentNode, "Second element should still be part of the DOM");
		// BUG FIX: the original checked aComponentElements[1] again for the
		// third and fourth elements, so indices 2 and 3 were never verified.
		assert.notOk(aComponentElements[2].hasAttribute("data-sap-ui-component"), "Third element should not have the indicator attribute anymore");
		assert.ok(aComponentElements[2].parentNode, "Third element should still be part of the DOM");
		assert.notOk(aComponentElements[3].hasAttribute("data-sap-ui-component"), "Fourth element should not have the indicator attribute anymore");
		assert.ok(aComponentElements[3].parentNode, "Fourth element should still be part of the DOM");

		assert.equal(document.querySelectorAll("[data-sap-ui-component]").length, 0, "There should not be any declarative elements anymore");

		// Directly executing run again shouldn't try to create the same containers again
		ComponentSupport.run();

		// Still, only four ComponentContainers should have been created
		sinon.assert.callCount(oComponentContainerApplySettingsSpy, 4);

		return pComponentSupport.then(function() {
			assert.ok(document.getElementById("div1"), "Placeholder DIV for first Component found!");
			assert.ok(document.getElementById("div2"), "Placeholder DIV for second Component found!");
			assert.ok(document.getElementById("div3"), "Placeholder DIV for third Component found!");
			assert.ok(document.getElementById("div4"), "Placeholder DIV for fourth Component found!");
			assert.ok(document.getElementById("container1"), "ComponentContainer element for first Component found!");
			assert.ok(document.getElementById("container2"), "ComponentContainer element for second Component found!");
			assert.ok(document.getElementById("container3"), "ComponentContainer element for third Component found!");
			assert.ok(document.getElementById("container4"), "ComponentContainer element for fourth Component found!");

			var oContainer1 = sap.ui.getCore().byId("container1");
			var oContainer2 = sap.ui.getCore().byId("container2");
			var oContainer3 = sap.ui.getCore().byId("container3");
			var oContainer4 = sap.ui.getCore().byId("container4");

			assert.ok(oContainer1 instanceof ComponentContainer, "ComponentContainer for first Component found!");
			assert.ok(oContainer2 instanceof ComponentContainer, "ComponentContainer for second Component found!");
			assert.ok(oContainer3 instanceof ComponentContainer, "ComponentContainer for third Component found!");
			assert.ok(oContainer4 instanceof ComponentContainer, "ComponentContainer for fourth Component found!");

			var oComponent1 = oContainer1.getComponentInstance();
			var oComponent2 = oContainer2.getComponentInstance();
			var oComponent3 = oContainer3.getComponentInstance();
			var oComponent4 = oContainer4.getComponentInstance();

			assert.ok(oComponent1 instanceof UIComponent, "UIComponent instance for first Component found!");
			assert.ok(oComponent2 instanceof UIComponent, "UIComponent instance for second Component found!");
			assert.ok(oComponent3 instanceof UIComponent, "UIComponent instance for third Component found!");
			assert.ok(oComponent4 instanceof UIComponent, "UIComponent instance for fourth Component found!");

			assert.equal(oComponent1.getId(), oContainer1.getId() + "-component1", "The id of the first Component is correct!");
			assert.equal(oComponent1.getMetadata().getName(), "sap.ui.test.v2empty.Component", "The name of the first Component is correct!");
			assert.equal(oComponent2.getId(), oContainer2.getId() + "-component2", "The id of the second Component is correct!");
			assert.equal(oComponent2.getMetadata().getName(), "sap.ui.test.v2empty.Component", "The name of the second Component is correct!");
			assert.equal(oComponent3.getId(), oContainer3.getId() + "-component3", "The id of the third Component is correct!");
			assert.equal(oComponent3.getMetadata().getName(), "sap.ui.test.v2empty.Component", "The name of the third Component is correct!");
			assert.equal(oComponent4.getId(), oContainer4.getId() + "-component4", "The id of the fourth Component is correct!");
			assert.equal(oComponent4.getMetadata().getName(), "sap.ui.test.v2empty.Component", "The name of the fourth Component is correct!");

			// Executing run again afterwards also shouldn't try to create the same containers again
			ComponentSupport.run();

			// Still, only four ComponentContainers should have been created
			sinon.assert.callCount(oComponentContainerApplySettingsSpy, 4);
		});
	});
});
apache-2.0
ytirahc/mobile-dev-trek
posts/032016/resizing-to-multiple.js
2379
// Copyright (c) 2016, ytirahc, www.mobiledevtrek.com
// All rights reserved. Copyright holder cannot be held liable for any damages.
//
// Distributed under the Apache License (ASL).
// http://www.apache.org/licenses/
//
// *****
// Description: This file is a script for Adobe Photoshop (tested with Photoshop CC 2015).
//              The current active image in Photoshop will be resized by the specified percentages
//              in the array scaleFactor and will save those files to the specified directory, saveDir.
//
// Usage: File -> Scripts -> Browse... to locate the script and execute it
// *****

var scaleFactor = [75, 50, 25];     // Scale factors as percentages of the original image
var saveDir = "~/Documents/work/";  // Directory in which to save resized images

// Create a resized image for every specified scale factor.
// NOTE: a plain indexed loop replaces the original for...in — for...in iterates
// property *keys* (as strings, possibly including inherited/expando properties),
// which is unreliable for arrays.
for (var i = 0; i < scaleFactor.length; i++) {
    ResizeToScaleFactor(saveDir, scaleFactor[i]);
}

// *****
// ResizeToScaleFactor
//
// Description: Resizes a copy of the active image according to the specified
//              scale factor and saves it as a JPEG. The original document is
//              left untouched.
//
// Parameters:
//   inSaveDir:     Directory in which to save the resized image
//   inScaleFactor: Scale factor, as a percentage, by which to resize the image
// *****
function ResizeToScaleFactor(inSaveDir, inScaleFactor) {
    // Reference to the current, active document.
    var doc = app.activeDocument;

    // Work on a duplicate so the original document is never modified.
    var duplicateDoc = doc.duplicate();

    // Resize the duplicate according to the specified scale factor.
    // Height and resolution are null so Photoshop preserves the aspect ratio.
    duplicateDoc.resizeImage(UnitValue(inScaleFactor, "%"), null, null, ResampleMethod.BICUBIC);

    // Save the resized image as "<name>_<factor>percent.jpg" in inSaveDir.
    var imageName = (doc.name).split(".")[0];  // Document name without extension
    var saveFN = new File(inSaveDir + imageName + "_" + inScaleFactor + "percent.jpg");

    var saveFNOptions = new JPEGSaveOptions();
    saveFNOptions.embedColorProfile = true;
    saveFNOptions.formatOptions = FormatOptions.STANDARDBASELINE;
    saveFNOptions.matte = MatteType.NONE;
    saveFNOptions.quality = 9;

    duplicateDoc.saveAs(saveFN, saveFNOptions, true, Extension.LOWERCASE);

    // Close the duplicate image view without a save-as dialog box.
    duplicateDoc.close(SaveOptions.DONOTSAVECHANGES);
}
apache-2.0
Lucas1313/miesner
www/wp-content/plugins/nextgen-gallery-pro/modules/comments/class.comment_mapper.php
1751
<?php

/**
 * Custom-post data mapper for NextGEN comment containers.
 */
class C_Comment_Mapper extends C_CustomPost_DataMapper_Driver
{
    public static $_instances = array();

    function define($context = FALSE)
    {
        parent::define(NULL, array($context, 'photocrati-comments'));
        $this->add_mixin('Mixin_Comment_Mapper');
        $this->implement('I_Comment_Mapper');
        $this->set_model_factory_method('comment_container');
    }

    function initialize($context=FALSE)
    {
        parent::initialize('photocrati-comments');
    }

    /**
     * Returns the singleton mapper instance for the given context.
     */
    public static function get_instance($context = FALSE)
    {
        if (!isset(self::$_instances[$context])) {
            self::$_instances[$context] = new C_Comment_Mapper($context);
        }
        return self::$_instances[$context];
    }
}

/**
 * Query helpers mixed into the comment mapper.
 */
class Mixin_Comment_Mapper extends Mixin
{
    /**
     * Finds the first post whose title matches $name, or NULL when none exists.
     */
    function find_by_post_title($name, $model = FALSE)
    {
        $this->object->select();
        $this->object->where(array('post_title = %s', $name));
        $matches = $this->object->run_query(FALSE, $model);
        return $matches ? $matches[0] : NULL;
    }

    /**
     * Returns the comment-container post for ($type, $id), creating and
     * persisting it first if it does not exist yet.
     */
    function find_or_create($type, $id)
    {
        $title = $this->object->get_stub($type, $id);
        $post  = $this->object->find_by_post_title($title, TRUE);
        if (!$post) {
            $post = new stdClass;
            $post->name = $title;
            $post->post_title = $title;
            $post->comment_status = 'open';
            $post->post_status = 'publish';
            $this->object->save($post);
            // Re-read so the caller receives the persisted model object.
            $post = $this->object->find_by_post_title($title, TRUE);
        }
        return $post;
    }

    /**
     * Builds the deterministic post title used to link a comment container.
     */
    function get_stub($type, $id)
    {
        return "NextGEN Comment Link - {$type} - {$id}";
    }
}
apache-2.0
balloob/home-assistant
homeassistant/components/shelly/light.py
3918
"""Light for Shelly.""" from typing import Optional from aioshelly import Block from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, SUPPORT_BRIGHTNESS, SUPPORT_COLOR_TEMP, LightEntity, ) from homeassistant.core import callback from homeassistant.util.color import ( color_temperature_kelvin_to_mired, color_temperature_mired_to_kelvin, ) from . import ShellyDeviceWrapper from .const import DATA_CONFIG_ENTRY, DOMAIN from .entity import ShellyBlockEntity async def async_setup_entry(hass, config_entry, async_add_entities): """Set up lights for device.""" wrapper = hass.data[DOMAIN][DATA_CONFIG_ENTRY][config_entry.entry_id] blocks = [block for block in wrapper.device.blocks if block.type == "light"] if not blocks: return async_add_entities(ShellyLight(wrapper, block) for block in blocks) class ShellyLight(ShellyBlockEntity, LightEntity): """Switch that controls a relay block on Shelly devices.""" def __init__(self, wrapper: ShellyDeviceWrapper, block: Block) -> None: """Initialize light.""" super().__init__(wrapper, block) self.control_result = None self._supported_features = 0 if hasattr(block, "brightness"): self._supported_features |= SUPPORT_BRIGHTNESS if hasattr(block, "colorTemp"): self._supported_features |= SUPPORT_COLOR_TEMP @property def supported_features(self) -> int: """Supported features.""" return self._supported_features @property def is_on(self) -> bool: """If light is on.""" if self.control_result: return self.control_result["ison"] return self.block.output @property def brightness(self) -> Optional[int]: """Brightness of light.""" if self.control_result: brightness = self.control_result["brightness"] else: brightness = self.block.brightness return int(brightness / 100 * 255) @property def color_temp(self) -> Optional[float]: """Return the CT color value in mireds.""" if self.control_result: color_temp = self.control_result["temp"] else: color_temp = self.block.colorTemp # If you set DUO to max mireds in Shelly app, 2700K, # 
It reports 0 temp if color_temp == 0: return self.max_mireds return int(color_temperature_kelvin_to_mired(color_temp)) @property def min_mireds(self) -> float: """Return the coldest color_temp that this light supports.""" return color_temperature_kelvin_to_mired(6500) @property def max_mireds(self) -> float: """Return the warmest color_temp that this light supports.""" return color_temperature_kelvin_to_mired(2700) async def async_turn_on(self, **kwargs) -> None: """Turn on light.""" params = {"turn": "on"} if ATTR_BRIGHTNESS in kwargs: tmp_brightness = kwargs[ATTR_BRIGHTNESS] params["brightness"] = int(tmp_brightness / 255 * 100) if ATTR_COLOR_TEMP in kwargs: color_temp = color_temperature_mired_to_kelvin(kwargs[ATTR_COLOR_TEMP]) if color_temp > 6500: color_temp = 6500 elif color_temp < 2700: color_temp = 2700 params["temp"] = int(color_temp) self.control_result = await self.block.set_state(**params) self.async_write_ha_state() async def async_turn_off(self, **kwargs) -> None: """Turn off light.""" self.control_result = await self.block.set_state(turn="off") self.async_write_ha_state() @callback def _update_callback(self): """When device updates, clear control result that overrides state.""" self.control_result = None super()._update_callback()
apache-2.0
google/gvisor
test/syscalls/linux/verity_symlink.cc
4031
// Copyright 2021 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <stdint.h>
#include <stdlib.h>
#include <sys/mount.h>
#include <sys/stat.h>

#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "test/util/capability_util.h"
#include "test/util/fs_util.h"
#include "test/util/mount_util.h"
#include "test/util/temp_path.h"
#include "test/util/test_util.h"
#include "test/util/verity_util.h"

namespace gvisor {
namespace testing {

namespace {

// Name of the symlink created inside the tmpfs mount.
const char kSymlink[] = "verity_symlink";

// Fixture that prepares a tmpfs mount containing one regular file and one
// symlink pointing at that file. Each test then wraps the mount in a verity
// filesystem via MountVerity().
class SymlinkTest : public ::testing::Test {
 protected:
  void SetUp() override {
    // Verity is implemented in VFS2.
    SKIP_IF(IsRunningWithVFS1());
    // mount(2) below requires CAP_SYS_ADMIN.
    SKIP_IF(!ASSERT_NO_ERRNO_AND_VALUE(HaveCapability(CAP_SYS_ADMIN)));
    // Mount a tmpfs file system, to be wrapped by a verity fs.
    tmpfs_dir_ = ASSERT_NO_ERRNO_AND_VALUE(TempPath::CreateDir());
    ASSERT_THAT(mount("", tmpfs_dir_.path().c_str(), "tmpfs", 0, ""),
                SyscallSucceeds());

    // Create a new file in the tmpfs mount.
    // NOTE(review): kContents is not defined in this file — presumably it is
    // provided by one of the test/util headers; confirm.
    file_ = ASSERT_NO_ERRNO_AND_VALUE(
        TempPath::CreateFileWith(tmpfs_dir_.path(), kContents, 0777));
    filename_ = Basename(file_.path());

    // Create a symlink to the file.
    ASSERT_THAT(symlink(file_.path().c_str(),
                        JoinPath(tmpfs_dir_.path(), kSymlink).c_str()),
                SyscallSucceeds());
  }

  TempPath tmpfs_dir_;    // Underlying tmpfs mount point.
  TempPath file_;         // Regular file that the symlink targets.
  std::string filename_;  // Basename of file_ within tmpfs_dir_.
};

// An unmodified, verity-enabled symlink can be read and followed normally.
TEST_F(SymlinkTest, Success) {
  std::string verity_dir = ASSERT_NO_ERRNO_AND_VALUE(MountVerity(
      tmpfs_dir_.path(), {EnableTarget(filename_, O_RDONLY),
                          EnableTarget(kSymlink, O_RDONLY | O_NOFOLLOW)}));

  char buf[256];
  EXPECT_THAT(
      readlink(JoinPath(verity_dir, kSymlink).c_str(), buf, sizeof(buf)),
      SyscallSucceeds());
  auto const verity_fd = ASSERT_NO_ERRNO_AND_VALUE(
      Open(JoinPath(verity_dir, kSymlink).c_str(), O_RDONLY, 0777));
  EXPECT_THAT(ReadFd(verity_fd.get(), buf, sizeof(kContents)),
              SyscallSucceeds());
}

// Deleting the symlink in the underlying fs must be detected: accessing it
// through the verity mount fails with EIO.
TEST_F(SymlinkTest, DeleteLink) {
  std::string verity_dir = ASSERT_NO_ERRNO_AND_VALUE(MountVerity(
      tmpfs_dir_.path(), {EnableTarget(filename_, O_RDONLY),
                          EnableTarget(kSymlink, O_RDONLY | O_NOFOLLOW)}));

  ASSERT_THAT(unlink(JoinPath(tmpfs_dir_.path(), kSymlink).c_str()),
              SyscallSucceeds());
  char buf[256];
  EXPECT_THAT(
      readlink(JoinPath(verity_dir, kSymlink).c_str(), buf, sizeof(buf)),
      SyscallFailsWithErrno(EIO));
  EXPECT_THAT(open(JoinPath(verity_dir, kSymlink).c_str(), O_RDONLY, 0777),
              SyscallFailsWithErrno(EIO));
}

// Replacing the symlink in the underlying fs (pointing it at a different
// target) must likewise be detected and reported as EIO.
TEST_F(SymlinkTest, ModifyLink) {
  std::string verity_dir = ASSERT_NO_ERRNO_AND_VALUE(MountVerity(
      tmpfs_dir_.path(), {EnableTarget(filename_, O_RDONLY),
                          EnableTarget(kSymlink, O_RDONLY | O_NOFOLLOW)}));

  ASSERT_THAT(unlink(JoinPath(tmpfs_dir_.path(), kSymlink).c_str()),
              SyscallSucceeds());

  std::string newlink = "newlink";
  ASSERT_THAT(symlink(JoinPath(tmpfs_dir_.path(), newlink).c_str(),
                      JoinPath(tmpfs_dir_.path(), kSymlink).c_str()),
              SyscallSucceeds());
  char buf[256];
  EXPECT_THAT(
      readlink(JoinPath(verity_dir, kSymlink).c_str(), buf, sizeof(buf)),
      SyscallFailsWithErrno(EIO));
  EXPECT_THAT(open(JoinPath(verity_dir, kSymlink).c_str(), O_RDONLY, 0777),
              SyscallFailsWithErrno(EIO));
}

}  // namespace

}  // namespace testing
}  // namespace gvisor
apache-2.0
aws/aws-sdk-java
aws-java-sdk-pinpoint/src/main/java/com/amazonaws/services/pinpoint/model/transform/TemplatesResponseMarshaller.java
2285
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.pinpoint.model.transform;

import java.util.List;
import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.services.pinpoint.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * TemplatesResponseMarshaller
 *
 * <p>
 * Generated marshaller that writes a {@link TemplatesResponse} into the JSON payload via the SDK's protocol
 * marshalling layer. Do not edit by hand; changes belong in the code generator.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class TemplatesResponseMarshaller {

    // Binds TemplatesResponse.item to the "Item" member of the JSON payload.
    private static final MarshallingInfo<List> ITEM_BINDING = MarshallingInfo.builder(MarshallingType.LIST).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("Item").build();
    // Binds TemplatesResponse.nextToken to the "NextToken" member of the JSON payload.
    private static final MarshallingInfo<String> NEXTTOKEN_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("NextToken").build();

    // Stateless, so a single shared instance is safe.
    private static final TemplatesResponseMarshaller instance = new TemplatesResponseMarshaller();

    public static TemplatesResponseMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     *
     * @param templatesResponse
     *        the object to marshall; must not be null
     * @param protocolMarshaller
     *        protocol-specific marshaller that receives each bound member
     * @throws SdkClientException
     *         if {@code templatesResponse} is null, or if the underlying marshalling fails (the cause is preserved)
     */
    public void marshall(TemplatesResponse templatesResponse, ProtocolMarshaller protocolMarshaller) {

        if (templatesResponse == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(templatesResponse.getItem(), ITEM_BINDING);
            protocolMarshaller.marshall(templatesResponse.getNextToken(), NEXTTOKEN_BINDING);
        } catch (Exception e) {
            // Wrap any failure, keeping the original exception as the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
apache-2.0
eclipse/hudson.stapler
stapler-core/src/main/java/org/kohsuke/stapler/WebApp.java
6209
/*******************************************************************************
 *
 * Copyright (c) 2004-2010 Oracle Corporation.
 *
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *
 *    Kohsuke Kawaguchi
 *
 *******************************************************************************/
package org.kohsuke.stapler;

import org.kohsuke.stapler.AbstractTearOff;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.bind.BoundObjectTable;

import javax.servlet.ServletContext;
import java.util.Map;
import java.util.HashMap;
import java.util.List;
import java.util.Vector;
import java.util.WeakHashMap;
import java.util.Hashtable;
import java.util.concurrent.CopyOnWriteArrayList;

/**
 * Object scoped to the entire webapp. Mostly used for configuring behavior of Stapler.
 *
 * <p>
 * In contrast, {@link Stapler} is a servlet, so there can be multiple instances per webapp.
 *
 * @author Kohsuke Kawaguchi
 * @see WebApp#get(ServletContext)
 * @see WebApp#getCurrent()
 * @see Stapler#getWebApp()
 */
public class WebApp {
    /**
     * Obtains the {@link WebApp} associated with the given {@link ServletContext},
     * creating and storing one on first use.
     */
    public static WebApp get(ServletContext context) {
        // Double-checked creation: the first read is lock-free; the second read
        // and the store run under the class lock so only one WebApp is created
        // per ServletContext.
        Object o = context.getAttribute(WebApp.class.getName());
        if(o==null) {
            synchronized (WebApp.class) {
                o = context.getAttribute(WebApp.class.getName());
                if(o==null) {
                    o = new WebApp(context);
                    context.setAttribute(WebApp.class.getName(),o);
                }
            }
        }
        return (WebApp)o;
    }

    /**
     * {@link ServletContext} for this webapp.
     */
    public final ServletContext context;

    /**
     * Duck-type wrappers for the given class.
     */
    public final Map<Class,Class[]> wrappers = new HashMap<Class,Class[]>();

    /**
     * MIME type -> encoding map that determines how static contents in the war file is served.
     */
    public final Map<String,String> defaultEncodingForStaticResources = new HashMap<String,String>();

    /**
     * Activated facets.
     *
     * TODO: is this really mutable?
     */
    public final List<Facet> facets = new Vector<Facet>();

    /**
     * MIME type mapping from extensions (like "txt" or "jpg") to MIME types ("foo/bar").
     *
     * This overrides whatever mappings given in the servlet as far as stapler is concerned.
     * This is case insensitive, and should be normalized to lower case.
     */
    public final Map<String,String> mimeTypes = new Hashtable<String,String>();

    // Classloader used for data-binding; see getClassLoader() for the
    // fallback chain when this is left null. volatile for safe publication.
    private volatile ClassLoader classLoader;

    /**
     * All {@link MetaClass}es.
     *
     * Avoids class leaks by {@link WeakHashMap}.
     */
    private final Map<Class,MetaClass> classMap = new WeakHashMap<Class,MetaClass>();

    /**
     * Handles objects that are exported.
     */
    public final BoundObjectTable boundObjectTable = new BoundObjectTable();

    // Renderers tried in order to turn a return value into an HTTP response.
    // CopyOnWriteArrayList: registration is rare, iteration is per-request.
    private final CopyOnWriteArrayList<HttpResponseRenderer> responseRenderers = new CopyOnWriteArrayList<HttpResponseRenderer>();

    // CSRF-protection crumb issuer; defaults to CrumbIssuer.DEFAULT.
    private CrumbIssuer crumbIssuer = CrumbIssuer.DEFAULT;

    public WebApp(ServletContext context) {
        this.context = context;
        // TODO: allow classloader to be given?
        facets.addAll(Facet.discover(Thread.currentThread().getContextClassLoader()));
        responseRenderers.add(new HttpResponseRenderer.Default());
    }

    /**
     * Returns the 'app' object, which is the user-specified object that
     * sits at the root of the URL hierarchy and handles the request to '/'.
     */
    public Object getApp() {
        return context.getAttribute("app");
    }

    /**
     * Sets the 'app' root object; see {@link #getApp()}.
     */
    public void setApp(Object app) {
        context.setAttribute("app",app);
    }

    /**
     * Returns the crumb issuer used for CSRF protection.
     */
    public CrumbIssuer getCrumbIssuer() {
        return crumbIssuer;
    }

    /**
     * Replaces the crumb issuer used for CSRF protection.
     */
    public void setCrumbIssuer(CrumbIssuer crumbIssuer) {
        this.crumbIssuer = crumbIssuer;
    }

    /**
     * Live, mutable list of response renderers; additions take effect immediately.
     */
    public CopyOnWriteArrayList<HttpResponseRenderer> getResponseRenderers() {
        return responseRenderers;
    }

    /**
     * Returns the effective classloader: the one set via
     * {@link #setClassLoader(ClassLoader)}, else the thread context
     * classloader, else the classloader that loaded Stapler itself.
     */
    public ClassLoader getClassLoader() {
        ClassLoader cl = classLoader;
        if(cl==null)
            cl = Thread.currentThread().getContextClassLoader();
        if(cl==null)
            cl = Stapler.class.getClassLoader();
        return cl;
    }

    /**
     * If the facet of the given type exists, return it. Otherwise null.
     */
    public <T extends Facet> T getFacet(Class<T> type) {
        for (Facet f : facets)
            if(type==f.getClass())
                return type.cast(f);
        return null;
    }

    /**
     * Sets the classloader used by {@link StaplerRequest#bindJSON(Class, JSONObject)} and its sibling methods.
     */
    public void setClassLoader(ClassLoader classLoader) {
        this.classLoader = classLoader;
    }

    /**
     * Returns the (cached) {@link MetaClass} for the given class, or null for a
     * null class. Synchronized on classMap because WeakHashMap is not thread-safe.
     */
    public MetaClass getMetaClass(Class c) {
        if(c==null)     return null;
        synchronized(classMap) {
            MetaClass mc = classMap.get(c);
            if(mc==null) {
                mc = new MetaClass(this,c);
                classMap.put(c,mc);
            }
            return mc;
        }
    }

    /**
     * Convenience maintenance method to clear all the cached scripts for the given tearoff type.
     *
     * <p>
     * This is useful when you want to have the scripts reloaded into the live system without
     * the performance penalty of {@link MetaClass#NO_CACHE}.
     *
     * @see MetaClass#NO_CACHE
     */
    public void clearScripts(Class<? extends AbstractTearOff> clazz) {
        synchronized (classMap) {
            for (MetaClass v : classMap.values()) {
                AbstractTearOff t = v.getTearOff(clazz);
                if (t!=null)
                    t.clearScripts();
            }
        }
    }

    /**
     * Gets the current {@link WebApp} that the calling thread is associated with.
     */
    public static WebApp getCurrent() {
        return Stapler.getCurrent().getWebApp();
    }
}
apache-2.0
xgqfrms/JavaWeb
x/src/com/deitel/jhtp5/elevator/event/PersonMoveEvent.java
1621
// PersonMoveEvent.java // Indicates where a Person has moved package com.deitel.jhtp5.elevator.event; // Deitel packages import com.deitel.jhtp5.elevator.model.*; public class PersonMoveEvent extends ElevatorSimulationEvent { // identifier of Person sending Event private int ID; // PersonMoveEvent constructor public PersonMoveEvent( Object source, Location location, int identifier ) { super( source, location ); ID = identifier; } // return identifier public int getID() { return( ID ); } } /************************************************************************** * (C) Copyright 1992-2003 by Deitel & Associates, Inc. and * * Prentice Hall. All Rights Reserved. * * * * DISCLAIMER: The authors and publisher of this book have used their * * best efforts in preparing the book. These efforts include the * * development, research, and testing of the theories and programs * * to determine their effectiveness. The authors and publisher make * * no warranty of any kind, expressed or implied, with regard to these * * programs or to the documentation contained in these books. The authors * * and publisher shall not be liable in any event for incidental or * * consequential damages in connection with, or arising out of, the * * furnishing, performance, or use of these programs. * *************************************************************************/
apache-2.0
aws/aws-sdk-java
aws-java-sdk-route53/src/main/java/com/amazonaws/services/route53/model/transform/HostedZoneOwnerStaxUnmarshaller.java
2636
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.route53.model.transform;

import javax.xml.stream.events.XMLEvent;
import javax.annotation.Generated;

import com.amazonaws.services.route53.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;

/**
 * HostedZoneOwner StAX Unmarshaller
 *
 * <p>
 * Generated unmarshaller that reads a {@link HostedZoneOwner} from a StAX event stream. Do not edit by hand;
 * changes belong in the code generator.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class HostedZoneOwnerStaxUnmarshaller implements Unmarshaller<HostedZoneOwner, StaxUnmarshallerContext> {

    public HostedZoneOwner unmarshall(StaxUnmarshallerContext context) throws Exception {
        HostedZoneOwner hostedZoneOwner = new HostedZoneOwner();
        // Child members of this element live exactly one level deeper.
        int originalDepth = context.getCurrentDepth();
        int targetDepth = originalDepth + 1;

        // At document start the element itself adds one more level of depth.
        if (context.isStartOfDocument())
            targetDepth += 1;

        while (true) {
            XMLEvent xmlEvent = context.nextEvent();
            if (xmlEvent.isEndDocument())
                return hostedZoneOwner;

            if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {

                if (context.testExpression("OwningAccount", targetDepth)) {
                    hostedZoneOwner.setOwningAccount(StringStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }

                if (context.testExpression("OwningService", targetDepth)) {
                    hostedZoneOwner.setOwningService(StringStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
            } else if (xmlEvent.isEndElement()) {
                // Depth dropping below the starting depth means we have left
                // the HostedZoneOwner element entirely.
                if (context.getCurrentDepth() < originalDepth) {
                    return hostedZoneOwner;
                }
            }
        }
    }

    private static HostedZoneOwnerStaxUnmarshaller instance;

    // NOTE(review): lazy init without synchronization is racy but benign here —
    // the instance is stateless, so at worst a duplicate is briefly created.
    // This is the standard pattern emitted by the SDK code generator.
    public static HostedZoneOwnerStaxUnmarshaller getInstance() {
        if (instance == null)
            instance = new HostedZoneOwnerStaxUnmarshaller();
        return instance;
    }
}
apache-2.0
wiesed/machine-learning
src/main/java/com/bm/classify/ClassifyConstants.java
863
package com.bm.classify;

/**
 * All constants belonging to BM classify.
 *
 * @author Daniel Wiese
 * @since 02.06.2006
 */
public interface ClassifyConstants {

    /**
     * The file extension for the SVM classify (prediction) files.
     */
    String SVM_DATEIENDUNG_FUER_SVM_CALSSIFY_DATEIEN = ".pred.dat";

    /**
     * The timeout (in milliseconds) for learning.
     */
    long SVM_LEARN_TIMEOUT = 15 * 60 * 1000;

    /**
     * The timeout (in milliseconds) for classification.
     */
    long SVM_CLASSIFY_TIMEOUT = 10 * 60 * 1000;

    /**
     * Name of the SVM environment (binaries) for Windows.
     */
    String SVM_ENVIRONMENT_WINDOWS = "svm-windows-6.0.1";

    /**
     * Name of the SVM environment (binaries) for Linux.
     */
    String SVM_ENVIRONMENT_LINUX = "svm-linux-6.0.1";

    /**
     * Name of the SVM environment (binaries) for Mac OS X.
     */
    String SVM_ENVIRONMENT_MAC_OX = "svm-mac-6.0.1";

    /**
     * Name of the SVM classify executable on Windows.
     */
    String SVM_CLASSIFY_NAME_WINDOWS = "svm_classify.exe";

    /**
     * Name of the SVM classify executable on Linux.
     */
    String SVM_CLASSIFY_NAME_LINUX = "svm_classify";

}
apache-2.0
ydk2/ymvc2
libraries/js/phpjs/parse/parser.so.php
23492
<?php /* misc subset */ class Bug extends Exception {} function bug_unless($assertion, $gripe='Bug found.') { if (!$assertion) throw new Bug($gripe); } function span($class, $text, $title='') { $title = htmlspecialchars($title); if ($title) $extra = " title=\"$title\""; else $extra = ''; echo "<span class=\"$class\"{$extra}>".$text."</span>\n"; } /* File: set.so.php License: GPL Purpose: We should really have a "set" data type. It's too useful. */ class set { function set($list=array()) { $this->data = array_count_values($list); } function has($item) { return isset($this->data[$item]); } function add($item) { $this->data[$item] = true; } function del($item) { unset($this->data[$item]); } function all() { return array_keys($this->data); } function one() { return key($this->data); } function count() { return count($this->data); } } /* File: automata.so.php License: GPL Purpose: Contains various utilities for operating on finite automata. */ /* First, we care about finality-extended e-NFAs. These are the basis for most of the remainder of the systems. Let it be noted early that pushdown automata will be constructed in terms of (determinized) FAs which will interpret automata death as an indication that the system needs to do something with the PDA stack. We'll form the left-recursive closure of the available non-terminals following the dieing state. If the next terminal is found in that set, then we also know what to push and what state to enter. (Alternatively, we know the first step in a recursive set of "pushes" which don't accept the terminal until the stack looks the way it should.) Ambiguity here can be considered a bug in the grammar specification. It makes the PDA non-deterministic. While it may be possible to remove this non-determinism in some limited cases, I don't think it's actually necessary. 
We can, by this procedure, form a set of PDA rules for what to do with any given terminal, assuming that the state transitions in the production DFA call for a non-terminal. We thus have a special category of rule There is another possibility: We are in a "final" state and we don't have and edge or a push that accepts the next token. In this case, we assume that we have recognized a complete production rule. We call its associated code block, which is expected to return a syntax tree node. Then, we pop the stack. The symbol on the stack should tell which DFA to jump into and what state it will be in after recognizing a member of the production known to the called DFA. We can convert this entire idea to the normal definition of a DPDA by: 1. Selecting disjoint state labels for every DFA. 2. Keeping all DFA transitions in the same table. That done, a stack symbol is merely also a state label. */ define("FA_NO_MARK", 99999); # A sentinel value. Real marks should be less. function gen_label() { # Won't return the same number twice. Note that we use state labels # for hash keys all over the place. To prevent PHP from doing the # wrong thing when we merge such hashes, we tack a letter on the # front of the labels. static $count = 0; $count ++; return 's'.$count; } class enfa { # Extended epsilon NFA in normal form. 
function enfa() { # $this->alphabet = array(); # We don't care $this->states = array(); # Contains a list of labels # These are hashes with state labels for keys: $this->delta = array(); # sub-hash from symbol to label-list $this->epsilon = array(); # label-list $this->mark = array(); # distinguishing mark # Now we can add the initial and final states: $this->initial = $this->add_state(gen_label()); $this->final = $this->add_state(gen_label()); } function eclose($label_list) { $states = array_count_values($label_list); $queue = array_keys($states); while (count($queue) > 0) { $s = array_shift($queue); foreach($this->epsilon[$s] as $t) if (!isset($states[$t])) { $states[$t] = true; $queue[] = $t; } } return array_keys($states); } function any_are_final($label_list) { return in_array($this->final, $label_list); } function best_mark($label_list) { $mark = FA_NO_MARK; foreach($label_list as $label) { $mark = min($mark, $this->mark[$label]); } return $mark; } function add_state($label) { if (isset($this->delta[$label])) { die ("Trying to add existing state to an NFA."); } $this->states[] = $label; $this->delta[$label] = array(); $this->epsilon[$label] = array(); $this->mark[$label] = FA_NO_MARK; return $label; } function add_epsilon($src, $dest) { $this->epsilon[$src][] = $dest; } function start_states() { return $this->eclose(array($this->initial)); } function add_transition($src, $glyph, $dest) { $lst = & $this->delta[$src]; if (empty($lst[$glyph])) $lst[$glyph] = array($dest); else $lst[$glyph][] = $dest; } function step($label_list, $glyph) { $out = array(); foreach($label_list as $label) { if (isset($this->delta[$label][$glyph])) { $out = array_merge($out, $this->delta[$label][$glyph]); } } return $this->eclose($out); } function accepting($label_list) { # Return a set of those glyphs which will not kill the NFA. # Assume that any necessary epsilon closure is already done. # Note that there is a certain amount of unavoidable cleverness # in the algorithm. 
I don't care the values of $out, so it # doesn't matter that they happen also to be some arbitrary # transition lists. $out = array(); foreach($label_list as $label) $out = array_merge($out, $this->delta[$label]); return array_keys($out); } /* Now that we have the basics down, I'd like some functions that let me make convenient modifications to an NFA. In particular, I would like to: 1: Recognize a particular sequence of glyphs 2: Accept the union of the current NFA and some other 3: Perform the Kleene closure 4: Similar for the common + and ? operators 5: Accept the concatenation of this and some other NFA. Fortunately, these all boil down to a fairly simple set of steps. One slightly complicated part is that I'd also like to be able to carry these "distinguishing marks" through the system so that they can instruct the final PDA on which production matched. The other more complicated part is that these production rules are really transducers. Each rule has certain parts which must go into a parse tree node. It turns out that this is a relatively hard problem in the short run, and not necessary for a solution to the ultimate goal of getting PHP programs into a "tree-of-lists" structure. */ function recognize($glyph) { $this->add_transition($this->initial, $glyph, $this->final); } function plus() { # Recognize the current NFA one or more times: $this->add_epsilon($this->final, $this->initial); } function hook() { # Recognize the current NFA zero or one times: $this->add_epsilon($this->initial, $this->final); } function kleene() { # kleen-star closure over the current NFA: $this->hook(); $this->plus(); } function copy_in($nfa) { # Used by the union and concatenation operations. # Highly magical. Counts on a few things.... foreach (array('states', 'delta', 'epsilon', 'mark') as $part) { $this->$part = array_merge($this->$part, $nfa->$part); } } function determinize() { # Now I can write the code that converts # an NFA into an equivalent DFA. 
$map = new state_set_labeler(); $start = $this->start_states(); $queue = array($start); $dfa = new dfa(); $i = $map->label($start); $dfa->add_state($i); $dfa->initial = $i; $dfa->mark[$i] = $this->best_mark($start); while (count($queue) > 0) { $set = array_shift($queue); $label = $map->label($set); foreach ($this->accepting($set) as $glyph) { $dest = $this->step($set, $glyph); $dest_label = $map->label($dest); if (!$dfa->has_state($dest_label)) { $dfa->add_state($dest_label); $dfa->mark[$dest_label] = $this->best_mark($dest); $queue[] = $dest; } $dfa->add_transition($label, $glyph, $dest_label); } if ($this->any_are_final($set)) $dfa->final[$label] = true; } return $dfa; } } /* Note that you should really throw away any NFA once you have used one of the below functions on it, because the result will contain indentically named state labels from the originals. We could fix this apparent problem, but it would mean establishing a state-renaming function for NFAs. Because I don't care to do this just now, and it's not important anyway, I'm not doing it. */ function nfa_union($nfa_list) { $out = new enfa(); foreach($nfa_list as $nfa) { $out->copy_in($nfa); $out->add_epsilon($out->initial, $nfa->initial); $out->add_epsilon($nfa->final, $out->final); } return $out; } function nfa_concat($nfa_list) { $out = new enfa(); $last_state = $out->initial; foreach($nfa_list as $nfa) { $out->copy_in($nfa); $out->add_epsilon($last_state, $nfa->initial); $last_state = $nfa->final; } $out->add_epsilon($last_state, $out->final); return $out; } class dfa { /* A DFA has a simpler representation than that of an NFA. It also has a bit of a different interface. */ function dfa() { # $this->alphabet = array(); # We don't care $this->states = array(); # Contains a list of labels $this->initial = ''; # Set this later. 
# These are hashes with state labels for keys: $this->final = array(); # Just a bit for each state $this->delta = array(); # sub-hash from symbol to label $this->mark = array(); # distinguishing mark } function add_state($label) { if ($this->has_state($label)) { die ("Trying to add existing state to an DFA."); } $this->states[] = $label; $this->final[$label] = false; $this->delta[$label] = array(); $this->mark[$label] = FA_NO_MARK; return $label; } function has_state($label) { return isset($this->delta[$label]); } function add_transition($src, $glyph, $dest) { $this->delta[$src][$glyph] = $dest; } function step($label, $glyph) { return @$this->delta[$label][$glyph]; } function accepting($label) { return array_keys($this->delta[$label]); } function minimize() { /* We'll use the table-filling algorithm to find pairs of distinguishable states. When that algorithm is done, any states not distinguishable are equivalent. We'll return a new DFA. */ $map = $this->indistinguishable_state_map($this->table_fill()); $dist = array(); foreach($map as $p => $q) $dist[$q] = $q; $dfa = new dfa(); foreach($dist as $p) $dfa->add_state($p); foreach($dist as $p) { foreach ($this->delta[$p] as $glyph => $q) $dfa->add_transition($p, $glyph, $map[$q]); $dfa->final[$p] = $this->final[$p]; $dfa->mark[$p] = $this->mark[$p]; } $dfa->initial = $map[$this->initial]; return $dfa; } function indistinguishable_state_map($table) { # Assumes that $table is filled according to the table filling # algorithm. $map = array(); $set = new set($this->states); while ($set->count()) { $p = $set->one(); foreach($set->all() as $q) if (!$table->differ($p, $q)) { $map[$q] = $p; $set->del($q); } } return $map; } function table_fill() { /* We use a slight modification of the standard base case: Two states are automatically distinguishable if their marks differ. 
*/ # Base Case: $table = new distinguishing_table(); foreach($this->states as $s1) foreach($this->states as $s2) { if ($this->mark[$s1] != $this->mark[$s2]) $table->distinguish($s1, $s2); } # Induction: do { /* nothing */ } while (!$this->filling_round($table)); return $table; } function filling_round(&$table) { $done = true; foreach($this->states as $s1) foreach($this->states as $s2) { if ($s1 == $s2) continue; if (!$table->differ($s1, $s2)) { # Try to find a reason why the two states # differ. If so, then mark them different # and clear $done. Note that if the table # has no record of either state, then we # can't yet make a determination. $different = $this->compare_states($s1, $s2, $table); if ($different) { $table->distinguish($s1, $s2); $done = false; break; } } } # ("Done Round<br/>"); return $done; } function compare_states($p, $q, $table) { $sigma = array_unique(array_merge($this->accepting($p), $this->accepting($q))); # "Comparing $p and $q - shared vocabulary: [ ".implode(' : ', $sigma)." 
] - "); if ($p == $q) { # "Same State<br/>"; return false; } foreach($sigma as $glyph) { $p1 = $this->step($p, $glyph); $q1 = $this->step($q, $glyph); if (!($p1 and $q1) or $table->differ($p1, $q1)) { # "<font color=green>They differ on $glyph - $p1/$q1<br/></font>"); return true; } } # ("No difference found (yet)<br/>"); return false; } } class distinguishing_table { function distinguishing_table() { $this->dist = array(); } function key($s1, $s2) { $them = array($s1, $s2); sort($them); return implode("|", $them); } function distinguish($s1, $s2) { $key = $this->key($s1, $s2); $this->dist[$key] = true; } function differ($s1, $s2) { $key = $this->key($s1, $s2); return isset($this->dist[$key]); } } class state_set_labeler { function state_set_labeler() { $this->map=array(); } function label($list) { sort($list); $key = implode(':', $list); if (empty($this->map[$key])) $this->map[$key] = gen_label(); return $this->map[$key]; } } /* Now we can turn any production rule (head + set <body, action>) into a DFA that recognizes the rule and can even invoke the correct action based on a set of distinguishing marks. Any given final state in the DFA will be marked with exactly the best matching action number. A remaining problem is that of transduction. We would like to mark certain glyphs with a symbol indicating that they cause the corresponding parse node to go into the correct slot of a special parsing data structure which makes for convenient reference within an action part of a rule. In other words, we would really ideally like to turn NFTs into DFTs. It seems, at the moment, that the transduction might still be non-deterministic. This not so much of a problem as a big hassle. However, if we always make the entire matched glyph list available in the form of a list of parse nodes, then the action that corresponds to a given rule branch is free to do fancy things. All that remains is to build a PDA from a collection of DFAs. 
These various DFAs will mostly have some transitions that are predicated on non-terminal symbols in the CFG. We have to find all such transitions and deal with them specially. */ /* File: lex.so.php License: GPL Purpose: Provides a simple lexical analysis framework Purpose: useful in so many ways. (Minilanguages are a Purpose: fabulous way to save programming time.) */ class token { function __construct($type, $text, $start, $stop) { $this->type = $type; $this->text = $text; $this->start = $start; $this->stop = $stop; } } function null_token() { return new token('','','',''); } // we want a serializable structure. /* class preg_pattern { function __construct($regex, $type, $ignore, $action) { $this->regex = $regex; $this->type = $type; $this->ignore = $ignore; $this->action = $action; # This is the name of a function. } function test($string) { if (preg_match($this->regex.'A', $string, $match)) return $match; } } */ function preg_pattern($regex, $type, $ignore, $action) { return array($regex, $type, $ignore, $action); } function preg_pattern_test($pattern, $string) { if (preg_match($pattern.'A', $string, $match)) return $match; } class stream { function __construct($string) { $this->string = $string; $this->col = 0; $this->line = 1; } function consume($str) { $len = strlen($str); $this->string = substr($this->string, $len); $this->col += $len; } function test($pattern) { if ($match = preg_pattern_test($pattern,$this->string)) { $this->consume($match[0]); return $match; } } function default_rule() { if (!strlen($this->string)) return null_token(); $start = $this->pos(); $ch = $this->string[0]; $this->consume($ch); $stop = $this->pos(); return new token('c'.$ch, $ch, $start, $stop); } function pos() { return new point($this->line, $this->col); } } class point { function __construct($line, $col) { $this->line = $line; $this->col = $col; } } abstract class token_source { abstract function next(); abstract function report_instant_description(); function report_error() { 
$this->report_instant_description(); echo "The next few tokens are:<br/>\n"; for ($i=0; $i<15; $i++) { $tok = $this->next(); span('term', htmlSpecialChars($tok->text), $tok->type); } } } class preg_scanner extends token_source { function report_instant_description() { echo "Scanner State: $this->state<br/>\n"; } function __construct($init_context, $p = NULL) { bug_unless(func_num_args()); $this->pattern = $p?$p:array('INITIAL'=>array()); $this->state = 'INITIAL'; $this->init_context = $init_context; $this->context = $init_context; } function add_state($name, $cluster) { bug_unless(is_array($cluster)); $this->pattern[$name] = $cluster; } function start($string) { $this->context = $this->init_context; $this->stream = new stream($string); $this->megaregexp = array(); foreach ($this->pattern as $key=>$blah) { $s=''; foreach ($this->pattern[$key] as $pattern) { if ($s) $s.='|'; $s .= $pattern[0]; } $s = '('.$s.')'; $this->megaregexp[$key] = $s; } } function next() { $start = $this->stream->pos(); bug_unless(is_array($this->pattern[$this->state]), 'No state called '.$this->state); # much faster implementation of the lexer, by leveraging PCRE a bit better. if ($match = $this->stream->test($this->megaregexp[$this->state])) { $text = $match[0]; $tmp = array_flip($match); $index = $tmp[$text] -1; $pattern = $this->pattern[$this->state][$index]; $type = $pattern[1]; //->type; $action = $pattern[3]; //->action; if ($action) $action($type, $text, $match, $this->state, $this->context); if ($pattern[2]) return $this->next(); $stop = $this->stream->pos(); return new token($type, $text, $start, $stop); } return $this->stream->default_rule(); } } $GLOBALS['wasted']=0; /* File: parser.so.php License: GPL Purpose: Contains the code necessary to operate the left-recursive parsers Purpose: whose execution tables are generated by the parser generator. 
*/ class parse_stack_frame { private $symbol, $semantic; public $state; function __construct($symbol, $state) { $this->symbol = $symbol; $this->state = $state; $this->semantic = array(); } function shift($semantic) { $this->semantic[] = $semantic; } function fold($semantic) { $this->semantic = array($semantic); } function semantic() { return $this->semantic; } function trace() { return "$this->symbol : $this->state"; } } class parse_error extends Exception {} abstract class parser { function __construct($pda) { $this->pda = $pda; $this->action = $pda['action']; $this->start = $pda['start']; $this->delta = $pda['delta']; } function report() { # pr($this->action); pr($this->start); foreach($this->delta as $label => $d) { echo "<h3>State $label</h3>"; foreach($d as $glyph => $step) echo $glyph." -&gt; ". implode(':', $step)."<br>"; } } function get_step($label, $glyph) { $d = $this->delta[$label]; if (isset($d[$glyph])) return $d[$glyph]; if (isset($d['[default]'])) return $d['[default]']; return array('error'); } function parse($symbol, $lex, $strategy = null) { $stack = array(); $tos = $this->frame($symbol); $token = $lex->next(); while (true) { $step = $this->get_step($tos->state, $token->type); # echo implode(':', $step)."<br>"; switch($step[0]) { case 'go': $tos->shift($token->text); $tos->state = $step[1]; $token = $lex->next(); break; case 'do': $semantic = $this->reduce($step[1], $tos->semantic()); if (empty($stack)) { $strategy->assert_done($token, $lex); return $semantic; } else { $tos = array_pop($stack); $tos->shift($semantic); } break; case 'push': $tos->state = $step[2]; $stack[] = $tos; $tos = $this->frame($step[1]); break; case 'fold': $tos->fold($this->reduce($step[1], $tos->semantic())); $tos->state = $step[2]; break; case 'error': $stack[] = $tos; $strategy->stuck($token, $lex, $stack); break; default: throw new parse_error("Impossible. 
Bad PDA has $step[0] instruction."); } } } function frame($symbol) { return new parse_stack_frame($symbol, $this->start[$symbol]); } abstract function reduce($action, $tokens); } class easy_parser extends parser { function __construct($pda, $strategy = null) { parent::__construct($pda); $this->call = $this->action; //array(); $this->strategy = ($strategy ? $strategy : new default_parser_strategy()); /* foreach($this->action as $k => $body) { $this->call[$k] = create_function( '$tokens', preg_replace('/{(\d+)}/', '$tokens[\\1]', $body)); } */ } function reduce($action, $tokens) { return $this->call[$action]($tokens); } function parse($symbol, $lex, $strategy = null) { return parent::parse($symbol, $lex, $this->strategy); } } abstract class parser_strategy { abstract function stuck($token, $lex, $stack); abstract function assert_done($token, $lex); } function send_parse_error_css_styles() { ?> <style> .term { border: 1px solid green; margin: 2px; } .char { border: 1px solid red; margin: 2px; } .nonterm { border: 1px solid blue; margin: 10px; } .wierd { border: 1px solid purple; margin: 2px; } pre { line-height: 1.5; } </style> <?php } class default_parser_strategy extends parser_strategy { function stuck($token, $lex, $stack) { send_parse_error_css_styles(); ?> <hr/>The LR parser is stuck. Source and grammar do not agree.<br/> Looking at token: <?php span('term', $token->text, $token->type); echo ' [ '.$token->type.' ]'; echo "<br/>\n"; $lex->report_error(); echo "<hr/>\n"; echo "Backtrace Follows:<br/>\n"; # pr($stack); while (count($stack)) { $tos = array_pop($stack); echo $tos->trace()."<br/>\n"; } throw new parse_error("Can't tell what to do with ".$token->type."."); } function assert_done($token, $lex) { if ($token->type) $this->stuck($token, $lex, array()); } } ?>
apache-2.0
lorensen/VTKExamples
src/Python/PolyData/ImplicitPolyDataDistance.py
2249
import numpy as np import vtk def main(): sphereSource = vtk.vtkSphereSource() sphereSource.SetCenter(0.0, 0.0, 0.0) sphereSource.SetRadius(1.0) sphereSource.Update() sphereMapper = vtk.vtkPolyDataMapper() sphereMapper.SetInputConnection(sphereSource.GetOutputPort()) sphereMapper.ScalarVisibilityOff() sphereActor = vtk.vtkActor() sphereActor.SetMapper(sphereMapper) sphereActor.GetProperty().SetOpacity(.3) sphereActor.GetProperty().SetColor(1, 0, 0) implicitPolyDataDistance = vtk.vtkImplicitPolyDataDistance() implicitPolyDataDistance.SetInput(sphereSource.GetOutput()) # Setup a grid points = vtk.vtkPoints() step = 0.1 for x in np.arange(-2, 2, step): for y in np.arange(-2, 2, step): for z in np.arange(-2, 2, step): points.InsertNextPoint(x, y, z) # Add distances to each point signedDistances = vtk.vtkFloatArray() signedDistances.SetNumberOfComponents(1) signedDistances.SetName("SignedDistances") # Evaluate the signed distance function at all of the grid points for pointId in range(points.GetNumberOfPoints()): p = points.GetPoint(pointId) signedDistance = implicitPolyDataDistance.EvaluateFunction(p) signedDistances.InsertNextValue(signedDistance) polyData = vtk.vtkPolyData() polyData.SetPoints(points) polyData.GetPointData().SetScalars(signedDistances) vertexGlyphFilter = vtk.vtkVertexGlyphFilter() vertexGlyphFilter.SetInputData(polyData) vertexGlyphFilter.Update() signedDistanceMapper = vtk.vtkPolyDataMapper() signedDistanceMapper.SetInputConnection(vertexGlyphFilter.GetOutputPort()) signedDistanceMapper.ScalarVisibilityOn() signedDistanceActor = vtk.vtkActor() signedDistanceActor.SetMapper(signedDistanceMapper) renderer = vtk.vtkRenderer() renderer.AddViewProp(sphereActor) renderer.AddViewProp(signedDistanceActor) renderWindow = vtk.vtkRenderWindow() renderWindow.AddRenderer(renderer) renWinInteractor = vtk.vtkRenderWindowInteractor() renWinInteractor.SetRenderWindow(renderWindow) renderWindow.Render() renWinInteractor.Start() if __name__ == '__main__': main()
apache-2.0
beargiles/projecteuler
src/main/java/com/invariantproperties/projecteuler/SequenceCache.java
3102
/* * This code was written by Bear Giles <bgiles@coyotesong.com> and he * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Any contributions made by others are licensed to this project under * one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * * Copyright (c) 2014 Bear Giles <bgiles@coyotesong.com> */ package com.invariantproperties.projecteuler; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; /** * Cache interface. Three types of caches are supported: * * <b>IN-MEMORY CACHE.</b> This cache is held entirely in memory. It may use a * LRU algorithm to keep the amount of memory required in check. * * <b>EXISTING DATABASE-BACKED CACHE.</b> This cache is backed by a database * (but may use an in-memory cache as well for performance). The database is * already initialized. The database may be read-only, or it may allow * additional values to be added over time. * * <b>NEW DATABASE-BACKED CACHE.</b> This is another cache backed by a database * but it must be initialized before use. This could be the first use of a new * permanent database, or could be a JVM-based database that only exists for the * duration of the application. * * @author Bear Giles <bgiles@coyotesong.com> * * @param <E> */ public interface SequenceCache<E> { /** * Is this a read-only cache? */ boolean isReadOnly(); /** * Set cache read-only. 
*/ void setReadOnly(boolean readOnly); /** * Is the cache already initialized? */ boolean isInitialized(); /** * Initialize elements * * @throws IllegalStateException * cache has already been initialized. */ boolean initialize(final List<E> elements); /** * Initialize elements * * @throws IllegalStateException * cache has already been initialized. */ boolean initialize(final Iterator<E> iterator, long count); /** * Does cache contain this value? */ // boolean contains(Object o); /** * Get value from cache. * * @param n * @return */ E get(final int n) throws NoSuchElementException; /** * Put value into the cache. * * @throws UnsupportedOperationException * if this is readonly cache. */ void put(final int n, final E value); /** * Rese cache to newly-initialized state. */ void reset(); }
apache-2.0
ivafanas/sltbench
tests/MAResultsContainerTest.cpp
1812
#include "gtest/gtest.h" #include "src/MAResultsContainer.h" #include <cstdint> #include <initializer_list> using namespace sltbench; static MAResultsContainer MakeContainer( std::initializer_list<std::uint64_t> values) { MAResultsContainer cont; for (const auto value : values) cont.Add(value); return cont; } TEST(MAResultsContainer, GetBestIsZeroForEmptyContainer) { MAResultsContainer cont; EXPECT_EQ(0u, cont.GetBest()); } TEST(MAResultsContainer, GetBestIsMinimumForSingleResultContainer) { EXPECT_EQ(50u, MakeContainer({ 50u }).GetBest()); } TEST(MAResultsContainer, GetBestIsMinimumForSingleElementContainer) { EXPECT_EQ(50u, MakeContainer({ 50u, 50u }).GetBest()); } TEST(MAResultsContainer, GetBestIsMinimumForMultiElementsContainer) { EXPECT_EQ(50u, MakeContainer({ 50u, 55u, 60u }).GetBest()); } TEST(MAResultsContainer, GetMinSpotValueIsZeroForEmptyContainer) { MAResultsContainer cont; EXPECT_EQ(0u, cont.GetMinSpotValue(1, 1)); } TEST(MAResultsContainer, GetMinSpotValueIsZeroForContainerSizeLessThanSpot) { EXPECT_EQ(0u, MakeContainer({ 50u, 50u }).GetMinSpotValue(3, 1)); } TEST(MAResultsContainer, GetMinSpotValueIsZeroForNoSuchSpot) { const auto cont = MakeContainer({ 50u, 50u, 60u, 60u, 70u }); EXPECT_EQ(0u, cont.GetMinSpotValue(3, 1)); } TEST(MAResultsContainer, GetMinSpotValueCorrectForSingleElementSpot) { EXPECT_EQ(50u, MakeContainer({ 50u, 50u }).GetMinSpotValue(2, 1)); } TEST(MAResultsContainer, GetMinSpotValueCorrectForMultipleUniresults) { const auto cont = MakeContainer({ 900u, 1000u, 1020u, 1100u }); EXPECT_EQ(1000u, cont.GetMinSpotValue(2, 3)); } TEST(MAResultsContainer, GetMinSpotValueCorrectForMultiElementSpot) { const auto cont = MakeContainer({ 900u, 1000u, 1003u, 1020u, 1100u }); EXPECT_EQ(1000u, cont.GetMinSpotValue(3, 3)); }
apache-2.0
carlosb1/examples-c14
algorithms/client_search.cpp
281
#include <iostream> #include "search.h" int main () { const int size = 13; int values [] ={4 ,5 ,6 ,7 ,2 ,3 ,65 ,12 ,88 ,12 ,38 ,1 ,8 }; int temp[size]; search::mergesort(values,temp,0,size-1); for (auto value: temp) { std::cout << value << ","; } std::cout << '\n'; }
apache-2.0
ceylon/ceylon
language/runtime-js/modules.js
195
/* Metamodel module and package objects */ var $loadedModules$={}; x$.$loadedModules$=$loadedModules$; function $addmod$(mod, modname) { $loadedModules$[modname] = mod; } x$.$addmod$=$addmod$;
apache-2.0
googleads/google-ads-ruby
lib/google/ads/google_ads/v9/services/bidding_strategy_simulation_service/paths.rb
2518
# frozen_string_literal: true # Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Auto-generated by gapic-generator-ruby. DO NOT EDIT! module Google module Ads module GoogleAds module V9 module Services module BiddingStrategySimulationService # Path helper methods for the BiddingStrategySimulationService API. module Paths ## # Create a fully-qualified BiddingStrategySimulation resource string. # # The resource will be in the following format: # # `customers/{customer_id}/biddingStrategySimulations/{bidding_strategy_id}~{type}~{modification_method}~{start_date}~{end_date}` # # @param customer_id [String] # @param bidding_strategy_id [String] # @param type [String] # @param modification_method [String] # @param start_date [String] # @param end_date [String] # # @return [::String] def bidding_strategy_simulation_path customer_id:, bidding_strategy_id:, type:, modification_method:, start_date:, end_date: raise ::ArgumentError, "customer_id cannot contain /" if customer_id.to_s.include? "/" raise ::ArgumentError, "bidding_strategy_id cannot contain /" if bidding_strategy_id.to_s.include? "/" raise ::ArgumentError, "type cannot contain /" if type.to_s.include? "/" raise ::ArgumentError, "modification_method cannot contain /" if modification_method.to_s.include? "/" raise ::ArgumentError, "start_date cannot contain /" if start_date.to_s.include? 
"/" "customers/#{customer_id}/biddingStrategySimulations/#{bidding_strategy_id}~#{type}~#{modification_method}~#{start_date}~#{end_date}" end extend self end end end end end end end
apache-2.0
googleads/google-ads-ruby
lib/google/ads/google_ads/v8/enums/account_link_status_pb.rb
1209
# Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/ads/googleads/v8/enums/account_link_status.proto require 'google/protobuf' require 'google/api/annotations_pb' Google::Protobuf::DescriptorPool.generated_pool.build do add_file("google/ads/googleads/v8/enums/account_link_status.proto", :syntax => :proto3) do add_message "google.ads.googleads.v8.enums.AccountLinkStatusEnum" do end add_enum "google.ads.googleads.v8.enums.AccountLinkStatusEnum.AccountLinkStatus" do value :UNSPECIFIED, 0 value :UNKNOWN, 1 value :ENABLED, 2 value :REMOVED, 3 value :REQUESTED, 4 value :PENDING_APPROVAL, 5 value :REJECTED, 6 value :REVOKED, 7 end end end module Google module Ads module GoogleAds module V8 module Enums AccountLinkStatusEnum = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v8.enums.AccountLinkStatusEnum").msgclass AccountLinkStatusEnum::AccountLinkStatus = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v8.enums.AccountLinkStatusEnum.AccountLinkStatus").enummodule end end end end end
apache-2.0
jruesga/rview
gerrit/src/main/java/com/ruesga/rview/gerrit/model/DeletedDraftCommentInfo.java
971
/* * Copyright (C) 2016 Jorge Ruesga * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ruesga.rview.gerrit.model; import com.google.gson.annotations.SerializedName; /** * @link "https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#deleted-draft-comment-info" */ public class DeletedDraftCommentInfo { @SerializedName("change") public ChangeInfo name; @SerializedName("deleted") public CommentInfo[] deleted; }
apache-2.0
asimarslan/hazelcast-csharp-client
Hazelcast.Net/Hazelcast.Client.Protocol.Codec/MapGetAllCodec.cs
2874
// Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using System.Collections.Generic;
using Hazelcast.Client.Protocol.Util;
using Hazelcast.IO;
using Hazelcast.IO.Serialization;

// Client Protocol version, Since:1.0 - Update:1.0
namespace Hazelcast.Client.Protocol.Codec
{
    /// <summary>
    /// Codec for the Map.GetAll client-protocol operation: encodes the request
    /// (map name plus a list of serialized keys) and decodes the response
    /// (a list of key/value <see cref="IData"/> pairs). The byte layout written
    /// by EncodeRequest must stay in lockstep with CalculateRequestDataSize.
    /// </summary>
    internal static class MapGetAllCodec
    {
        /// <summary>
        /// Computes the exact frame size of the request so the message buffer
        /// can be allocated up front.
        /// </summary>
        private static int CalculateRequestDataSize(string name, IList<IData> keys)
        {
            var dataSize = ClientMessage.HeaderSize;
            dataSize += ParameterUtil.CalculateDataSize(name);
            // 4 bytes for the key count written before the keys themselves.
            dataSize += Bits.IntSizeInBytes;
            foreach (var keysItem in keys)
            {
                dataSize += ParameterUtil.CalculateDataSize(keysItem);
            }
            return dataSize;
        }

        /// <summary>Builds the MapGetAll request message. Marked not retryable.</summary>
        internal static ClientMessage EncodeRequest(string name, IList<IData> keys)
        {
            var requiredDataSize = CalculateRequestDataSize(name, keys);
            var clientMessage = ClientMessage.CreateForEncode(requiredDataSize);
            clientMessage.SetMessageType((int) MapMessageType.MapGetAll);
            clientMessage.SetRetryable(false);
            clientMessage.Set(name);
            // Element count first, then each serialized key — mirrors the size
            // calculation above.
            clientMessage.Set(keys.Count);
            foreach (var keysItem in keys)
            {
                clientMessage.Set(keysItem);
            }
            clientMessage.UpdateFrameLength();
            return clientMessage;
        }

        /// <summary>Decoded response payload: the requested map entries.</summary>
        internal class ResponseParameters
        {
            public IList<KeyValuePair<IData, IData>> response;
        }

        /// <summary>
        /// Reads the response: an int entry count followed by that many
        /// (key, value) data pairs, in that exact order.
        /// </summary>
        internal static ResponseParameters DecodeResponse(IClientMessage clientMessage)
        {
            var parameters = new ResponseParameters();
            var responseSize = clientMessage.GetInt();
            var response = new List<KeyValuePair<IData, IData>>(responseSize);
            for (var responseIndex = 0; responseIndex < responseSize; responseIndex++)
            {
                var responseItemKey = clientMessage.GetData();
                var responseItemVal = clientMessage.GetData();
                var responseItem = new KeyValuePair<IData, IData>(responseItemKey, responseItemVal);
                response.Add(responseItem);
            }
            parameters.response = response;
            return parameters;
        }
    }
}
apache-2.0
mox601/functionaljava-spike
functionaljava-spike/src/test/java/fm/mox/spikes/functionaljava/state/accountweb/Deposit.java
507
package fm.mox.spikes.functionaljava.state.accountweb;

/**
 * A deposit {@link Input}: carries the (immutable) amount to add to an
 * account.
 *
 * Created by matteo (dot) moci (at) gmail (dot) com
 */
public class Deposit implements Input {

    // Amount to deposit; fixed at construction time.
    private final int amount;

    /**
     * @param amount the amount to deposit
     */
    public Deposit(int amount) {
        // The redundant explicit super() call was removed; Object's
        // no-arg constructor is invoked implicitly.
        this.amount = amount;
    }

    /** @return always {@code true}: this input is a deposit. */
    @Override
    public boolean isDeposit() {
        return true;
    }

    /** @return always {@code false}: this input is not a withdrawal. */
    @Override
    public boolean isWithdraw() {
        return false;
    }

    /** @return the deposit amount. */
    @Override
    public int getAmount() {
        return this.amount;
    }
}
apache-2.0
mairbek/junit-suite-configurator
src/com/github/suiteconfig/junit/ConfigurableRunnerBuilder.java
1702
package com.github.suiteconfig.junit;

import com.google.common.collect.ImmutableList;
import org.junit.Test;
import org.junit.internal.builders.NullBuilder;
import org.junit.rules.MethodRule;
import org.junit.runner.Runner;
import org.junit.runners.model.RunnerBuilder;
import org.junit.runners.model.RunnerScheduler;

import java.lang.reflect.Method;
import java.util.List;

/**
 * {@link org.junit.runners.model.RunnerBuilder} implementation that allows to specify rules and runner testScheduler.
 *
 * @author Mairbek Khadikov
 */
public class ConfigurableRunnerBuilder extends RunnerBuilder {
    private final List<MethodRule> rules;
    private final RunnerScheduler runnerScheduler;

    public ConfigurableRunnerBuilder(List<MethodRule> rules, RunnerScheduler runnerScheduler) {
        // Defensive immutable snapshot: later mutation of the caller's list
        // cannot affect this builder.
        this.rules = ImmutableList.copyOf(rules);
        this.runnerScheduler = runnerScheduler;
    }

    /**
     * Builds a {@link TestCaseRunner} (configured with this builder's rules
     * and scheduler) for classes containing at least one {@link Test}-annotated
     * method; all other classes are delegated to a {@link NullBuilder}.
     */
    @Override
    public Runner runnerForClass(Class<?> testClass) throws Throwable {
        if (containsTestMethod(testClass)) {
            return new TestCaseRunner(testClass, rules, runnerScheduler);
        }
        return new NullBuilder().runnerForClass(testClass);
    }

    /** True when any public method of the class carries the {@link Test} annotation. */
    private static boolean containsTestMethod(Class<?> candidate) {
        for (Method candidateMethod : candidate.getMethods()) {
            if (candidateMethod.isAnnotationPresent(Test.class)) {
                return true;
            }
        }
        return false;
    }
}
apache-2.0
mwilliamson-firefly/aws-sdk-net
sdk/src/Services/EC2/Generated/Model/Internal/MarshallTransformations/DescribeRouteTablesRequestMarshaller.cs
3983
/*
 * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

/*
 * Do not modify this file. This file is generated from the ec2-2015-04-15.normal.json service model.
 */
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
using System.Xml.Serialization;

using Amazon.EC2.Model;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
using Amazon.Runtime.Internal.Transform;
using Amazon.Runtime.Internal.Util;
namespace Amazon.EC2.Model.Internal.MarshallTransformations
{
    /// <summary>
    /// DescribeRouteTables Request Marshaller.
    /// Generated code: flattens the request object into EC2 Query-API
    /// parameters (Action/Version plus 1-based indexed list members).
    /// </summary>
    public class DescribeRouteTablesRequestMarshaller : IMarshaller<IRequest, DescribeRouteTablesRequest> , IMarshaller<IRequest,AmazonWebServiceRequest>
    {
        /// <summary>
        /// Marshaller the request object to the HTTP request.
        /// </summary>
        /// <param name="input"></param>
        /// <returns></returns>
        public IRequest Marshall(AmazonWebServiceRequest input)
        {
            return this.Marshall((DescribeRouteTablesRequest)input);
        }

        /// <summary>
        /// Marshaller the request object to the HTTP request.
        /// </summary>
        /// <param name="publicRequest"></param>
        /// <returns></returns>
        public IRequest Marshall(DescribeRouteTablesRequest publicRequest)
        {
            IRequest request = new DefaultRequest(publicRequest, "Amazon.EC2");
            request.Parameters.Add("Action", "DescribeRouteTables");
            request.Parameters.Add("Version", "2015-04-15");

            if(publicRequest != null)
            {
                if(publicRequest.IsSetFilters())
                {
                    // EC2 Query API list members are 1-based: Filter.1.Name, Filter.1.Value.1, ...
                    int publicRequestlistValueIndex = 1;
                    foreach(var publicRequestlistValue in publicRequest.Filters)
                    {
                        if(publicRequestlistValue.IsSetName())
                        {
                            request.Parameters.Add("Filter" + "." + publicRequestlistValueIndex + "." + "Name", StringUtils.FromString(publicRequestlistValue.Name));
                        }
                        if(publicRequestlistValue.IsSetValues())
                        {
                            // Nested value list, also 1-based per filter.
                            int publicRequestlistValuelistValueIndex = 1;
                            foreach(var publicRequestlistValuelistValue in publicRequestlistValue.Values)
                            {
                                request.Parameters.Add("Filter" + "." + publicRequestlistValueIndex + "." + "Value" + "." + publicRequestlistValuelistValueIndex, StringUtils.FromString(publicRequestlistValuelistValue));
                                publicRequestlistValuelistValueIndex++;
                            }
                        }
                        publicRequestlistValueIndex++;
                    }
                }
                if(publicRequest.IsSetRouteTableIds())
                {
                    int publicRequestlistValueIndex = 1;
                    foreach(var publicRequestlistValue in publicRequest.RouteTableIds)
                    {
                        request.Parameters.Add("RouteTableId" + "." + publicRequestlistValueIndex, StringUtils.FromString(publicRequestlistValue));
                        publicRequestlistValueIndex++;
                    }
                }
            }
            return request;
        }
    }
}
apache-2.0
mogotest/selenium
remote/client/src/java/org/openqa/selenium/remote/RemoteWebDriver.java
15985
/*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package org.openqa.selenium.remote;

import java.net.URL;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.google.common.base.Function;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

import org.openqa.selenium.By;
import org.openqa.selenium.Cookie;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.Platform;
import org.openqa.selenium.Speed;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.internal.FindsByClassName;
import org.openqa.selenium.internal.FindsById;
import org.openqa.selenium.internal.FindsByLinkText;
import org.openqa.selenium.internal.FindsByName;
import org.openqa.selenium.internal.FindsByTagName;
import org.openqa.selenium.internal.FindsByXPath;
import org.openqa.selenium.internal.ReturnedCookie;

/**
 * WebDriver implementation that drives a browser over the remote wire
 * protocol: each WebDriver call is packaged as a {@link Command}, sent through
 * a {@link CommandExecutor}, and the {@link Response} value is converted back
 * into client-side objects (see the two converter classes at the bottom).
 */
public class RemoteWebDriver implements WebDriver, JavascriptExecutor,
    FindsById, FindsByClassName, FindsByLinkText, FindsByName,
    FindsByTagName, FindsByXPath {

  // Turns failure responses into thrown exceptions (see execute()).
  private final ErrorHandler errorHandler = new ErrorHandler();

  private CommandExecutor executor;
  // Capabilities actually granted by the server, set during startSession().
  private Capabilities capabilities;
  private SessionId sessionId;
  protected Process clientProcess;

  public RemoteWebDriver(CommandExecutor executor, Capabilities desiredCapabilities) {
    this.executor = executor;
    // startClient() runs before the NEW_SESSION command is sent, so subtypes
    // can launch any local helper process the session needs.
    startClient();
    startSession(desiredCapabilities);
  }

  public RemoteWebDriver(Capabilities desiredCapabilities) throws Exception {
    this((URL) null, desiredCapabilities);
  }

  public RemoteWebDriver(URL remoteAddress, Capabilities desiredCapabilities) throws Exception {
    this(new HttpCommandExecutor(remoteAddress), desiredCapabilities);
  }

  /**
   * Sends NEW_SESSION and records the server's answer: the granted
   * capabilities and the session id used by every subsequent command.
   * The "platform" capability is parsed separately because the server may
   * respond either with a {@link Platform} enum name or with a raw
   * os.name-style string.
   */
  @SuppressWarnings({"unchecked"})
  protected void startSession(Capabilities desiredCapabilities) {
    Response response = execute(DriverCommand.NEW_SESSION,
        ImmutableMap.of("desiredCapabilities", desiredCapabilities));

    Map<String, Object> rawCapabilities = (Map<String, Object>) response.getValue();
    DesiredCapabilities returnedCapabilities = new DesiredCapabilities();
    for (Map.Entry<String, Object> entry : rawCapabilities.entrySet()) {
      // Handle the platform later
      if ("platform".equals(entry.getKey())) {
        continue;
      }
      returnedCapabilities.setCapability(entry.getKey(), entry.getValue());
    }
    String platformString = (String) rawCapabilities.get("platform");
    Platform platform;
    try {
      if (platformString == null || "".equals(platformString)) {
        platform = Platform.ANY;
      } else {
        platform = Platform.valueOf(platformString);
      }
    } catch (IllegalArgumentException e) {
      // The server probably responded with a name matching the os.name
      // system property. Try to recover and parse this.
      platform = Platform.extractFromSysProperty(platformString);
    }
    returnedCapabilities.setPlatform(platform);

    capabilities = returnedCapabilities;
    sessionId = new SessionId(response.getSessionId());
  }

  /**
   * Method called before
   * {@link #startSession(Capabilities) starting a new session}. The default
   * implementation is a no-op, but subtypes should override this method to
   * define custom behavior.
   */
  protected void startClient() {
  }

  /**
   * Method called after executing a {@link #quit()} command. Subtypes
   * should override this method to release any resources acquired in
   * {@link #startClient()}; the default implementation is a no-op.
   */
  protected void stopClient() {
  }

  public ErrorHandler getErrorHandler() {
    return errorHandler;
  }

  public CommandExecutor getCommandExecutor() {
    return executor;
  }

  public Capabilities getCapabilities() {
    return capabilities;
  }

  public void get(String url) {
    execute(DriverCommand.GET, ImmutableMap.of("url", url));
  }

  public String getTitle() {
    Response response = execute(DriverCommand.GET_TITLE);
    return response.getValue().toString();
  }

  public String getCurrentUrl() {
    return execute(DriverCommand.GET_CURRENT_URL).getValue().toString();
  }

  // By-based lookups delegate back into this driver via the Finds* interfaces.
  public List<WebElement> findElements(By by) {
    return by.findElements(this);
  }

  public WebElement findElement(By by) {
    return by.findElement(this);
  }

  // Shared wire-level lookup used by all the typed find* methods below;
  // "by" is the protocol strategy name ("id", "xpath", ...).
  protected WebElement findElement(String by, String using) {
    Response response = execute(DriverCommand.FIND_ELEMENT,
        ImmutableMap.of("using", by, "value", using));
    return (WebElement) response.getValue();
  }

  @SuppressWarnings("unchecked")
  protected List<WebElement> findElements(String by, String using) {
    Response response = execute(DriverCommand.FIND_ELEMENTS,
        ImmutableMap.of("using", by, "value", using));
    return (List<WebElement>) response.getValue();
  }

  public WebElement findElementById(String using) {
    return findElement("id", using);
  }

  public List<WebElement> findElementsById(String using) {
    return findElements("id", using);
  }

  public WebElement findElementByLinkText(String using) {
    return findElement("link text", using);
  }

  public List<WebElement> findElementsByLinkText(String using) {
    return findElements("link text", using);
  }

  public WebElement findElementByPartialLinkText(String using) {
    return findElement("partial link text", using);
  }

  public List<WebElement> findElementsByPartialLinkText(String using) {
    return findElements("partial link text", using);
  }

  public WebElement findElementByTagName(String using) {
    return findElement("tag name", using);
  }

  public List<WebElement> findElementsByTagName(String using) {
    return findElements("tag name", using);
  }

  public WebElement findElementByName(String using) {
    return findElement("name", using);
  }

  public List<WebElement> findElementsByName(String using) {
    return findElements("name", using);
  }

  public WebElement findElementByClassName(String using) {
    return findElement("class name", using);
  }

  public List<WebElement> findElementsByClassName(String using) {
    return findElements("class name", using);
  }

  public WebElement findElementByXPath(String using) {
    return findElement("xpath", using);
  }

  public List<WebElement> findElementsByXPath(String using) {
    return findElements("xpath", using);
  }

  // Misc

  public String getPageSource() {
    return (String) execute(DriverCommand.GET_PAGE_SOURCE).getValue();
  }

  public void close() {
    execute(DriverCommand.CLOSE);
  }

  public void quit() {
    try {
      execute(DriverCommand.QUIT);
    } finally {
      // The session is dead even if QUIT failed; always clear local state.
      sessionId = null;
      stopClient();
    }
  }

  @SuppressWarnings({"unchecked"})
  public Set<String> getWindowHandles() {
    Response response = execute(DriverCommand.GET_WINDOW_HANDLES);
    List<String> returnedValues = (List<String>) response.getValue();
    // LinkedHashSet keeps the server's window ordering.
    return new LinkedHashSet<String>(returnedValues);
  }

  public String getWindowHandle() {
    return String.valueOf(execute(DriverCommand.GET_CURRENT_WINDOW_HANDLE).getValue());
  }

  public Object executeScript(String script, Object... args) {
    if (!capabilities.isJavascriptEnabled()) {
      throw new UnsupportedOperationException("You must be using an underlying instance of WebDriver that supports executing javascript");
    }

    // Escape the quote marks
    // NOTE(review): in a replaceAll replacement string, "\\\"" is the two
    // characters \" which Matcher.replaceAll collapses to a plain quote —
    // so this line appears to replace " with " (a no-op) rather than with
    // \". Confirm the intent before relying on the escaping.
    script = script.replaceAll("\"", "\\\"");

    // WebElement arguments are converted to their {"ELEMENT": id} wire form,
    // recursively through lists and maps.
    Iterable<Object> convertedArgs = Iterables.transform(
        Lists.newArrayList(args), new WebElementToJsonConverter());

    Map<String, ?> params = ImmutableMap.of(
        "script", script,
        "args", Lists.newArrayList(convertedArgs));

    return execute(DriverCommand.EXECUTE_SCRIPT, params).getValue();
  }

  public boolean isJavascriptEnabled() {
    return capabilities.isJavascriptEnabled();
  }

  public TargetLocator switchTo() {
    return new RemoteTargetLocator();
  }

  public Navigation navigate() {
    return new RemoteNavigation();
  }

  public Options manage() {
    return new RemoteWebDriverOptions();
  }

  /**
   * Creates a new {@link RemoteWebElement} that is a child of this instance.
   * Subtypes should override this method to customize the type of
   * RemoteWebElement returned.
   *
   * @return A new RemoteWebElement that is a child of this instance.
   */
  protected RemoteWebElement newRemoteWebElement() {
    RemoteWebElement toReturn;
    if (capabilities.isJavascriptEnabled()) {
      toReturn = new RenderedRemoteWebElement();
    } else {
      toReturn = new RemoteWebElement();
    }
    toReturn.setParent(this);
    return toReturn;
  }

  /**
   * Central dispatch: wraps the command with the current session id, executes
   * it, converts any {"ELEMENT": id} structures in the response back into
   * RemoteWebElements, and raises if the server reported a failure.
   */
  protected Response execute(DriverCommand driverCommand, Map<String, ?> parameters) {
    Command command = new Command(sessionId, driverCommand, parameters);
    Response response;

    try {
      response = executor.execute(command);

      // Unwrap the response value by converting any JSON objects of the form
      // {"ELEMENT": id} to RemoteWebElements.
      Object value = new JsonToWebElementConverter().apply(response.getValue());
      response.setValue(value);
    } catch (RuntimeException e) {
      // Runtime failures propagate unchanged; only checked exceptions are
      // wrapped below.
      throw e;
    } catch (Exception e) {
      throw new WebDriverException(e);
    }

    return errorHandler.throwIfResponseFailed(response);
  }

  protected Response execute(DriverCommand command) {
    return execute(command, ImmutableMap.<String, Object>of());
  }

  // Options facade: cookies and input speed, all implemented via commands.
  private class RemoteWebDriverOptions implements Options {

    public void addCookie(Cookie cookie) {
      execute(DriverCommand.ADD_COOKIE, ImmutableMap.of("cookie", cookie));
    }

    public void deleteCookieNamed(String name) {
      execute(DriverCommand.DELETE_COOKIE, ImmutableMap.of("name", name));
    }

    public void deleteCookie(Cookie cookie) {
      deleteCookieNamed(cookie.getName());
    }

    public void deleteAllCookies() {
      execute(DriverCommand.DELETE_ALL_COOKIES);
    }

    @SuppressWarnings({"unchecked"})
    public Set<Cookie> getCookies() {
      Object returned = execute(DriverCommand.GET_ALL_COOKIES).getValue();

      try {
        List<Map<String, Object>> cookies =
            new JsonToBeanConverter().convert(List.class, returned);
        Set<Cookie> toReturn = new HashSet<Cookie>();
        for (Map<String, Object> rawCookie : cookies) {
          String name = (String) rawCookie.get("name");
          String value = (String) rawCookie.get("value");
          String path = (String) rawCookie.get("path");
          String domain = (String) rawCookie.get("domain");
          Boolean secure = (Boolean) rawCookie.get("secure");
          // Expiry is passed as null: the wire payload above carries no
          // expiry field for the reconstructed cookie.
          toReturn.add(
              new ReturnedCookie(name, value, domain, path, null, secure, getCurrentUrl()));
        }

        return toReturn;
      } catch (Exception e) {
        throw new WebDriverException(e);
      }
    }

    // Linear scan; returns null when no cookie carries the given name.
    public Cookie getCookieNamed(String name) {
      Set<Cookie> allCookies = getCookies();
      for (Cookie cookie : allCookies) {
        if (cookie.getName().equals(name)) {
          return cookie;
        }
      }
      return null;
    }

    public Speed getSpeed() {
      Response response = execute(DriverCommand.GET_SPEED);

      return Speed.valueOf((String) response.getValue());
    }

    public void setSpeed(Speed speed) {
      execute(DriverCommand.SET_SPEED, ImmutableMap.of("speed", speed));
    }
  }

  private class RemoteNavigation implements Navigation {

    public void back() {
      execute(DriverCommand.GO_BACK);
    }

    public void forward() {
      execute(DriverCommand.GO_FORWARD);
    }

    public void to(String url) {
      get(url);
    }

    public void to(URL url) {
      get(String.valueOf(url));
    }

    public void refresh() {
      execute(DriverCommand.REFRESH);
    }
  }

  protected class RemoteTargetLocator implements TargetLocator {

    public WebDriver frame(int frameIndex) {
      execute(DriverCommand.SWITCH_TO_FRAME, ImmutableMap.of("id", frameIndex));
      return RemoteWebDriver.this;
    }

    public WebDriver frame(String frameName) {
      execute(DriverCommand.SWITCH_TO_FRAME, ImmutableMap.of("id", frameName));
      return RemoteWebDriver.this;
    }

    public WebDriver window(String windowName) {
      execute(DriverCommand.SWITCH_TO_WINDOW, ImmutableMap.of("name", windowName));
      return RemoteWebDriver.this;
    }

    public WebDriver defaultContent() {
      // A plain HashMap is used because ImmutableMap rejects null values,
      // and the protocol expects {"id": null} here.
      Map<String, Object> frameId = Maps.newHashMap();
      frameId.put("id", null);
      execute(DriverCommand.SWITCH_TO_FRAME, frameId);
      return RemoteWebDriver.this;
    }

    public WebElement activeElement() {
      Response response = execute(DriverCommand.GET_ACTIVE_ELEMENT);
      return (WebElement) response.getValue();
    }
  }

  /**
   * Converts {@link WebElement} objects to their JSON representation. Will
   * recursively convert Lists and Maps to catch nested references.
   */
  private class WebElementToJsonConverter implements Function<Object, Object> {
    public Object apply(Object arg) {
      // Primitives and strings pass through unchanged.
      if (arg == null || arg instanceof String || arg instanceof Boolean ||
          arg instanceof Number) {
        return arg;
      }

      if (arg instanceof RemoteWebElement) {
        return ImmutableMap.of("ELEMENT", ((RemoteWebElement) arg).getId());
      }

      if (arg instanceof Collection<?>) {
        Collection<?> args = (Collection<?>) arg;
        return Collections2.transform(args, this);
      }

      if (arg instanceof Map<?, ?>) {
        Map<?, ?> args = (Map<?, ?>) arg;
        Map<String, Object> converted = Maps.newHashMapWithExpectedSize(args.size());
        for (Map.Entry<?, ?> entry : args.entrySet()) {
          Object key = entry.getKey();
          if (!(key instanceof String)) {
            throw new IllegalArgumentException(
                "All keys in Map script arguments must be strings: " + key.getClass().getName());
          }
          converted.put((String) key, apply(entry.getValue()));
        }
        return converted;
      }

      throw new IllegalArgumentException("Argument is of an illegal type: " +
          arg.getClass().getName());
    }
  }

  /**
   * Reconstitutes {@link WebElement}s from their JSON representation. Will
   * recursively convert Lists and Maps to catch nested references. All other
   * values pass through the converter unchanged.
   */
  private class JsonToWebElementConverter implements Function<Object, Object> {
    public Object apply(Object result) {
      if (result instanceof Collection<?>) {
        Collection<?> results = (Collection<?>) result;
        return Lists.newArrayList(Iterables.transform(results, this));
      }

      if (result instanceof Map<?, ?>) {
        Map<?, ?> resultAsMap = (Map<?, ?>) result;
        if (resultAsMap.containsKey("ELEMENT")) {
          RemoteWebElement element = newRemoteWebElement();
          element.setId(String.valueOf(resultAsMap.get("ELEMENT")));
          return element;
        } else {
          return Maps.transformValues(resultAsMap, this);
        }
      }

      // Normalize numbers: floating point becomes double, everything else long.
      if (result instanceof Number) {
        if (result instanceof Float || result instanceof Double) {
          return ((Number) result).doubleValue();
        }
        return ((Number) result).longValue();
      }

      return result;
    }
  }
}
apache-2.0
kxingit/LeetCode_Java
Binary_Tree_Tilt.java
688
/* Given a binary tree, return the tilt of the whole tree. The tilt of a tree node is defined as the absolute difference between the sum of all left subtree node values and the sum of all right subtree node values. Null node has tilt 0. The tilt of the whole tree is defined as the sum of all nodes' tilt. */ public class Solution { public int findTilt(TreeNode root) { // 8:57 - 9:00 if(root == null) return 0; return findTilt(root.left) + findTilt(root.right) + Math.abs(sum(root.left) - sum(root.right)); } public int sum(TreeNode root) { if(root == null) return 0; return root.val + sum(root.left) + sum(root.right); } }
apache-2.0
kfrodgers/vmax-py
examples/test_get_initiators.py
1314
# Copyright 2016 EMC Corporation from os import getenv from emc_vmax_smis.vmax_smis_base import VmaxSmisBase from emc_vmax_smis.vmax_smis_devices import VmaxSmisDevices from emc_vmax_smis.vmax_smis_masking import VmaxSmisMasking if __name__ == '__main__': host = getenv("ECOM_IP", default="10.108.247.22") smis_base = VmaxSmisBase(host=host, port=5989, use_ssl=True) smis_devices = VmaxSmisDevices(smis_base=smis_base) smis_masking = VmaxSmisMasking(smis_base=smis_base) system_name = smis_base.list_storage_system_names()[0] print str(smis_base.find_storage_system(system_name)) volume_names = smis_devices.list_all_devices_by_name(system_name) for volume_name in volume_names[-100:]: volume_id = smis_devices.get_volume_by_name(system_name, volume_name) storage_groups = smis_devices.get_storage_group(system_name, volume_id) if len(storage_groups) == 0: continue for sg_inst_id in storage_groups: views = smis_masking.list_views_containing_sg(system_name, sg_inst_id) for mv in views: initiators = smis_masking.list_initiators_in_view(system_name, mv) print '\n' + volume_name + ':' for initiator in initiators: print '\t' + initiator
apache-2.0
MinionTim/Sentry
src/com/ville/sentry/Utility.java
4102
package com.ville.sentry; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import java.util.Locale; import android.content.Context; import android.net.ConnectivityManager; import android.net.NetworkInfo; public class Utility { private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd", Locale.US); public static boolean isConnected(Context context) { ConnectivityManager cm = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE); NetworkInfo networkInfo = cm.getActiveNetworkInfo(); return networkInfo != null && networkInfo.isConnected(); } public static boolean isWifi(Context context) { ConnectivityManager cm = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE); NetworkInfo networkInfo = cm.getActiveNetworkInfo(); if (networkInfo != null && networkInfo.isConnected()) { if (networkInfo.getType() == ConnectivityManager.TYPE_WIFI) { return true; } } return false; } public static int getNetType(Context context) { ConnectivityManager cm = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE); NetworkInfo networkInfo = cm.getActiveNetworkInfo(); if (networkInfo != null && networkInfo.isConnected()) { return networkInfo.getType(); } return -1; } public static boolean isGprs(Context context) { ConnectivityManager cm = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE); NetworkInfo networkInfo = cm.getActiveNetworkInfo(); if (networkInfo != null && networkInfo.isConnected()) { if (networkInfo.getType() != ConnectivityManager.TYPE_WIFI) { return true; } } return false; } /** * 时间上传的时间窗口 * 当满足以下条件之一即为有效窗口时间: <br> * 1.lastUploadDate不是当天的时间<br> * 2.lastUploadDate为当天时间,同时处于下述四个时间段之一:<br> * 7:00 - 9:00 11:30 - 13:00 17:00 - 19:00 20:30 - 22:00 @param last 上次的上传时间 * @return */ public static boolean isValidUploadWindow(Date date){ Calendar 
calendar = Calendar.getInstance(Locale.US); calendar.setTime(date); int hour = calendar.get(Calendar.HOUR_OF_DAY); if(hour >= 7 && hour <= 9) { return true; }else if(hour >= 11 && hour <= 13){ return true; }else if (hour >= 17 && hour <= 19){ return true; }else if (hour >= 20 && hour <= 22){ return true; } return false; } public static boolean isSameDay(Date d1, Date d2){ String d1Str = DATE_FORMAT.format(d1); String d2Str = DATE_FORMAT.format(d2); return d1Str.equals(d2Str); } /** * A hashing method that changes a string (like a URL) into a hash suitable for using as a * disk filename. */ public static String genContactId(String name, String id) { String cacheKey; try { final MessageDigest mDigest = MessageDigest.getInstance("MD5"); mDigest.update((name + id).getBytes()); cacheKey = bytesToHexString(mDigest.digest()); } catch (NoSuchAlgorithmException e) { cacheKey = String.valueOf(name.hashCode()); } return cacheKey; } private static String bytesToHexString(byte[] bytes) { // http://stackoverflow.com/questions/332079 StringBuilder sb = new StringBuilder(); for (int i = 0; i < bytes.length; i++) { String hex = Integer.toHexString(0xFF & bytes[i]); if (hex.length() == 1) { sb.append('0'); } sb.append(hex); } return sb.toString(); } }
apache-2.0
tensorflow/probability
tensorflow_probability/python/mcmc/internal/leapfrog_integrator.py
14204
# Copyright 2018 The TensorFlow Probability Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ """Defines the LeapfrogIntegrator class.""" import abc import six import tensorflow.compat.v2 as tf from tensorflow_probability.python.internal import tensorshape_util from tensorflow_probability.python.mcmc.internal import util as mcmc_util __all__ = [ 'LeapfrogIntegrator', 'SimpleLeapfrogIntegrator', 'process_args', ] @six.add_metaclass(abc.ABCMeta) class LeapfrogIntegrator(object): """Base class for all leapfrog integrators. [Leapfrog integrators](https://en.wikipedia.org/wiki/Leapfrog_integration) numerically integrate differential equations of the form: ```none v' = dv/dt = F(x) x' = dx/dt = v ``` This class defines minimal requirements for leapfrog integration calculations. """ @abc.abstractmethod def __call__(self, momentum_parts, state_parts, target=None, target_grad_parts=None, kinetic_energy_fn=None, name=None): """Computes the integration. Args: momentum_parts: Python `list` of `Tensor`s representing momentum for each state part. state_parts: Python `list` of `Tensor`s which collectively representing the state. target: Batch of scalar `Tensor` representing the target (i.e., unnormalized log prob) evaluated at `state_parts`. target_grad_parts: Python `list` of `Tensor`s representing the gradient of `target` with respect to each of `state_parts`. 
kinetic_energy_fn: Python callable that can evaluate the kinetic energy of the given momentum. name: Python `str` used to group ops created by this function. Returns: next_momentum_parts: Python `list` of `Tensor`s representing new momentum. next_state_parts: Python `list` of `Tensor`s which collectively representing the new state. next_target: Batch of scalar `Tensor` representing the target (i.e., unnormalized log prob) evaluated at `next_state_parts`. next_target_grad_parts: Python `list` of `Tensor`s representing the gradient of `next_target` with respect to each of `next_state_parts`. """ raise NotImplementedError('Integrate logic not implemented.') class SimpleLeapfrogIntegrator(LeapfrogIntegrator): # pylint: disable=line-too-long """Simple leapfrog integrator. Calling this functor is conceptually equivalent to: ```none def leapfrog(x, v, eps, L, f, M): g = lambda x: gradient(f, x) v[0] = v + eps/2 g(x) for l = 1...L: x[l] = x[l-1] + eps * inv(M) @ v[l-1] v[l] = v[l-1] + eps * g(x[l]) v = v[L] - eps/2 * g(x[L]) return x[L], v ``` where `M = eye(dims(x))`. (In the future we may support arbitrary covariance `M`.) 
#### Examples: ```python import matplotlib.pyplot as plt import tensorflow.compat.v2 as tf import tensorflow_probability as tfp from tensorflow_probability.python.mcmc.internal import leapfrog_integrator as leapfrog_impl dims = 10 dtype = tf.float32 target_fn = tfp.distributions.MultivariateNormalDiag( loc=tf.zeros(dims, dtype)).log_prob integrator = leapfrog_impl.SimpleLeapfrogIntegrator( target_fn, step_sizes=[0.1], num_steps=3) momentum = [tf.random.normal([dims], dtype=dtype)] position = [tf.random.normal([dims], dtype=dtype)] target = None target_grad_parts = None num_iter = int(1e3) positions = tf.zeros([num_iter, dims], dtype) for i in range(num_iter): [momentum, position, target, target_grad_parts] = integrator( momentum, position, target, target_grad_parts) positions = tf.tensor_scatter_nd_update(positions, [[i]], position) plt.plot(positions[:, 0]); # Sinusoidal. ``` """ # pylint: enable=line-too-long def __init__(self, target_fn, step_sizes, num_steps): """Constructs the LeapfrogIntegrator. Assumes a simple quadratic kinetic energy function: `0.5 ||momentum||**2`. Args: target_fn: Python callable which takes an argument like `*state_parts` and returns its (possibly unnormalized) log-density under the target distribution. step_sizes: Python `list` of `Tensor`s representing the step size for the leapfrog integrator. Must broadcast with the shape of `current_state_parts`. Larger step sizes lead to faster progress, but too-large step sizes make rejection exponentially more likely. When possible, it's often helpful to match per-variable step sizes to the standard deviations of the target distribution in each variable. num_steps: `int` `Tensor` representing number of steps to run the leapfrog integration. Total progress is roughly proportional to `step_size * num_steps`. 
""" # Note on per-variable step sizes: # # Using per-variable step sizes is equivalent to using the same step # size for all variables and adding a diagonal mass matrix in the # kinetic energy term of the Hamiltonian being integrated. This is # hinted at by Neal (2011) but not derived in detail there. # # Let x and v be position and momentum variables respectively. # Let g(x) be the gradient of `target_fn(x)`. # Let S be a diagonal matrix of per-variable step sizes. # Let the Hamiltonian H(x, v) = -target_fn(x) + 0.5 * ||v||**2. # # Using per-variable step sizes gives the updates: # # v' = v0 + 0.5 * S @ g(x0) # x1 = x0 + S @ v' # v1 = v' + 0.5 * S @ g(x1) # # Let, # # u = inv(S) @ v # # for "u'", "u0", and "u1". Multiplying v by inv(S) in the updates above # gives the transformed dynamics: # # u' = inv(S) @ v' # = inv(S) @ v0 + 0.5 * g(x) # = u0 + 0.5 * g(x) # # x1 = x0 + S @ v' # = x0 + S @ S @ u' # # u1 = inv(S) @ v1 # = inv(S) @ v' + 0.5 * g(x1) # = u' + 0.5 * g(x1) # # These are exactly the leapfrog updates for the Hamiltonian # # H'(x, u) = -target_fn(x) + 0.5 * (S @ u).T @ (S @ u) # = -target_fn(x) + 0.5 * ||v||**2 # = H(x, v). # # To summarize: # # * Using per-variable step sizes implicitly simulates the dynamics # of the Hamiltonian H' (which are energy-conserving in H'). We # keep track of v instead of u, but the underlying dynamics are # the same if we transform back. # * The value of the Hamiltonian H'(x, u) is the same as the value # of the original Hamiltonian H(x, v) after we transform back from # u to v. # * Sampling v ~ N(0, I) is equivalent to sampling u ~ N(0, S**-2). # # So using per-variable step sizes in HMC will give results that are # exactly identical to explicitly using a diagonal mass matrix. 
self._target_fn = target_fn self._step_sizes = step_sizes self._num_steps = num_steps @property def target_fn(self): return self._target_fn @property def step_sizes(self): return self._step_sizes @property def num_steps(self): return self._num_steps def __call__(self, momentum_parts, state_parts, target=None, target_grad_parts=None, kinetic_energy_fn=None, name=None): """Applies `num_steps` of the leapfrog integrator. Args: momentum_parts: Python `list` of `Tensor`s representing momentum for each state part. state_parts: Python `list` of `Tensor`s which collectively representing the state. target: Batch of scalar `Tensor` representing the target (i.e., unnormalized log prob) evaluated at `state_parts`. target_grad_parts: Python `list` of `Tensor`s representing the gradient of `target` with respect to each of `state_parts`. kinetic_energy_fn: Python callable that can evaluate the kinetic energy of the given momentum. This is typically the negative log probability of the distribution over the momentum. name: Python `str` used to group ops created by this function. Returns: next_momentum_parts: Python `list` of `Tensor`s representing new momentum. next_state_parts: Python `list` of `Tensor`s which collectively representing the new state. next_target: Batch of scalar `Tensor` representing the target (i.e., unnormalized log prob) evaluated at `next_state_parts`. next_target_grad_parts: Python `list` of `Tensor`s representing the gradient of `next_target` with respect to each of `next_state_parts`. 
""" with tf.name_scope(name or 'leapfrog_integrate'): [ momentum_parts, state_parts, target, target_grad_parts, ] = process_args( self.target_fn, momentum_parts, state_parts, target, target_grad_parts) if kinetic_energy_fn is None: # Avoid adding ops and taking grads, when the implied kinetic energy # is just 0.5 * ||x||^2, so the gradient is x get_velocity_parts = lambda x: x else: def get_velocity_parts(half_next_momentum_parts): _, velocity_parts = mcmc_util.maybe_call_fn_and_grads( kinetic_energy_fn, half_next_momentum_parts) return velocity_parts # See Algorithm 1 of "Faster Hamiltonian Monte Carlo by Learning Leapfrog # Scale", https://arxiv.org/abs/1810.04449. half_next_momentum_parts = [ v + _multiply(0.5 * eps, g, dtype=v.dtype) for v, eps, g in zip(momentum_parts, self.step_sizes, target_grad_parts)] [ _, next_half_next_momentum_parts, next_state_parts, next_target, next_target_grad_parts, ] = tf.while_loop( cond=lambda i, *_: i < self.num_steps, body=lambda i, *args: [i + 1] + list(_one_step( # pylint: disable=no-value-for-parameter,g-long-lambda self.target_fn, self.step_sizes, get_velocity_parts, *args)), loop_vars=[ tf.zeros_like(self.num_steps, name='iter'), half_next_momentum_parts, state_parts, target, target_grad_parts, ]) next_momentum_parts = [ v - _multiply(0.5 * eps, g, dtype=v.dtype) # pylint: disable=g-complex-comprehension for v, eps, g in zip(next_half_next_momentum_parts, self.step_sizes, next_target_grad_parts) ] return ( next_momentum_parts, next_state_parts, next_target, next_target_grad_parts, ) def _one_step( target_fn, step_sizes, get_velocity_parts, half_next_momentum_parts, state_parts, target, target_grad_parts): """Body of integrator while loop.""" with tf.name_scope('leapfrog_integrate_one_step'): velocity_parts = get_velocity_parts(half_next_momentum_parts) next_state_parts = [] for state_part, eps, velocity_part in zip( state_parts, step_sizes, velocity_parts): next_state_parts.append( state_part + _multiply(eps, 
velocity_part, dtype=state_part.dtype)) [next_target, next_target_grad_parts] = mcmc_util.maybe_call_fn_and_grads( target_fn, next_state_parts) if any(g is None for g in next_target_grad_parts): raise ValueError( 'Encountered `None` gradient.\n' ' state_parts: {}\n' ' next_state_parts: {}\n' ' next_target_grad_parts: {}'.format( state_parts, next_state_parts, next_target_grad_parts)) tensorshape_util.set_shape(next_target, target.shape) for ng, g in zip(next_target_grad_parts, target_grad_parts): tensorshape_util.set_shape(ng, g.shape) next_half_next_momentum_parts = [ v + _multiply(eps, g, dtype=v.dtype) # pylint: disable=g-complex-comprehension for v, eps, g in zip(half_next_momentum_parts, step_sizes, next_target_grad_parts)] return [ next_half_next_momentum_parts, next_state_parts, next_target, next_target_grad_parts, ] def process_args(target_fn, momentum_parts, state_parts, target=None, target_grad_parts=None): """Sanitize inputs to `__call__`.""" with tf.name_scope('process_args'): momentum_parts = [ tf.convert_to_tensor( v, dtype_hint=tf.float32, name='momentum_parts') for v in momentum_parts] state_parts = [ tf.convert_to_tensor( v, dtype_hint=tf.float32, name='state_parts') for v in state_parts] if target is None or target_grad_parts is None: [target, target_grad_parts] = mcmc_util.maybe_call_fn_and_grads( target_fn, state_parts) else: target = tf.convert_to_tensor( target, dtype_hint=tf.float32, name='target') target_grad_parts = [ tf.convert_to_tensor( g, dtype_hint=tf.float32, name='target_grad_part') for g in target_grad_parts] return momentum_parts, state_parts, target, target_grad_parts def _multiply(tensor, state_sized_tensor, dtype): """Multiply `tensor` by a "state sized" tensor and preserve shape.""" # User should be using a step size that does not alter the state size. This # will fail noisily if that is not the case. 
result = tf.cast(tensor, dtype) * tf.cast(state_sized_tensor, dtype) tensorshape_util.set_shape(result, state_sized_tensor.shape) return result
apache-2.0
PerfCake/pc4nb
src/main/java/org/perfcake/pc4nb/ui/MessagesView.java
3653
/*
 * Copyright (c) 2015 Andrej Halaj
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.perfcake.pc4nb.ui;

import java.awt.Color;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.beans.PropertyChangeEvent;
import java.io.IOException;
import javax.swing.JMenuItem;
import javax.swing.JPopupMenu;
import javax.swing.TransferHandler;
import javax.swing.border.LineBorder;
import org.perfcake.model.Scenario.Messages.Message;
import org.perfcake.pc4nb.model.MessageModel;
import org.perfcake.pc4nb.model.MessagesModel;
import org.perfcake.pc4nb.model.ModelMap;
import org.perfcake.pc4nb.model.PC4NBModel;
import org.perfcake.pc4nb.ui.actions.AddMessageAction;

/**
 * Swing panel that renders the "Messages" section of a PerfCake scenario.
 * Supports adding a message via a context-menu action and via drag-and-drop
 * of a {@link MessageModel}, and redraws whenever the backing
 * {@link MessagesModel} reports a message change.
 */
public final class MessagesView extends PC4NBView {

    private JMenuItem addComponent = new JMenuItem("Add new message");
    private JPopupMenu menu = new JPopupMenu();
    private TransferHandler transferHandler = new MessageTransferHandler();

    /** Builds the panel: header, orange border, popup menu and DnD handler. */
    public MessagesView() {
        setHeader("Messages");
        setDefaultBorder(new LineBorder(Color.ORANGE, 1, true));
        setBorder(getDefaultBorder());

        addComponent.addActionListener(new AddMessageListener());
        menu.add(addComponent);

        this.setComponentPopupMenu(menu);
        setTransferHandler(transferHandler);
    }

    /** Attaches the model and immediately renders its messages. */
    @Override
    public void setModel(PC4NBModel model) {
        super.setModel(model);
        drawChildren();
    }

    /** Re-renders when the message list of the model changes. */
    @Override
    public void propertyChange(PropertyChangeEvent evt) {
        if (evt.getPropertyName().equals(MessagesModel.PROPERTY_MESSAGE)) {
            drawChildren();
        }
    }

    /**
     * Clears the panel and re-adds one {@code MessageView} per message in the
     * current model, then revalidates/repaints. Safe to call with no model
     * set (panel just ends up empty).
     */
    public void drawChildren() {
        removeAll();

        MessagesModel model = (MessagesModel) getModel();

        if (model != null && model.getMessages() != null) {
            for (Message message : model.getMessages().getMessage()) {
                add(new MessageView(ModelMap.getDefault().getPC4NBModelFor(message)));
            }
        }

        revalidate();
        repaint();
    }

    /**
     * Accepts drops carrying a {@link MessageModel} flavor and appends the
     * dropped message to this view's model.
     */
    private final class MessageTransferHandler extends TransferHandler {
        @Override
        public boolean canImport(TransferHandler.TransferSupport support) {
            return support.isDataFlavorSupported(MessageModel.DATA_FLAVOR);
        }

        @Override
        public boolean importData(TransferHandler.TransferSupport support) {
            try {
                MessageModel model = (MessageModel) support.getTransferable().getTransferData(MessageModel.DATA_FLAVOR);
                // Dropped messages get a placeholder URI until edited.
                model.setUri("Message");
                ((MessagesModel) getModel()).addMessage(model.getMessage());

                return true;
            } catch (UnsupportedFlavorException | IOException ex) {
                // Unsupported flavor or transfer failure: reject the drop.
                return false;
            }
        }
    }

    /** Context-menu action: opens the "add message" workflow for this model. */
    private class AddMessageListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent e) {
            AddMessageAction action = new AddMessageAction(getModel());
            action.execute();
        }
    }
}
apache-2.0
inbloom/secure-data-service
tools/data-tools/src/org/slc/sli/test/generators/interchange/InterchangeEdOrgGenerator.java
10754
/* * Copyright 2012-2013 inBloom, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.slc.sli.test.generators.interchange; import java.util.Collection; import java.util.LinkedList; import java.util.List; import org.slc.sli.test.edfi.entities.GradeLevelType; import org.slc.sli.test.edfi.entities.SLCEducationServiceCenter; import org.slc.sli.test.edfi.entities.FeederSchoolAssociation; import org.slc.sli.test.edfi.entities.InterchangeEducationOrganization; import org.slc.sli.test.edfi.entities.SLCLocalEducationAgency; import org.slc.sli.test.edfi.entities.SLCCourse; import org.slc.sli.test.edfi.entities.SLCProgram; import org.slc.sli.test.edfi.entities.SLCSchool; import org.slc.sli.test.edfi.entities.School; import org.slc.sli.test.edfi.entities.SLCStateEducationAgency; import org.slc.sli.test.edfi.entities.meta.CourseMeta; import org.slc.sli.test.edfi.entities.meta.ESCMeta; import org.slc.sli.test.edfi.entities.meta.LeaMeta; import org.slc.sli.test.edfi.entities.meta.ProgramMeta; import org.slc.sli.test.edfi.entities.meta.SchoolMeta; import org.slc.sli.test.edfi.entities.meta.SeaMeta; import org.slc.sli.test.edfi.entities.meta.relations.MetaRelations; import org.slc.sli.test.generators.CourseGenerator; import org.slc.sli.test.generators.EducationAgencyGenerator; import org.slc.sli.test.generators.LocalEducationAgencyGenerator; import org.slc.sli.test.generators.ProgramGenerator; import org.slc.sli.test.generators.SchoolGenerator; import 
org.slc.sli.test.generators.StateEducationAgencyGenerator; import org.slc.sli.test.utils.InterchangeWriter; import org.slc.sli.test.xmlgen.StateEdFiXmlGenerator; /** * Generates all Education Organizations contained in the variables: * - seaMap * - leaMap * - schoolMap * - courseMap * as created by the call to MetaRelations.buildFromSea() in StateEdFiXmlGenerator * * @author dduran * */ public class InterchangeEdOrgGenerator { static CourseGenerator gen ; static { try { gen = new CourseGenerator(GradeLevelType.SEVENTH_GRADE); }catch(Exception e) { e.printStackTrace(); } } /** * Sets up a new Education Organization Interchange and populates it * * @return * @throws Exception */ public static void generate(InterchangeWriter<InterchangeEducationOrganization> iWriter) throws Exception { writeEntitiesToInterchange(iWriter); } /** * Generates the individual entities that can be Educational Organizations * * @param interchangeObjects * @throws Exception */ private static void writeEntitiesToInterchange(InterchangeWriter<InterchangeEducationOrganization> iWriter) throws Exception { generateStateEducationAgencies(iWriter, MetaRelations.SEA_MAP.values()); generateEducationServiceCenters(iWriter, MetaRelations.ESC_MAP.values()); generateFeederSchoolAssociation(iWriter, MetaRelations.SCHOOL_MAP.values()); generateLocalEducationAgencies(iWriter, MetaRelations.LEA_MAP.values()); generateSchools(iWriter, MetaRelations.SCHOOL_MAP.values()); generateCourses(iWriter, MetaRelations.COURSE_MAP.values()); generatePrograms(iWriter, MetaRelations.PROGRAM_MAP.values()); } /** * Loops all SEAs and, using an SEA Generator, populates interchange data. 
* * @param interchangeObjects * @param seaMetas */ private static void generateStateEducationAgencies(InterchangeWriter<InterchangeEducationOrganization> iWriter, Collection<SeaMeta> seaMetas) { long startTime = System.currentTimeMillis(); for (SeaMeta seaMeta : seaMetas) { SLCStateEducationAgency sea; if ("medium".equals(StateEdFiXmlGenerator.fidelityOfData)) { sea = StateEducationAgencyGenerator.generateLowFi(seaMeta.id, seaMeta); } else { sea = StateEducationAgencyGenerator.generateLowFi(seaMeta.id, seaMeta); } iWriter.marshal(sea); } System.out.println("generated " + seaMetas.size() + " StateEducationAgency objects in: " + (System.currentTimeMillis() - startTime)); } /** * Loops all ESCs and, using an ESC Generator, populates interchange data. * * @param interchangeObjects * @param escMetas */ private static void generateEducationServiceCenters(InterchangeWriter<InterchangeEducationOrganization> iWriter, Collection<ESCMeta> escMetas) { long startTime = System.currentTimeMillis(); for (ESCMeta escMeta : escMetas) { SLCEducationServiceCenter esc; if ("medium".equals(StateEdFiXmlGenerator.fidelityOfData)) { esc = EducationAgencyGenerator.getEducationServiceCenter(escMeta.id, escMeta.seaId); } else { esc = EducationAgencyGenerator.getEducationServiceCenter(escMeta.id, escMeta.seaId); } iWriter.marshal(esc); } System.out.println("generated " + escMetas.size() + " EducationServiceCenter objects in: " + (System.currentTimeMillis() - startTime)); } /** * Generates FEEDER_RELATIONSHIPS FeederSchoolAssociation between 2 schools using a circular list. 
* * @param interchangeObjects * @param seaMetas */ private static void generateFeederSchoolAssociation(InterchangeWriter<InterchangeEducationOrganization> iWriter, Collection<SchoolMeta> schools) { long startTime = System.currentTimeMillis(); List<SchoolMeta> schoolMetas = new LinkedList<SchoolMeta>(schools); int schoolCount = schoolMetas.size(); if(schoolCount > 1) { for(int i = 0; i < MetaRelations.FEEDER_RELATIONSHIPS; i++) { SchoolMeta feederMeta = schoolMetas.get(i % schoolCount); SchoolMeta receiverMeta = schoolMetas.get((i+ 1) % schoolCount); FeederSchoolAssociation fsa = EducationAgencyGenerator.getFeederSchoolAssociation(receiverMeta, feederMeta); fsa.setFeederRelationshipDescription("Feeder Relationship " + i); iWriter.marshal(fsa); } } System.out.println("generated " + MetaRelations.FEEDER_RELATIONSHIPS + " FeederSchoolAssociation objects in: " + (System.currentTimeMillis() - startTime)); } /** * Loops all LEAs and, using an LEA Generator, populates interchange data. * * @param interchangeObjects * @param leaMetas */ private static void generateLocalEducationAgencies(InterchangeWriter<InterchangeEducationOrganization> iWriter, Collection<LeaMeta> leaMetas) { long startTime = System.currentTimeMillis(); for (LeaMeta leaMeta : leaMetas) { SLCLocalEducationAgency lea; if ("medium".equals(StateEdFiXmlGenerator.fidelityOfData)) { lea = LocalEducationAgencyGenerator.generateMedFi(leaMeta.id, leaMeta.seaId, leaMeta); } else { lea = LocalEducationAgencyGenerator.generateMedFi(leaMeta.id, leaMeta.seaId, leaMeta); } iWriter.marshal(lea); } System.out.println("generated " + leaMetas.size() + " LocalEducationAgency objects in: " + (System.currentTimeMillis() - startTime)); } /** * Loops all schools and, using a School Generator, populates interchange data. 
* * @param interchangeObjects * @param schoolMetas */ private static void generateSchools(InterchangeWriter<InterchangeEducationOrganization> iWriter, Collection<SchoolMeta> schoolMetas) { long startTime = System.currentTimeMillis(); for (SchoolMeta schoolMeta : schoolMetas) { SLCSchool school; if ("medium".equals(StateEdFiXmlGenerator.fidelityOfData)) { school = null; } else { school = SchoolGenerator.generateLowFi(schoolMeta.id, schoolMeta.leaId, schoolMeta.programId); } iWriter.marshal(school); } System.out.println("generated " + schoolMetas.size() + " School objects in: " + (System.currentTimeMillis() - startTime)); } /** * Loops all courses and, using a Course Generator, populates interchange data. * * @param interchangeObjects * @param courseMetas * @throws Exception */ private static void generateCourses(InterchangeWriter<InterchangeEducationOrganization> iWriter, Collection<CourseMeta> courseMetas) throws Exception { long startTime = System.currentTimeMillis(); for (CourseMeta courseMeta : courseMetas) { SLCCourse course; if ("medium".equals(StateEdFiXmlGenerator.fidelityOfData)) { //course = CourseGenerator.generateMidumFi(courseMeta.id, courseMeta.schoolId); course = null; } else { //course = CourseGenerator.generateLowFi(courseMeta.id, courseMeta.schoolId); course = gen.getCourse(courseMeta.id, courseMeta.schoolId, courseMeta.uniqueCourseId); } courseMeta.courseCodes.addAll(course.getCourseCode()); iWriter.marshal(course); } System.out.println("generated " + courseMetas.size() + " Course objects in: " + (System.currentTimeMillis() - startTime)); } /** * Loops all programs and, using a Program Generator, populates interchange data. 
* * @param interchangeObjects * @param programMetas */ private static void generatePrograms(InterchangeWriter<InterchangeEducationOrganization> iWriter, Collection<ProgramMeta> programMetas) { for (ProgramMeta programMeta : programMetas) { SLCProgram program; if ("medium".equals(StateEdFiXmlGenerator.fidelityOfData)) { program = null; } else { program = ProgramGenerator.generateLowFi(programMeta.id); } iWriter.marshal(program); } } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-codestar/src/main/java/com/amazonaws/services/codestar/model/transform/ProjectStatusJsonUnmarshaller.java
2940
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.codestar.model.transform; import java.math.*; import javax.annotation.Generated; import com.amazonaws.services.codestar.model.*; import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*; import com.amazonaws.transform.*; import com.fasterxml.jackson.core.JsonToken; import static com.fasterxml.jackson.core.JsonToken.*; /** * ProjectStatus JSON Unmarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ProjectStatusJsonUnmarshaller implements Unmarshaller<ProjectStatus, JsonUnmarshallerContext> { public ProjectStatus unmarshall(JsonUnmarshallerContext context) throws Exception { ProjectStatus projectStatus = new ProjectStatus(); int originalDepth = context.getCurrentDepth(); String currentParentElement = context.getCurrentParentElement(); int targetDepth = originalDepth + 1; JsonToken token = context.getCurrentToken(); if (token == null) token = context.nextToken(); if (token == VALUE_NULL) { return null; } while (true) { if (token == null) break; if (token == FIELD_NAME || token == START_OBJECT) { if (context.testExpression("state", targetDepth)) { context.nextToken(); projectStatus.setState(context.getUnmarshaller(String.class).unmarshall(context)); } if (context.testExpression("reason", targetDepth)) { context.nextToken(); projectStatus.setReason(context.getUnmarshaller(String.class).unmarshall(context)); } } else if (token == END_ARRAY || token == 
END_OBJECT) { if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) { if (context.getCurrentDepth() <= originalDepth) break; } } token = context.nextToken(); } return projectStatus; } private static ProjectStatusJsonUnmarshaller instance; public static ProjectStatusJsonUnmarshaller getInstance() { if (instance == null) instance = new ProjectStatusJsonUnmarshaller(); return instance; } }
apache-2.0
TheRingbearer/HAWKS
ode/dao-hibernate/src/main/java/org/apache/ode/daohib/bpel/hobj/HProcessInstance.java
9730
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.ode.daohib.bpel.hobj; import java.util.Collection; import java.util.Date; import java.util.HashSet; import java.util.Set; /** * Hibernate table representing a BPEL process instance. * * @hibernate.class table="BPEL_INSTANCE" dynamic-update="true" lazy="true" * @hibernate.query name="COUNT_FAILED_INSTANCES_BY_PROCESSES_IDS_AND_STATES" * query= * "select i.process.processId as pid, count(i.id) as cnt, max(i.activityFailureDateTime) as lastFailureDt from HProcessInstance as i where i.process.processId in (:processIds) and i.state = 20 and i.activityFailureCount > 0 group by i.process.processId" * @hibernate.query name="SELECT_INSTANCES_BY_PROCESS" * query="from HProcessInstance as i where i.process = :process)" * @hibernate.query name="SELECT_INSTANCES_BY_PROCESS_AND_STATES" query= * "from HProcessInstance as i where i.process = :process and i.state in (:states)" * @hibernate.query name="COUNT_INSTANCES_BY_PROCESSES_IDS_AND_STATES" query= * "select i.process.processId as pid, count(i.id) as cnt from HProcessInstance as i where i.process.processId in (:processIds) and i.state in(:states) group by i.process.processId" */ public class HProcessInstance extends HObject { public static 
final String COUNT_FAILED_INSTANCES_BY_PROCESSES_IDS_AND_STATES = "COUNT_FAILED_INSTANCES_BY_PROCESSES_IDS_AND_STATES"; public static final String SELECT_INSTANCES_BY_PROCESS = "SELECT_INSTANCES_BY_PROCESS"; public static final String SELECT_INSTANCES_BY_PROCESS_AND_STATES = "SELECT_INSTANCES_BY_PROCESS_AND_STATES"; public static final String COUNT_INSTANCES_BY_PROCESSES_IDS_AND_STATES = "COUNT_INSTANCES_BY_PROCESSES_IDS_AND_STATES"; /** Foreign key to owner {@link HProcess}. */ private HProcess _process; // @hahnml private HProcessInstanceMigration _migration; private Long _processId; /** Foreign key to the instantiating {@link HCorrelator}. */ private HCorrelator _instantiatingCorrelator; /** Scope instances belonging to this process instance. */ private Collection<HScope> _scopes = new HashSet<HScope>(); //@hahnml /** Link instances belonging to this process instance. */ private Collection<HLink> _links = new HashSet<HLink>(); private Collection<HCorrelationSet> _csets = new HashSet<HCorrelationSet>(); /** Events belonging to this instance. 
*/ private Collection<HBpelEvent> _events = new HashSet<HBpelEvent>(); private Set<HCorrelatorSelector> _correlatorSelectors = new HashSet<HCorrelatorSelector>(); private Set<HMessageExchange> _messageExchanges = new HashSet<HMessageExchange>(); private HFaultData _fault; private byte[] _jacobState; private short _previousState; private short _state; private Date _lastActiveTime; private Date _activityFailureDateTime; private int _activityFailureCount; private Collection<HActivityRecovery> _activityRecoveries = new HashSet<HActivityRecovery>(); private long _seq; /** * */ public HProcessInstance() { super(); } /** * @hibernate.many-to-one column="INSTANTIATING_CORRELATOR" * foreign-key="none" */ public HCorrelator getInstantiatingCorrelator() { return _instantiatingCorrelator; } public void setInstantiatingCorrelator(HCorrelator instantiatingCorrelator) { _instantiatingCorrelator = instantiatingCorrelator; } /** * @hibernate.many-to-one column="FAULT" column="FAULT" foreign-key="none" */ public HFaultData getFault() { return _fault; } public void setFault(HFaultData fault) { _fault = fault; } /** * @hibernate.property type="org.apache.ode.daohib.bpel.hobj.GZipDataType" * * @hibernate.column name="JACOB_STATE_DATA" sql-type="blob(2G)" */ public byte[] getJacobState() { return _jacobState; } public void setJacobState(byte[] jacobState) { _jacobState = jacobState; } /** * @hibernate.bag lazy="true" inverse="true" * @hibernate.collection-key column="IID" foreign-key="none" * @hibernate.collection-one-to-many * class="org.apache.ode.daohib.bpel.hobj.HBpelEvent" */ public Collection<HBpelEvent> getEvents() { return _events; } public void setEvents(Collection<HBpelEvent> events) { _events = events; } /** * @hibernate.set lazy="true" inverse="true" * @hibernate.collection-key column="PIID" foreign-key="none" * @hibernate.collection-one-to-many * class="org.apache.ode.daohib.bpel.hobj.HCorrelatorSelector" */ public Set<HCorrelatorSelector> getCorrelatorSelectors() { return 
_correlatorSelectors; } /** * @param selectors * the _correlatorSelectors to set */ public void setCorrelatorSelectors(Set<HCorrelatorSelector> selectors) { _correlatorSelectors = selectors; } /** * @hibernate.set lazy="true" inverse="true" * @hibernate.collection-key column="PIID" foreign-key="none" * @hibernate.collection-one-to-many * class="org.apache.ode.daohib.bpel.hobj.HMessageExchange" */ public Set<HMessageExchange> getMessageExchanges() { return _messageExchanges; } public void setMessageExchanges(Set<HMessageExchange> exchanges) { _messageExchanges = exchanges; } /** * @hibernate.property column="PREVIOUS_STATE" */ public short getPreviousState() { return _previousState; } public void setPreviousState(short previousState) { _previousState = previousState; } /** * * @hibernate.many-to-one column="PROCESS_ID" lazy="true" outer-join="true" * not-found="ignore" foreign-key="none" */ public HProcess getProcess() { return _process; } public void setProcess(HProcess process) { _process = process; } /** * @hibernate.property column="PROCESS_ID" insert="false" update="false" */ public Long getProcessId() { return _processId; } public void setProcessId(Long processId) { _processId = processId; } /** * @hibernate.bag lazy="true" inverse="true" * @hibernate.collection-key column="PIID" foreign-key="none" * @hibernate.collection-one-to-many * class="org.apache.ode.daohib.bpel.hobj.HScope" */ public Collection<HScope> getScopes() { return _scopes; } public void setScopes(Collection<HScope> scopes) { _scopes = scopes; } //@hahnml /** * @hibernate.bag lazy="true" inverse="true" * @hibernate.collection-key column="PIID" foreign-key="none" * @hibernate.collection-one-to-many * class="org.apache.ode.daohib.bpel.hobj.HLink" */ public Collection<HLink> getLinks() { return _links; } public void setLinks(Collection<HLink> links) { _links = links; } /** * @hibernate.bag lazy="true" inverse="true" * @hibernate.collection-key column="PIID" foreign-key="none" * 
@hibernate.collection-one-to-many * class="org.apache.ode.daohib.bpel.hobj.HCorrelationSet" */ public Collection<HCorrelationSet> getCorrelationSets() { return _csets; } public void setCorrelationSets(Collection<HCorrelationSet> csets) { _csets = csets; } /** * @hibernate.property column="STATE" */ public short getState() { return _state; } public void setState(short state) { _state = state; } /** * @hibernate.property column="LAST_ACTIVE_DT" */ public Date getLastActiveTime() { return _lastActiveTime; } public void setLastActiveTime(Date lastActiveTime) { _lastActiveTime = lastActiveTime; } public void setSequence(long seq) { _seq = seq; } /** * @hibernate.property column="SEQUENCE" */ public long getSequence() { return _seq; } /** * @hibernate.bag lazy="true" inverse="true" cascade="delete" * @hibernate.collection-key column="PIID" foreign-key="none" * @hibernate.collection-one-to-many * class="org.apache.ode.daohib.bpel.hobj.HActivityRecovery" */ public Collection<HActivityRecovery> getActivityRecoveries() { return _activityRecoveries; } public void setActivityRecoveries( Collection<HActivityRecovery> activityRecoveries) { _activityRecoveries = activityRecoveries; } /** * @hibernate.property column="FAILURE_COUNT" */ public int getActivityFailureCount() { return _activityFailureCount; } public void setActivityFailureCount(int count) { _activityFailureCount = count; } /** * @hibernate.property column="FAILURE_DT" */ public Date getActivityFailureDateTime() { return _activityFailureDateTime; } public void setActivityFailureDateTime(Date dateTime) { _activityFailureDateTime = dateTime; } /** * @hibernate.one-to-one column="MIGRATION" foreign-key="none" */ public HProcessInstanceMigration getMigration() { return _migration; } public void setMigration(HProcessInstanceMigration migration) { _migration = migration; } }
apache-2.0
aparod/jonix
jonix-codegen/src/main/java/com/tectonica/jonix/codegen/util/ListDiff.java
2600
/* * Copyright (C) 2012 Zach Melamed * * Latest version available online at https://github.com/zach-m/jonix * Contact me at zach@tectonica.co.il * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tectonica.jonix.codegen.util; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; public class ListDiff { public static interface CompareListener<T> { boolean onDiff(T itemL, T itemR); } /** * compares two <strong>sorted</strong> lists */ public static <T extends Comparable<? super T>> boolean compare(List<T> listL, List<T> listR, CompareListener<T> listener) { final Iterator<T> iterL = (listL == null) ? null : listL.iterator(); final Iterator<T> iterR = (listR == null) ? null : listR.iterator(); T itemL = null, itemR = null; boolean iterLbehind = (listL != null), iterRbehind = (listR != null); boolean doContinue = true; while (doContinue) { if (iterLbehind) itemL = iterL.hasNext() ? iterL.next() : null; if (iterRbehind) itemR = iterR.hasNext() ? 
iterR.next() : null; if (itemL == null && itemR == null) break; iterLbehind = (itemL != null) && (itemR == null || (itemL.compareTo(itemR) <= 0)); iterRbehind = (itemR != null) && (itemL == null || (itemR.compareTo(itemL) <= 0)); if (iterLbehind && iterRbehind) doContinue = listener.onDiff(itemL, itemR); else if (iterLbehind) doContinue = listener.onDiff(itemL, null); else { if (!iterRbehind) throw new RuntimeException("Internal error"); doContinue = listener.onDiff(null, itemR); } } return doContinue; } /** * compares two <strong>unsorted</strong> collections */ public static <T extends Comparable<? super T>> boolean sortAndCompare(Collection<T> listL, Collection<T> listR, CompareListener<T> listener) { final List<T> sortedL = new ArrayList<>(listL); final List<T> sortedR = new ArrayList<>(listR); Collections.sort(sortedL); Collections.sort(sortedR); return compare(sortedL, sortedR, listener); } }
apache-2.0
Erhannis/MathNStuff
src/main/java/com/erhannis/mathnstuff/utils/ObservableMap.java
5463
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package com.erhannis.mathnstuff.utils; import java.util.HashMap; import java.util.Map.Entry; import java.util.Objects; import java.util.function.Consumer; /** * (Fairly) thread-safe Observable Map. Triggers callbacks when the stored value is * changed. All methods are synchronized, and do not * themselves trigger any asynchronous behavior. (If the callbacks e.g. start * threads or something, that's on them.) * * Throwables thrown from any callback are logged, and otherwise ignored. * * Mechanism used to check for changes depends on checkIdentical, in constructor. * * Note: the map is modified BEFORE the callbacks are run, so is should be ok to * call .get() from inside a callback. * * @author erhannis */ public class ObservableMap<KEY, VAL> { /** * Gonna copy JavaFX's model, here */ public static class Change<KEY, VAL> { public final boolean wasRemoved; public final boolean wasAdded; public final KEY key; public final VAL valueRemoved; public final VAL valueAdded; public Change(boolean wasRemoved, boolean wasAdded, KEY key, VAL valueRemoved, VAL valueAdded) { this.wasRemoved = wasRemoved; this.wasAdded = wasAdded; this.key = key; this.valueRemoved = valueRemoved; this.valueAdded = valueAdded; } } private HashMap<Object, Consumer<Change<KEY, VAL>>> subscriptions = new HashMap<>(); private HashMap<KEY, VAL> map = new HashMap<>(); private boolean checkIdentical = false; /** * Defaults `checkIdentical` to false. */ public ObservableMap() { this(false); } /** * If `checkIdentical`, uses `==` to check if values have changed. Else, uses `Objects.equals()`. * @param checkIdentical */ public ObservableMap(boolean checkIdentical) { this.checkIdentical = checkIdentical; } /** * Put value. If this adds an entry, or changes an existing entry (by != ), * synchronously and sequentially runs callbacks. 
* * @param key * @param newValue * @return previous value, or null */ public synchronized VAL put(KEY key, VAL newValue) { VAL oldVal = null; if (!map.containsKey(key) || ((checkIdentical && newValue != map.get(key)) || (!checkIdentical && !Objects.equals(newValue, map.get(key))))) { Change<KEY, VAL> change; if (map.containsKey(key)) { // So value is different oldVal = map.get(key); map.put(key, newValue); change = new Change<KEY, VAL>(true, true, key, oldVal, newValue); } else { map.put(key, newValue); change = new Change<KEY, VAL>(false, true, key, null, newValue); } for (Consumer<Change<KEY, VAL>> callback : subscriptions.values()) { try { callback.accept(change); } catch (Throwable t) { t.printStackTrace(); } } } return oldVal; } /** * Remove key. Assuming there is an entry to be removed, synchronously and * sequentially runs callbacks. * * @param key * @return previous value, or null */ public synchronized VAL remove(KEY key) { VAL oldVal = null; if (map.containsKey(key)) { oldVal = map.remove(key); Change<KEY, VAL> change = new Change<KEY, VAL>(true, false, key, oldVal, null); for (Consumer<Change<KEY, VAL>> callback : subscriptions.values()) { try { callback.accept(change); } catch (Throwable t) { t.printStackTrace(); } } } return oldVal; } /** * Get a copy of the current map. * * @return */ public synchronized HashMap<KEY, VAL> get() { return new HashMap<KEY, VAL>(map); } /** * Get a value from the current map. * * @param key * @return */ public synchronized VAL get(KEY key) { return map.get(key); } /** * Checks if current map contains the given key. * * @param key * @return */ public synchronized boolean containsKey(KEY key) { return map.containsKey(key); } /** * Subscribes to changes. Returns token for unsubscription. * * @param callback * @return unsubscription token */ public synchronized Object subscribe(Consumer<Change<KEY, VAL>> callback) { Object token = new Object(); subscriptions.put(token, callback); return token; } /** * Subscribes to changes. 
Synchronously calls callback with ALL CURRENT MAP * ENTRIES, one call for each entry. May be more than you bargained for. * Returns token for unsubscription. * * @param callback * @return unsubscription token */ public synchronized Object subscribeWithGet(Consumer<Change<KEY, VAL>> callback) { Object token = new Object(); subscriptions.put(token, callback); for (Entry<KEY, VAL> entry : map.entrySet()) { try { callback.accept(new Change<KEY, VAL>(false, true, entry.getKey(), null, entry.getValue())); } catch (Throwable t) { t.printStackTrace(); } } return token; } /** * Unsubscribe, using the token returned from either subscribe method. * * @param token */ public synchronized void unsubscribe(Object token) { subscriptions.remove(token); } public synchronized void unsubscribeAll() { subscriptions.clear(); } }
apache-2.0
GoogleCloudPlatform/PerfKitBenchmarker
tests/providers/ibmcloud/ibmcloud_util_test.py
1261
# Copyright 2020 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for perfkitbenchmarker.providers.ibmcloud.util."""

import unittest

from perfkitbenchmarker.providers.ibmcloud import util


class IbmcloudUtilTest(unittest.TestCase):

  def testGetBaseOs(self):
    # A recognized distro name maps to its base OS family.
    self.assertEqual('debian', util.GetBaseOs({'name': 'debian10'}))
    # An unrecognized name falls back to 'unknown'.
    self.assertEqual('unknown', util.GetBaseOs({'name': 'myos'}))

  def testGetOsInfo(self):
    image = {'name': 'Debian 10', 'operating_system': {'name': 'debian10'}}
    self.assertEqual('debian', util.GetOsInfo(image)['base_os'])


if __name__ == '__main__':
  unittest.main()
apache-2.0
BackSlasher/chef
spec/unit/resource_reporter_spec.rb
29835
# # Author:: Daniel DeLeo (<dan@opscode.com>) # Author:: Prajakta Purohit (<prajakta@opscode.com>) # Author:: Tyler Cloke (<tyler@opscode.com>) # # Copyright:: Copyright (c) 2012 Opscode, Inc. # License:: Apache License, Version 2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # require File.expand_path("../../spec_helper", __FILE__) require "chef/resource_reporter" require "socket" describe Chef::ResourceReporter do before(:all) do @reporting_toggle_default = Chef::Config[:enable_reporting] Chef::Config[:enable_reporting] = true end after(:all) do Chef::Config[:enable_reporting] = @reporting_toggle_default end before do @node = Chef::Node.new @node.name("spitfire") @rest_client = double("Chef::ServerAPI (mock)") allow(@rest_client).to receive(:post).and_return(true) @resource_reporter = Chef::ResourceReporter.new(@rest_client) @new_resource = Chef::Resource::File.new("/tmp/a-file.txt") @cookbook_name = "monkey" @new_resource.cookbook_name = @cookbook_name @cookbook_version = double("Cookbook::Version", :version => "1.2.3") allow(@new_resource).to receive(:cookbook_version).and_return(@cookbook_version) @current_resource = Chef::Resource::File.new("/tmp/a-file.txt") @start_time = Time.new @end_time = Time.new + 20 @events = Chef::EventDispatch::Dispatcher.new @run_context = Chef::RunContext.new(@node, {}, @events) @run_status = Chef::RunStatus.new(@node, @events) @run_list = Chef::RunList.new @run_list << "recipe[lobster]" << "role[rage]" << "recipe[fist]" @expansion = 
Chef::RunList::RunListExpansion.new("_default", @run_list.run_list_items) @run_id = @run_status.run_id allow(Time).to receive(:now).and_return(@start_time, @end_time) end context "when first created" do it "has no updated resources" do expect(@resource_reporter.updated_resources.size).to eq(0) end it "reports a successful run" do expect(@resource_reporter.status).to eq("success") end it "assumes the resource history feature is supported" do expect(@resource_reporter.reporting_enabled?).to be_truthy end it "should have no error_descriptions" do expect(@resource_reporter.error_descriptions).to eq({}) # @resource_reporter.error_descriptions.should be_empty # @resource_reporter.should have(0).error_descriptions end end context "after the chef run completes" do before do end it "reports a successful run" do skip "refactor how node gets set." expect(@resource_reporter.status).to eq("success") end end context "when chef fails" do before do allow(@rest_client).to receive(:raw_request).and_return({"result"=>"ok"}); allow(@rest_client).to receive(:post).and_return({"uri"=>"https://example.com/reports/nodes/spitfire/runs/#{@run_id}"}); end context "before converging any resources" do before do @resource_reporter.run_started(@run_status) @exception = Exception.new @resource_reporter.run_failed(@exception) end it "sets the run status to 'failure'" do expect(@resource_reporter.status).to eq("failure") end it "keeps the exception data" do expect(@resource_reporter.exception).to eq(@exception) end end context "when a resource fails before loading current state" do before do @exception = Exception.new @exception.set_backtrace(caller) @resource_reporter.resource_action_start(@new_resource, :create) @resource_reporter.resource_failed(@new_resource, :create, @exception) @resource_reporter.resource_completed(@new_resource) end it "collects the resource as an updated resource" do expect(@resource_reporter.updated_resources.size).to eq(1) end it "collects the desired state of the 
resource" do update_record = @resource_reporter.updated_resources.first expect(update_record.new_resource).to eq(@new_resource) end end # TODO: make sure a resource that is skipped because of `not_if` doesn't # leave us in a bad state. context "once the a resource's current state is loaded" do before do @resource_reporter.resource_action_start(@new_resource, :create) @resource_reporter.resource_current_state_loaded(@new_resource, :create, @current_resource) end context "and the resource was not updated" do before do @resource_reporter.resource_up_to_date(@new_resource, :create) end it "has no updated resources" do expect(@resource_reporter.updated_resources.size).to eq(0) end end context "and the resource was updated" do before do @new_resource.content("this is the old content") @current_resource.content("this is the new hotness") @resource_reporter.resource_updated(@new_resource, :create) @resource_reporter.resource_completed(@new_resource) end it "collects the updated resource" do expect(@resource_reporter.updated_resources.size).to eq(1) end it "collects the old state of the resource" do update_record = @resource_reporter.updated_resources.first expect(update_record.current_resource).to eq(@current_resource) end it "collects the new state of the resource" do update_record = @resource_reporter.updated_resources.first expect(update_record.new_resource).to eq(@new_resource) end context "and a subsequent resource fails before loading current resource" do before do @next_new_resource = Chef::Resource::Service.new("apache2") @exception = Exception.new @exception.set_backtrace(caller) @resource_reporter.resource_failed(@next_new_resource, :create, @exception) @resource_reporter.resource_completed(@next_new_resource) end it "collects the desired state of the failed resource" do failed_resource_update = @resource_reporter.updated_resources.last expect(failed_resource_update.new_resource).to eq(@next_new_resource) end it "does not have the current state of the failed 
resource" do failed_resource_update = @resource_reporter.updated_resources.last expect(failed_resource_update.current_resource).to be_nil end end end # Some providers, such as RemoteDirectory and some LWRPs use other # resources for their implementation. These should be hidden from reporting # since we only care about the top-level resource and not the sub-resources # used for implementation. context "and a nested resource is updated" do before do @implementation_resource = Chef::Resource::CookbookFile.new("/preseed-file.txt") @resource_reporter.resource_action_start(@implementation_resource , :create) @resource_reporter.resource_current_state_loaded(@implementation_resource, :create, @implementation_resource) @resource_reporter.resource_updated(@implementation_resource, :create) @resource_reporter.resource_completed(@implementation_resource) @resource_reporter.resource_updated(@new_resource, :create) @resource_reporter.resource_completed(@new_resource) end it "does not collect data about the nested resource" do expect(@resource_reporter.updated_resources.size).to eq(1) end end context "and a nested resource runs but is not updated" do before do @implementation_resource = Chef::Resource::CookbookFile.new("/preseed-file.txt") @resource_reporter.resource_action_start(@implementation_resource , :create) @resource_reporter.resource_current_state_loaded(@implementation_resource, :create, @implementation_resource) @resource_reporter.resource_up_to_date(@implementation_resource, :create) @resource_reporter.resource_completed(@implementation_resource) @resource_reporter.resource_updated(@new_resource, :create) @resource_reporter.resource_completed(@new_resource) end it "does not collect data about the nested resource" do expect(@resource_reporter.updated_resources.size).to eq(1) end end context "and the resource failed to converge" do before do @exception = Exception.new @exception.set_backtrace(caller) @resource_reporter.resource_failed(@new_resource, :create, @exception) 
@resource_reporter.resource_completed(@new_resource) end it "collects the resource as an updated resource" do expect(@resource_reporter.updated_resources.size).to eq(1) end it "collects the desired state of the resource" do update_record = @resource_reporter.updated_resources.first expect(update_record.new_resource).to eq(@new_resource) end it "collects the current state of the resource" do update_record = @resource_reporter.updated_resources.first expect(update_record.current_resource).to eq(@current_resource) end end end end describe "when generating a report for the server" do before do allow(@rest_client).to receive(:raw_request).and_return({"result"=>"ok"}); allow(@rest_client).to receive(:post).and_return({"uri"=>"https://example.com/reports/nodes/spitfire/runs/#{@run_id}"}); @resource_reporter.run_started(@run_status) end context "when the new_resource does not have a string for name and identity" do context "the new_resource name and id are nil" do before do @bad_resource = Chef::Resource::File.new("/tmp/nameless_file.txt") allow(@bad_resource).to receive(:name).and_return(nil) allow(@bad_resource).to receive(:identity).and_return(nil) @resource_reporter.resource_action_start(@bad_resource, :create) @resource_reporter.resource_current_state_loaded(@bad_resource, :create, @current_resource) @resource_reporter.resource_updated(@bad_resource, :create) @resource_reporter.resource_completed(@bad_resource) @run_status.stop_clock @report = @resource_reporter.prepare_run_data @first_update_report = @report["resources"].first end it "resource_name in prepared_run_data is a string" do expect(@first_update_report["name"].class).to eq(String) end it "resource_id in prepared_run_data is a string" do expect(@first_update_report["id"].class).to eq(String) end end context "the new_resource name and id are hashes" do before do @bad_resource = Chef::Resource::File.new("/tmp/filename_as_hash.txt") allow(@bad_resource).to receive(:name).and_return({:foo=>:bar}) 
allow(@bad_resource).to receive(:identity).and_return({:foo=>:bar}) @resource_reporter.resource_action_start(@bad_resource, :create) @resource_reporter.resource_current_state_loaded(@bad_resource, :create, @current_resource) @resource_reporter.resource_updated(@bad_resource, :create) @resource_reporter.resource_completed(@bad_resource) @run_status.stop_clock @report = @resource_reporter.prepare_run_data @first_update_report = @report["resources"].first end # Ruby 1.8.7 flattens out hash to string using join instead of inspect, resulting in # irb(main):001:0> {:foo => :bar}.to_s # => "foobar" # instead of the expected # irb(main):001:0> {:foo => :bar}.to_s # => "{:foo=>:bar}" # Hence checking for the class instead of the actual value. it "resource_name in prepared_run_data is a string" do expect(@first_update_report["name"].class).to eq(String) end it "resource_id in prepared_run_data is a string" do expect(@first_update_report["id"].class).to eq(String) end end end shared_examples_for "a successful client run" do before do # TODO: add inputs to generate expected output. 
# expected_data = { # "action" : "end", # "resources" : [ # { # "type" : "file", # "id" : "/etc/passwd", # "name" : "User Defined Resource Block Name", # "duration" : "1200", # "result" : "modified", # "before" : { # "state" : "exists", # "group" : "root", # "owner" : "root", # "checksum" : "xyz" # }, # "after" : { # "state" : "modified", # "group" : "root", # "owner" : "root", # "checksum" : "abc" # }, # "delta" : "" # }, # {...} # ], # "status" : "success" # "data" : "" # } @resource_reporter.resource_action_start(new_resource, :create) @resource_reporter.resource_current_state_loaded(new_resource, :create, current_resource) @resource_reporter.resource_updated(new_resource, :create) @resource_reporter.resource_completed(new_resource) @run_status.stop_clock @report = @resource_reporter.prepare_run_data @first_update_report = @report["resources"].first end it "includes the run's status" do expect(@report).to have_key("status") end it "includes a list of updated resources" do expect(@report).to have_key("resources") end it "includes an updated resource's type" do expect(@first_update_report).to have_key("type") end it "includes an updated resource's initial state" do expect(@first_update_report["before"]).to eq(current_resource.state) end it "includes an updated resource's final state" do expect(@first_update_report["after"]).to eq(new_resource.state) end it "includes the resource's name" do expect(@first_update_report["name"]).to eq(new_resource.name) end it "includes the resource's id attribute" do expect(@first_update_report["id"]).to eq(new_resource.identity) end it "includes the elapsed time for the resource to converge" do # TODO: API takes integer number of milliseconds as a string. This # should be an int. 
expect(@first_update_report).to have_key("duration") expect(@first_update_report["duration"].to_i).to be_within(100).of(0) end it "includes the action executed by the resource" do # TODO: rename as "action" expect(@first_update_report["result"]).to eq("create") end it "includes the cookbook name of the resource" do expect(@first_update_report).to have_key("cookbook_name") expect(@first_update_report["cookbook_name"]).to eq(@cookbook_name) end it "includes the cookbook version of the resource" do expect(@first_update_report).to have_key("cookbook_version") expect(@first_update_report["cookbook_version"]).to eq("1.2.3") end it "includes the total resource count" do expect(@report).to have_key("total_res_count") expect(@report["total_res_count"]).to eq("1") end it "includes the data hash" do expect(@report).to have_key("data") expect(@report["data"]).to eq({}) end it "includes the run_list" do expect(@report).to have_key("run_list") expect(@report["run_list"]).to eq(Chef::JSONCompat.to_json(@run_status.node.run_list)) end it "includes the expanded_run_list" do expect(@report).to have_key("expanded_run_list") end it "includes the end_time" do expect(@report).to have_key("end_time") expect(@report["end_time"]).to eq(@run_status.end_time.to_s) end end context "when the resource is a File" do let(:new_resource) { @new_resource } let(:current_resource) { @current_resource } it_should_behave_like "a successful client run" end context "when the resource is a RegistryKey with binary data" do let(:new_resource) do resource = Chef::Resource::RegistryKey.new('Wubba\Lubba\Dub\Dubs') resource.values([ { :name => "rick", :type => :binary, :data => 255.chr * 1 } ]) allow(resource).to receive(:cookbook_name).and_return(@cookbook_name) allow(resource).to receive(:cookbook_version).and_return(@cookbook_version) resource end let(:current_resource) do resource = Chef::Resource::RegistryKey.new('Wubba\Lubba\Dub\Dubs') resource.values([ { :name => "rick", :type => :binary, :data => 255.chr 
* 1 } ]) resource end it_should_behave_like "a successful client run" end context "for an unsuccessful run" do before do @backtrace = ["foo.rb:1 in `foo!'","bar.rb:2 in `bar!","'baz.rb:3 in `baz!'"] @node = Chef::Node.new @node.name("spitfire") @exception = ArgumentError.new allow(@exception).to receive(:inspect).and_return("Net::HTTPServerException") allow(@exception).to receive(:message).and_return("Object not found") allow(@exception).to receive(:backtrace).and_return(@backtrace) @resource_reporter.run_list_expand_failed(@node, @exception) @resource_reporter.run_failed(@exception) @report = @resource_reporter.prepare_run_data end it "includes the exception type in the event data" do expect(@report).to have_key("data") expect(@report["data"]["exception"]).to have_key("class") expect(@report["data"]["exception"]["class"]).to eq("Net::HTTPServerException") end it "includes the exception message in the event data" do expect(@report["data"]["exception"]).to have_key("message") expect(@report["data"]["exception"]["message"]).to eq("Object not found") end it "includes the exception trace in the event data" do expect(@report["data"]["exception"]).to have_key("backtrace") expect(@report["data"]["exception"]["backtrace"]).to eq(Chef::JSONCompat.to_json(@backtrace)) end it "includes the error inspector output in the event data" do expect(@report["data"]["exception"]).to have_key("description") expect(@report["data"]["exception"]["description"]).to include({"title"=>"Error expanding the run_list:", "sections"=>[{"Unexpected Error:" => "ArgumentError: Object not found"}]}) end end context "when new_resource does not have a cookbook_name" do before do @bad_resource = Chef::Resource::File.new("/tmp/a-file.txt") @bad_resource.cookbook_name = nil @resource_reporter.resource_action_start(@bad_resource, :create) @resource_reporter.resource_current_state_loaded(@bad_resource, :create, @current_resource) @resource_reporter.resource_updated(@bad_resource, :create) 
@resource_reporter.resource_completed(@bad_resource) @run_status.stop_clock @report = @resource_reporter.prepare_run_data @first_update_report = @report["resources"].first end it "includes an updated resource's initial state" do expect(@first_update_report["before"]).to eq(@current_resource.state) end it "includes an updated resource's final state" do expect(@first_update_report["after"]).to eq(@new_resource.state) end it "includes the resource's name" do expect(@first_update_report["name"]).to eq(@new_resource.name) end it "includes the resource's id attribute" do expect(@first_update_report["id"]).to eq(@new_resource.identity) end it "includes the elapsed time for the resource to converge" do # TODO: API takes integer number of milliseconds as a string. This # should be an int. expect(@first_update_report).to have_key("duration") expect(@first_update_report["duration"].to_i).to be_within(100).of(0) end it "includes the action executed by the resource" do # TODO: rename as "action" expect(@first_update_report["result"]).to eq("create") end it "does not include a cookbook name for the resource" do expect(@first_update_report).not_to have_key("cookbook_name") end it "does not include a cookbook version for the resource" do expect(@first_update_report).not_to have_key("cookbook_version") end end context "when including a resource that overrides Resource#state" do before do @current_state_resource = Chef::Resource::WithState.new("Stateful", @run_context) @current_state_resource.state = nil @new_state_resource = Chef::Resource::WithState.new("Stateful", @run_context) @new_state_resource.state = "Running" @resource_reporter.resource_action_start(@new_state_resource, :create) @resource_reporter.resource_current_state_loaded(@new_state_resource, :create, @current_state_resource) @resource_reporter.resource_updated(@new_state_resource, :create) @resource_reporter.resource_completed(@new_state_resource) @run_status.stop_clock @report = @resource_reporter.prepare_run_data 
@first_update_report = @report["resources"].first end it "sets before to {} instead of nil" do expect(@first_update_report).to have_key("before") expect(@first_update_report["before"]).to eq({}) end it "sets after to {} instead of 'Running'" do expect(@first_update_report).to have_key("after") expect(@first_update_report["after"]).to eq({}) end end end describe "when updating resource history on the server" do before do @resource_reporter.run_started(@run_status) @run_status.start_clock end context "when the server does not support storing resource history" do before do # 404 getting the run_id @response = Net::HTTPNotFound.new("a response body", "404", "Not Found") @error = Net::HTTPServerException.new("404 message", @response) expect(@rest_client).to receive(:post). with("reports/nodes/spitfire/runs", {:action => :start, :run_id => @run_id, :start_time => @start_time.to_s}, {"X-Ops-Reporting-Protocol-Version" => Chef::ResourceReporter::PROTOCOL_VERSION}). and_raise(@error) end it "assumes the feature is not enabled" do @resource_reporter.run_started(@run_status) expect(@resource_reporter.reporting_enabled?).to be_falsey end it "does not send a resource report to the server" do @resource_reporter.run_started(@run_status) expect(@rest_client).not_to receive(:post) @resource_reporter.run_completed(@node) end it "prints an error about the 404" do expect(Chef::Log).to receive(:debug).with(/404/) @resource_reporter.run_started(@run_status) end end context "when the server returns a 500 to the client" do before do # 500 getting the run_id @response = Net::HTTPInternalServerError.new("a response body", "500", "Internal Server Error") @error = Net::HTTPServerException.new("500 message", @response) expect(@rest_client).to receive(:post). with("reports/nodes/spitfire/runs", {:action => :start, :run_id => @run_id, :start_time => @start_time.to_s}, {"X-Ops-Reporting-Protocol-Version" => Chef::ResourceReporter::PROTOCOL_VERSION}). 
and_raise(@error) end it "assumes the feature is not enabled" do @resource_reporter.run_started(@run_status) expect(@resource_reporter.reporting_enabled?).to be_falsey end it "does not send a resource report to the server" do @resource_reporter.run_started(@run_status) expect(@rest_client).not_to receive(:post) @resource_reporter.run_completed(@node) end it "prints an error about the error" do expect(Chef::Log).to receive(:info).with(/500/) @resource_reporter.run_started(@run_status) end end context "when the server returns a 500 to the client and enable_reporting_url_fatals is true" do before do @enable_reporting_url_fatals = Chef::Config[:enable_reporting_url_fatals] Chef::Config[:enable_reporting_url_fatals] = true # 500 getting the run_id @response = Net::HTTPInternalServerError.new("a response body", "500", "Internal Server Error") @error = Net::HTTPServerException.new("500 message", @response) expect(@rest_client).to receive(:post). with("reports/nodes/spitfire/runs", {:action => :start, :run_id => @run_id, :start_time => @start_time.to_s}, {"X-Ops-Reporting-Protocol-Version" => Chef::ResourceReporter::PROTOCOL_VERSION}). and_raise(@error) end after do Chef::Config[:enable_reporting_url_fatals] = @enable_reporting_url_fatals end it "fails the run and prints an message about the error" do expect(Chef::Log).to receive(:error).with(/500/) expect { @resource_reporter.run_started(@run_status) }.to raise_error(Net::HTTPServerException) end end context "after creating the run history document" do before do response = {"uri"=>"https://example.com/reports/nodes/spitfire/runs/@run_id"} expect(@rest_client).to receive(:post). with("reports/nodes/spitfire/runs", {:action => :start, :run_id => @run_id, :start_time => @start_time.to_s}, {"X-Ops-Reporting-Protocol-Version" => Chef::ResourceReporter::PROTOCOL_VERSION}). 
and_return(response) @resource_reporter.run_started(@run_status) end it "creates a run document on the server at the start of the run" do expect(@resource_reporter.run_id).to eq(@run_id) end it "updates the run document with resource updates at the end of the run" do # update some resources... @resource_reporter.resource_action_start(@new_resource, :create) @resource_reporter.resource_current_state_loaded(@new_resource, :create, @current_resource) @resource_reporter.resource_updated(@new_resource, :create) allow(@resource_reporter).to receive(:end_time).and_return(@end_time) @expected_data = @resource_reporter.prepare_run_data response = {"result"=>"ok"} expect(@rest_client).to receive(:raw_request).ordered do |method, url, headers, data| expect(method).to eq(:POST) expect(headers).to eq({"Content-Encoding" => "gzip", "X-Ops-Reporting-Protocol-Version" => Chef::ResourceReporter::PROTOCOL_VERSION, },) data_stream = Zlib::GzipReader.new(StringIO.new(data)) data = data_stream.read expect(data).to eq(Chef::JSONCompat.to_json(@expected_data)) response end @resource_reporter.run_completed(@node) end end context "when data report post is enabled and the server response fails" do before do @enable_reporting_url_fatals = Chef::Config[:enable_reporting_url_fatals] Chef::Config[:enable_reporting_url_fatals] = true end after do Chef::Config[:enable_reporting_url_fatals] = @enable_reporting_url_fatals end it "should log 4xx errors" do response = Net::HTTPClientError.new("forbidden", "403", "Forbidden") error = Net::HTTPServerException.new("403 message", response) allow(@rest_client).to receive(:raw_request).and_raise(error) expect(Chef::Log).to receive(:error).with(/403/) @resource_reporter.post_reporting_data end it "should log error 5xx errors" do response = Net::HTTPServerError.new("internal error", "500", "Internal Server Error") error = Net::HTTPFatalError.new("500 message", response) allow(@rest_client).to receive(:raw_request).and_raise(error) expect(Chef::Log).to 
receive(:error).with(/500/) @resource_reporter.post_reporting_data end it "should log if a socket error happens" do allow(@rest_client).to receive(:raw_request).and_raise(SocketError.new("test socket error")) expect(Chef::Log).to receive(:error).with(/test socket error/) @resource_reporter.post_reporting_data end it "should raise if an unkwown error happens" do allow(@rest_client).to receive(:raw_request).and_raise(Exception.new) expect { @resource_reporter.post_reporting_data }.to raise_error(Exception) end end end end
apache-2.0
kubernetes-client/python
kubernetes/client/models/v1_node_selector.py
3980
# coding: utf-8 """ Kubernetes No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 The version of the OpenAPI document: release-1.22 Generated by: https://openapi-generator.tech """ import pprint import re # noqa: F401 import six from kubernetes.client.configuration import Configuration class V1NodeSelector(object): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. """ """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'node_selector_terms': 'list[V1NodeSelectorTerm]' } attribute_map = { 'node_selector_terms': 'nodeSelectorTerms' } def __init__(self, node_selector_terms=None, local_vars_configuration=None): # noqa: E501 """V1NodeSelector - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._node_selector_terms = None self.discriminator = None self.node_selector_terms = node_selector_terms @property def node_selector_terms(self): """Gets the node_selector_terms of this V1NodeSelector. # noqa: E501 Required. A list of node selector terms. The terms are ORed. # noqa: E501 :return: The node_selector_terms of this V1NodeSelector. # noqa: E501 :rtype: list[V1NodeSelectorTerm] """ return self._node_selector_terms @node_selector_terms.setter def node_selector_terms(self, node_selector_terms): """Sets the node_selector_terms of this V1NodeSelector. Required. A list of node selector terms. The terms are ORed. # noqa: E501 :param node_selector_terms: The node_selector_terms of this V1NodeSelector. 
# noqa: E501 :type: list[V1NodeSelectorTerm] """ if self.local_vars_configuration.client_side_validation and node_selector_terms is None: # noqa: E501 raise ValueError("Invalid value for `node_selector_terms`, must not be `None`") # noqa: E501 self._node_selector_terms = node_selector_terms def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, V1NodeSelector): return False return self.to_dict() == other.to_dict() def __ne__(self, other): """Returns true if both objects are not equal""" if not isinstance(other, V1NodeSelector): return True return self.to_dict() != other.to_dict()
apache-2.0
Vilsol/NMSWrapper
src/main/java/me/vilsol/nmswrapper/wraps/unparsed/NMSEntityHuman.java
24942
package me.vilsol.nmswrapper.wraps.unparsed; import com.mojang.authlib.GameProfile; import me.vilsol.nmswrapper.NMSWrapper; import me.vilsol.nmswrapper.reflections.ReflectiveClass; import me.vilsol.nmswrapper.reflections.ReflectiveMethod; import me.vilsol.nmswrapper.wraps.NMSItemStack; import java.util.UUID; @ReflectiveClass(name = "EntityHuman") public class NMSEntityHuman extends NMSEntityLiving { public NMSEntityHuman(Object nmsObject){ super(nmsObject); } public NMSEntityHuman(String nmsName, Object[] paramTypes, Object[] params) { super(nmsName, paramTypes, params); } public NMSEntityHuman(NMSWorld world, GameProfile gameProfile){ super("EntityHuman", new Object[]{NMSWorld.class, GameProfile.class}, new Object[]{world, gameProfile}); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#L() */ @ReflectiveMethod(name = "L", types = {}) public int L(){ return (int) NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#P() */ @ReflectiveMethod(name = "P", types = {}) public String P(){ return (String) NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#X() */ @ReflectiveMethod(name = "X", types = {}) public void X(){ NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#a(net.minecraft.server.v1_9_R1.ChestLock) */ @ReflectiveMethod(name = "a", types = {NMSChestLock.class}) public boolean a(NMSChestLock chestLock){ return (boolean) NMSWrapper.getInstance().exec(nmsObject, chestLock); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#aA() */ @ReflectiveMethod(name = "aA", types = {}) public void aA(){ NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#aL() */ @ReflectiveMethod(name = "aL", types = {}) public boolean aL(){ return (boolean) 
NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#aa() */ @ReflectiveMethod(name = "aa", types = {}) public String aa(){ return (String) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#addScore(int) */ @ReflectiveMethod(name = "addScore", types = {int.class}) public void addScore(int i){ NMSWrapper.getInstance().exec(nmsObject, i); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#ak() */ @ReflectiveMethod(name = "ak", types = {}) public void ak(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#alwaysGivesExp() */ @ReflectiveMethod(name = "alwaysGivesExp", types = {}) public boolean alwaysGivesExp(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#am() */ @ReflectiveMethod(name = "am", types = {}) public double am(){ return (double) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#applyExhaustion(float) */ @ReflectiveMethod(name = "applyExhaustion", types = {float.class}) public void applyExhaustion(float f){ NMSWrapper.getInstance().exec(nmsObject, f); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#aq() */ @ReflectiveMethod(name = "aq", types = {}) public int aq(){ return (int) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#attack(net.minecraft.server.v1_9_R1.Entity) */ @ReflectiveMethod(name = "attack", types = {NMSEntity.class}) public void attack(NMSEntity entity){ NMSWrapper.getInstance().exec(nmsObject, entity); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#b(java.lang.String) */ @ReflectiveMethod(name = "b", types = {String.class}) public UUID b(String s){ return (UUID) NMSWrapper.getInstance().exec(nmsObject, s); } 
/** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#bA() */ @ReflectiveMethod(name = "bA", types = {}) public NMSItemStack bA(){ return new NMSItemStack(NMSWrapper.getInstance().exec(nmsObject)); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#bD() */ @ReflectiveMethod(name = "bD", types = {}) public boolean bD(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#bF() */ @ReflectiveMethod(name = "bF", types = {}) public void bF(){ NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#bI() */ @ReflectiveMethod(name = "bI", types = {}) public float bI(){ return (float) NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#bS() */ @ReflectiveMethod(name = "bS", types = {}) public boolean bS(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#bU() */ @ReflectiveMethod(name = "bU", types = {}) public void bU(){ NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#bV() */ @ReflectiveMethod(name = "bV", types = {}) public void bV(){ NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#bY() */ @ReflectiveMethod(name = "bY", types = {}) public float bY(){ return (float) NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#bZ() */ @ReflectiveMethod(name = "bZ", types = {}) public NMSItemStack bZ(){ return new NMSItemStack(NMSWrapper.getInstance().exec(nmsObject)); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#bo() */ @ReflectiveMethod(name = "bo", types = {}) public String bo(){ 
return (String) NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#bp() */ @ReflectiveMethod(name = "bp", types = {}) public String bp(){ return (String) NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#br() */ @ReflectiveMethod(name = "br", types = {}) public int br(){ return (int) NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#c(net.minecraft.server.v1_9_R1.Entity) */ @ReflectiveMethod(name = "c", types = {NMSEntity.class}) public void c(NMSEntity entity){ NMSWrapper.getInstance().exec(nmsObject, entity); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#ca() */ @ReflectiveMethod(name = "ca", types = {}) public void ca(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#checkMovement(double, double, double) */ @ReflectiveMethod(name = "checkMovement", types = {double.class, double.class, double.class}) public void checkMovement(double d, double d1, double d2){ NMSWrapper.getInstance().exec(nmsObject, d, d1, d2); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#cj() */ @ReflectiveMethod(name = "cj", types = {}) public int cj(){ return (int) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#closeInventory() */ @ReflectiveMethod(name = "closeInventory", types = {}) public void closeInventory(){ NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#cm() */ @ReflectiveMethod(name = "cm", types = {}) public boolean cm(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#cn() */ @ReflectiveMethod(name = "cn", types = {}) public boolean cn(){ return 
(boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#copyTo(net.minecraft.server.v1_9_R1.EntityHuman, boolean) */ @ReflectiveMethod(name = "copyTo", types = {NMSEntityHuman.class, boolean.class}) public void copyTo(NMSEntityHuman entityHuman, boolean b){ NMSWrapper.getInstance().exec(nmsObject, entityHuman, b); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#d(int, net.minecraft.server.v1_9_R1.ItemStack) */ @ReflectiveMethod(name = "d", types = {int.class, NMSItemStack.class}) public boolean d(int i, NMSItemStack itemStack){ return (boolean) NMSWrapper.getInstance().exec(nmsObject, i, itemStack); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#damageArmor(float) */ @ReflectiveMethod(name = "damageArmor", types = {float.class}) public void damageArmor(float f){ NMSWrapper.getInstance().exec(nmsObject, f); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#damageEntity(net.minecraft.server.v1_9_R1.DamageSource, float) */ @ReflectiveMethod(name = "damageEntity", types = {NMSDamageSource.class, float.class}) public boolean damageEntity(NMSDamageSource damageSource, float f){ return (boolean) NMSWrapper.getInstance().exec(nmsObject, damageSource, f); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#die() */ @ReflectiveMethod(name = "die", types = {}) public void die(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#doTick() */ @ReflectiveMethod(name = "doTick", types = {}) public void doTick(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#drop(net.minecraft.server.v1_9_R1.ItemStack, boolean) */ @ReflectiveMethod(name = "drop", types = {NMSItemStack.class, boolean.class}) public NMSEntityItem drop(NMSItemStack itemStack, boolean b){ return new NMSEntityItem(NMSWrapper.getInstance().exec(nmsObject, itemStack, b)); } /** * TODO Find correct name * @see 
net.minecraft.server.v1_9_R1.EntityHuman#e(float, float) */ @ReflectiveMethod(name = "e", types = {float.class, float.class}) public void e(float f, float f1){ NMSWrapper.getInstance().exec(nmsObject, f, f1); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#g(float, float) */ @ReflectiveMethod(name = "g", types = {float.class, float.class}) public void g(float f, float f1){ NMSWrapper.getInstance().exec(nmsObject, f, f1); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getAbsorptionHearts() */ @ReflectiveMethod(name = "getAbsorptionHearts", types = {}) public float getAbsorptionHearts(){ return (float) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getBed() */ @ReflectiveMethod(name = "getBed", types = {}) public NMSBlockPosition getBed(){ return new NMSBlockPosition(NMSWrapper.getInstance().exec(nmsObject)); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getBukkitEntity() */ @ReflectiveMethod(name = "getBukkitEntity", types = {}) public NMSCraftEntity getBukkitEntity(){ return new NMSCraftEntity(NMSWrapper.getInstance().exec(nmsObject)); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getEnderChest() */ @ReflectiveMethod(name = "getEnderChest", types = {}) public NMSInventoryEnderChest getEnderChest(){ return new NMSInventoryEnderChest(NMSWrapper.getInstance().exec(nmsObject)); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getEquipment(int) */ @ReflectiveMethod(name = "getEquipment", types = {int.class}) public NMSItemStack getEquipment(int i){ return new NMSItemStack(NMSWrapper.getInstance().exec(nmsObject, i)); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getExpToLevel() */ @ReflectiveMethod(name = "getExpToLevel", types = {}) public int getExpToLevel(){ return (int) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getExpValue(net.minecraft.server.v1_9_R1.EntityHuman) */ @ReflectiveMethod(name = 
"getExpValue", types = {NMSEntityHuman.class}) public int getExpValue(NMSEntityHuman entityHuman){ return (int) NMSWrapper.getInstance().exec(nmsObject, entityHuman); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getFoodData() */ @ReflectiveMethod(name = "getFoodData", types = {}) public NMSFoodMetaData getFoodData(){ return new NMSFoodMetaData(NMSWrapper.getInstance().exec(nmsObject)); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getHeadHeight() */ @ReflectiveMethod(name = "getHeadHeight", types = {}) public float getHeadHeight(){ return (float) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getName() */ @ReflectiveMethod(name = "getName", types = {}) public String getName(){ return (String) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getProfile() */ @ReflectiveMethod(name = "getProfile", types = {}) public GameProfile getProfile(){ return (GameProfile) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getScore() */ @ReflectiveMethod(name = "getScore", types = {}) public int getScore(){ return (int) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getScoreboard() */ @ReflectiveMethod(name = "getScoreboard", types = {}) public NMSScoreboard getScoreboard(){ return new NMSScoreboard(NMSWrapper.getInstance().exec(nmsObject)); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getScoreboardDisplayName() */ @ReflectiveMethod(name = "getScoreboardDisplayName", types = {}) public NMSIChatBaseComponent getScoreboardDisplayName(){ return (NMSIChatBaseComponent) NMSWrapper.getInstance().createApplicableObject(NMSWrapper.getInstance().exec(nmsObject)); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getScoreboardTeam() */ @ReflectiveMethod(name = "getScoreboardTeam", types = {}) public NMSScoreboardTeamBase getScoreboardTeam(){ return new 
NMSScoreboardTeamBase(NMSWrapper.getInstance().exec(nmsObject)); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#getSendCommandFeedback() */ @ReflectiveMethod(name = "getSendCommandFeedback", types = {}) public boolean getSendCommandFeedback(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#giveExp(int) */ @ReflectiveMethod(name = "giveExp", types = {int.class}) public void giveExp(int i){ NMSWrapper.getInstance().exec(nmsObject, i); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#h() */ @ReflectiveMethod(name = "h", types = {}) public void h(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#inBlock() */ @ReflectiveMethod(name = "inBlock", types = {}) public boolean inBlock(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#initAttributes() */ @ReflectiveMethod(name = "initAttributes", types = {}) public void initAttributes(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#isBlocking() */ @ReflectiveMethod(name = "isBlocking", types = {}) public boolean isBlocking(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#isDeeplySleeping() */ @ReflectiveMethod(name = "isDeeplySleeping", types = {}) public boolean isDeeplySleeping(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#isRespawnForced() */ @ReflectiveMethod(name = "isRespawnForced", types = {}) public boolean isRespawnForced(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#isSleeping() */ @ReflectiveMethod(name = "isSleeping", types = {}) public boolean isSleeping(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * @see 
net.minecraft.server.v1_9_R1.EntityHuman#isSpectator() */ @ReflectiveMethod(name = "isSpectator", types = {}) public boolean isSpectator(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#j(boolean) */ @ReflectiveMethod(name = "j", types = {boolean.class}) public boolean j(boolean b){ return (boolean) NMSWrapper.getInstance().exec(nmsObject, b); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#levelDown(int) */ @ReflectiveMethod(name = "levelDown", types = {int.class}) public void levelDown(int i){ NMSWrapper.getInstance().exec(nmsObject, i); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#m() */ @ReflectiveMethod(name = "m", types = {}) public void m(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#makeSound(java.lang.String, float, float) */ @ReflectiveMethod(name = "makeSound", types = {String.class, float.class, float.class}) public void makeSound(String s, float f, float f1){ NMSWrapper.getInstance().exec(nmsObject, s, f, f1); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#n(int) */ @ReflectiveMethod(name = "n", types = {int.class}) public String n(int i){ return (String) NMSWrapper.getInstance().exec(nmsObject, i); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#openBook(net.minecraft.server.v1_9_R1.ItemStack) */ @ReflectiveMethod(name = "openBook", types = {NMSItemStack.class}) public void openBook(NMSItemStack itemStack){ NMSWrapper.getInstance().exec(nmsObject, itemStack); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#openContainer(net.minecraft.server.v1_9_R1.IInventory) */ @ReflectiveMethod(name = "openContainer", types = {NMSIInventory.class}) public void openContainer(NMSIInventory iInventory){ NMSWrapper.getInstance().exec(nmsObject, iInventory); } /** * @see 
net.minecraft.server.v1_9_R1.EntityHuman#openHorseInventory(net.minecraft.server.v1_9_R1.EntityHorse, net.minecraft.server.v1_9_R1.IInventory) */ @ReflectiveMethod(name = "openHorseInventory", types = {NMSEntityHorse.class, NMSIInventory.class}) public void openHorseInventory(NMSEntityHorse entityHorse, NMSIInventory iInventory){ NMSWrapper.getInstance().exec(nmsObject, entityHorse, iInventory); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#openSign(net.minecraft.server.v1_9_R1.TileEntitySign) */ @ReflectiveMethod(name = "openSign", types = {NMSTileEntitySign.class}) public void openSign(NMSTileEntitySign tileEntitySign){ NMSWrapper.getInstance().exec(nmsObject, tileEntitySign); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#openTileEntity(net.minecraft.server.v1_9_R1.ITileEntityContainer) */ @ReflectiveMethod(name = "openTileEntity", types = {NMSITileEntityContainer.class}) public void openTileEntity(NMSITileEntityContainer iTileEntityContainer){ NMSWrapper.getInstance().exec(nmsObject, iTileEntityContainer); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#openTrade(net.minecraft.server.v1_9_R1.IMerchant) */ @ReflectiveMethod(name = "openTrade", types = {NMSIMerchant.class}) public void openTrade(NMSIMerchant iMerchant){ NMSWrapper.getInstance().exec(nmsObject, iMerchant); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#q(int) */ @ReflectiveMethod(name = "q", types = {int.class}) public NMSItemStack q(int i){ return new NMSItemStack(NMSWrapper.getInstance().exec(nmsObject, i)); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#s() */ @ReflectiveMethod(name = "s", types = {}) public void s(){ NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#s_() */ @ReflectiveMethod(name = "s_", types = {}) public boolean s_(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * @see 
net.minecraft.server.v1_9_R1.EntityHuman#setAbsorptionHearts(float) */ @ReflectiveMethod(name = "setAbsorptionHearts", types = {float.class}) public void setAbsorptionHearts(float f){ NMSWrapper.getInstance().exec(nmsObject, f); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#setEquipment(int, net.minecraft.server.v1_9_R1.ItemStack) */ @ReflectiveMethod(name = "setEquipment", types = {int.class, NMSItemStack.class}) public void setEquipment(int i, NMSItemStack itemStack){ NMSWrapper.getInstance().exec(nmsObject, i, itemStack); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#setRespawnPosition(net.minecraft.server.v1_9_R1.BlockPosition, boolean) */ @ReflectiveMethod(name = "setRespawnPosition", types = {NMSBlockPosition.class, boolean.class}) public void setRespawnPosition(NMSBlockPosition blockPosition, boolean b){ NMSWrapper.getInstance().exec(nmsObject, blockPosition, b); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#setScore(int) */ @ReflectiveMethod(name = "setScore", types = {int.class}) public void setScore(int i){ NMSWrapper.getInstance().exec(nmsObject, i); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#t_() */ @ReflectiveMethod(name = "t_", types = {}) public void t_(){ NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityHuman#u(net.minecraft.server.v1_9_R1.Entity) */ @ReflectiveMethod(name = "u", types = {NMSEntity.class}) public boolean u(NMSEntity entity){ return (boolean) NMSWrapper.getInstance().exec(nmsObject, entity); } /** * @see net.minecraft.server.v1_9_R1.EntityHuman#updateAbilities() */ @ReflectiveMethod(name = "updateAbilities", types = {}) public void updateAbilities(){ NMSWrapper.getInstance().exec(nmsObject); } }
apache-2.0
javamind/Mixit2013
src/com/ehret/mixit/adapter/ListTweetAdapter.java
3266
/* * Copyright 2013 Guillaume EHRET * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ehret.mixit.adapter; import android.content.Context; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.BaseAdapter; import android.widget.ImageView; import android.widget.TextView; import com.ehret.mixit.R; import com.ehret.mixit.domain.twitter.Tweet; import java.util.List; /** * Adpater permttant l'affichage de la liste des tweets */ public class ListTweetAdapter extends BaseAdapter { private List<Tweet> tweets; private Context context; public ListTweetAdapter(Context context, List<Tweet> tweets) { this.tweets = tweets; this.context = context; } @Override public int getCount() { return tweets.size(); } @Override public Tweet getItem(int position) { return tweets.get(position); } @Override public long getItemId(int position) { return position; } @Override public View getView(int position, View convertView, ViewGroup parent) { ViewHolder holder; if (convertView == null) { convertView = LayoutInflater.from(context).inflate(R.layout.tweet_item, null); holder = new ViewHolder(); holder.content = (TextView) convertView.findViewById(R.id.tweet_content); holder.userName = (TextView) convertView.findViewById(R.id.tweet_username); holder.user = (TextView) convertView.findViewById(R.id.tweet_user); holder.since = (TextView) convertView.findViewById(R.id.tweet_since); holder.profile_image = (ImageView) 
convertView.findViewById(R.id.tweet_profile_image); convertView.setTag(holder); } else { holder = (ViewHolder) convertView.getTag(); } Tweet tweet = tweets.get(position); holder.user.setText("@" + tweet.getFrom_user()); holder.userName.setText(tweet.getFrom_user_name()); holder.content.setText(tweet.getText()); holder.since.setText(tweet.getCreatedSince(context)); //Recuperation de l'mage liee au profil if (tweet.getImageToDisplay() != null) { holder.profile_image.setImageBitmap(tweet.getImageToDisplay()); } else { holder.profile_image.setImageDrawable(context.getResources().getDrawable(R.drawable.tweetmixit)); } return convertView; } static class ViewHolder { TextView content; TextView userName; TextView user; TextView since; ImageView profile_image; } }
apache-2.0
stevem999/gocd
server/src/test-fast/java/com/thoughtworks/go/server/ui/AgentsViewModelTest.java
6518
/*************************GO-LICENSE-START*********************************
 * Copyright 2014 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.server.ui;

import com.thoughtworks.go.config.ResourceConfig;
import com.thoughtworks.go.domain.AgentInstance;
import com.thoughtworks.go.helper.AgentInstanceMother;
import org.junit.Test;

import java.util.Date;

import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.number.OrderingComparison.lessThan;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;

/**
 * Unit tests for {@code AgentsViewModel}: status sorting, status counting,
 * and the "key:value" filter syntax (resource, name, ip, os, environment,
 * status), including exact-match quoting and malformed-filter handling.
 *
 * All tests run against the four-agent fixture built by {@link #agentsViewModel()}.
 */
public class AgentsViewModelTest {

    @Test
    public void shouldSortByStatusAsc() {
        AgentsViewModel instances = agentsViewModel();
        instances.sortBy(AgentViewModel.STATUS_COMPARATOR, SortOrder.ASC);
        // Pairwise check: each agent's status strictly precedes the next one's.
        for (int i = 1; i < instances.size(); i++) {
            assertThat(instances.get(i - 1).getStatus().compareTo(instances.get(i).getStatus()), is(lessThan(0)));
        }
    }

    @Test
    public void shouldSortByStatusDesc() {
        AgentsViewModel instances = agentsViewModel();
        instances.sortBy(AgentViewModel.STATUS_COMPARATOR, SortOrder.DESC);
        for (int i = 1; i < instances.size(); i++) {
            assertThat(instances.get(i - 1).getStatus().compareTo(instances.get(i).getStatus()), is(greaterThan(0)));
        }
    }

    @Test
    public void shouldReturnTheCorrectCountForAgentStatuses() {
        AgentsViewModel agents = agentsViewModel();
        // Fixture holds: idle + building (enabled), one disabled, one pending.
        assertThat(agents.disabledCount(), is(1));
        assertThat(agents.enabledCount(), is(2));
        assertThat(agents.pendingCount(), is(1));
    }

    @Test
    public void shouldFilterByResources() {
        AgentsViewModel agents = agentsViewModel();
        // "foo" matches the idle agent's resource and "goofooboo" is a partial match.
        agents.filter("resource:Foo");
        assertThat(agents.size(), is(2));
    }

    @Test
    public void shouldFilterOnlyBySingleResource() {
        AgentsViewModel agents = agentsViewModel();
        agents.filter("resource:bar | fooooo");
        assertThat(agents.size(), is(0));
    }

    @Test
    public void shouldFilterByAgentName() {
        AgentsViewModel agents = agentsViewModel();
        agents.filter(String.format("name:%s", AgentInstanceMother.disabled().getHostname()));
        assertThat(agents.size(), is(1));
        agents = agentsViewModel();
        // Partial hostname matches all four fixture agents.
        agents.filter("name:CCeDev");
        assertThat(agents.size(), is(4));
    }

    @Test
    public void shouldFilterByIpAddress() {
        AgentsViewModel agents = agentsViewModel();
        agents.filter("ip:10.");
        assertThat(agents.size(), is(4));
        agents.filter(String.format("ip:%s", AgentInstanceMother.disabled().getIpAddress()));
        assertThat(agents.size(), is(1));
    }

    @Test
    public void shouldFilterByOS() {
        AgentsViewModel agents = agentsViewModel();
        // OS matching is case-insensitive; the fixture's idle agent runs "macos".
        agents.filter("os:Macos");
        assertThat(agents.size(), is(1));
    }

    @Test
    public void shouldFilterByEnvironmentNames() {
        AgentsViewModel agents = agentsViewModel();
        agents.filter("environment:Uat");
        assertThat(agents.size(), is(2));
        agents = agentsViewModel();
        agents.filter("environment:de");
        assertThat(agents.size(), is(1));
    }

    @Test
    public void shouldFilterByResourcesOrStatus() {
        AgentsViewModel agents = agentsViewModel();
        // Comma-separated criteria are OR-ed together.
        agents.filter("resource:foo, status: Pending");
        assertThat(agents.size(), is(3));
    }

    @Test
    public void shouldNotThrowUpAndShouldReturnAllAgentsIfInvalidFilterCriteriaIsPassed() {
        AgentsViewModel agentsViewModel = agentsViewModel();
        try {
            // Unknown filter key: expected to be ignored, keeping all agents.
            agentsViewModel.filter("foo:bar");
            assertThat(agentsViewModel.size(), is(4));
        } catch (Exception e) {
            fail("should not fail.");
        }
    }

    @Test
    public void shouldNotThrowUpIfInvalidFilterFormatIsPassed() {
        AgentsViewModel agentsViewModel = agentsViewModel();
        try {
            agentsViewModel.filter("some_invalid_format");
        } catch (Exception e) {
            fail("should not fail");
        }
    }

    @Test
    public void shouldFilterByStatus() {
        AgentsViewModel agents = agentsViewModel();
        agents.filter("status:building");
        assertThat(agents.size(), is(1));
        assertThat(agents.get(0).isBuilding(), is(true));
        agents = agentsViewModel();
        // Same filter with different casing must behave identically.
        agents.filter("status:Building");
        assertThat(agents.size(), is(1));
        assertThat(agents.get(0).isBuilding(), is(true));
    }

    @Test
    public void shouldFilterWithExactMatch() throws Exception {
        AgentsViewModel agents = agentsViewModel();
        // Double quotes demand an exact resource match, so "goofooboo" is excluded.
        agents.filter("resource:\"Foo\", baz");
        assertThat(agents.size(), is(1));
        assertThat(agents.get(0).getHostname().equals("CCeDev01"), is(true));
    }

    @Test
    public void shouldHandleUnclosedDoubleQuotes() throws Exception {
        AgentsViewModel agents = agentsViewModel();
        agents.filter("resource:\"");
        assertThat(agents.size(), is(0));
    }

    /**
     * Builds the shared fixture: an idle agent (macos, resources foo+bar, env uat),
     * a building agent (resource goofooboo, envs dev+uat), a pending agent,
     * and a disabled agent (env prod).
     */
    private AgentsViewModel agentsViewModel() {
        AgentsViewModel agents = new AgentsViewModel();
        AgentInstance idle = AgentInstanceMother.idle(new Date(), "CCeDev01");
        AgentInstanceMother.updateOS(idle, "macos");
        idle.getResourceConfigs().add(new ResourceConfig("foo"));
        idle.getResourceConfigs().add(new ResourceConfig("bar"));
        agents.add(new AgentViewModel(idle, "uat"));
        AgentInstance building = AgentInstanceMother.building();
        building.getResourceConfigs().add(new ResourceConfig("goofooboo"));
        agents.add(new AgentViewModel(building, "dev", "uat"));
        agents.add(new AgentViewModel(AgentInstanceMother.pending()));
        agents.add(new AgentViewModel(AgentInstanceMother.disabled(), "prod"));
        return agents;
    }
}
apache-2.0
Pkcs11Interop/Pkcs11Interop
src/Pkcs11Interop/HighLevelAPI/Pkcs11InteropFactories.cs
7205
/*
 * Copyright 2012-2021 The Pkcs11Interop Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Written for the Pkcs11Interop project by:
 * Jaroslav IMRICH <jimrich@jimrich.sk>
 */

using Net.Pkcs11Interop.HighLevelAPI.Factories;

// Note: Code in this file is maintained manually.

namespace Net.Pkcs11Interop.HighLevelAPI
{
    /// <summary>
    /// Factories to be used by Developer and Pkcs11Interop library
    /// </summary>
    public class Pkcs11InteropFactories
    {
        /// <summary>
        /// Factory for creation of IPkcs11Library instances
        /// </summary>
        protected IPkcs11LibraryFactory _pkcs11LibraryFactory = null;

        /// <summary>
        /// Developer uses this factory to create correct IPkcs11Library instances possibly extended with vendor specific methods.
        /// </summary>
        public IPkcs11LibraryFactory Pkcs11LibraryFactory
        {
            get
            {
                return _pkcs11LibraryFactory;
            }
        }

        /// <summary>
        /// Factory for creation of ISlot instances
        /// </summary>
        protected ISlotFactory _slotFactory = null;

        /// <summary>
        /// Pkcs11Interop uses this factory to create ISlot instances possibly extended with vendor specific methods.
        /// </summary>
        public ISlotFactory SlotFactory
        {
            get
            {
                return _slotFactory;
            }
        }

        /// <summary>
        /// Factory for creation of ISession instances
        /// </summary>
        protected ISessionFactory _sessionFactory = null;

        /// <summary>
        /// Pkcs11Interop uses this factory to create ISession instances possibly extended with vendor specific methods.
        /// </summary>
        public ISessionFactory SessionFactory
        {
            get
            {
                return _sessionFactory;
            }
        }

        /// <summary>
        /// Factory for creation of IObjectAttribute instances
        /// </summary>
        protected IObjectAttributeFactory _objectAttributeFactory = null;

        /// <summary>
        /// Developer uses this factory to create correct IObjectAttribute instances.
        /// </summary>
        public IObjectAttributeFactory ObjectAttributeFactory
        {
            get
            {
                return _objectAttributeFactory;
            }
        }

        /// <summary>
        /// Factory for creation of IObjectHandle instances
        /// </summary>
        protected IObjectHandleFactory _objectHandleFactory = null;

        /// <summary>
        /// Developer rarely uses this factory to create correct IObjectHandle instances.
        /// </summary>
        public IObjectHandleFactory ObjectHandleFactory
        {
            get
            {
                return _objectHandleFactory;
            }
        }

        /// <summary>
        /// Factory for creation of IMechanism instances
        /// </summary>
        protected IMechanismFactory _mechanismFactory = null;

        /// <summary>
        /// Developer uses this factory to create correct IMechanism instances.
        /// </summary>
        public IMechanismFactory MechanismFactory
        {
            get
            {
                return _mechanismFactory;
            }
        }

        /// <summary>
        /// Factory for creation of IMechanismParams instances
        /// </summary>
        protected IMechanismParamsFactory _mechanismParamsFactory = null;

        /// <summary>
        /// Developer uses this factory to create correct IMechanismParams instances.
        /// </summary>
        public IMechanismParamsFactory MechanismParamsFactory
        {
            get
            {
                return _mechanismParamsFactory;
            }
        }

        /// <summary>
        /// Initializes new instance of Pkcs11Factories class with default factories
        /// </summary>
        public Pkcs11InteropFactories()
            : this(null, null, null, null, null, null, null)
        {
            // Chains to the main constructor, which substitutes the default
            // factory for every null argument, so the default wiring lives
            // in exactly one place.
        }

        /// <summary>
        /// Initializes new instance of Pkcs11Factories class with custom or default factories
        /// </summary>
        /// <param name="pkcs11LibraryFactory">Custom factory for creation of IPkcs11Library instances or null for the default factory</param>
        /// <param name="slotFactory">Custom factory for creation of ISlot instances or null for the default factory</param>
        /// <param name="sessionFactory">Custom factory for creation of ISession instances or null for the default factory</param>
        /// <param name="objectAttributeFactory">Custom factory for creation of IObjectAttribute instances or null for the default factory</param>
        /// <param name="objectHandleFactory">Custom factory for creation of IObjectHandle instances or null for the default factory</param>
        /// <param name="mechanismFactory">Custom factory for creation of IMechanism instances or null for the default factory</param>
        /// <param name="mechanismParamsFactory">Custom factory for creation of IMechanismParams instances or null for the default factory</param>
        public Pkcs11InteropFactories(IPkcs11LibraryFactory pkcs11LibraryFactory, ISlotFactory slotFactory, ISessionFactory sessionFactory, IObjectAttributeFactory objectAttributeFactory, IObjectHandleFactory objectHandleFactory, IMechanismFactory mechanismFactory, IMechanismParamsFactory mechanismParamsFactory)
        {
            // Null-coalescing expresses the "custom or default" contract directly.
            _pkcs11LibraryFactory = pkcs11LibraryFactory ?? new Pkcs11LibraryFactory();
            _slotFactory = slotFactory ?? new SlotFactory();
            _sessionFactory = sessionFactory ?? new SessionFactory();
            _objectAttributeFactory = objectAttributeFactory ?? new ObjectAttributeFactory();
            _objectHandleFactory = objectHandleFactory ?? new ObjectHandleFactory();
            _mechanismFactory = mechanismFactory ?? new MechanismFactory();
            _mechanismParamsFactory = mechanismParamsFactory ?? new MechanismParamsFactory();
        }
    }
}
apache-2.0
augustogava/erp
admin/ajax_com/contas_receber_acao.php
507
<?php include "../includes/Main.class.php"; // chama a classe principal $Main = new Main(); $Main->Seguranca->verificaLogado(); if($_REQUEST["acao"] == "pagar"){ $Main->Fluxo->pagarFluxo($_GET["id"], $_GET["tipoPagamento"], $_GET["ocorrencia"], $_GET["valor"]); }else if($_REQUEST["acao"] == "descontar"){ $Main->Fluxo->descontarFluxo($_GET["id"], $_GET["tipoPagamento"], $_GET["ocorrencia"], $_GET["valor"]); }else if($_REQUEST["acao"] == "cancelar"){ $Main->Fluxo->cancelarFluxo($_GET["id"]); } ?>
apache-2.0
ypxu/sudoku
sudoku_py/utils.py
1037
#!/usr/bin/env python """ utils functions to read and write of sudoku file sample sudoku: 0,3,5,2,9,0,8,6,4 0,8,2,4,1,0,7,0,3 7,6,4,3,8,0,0,9,0 2,1,8,7,3,9,0,4,0 0,0,0,8,0,4,2,3,0 0,4,3,0,5,2,9,7,0 4,0,6,5,7,1,0,0,9 3,5,9,0,2,8,4,1,7 8,0,0,9,0,0,5,2,6 """ import os def read_sudoku(sudoku_file): """ assume sudoku is stored in a file. sudoku_file is a path or filename to that file. """ sudoku = [] if not sudoku_file or not os.path.exists(sudoku_file): raise Exception('Invalid Sudoku file') with open(sudoku_file, 'rb') as file: for sudoku_line in file: if sudoku_file: sudoku_line = sudoku_line.strip() sudoku_line = sudoku_line.split(',') sudoku.append(sudoku_line) return sudoku def write_sudoku(sudoku, sudoku_file): """ write 2D array of sudoku into a file, named from sudoku_file """ with open(sudoku_file, 'wb') as file: for r in sudoku: file.write('%s\n' % ','.join(r))
apache-2.0
mhurne/aws-sdk-java
aws-java-sdk-cloudfront/src/main/java/com/amazonaws/services/cloudfront/model/transform/ListCloudFrontOriginAccessIdentitiesRequestMarshaller.java
2911
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package com.amazonaws.services.cloudfront.model.transform;

import static com.amazonaws.util.StringUtils.UTF8;

import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.amazonaws.AmazonClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.cloudfront.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.BinaryUtils;
import com.amazonaws.util.StringInputStream;
import com.amazonaws.util.StringUtils;
import com.amazonaws.util.IdempotentUtils;
import com.amazonaws.util.XMLWriter;

/**
 * ListCloudFrontOriginAccessIdentitiesRequest Marshaller
 *
 * Converts a {@link ListCloudFrontOriginAccessIdentitiesRequest} into a
 * signable HTTP GET request against the fixed CloudFront 2016-01-28
 * origin-access-identity resource. Generated-style SDK code.
 */
public class ListCloudFrontOriginAccessIdentitiesRequestMarshaller
        implements
        Marshaller<Request<ListCloudFrontOriginAccessIdentitiesRequest>, ListCloudFrontOriginAccessIdentitiesRequest> {

    /**
     * Marshalls the given request object into an HTTP request.
     *
     * @param listCloudFrontOriginAccessIdentitiesRequest the model request;
     *        must not be null
     * @return the populated HTTP request (GET, with optional Marker/MaxItems
     *         query parameters)
     * @throws AmazonClientException if a null request is passed in
     */
    public Request<ListCloudFrontOriginAccessIdentitiesRequest> marshall(
            ListCloudFrontOriginAccessIdentitiesRequest listCloudFrontOriginAccessIdentitiesRequest) {

        if (listCloudFrontOriginAccessIdentitiesRequest == null) {
            throw new AmazonClientException(
                    "Invalid argument passed to marshall(...)");
        }

        Request<ListCloudFrontOriginAccessIdentitiesRequest> request = new DefaultRequest<ListCloudFrontOriginAccessIdentitiesRequest>(
                listCloudFrontOriginAccessIdentitiesRequest, "AmazonCloudFront");

        request.setHttpMethod(HttpMethodName.GET);

        // Fixed, versioned resource path; this operation takes no path parameters.
        String uriResourcePath = "/2016-01-28/origin-access-identity/cloudfront";

        request.setResourcePath(uriResourcePath);

        // Pagination parameters are only added when the caller set them.
        if (listCloudFrontOriginAccessIdentitiesRequest.getMarker() != null) {
            request.addParameter("Marker", StringUtils
                    .fromString(listCloudFrontOriginAccessIdentitiesRequest
                            .getMarker()));
        }

        if (listCloudFrontOriginAccessIdentitiesRequest.getMaxItems() != null) {
            request.addParameter("MaxItems", StringUtils
                    .fromString(listCloudFrontOriginAccessIdentitiesRequest
                            .getMaxItems()));
        }

        return request;
    }
}
apache-2.0
codarchlab/idai-field-client
desktop/src/app/components/navbar/taskbar-update.component.ts
2027
import {Component, NgZone} from '@angular/core'; import {SettingsProvider} from '../../core/settings/settings-provider'; const ipcRenderer = typeof window !== 'undefined' ? window.require('electron').ipcRenderer : require('electron').ipcRenderer; @Component({ selector: 'taskbar-update', templateUrl: './taskbar-update.html' }) /** * @author Thomas Kleinke */ export class TaskbarUpdateComponent { public version: string; public progressPercent: number = -1; public downloadComplete: boolean = false; public downloadInterrupted: boolean = false; public downloadError: boolean = false; private errorTimeout: any = undefined; constructor(private settingsProvider: SettingsProvider, zone: NgZone) { ipcRenderer.on('downloadProgress', (event: any, downloadInfo: any) => { zone.run(() => { this.progressPercent = Math.round(downloadInfo.progressPercent); this.version = downloadInfo.version; if (this.progressPercent === 100) this.waitForError(zone); }); }); ipcRenderer.on('updateDownloaded', () => { zone.run(() => { this.stopWaitingForError(); this.downloadComplete = true; }); }); ipcRenderer.on('downloadInterrupted', () => { zone.run(() => { if (this.progressPercent > -1) { this.stopWaitingForError(); this.downloadInterrupted = true; } }); }); } public isAutoUpdateActive = () => this.settingsProvider.getSettings().isAutoUpdateActive; public waitForError(zone: NgZone) { this.errorTimeout = setTimeout(() => { zone.run(() => { this.downloadError = true; }); }, 30000); } private stopWaitingForError() { if (this.errorTimeout) clearTimeout(this.errorTimeout); this.errorTimeout = undefined; } }
apache-2.0
sbryzak/DeltaSpike
deltaspike/core/api/src/main/java/org/apache/deltaspike/core/util/metadata/builder/AnnotationStore.java
2043
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.deltaspike.core.util.metadata.builder; import java.lang.annotation.Annotation; import java.util.Map; import java.util.Set; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static java.util.Collections.unmodifiableSet; /** * A helper class used to hold annotations on a type or member. */ class AnnotationStore { private final Map<Class<? extends Annotation>, Annotation> annotationMap; private final Set<Annotation> annotationSet; AnnotationStore(Map<Class<? extends Annotation>, Annotation> annotationMap, Set<Annotation> annotationSet) { this.annotationMap = annotationMap; this.annotationSet = unmodifiableSet(annotationSet); } AnnotationStore() { this.annotationMap = emptyMap(); this.annotationSet = emptySet(); } <T extends Annotation> T getAnnotation(Class<T> annotationType) { return annotationType.cast(annotationMap.get(annotationType)); } Set<Annotation> getAnnotations() { return annotationSet; } boolean isAnnotationPresent(Class<? extends Annotation> annotationType) { return annotationMap.containsKey(annotationType); } }
apache-2.0
dimm0/scidrive
src/edu/jhu/pha/vospace/protocol/ProtocolHandler.java
1282
/******************************************************************************* * Copyright 2013 Johns Hopkins University * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package edu.jhu.pha.vospace.protocol; import edu.jhu.pha.vospace.rest.JobDescription; /** * This interface represents the implementation details of a protocol * involved in a data transfer */ public interface ProtocolHandler { /** * Return the registered identifier for this protocol * @return */ public String getUri(); /** * Invoke the protocol handler and transfer data * @param job * @return */ public void invoke(JobDescription job) throws Exception; }
apache-2.0
openprocurement/openprocurement.edge
setup.py
1699
import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(here, 'README.rst')) as f: README = f.read() requires = [ 'cornice', 'gevent', 'pyramid_exclog', 'setuptools', 'couchdb', 'couchapp', 'pycrypto', 'openprocurement_client', 'munch', 'tzlocal', 'pyyaml', 'psutil', 'iso8601' ] test_requires = requires + [ 'requests', 'webtest', 'python-coveralls', 'nose', 'mock' ] entry_points = { 'paste.app_factory': [ 'main = openprocurement.edge.main:main' ], 'console_scripts': [ 'edge_data_bridge = openprocurement.edge.databridge:main' ] } setup(name='openprocurement.edge', version='1.0.0', description='openprocurement.edge', long_description=README, classifiers=[ "Framework :: Pylons", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application" ], keywords="web services", author='Quintagroup, Ltd.', author_email='info@quintagroup.com', license='Apache License 2.0', url='https://github.com/openprocurement/openprocurement.edge', packages=find_packages(exclude=['ez_setup']), namespace_packages=['openprocurement'], include_package_data=True, zip_safe=False, install_requires=requires, tests_require=test_requires, extras_require={'test': test_requires}, test_suite="openprocurement.edge.tests.main.suite", entry_points=entry_points)
apache-2.0
mrtequino/JSW
spring-boot/ApiGeneradoAutomaticamente/src/main/java/com/ltg/apigenaut/security/JwtAuthenticationEntryPoint.java
1062
package com.ltg.apigenaut.security; import java.io.IOException; import java.io.Serializable; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.springframework.security.core.AuthenticationException; import org.springframework.security.web.AuthenticationEntryPoint; import org.springframework.stereotype.Component; @Component public class JwtAuthenticationEntryPoint implements AuthenticationEntryPoint, Serializable { private static final long serialVersionUID = -8970718410437077606L; @Override public void commence(HttpServletRequest request, HttpServletResponse response, AuthenticationException authException) throws IOException { // This is invoked when user tries to access a secured REST resource without supplying any credentials // We should just send a 401 Unauthorized response because there is no 'login page' to redirect to response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized"); } }
apache-2.0
objectiser/camel
platforms/spring-boot/components-starter/camel-telegram-starter/src/main/java/org/apache/camel/component/telegram/springboot/TelegramComponentConfiguration.java
4047
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.telegram.springboot;

import javax.annotation.Generated;
import org.apache.camel.spring.boot.ComponentConfigurationPropertiesCommon;
import org.springframework.boot.context.properties.ConfigurationProperties;

/**
 * The telegram component provides access to the Telegram Bot API.
 *
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
// NOTE(review): this class is generated from the component metadata by
// camel-package-maven-plugin; change the source metadata, not this file.
@Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo")
@ConfigurationProperties(prefix = "camel.component.telegram")
public class TelegramComponentConfiguration
        extends
            ComponentConfigurationPropertiesCommon {

    /**
     * Whether to enable auto configuration of the telegram component. This is
     * enabled by default.
     */
    private Boolean enabled;
    /**
     * The default Telegram authorization token to be used when the information
     * is not provided in the endpoints.
     */
    private String authorizationToken;
    /**
     * Whether the component should use basic property binding (Camel 2.x) or
     * the newer property binding with additional capabilities
     */
    private Boolean basicPropertyBinding = false;
    /**
     * Whether the producer should be started lazy (on the first message). By
     * starting lazy you can use this to allow CamelContext and routes to
     * startup in situations where a producer may otherwise fail during starting
     * and cause the route to fail being started. By deferring this startup to
     * be lazy then the startup failure can be handled during routing messages
     * via Camel's routing error handlers. Beware that when the first message is
     * processed then creating and starting the producer may take a little time
     * and prolong the total processing time of the processing.
     */
    private Boolean lazyStartProducer = false;
    /**
     * Allows for bridging the consumer to the Camel routing Error Handler,
     * which mean any exceptions occurred while the consumer is trying to pickup
     * incoming messages, or the likes, will now be processed as a message and
     * handled by the routing Error Handler. By default the consumer will use
     * the org.apache.camel.spi.ExceptionHandler to deal with exceptions, that
     * will be logged at WARN or ERROR level and ignored.
     */
    private Boolean bridgeErrorHandler = false;

    public String getAuthorizationToken() {
        return authorizationToken;
    }

    public void setAuthorizationToken(String authorizationToken) {
        this.authorizationToken = authorizationToken;
    }

    public Boolean getBasicPropertyBinding() {
        return basicPropertyBinding;
    }

    public void setBasicPropertyBinding(Boolean basicPropertyBinding) {
        this.basicPropertyBinding = basicPropertyBinding;
    }

    public Boolean getLazyStartProducer() {
        return lazyStartProducer;
    }

    public void setLazyStartProducer(Boolean lazyStartProducer) {
        this.lazyStartProducer = lazyStartProducer;
    }

    public Boolean getBridgeErrorHandler() {
        return bridgeErrorHandler;
    }

    public void setBridgeErrorHandler(Boolean bridgeErrorHandler) {
        this.bridgeErrorHandler = bridgeErrorHandler;
    }
}
apache-2.0
clintmanning/new-empty
engine/src/main/java/org/camunda/bpm/engine/impl/persistence/entity/DeploymentEntity.java
4171
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.impl.persistence.entity; import java.io.Serializable; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import org.camunda.bpm.engine.impl.context.Context; import org.camunda.bpm.engine.impl.db.PersistentObject; import org.camunda.bpm.engine.repository.Deployment; /** * @author Tom Baeyens */ public class DeploymentEntity implements Serializable, Deployment, PersistentObject { private static final long serialVersionUID = 1L; protected String id; protected String name; protected Map<String, ResourceEntity> resources; protected Date deploymentTime; protected boolean validatingSchema = true; protected boolean isNew; /** * Will only be used during actual deployment to pass deployed artifacts (eg process definitions). * Will be null otherwise. 
*/ protected Map<Class<?>, List<Object>> deployedArtifacts; public ResourceEntity getResource(String resourceName) { return getResources().get(resourceName); } public void addResource(ResourceEntity resource) { if (resources==null) { resources = new HashMap<String, ResourceEntity>(); } resources.put(resource.getName(), resource); } // lazy loading ///////////////////////////////////////////////////////////// public Map<String, ResourceEntity> getResources() { if (resources==null && id!=null) { List<ResourceEntity> resourcesList = Context .getCommandContext() .getResourceManager() .findResourcesByDeploymentId(id); resources = new HashMap<String, ResourceEntity>(); for (ResourceEntity resource: resourcesList) { resources.put(resource.getName(), resource); } } return resources; } public Object getPersistentState() { // properties of this entity are immutable // so always the same value is returned // so never will an update be issued for a DeploymentEntity return DeploymentEntity.class; } // Deployed artifacts manipulation ////////////////////////////////////////// public void addDeployedArtifact(Object deployedArtifact) { if (deployedArtifacts == null) { deployedArtifacts = new HashMap<Class<?>, List<Object>>(); } Class<?> clazz = deployedArtifact.getClass(); List<Object> artifacts = deployedArtifacts.get(clazz); if (artifacts == null) { artifacts = new ArrayList<Object>(); deployedArtifacts.put(clazz, artifacts); } artifacts.add(deployedArtifact); } @SuppressWarnings("unchecked") public <T> List<T> getDeployedArtifacts(Class<T> clazz) { return (List<T>) deployedArtifacts.get(clazz); } // getters and setters ////////////////////////////////////////////////////// public String getId() { return id; } public void setId(String id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public void setResources(Map<String, ResourceEntity> resources) { this.resources = resources; } public Date getDeploymentTime() { 
return deploymentTime; } public void setDeploymentTime(Date deploymentTime) { this.deploymentTime = deploymentTime; } public boolean isValidatingSchema() { return validatingSchema; } public void setValidatingSchema(boolean validatingSchema) { this.validatingSchema = validatingSchema; } public boolean isNew() { return isNew; } public void setNew(boolean isNew) { this.isNew = isNew; } }
apache-2.0
darghex/PARCES-WS
db.py
937
from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from main import engine

Base = declarative_base()
# Reflect the live database schema into Base.metadata so the classes below can
# be mapped against tables that already exist, without redeclaring columns.
Base.metadata.reflect(engine)

from sqlalchemy.orm import relationship, backref


# Each class maps one reflected table 1:1 via __table__; column definitions
# come from the database itself. NOTE(review): class names are lowercase to
# mirror the table names — presumably intentional for this project's style;
# confirm before renaming.

class instancias_curso(Base):
    __table__ = Base.metadata.tables['instancias_curso']


class actividades(Base):
    __table__ = Base.metadata.tables['actividades']


class asignaciones(Base):
    __table__ = Base.metadata.tables['asignaciones']


class propuestas_matricula(Base):
    __table__ = Base.metadata.tables['propuestas_matricula']


class comentarios_propuesta(Base):
    __table__ = Base.metadata.tables['comentarios_propuesta']


class calificaciones(Base):
    __table__ = Base.metadata.tables['calificaciones']


class comentarios_instancia_curso(Base):
    __table__ = Base.metadata.tables['comentarios_instancia_curso']


class asistencias(Base):
    __table__ = Base.metadata.tables['asistencias']
apache-2.0
aelij/roslyn
src/Features/CSharp/Portable/ExtractMethod/CSharpMethodExtractor.Analyzer.cs
7632
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.ExtractMethod;
using Microsoft.CodeAnalysis.Operations;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.CSharp.ExtractMethod
{
    internal partial class CSharpMethodExtractor : MethodExtractor
    {
        /// <summary>
        /// C#-specific data-flow analyzer for extract-method: decides which
        /// variables become parameters, which become the return value, and
        /// what their types/nullability should be.
        /// </summary>
        private class CSharpAnalyzer : Analyzer
        {
            // Trivia kinds that are ignored when comparing symbols/declarations
            // (pure layout noise: spaces and newlines).
            private static readonly HashSet<int> s_nonNoisySyntaxKindSet = new HashSet<int>(new int[] { (int)SyntaxKind.WhitespaceTrivia, (int)SyntaxKind.EndOfLineTrivia });

            /// <summary>Convenience entry point: build an analyzer and run it.</summary>
            public static Task<AnalyzerResult> AnalyzeAsync(SelectionResult selectionResult, CancellationToken cancellationToken)
            {
                var analyzer = new CSharpAnalyzer(selectionResult, cancellationToken);
                return analyzer.AnalyzeAsync();
            }

            public CSharpAnalyzer(SelectionResult selectionResult, CancellationToken cancellationToken) : base(selectionResult, cancellationToken)
            {
            }

            /// <summary>
            /// Creates a <see cref="VariableInfo"/> for a symbol, treating
            /// whitespace/newline trivia as non-significant.
            /// </summary>
            protected override VariableInfo CreateFromSymbol(Compilation compilation, ISymbol symbol, ITypeSymbol type, VariableStyle style, bool variableDeclared)
            {
                return CreateFromSymbolCommon<LocalDeclarationStatementSyntax>(compilation, symbol, type, style, s_nonNoisySyntaxKindSet);
            }

            /// <summary>
            /// Picks the variable to convert into the extracted method's return
            /// value: a lone "out" parameter wins, else a lone "ref" parameter,
            /// else none (-1).
            /// </summary>
            protected override int GetIndexOfVariableInfoToUseAsReturnValue(IList<VariableInfo> variableInfo)
            {
                var numberOfOutParameters = 0;
                var numberOfRefParameters = 0;

                var outSymbolIndex = -1;
                var refSymbolIndex = -1;

                for (var i = 0; i < variableInfo.Count; i++)
                {
                    var variable = variableInfo[i];

                    // there should be no-one set as return value yet
                    Contract.ThrowIfTrue(variable.UseAsReturnValue);

                    if (!variable.CanBeUsedAsReturnValue)
                    {
                        continue;
                    }

                    // check modifier
                    if (variable.ParameterModifier == ParameterBehavior.Ref)
                    {
                        numberOfRefParameters++;
                        refSymbolIndex = i;
                    }
                    else if (variable.ParameterModifier == ParameterBehavior.Out)
                    {
                        numberOfOutParameters++;
                        outSymbolIndex = i;
                    }
                }

                // if there is only one "out" or "ref", that will be converted to return statement.
                if (numberOfOutParameters == 1)
                {
                    return outSymbolIndex;
                }

                if (numberOfRefParameters == 1)
                {
                    return refSymbolIndex;
                }

                return -1;
            }

            /// <summary>
            /// Resolves the type of a query range variable by speculatively
            /// binding its name at the start of the selection. Returns null for
            /// error types; prefers the converted type when the natural type is
            /// System.Object.
            /// </summary>
            protected override ITypeSymbol GetRangeVariableType(SemanticModel model, IRangeVariableSymbol symbol)
            {
                var info = model.GetSpeculativeTypeInfo(this.SelectionResult.FinalSpan.Start, SyntaxFactory.ParseName(symbol.Name), SpeculativeBindingOption.BindAsExpression);
                if (Microsoft.CodeAnalysis.Shared.Extensions.ISymbolExtensions.IsErrorType(info.Type))
                {
                    return null;
                }

                return info.Type == null || info.Type.SpecialType == Microsoft.CodeAnalysis.SpecialType.System_Object
                    ? info.Type
                    : info.ConvertedType;
            }

            /// <summary>
            /// Returns the first/last node pair to hand to region data-flow
            /// analysis: a single node for one-statement (or nested) selections,
            /// otherwise the first and last statements directly under the
            /// common container.
            /// </summary>
            protected override Tuple<SyntaxNode, SyntaxNode> GetFlowAnalysisNodeRange()
            {
                var csharpSelectionResult = this.SelectionResult as CSharpSelectionResult;

                var first = csharpSelectionResult.GetFirstStatement();
                var last = csharpSelectionResult.GetLastStatement();

                // single statement case
                if (first == last ||
                    first.Span.Contains(last.Span))
                {
                    return new Tuple<SyntaxNode, SyntaxNode>(first, first);
                }

                // multiple statement case
                var firstUnderContainer = csharpSelectionResult.GetFirstStatementUnderContainer();
                var lastUnderContainer = csharpSelectionResult.GetLastStatementUnderContainer();
                return new Tuple<SyntaxNode, SyntaxNode>(firstUnderContainer, lastUnderContainer);
            }

            /// <summary>True if any jump-out-of-region statement is a return.</summary>
            protected override bool ContainsReturnStatementInSelectedCode(IEnumerable<SyntaxNode> jumpOutOfRegionStatements)
            {
                return jumpOutOfRegionStatements.Where(n => n is ReturnStatementSyntax).Any();
            }

            /// <summary>
            /// Readonly fields may only be written inside a constructor, so
            /// they are allowed only when the selection is NOT inside one.
            /// </summary>
            protected override bool ReadOnlyFieldAllowed()
            {
                var scope = this.SelectionResult.GetContainingScopeOf<ConstructorDeclarationSyntax>();
                return scope == null;
            }

            /// <summary>
            /// Computes a symbol's type, tightening a nullable-annotated local
            /// or parameter to not-annotated when every reference to it inside
            /// the selection has a not-null flow state.
            /// </summary>
            protected override ITypeSymbol GetSymbolType(SemanticModel semanticModel, ISymbol symbol)
            {
                var selectionOperation = semanticModel.GetOperation(this.SelectionResult.GetContainingScope());

                switch (symbol)
                {
                    case ILocalSymbol localSymbol when localSymbol.NullableAnnotation == NullableAnnotation.Annotated:
                    case IParameterSymbol parameterSymbol when parameterSymbol.NullableAnnotation == NullableAnnotation.Annotated:
                        // For local symbols and parameters, we can check what the flow state
                        // for references to the symbols are and determine if we can change
                        // the nullability to a less permissive state.
                        var references = selectionOperation.DescendantsAndSelf()
                            .Where(IsSymbolReferencedByOperation);

                        if (AreAllReferencesNotNull(references))
                        {
                            return base.GetSymbolType(semanticModel, symbol).WithNullability(NullableAnnotation.NotAnnotated);
                        }

                        return base.GetSymbolType(semanticModel, symbol);

                    default:
                        return base.GetSymbolType(semanticModel, symbol);
                }

                // All references must have a NotNull flow state for the tightening above.
                bool AreAllReferencesNotNull(IEnumerable<IOperation> references)
                    => references.All(r => semanticModel.GetTypeInfo(r.Syntax).Nullability.FlowState == NullableFlowState.NotNull);

                // A reference is a direct local/parameter reference, or (recursively)
                // the target of an assignment to one.
                bool IsSymbolReferencedByOperation(IOperation operation)
                    => operation switch
                    {
                        ILocalReferenceOperation localReference => localReference.Local.Equals(symbol),
                        IParameterReferenceOperation parameterReference => parameterReference.Parameter.Equals(symbol),
                        IAssignmentOperation assignment => IsSymbolReferencedByOperation(assignment.Target),
                        _ => false
                    };
            }
        }
    }
}
apache-2.0
spring-projects/spring-data-examples
jpa/deferred/src/main/java/example/service/Customer1366Service.java
225
package example.service;

import example.repo.Customer1366Repository;

import org.springframework.stereotype.Service;

/**
 * Minimal Spring {@code @Service} whose only visible role is to depend on
 * {@link Customer1366Repository} through constructor injection.
 */
@Service
public class Customer1366Service {

	// The injected repository is intentionally unused: declaring the dependency
	// is enough to force Spring to instantiate the repository bean.
	// NOTE(review): presumably a bootstrap/smoke-test fixture for deferred
	// repository initialization — confirm against the example's README.
	public Customer1366Service(Customer1366Repository repo) {}
}
apache-2.0
eagleamon/home-assistant
homeassistant/components/mysensors.py
15305
""" Connect to a MySensors gateway via pymysensors API. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.mysensors/ """ import logging import os import socket import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.bootstrap import setup_component from homeassistant.components.mqtt import (valid_publish_topic, valid_subscribe_topic) from homeassistant.const import (ATTR_BATTERY_LEVEL, CONF_NAME, CONF_OPTIMISTIC, EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, STATE_OFF, STATE_ON) from homeassistant.helpers import discovery from homeassistant.loader import get_component _LOGGER = logging.getLogger(__name__) ATTR_NODE_ID = 'node_id' ATTR_CHILD_ID = 'child_id' ATTR_DESCRIPTION = 'description' ATTR_DEVICE = 'device' CONF_BAUD_RATE = 'baud_rate' CONF_DEVICE = 'device' CONF_DEBUG = 'debug' CONF_GATEWAYS = 'gateways' CONF_PERSISTENCE = 'persistence' CONF_PERSISTENCE_FILE = 'persistence_file' CONF_TCP_PORT = 'tcp_port' CONF_TOPIC_IN_PREFIX = 'topic_in_prefix' CONF_TOPIC_OUT_PREFIX = 'topic_out_prefix' CONF_RETAIN = 'retain' CONF_VERSION = 'version' DEFAULT_VERSION = 1.4 DEFAULT_BAUD_RATE = 115200 DEFAULT_TCP_PORT = 5003 DOMAIN = 'mysensors' MYSENSORS_GATEWAYS = 'mysensors_gateways' MQTT_COMPONENT = 'mqtt' REQUIREMENTS = [ 'https://github.com/theolind/pymysensors/archive/' '0b705119389be58332f17753c53167f551254b6c.zip#pymysensors==0.8'] def is_socket_address(value): """Validate that value is a valid address.""" try: socket.getaddrinfo(value, None) return value except OSError: raise vol.Invalid('Device is not a valid domain name or ip address') def has_parent_dir(value): """Validate that value is in an existing directory which is writetable.""" parent = os.path.dirname(os.path.realpath(value)) is_dir_writable = os.path.isdir(parent) and os.access(parent, os.W_OK) if not is_dir_writable: raise vol.Invalid( '{} directory does not exist or is not 
writetable'.format(parent)) return value def has_all_unique_files(value): """Validate that all persistence files are unique and set if any is set.""" persistence_files = [ gateway.get(CONF_PERSISTENCE_FILE) for gateway in value] if None in persistence_files and any( name is not None for name in persistence_files): raise vol.Invalid( 'persistence file name of all devices must be set if any is set') if not all(name is None for name in persistence_files): schema = vol.Schema(vol.Unique()) schema(persistence_files) return value CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ vol.Required(CONF_GATEWAYS): vol.All( cv.ensure_list, has_all_unique_files, [{ vol.Required(CONF_DEVICE): vol.Any(cv.isdevice, MQTT_COMPONENT, is_socket_address), vol.Optional(CONF_PERSISTENCE_FILE): vol.All(cv.string, has_parent_dir), vol.Optional( CONF_BAUD_RATE, default=DEFAULT_BAUD_RATE): cv.positive_int, vol.Optional( CONF_TCP_PORT, default=DEFAULT_TCP_PORT): cv.port, vol.Optional( CONF_TOPIC_IN_PREFIX, default=''): valid_subscribe_topic, vol.Optional( CONF_TOPIC_OUT_PREFIX, default=''): valid_publish_topic, }] ), vol.Optional(CONF_DEBUG, default=False): cv.boolean, vol.Optional(CONF_OPTIMISTIC, default=False): cv.boolean, vol.Optional(CONF_PERSISTENCE, default=True): cv.boolean, vol.Optional(CONF_RETAIN, default=True): cv.boolean, vol.Optional(CONF_VERSION, default=DEFAULT_VERSION): vol.Coerce(float), }) }, extra=vol.ALLOW_EXTRA) def setup(hass, config): """Setup the MySensors component.""" import mysensors.mysensors as mysensors version = config[DOMAIN].get(CONF_VERSION) persistence = config[DOMAIN].get(CONF_PERSISTENCE) def setup_gateway(device, persistence_file, baud_rate, tcp_port, in_prefix, out_prefix): """Return gateway after setup of the gateway.""" if device == MQTT_COMPONENT: if not setup_component(hass, MQTT_COMPONENT, config): return mqtt = get_component(MQTT_COMPONENT) retain = config[DOMAIN].get(CONF_RETAIN) def pub_callback(topic, payload, qos, retain): """Call mqtt publish 
function.""" mqtt.publish(hass, topic, payload, qos, retain) def sub_callback(topic, callback, qos): """Call mqtt subscribe function.""" mqtt.subscribe(hass, topic, callback, qos) gateway = mysensors.MQTTGateway( pub_callback, sub_callback, event_callback=None, persistence=persistence, persistence_file=persistence_file, protocol_version=version, in_prefix=in_prefix, out_prefix=out_prefix, retain=retain) else: try: socket.getaddrinfo(device, None) # valid ip address gateway = mysensors.TCPGateway( device, event_callback=None, persistence=persistence, persistence_file=persistence_file, protocol_version=version, port=tcp_port) except OSError: # invalid ip address gateway = mysensors.SerialGateway( device, event_callback=None, persistence=persistence, persistence_file=persistence_file, protocol_version=version, baud=baud_rate) gateway.metric = hass.config.units.is_metric gateway.debug = config[DOMAIN].get(CONF_DEBUG) optimistic = config[DOMAIN].get(CONF_OPTIMISTIC) gateway = GatewayWrapper(gateway, optimistic, device) # pylint: disable=attribute-defined-outside-init gateway.event_callback = gateway.callback_factory() def gw_start(event): """Callback to trigger start of gateway and any persistence.""" gateway.start() hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, lambda event: gateway.stop()) if persistence: for node_id in gateway.sensors: gateway.event_callback('persistence', node_id) hass.bus.listen_once(EVENT_HOMEASSISTANT_START, gw_start) return gateway gateways = hass.data.get(MYSENSORS_GATEWAYS) if gateways is not None: _LOGGER.error( '%s already exists in %s, will not setup %s component', MYSENSORS_GATEWAYS, hass.data, DOMAIN) return False # Setup all devices from config gateways = [] conf_gateways = config[DOMAIN][CONF_GATEWAYS] for index, gway in enumerate(conf_gateways): device = gway[CONF_DEVICE] persistence_file = gway.get( CONF_PERSISTENCE_FILE, hass.config.path('mysensors{}.pickle'.format(index + 1))) baud_rate = gway.get(CONF_BAUD_RATE) tcp_port = 
gway.get(CONF_TCP_PORT) in_prefix = gway.get(CONF_TOPIC_IN_PREFIX) out_prefix = gway.get(CONF_TOPIC_OUT_PREFIX) ready_gateway = setup_gateway( device, persistence_file, baud_rate, tcp_port, in_prefix, out_prefix) if ready_gateway is not None: gateways.append(ready_gateway) if not gateways: _LOGGER.error( 'No devices could be setup as gateways, check your configuration') return False hass.data[MYSENSORS_GATEWAYS] = gateways for component in ['sensor', 'switch', 'light', 'binary_sensor', 'climate', 'cover']: discovery.load_platform(hass, component, DOMAIN, {}, config) discovery.load_platform( hass, 'notify', DOMAIN, {CONF_NAME: DOMAIN}, config) return True def pf_callback_factory(map_sv_types, devices, entity_class, add_devices=None): """Return a new callback for the platform.""" def mysensors_callback(gateway, node_id): """Callback for mysensors platform.""" if gateway.sensors[node_id].sketch_name is None: _LOGGER.info('No sketch_name: node %s', node_id) return for child in gateway.sensors[node_id].children.values(): for value_type in child.values.keys(): key = node_id, child.id, value_type if child.type not in map_sv_types or \ value_type not in map_sv_types[child.type]: continue if key in devices: if add_devices: devices[key].schedule_update_ha_state(True) else: devices[key].update() continue name = '{} {} {}'.format( gateway.sensors[node_id].sketch_name, node_id, child.id) if isinstance(entity_class, dict): device_class = entity_class[child.type] else: device_class = entity_class devices[key] = device_class( gateway, node_id, child.id, name, value_type, child.type) if add_devices: _LOGGER.info('Adding new devices: %s', devices[key]) add_devices([devices[key]]) devices[key].schedule_update_ha_state(True) else: devices[key].update() return mysensors_callback class GatewayWrapper(object): """Gateway wrapper class.""" def __init__(self, gateway, optimistic, device): """Setup class attributes on instantiation. Args: gateway (mysensors.SerialGateway): Gateway to wrap. 
optimistic (bool): Send values to actuators without feedback state. device (str): Path to serial port, ip adress or mqtt. Attributes: _wrapped_gateway (mysensors.SerialGateway): Wrapped gateway. platform_callbacks (list): Callback functions, one per platform. optimistic (bool): Send values to actuators without feedback state. device (str): Device configured as gateway. __initialised (bool): True if GatewayWrapper is initialised. """ self._wrapped_gateway = gateway self.platform_callbacks = [] self.optimistic = optimistic self.device = device self.__initialised = True def __getattr__(self, name): """See if this object has attribute name.""" # Do not use hasattr, it goes into infinite recurrsion if name in self.__dict__: # This object has the attribute. return getattr(self, name) # The wrapped object has the attribute. return getattr(self._wrapped_gateway, name) def __setattr__(self, name, value): """See if this object has attribute name then set to value.""" if '_GatewayWrapper__initialised' not in self.__dict__: return object.__setattr__(self, name, value) elif name in self.__dict__: object.__setattr__(self, name, value) else: object.__setattr__(self._wrapped_gateway, name, value) def callback_factory(self): """Return a new callback function.""" def node_update(update_type, node_id): """Callback for node updates from the MySensors gateway.""" _LOGGER.debug('Update %s: node %s', update_type, node_id) for callback in self.platform_callbacks: callback(self, node_id) return node_update class MySensorsDeviceEntity(object): """Represent a MySensors entity.""" def __init__( self, gateway, node_id, child_id, name, value_type, child_type): """ Setup class attributes on instantiation. Args: gateway (GatewayWrapper): Gateway object. node_id (str): Id of node. child_id (str): Id of child. name (str): Entity name. value_type (str): Value type of child. Value is entity state. child_type (str): Child type of child. Attributes: gateway (GatewayWrapper): Gateway object. 
node_id (str): Id of node. child_id (str): Id of child. _name (str): Entity name. value_type (str): Value type of child. Value is entity state. child_type (str): Child type of child. battery_level (int): Node battery level. _values (dict): Child values. Non state values set as state attributes. mysensors (module): Mysensors main component module. """ self.gateway = gateway self.node_id = node_id self.child_id = child_id self._name = name self.value_type = value_type self.child_type = child_type self._values = {} @property def should_poll(self): """Mysensor gateway pushes its state to HA.""" return False @property def name(self): """The name of this entity.""" return self._name @property def device_state_attributes(self): """Return device specific state attributes.""" node = self.gateway.sensors[self.node_id] child = node.children[self.child_id] attr = { ATTR_BATTERY_LEVEL: node.battery_level, ATTR_CHILD_ID: self.child_id, ATTR_DESCRIPTION: child.description, ATTR_DEVICE: self.gateway.device, ATTR_NODE_ID: self.node_id, } set_req = self.gateway.const.SetReq for value_type, value in self._values.items(): try: attr[set_req(value_type).name] = value except ValueError: _LOGGER.error('Value_type %s is not valid for mysensors ' 'version %s', value_type, self.gateway.protocol_version) return attr @property def available(self): """Return True if entity is available.""" return self.value_type in self._values def update(self): """Update the controller with the latest value from a sensor.""" node = self.gateway.sensors[self.node_id] child = node.children[self.child_id] set_req = self.gateway.const.SetReq for value_type, value in child.values.items(): _LOGGER.debug( "%s: value_type %s, value = %s", self._name, value_type, value) if value_type in (set_req.V_ARMED, set_req.V_LIGHT, set_req.V_LOCK_STATUS, set_req.V_TRIPPED): self._values[value_type] = ( STATE_ON if int(value) == 1 else STATE_OFF) elif value_type == set_req.V_DIMMER: self._values[value_type] = int(value) else: 
self._values[value_type] = value
apache-2.0
JerryLead/spark
sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionStateBuilder.scala
4419
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.hive

import org.apache.spark.annotation.{Experimental, InterfaceStability}
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.analysis.Analyzer
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.SparkPlanner
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.hive.client.HiveClient
import org.apache.spark.sql.internal.{BaseSessionStateBuilder, SessionResourceLoader, SessionState}

/**
 * Builder that produces a Hive-aware `SessionState`.
 *
 * Overrides the catalog, analyzer, planner and resource loader of
 * [[BaseSessionStateBuilder]] with Hive-specific implementations.
 */
@Experimental
@InterfaceStability.Unstable
class HiveSessionStateBuilder(session: SparkSession, parentState: Option[SessionState] = None)
  extends BaseSessionStateBuilder(session, parentState) {

  // The shared external catalog is always a HiveExternalCatalog in this builder.
  private def externalCatalog: HiveExternalCatalog =
    session.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog]

  /**
   * Create a Hive aware resource loader.
   *
   * A fresh Hive client session is created so added resources (jars etc.)
   * are scoped to this session.
   */
  override protected lazy val resourceLoader: HiveSessionResourceLoader = {
    val client: HiveClient = externalCatalog.client.newSession()
    new HiveSessionResourceLoader(session, client)
  }

  /**
   * Create a [[HiveSessionCatalog]].
   *
   * State from a parent session (if any) is copied into the new catalog so
   * cloned sessions see the same temporary views/functions.
   */
  override protected lazy val catalog: HiveSessionCatalog = {
    val catalog = new HiveSessionCatalog(
      externalCatalog,
      session.sharedState.globalTempViewManager,
      new HiveMetastoreCatalog(session),
      functionRegistry,
      conf,
      SessionState.newHadoopConf(session.sparkContext.hadoopConfiguration, conf),
      sqlParser,
      resourceLoader)
    parentState.foreach(_.catalog.copyStateTo(catalog))
    catalog
  }

  /**
   * A logical query plan `Analyzer` with rules specific to Hive.
   *
   * NOTE: rule order is significant — Hive serde resolution runs before
   * data-source resolution, and custom rules are appended last.
   */
  override protected def analyzer: Analyzer = new Analyzer(catalog, conf) {
    override val extendedResolutionRules: Seq[Rule[LogicalPlan]] =
      new ResolveHiveSerdeTable(session) +:
      new FindDataSourceTable(session) +:
      new ResolveSQLOnFile(session) +:
      customResolutionRules

    override val postHocResolutionRules: Seq[Rule[LogicalPlan]] =
      new DetermineTableStats(session) +:
      RelationConversions(conf, catalog) +:
      PreprocessTableCreation(session) +:
      PreprocessTableInsertion(conf) +:
      DataSourceAnalysis(conf) +:
      HiveAnalysis +:
      customPostHocResolutionRules

    override val extendedCheckRules: Seq[LogicalPlan => Unit] =
      PreWriteCheck +:
      customCheckRules
  }

  /**
   * Planner that takes into account Hive-specific strategies.
   */
  override protected def planner: SparkPlanner = {
    new SparkPlanner(session.sparkContext, conf, experimentalMethods) with HiveStrategies {
      override val sparkSession: SparkSession = session

      override def extraPlanningStrategies: Seq[Strategy] =
        super.extraPlanningStrategies ++ customPlanningStrategies

      override def strategies: Seq[Strategy] = {
        // Experimental and custom strategies take precedence over the built-ins.
        experimentalMethods.extraStrategies ++
          extraPlanningStrategies ++ Seq(
          FileSourceStrategy,
          DataSourceStrategy,
          SpecialLimits,
          InMemoryScans,
          HiveTableScans,
          Scripts,
          Aggregation,
          JoinSelection,
          BasicOperators
        )
      }
    }
  }

  // Allows cloning this builder for a child session.
  override protected def newBuilder: NewBuilder = new HiveSessionStateBuilder(_, _)
}

/**
 * Resource loader that also registers added jars with the Hive client, so
 * both Spark and Hive see the same classpath additions.
 */
class HiveSessionResourceLoader(
    session: SparkSession,
    client: HiveClient)
  extends SessionResourceLoader(session) {
  override def addJar(path: String): Unit = {
    client.addJar(path)
    super.addJar(path)
  }
}
apache-2.0
BappaMorya/java-pad-server
src/main/java/in/co/sh00nya/server/ServerMain.java
2488
package in.co.sh00nya.server;

import in.co.sh00nya.cmn.ConfigException;
import in.co.sh00nya.cmn.PropsPojoAdapter;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

import org.apache.log4j.Logger;

/**
 * Entry point of the Java Pad server.
 *
 * Boot sequence: load {@code server_config.properties} from the classpath,
 * bind it onto a {@link ServerConfig} POJO, reflectively instantiate the
 * configured {@code IServer} handler, register a JVM shutdown hook, then run
 * the server on a dedicated thread and block until it terminates.
 * Any failure during boot logs the cause and exits with status -1.
 */
public class ServerMain {

	private static final Logger logger = Logger.getLogger(ServerMain.class);

	public static void main(String[] args) {
		logger.info("Java Pad Server booting up ...");

		logger.info("Loading configuration ...");
		// Load configuration from the classpath; missing or unreadable
		// configuration is fatal.
		Properties props = new Properties();
		InputStream ins = ServerMain.class.getClassLoader().getResourceAsStream("server_config.properties");
		if(ins != null) {
			try {
				props.load(ins);
			} catch (IOException e) {
				logger.error("Failed to load server_config.properties", e);
				System.exit(-1);
			} finally {
				if(ins != null)
					try {
						ins.close();
					} catch (IOException e) {
						// Ignore this problem
					}
			}
		} else {
			logger.error("Failed to find server_config.properties");
			System.exit(-1);
		}

		// Transform raw properties into a typed configuration object.
		PropsPojoAdapter<ServerConfig> pojoAdpt = new PropsPojoAdapter<ServerConfig>(false);
		ServerConfig cfg = null;
		try {
			cfg = pojoAdpt.transformProps(props, ServerConfig.class);
		} catch (ConfigException e) {
			logger.error("Failed to parse server_config.properties", e);
			System.exit(-1);
		}

		// Instantiate the handler class named in the configuration so the
		// server implementation can be swapped without recompiling.
		// NOTE(review): the cast to Class<IServer> is unchecked — a class that
		// does not implement IServer would only fail at the assignment below.
		IServer server = null;
		try {
			Class<IServer> handlerClass = (Class<IServer>) Class.forName(cfg.getServerHandlerClass());
			server = handlerClass.newInstance();
			server.setConfig(cfg);
		} catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
			logger.error("Failed to load server handler class", e);
			System.exit(-1);
		}

		// Register shutdown trigger
		logger.info("Registering shutdown hook ...");
		ServerShutdownTrigger shutdownTrigger = new ServerShutdownTrigger();
		shutdownTrigger.setServer(server);
		Thread shutdownThread = new Thread(shutdownTrigger);
		shutdownThread.setName("ShutdownThread");
		Runtime.getRuntime().addShutdownHook(shutdownThread);

		// Start server thread
		logger.info("Starting server ...");
		ServerMT serverMt = new ServerMT();
		serverMt.setCfg(cfg);
		serverMt.setServer(server);
		Thread serverThread = new Thread(serverMt);
		serverThread.setName("ServerMain");
		serverThread.start();

		// Keep the main thread alive until the server thread finishes.
		try {
			serverThread.join();
		} catch (InterruptedException e) {
			logger.error("Failed to wait for server thread to complete", e);
			System.exit(-1);
		}
	}
}
apache-2.0
prpollock/virtualperry
Android/virtualperry/src/com/mysticplanet/virtualperry/ActionBase.java
1921
package com.mysticplanet.virtualperry; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.os.Bundle; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.View.OnClickListener; public class ActionBase extends Activity { Context _context; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); getActionBar().setDisplayHomeAsUpEnabled(true); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.main, menu); return super.onCreateOptionsMenu(menu); } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); if (id == R.id.action_settings) { Intent myIntent = new Intent(_context, com.mysticplanet.virtualperry.SettingsActivity.class); startActivity(myIntent); return true; } if (id == R.id.tts) { Intent myIntent = new Intent(_context, com.mysticplanet.virtualperry.TTSActivity.class); startActivity(myIntent); return true; } if (id == R.id.cmd) { Intent myIntent = new Intent(_context, com.mysticplanet.virtualperry.CommandActivity.class); startActivity(myIntent); return true; } if (id == R.id.freeform) { Intent myIntent = new Intent(_context, com.mysticplanet.virtualperry.FreeformActivity.class); startActivity(myIntent); return true; } return super.onOptionsItemSelected(item); } }
apache-2.0
skeleton-software-community/skeleton-generator
generator-model/src/test/java/org/sklsft/generator/util/JavaClassNamingTest.java
683
package org.sklsft.generator.util;

import org.junit.Assert;
import org.junit.Test;
import org.sklsft.generator.model.util.naming.JavaClassNaming;

/**
 * Unit tests for {@link JavaClassNaming#toDatabaseName(String)}: camel-cased
 * Java identifiers (class or object style) must be converted into upper
 * snake-case database names.
 */
public class JavaClassNamingTest {

	@Test
	public void testClassToDatabaseName() {
		// UpperCamelCase class name -> UPPER_SNAKE_CASE table name.
		String converted = JavaClassNaming.toDatabaseName("MyDummyClass");
		System.out.println(converted);
		Assert.assertEquals("MY_DUMMY_CLASS", converted);
	}

	@Test
	public void testObjectToDatabaseName() {
		// lowerCamelCase object name -> UPPER_SNAKE_CASE column/table name.
		String converted = JavaClassNaming.toDatabaseName("myDummyObject");
		System.out.println(converted);
		Assert.assertEquals("MY_DUMMY_OBJECT", converted);
	}
}
apache-2.0
grandeemme/v-ol3
gwt-ol3/src/main/java/org/vaadin/gwtol3/client/source/MapQuestSource.java
592
package org.vaadin.gwtol3.client.source;

import java.util.logging.Logger;

/**
 * Layer source for the MapQuest tile server
 */
public class MapQuestSource extends XYZSource {

    private static Logger logger= Logger.getLogger(MapQuestSource.class.getName());

    // Layer identifiers accepted by the MapQuest tile service.
    public static final String LAYER_OSM="osm"; // OpenStreetMap base layer
    public static final String LAYER_SAT="sat"; // satellite imagery
    public static final String LAYER_HYB="hyb"; // hybrid (labels over imagery)

    // Instances are created only through the JSNI factory below, which wraps
    // the native OpenLayers object.
    protected MapQuestSource() {
    }

    /**
     * Creates a MapQuest source by instantiating the native
     * {@code ol.source.MapQuest} with the given layer name — one of
     * {@link #LAYER_OSM}, {@link #LAYER_SAT} or {@link #LAYER_HYB}.
     */
    public static final native MapQuestSource create(String layer) /*-{
        return new $wnd.ol.source.MapQuest({layer: layer});
    }-*/;
}
apache-2.0
chanakaudaya/netty
buffer/src/main/java/io/netty/buffer/UnpooledDirectByteBuf.java
17324
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.buffer;

import io.netty.util.internal.PlatformDependent;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.GatheringByteChannel;
import java.nio.channels.ScatteringByteChannel;

/**
 * A NIO {@link ByteBuffer} based buffer. It is recommended to use
 * {@link UnpooledByteBufAllocator#directBuffer(int, int)}, {@link Unpooled#directBuffer(int)} and
 * {@link Unpooled#wrappedBuffer(ByteBuffer)} instead of calling the constructor explicitly.
 */
public class UnpooledDirectByteBuf extends AbstractReferenceCountedByteBuf {

    private final ByteBufAllocator alloc;

    // Backing direct buffer; replaced wholesale on capacity changes.
    private ByteBuffer buffer;
    // Lazily-created duplicate of 'buffer' reused for position/limit-mutating
    // operations so the shared 'buffer' indices are never disturbed.
    private ByteBuffer tmpNioBuf;
    private int capacity;
    // True when 'buffer' was supplied by the caller (wrapped) and therefore
    // must not be freed by this class when it is replaced or deallocated.
    private boolean doNotFree;

    /**
     * Creates a new direct buffer.
     *
     * @param initialCapacity the initial capacity of the underlying direct buffer
     * @param maxCapacity     the maximum capacity of the underlying direct buffer
     */
    public UnpooledDirectByteBuf(ByteBufAllocator alloc, int initialCapacity, int maxCapacity) {
        super(maxCapacity);
        if (alloc == null) {
            throw new NullPointerException("alloc");
        }
        if (initialCapacity < 0) {
            throw new IllegalArgumentException("initialCapacity: " + initialCapacity);
        }
        if (maxCapacity < 0) {
            throw new IllegalArgumentException("maxCapacity: " + maxCapacity);
        }
        if (initialCapacity > maxCapacity) {
            throw new IllegalArgumentException(String.format(
                    "initialCapacity(%d) > maxCapacity(%d)", initialCapacity, maxCapacity));
        }

        this.alloc = alloc;
        setByteBuffer(ByteBuffer.allocateDirect(initialCapacity));
    }

    /**
     * Creates a new direct buffer by wrapping the specified initial buffer.
     *
     * @param maxCapacity the maximum capacity of the underlying direct buffer
     */
    protected UnpooledDirectByteBuf(ByteBufAllocator alloc, ByteBuffer initialBuffer, int maxCapacity) {
        super(maxCapacity);
        if (alloc == null) {
            throw new NullPointerException("alloc");
        }
        if (initialBuffer == null) {
            throw new NullPointerException("initialBuffer");
        }
        if (!initialBuffer.isDirect()) {
            throw new IllegalArgumentException("initialBuffer is not a direct buffer.");
        }
        if (initialBuffer.isReadOnly()) {
            throw new IllegalArgumentException("initialBuffer is a read-only buffer.");
        }

        int initialCapacity = initialBuffer.remaining();
        if (initialCapacity > maxCapacity) {
            throw new IllegalArgumentException(String.format(
                    "initialCapacity(%d) > maxCapacity(%d)", initialCapacity, maxCapacity));
        }

        this.alloc = alloc;
        // Wrapped buffer belongs to the caller: never free it ourselves.
        doNotFree = true;
        setByteBuffer(initialBuffer.slice().order(ByteOrder.BIG_ENDIAN));
        writerIndex(initialCapacity);
    }

    /**
     * Allocate a new direct {@link ByteBuffer} with the given initialCapacity.
     */
    protected ByteBuffer allocateDirect(int initialCapacity) {
        return ByteBuffer.allocateDirect(initialCapacity);
    }

    /**
     * Free a direct {@link ByteBuffer}
     */
    protected void freeDirect(ByteBuffer buffer) {
        PlatformDependent.freeDirectBuffer(buffer);
    }

    // Installs a new backing buffer, freeing the old one unless it was
    // caller-supplied (doNotFree). Invalidates the cached duplicate.
    private void setByteBuffer(ByteBuffer buffer) {
        ByteBuffer oldBuffer = this.buffer;
        if (oldBuffer != null) {
            if (doNotFree) {
                // doNotFree only protects the originally wrapped buffer; any
                // replacement we allocate ourselves is ours to free later.
                doNotFree = false;
            } else {
                freeDirect(oldBuffer);
            }
        }

        this.buffer = buffer;
        tmpNioBuf = null;
        capacity = buffer.remaining();
    }

    @Override
    public boolean isDirect() {
        return true;
    }

    @Override
    public int capacity() {
        return capacity;
    }

    // Grows or shrinks by allocating a fresh direct buffer and copying the
    // relevant bytes; indices are clamped when shrinking below them.
    @Override
    public ByteBuf capacity(int newCapacity) {
        checkNewCapacity(newCapacity);

        int readerIndex = readerIndex();
        int writerIndex = writerIndex();

        int oldCapacity = capacity;
        if (newCapacity > oldCapacity) {
            ByteBuffer oldBuffer = buffer;
            ByteBuffer newBuffer = allocateDirect(newCapacity);
            oldBuffer.position(0).limit(oldBuffer.capacity());
            newBuffer.position(0).limit(oldBuffer.capacity());
            newBuffer.put(oldBuffer);
            newBuffer.clear();
            setByteBuffer(newBuffer);
        } else if (newCapacity < oldCapacity) {
            ByteBuffer oldBuffer = buffer;
            ByteBuffer newBuffer = allocateDirect(newCapacity);
            if (readerIndex < newCapacity) {
                if (writerIndex > newCapacity) {
                    writerIndex(writerIndex = newCapacity);
                }
                // Only the readable region survives the shrink.
                oldBuffer.position(readerIndex).limit(writerIndex);
                newBuffer.position(readerIndex).limit(writerIndex);
                newBuffer.put(oldBuffer);
                newBuffer.clear();
            } else {
                // Entire readable region is beyond the new capacity.
                setIndex(newCapacity, newCapacity);
            }
            setByteBuffer(newBuffer);
        }
        return this;
    }

    @Override
    public ByteBufAllocator alloc() {
        return alloc;
    }

    @Override
    public ByteOrder order() {
        return ByteOrder.BIG_ENDIAN;
    }

    @Override
    public boolean hasArray() {
        return false;
    }

    @Override
    public byte[] array() {
        throw new UnsupportedOperationException("direct buffer");
    }

    @Override
    public int arrayOffset() {
        throw new UnsupportedOperationException("direct buffer");
    }

    @Override
    public boolean hasMemoryAddress() {
        return false;
    }

    @Override
    public long memoryAddress() {
        throw new UnsupportedOperationException();
    }

    @Override
    public byte getByte(int index) {
        ensureAccessible();
        return _getByte(index);
    }

    @Override
    protected byte _getByte(int index) {
        return buffer.get(index);
    }

    @Override
    public short getShort(int index) {
        ensureAccessible();
        return _getShort(index);
    }

    @Override
    protected short _getShort(int index) {
        return buffer.getShort(index);
    }

    @Override
    public int getUnsignedMedium(int index) {
        ensureAccessible();
        return _getUnsignedMedium(index);
    }

    // 3-byte big-endian read assembled from single-byte reads.
    @Override
    protected int _getUnsignedMedium(int index) {
        return (getByte(index) & 0xff) << 16 | (getByte(index + 1) & 0xff) << 8 | getByte(index + 2) & 0xff;
    }

    @Override
    public int getInt(int index) {
        ensureAccessible();
        return _getInt(index);
    }

    @Override
    protected int _getInt(int index) {
        return buffer.getInt(index);
    }

    @Override
    public long getLong(int index) {
        ensureAccessible();
        return _getLong(index);
    }

    @Override
    protected long _getLong(int index) {
        return buffer.getLong(index);
    }

    @Override
    public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) {
        checkDstIndex(index, length, dstIndex, dst.capacity());
        if (dst.hasArray()) {
            getBytes(index, dst.array(), dst.arrayOffset() + dstIndex, length);
        } else if (dst.nioBufferCount() > 0) {
            for (ByteBuffer bb: dst.nioBuffers(dstIndex, length)) {
                int bbLen = bb.remaining();
                getBytes(index, bb);
                index += bbLen;
            }
        } else {
            // Fall back to letting the destination pull from us.
            dst.setBytes(dstIndex, this, index, length);
        }
        return this;
    }

    @Override
    public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) {
        getBytes(index, dst, dstIndex, length, false);
        return this;
    }

    // 'internal' selects the cached duplicate (single-threaded fast path)
    // instead of allocating a fresh duplicate for each call.
    private void getBytes(int index, byte[] dst, int dstIndex, int length, boolean internal) {
        checkDstIndex(index, length, dstIndex, dst.length);

        ByteBuffer tmpBuf;
        if (internal) {
            tmpBuf = internalNioBuffer();
        } else {
            tmpBuf = buffer.duplicate();
        }
        tmpBuf.clear().position(index).limit(index + length);
        tmpBuf.get(dst, dstIndex, length);
    }

    @Override
    public ByteBuf readBytes(byte[] dst, int dstIndex, int length) {
        checkReadableBytes(length);
        getBytes(readerIndex, dst, dstIndex, length, true);
        readerIndex += length;
        return this;
    }

    @Override
    public ByteBuf getBytes(int index, ByteBuffer dst) {
        getBytes(index, dst, false);
        return this;
    }

    private void getBytes(int index, ByteBuffer dst, boolean internal) {
        checkIndex(index, dst.remaining());

        ByteBuffer tmpBuf;
        if (internal) {
            tmpBuf = internalNioBuffer();
        } else {
            tmpBuf = buffer.duplicate();
        }
        tmpBuf.clear().position(index).limit(index + dst.remaining());
        dst.put(tmpBuf);
    }

    @Override
    public ByteBuf readBytes(ByteBuffer dst) {
        int length = dst.remaining();
        checkReadableBytes(length);
        getBytes(readerIndex, dst, true);
        readerIndex += length;
        return this;
    }

    @Override
    public ByteBuf setByte(int index, int value) {
        ensureAccessible();
        _setByte(index, value);
        return this;
    }

    @Override
    protected void _setByte(int index, int value) {
        buffer.put(index, (byte) value);
    }

    @Override
    public ByteBuf setShort(int index, int value) {
        ensureAccessible();
        _setShort(index, value);
        return this;
    }

    @Override
    protected void _setShort(int index, int value) {
        buffer.putShort(index, (short) value);
    }

    @Override
    public ByteBuf setMedium(int index, int value) {
        ensureAccessible();
        _setMedium(index, value);
        return this;
    }

    // 3-byte big-endian write, most significant byte first.
    @Override
    protected void _setMedium(int index, int value) {
        setByte(index, (byte) (value >>> 16));
        setByte(index + 1, (byte) (value >>> 8));
        setByte(index + 2, (byte) value);
    }

    @Override
    public ByteBuf setInt(int index, int value) {
        ensureAccessible();
        _setInt(index, value);
        return this;
    }

    @Override
    protected void _setInt(int index, int value) {
        buffer.putInt(index, value);
    }

    @Override
    public ByteBuf setLong(int index, long value) {
        ensureAccessible();
        _setLong(index, value);
        return this;
    }

    @Override
    protected void _setLong(int index, long value) {
        buffer.putLong(index, value);
    }

    @Override
    public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
        checkSrcIndex(index, length, srcIndex, src.capacity());
        if (src.nioBufferCount() > 0) {
            for (ByteBuffer bb: src.nioBuffers(srcIndex, length)) {
                int bbLen = bb.remaining();
                setBytes(index, bb);
                index += bbLen;
            }
        } else {
            // Fall back to letting the source push into us.
            src.getBytes(srcIndex, this, index, length);
        }
        return this;
    }

    @Override
    public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) {
        checkSrcIndex(index, length, srcIndex, src.length);
        ByteBuffer tmpBuf = internalNioBuffer();
        tmpBuf.clear().position(index).limit(index + length);
        tmpBuf.put(src, srcIndex, length);
        return this;
    }

    @Override
    public ByteBuf setBytes(int index, ByteBuffer src) {
        ensureAccessible();
        ByteBuffer tmpBuf = internalNioBuffer();
        if (src == tmpBuf) {
            // Copying a buffer into itself: duplicate so position/limit
            // manipulation below does not corrupt the source view.
            src = src.duplicate();
        }

        tmpBuf.clear().position(index).limit(index + src.remaining());
        tmpBuf.put(src);
        return this;
    }

    @Override
    public ByteBuf getBytes(int index, OutputStream out, int length) throws IOException {
        getBytes(index, out, length, false);
        return this;
    }

    private void getBytes(int index, OutputStream out, int length, boolean internal) throws IOException {
        ensureAccessible();
        if (length == 0) {
            return;
        }

        if (buffer.hasArray()) {
            out.write(buffer.array(), index + buffer.arrayOffset(), length);
        } else {
            // Direct buffers have no accessible array: stage through a temp array.
            byte[] tmp = new byte[length];
            ByteBuffer tmpBuf;
            if (internal) {
                tmpBuf = internalNioBuffer();
            } else {
                tmpBuf = buffer.duplicate();
            }
            tmpBuf.clear().position(index);
            tmpBuf.get(tmp);
            out.write(tmp);
        }
    }

    @Override
    public ByteBuf readBytes(OutputStream out, int length) throws IOException {
        checkReadableBytes(length);
        getBytes(readerIndex, out, length, true);
        readerIndex += length;
        return this;
    }

    @Override
    public int getBytes(int index, GatheringByteChannel out, int length) throws IOException {
        return getBytes(index, out, length, false);
    }

    private int getBytes(int index, GatheringByteChannel out, int length, boolean internal) throws IOException {
        ensureAccessible();
        if (length == 0) {
            return 0;
        }

        ByteBuffer tmpBuf;
        if (internal) {
            tmpBuf = internalNioBuffer();
        } else {
            tmpBuf = buffer.duplicate();
        }
        tmpBuf.clear().position(index).limit(index + length);
        return out.write(tmpBuf);
    }

    @Override
    public int readBytes(GatheringByteChannel out, int length) throws IOException {
        checkReadableBytes(length);
        // Channel writes may be partial: only advance by what was written.
        int readBytes = getBytes(readerIndex, out, length, true);
        readerIndex += readBytes;
        return readBytes;
    }

    @Override
    public int setBytes(int index, InputStream in, int length) throws IOException {
        ensureAccessible();
        if (buffer.hasArray()) {
            return in.read(buffer.array(), buffer.arrayOffset() + index, length);
        } else {
            byte[] tmp = new byte[length];
            int readBytes = in.read(tmp);
            if (readBytes <= 0) {
                // Either EOF (-1) or nothing read (0); report it unchanged.
                return readBytes;
            }
            ByteBuffer tmpBuf = internalNioBuffer();
            tmpBuf.clear().position(index);
            tmpBuf.put(tmp, 0, readBytes);
            return readBytes;
        }
    }

    @Override
    public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException {
        ensureAccessible();
        ByteBuffer tmpBuf = internalNioBuffer();
        tmpBuf.clear().position(index).limit(index + length);
        try {
            // NOTE: tmpNioBuf is the same object internalNioBuffer() just
            // cached into tmpBuf, so this reads into the view prepared above.
            return in.read(tmpNioBuf);
        } catch (ClosedChannelException ignored) {
            // Contract: a closed channel is reported as -1, not an exception.
            return -1;
        }
    }

    @Override
    public int nioBufferCount() {
        return 1;
    }

    @Override
    public ByteBuffer[] nioBuffers(int index, int length) {
        return new ByteBuffer[] { nioBuffer(index, length) };
    }

    @Override
    public ByteBuf copy(int index, int length) {
        ensureAccessible();
        ByteBuffer src;
        try {
            src = (ByteBuffer) buffer.duplicate().clear().position(index).limit(index + length);
        } catch (IllegalArgumentException ignored) {
            // position/limit rejected the range: translate to the ByteBuf contract.
            throw new IndexOutOfBoundsException("Too many bytes to read - Need " + (index + length));
        }

        return alloc().directBuffer(length, maxCapacity()).writeBytes(src);
    }

    @Override
    public ByteBuffer internalNioBuffer(int index, int length) {
        checkIndex(index, length);
        return (ByteBuffer) internalNioBuffer().clear().position(index).limit(index + length);
    }

    // Lazily creates and caches a duplicate of the backing buffer. Not
    // thread-safe; callers are expected to confine use to one thread.
    private ByteBuffer internalNioBuffer() {
        ByteBuffer tmpNioBuf = this.tmpNioBuf;
        if (tmpNioBuf == null) {
            this.tmpNioBuf = tmpNioBuf = buffer.duplicate();
        }
        return tmpNioBuf;
    }

    @Override
    public ByteBuffer nioBuffer(int index, int length) {
        checkIndex(index, length);
        return ((ByteBuffer) buffer.duplicate().position(index).limit(index + length)).slice();
    }

    @Override
    protected void deallocate() {
        ByteBuffer buffer = this.buffer;
        if (buffer == null) {
            // Already deallocated.
            return;
        }

        this.buffer = null;

        if (!doNotFree) {
            freeDirect(buffer);
        }
    }

    @Override
    public ByteBuf unwrap() {
        return null;
    }
}
apache-2.0
cosmocode/cosmocode-commons
src/test/java/de/cosmocode/collections/utility/convert/ConvertDateTest.java
7435
/** * Copyright 2010 - 2013 CosmoCode GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.cosmocode.collections.utility.convert; import de.cosmocode.collections.utility.Convert; import de.cosmocode.commons.DateMode; import de.cosmocode.junit.Asserts; import org.junit.Assert; import org.junit.Test; import java.util.Calendar; import java.util.Date; /** * Tests {@link Convert#intoDate(Object)} and * {@link Convert#intoDate(Object, Date)}. * * @author Willi Schoenborn */ public class ConvertDateTest { /** * Tests {@link Convert#intoDate(Object)} with null. */ @Test(expected = IllegalArgumentException.class) public void intoDateNull() { Convert.intoDate(null); } /** * Tests {@link Convert#intoDate(Object)} with "". */ @Test(expected = IllegalArgumentException.class) public void intoDateEmpty() { Convert.intoDate(""); } /** * Tests {@link Convert#intoDate(Object)} with new Object(). */ @Test(expected = IllegalArgumentException.class) public void intoDateObject() { Convert.intoDate(new Object()); } /** * Tests {@link Convert#intoDate(Object)} with valid attributes. 
*/ @Test public void intoDate() { final Date date = new Date(); Assert.assertEquals(date, Convert.intoDate(date)); Assert.assertEquals(date, Convert.intoDate(date.getTime())); Assert.assertEquals(date, Convert.intoDate(Long.toString(date.getTime()))); final Calendar calendar = Calendar.getInstance(); Assert.assertEquals(calendar.getTime(), Convert.intoDate(calendar)); Assert.assertEquals(calendar.getTime(), Convert.intoDate(calendar.getTime())); Assert.assertEquals(calendar.getTime(), Convert.intoDate(calendar.getTime().getTime())); Assert.assertEquals(calendar.getTime(), Convert.intoDate(Long.toString(calendar.getTime().getTime()))); } /** * Tests {@link Convert#intoDate(Object, Date)} with valid attributes. */ @Test public void intoDateDefaultUnused() { final Date date = new Date(); final Date defaultValue = null; Assert.assertEquals(date, Convert.intoDate(date, defaultValue)); Assert.assertEquals(date, Convert.intoDate(date.getTime(), defaultValue)); Assert.assertEquals(date, Convert.intoDate(Long.toString(date.getTime()), defaultValue)); final Calendar calendar = Calendar.getInstance(); Assert.assertEquals(calendar.getTime(), Convert.intoDate(calendar, defaultValue)); Assert.assertEquals(calendar.getTime(), Convert.intoDate(calendar.getTime(), defaultValue)); Assert.assertEquals(calendar.getTime(), Convert.intoDate(calendar.getTime().getTime(), defaultValue)); final String timeString = Long.toString(calendar.getTime().getTime()); Assert.assertEquals(calendar.getTime(), Convert.intoDate(timeString, defaultValue)); } /** * Tests {@link Convert#intoDate(Object, Date)} with invalid attributes. */ @Test public void intoDateDefaultUsed() { final Date defaultValue = new Date(); Assert.assertSame(defaultValue, Convert.intoDate(null, defaultValue)); Assert.assertSame(defaultValue, Convert.intoDate("", defaultValue)); Assert.assertSame(defaultValue, Convert.intoDate(new Object(), defaultValue)); } /** * Tests {@link Convert#intoDate(Object, DateMode)} with null dateMode. 
*/ @Test(expected = NullPointerException.class) public void intoDateModeNullMode() { final DateMode dateMode = null; Convert.intoDate(null, dateMode); } /** * Tests {@link Convert#intoDate(Object, DateMode)} with null. */ @Test(expected = IllegalArgumentException.class) public void intoDateModeNull() { Convert.intoDate(null, DateMode.JAVA); } /** * Tests {@link Convert#intoDate(Object, DateMode)} with "". */ @Test(expected = IllegalArgumentException.class) public void intoDateModeEmpty() { Convert.intoDate("", DateMode.JAVA); } /** * Tests {@link Convert#intoDate(Object, DateMode)} with new Object(). */ @Test(expected = IllegalArgumentException.class) public void intoDateModeObject() { Convert.intoDate(new Object(), DateMode.JAVA); } /** * Tests {@link Convert#intoDate(Object, DateMode)} with valid {@link Date} attributes. */ @Test public void intoDateMode() { final Date date = new Date(); final long unix = date.getTime() / 1000; Assert.assertSame(0, DateMode.UNIXTIME.compare(date, Convert.intoDate(date, DateMode.UNIXTIME))); Assert.assertSame(0, DateMode.UNIXTIME.compare(date, Convert.intoDate(unix, DateMode.UNIXTIME))); Assert.assertSame(0, DateMode.UNIXTIME.compare(date, Convert.intoDate(Long.toString(unix), DateMode.UNIXTIME))); } /** * Tests {@link Convert#intoDate(Object, DateMode)} with valid {@link Calendar} attributes. */ @Test public void intoDateModeCalendar() { final Calendar calendar = Calendar.getInstance(); final Date date = calendar.getTime(); final long unix = calendar.getTimeInMillis() / 1000; Assert.assertSame(0, DateMode.UNIXTIME.compare(date, Convert.intoDate(date, DateMode.UNIXTIME))); Assert.assertSame(0, DateMode.UNIXTIME.compare(date, Convert.intoDate(unix, DateMode.UNIXTIME))); Assert.assertSame(0, DateMode.UNIXTIME.compare(date, Convert.intoDate(Long.toString(unix), DateMode.UNIXTIME))); } /** * Tests {@link Convert#intoDate(Object, DateMode, Date)} with valid attributes. 
*/ @Test public void intoDateModeDefaultUnused() { final Date date = new Date(); final long unix = date.getTime() / 1000; final Date somewhen = new Date(date.getTime() / 2); Asserts.assertNotEquals(somewhen, Convert.intoDate(date, DateMode.UNIXTIME, somewhen)); Asserts.assertNotEquals(somewhen, Convert.intoDate(unix, DateMode.UNIXTIME, somewhen)); Asserts.assertNotEquals(somewhen, Convert.intoDate(Long.toString(unix), DateMode.UNIXTIME, somewhen)); } /** * Tests {@link Convert#intoDate(Object, DateMode, Date)} with invalid attributes. */ @Test public void intoDateModeDefaultUsed() { final Date date = new Date(); final Date somewhen = new Date(date.getTime() / 2); Asserts.assertEquals(somewhen, Convert.intoDate(null, DateMode.UNIXTIME, somewhen)); Asserts.assertEquals(somewhen, Convert.intoDate("", DateMode.UNIXTIME, somewhen)); Asserts.assertEquals(somewhen, Convert.intoDate(new Object(), DateMode.UNIXTIME, somewhen)); Asserts.assertEquals(somewhen, Convert.intoDate(-1, DateMode.UNIXTIME, somewhen)); } }
apache-2.0
techaddict/spark
sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetastoreCatalogSuite.scala
12061
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.hive import org.apache.spark.sql.{QueryTest, Row, SaveMode} import org.apache.spark.sql.catalyst.{AliasIdentifier, TableIdentifier} import org.apache.spark.sql.catalyst.catalog.CatalogTableType import org.apache.spark.sql.catalyst.parser.CatalystSqlParser import org.apache.spark.sql.catalyst.plans.logical.SubqueryAlias import org.apache.spark.sql.hive.test.TestHiveSingleton import org.apache.spark.sql.internal.{HiveSerDe, SQLConf} import org.apache.spark.sql.test.{ExamplePointUDT, SQLTestUtils} import org.apache.spark.sql.types._ class HiveMetastoreCatalogSuite extends TestHiveSingleton with SQLTestUtils { import spark.implicits._ test("struct field should accept underscore in sub-column name") { val hiveTypeStr = "struct<a: int, b_1: string, c: string>" val dataType = CatalystSqlParser.parseDataType(hiveTypeStr) assert(dataType.isInstanceOf[StructType]) } test("udt to metastore type conversion") { val udt = new ExamplePointUDT assertResult(udt.sqlType.catalogString) { udt.catalogString } } test("duplicated metastore relations") { val df = spark.sql("SELECT * FROM src") logInfo(df.queryExecution.toString) df.as('a).join(df.as('b), $"a.key" === $"b.key") } test("should 
not truncate struct type catalog string") { def field(n: Int): StructField = { StructField("col" + n, StringType) } val dataType = StructType((1 to 100).map(field)) assert(CatalystSqlParser.parseDataType(dataType.catalogString) == dataType) } test("view relation") { withView("vw1") { spark.sql("create view vw1 as select 1 as id") val plan = spark.sql("select id from vw1").queryExecution.analyzed val aliases = plan.collect { case x @ SubqueryAlias(AliasIdentifier("vw1", Some("default")), _) => x } assert(aliases.size == 1) } } test("Validate catalog metadata for supported data types") { withTable("t") { sql( """ |CREATE TABLE t ( |c1 boolean, |c2 tinyint, |c3 smallint, |c4 short, |c5 bigint, |c6 long, |c7 float, |c8 double, |c9 date, |c10 timestamp, |c11 string, |c12 char(10), |c13 varchar(10), |c14 binary, |c15 decimal, |c16 decimal(10), |c17 decimal(10,2), |c18 array<string>, |c19 array<int>, |c20 array<char(10)>, |c21 map<int,int>, |c22 map<int,char(10)>, |c23 struct<a:int,b:int>, |c24 struct<c:varchar(10),d:int> |) """.stripMargin) val schema = hiveClient.getTable("default", "t").schema val expectedSchema = new StructType() .add("c1", "boolean") .add("c2", "tinyint") .add("c3", "smallint") .add("c4", "short") .add("c5", "bigint") .add("c6", "long") .add("c7", "float") .add("c8", "double") .add("c9", "date") .add("c10", "timestamp") .add("c11", "string") .add("c12", "string", true, new MetadataBuilder().putString(HIVE_TYPE_STRING, "char(10)").build()) .add("c13", "string", true, new MetadataBuilder().putString(HIVE_TYPE_STRING, "varchar(10)").build()) .add("c14", "binary") .add("c15", "decimal") .add("c16", "decimal(10)") .add("c17", "decimal(10,2)") .add("c18", "array<string>") .add("c19", "array<int>") .add("c20", "array<string>", true, new MetadataBuilder().putString(HIVE_TYPE_STRING, "array<char(10)>").build()) .add("c21", "map<int,int>") .add("c22", "map<int,string>", true, new MetadataBuilder().putString(HIVE_TYPE_STRING, "map<int,char(10)>").build()) 
.add("c23", "struct<a:int,b:int>") .add("c24", "struct<c:string,d:int>", true, new MetadataBuilder().putString(HIVE_TYPE_STRING, "struct<c:varchar(10),d:int>").build()) assert(schema == expectedSchema) } } } class DataSourceWithHiveMetastoreCatalogSuite extends QueryTest with SQLTestUtils with TestHiveSingleton { import hiveContext._ import testImplicits._ private val testDF = range(1, 3).select( ('id + 0.1) cast DecimalType(10, 3) as 'd1, 'id cast StringType as 'd2 ).coalesce(1) override def beforeAll(): Unit = { super.beforeAll() sparkSession.sessionState.catalog.reset() sparkSession.metadataHive.reset() } Seq( "parquet" -> (( "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe" )), "org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat" -> (( "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe" )), "orc" -> (( "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat", "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat", "org.apache.hadoop.hive.ql.io.orc.OrcSerde" )), "org.apache.spark.sql.hive.orc" -> (( "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat", "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat", "org.apache.hadoop.hive.ql.io.orc.OrcSerde" )), "org.apache.spark.sql.execution.datasources.orc.OrcFileFormat" -> (( "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat", "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat", "org.apache.hadoop.hive.ql.io.orc.OrcSerde" )) ).foreach { case (provider, (inputFormat, outputFormat, serde)) => test(s"Persist non-partitioned $provider relation into metastore as managed table") { withTable("t") { withSQLConf(SQLConf.PARQUET_WRITE_LEGACY_FORMAT.key -> "true") { testDF .write .mode(SaveMode.Overwrite) .format(provider) .saveAsTable("t") } val 
hiveTable = sessionState.catalog.getTableMetadata(TableIdentifier("t", Some("default"))) assert(hiveTable.storage.inputFormat === Some(inputFormat)) assert(hiveTable.storage.outputFormat === Some(outputFormat)) assert(hiveTable.storage.serde === Some(serde)) assert(hiveTable.partitionColumnNames.isEmpty) assert(hiveTable.tableType === CatalogTableType.MANAGED) val columns = hiveTable.schema assert(columns.map(_.name) === Seq("d1", "d2")) assert(columns.map(_.dataType) === Seq(DecimalType(10, 3), StringType)) checkAnswer(table("t"), testDF) if (HiveUtils.isHive23) { assert(sparkSession.metadataHive.runSqlHive("SELECT * FROM t") === Seq("1.100\t1", "2.100\t2")) } else { assert(sparkSession.metadataHive.runSqlHive("SELECT * FROM t") === Seq("1.1\t1", "2.1\t2")) } } } test(s"Persist non-partitioned $provider relation into metastore as external table") { withTempPath { dir => withTable("t") { val path = dir.getCanonicalFile withSQLConf(SQLConf.PARQUET_WRITE_LEGACY_FORMAT.key -> "true") { testDF .write .mode(SaveMode.Overwrite) .format(provider) .option("path", path.toString) .saveAsTable("t") } val hiveTable = sessionState.catalog.getTableMetadata(TableIdentifier("t", Some("default"))) assert(hiveTable.storage.inputFormat === Some(inputFormat)) assert(hiveTable.storage.outputFormat === Some(outputFormat)) assert(hiveTable.storage.serde === Some(serde)) assert(hiveTable.tableType === CatalogTableType.EXTERNAL) assert(hiveTable.storage.locationUri === Some(makeQualifiedPath(dir.getAbsolutePath))) val columns = hiveTable.schema assert(columns.map(_.name) === Seq("d1", "d2")) assert(columns.map(_.dataType) === Seq(DecimalType(10, 3), StringType)) checkAnswer(table("t"), testDF) if (HiveUtils.isHive23) { assert(sparkSession.metadataHive.runSqlHive("SELECT * FROM t") === Seq("1.100\t1", "2.100\t2")) } else { assert(sparkSession.metadataHive.runSqlHive("SELECT * FROM t") === Seq("1.1\t1", "2.1\t2")) } } } } test(s"Persist non-partitioned $provider relation into metastore as 
managed table using CTAS") { withTempPath { dir => withTable("t") { sql( s"""CREATE TABLE t USING $provider |OPTIONS (path '${dir.toURI}') |AS SELECT 1 AS d1, "val_1" AS d2 """.stripMargin) val hiveTable = sessionState.catalog.getTableMetadata(TableIdentifier("t", Some("default"))) assert(hiveTable.storage.inputFormat === Some(inputFormat)) assert(hiveTable.storage.outputFormat === Some(outputFormat)) assert(hiveTable.storage.serde === Some(serde)) assert(hiveTable.partitionColumnNames.isEmpty) assert(hiveTable.tableType === CatalogTableType.EXTERNAL) val columns = hiveTable.schema assert(columns.map(_.name) === Seq("d1", "d2")) assert(columns.map(_.dataType) === Seq(IntegerType, StringType)) checkAnswer(table("t"), Row(1, "val_1")) assert(sparkSession.metadataHive.runSqlHive("SELECT * FROM t") === Seq("1\tval_1")) } } } } test("SPARK-27592 set the bucketed data source table SerDe correctly") { val provider = "parquet" withTable("t") { spark.sql( s""" |CREATE TABLE t |USING $provider |CLUSTERED BY (c1) |SORTED BY (c1) |INTO 2 BUCKETS |AS SELECT 1 AS c1, 2 AS c2 """.stripMargin) val metadata = sessionState.catalog.getTableMetadata(TableIdentifier("t", Some("default"))) val hiveSerDe = HiveSerDe.sourceToSerDe(provider).get assert(metadata.storage.serde === hiveSerDe.serde) assert(metadata.storage.inputFormat === hiveSerDe.inputFormat) assert(metadata.storage.outputFormat === hiveSerDe.outputFormat) // It's a bucketed table at Spark side assert(sql("DESC FORMATTED t").collect().containsSlice( Seq(Row("Num Buckets", "2", ""), Row("Bucket Columns", "[`c1`]", "")) )) checkAnswer(table("t"), Row(1, 2)) // It's not a bucketed table at Hive side val hiveSide = sparkSession.metadataHive.runSqlHive("DESC FORMATTED t") assert(hiveSide.contains("Num Buckets: \t-1 \t ")) assert(hiveSide.contains("Bucket Columns: \t[] \t ")) assert(hiveSide.contains("\tspark.sql.sources.schema.numBuckets\t2 ")) assert(hiveSide.contains("\tspark.sql.sources.schema.bucketCol.0\tc1 ")) 
assert(sparkSession.metadataHive.runSqlHive("SELECT * FROM t") === Seq("1\t2")) } } }
apache-2.0
google/loaner
loaner/web_app/frontend/src/services/dialog/index.ts
1092
// Copyright 2018 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import {NgModule} from '@angular/core';

import {MaterialModule} from '../../core/material_module';

import {ConfirmDialog} from './confirm_dialog';
import {Dialog} from './dialog';

// Barrel re-exports: consumers import the Dialog service and the
// ConfirmDialog component from this index instead of their own files.
export * from './dialog';
export * from './confirm_dialog';

/**
 * Angular module that bundles the shared dialog functionality:
 * the ConfirmDialog component and the Dialog service that opens it.
 */
@NgModule({
  // Components owned by this module.
  declarations: [
    ConfirmDialog,
  ],
  // Made visible to any module that imports DialogsModule.
  exports: [
    ConfirmDialog,
  ],
  // ConfirmDialog is created dynamically (e.g. via the dialog service),
  // so it must be registered as an entry component (pre-Ivy requirement).
  entryComponents: [
    ConfirmDialog,
  ],
  // Angular Material building blocks used by the dialog component.
  imports: [
    MaterialModule,
  ],
  // The injectable service consumers use to open dialogs.
  providers: [
    Dialog,
  ],
})
export class DialogsModule {
}
apache-2.0
thiagoramos23/java_mock_project
thramos-model/src/main/java/com/thramos/framework/contexto/Contexto.java
72
package com.thramos.framework.contexto;

/**
 * Marker interface for the framework's context abstraction.
 *
 * <p>It currently declares no members, so implementing it only tags a class
 * as a "contexto" within the framework.
 *
 * <p>NOTE(review): the intended contract is not visible from this file —
 * presumably framework context implementations are expected to implement
 * this interface; confirm against its usages before adding members.
 */
public interface Contexto {

}
apache-2.0
containous/yaegi
stdlib/go1_14_path_filepath.go
1644
// Code generated by 'github.com/containous/yaegi/extract path/filepath'. DO NOT EDIT.

// Build constraint: this extract matches the go1.14 standard library only.
// +build go1.14,!go1.15

package stdlib

import (
	"go/constant"
	"go/token"
	"path/filepath"
	"reflect"
)

// init registers the exported symbols of the standard "path/filepath"
// package into the interpreter's Symbols table, keyed by import path.
// Each entry wraps the real stdlib identifier in a reflect.Value so the
// yaegi interpreter can resolve `import "path/filepath"` at runtime.
func init() {
	Symbols["path/filepath"] = map[string]reflect.Value{
		// function, constant and variable definitions
		"Abs":          reflect.ValueOf(filepath.Abs),
		"Base":         reflect.ValueOf(filepath.Base),
		"Clean":        reflect.ValueOf(filepath.Clean),
		"Dir":          reflect.ValueOf(filepath.Dir),
		// Variables are registered via &x / .Elem() so they stay addressable.
		"ErrBadPattern": reflect.ValueOf(&filepath.ErrBadPattern).Elem(),
		"EvalSymlinks":  reflect.ValueOf(filepath.EvalSymlinks),
		"Ext":           reflect.ValueOf(filepath.Ext),
		"FromSlash":     reflect.ValueOf(filepath.FromSlash),
		"Glob":          reflect.ValueOf(filepath.Glob),
		"HasPrefix":     reflect.ValueOf(filepath.HasPrefix),
		"IsAbs":         reflect.ValueOf(filepath.IsAbs),
		"Join":          reflect.ValueOf(filepath.Join),
		// Untyped constants are re-created as go/constant literals:
		// 58 is ':' (the Unix list separator).
		"ListSeparator": reflect.ValueOf(constant.MakeFromLiteral("58", token.INT, 0)),
		"Match":         reflect.ValueOf(filepath.Match),
		"Rel":           reflect.ValueOf(filepath.Rel),
		// 47 is '/' (the Unix path separator).
		"Separator": reflect.ValueOf(constant.MakeFromLiteral("47", token.INT, 0)),
		"SkipDir":   reflect.ValueOf(&filepath.SkipDir).Elem(),
		"Split":     reflect.ValueOf(filepath.Split),
		"SplitList": reflect.ValueOf(filepath.SplitList),
		"ToSlash":   reflect.ValueOf(filepath.ToSlash),
		"VolumeName": reflect.ValueOf(filepath.VolumeName),
		"Walk":       reflect.ValueOf(filepath.Walk),

		// type definitions
		"WalkFunc": reflect.ValueOf((*filepath.WalkFunc)(nil)),
	}
}
apache-2.0
shot/hadoop-source-reading
src/hdfs/org/apache/hadoop/hdfs/server/protocol/InterDatanodeProtocol.java
1712
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.protocol;

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.ipc.VersionedProtocol;

/**
 * An inter-datanode protocol for updating generation stamp
 */
public interface InterDatanodeProtocol extends VersionedProtocol {

	/** Shared logger for implementations and callers of this protocol. */
	public static final Log LOG = LogFactory
			.getLog(InterDatanodeProtocol.class);

	/**
	 * Protocol version history:
	 * 3: added a finalize parameter to updateBlock
	 */
	public static final long versionID = 3L;

	/**
	 * Look up metadata for the given block on this datanode.
	 *
	 * @param block the block to look up
	 * @return the BlockMetaDataInfo of a block; null if the block is not found
	 * @throws IOException if the lookup fails on the remote datanode
	 */
	BlockMetaDataInfo getBlockMetaDataInfo(Block block) throws IOException;

	/**
	 * Update the block to the new generation stamp and length.
	 *
	 * @param oldblock the existing block to be updated
	 * @param newblock carries the new generation stamp and length
	 * @param finalize presumably whether the replica should also be finalized
	 *                 after the update — semantics not visible here; confirm
	 *                 against the datanode implementation
	 * @throws IOException if the update fails on the remote datanode
	 */
	void updateBlock(Block oldblock, Block newblock, boolean finalize)
			throws IOException;
}
apache-2.0
googleapis/google-api-php-client-services
src/Drive/ReplyList.php
1689
<?php
/*
 * Copyright 2014 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

namespace Google\Service\Drive;

/**
 * Generated API model: a page of Drive comment replies.
 *
 * Extends \Google\Collection so the object can be iterated over its
 * `replies` items (identified by $collection_key).
 *
 * NOTE(review): the `$replies` property itself is not declared here — it is
 * presumably handled dynamically by the \Google\Collection base class using
 * the $repliesType/$repliesDataType hints; confirm before declaring it.
 */
class ReplyList extends \Google\Collection
{
  // Name of the property that holds the collection's items.
  protected $collection_key = 'replies';
  /**
   * @var string
   */
  public $kind;
  /**
   * @var string
   */
  public $nextPageToken;
  // Type hints used by the base class to (de)serialize the `replies` items.
  protected $repliesType = Reply::class;
  protected $repliesDataType = 'array';

  /**
   * @param string $kind
   */
  public function setKind($kind)
  {
    $this->kind = $kind;
  }
  /**
   * @return string
   */
  public function getKind()
  {
    return $this->kind;
  }
  /**
   * @param string $nextPageToken
   */
  public function setNextPageToken($nextPageToken)
  {
    $this->nextPageToken = $nextPageToken;
  }
  /**
   * @return string
   */
  public function getNextPageToken()
  {
    return $this->nextPageToken;
  }
  /**
   * @param Reply[] $replies
   */
  public function setReplies($replies)
  {
    $this->replies = $replies;
  }
  /**
   * @return Reply[]
   */
  public function getReplies()
  {
    return $this->replies;
  }
}

// Adding a class alias for backwards compatibility with the previous class name.
class_alias(ReplyList::class, 'Google_Service_Drive_ReplyList');
apache-2.0
jflory7/SFHSAPCompSci2015
src/main/java/com/justinwflory/assignments/spring/elevens-lab/BoardTester.java
1013
// Justin Flory's Code import java.util.List; /** * AP Computer Science A 2015 * * A simple BoardTester class * creates an ElevensBoard object and then prints the cards that were initially dealt to the board */ public class BoardTester { public static void main(String[] args) { ElevensBoard board = new ElevensBoard(); for (int i=0; i<board.boardSize(); i++) { board.deal(i); } // using the ElevensBoard toString System.out.println(); System.out.println("Printing the board using the toString() method"); System.out.println(board); // using the method you have to write below System.out.println(); System.out.println("Printing the board using the printCards() method"); printCards(board); } /** * Print all of the cards on the board. */ public static void printCards(ElevensBoard board) { List<Integer> cIndexes = board.cardIndexes(); for (int i : cIndexes) { System.out.println(board.cardAt(i)); } } }
apache-2.0