text
stringlengths
1
1.05M
#!/bin/sh
# This script performs bootstrap installation of upip package manager from PyPI
# All the other packages can be installed using it.
#
# Usage: bootstrap.sh [dest-dir]
#   dest-dir defaults to ~/.micropython/lib/
#
# Fixes over the original: abort on any failed step (otherwise a failed
# wget/tar silently "installs" nothing), and quote $TMPDIR so paths with
# spaces work.
set -e

saved="$PWD"

if [ -z "$1" ]; then
    dest=~/.micropython/lib/
else
    dest="$1"
fi

# Work in a scratch directory; honor $TMPDIR when set.
if [ -z "$TMPDIR" ]; then
    cd /tmp
else
    cd "$TMPDIR"
fi

# Remove any stale old version
rm -rf micropython-upip-*

# Fetch the sdist tarball linked from the PyPI project page.
wget -nd -rH -l1 -D files.pythonhosted.org https://pypi.org/project/micropython-upip/ --reject=html
tar xfz micropython-upip-*.tar.gz
tmpd="$PWD"

cd "$saved"
mkdir -p "$dest"
cp "$tmpd"/micropython-upip-*/upip*.py "$dest"

echo "upip is installed. To use:"
echo "micropython -m upip --help"
def addArrays(arr1, arr2):
    """Element-wise sum of two equal-length sequences.

    Args:
        arr1: first sequence of addable values.
        arr2: second sequence, same length as ``arr1``.

    Returns:
        A new list where each element is ``arr1[i] + arr2[i]``.

    Raises:
        ValueError: if the inputs differ in length.
    """
    if len(arr1) != len(arr2):
        raise ValueError("Arrays length do not match")
    return [a + b for a, b in zip(arr1, arr2)]
#!/usr/bin/env bash
# Power off the machine immediately (typically invoked by a provisioner
# at the end of an image build). Earlier approaches kept below for reference.
#echo 'shutdown -P now' > /tmp/shutdown.sh; echo '{{user `ssh_password`}}'|sudo -S sh '/tmp/shutdown.sh'
sudo /usr/sbin/shutdown -P now
#ssh -tt aemdesign@$(hostname) sudo shutdown -P now
import React from 'react'; import { Router, Route, Switch } from 'dva/router'; import FruitRoute from './routes/Fruit/Fruit'; import FruitFormRoute from './routes/Fruit/FruitForm'; function RouterConfig({ history }) { return ( <Router history={history}> <Switch> <Route path="/" exact component={FruitRoute} /> <Route path="/fruit" component={FruitRoute} /> <Route path="/fruitForm" component={FruitFormRoute} /> </Switch> </Router> ); } export default RouterConfig;
# Convenience dev loop: clear the terminal, print the source, then run it.
clear && cat closures.go && go run closures.go
def intersection(arr1, arr2):
    """Return the common elements of two ascending-sorted sequences.

    Classic two-pointer merge walk: advance whichever side holds the
    smaller value, collecting values present at both cursors. Elements
    duplicated in both inputs appear repeatedly in the result, matching
    the pairwise behaviour of the original implementation.

    Args:
        arr1: first sorted sequence.
        arr2: second sorted sequence.

    Returns:
        A list of the matched common elements, in order.
    """
    result = []
    i = j = 0
    n1, n2 = len(arr1), len(arr2)
    while i < n1 and j < n2:
        a, b = arr1[i], arr2[j]
        if a == b:
            result.append(a)
            i += 1
            j += 1
        elif a < b:
            i += 1
        else:
            j += 1
    return result
#!/bin/bash
# Usage:
# ./experiments/scripts/faster_rcnn_end2end.sh GPU NET DATASET [options args to {train,test}_net.py]
# DATASET is either pascal_voc or coco.
#
# Example:
# ./experiments/scripts/faster_rcnn_end2end.sh 0 VGG_CNN_M_1024 pascal_voc \
#   --set EXP_DIR foobar RNG_SEED 42 TRAIN.SCALES "[400, 500, 600, 700]"

# Echo each command and abort on the first failure.
set -x
set -e

export PYTHONUNBUFFERED="True"

GPU_ID=$1
NET=$2
NET_lc=${NET,,}
DATASET=$3

# Everything after the third positional argument is forwarded verbatim
# to the training tool; the slug replaces spaces with underscores so the
# extra args can be embedded in the log file name.
array=( $@ )
len=${#array[@]}
EXTRA_ARGS=${array[@]:3:$len}
EXTRA_ARGS_SLUG=${EXTRA_ARGS// /_}

case $DATASET in
  wider)
    TRAIN_IMDB="wider_train"
    TEST_IMDB="wider_test"
    PT_DIR="wider"
    ITERS=80000
    ;;
  *)
    echo "No dataset given"
    exit
    ;;
esac

# Mirror all stdout/stderr into a timestamped log file.
LOG="experiments/logs/faster_rcnn_end2end_${NET}_${EXTRA_ARGS_SLUG}.txt.`date +'%Y-%m-%d_%H-%M-%S'`"
exec &> >(tee -a "$LOG")
echo Logging output to "$LOG"

time ./tools/train_net.py --gpu ${GPU_ID} \
  --solver models/${PT_DIR}/${NET}/faster_rcnn_end2end/solver.prototxt \
  --weights data/imagenet_models/ResNet-101-model.caffemodel \
  --imdb ${TRAIN_IMDB} \
  --iters ${ITERS} \
  --cfg experiments/cfgs/faster_rcnn_end2end.yml \
  ${EXTRA_ARGS}

# Original test_net invocation kept for reference; validation is done by
# wider_val.py below instead.
#set +x
#NET_FINAL=`grep -B 1 "done solving" ${LOG} | grep "Wrote snapshot" | awk '{print $4}'`
#set -x
#time ./tools/test_net.py --gpu ${GPU_ID} \
#  --def models/${PT_DIR}/${NET}/faster_rcnn_end2end/test.prototxt \
#  --net ${NET_FINAL} \
#  --imdb ${TEST_IMDB} \
#  --cfg experiments/cfgs/faster_rcnn_end2end.yml \
#  ${EXTRA_ARGS}

time ./tools/wider_val.py --maxIter ${ITERS} --gpu ${GPU_ID}
# encoding: utf-8
require_relative '../spec_helper'

# Specs for SCB::API, the thin HTTP wrapper around the SCB statistics API.
# HTTP calls are stubbed throughout; no network traffic occurs.
describe "API" do
  subject { SCB::API }

  let(:api) { api_with_test_config(SCB::API.new) }
  let(:base_url) { "http://api.test/name/v0/lang/db" }
  let(:expected_uri) { URI.parse "#{base_url}/endpoint" }
  let(:fake_http_post) {
    # With UTF8 BOM, since the real API has this bug.
    ->(path, json){ fake_response("\xEF\xBB\xBF#{json}") }
  }

  describe "initialize" do
    it "takes a config" do
      subject.new("foo").config.must_equal "foo"
    end

    it "instantiates a default config" do
      subject.new.config.api_host.must_equal "api.scb.se"
    end
  end

  describe "get" do
    it "checks for nil responses" do
      api.stub(:http_get, nil) do
        api.get('nil').must_be_nil
      end
    end

    it "retrieves the data using a HTTP GET" do
      api.stub(:http_get, fake_response('foo')) do
        api.get.must_equal "foo"
      end
    end
  end

  describe "get_and_parse" do
    it "parses the returned body" do
      api.stub(:http_get, fixture_response('sv/ssd/BO')) do
        api.get_and_parse.last["id"].must_equal "BO0701"
      end
    end
  end

  describe "post" do
    it "unfortunately receives the body with an UTF8 BOM" do
      api.stub(:http_post, fake_http_post) do
        api.post(nil, { foo: "bar"}).
          must_equal "\xEF\xBB\xBF{\"foo\":\"bar\"}"
      end
    end

    it "strips the UTF8 BOM if the format is json" do
      api.stub(:http_post, fake_http_post) do
        query = { foo: "bar", response: { format: "json" } }
        api.post(nil, query).
          must_equal '{"foo":"bar","response":{"format":"json"}}'
      end
    end
  end

  describe "post_and_parse" do
    it "forces the format to be JSON" do
      api.stub(:http_post, fake_http_post) do
        query = { response: { format: "png" } }
        api.post_and_parse(nil, query).must_equal({ "response" => { "format" => "json" } })
      end
    end
  end

  describe "base_url" do
    it "returns the base URL" do
      api.base_url.must_equal base_url
    end
  end

  describe "uri" do
    it "it returns a URI pointing to the provided endpoint" do
      api.uri("endpoint").must_equal expected_uri
    end
  end

  describe "load_json" do
    it "can load JSON" do
      api.load_json('{"baz":[1,2,3]}')['baz'].first.must_equal 1
    end
  end

  describe "dump_json" do
    it "can dump JSON" do
      api.dump_json({ foo: 123}).must_equal '{"foo":123}'
    end
  end
end
<reponame>kv-zuiwanyuan/kudu
// Copyright 2015 Cloudera, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.kududb.client;

import com.google.common.collect.Lists;
import org.junit.Test;
import org.kududb.ColumnSchema;
import org.kududb.Type;
import org.kududb.tserver.Tserver;

import java.io.IOException;
import java.util.List;

import static org.junit.Assert.*;

/**
 * Round-trip test for {@code ColumnRangePredicate} serialization:
 * encodes a list of predicates to a byte array, decodes it back, and
 * verifies that every bound (and absence of a bound) survives intact.
 */
public class TestColumnRangePredicate {

  @Test
  public void testRawLists() {
    // Two columns of different types so both int and string bounds are covered.
    ColumnSchema col1 = new ColumnSchema.ColumnSchemaBuilder("col1", Type.INT32).build();
    ColumnSchema col2 = new ColumnSchema.ColumnSchemaBuilder("col2", Type.STRING).build();

    // pred1: lower bound only; pred2: upper bound only; pred3: both bounds.
    ColumnRangePredicate pred1 = new ColumnRangePredicate(col1);
    pred1.setLowerBound(1);

    ColumnRangePredicate pred2 = new ColumnRangePredicate(col1);
    pred2.setUpperBound(2);

    ColumnRangePredicate pred3 = new ColumnRangePredicate(col2);
    pred3.setLowerBound("aaa");
    pred3.setUpperBound("bbb");

    List<ColumnRangePredicate> preds = Lists.newArrayList(pred1, pred2, pred3);
    byte[] rawPreds = ColumnRangePredicate.toByteArray(preds);

    List<Tserver.ColumnRangePredicatePB> decodedPreds = null;
    try {
      decodedPreds = ColumnRangePredicate.fromByteArray(rawPreds);
    } catch (IllegalArgumentException e) {
      fail("Couldn't decode: " + e.getMessage());
    }

    assertEquals(3, decodedPreds.size());

    // pred1: only the lower bound was set.
    assertEquals(col1.getName(), decodedPreds.get(0).getColumn().getName());
    assertEquals(1, Bytes.getInt(Bytes.get(decodedPreds.get(0).getLowerBound())));
    assertFalse(decodedPreds.get(0).hasUpperBound());

    // pred2: only the upper bound was set.
    assertEquals(col1.getName(), decodedPreds.get(1).getColumn().getName());
    assertEquals(2, Bytes.getInt(Bytes.get(decodedPreds.get(1).getUpperBound())));
    assertFalse(decodedPreds.get(1).hasLowerBound());

    // pred3: both string bounds round-trip.
    assertEquals(col2.getName(), decodedPreds.get(2).getColumn().getName());
    assertEquals("aaa", Bytes.getString(Bytes.get(decodedPreds.get(2).getLowerBound())));
    assertEquals("bbb", Bytes.getString(Bytes.get(decodedPreds.get(2).getUpperBound())));
  }
}
<filename>open-sphere-base/core/src/main/java/io/opensphere/core/data/DataRegistryImpl.java package io.opensphere.core.data; import java.io.NotSerializableException; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import org.apache.log4j.Logger; import gnu.trove.iterator.TIntIterator; import gnu.trove.list.array.TIntArrayList; import gnu.trove.list.array.TLongArrayList; import io.opensphere.core.api.Envoy; import io.opensphere.core.api.Transformer; import io.opensphere.core.cache.Cache; import io.opensphere.core.cache.CacheDeposit; import io.opensphere.core.cache.CacheException; import io.opensphere.core.cache.CacheModificationListener; import io.opensphere.core.cache.CacheRemovalListener; import io.opensphere.core.cache.ClassProvider; import io.opensphere.core.cache.DefaultCacheModificationListener; import io.opensphere.core.cache.PropertyValueMap; import io.opensphere.core.cache.SingleSatisfaction; import io.opensphere.core.cache.accessor.GeometryAccessor; import io.opensphere.core.cache.accessor.IntervalPropertyAccessor; import io.opensphere.core.cache.accessor.PropertyAccessor; import io.opensphere.core.cache.matcher.IntervalPropertyMatcher; import io.opensphere.core.cache.matcher.PropertyMatcherUtilities; import io.opensphere.core.cache.mem.MemoryCache; import io.opensphere.core.cache.util.IntervalPropertyValueSet; import io.opensphere.core.cache.util.PropertyDescriptor; import io.opensphere.core.data.util.DataModelCategory; import io.opensphere.core.data.util.PropertyValueIdReceiver; import io.opensphere.core.data.util.PropertyValueReceiver; import io.opensphere.core.data.util.Query; import 
io.opensphere.core.data.util.QueryTracker; import io.opensphere.core.data.util.QueryTracker.QueryStatus; import io.opensphere.core.data.util.Satisfaction; import io.opensphere.core.util.Utilities; import io.opensphere.core.util.collections.CollectionUtilities; import io.opensphere.core.util.collections.New; import io.opensphere.core.util.lang.ImpossibleException; import io.opensphere.core.util.lang.Pair; import io.opensphere.core.util.lang.StringUtilities; /** * Registry for data models. Data models are produced by {@link Envoy}s and used * by {@link Transformer}s, as well as other plug-in components. */ @SuppressWarnings("PMD.GodClass") public class DataRegistryImpl implements DataRegistry { /** Failure message. */ private static final String CACHE_FAILURE_MSG = "Failed to retrieve objects from the cache: "; /** Logger reference. */ private static final Logger LOGGER = Logger.getLogger(DataRegistryImpl.class); /** The data cache. */ private final Cache myCache; /** * The collection of class providers the cache can use when deserializing * objects that are not found by the system class loader. */ private final List<ClassProvider> myClassProviders = Collections.synchronizedList(New.<ClassProvider>list()); /** The providers used for queries. */ private final List<CachingDataRegistryDataProvider> myDataProviders = new CopyOnWriteArrayList<>(); /** An executor for background tasks. */ private final ExecutorService myExecutor; /** The listener manager. */ private final DataRegistryListenerManager myListenerManager = new DataRegistryListenerManager(); /** The query manager. */ private final QueryManager myQueryManager = new QueryManager(mt -> performQuery(mt, false, false)); /** * Construct a data registry. * * @param executor The executor for background tasks. * @param cache The cache to use. 
*/ public DataRegistryImpl(ExecutorService executor, Cache cache) { myExecutor = executor; myCache = new MemoryCache(cache); myCache.setClassProvider(className -> { Class<?> theClass = null; synchronized (myClassProviders) { for (ClassProvider provider : myClassProviders) { theClass = provider.getClass(className); if (theClass != null) { break; } } } return theClass; }); } @Override public <T> void addChangeListener(DataRegistryListener<T> listener, DataModelCategory dataModelCategory, PropertyDescriptor<T> propertyDescriptor) { myListenerManager.addChangeListener(listener, dataModelCategory, propertyDescriptor); } @Override public void addClassProvider(ClassProvider provider) { myClassProviders.add(provider); } @Override public void addDataProvider(DataRegistryDataProvider dataProvider, ThreadPoolExecutor executor) { myDataProviders.add(new CachingDataRegistryDataProvider(dataProvider, executor, myCache)); } @Override public <T> long[] addModels(final CacheDeposit<T> insert) { return addModels(insert, (Object)null); } @Override public <T> long[] addModels(final CacheDeposit<T> insert, Object source) { Utilities.checkNull(insert, "insert"); DataModelCategory category = insert.getCategory(); Utilities.checkNull(category, "insert.getCategory()"); Utilities.checkNull(insert.getAccessors(), "insert.getAccessors()"); Utilities.checkNull(insert.getInput(), "insert.getInput()"); if (!insert.isNew()) { throw new IllegalArgumentException("Update passed to addModels(). 
Please use updateModels() instead."); } Utilities.checkNull(category.getSource(), "insert.getCategory().getSource()"); Utilities.checkNull(category.getFamily(), "insert.getCategory().getFamily()"); Utilities.checkNull(category.getCategory(), "insert.getCategory().getCategory()"); DefaultCacheModificationListener listener = new DefaultCacheModificationListener(); doAddOrUpdate(insert, source, listener); if (listener.getReports().isEmpty()) { return new long[0]; } return listener.getReports().iterator().next().getIds(); } @Override public void close() { myCache.close(); } @Override public DataModelCategory[] getDataModelCategories(long[] ids) { DataModelCategory[] result; try { result = myCache.getDataModelCategories(ids); } catch (CacheException e) { LOGGER.error("Failed to get data model categories: " + e, e); result = null; } return result; } @Override public Set<DataModelCategory> getDataModelCategories(long[] ids, boolean source, boolean family, boolean category) { Set<DataModelCategory> result; try { result = New.set(myCache.getDataModelCategoriesByModelId(ids, source, family, category)); } catch (CacheException e) { LOGGER.error("Failed to get data model categories: " + e, e); result = null; } return result; } @Override public long[] getPersistedSizes(long[] ids, PropertyDescriptor<?> propertyDescriptor) { long[] result; try { result = myCache.getValueSizes(ids, propertyDescriptor); } catch (CacheException e) { LOGGER.error("Failed to get persisted sizes: " + e, e); result = null; } return result; } @Override public boolean hasThreadCapacity(DataModelCategory dmc) { for (Iterator<CachingDataRegistryDataProvider> iter = getDataProviderIterator(dmc); iter.hasNext();) { CachingDataRegistryDataProvider provider = iter.next(); if (provider.getExecutorSaturation() < 1.) 
{ return true; } } return false; } @Override public int performLocalQuery(long[] ids, Query query) { final DefaultQueryTracker tracker = new DefaultQueryTracker(query, true); int result = 0; try { for (int index = query.getStartIndex(); index - query.getStartIndex() < query.getLimit(); index += query .getBatchSize()) { int lastIndex; lastIndex = Math.min(ids.length, index + query.getBatchSize()); lastIndex = Math.min(lastIndex, query.getStartIndex() + query.getLimit()); int count = doValueQuery(tracker, index, Arrays.copyOfRange(ids, index, lastIndex)); result += count; if (count < query.getBatchSize()) { break; } } } catch (RuntimeException | CacheException e) { LOGGER.error(CACHE_FAILURE_MSG + e, e); } return result; } @Override public long[] performLocalQuery(Query query) { QueryTracker tracker = startQuery(query, true, true); tracker.logException(); return tracker.getIds(); } @Override public QueryTracker performQuery(Query query) { QueryTracker tracker = startQuery(query, false, true); tracker.awaitCompletion(); return tracker; } @Override public void removeChangeListener(DataRegistryListener<?> listener) { myListenerManager.removeChangeListener(listener); } @Override public void removeClassProvider(ClassProvider provider) { myClassProviders.remove(provider); } @Override public void removeDataProvider(DataRegistryDataProvider dataProvider) { Collection<CachingDataRegistryDataProvider> toBeRemoved = New.collection(); for (CachingDataRegistryDataProvider dp : myDataProviders) { if (Utilities.sameInstance(dataProvider, dp.getDataProvider())) { toBeRemoved.add(dp); } } myDataProviders.removeAll(toBeRemoved); } @Override public long[] removeModels(DataModelCategory category, boolean returnIds) { return removeModels(category, returnIds, null); } @Override public long[] removeModels(DataModelCategory category, boolean returnIds, final Object source) { long[] ids; try { if (category == null || category.getCategory() == null && category.getFamily() == null && 
category.getSource() == null) { ids = returnIds ? myCache.getIds(category, null, null, 0, Integer.MAX_VALUE) : null; myCache.clear(); myListenerManager.notifyAllRemoved(source); } else { final TLongArrayList idList = returnIds ? new TLongArrayList() : null; myCache.clear(category, returnIds, new CacheRemovalListener() { @Override public void valuesRemoved(DataModelCategory dmc, long[] idsForGroup) { if (idList != null) { idList.add(idsForGroup); } myListenerManager.notifyRemoves(dmc, idsForGroup, source); } @Override public <T> void valuesRemoved(DataModelCategory dmc, long[] idsForGroup, PropertyDescriptor<T> desc, Iterable<? extends T> values) { myListenerManager.notifyRemoves(dmc, idsForGroup, desc, values, source); } }); ids = idList == null ? null : idList.toArray(); } } catch (CacheException e) { LOGGER.error(CACHE_FAILURE_MSG + e, e); ids = returnIds ? new long[0] : null; } catch (NotSerializableException e) { throw new ImpossibleException(e); } return ids; } @Override public void removeModels(long[] ids) { removeModels(ids, null); } @Override public void removeModels(long[] ids, final boolean waitForListeners, final Object source) { try { DataModelCategory[] dataModelCategories = myCache.getDataModelCategories(ids); // Construct a map of categories to lists of ids. These are // necessary for notifying listeners. 
Map<DataModelCategory, TLongArrayList> map = New.map(); for (int index = 0; index < ids.length; ++index) { CollectionUtilities.multiMapAdd(map, dataModelCategories[index], ids[index]); } final Collection<CountDownLatch> latches = New.collection(map.size() * 2); long t0 = System.nanoTime(); if (myListenerManager.isWantingRemovedObjects()) { for (Map.Entry<DataModelCategory, TLongArrayList> entry : map.entrySet()) { final DataModelCategory dataModelCategory = entry.getKey(); long[] idsForGroup = entry.getValue().toArray(); myCache.clear(idsForGroup, new CacheRemovalListener() { @Override public void valuesRemoved(DataModelCategory dmc, long[] idsRemoved) { } @Override public <T> void valuesRemoved(DataModelCategory dmc, long[] idsRemoved, PropertyDescriptor<T> desc, Iterable<? extends T> values) { CountDownLatch latch = myListenerManager.notifyRemoves(dataModelCategory, idsRemoved, desc, values, source); if (waitForListeners) { latches.add(latch); } } }); } } else { myCache.clear(ids); } if (LOGGER.isDebugEnabled()) { long et = System.nanoTime() - t0; LOGGER.debug(StringUtilities.formatTimingMessage("Time to remove " + ids.length + " ids from cache: ", et)); } // Notify listeners. for (Map.Entry<DataModelCategory, TLongArrayList> entry : map.entrySet()) { DataModelCategory dataModelCategory = entry.getKey(); long[] idsForGroup = entry.getValue().toArray(); CountDownLatch latch = myListenerManager.notifyRemoves(dataModelCategory, idsForGroup, source); if (waitForListeners) { latches.add(latch); } } for (CountDownLatch latch : latches) { waitForLatch(latch); } } catch (CacheException e) { LOGGER.error(CACHE_FAILURE_MSG + e, e); } } @Override public void removeModels(long[] ids, final Object source) { removeModels(ids, false, source); } /** * Set the number of bytes that can be used by my in-memory cache. * * @param bytes The bytes. 
*/
public void setInMemoryCacheSizeBytes(long bytes)
{
    // Best-effort: a cache failure is logged, not propagated to the caller.
    try
    {
        myCache.setInMemorySizeBytes(bytes);
    }
    catch (CacheException e)
    {
        LOGGER.warn("Failed to set cache size to " + bytes + " bytes: " + e, e);
    }
}

/** {@inheritDoc} Queries the local cache only (cacheOnly = true, asynchronous). */
@Override
public QueryTracker submitLocalQuery(Query query)
{
    return startQuery(query, true, false);
}

/** {@inheritDoc} Queries the cache and, if needed, the data providers (asynchronous). */
@Override
public QueryTracker submitQuery(Query query)
{
    return startQuery(query, false, false);
}

/** {@inheritDoc} Convenience overload with no change source. */
@Override
public <T> void updateModels(CacheDeposit<T> insert, CacheModificationListener listener)
{
    updateModels(insert, null, listener);
}

/**
 * {@inheritDoc} Validates the deposit and delegates to
 * {@link #doAddOrUpdate}. The deposit must be an update, not an add.
 */
@Override
public <T> void updateModels(CacheDeposit<T> insert, Object source, CacheModificationListener listener)
{
    Utilities.checkNull(insert, "insert");
    Utilities.checkNull(insert.getCategory(), "insert.getCategory()");
    Utilities.checkNull(insert.getAccessors(), "insert.getAccessors()");
    Iterable<? extends T> input = insert.getInput();
    Utilities.checkNull(input, "insert.getInput()");
    if (insert.isNew())
    {
        throw new IllegalArgumentException("Add passed to updateModels(). Please use addModels() instead.");
    }
    doAddOrUpdate(insert, source, listener);
}

/** {@inheritDoc} Convenience overload: no source, no early return. */
@Override
public <T> void updateModels(long[] ids, Collection<? extends T> input,
        Collection<? extends PropertyAccessor<? super T, ?>> accessors, CacheModificationListener listener)
{
    updateModels(ids, input, accessors, null, false, listener);
}

/**
 * {@inheritDoc} Updates property values for existing ids directly in the
 * cache. The input must either be a singleton (applied to every id) or
 * match the id array in size.
 */
@Override
public <T> void updateModels(final long[] ids, final Collection<? extends T> input,
        final Collection<? extends PropertyAccessor<? super T, ?>> accessors, final Object source, boolean returnEarly,
        final CacheModificationListener listener)
{
    Utilities.checkNull(ids, "ids");
    Utilities.checkNull(accessors, "accessors");
    if (input.size() != 1 && ids.length != input.size())
    {
        throw new IllegalArgumentException(
                "Either the input collection must be a singleton or must match the size of the id array.");
    }
    try
    {
        long t0 = System.nanoTime();
        // When returnEarly is requested, notifications run on myExecutor so
        // this call can return before listeners are informed.
        myCache.updateValues(ids, input, accessors, returnEarly ? myExecutor : null, cmr ->
        {
            myListenerManager.notifyAddsOrUpdates(cmr, ids, input, cmr.filterAccessors(accessors),
                    DataRegistryListenerManager.ChangeType.UPDATE, source);
            if (listener != null)
            {
                listener.cacheModified(cmr);
            }
        });
        if (LOGGER.isDebugEnabled())
        {
            long t1 = System.nanoTime();
            LOGGER.debug(StringUtilities.formatTimingMessage("Time to update " + ids.length + " values in cache: ", t1 - t0));
        }
    }
    catch (CacheException e)
    {
        LOGGER.error("Failed to cache data: " + e, e);
    }
    catch (NotSerializableException e)
    {
        LOGGER.error("Data was not serializable for input object [" + input + "]: " + e, e);
    }
}

/**
 * Deliver property values to property value receivers.
 *
 * @param <T> The type of the property values.
 * @param ids The requested ids.
 * @param startIndex The index of the first property value.
 * @param resultMap Map of property descriptions to property values.
 * @param failedIndices List of failed indices. These are indices into the
 *            property value lists.
 * @param receiver The object that will receive the values.
 */
protected <T> void deliverValuesToReceiver(long[] ids, int startIndex, PropertyValueMap resultMap,
        TIntArrayList failedIndices, PropertyValueReceiver<T> receiver)
{
    PropertyDescriptor<T> propertyDescriptor = receiver.getPropertyDescriptor();
    List<T> values = resultMap.getResultList(propertyDescriptor);

    // This is safe because the values in the result map must be the
    // same type as specified by the property descriptor from the
    // receiver.
    @SuppressWarnings("unchecked")
    PropertyValueReceiver<Object> cast = (PropertyValueReceiver<Object>)receiver;

    // Only send values to the receivers that are not in the failed
    // indices. Each failed index splits the value list into contiguous
    // runs that are delivered separately.
    int fromIndex = 0;
    int toIndex;
    for (TIntIterator iter = failedIndices.iterator(); iter.hasNext();)
    {
        toIndex = iter.next();
        if (fromIndex < toIndex)
        {
            if (cast instanceof PropertyValueIdReceiver)
            {
                long[] batch = new long[toIndex - fromIndex];
                System.arraycopy(ids, startIndex + fromIndex, batch, 0, batch.length);
                ((PropertyValueIdReceiver<Object>)cast).receive(batch, startIndex + fromIndex,
                        values.subList(fromIndex, toIndex));
            }
            else
            {
                cast.receive(values.subList(fromIndex, toIndex));
            }
        }
        fromIndex = toIndex + 1;
    }
    // Deliver the final run after the last failed index.
    toIndex = values.size();
    if (fromIndex < toIndex)
    {
        if (fromIndex > 0)
        {
            values = values.subList(fromIndex, toIndex);
        }
        if (cast instanceof PropertyValueIdReceiver)
        {
            long[] batch;
            if (startIndex + fromIndex == 0)
            {
                // No failures and no offset: the whole id array can be reused.
                batch = ids;
            }
            else
            {
                batch = new long[toIndex - fromIndex];
                System.arraycopy(ids, startIndex + fromIndex, batch, 0, batch.length);
            }
            ((PropertyValueIdReceiver<Object>)cast).receive(batch, startIndex + fromIndex, values);
        }
        else
        {
            cast.receive(values);
        }
    }
}

/**
 * Perform an add or update on the registry. Check the integrity of the
 * insert, update the persistent and in-memory caches, and update
 * subscribers.
 *
 * @param <T> The type of input objects.
 * @param insert The insert object.
 * @param source The originator of the change.
 * @param listener Optional listener for cache modification reports.
 */
protected <T> void doAddOrUpdate(final CacheDeposit<T> insert, final Object source, final CacheModificationListener listener)
{
    Collection<String> propertyKeys = New.set(insert.getAccessors().size());
    boolean foundGeometryAccessor = false;
    boolean foundIntervalAccessor = false;
    for (PropertyAccessor<? super T, ?> accessor : insert.getAccessors())
    {
        if (accessor instanceof GeometryAccessor)
        {
            if (foundGeometryAccessor)
            {
                throw new IllegalArgumentException("Cannot have more than one geometry accessor.");
            }
            foundGeometryAccessor = true;
            // A geometry accessor also counts as an interval accessor here.
            foundIntervalAccessor = true;
        }
        else if (accessor instanceof IntervalPropertyAccessor)
        {
            foundIntervalAccessor = true;
        }
        if (!propertyKeys.add(accessor.getPropertyDescriptor().getPropertyName()))
        {
            // NOTE(review): message typo "more than once accessor" — should read
            // "more than one accessor".
            throw new IllegalArgumentException(
                    "Cannot have more than one accessor for the same property name. Found more than once accessor for property ["
                            + accessor.getPropertyDescriptor().getPropertyName() + "]");
        }
    }
    propertyKeys = null;
    // A deposit with no input is only meaningful when it carries interval
    // information; otherwise there is nothing to do.
    if (!foundIntervalAccessor && !CollectionUtilities.hasContent(insert.getInput()))
    {
        return;
    }
    try
    {
        long t0 = System.nanoTime();
        final DataRegistryListenerManager.ChangeType changeType = insert.isNew() ? DataRegistryListenerManager.ChangeType.ADD
                : DataRegistryListenerManager.ChangeType.UPDATE;
        long[] ids = myCache.put(insert, cmr ->
        {
            myListenerManager.notifyAddsOrUpdates(cmr, cmr.getIds(), insert.getInput(),
                    cmr.filterAccessors(insert.getAccessors()), changeType, source);
            if (listener != null)
            {
                listener.cacheModified(cmr);
            }
        });
        if (LOGGER.isDebugEnabled())
        {
            long t1 = System.nanoTime();
            LOGGER.debug(
                    StringUtilities.formatTimingMessage("Time to add " + ids.length + " models to the cache: ", t1 - t0));
        }
    }
    catch (CacheException e)
    {
        LOGGER.error("Failed to cache data: " + e, e);
    }
    catch (NotSerializableException e)
    {
        LOGGER.error("Data of category [" + insert.getCategory() + "] was not serializable: " + e, e);
    }
}

/**
 * Perform a query to get the property values for some element ids.
 *
 * @param tracker The query tracker.
 * @param index The start index.
 * @param ids The array of numeric ids.
 * @return The number of ids that were matched.
 * @throws CacheException If there's a database error.
 */
protected int doValueQuery(MutableQueryTracker tracker, int index, long[] ids) throws CacheException
{
    int count = 0;
    long t0 = System.nanoTime();
    Collection<? extends PropertyValueReceiver<?>> receivers = tracker.getQuery().getPropertyValueReceivers();
    if (CollectionUtilities.hasContent(receivers))
    {
        count += retrievePropertyValues(tracker, index, ids, receivers);
        if (LOGGER.isDebugEnabled())
        {
            long t1 = System.nanoTime();
            LOGGER.debug(StringUtilities.formatTimingMessage("Time to retrieve " + receivers.size() * ids.length
                    + " property values for category " + tracker.getQuery().getDataModelCategory() + " from cache: ", t1 - t0));
        }
    }
    return count;
}

/**
 * Worker method that performs a query, interacting with a
 * {@link QueryTracker}.
 *
 * @param tracker The query tracker.
 * @param cacheOnly Flag indicating if only the cache should be queried.
 * @param synchronous Flag indicating if the query should be done on the
 *            current thread only.
 */
protected void performQuery(final MultiQueryTracker tracker, boolean cacheOnly, boolean synchronous)
{
    // Capture the unsatisfied intervals to be handled in this method. We
    // don't want unsatisfied intervals to be added in between determining
    // the cache satisfactions and determining the data provider
    // satisfactions, because that could result in making a query against
    // the data providers without checking the cache first.
    List<IntervalPropertyValueSet> unsatisfied = tracker.getUnsatisfied();
    final Collection<? extends MutableQueryTracker> cacheTrackers;
    try
    {
        cacheTrackers = determineCacheSatisfactions(tracker, unsatisfied);
    }
    catch (CacheException e)
    {
        // A cancelled query may legitimately fail mid-flight; don't report it.
        if (!tracker.isCancelled())
        {
            LOGGER.error(CACHE_FAILURE_MSG + e, e);
            tracker.setQueryStatus(QueryStatus.FAILED, e);
        }
        return;
    }
    catch (RuntimeException e)
    {
        LOGGER.error(CACHE_FAILURE_MSG + e, e);
        tracker.setQueryStatus(QueryStatus.FAILED, e);
        return;
    }
    catch (NotSerializableException e)
    {
        LOGGER.error("Query parameter was not serializable: " + e, e);
        tracker.setQueryStatus(QueryStatus.FAILED, e);
        return;
    }

    // If this is a cache-only query or the query is fully satisfied by the
    // cache, don't bother with the data providers.
    final Collection<Pair<CachingDataRegistryDataProvider, MutableQueryTracker>> dataProviders;
    if (cacheOnly || unsatisfied.isEmpty())
    {
        dataProviders = null;
    }
    else
    {
        validateQuery(tracker.getQuery());
        try
        {
            dataProviders = determineDataProviderSatisfactions(tracker, unsatisfied);
        }
        catch (RuntimeException e)
        {
            LOGGER.error("Failed to determine data provider satisfactions: " + e, e);
            tracker.setQueryStatus(QueryStatus.FAILED, e);
            return;
        }
    }
    if (tracker.isCancelled())
    {
        return;
    }
    if (!unsatisfied.isEmpty())
    {
        String msg = "No data provider found for query [" + tracker.getQuery() + "]";
        if (LOGGER.isDebugEnabled())
        {
            LOGGER.debug(msg);
        }
        // A cache-only query with leftovers is not an error; otherwise it is.
        tracker.setQueryStatus(QueryStatus.FAILED, cacheOnly ? null : new CacheException(msg));
        return;
    }

    // Wait to start the queries until all the sub-trackers are created so
    // the multi-query tracker knows when it's done.
    runQueries(cacheTrackers, dataProviders, synchronous, tracker.isIntervalQuery());
}

/**
 * Determine if the cache can satisfy any of a query. If the query has
 * multiple unsatisfied regions, multiple trackers may be returned.
 *
 * @param tracker The query tracker.
 * @param unsatisfied Input/output list of intervals to be satisfied. These
 *            should be a subset of the unsatisfied intervals in the query
 *            tracker. The intervals satisfied by created cache trackers
 *            will be removed from this list.
 * @return The sub-query trackers.
 * @throws CacheException If there's a problem accessing the cache.
 * @throws NotSerializableException If one of the parameter values is not
 *             serializable.
 */
private Collection<? extends MutableQueryTracker> determineCacheSatisfactions(MultiQueryTracker tracker,
        List<IntervalPropertyValueSet> unsatisfied) throws NotSerializableException, CacheException
{
    Collection<MutableQueryTracker> cacheTrackers;
    if (tracker.isIntervalQuery())
    {
        cacheTrackers = New.list();
        for (IntervalPropertyValueSet interval : New.collection(unsatisfied))
        {
            Collection<Collection<? extends IntervalPropertyMatcher<?>>> groupMatchers = PropertyMatcherUtilities
                    .getGroupMatchers(interval);
            for (Collection<? extends IntervalPropertyMatcher<?>> params : groupMatchers)
            {
                Collection<? extends Satisfaction> satisfactions = myCache
                        .getIntervalSatisfactions(tracker.getQuery().getDataModelCategory(), params);
                if (LOGGER.isTraceEnabled())
                {
                    LOGGER.trace(StringUtilities.concat("Cache satisfactions for ", tracker, " params ", params, " are ",
                            satisfactions));
                }
                if (!satisfactions.isEmpty())
                {
                    MutableQueryTracker subTracker = tracker.createSubTracker(true, satisfactions, params);
                    if (subTracker != null)
                    {
                        cacheTrackers.add(subTracker);
                    }
                    // Remove the satisfactions from the unsatisfied list
                    // even if the tracker was not added, because this means
                    // that a tracker was added by a different thread for
                    // the same satisfaction, and it still means we should
                    // not try to satisfy it.
                    for (Satisfaction satisfaction : satisfactions)
                    {
                        IntervalPropertyValueSet.subtract(unsatisfied, satisfaction.getIntervalPropertyValueSet());
                    }
                }
            }
        }
    }
    else if (!unsatisfied.isEmpty())
    {
        // Non-interval query: a single id lookup decides whether the cache
        // satisfies anything.
        long[] ids = myCache.getIds(tracker.getQuery().getDataModelCategory(), tracker.getParameters(),
                tracker.getQuery().getOrderSpecifiers(), tracker.getQuery().getStartIndex(),
                Math.min(tracker.getQuery().getLimit(), tracker.getQuery().getBatchSize()));
        MutableQueryTracker subTracker;
        if (ids.length == 0)
        {
            subTracker = null;
        }
        else
        {
            IdSatisfaction satisfaction = new IdSatisfaction(ids);
            subTracker = tracker.createSubTracker(true, Collections.singleton(satisfaction));
            IntervalPropertyValueSet.subtract(unsatisfied, satisfaction.getIntervalPropertyValueSet());
        }
        if (subTracker == null)
        {
            cacheTrackers = Collections.emptyList();
        }
        else
        {
            cacheTrackers = Collections.singleton(subTracker);
        }
    }
    else
    {
        cacheTrackers = Collections.emptyList();
    }
    return cacheTrackers;
}

/**
 * Determine what data providers can satisfy the current query.
 *
 * @param tracker The query tracker.
 * @param unsatisfied Input/output list of unsatisfied intervals.
 * @return A collection of pairs of data providers and sub-query trackers.
 */
@SuppressWarnings("PMD.SimplifiedTernary")
private Collection<Pair<CachingDataRegistryDataProvider, MutableQueryTracker>> determineDataProviderSatisfactions(
        MultiQueryTracker tracker, List<IntervalPropertyValueSet> unsatisfied)
{
    Collection<Pair<CachingDataRegistryDataProvider, MutableQueryTracker>> dataProviders = New.collection();
    // Iterate providers until everything is satisfied or providers run out.
    for (CachingDataRegistryDataProviderIterator dataProviderIter = getDataProviderIterator(
            tracker.getQuery().getDataModelCategory()); !unsatisfied.isEmpty() && dataProviderIter.hasNext();)
    {
        final CachingDataRegistryDataProvider dp = dataProviderIter.next();
        if (tracker.isCancelled())
        {
            break;
        }
        if (tracker.isIntervalQuery())
        {
            // Get the satisfactions from the data provider.
            Collection<? extends Satisfaction> satisfactions = dp.getSatisfaction(tracker.getQuery().getDataModelCategory(),
                    unsatisfied);
            MutableQueryTracker subTracker = CollectionUtilities.hasContent(satisfactions)
                    ? tracker.createSubTracker(false, satisfactions) : null;
            if (subTracker != null)
            {
                dataProviders.add(Pair.create(dp, subTracker));
            }
            for (Satisfaction satisfaction : satisfactions)
            {
                IntervalPropertyValueSet.subtract(unsatisfied, satisfaction.getIntervalPropertyValueSet());
            }
        }
        else
        {
            // Non-interval query: the first provider takes everything.
            MutableQueryTracker subTracker = tracker.createSubTracker(false,
                    SingleSatisfaction.generateSatisfactions(unsatisfied));
            if (subTracker != null)
            {
                dataProviders.add(Pair.create(dp, subTracker));
            }
            unsatisfied.clear();
        }
    }
    return dataProviders;
}

/**
 * Get an iterator over the data providers that are available for a category
 * of data.
 *
 * @param dataModelCategory The category of data.
 * @return The iterator.
 */
private CachingDataRegistryDataProviderIterator getDataProviderIterator(final DataModelCategory dataModelCategory)
{
    return new CachingDataRegistryDataProviderIterator(dataModelCategory, myDataProviders.iterator());
}

/**
 * Perform a query against the cache.
 *
 * @param tracker The query tracker.
 */
private void performCacheQuery(MutableQueryTracker tracker)
{
    long[] result = new long[0];
    try
    {
        // Page through the results in batches until the limit is reached or a
        // short batch indicates there are no more ids.
        for (int index = tracker.getQuery().getStartIndex(); index - tracker.getQuery().getStartIndex() < tracker.getQuery()
                .getLimit(); index += tracker.getQuery().getBatchSize())
        {
            if (tracker.isCancelled())
            {
                return;
            }
            long t0 = System.nanoTime();
            long[] ids;
            Satisfaction firstSatisfaction = CollectionUtilities.getItem(tracker.getSatisfactions(), 0);
            if (firstSatisfaction instanceof IdSatisfaction)
            {
                // The ids were already determined when the satisfaction was built.
                ids = ((IdSatisfaction)firstSatisfaction).getIds();
            }
            else
            {
                ids = myCache.getIds(tracker.getSatisfactions(), tracker.getParameters(),
                        tracker.getQuery().getOrderSpecifiers(), index,
                        Math.min(tracker.getQuery().getLimit(), tracker.getQuery().getBatchSize()));
            }
            long t1 = System.nanoTime();
            if (LOGGER.isDebugEnabled())
            {
                LOGGER.debug(StringUtilities.formatTimingMessage("Time to retrieve " + ids.length + " ids for category "
                        + tracker.getQuery().getDataModelCategory() + " from cache: ", t1 - t0));
            }
            if (ids.length > 0)
            {
                result = Utilities.concatenate(result, ids);
                tracker.addIds(result);
                doValueQuery(tracker, index, ids);
            }
            if (ids.length < tracker.getQuery().getBatchSize())
            {
                break;
            }
        }
        tracker.setQueryStatus(QueryStatus.SUCCESS, (Throwable)null);
    }
    catch (CacheException | RuntimeException e)
    {
        LOGGER.error(CACHE_FAILURE_MSG + e, e);
        tracker.setQueryStatus(QueryStatus.FAILED, e);
    }
    catch (NotSerializableException e)
    {
        LOGGER.error("Query parameter was not serializable: " + e, e);
        tracker.setQueryStatus(QueryStatus.FAILED, e);
    }
}

/**
 * Perform a cache query.
 *
 * @param cacheTracker The query tracker.
 * @param synchronous If the query should be done on the current thread.
 * @return {@code true} if the query is synchronous and the cache has
 *         results.
 */
private boolean performCacheQuery(final MutableQueryTracker cacheTracker, boolean synchronous)
{
    Runnable r = cacheTracker.wrapRunnable(() -> performCacheQuery(cacheTracker));
    if (synchronous)
    {
        r.run();
        return cacheTracker.getIds().length > 0;
    }
    // Asynchronous path: result availability cannot be reported yet.
    myExecutor.execute(r);
    return false;
}

/**
 * Get the property values associated with the given ids from the cache.
 *
 * @param tracker The query tracker.
 * @param startIndex The index of the first property value.
 * @param ids The ids for the models.
 * @param receivers The objects that define what properties are to be
 *            retrieved and also receive the retrieved properties.
 * @return The number of ids matched.
 * @throws CacheException If the properties cannot be retrieved.
 */
private int retrievePropertyValues(MutableQueryTracker tracker, int startIndex, long[] ids,
        Collection<? extends PropertyValueReceiver<?>> receivers) throws CacheException
{
    if (tracker.isCancelled())
    {
        return 0;
    }
    PropertyValueMap resultMap = new PropertyValueMap();
    for (PropertyValueReceiver<?> receiver : receivers)
    {
        resultMap.addResultList(receiver.getPropertyDescriptor(), ids.length);
    }
    TIntArrayList failedIndices = new TIntArrayList();
    try
    {
        myCache.getValues(ids, resultMap, failedIndices);
    }
    finally
    // deliver any retrieved values, even if there was an exception
    {
        for (PropertyValueReceiver<?> receiver : receivers)
        {
            deliverValuesToReceiver(ids, startIndex, resultMap, failedIndices, receiver);
        }
    }
    return resultMap.values().iterator().next().size();
}

/**
 * Run queries against the cache and data providers.
 *
 * @param cacheTrackers The cache query trackers.
 * @param dataProviders The optional data providers.
 * @param synchronous Flag indicating if the queries must be done on the
 *            current thread.
 * @param intervalQuery Flag indicating if this is an interval-based query.
 */
private void runQueries(Collection<? extends MutableQueryTracker> cacheTrackers,
        Collection<Pair<CachingDataRegistryDataProvider, MutableQueryTracker>> dataProviders, boolean synchronous,
        boolean intervalQuery)
{
    for (MutableQueryTracker cacheTracker : cacheTrackers)
    {
        performCacheQuery(cacheTracker, synchronous);
    }
    if (dataProviders != null)
    {
        // Once one provider yields results for a synchronous non-interval
        // query, the remaining providers are short-circuited as successful.
        boolean shortCircuit = false;
        for (Pair<CachingDataRegistryDataProvider, MutableQueryTracker> pair : dataProviders)
        {
            if (shortCircuit)
            {
                pair.getSecondObject().setQueryStatus(QueryStatus.SUCCESS, (Throwable)null);
            }
            else
            {
                pair.getFirstObject().query(pair.getSecondObject(), intervalQuery, myListenerManager);
                if (synchronous && !intervalQuery && pair.getSecondObject().awaitCompletion().length > 0)
                {
                    shortCircuit = true;
                }
            }
        }
    }
}

/**
 * Helper method that initiates a query.
 *
 * @param query The query.
 * @param cacheOnly If the query is for the local cache only.
 * @param synchronous If the query is to be done on the current thread.
 * @return The query tracker.
 */
private QueryTracker startQuery(Query query, final boolean cacheOnly, final boolean synchronous)
{
    final MultiQueryTracker tracker;
    if (cacheOnly)
    {
        tracker = new MultiQueryTracker(query, null);
    }
    else
    {
        // Submit the query to the query manager. This will check for other
        // concurrent queries that overlay this query. If any are found,
        // slave trackers will be added to the tracker that comes
        // back from the query manager. The slave trackers listen for the
        // other concurrent queries to finish, and trigger this query to be
        // resubmitted when they do.
        tracker = myQueryManager.submitQuery(query);
        if (!tracker.hasUnsatisfied())
        {
            return tracker;
        }
    }
    Runnable runner = tracker.wrapRunnable(() -> performQuery(tracker, cacheOnly, synchronous));
    if (synchronous)
    {
        runner.run();
    }
    else
    {
        myExecutor.execute(runner);
    }
    return tracker;
}

/**
 * Check a query to see if it can be supported.
 *
 * @param query The query.
 */
private void validateQuery(Query query)
{
    if (query.getStartIndex() != 0)
    {
        throw new UnsupportedOperationException("Queries are not currently supported with a start index > 0.");
    }
    if (query.getLimit() != Integer.MAX_VALUE)
    {
        throw new UnsupportedOperationException("Queries are not currently supported with a limit < 2147483647.");
    }
}

/**
 * Wait for a latch, or report an error if interrupted.
 *
 * @param latch The latch.
 */
private void waitForLatch(CountDownLatch latch)
{
    try
    {
        if (!latch.await(1, TimeUnit.SECONDS))
        {
            LOGGER.warn("Time expired waiting for latch.");
        }
    }
    catch (InterruptedException e)
    {
        LOGGER.warn("Interrupted while waiting for latch: " + e, e);
    }
}

/**
 * Implementation of {@link io.opensphere.core.data.util.Satisfaction} that
 * tracks the ids that satisfy the query.
 */
protected static class IdSatisfaction extends SingleSatisfaction
{
    /** The ids. */
    private final long[] myIds;

    /**
     * Constructor.
     *
     * @param ids The ids.
     */
    public IdSatisfaction(long[] ids)
    {
        super(null);
        // Defensive copy: callers may mutate their array afterwards.
        myIds = ids.clone();
    }

    /**
     * The ids.
     *
     * @return The ids.
     */
    public long[] getIds()
    {
        // Defensive copy on the way out as well.
        return myIds.clone();
    }
}
}
package calc

/**
 * Which of these statements are truths and which are lies?
 *
 * 1. Statement 2 and Statement 5 are either both truths or both lies.
 * 2. Statement 3 and Statement 5 are either both truths or both lies.
 * 3. Exactly two of the statements are truths.
 * 4. Statement 1 and Statement 2 are either both truths or both lies.
 * 5. Statement 3 is a lie.
 *
 * Enumerates all 32 truth assignments, evaluates each rule against each
 * assignment, and prints every assignment where all five claimed values
 * agree with the evaluated values.
 */
object Calc37 extends App {

  // "Both truths or both lies" is simply boolean equality.
  def rule1(seq: Seq[Boolean]): Boolean = seq(1) == seq(4)

  def rule2(seq: Seq[Boolean]): Boolean = seq(2) == seq(4)

  def rule3(seq: Seq[Boolean]): Boolean = seq.count(identity) == 2

  def rule4(seq: Seq[Boolean]): Boolean = seq(0) == seq(1)

  def rule5(seq: Seq[Boolean]): Boolean = !seq(2)

  val rules = Seq(rule1 _, rule2 _, rule3 _, rule4 _, rule5 _)

  // Every combination of five boolean statements, in the same order the
  // nested generators produce them.
  val inputs: Seq[Seq[Boolean]] = for {
    s1 <- Seq(true, false)
    s2 <- Seq(true, false)
    s3 <- Seq(true, false)
    s4 <- Seq(true, false)
    s5 <- Seq(true, false)
  } yield Seq(s1, s2, s3, s4, s5)

  // Pair each claimed truth value with the value its rule actually evaluates to.
  val results: Seq[Seq[(Boolean, Boolean)]] =
    inputs.map(assignment => assignment.zip(rules.map(rule => rule(assignment))))

  // A consistent assignment is one where every claim matches its evaluation.
  results
    .filter(result => result.forall { case (claimed, evaluated) => claimed == evaluated })
    .foreach(println)
}
// written by <NAME> 2008 - 2014
// this work is public domain. the code is undocumented, scruffy, untested, and should generally not be used for anything important.
// i do not offer support, so don't ask. to be used for inspiration :)

#include "animator.hpp"

#include "managers/usereventmanager.hpp"
#include "managers/resourcemanagerpool.hpp"

#include "scene/objects/geometry.hpp"
#include "scene/objectfactory.hpp"

#include "utils/objectloader.hpp"

#include "gui/gui.hpp"

#include "blunted.hpp"

// Constructor: builds the whole editor scene (studio, camera, debug pilons,
// player and ball models), wires up the GUI (buttons, timeline, captions),
// and seeds a fresh Animation with frame-0 keyframes for every player node.
Animator::Animator(boost::shared_ptr<Scene2D> scene2D, boost::shared_ptr<Scene3D> scene3D, GuiInterface *guiInterface) : scene2D(scene2D), scene3D(scene3D), guiInterface(guiInterface) {
  camera = static_pointer_cast<Camera>(ObjectFactory::GetInstance().CreateObject("camera", e_ObjectType_Camera));
  scene3D->CreateSystemObjects(camera);

  ObjectLoader loader;
  objectNode = boost::intrusive_ptr<Node>(new Node("the world!"));
  objectNode->AddNode(loader.LoadObject(scene3D, "media/objects/studio/studio.object"));

  // debug pilon (green marker geometry, positioned in absolute coordinates)
  boost::intrusive_ptr < Resource<GeometryData> > geometry = ResourceManagerPool::GetInstance().GetManager<GeometryData>(e_ResourceType_GeometryData)->Fetch("media/objects/helpers/green.ase", true);
  greenPilon = static_pointer_cast<Geometry>(ObjectFactory::GetInstance().CreateObject("greenPilon", e_ObjectType_Geometry));
  scene3D->CreateSystemObjects(greenPilon);
  greenPilon->SetGeometryData(geometry);
  greenPilon->SetLocalMode(e_LocalMode_Absolute);
  scene3D->AddObject(greenPilon);

  // second debug pilon (blue)
  geometry = ResourceManagerPool::GetInstance().GetManager<GeometryData>(e_ResourceType_GeometryData)->Fetch("media/objects/helpers/blue.ase", true);
  bluePilon = static_pointer_cast<Geometry>(ObjectFactory::GetInstance().CreateObject("bluePilon", e_ObjectType_Geometry));
  scene3D->CreateSystemObjects(bluePilon);
  bluePilon->SetGeometryData(geometry);
  bluePilon->SetLocalMode(e_LocalMode_Absolute);
  scene3D->AddObject(bluePilon);

  // camera looking down at the player from behind/above
  camera->Init();
  Quaternion rot;
  rot.SetAngleAxis(0.42 * pi, Vector3(1, 0, 0));
  camera->SetRotation(rot);
  camera->SetPosition(Vector3(0, -4, 1));
  camera->SetFOV(44);
  camera->SetCapping(0.2, 120.0);
  cameraNode = boost::intrusive_ptr<Node>(new Node("camera"));
  cameraNode->AddObject(camera);
  objectNode->AddNode(cameraNode);

  // player model; nodeMap indexes every node in its hierarchy by name
  playerNode = loader.LoadObject(scene3D, "./media/objects/players/player.object");
  playerNode->SetName("player");
  playerNode->SetPosition(Vector3(0, 0, 0));
  objectNode->AddNode(playerNode);
  FillNodeMap(playerNode, nodeMap);

  ballNode = loader.LoadObject(scene3D, "./media/objects/balls/generic.object");
  ballNode->SetName("ball");
  objectNode->AddNode(ballNode);

  currentDir = "./media/animations";

  //rot.SetAngleAxis(-0.3 * pi, Vector3(0, 0, 1));
  //objectNode->SetRotation(rot);
  scene3D->AddNode(objectNode);

  // interface
  /* GuiButton *button = new GuiButton(scene2D, "testButton", 0, 0, 20, 5, "test!"); guiInterface->AddView(button); button = new GuiButton(scene2D, "testButton2", 0, 5, 20, 10, "ook een test.."); guiInterface->AddView(button); button = new GuiButton(scene2D, "testButton3", 0, 10, 20, 15, "jawohl dat ist"); guiInterface->AddView(button); */
  GuiButton *button = new GuiButton(scene2D, "button_load", 0, 0, 20, 5, "Load animation");
  guiInterface->AddView(button);
  button = new GuiButton(scene2D, "button_save", 0, 5, 20, 10, "Save animation");
  guiInterface->AddView(button);

  timeline = new GuiTimeline(scene2D, "motionTimeline", 0, 75, 100, 100);
  guiInterface->AddView(timeline);

  //playerNode->PrintTree();

  // fresh animation; register the player root and every child node as
  // timeline parts with a keyframe at frame 0
  animation = new Animation;
  timeline->AddPart("player", "player");
  managedNodes.insert(std::pair < std::string, boost::intrusive_ptr<Node> >("player", playerNode));
  Vector3 position = playerNode->GetPosition(); // initial position
  animation->SetKeyFrame("player", 0, Quaternion(QUATERNION_IDENTITY), position);
  AddNodeToTimeline(playerNode, timeline);

  // football extension tracks ball keyframes alongside the skeleton
  boost::shared_ptr<FootballAnimationExtension> extension(new FootballAnimationExtension(animation));
  animation->AddExtension("football", extension);
  timeline->AddPart("ball", "ball");
  //animation->GetExtension("football")->SetKeyFrame(0, Vector3(0), Vector3(0), 1.0);

  // HUD captions: frame index and three debug readouts
  timelineIndex = new GuiCaption(scene2D, "caption_timelineIndex", 90, 0, 100, 10, "0/0");
  guiInterface->AddView(timelineIndex);
  debugValues1 = new GuiCaption(scene2D, "caption_debugValues1", 80, 10, 100, 15, "");
  guiInterface->AddView(debugValues1);
  debugValues2 = new GuiCaption(scene2D, "caption_debugValues2", 80, 15, 100, 20, "");
  guiInterface->AddView(debugValues2);
  debugValues3 = new GuiCaption(scene2D, "caption_debugValues3", 80, 20, 100, 25, "");
  guiInterface->AddView(debugValues3);

  // current animation name
  caption_animName = new GuiCaption(scene2D, "caption_animName", 0, 0, 100, 5, "");
  guiInterface->AddView(caption_animName);

  studioRot = 0;

  play = false;
  currentPlayFrame = 0;

  PopulateTimeline(animation, timeline);

  currentFile = "untitled.anim";
  caption_animName->Set(currentFile);

  counter = 0;
}

// Destructor: tears down owned scene objects and releases smart pointers.
Animator::~Animator() {
  delete animation;
  scene3D->DeleteObject(greenPilon);
  scene3D->DeleteObject(bluePilon);
  scene3D->DeleteNode(objectNode);
  camera.reset();
  objectNode.reset();
  playerNode.reset();
  scene2D.reset();
  scene3D.reset();
}

// Offset from the foot node to the "front of foot" contact point; grows
// with velocity (free function, not a member).
Vector3 GetFrontOfFootOffset(float velocity) {
  Vector3 ffo = Vector3(0, -0.2, 0); // basic ffo
  ffo += Vector3(0, -velocity / 35.0, 0);
  return ffo;
}

// Recursively indexes targetNode and all of its descendants by node name.
// NOTE(review): duplicate names would silently be dropped by map::insert.
void Animator::FillNodeMap(boost::intrusive_ptr<Node> targetNode, std::map < const std::string, boost::intrusive_ptr<Node> > &nodeMap) {
  nodeMap.insert(std::pair < std::string, boost::intrusive_ptr<Node> >(targetNode->GetName(), targetNode));
  std::vector < boost::intrusive_ptr<Node> > gatherNodes;
  targetNode->GetNodes(gatherNodes);
  for (int i = 0; i < (signed int)gatherNodes.size(); i++) {
    FillNodeMap(gatherNodes.at(i), nodeMap);
  }
}

// Recursively registers each child of 'node' as a timeline part and seeds
// the animation with a frame-0 keyframe using hard-coded rest-pose angles
// for known limb names (all other nodes get a zero rotation).
void Animator::AddNodeToTimeline(boost::intrusive_ptr<Node> node, GuiTimeline *timeline) {
  std::vector < boost::intrusive_ptr<Node> > nodes;
  node->GetNodes(nodes);
  for (int i = 0; i < (signed int)nodes.size(); i++) {
    timeline->AddPart(nodes.at(i)->GetName(), nodes.at(i)->GetName());
    managedNodes.insert(std::pair < std::string, boost::intrusive_ptr<Node> >(nodes.at(i)->GetName(), nodes.at(i)));
    Vector3 position = nodes.at(i)->GetPosition(); // initial position
    Vector3 angles; // initial angles (rest pose, in units of pi — presumably; TODO confirm against Quaternion::SetAngleAxis usage elsewhere)
    if (nodes.at(i)->GetName() == "left_shoulder") angles.Set(0.1, -0.25, -0.15);
    if (nodes.at(i)->GetName() == "right_shoulder") angles.Set(0.1, 0.25, 0.15);
    if (nodes.at(i)->GetName() == "left_elbow") angles.Set(-0.2, 0.0, 0.0);
    if (nodes.at(i)->GetName() == "right_elbow") angles.Set(-0.2, -0.0, 0.0);
    if (nodes.at(i)->GetName() == "left_thigh") angles.Set(-0.06, -0.04, 0);
    if (nodes.at(i)->GetName() == "right_thigh") angles.Set(-0.06, 0.04, 0);
    if (nodes.at(i)->GetName() == "left_knee") angles.Set(0.18, 0, 0);
    if (nodes.at(i)->GetName() == "right_knee") angles.Set(0.18, 0, 0);
    if (nodes.at(i)->GetName() == "left_ankle") angles.Set(-0.12, 0, 0);
    if (nodes.at(i)->GetName() == "right_ankle") angles.Set(-0.12, 0, 0);
    // compose per-axis rotations into one quaternion (X, then Y, then Z)
    Quaternion rotX, rotY, rotZ, quat;
    rotX.SetAngleAxis(angles.coords[0], Vector3(1, 0, 0));
    rotY.SetAngleAxis(angles.coords[1], Vector3(0, 1, 0));
    rotZ.SetAngleAxis(angles.coords[2], Vector3(0, 0, 1));
    quat = rotX * rotY * rotZ;
    animation->SetKeyFrame(nodes.at(i)->GetName(), 0, quat, position);
    AddNodeToTimeline(nodes.at(i), timeline);
  }
}

// Marks every keyframe of every node animation (and of every extension,
// e.g. the football/ball track) on the GUI timeline, then redraws it.
void Animator::PopulateTimeline(Animation *animation, GuiTimeline *timeline) {
  std::vector<NodeAnimation*> &nodeAnimations = animation->GetNodeAnimations();

  // iterate nodes
  int animSize = nodeAnimations.size();
  for (int i = 0; i < animSize; i++) {
    NodeAnimation *nodeAnimation = nodeAnimations.at(i);
    std::map<int, KeyFrame>::iterator animIter = nodeAnimation->animation.begin();
    while (animIter != nodeAnimation->animation.end()) {
      timeline->EnableKeyFrame(nodeAnimations.at(i)->nodeName, animIter->first);
      animIter++;
    }
  }

  // extensions continue part numbering where the node animations left off
  int i = animSize;
  std::map < std::string, boost::shared_ptr<AnimationExtension> >::iterator extensionIter = animation->GetExtensions().begin();
  while (extensionIter != animation->GetExtensions().end()) {
    //extensionIter->second->PopulateTimeline(i, timeline);
    //void FootballAnimationExtension::PopulateTimeline(int part, GuiTimeline *timeline) {
    std::map<int, FootballKeyFrame>::iterator animIter = boost::static_pointer_cast<FootballAnimationExtension>(extensionIter->second)->GetAnimation().begin();
    while (animIter != boost::static_pointer_cast<FootballAnimationExtension>(extensionIter->second)->GetAnimation().end()) {
      timeline->EnableKeyFrame(i, animIter->first);
      animIter++;
    }
    extensionIter++;
    i++;
  }

  timeline->Redraw();
}

//!
// adds touches around main touch
// debug: adds touches around main touch
//
// Finds the body part closest to the first recorded ball touch, then stamps
// extra ball keyframes before and after the touch frame so the ball follows
// that body part. The pre/post range depends on the animation "type"
// variable and is widened for high balls (heightCheat).
void AddExtraTouches(Animation* animation, boost::intrusive_ptr<Node> playerNode, const std::list < boost::intrusive_ptr<Object> > &bodyParts, const std::map < const std::string, boost::intrusive_ptr<Node> > &nodeMap) {
  Vector3 animBallPos;
  int animTouchFrame;
  bool isTouch = boost::static_pointer_cast<FootballAnimationExtension>(animation->GetExtension("football"))->GetFirstTouch(animBallPos, animTouchFrame);
  if (isTouch) {
    // find out what body part the balltouchpos is closest to
    animation->Apply(nodeMap, animTouchFrame, 0, false);
    boost::intrusive_ptr<Object> closestBodyPart = (*bodyParts.begin());
    float closestDistance = 100;
    Vector3 toBallVector = Vector3(0);
    std::list < boost::intrusive_ptr<Object> > ::const_iterator iter = bodyParts.begin();
    while (iter != bodyParts.end()) {
      float distance = (animBallPos - (*iter)->GetDerivedPosition()).GetLength();
      if (distance < closestDistance) {
        closestDistance = distance;
        closestBodyPart = *iter;
        toBallVector = animBallPos - (*iter)->GetDerivedPosition();
      }
      iter++;
    }

    //printf("closest: %s\n", closestBodyPart->GetName().c_str());

    // widen the keyframe range for balls above ~chest height
    float heightCheat = 1.0;
    if (animBallPos.coords[2] > 0.8) heightCheat = 1.6;
    int range_pre = int(round(2.f * heightCheat));
    int range_post = int(round(4.f * heightCheat));
    if (animation->GetVariable("type") == "trap" || animation->GetVariable("type") == "interfere") {
      range_pre = int(round(4.f * heightCheat));
      range_post = int(round(4.f * heightCheat));
    }
    else if (animation->GetVariable("type") == "deflect") {
      range_pre = int(round(4.f * heightCheat));
      range_post = int(round(6.f * heightCheat));
    }
    else if (animation->GetVariable("type") == "sliding") {
      range_pre = int(round(6.f * heightCheat));
      range_post = int(round(6.f * heightCheat));
    }
    else if (animation->GetVariable("type") == "ballcontrol") {
      range_pre = int(round(2.f * heightCheat));
      range_post = int(round(6.f * heightCheat));
    }
    //range_pre *= 0.6;
    int frameOffset = 4; // correct for animation smoothing: player limbs always seem to be late at ballposition otherwise
    // drop the original single-touch keyframe before laying down the spread
    if (animTouchFrame + frameOffset - range_pre <= animation->GetFrameCount()) {
      boost::static_pointer_cast<FootballAnimationExtension>(animation->GetExtension("football"))->DeleteKeyFrame(animTouchFrame);
    }
    // stamp a ball keyframe every 2 frames across [touch - pre, touch + post]
    for (int i = animTouchFrame - range_pre; i < animTouchFrame + range_post + 1; i += 2) {
      if (i >= 0 && /*i != animTouchFrame &&*/ i < animation->GetFrameCount() - frameOffset - 1) {
        // set animation to this frame
        animation->Apply(nodeMap, i, 0, false);
        // find new ball position, based on the closest body part's position in this frame
        Vector3 position = closestBodyPart->GetDerivedPosition() + toBallVector;
        Vector3 origBodyPos = nodeMap.find("player")->second->GetDerivedPosition();
        animation->Apply(nodeMap, i + frameOffset, 0, false);
        Vector3 futureBodyPos = nodeMap.find("player")->second->GetDerivedPosition();
        //origBodyPos.Print();
        // compensate for body movement over the smoothing offset window
        Vector3 diff2D = (futureBodyPos - origBodyPos).Get2D();
        Quaternion orientation;
        //Vector3 position = animBallPos + (animBallPos.Get2D() * (-animTouchFrame + i) * 0.05);
        boost::static_pointer_cast<FootballAnimationExtension>(animation->GetExtension("football"))->SetKeyFrame(i + frameOffset, orientation, position + diff2D, 0);
      }
    }
  }
}

// Intentionally empty phase hook.
void Animator::GetPhase() {
}

// Main per-frame editor update: polls GUI events (load/save dialogs,
// timeline edits) and applies them. (Continues beyond this chunk.)
void Animator::ProcessPhase() {
  std::string partName;
  int currentFrame;
  bool isKeyFrame;
  timeline->GetLocation(partName, 
currentFrame, isKeyFrame); //test->SetPosition(Vector3(1, -0.01, 0.47)); bool newMessage = true; while (newMessage) { GuiSignal signal = guiInterface->signalQueue.GetMessage(newMessage); if (newMessage) { printf("Get Message: %s\n", signal.sender->GetName().c_str()); if (signal.sender->GetName() == "button_save" && signal.key == SDLK_RETURN) { saveDialog = new GuiFileDialog(scene2D, "dialog_save", 30, 10, 70, 90, currentDir, currentFile.substr(currentFile.find_last_of("/") + 1, std::string::npos)); guiInterface->AddView(saveDialog); guiInterface->SetFocussedView(saveDialog); } else if (signal.sender->GetName() == "dialog_save_CancelButton" && signal.key == SDLK_RETURN) { guiInterface->DeleteView(saveDialog); saveDialog = 0; } else if (signal.sender->GetName() == "dialog_save_OkayButton" && signal.key == SDLK_RETURN) { currentFile = saveDialog->GetFilename(); currentDir = saveDialog->GetDirectory(); //printf("loading %s\n", currentFile.c_str()); animation->Save(currentFile); guiInterface->DeleteView(saveDialog); saveDialog = 0; } else if (signal.sender->GetName() == "button_load" && signal.key == SDLK_RETURN) { loadDialog = new GuiFileDialog(scene2D, "dialog_load", 30, 10, 70, 90, currentDir, currentFile.substr(currentFile.find_last_of("/") + 1, std::string::npos)); guiInterface->AddView(loadDialog); guiInterface->SetFocussedView(loadDialog); } else if (signal.sender->GetName() == "dialog_load_CancelButton" && signal.key == SDLK_RETURN) { guiInterface->DeleteView(loadDialog); loadDialog = 0; } else if (signal.sender->GetName() == "dialog_load_OkayButton" && signal.key == SDLK_RETURN) { currentFile = loadDialog->GetFilename(); currentDir = loadDialog->GetDirectory(); //printf("loading %s\n", currentFile.c_str()); animation->Reset(); delete animation; animation = new Animation; boost::shared_ptr<FootballAnimationExtension> extension(new FootballAnimationExtension(animation)); animation->AddExtension("football", extension); animation->Load(currentFile); 
//animation->ConvertToStartFacingForwardIfIdle(); //animation->ConvertAngles(); //animation->Invert(); /* // debug std::list < boost::intrusive_ptr<Object> > bodyParts; playerNode->GetObjects(e_ObjectType_Geometry, bodyParts, true); std::map < const std::string, boost::intrusive_ptr<Node> > nodeMap; FillNodeMap(playerNode, nodeMap); AddExtraTouches(animation, playerNode, bodyParts, nodeMap); //! */ timeline->ClearKeys(); PopulateTimeline(animation, timeline); //animation->Hax(); printf("angle: %f\n", animation->GetOutgoingAngle()); printf("body angle: %f\n", animation->GetOutgoingBodyAngle()); float velocity = animation->GetIncomingMovement().GetLength(); std::string mode; if (velocity < 1.8) mode = "idle"; else if (velocity >= 1.8 && velocity < 4.2) mode = "dribble"; else if (velocity >= 4.2 && velocity < 6.0) mode = "walk"; else if (velocity >= 6.0) mode = "sprint"; printf("%s - ", mode.c_str()); velocity = animation->GetOutgoingMovement().GetLength(); if (velocity < 1.8) mode = "idle"; else if (velocity >= 1.8 && velocity < 4.2) mode = "dribble"; else if (velocity >= 4.2 && velocity < 6.0) mode = "walk"; else if (velocity >= 6.0) mode = "sprint"; printf("%s\n", mode.c_str()); guiInterface->DeleteView(loadDialog); loadDialog = 0; } else if (signal.sender->GetName() == "motionTimeline") { if (signal.key == SDLK_INSERT) { // insert new frame (move all keyframes from this frame on to the 'right') printf("shifting..\n"); animation->Shift(currentFrame, +1); timeline->ClearKeys(); PopulateTimeline(animation, timeline); } if (signal.key == SDLK_BACKSPACE) { // insert new frame (move all keyframes from this frame on to the 'left', erasing current keyframes) printf("shifting..\n"); animation->Shift(currentFrame, -1); timeline->ClearKeys(); PopulateTimeline(animation, timeline); } // is this a normal node? 
if (managedNodes.find(partName) != managedNodes.end()) { if (signal.key == SDLK_DELETE && isKeyFrame && currentFrame != 0) { static_cast<GuiTimeline*>(signal.sender)->ToggleKeyFrame(); animation->DeleteKeyFrame(partName, currentFrame); } if (signal.key == SDLK_F4) { animation->Hax(); } if (signal.key == SDLK_q || signal.key == SDLK_w || signal.key == SDLK_a || signal.key == SDLK_s || signal.key == SDLK_z || signal.key == SDLK_x || signal.key == SDLK_e || signal.key == SDLK_d || signal.key == SDLK_c || signal.key == SDLK_i || signal.key == SDLK_k || signal.key == SDLK_j || signal.key == SDLK_l || signal.key == SDLK_u || signal.key == SDLK_o || signal.key == SDLK_0) { Quaternion orientation; Vector3 rotationAngles, position; int adaptedCurrentFrame = currentFrame; if (adaptedCurrentFrame > animation->GetFrameCount() - 1) adaptedCurrentFrame = animation->GetFrameCount() - 1; animation->GetKeyFrame(partName, adaptedCurrentFrame, orientation, position); orientation.GetAngles(rotationAngles.coords[0], rotationAngles.coords[1], rotationAngles.coords[2]); if (!isKeyFrame) { static_cast<GuiTimeline*>(signal.sender)->ToggleKeyFrame(); //position = managedNodes.find(partName)->second->GetPosition(); } float rotStep = pi * 0.01; if (signal.key == SDLK_q) rotationAngles.coords[0] -= rotStep; if (signal.key == SDLK_e) rotationAngles.coords[0] += rotStep; if (signal.key == SDLK_w) rotationAngles.coords[0] = 0; if (signal.key == SDLK_a) rotationAngles.coords[1] -= rotStep; if (signal.key == SDLK_d) rotationAngles.coords[1] += rotStep; if (signal.key == SDLK_s) rotationAngles.coords[1] = 0; if (signal.key == SDLK_z) rotationAngles.coords[2] -= rotStep; if (signal.key == SDLK_c) rotationAngles.coords[2] += rotStep; if (signal.key == SDLK_x) rotationAngles.coords[2] = 0; if (signal.key == SDLK_j) position.coords[0] += 0.02; if (signal.key == SDLK_l) position.coords[0] -= 0.02; if (signal.key == SDLK_i) position.coords[1] -= 0.02; if (signal.key == SDLK_k) position.coords[1] += 0.02; 
if (signal.key == SDLK_u) position.coords[2] -= 0.01; if (signal.key == SDLK_o) position.coords[2] += 0.01; if (signal.key == SDLK_0) position.Set(0, 0, 0); Quaternion rotX, rotY, rotZ, quat; rotX.SetAngleAxis(rotationAngles.coords[0], Vector3(1, 0, 0)); rotY.SetAngleAxis(rotationAngles.coords[1], Vector3(0, 1, 0)); rotZ.SetAngleAxis(rotationAngles.coords[2], Vector3(0, 0, 1)); quat = rotX * rotY * rotZ; animation->SetKeyFrame(partName, currentFrame, quat, position); float velocity = animation->GetIncomingMovement().GetLength(); std::string mode; if (velocity < 1.8) mode = "idle"; else if (velocity >= 1.8 && velocity < 4.2) mode = "dribble"; else if (velocity >= 4.2 && velocity < 6.0) mode = "walk"; else if (velocity >= 6.0) mode = "sprint"; float diff = 0; if (mode == "idle") diff = velocity - idleVelocity; if (mode == "dribble") diff = velocity - dribbleVelocity; if (mode == "walk") diff = velocity - walkVelocity; if (mode == "sprint") diff = velocity - sprintVelocity; printf("%s (%f) - ", mode.c_str(), diff); velocity = animation->GetOutgoingMovement().GetLength(); if (velocity < 1.8) mode = "idle"; else if (velocity >= 1.8 && velocity < 4.2) mode = "dribble"; else if (velocity >= 4.2 && velocity < 6.0) mode = "walk"; else if (velocity >= 6.0) mode = "sprint"; if (mode == "idle") diff = velocity - idleVelocity; if (mode == "dribble") diff = velocity - dribbleVelocity; if (mode == "walk") diff = velocity - walkVelocity; if (mode == "sprint") diff = velocity - sprintVelocity; printf("%s (%f)\n", mode.c_str(), diff); //animation->GetOutgoingDirection().Print(); } // then what is it? a football extension maybe? 
} else { if (signal.key == SDLK_DELETE && isKeyFrame) { static_cast<GuiTimeline*>(signal.sender)->ToggleKeyFrame(); animation->GetExtension("football")->DeleteKeyFrame(currentFrame); } if (signal.key == SDLK_i || signal.key == SDLK_k || signal.key == SDLK_j || signal.key == SDLK_l || signal.key == SDLK_u || signal.key == SDLK_o || signal.key == SDLK_0) { Quaternion tmp; Vector3 position; float power; animation->GetExtension("football")->GetKeyFrame(currentFrame, tmp, position, power); if (!isKeyFrame) { static_cast<GuiTimeline*>(signal.sender)->ToggleKeyFrame(); //position = managedNodes.find(partName)->second->GetPosition(); position.coords[2] = 0.11; } float rotStep = pi * 0.01; if (signal.key == SDLK_j) position.coords[0] += 0.01; if (signal.key == SDLK_l) position.coords[0] -= 0.01; if (signal.key == SDLK_i) position.coords[1] -= 0.01; if (signal.key == SDLK_k) position.coords[1] += 0.01; if (signal.key == SDLK_u) position.coords[2] -= 0.01; if (signal.key == SDLK_o) position.coords[2] += 0.01; if (signal.key == SDLK_0) position.Set(0, 0, 0.11); animation->GetExtension("football")->SetKeyFrame(currentFrame, tmp, position, power); } } if (signal.key == SDLK_SPACE) { if (!play) play = true; else play = false; } } else if (signal.sender->GetName() == "dialog_load" && signal.key == SDLK_ESCAPE) { currentFile = loadDialog->GetFilename(); currentDir = loadDialog->GetDirectory(); //printf("loading %s\n", currentFile.c_str()); animation->Reset(); delete animation; animation = new Animation; boost::shared_ptr<FootballAnimationExtension> extension(new FootballAnimationExtension(animation)); animation->AddExtension("football", extension); animation->Load(currentFile); //animation->ConvertToStartFacingForwardIfIdle(); //animation->ConvertAngles(); //animation->Invert(); /* // debug std::list < boost::intrusive_ptr<Object> > bodyParts; playerNode->GetObjects(e_ObjectType_Geometry, bodyParts, true); std::map < const std::string, boost::intrusive_ptr<Node> > nodeMap; 
FillNodeMap(playerNode, nodeMap); AddExtraTouches(animation, playerNode, bodyParts, nodeMap); //! */ timeline->ClearKeys(); PopulateTimeline(animation, timeline); //animation->Hax(); printf("angle: %f\n", animation->GetOutgoingAngle()); printf("body angle: %f\n", animation->GetOutgoingBodyAngle()); float velocity = animation->GetIncomingMovement().GetLength(); std::string mode; if (velocity < 1.8) mode = "idle"; else if (velocity >= 1.8 && velocity < 4.2) mode = "dribble"; else if (velocity >= 4.2 && velocity < 6.0) mode = "walk"; else if (velocity >= 6.0) mode = "sprint"; printf("%s - ", mode.c_str()); velocity = animation->GetOutgoingMovement().GetLength(); if (velocity < 1.8) mode = "idle"; else if (velocity >= 1.8 && velocity < 4.2) mode = "dribble"; else if (velocity >= 4.2 && velocity < 6.0) mode = "walk"; else if (velocity >= 6.0) mode = "sprint"; printf("%s\n", mode.c_str()); guiInterface->DeleteView(loadDialog); loadDialog = 0; } } } Quaternion orientation; Vector3 position; float power; if (managedNodes.find(partName) != managedNodes.end()) { animation->GetKeyFrame(partName, currentFrame, orientation, position); if (partName != "player") position = managedNodes.find(partName)->second->GetDerivedPosition(); } else { animation->GetExtension("football")->GetKeyFrame(currentFrame, orientation, position, power); } Vector3 rotationAngles; orientation.GetAngles(rotationAngles.coords[0], rotationAngles.coords[1], rotationAngles.coords[2]); debugValues1->Set("rot: " + int_to_str(360 * rotationAngles.coords[0] / (pi * 2)) + ", " + int_to_str(360 * rotationAngles.coords[1] / (pi * 2)) + ", " + int_to_str(360 * rotationAngles.coords[2] / (pi * 2))); debugValues2->Set("pos: " + real_to_str(position.coords[0]).substr(0, 5) + ", " + real_to_str(position.coords[1]).substr(0, 5) + ", " + real_to_str(position.coords[2]).substr(0, 5)); debugValues3->Set("outgoing rot: " + int_to_str(int(round(animation->GetOutgoingAngle() / (2 * pi) * 360)))); 
caption_animName->Set(currentFile); Vector3 ballPosition; Quaternion tmp; if (!play) { int adaptedCurrentFrame = currentFrame; if (adaptedCurrentFrame > animation->GetFrameCount() - 1) adaptedCurrentFrame = animation->GetFrameCount() - 1; animation->Apply(nodeMap, adaptedCurrentFrame, 0, false, 1.0f, Vector(0), 0); timelineIndex->Set(int_to_str(currentFrame) + "/" + int_to_str(timeline->GetFrameCount())); animation->GetExtension("football")->GetKeyFrame(currentFrame, tmp, ballPosition, power); //counter = 0; } else { animation->Apply(nodeMap, currentPlayFrame, 0, false); /*if (is_odd(counter)) */currentPlayFrame++; counter++; int frameCount = timeline->GetFrameCount(); if (currentPlayFrame >= frameCount - 1) currentPlayFrame = 0; timelineIndex->Set(int_to_str(currentPlayFrame) + "/" + int_to_str(timeline->GetFrameCount())); animation->GetExtension("football")->GetKeyFrame(currentPlayFrame, tmp, ballPosition, power); } float velocity = animation->GetIncomingVelocity(); greenPilon->SetPosition(GetFrontOfFootOffset(velocity)); ballNode->GetObject("genericball")->SetPosition(ballPosition); if (ballPosition != Vector3(0)) { Vector3 ballDir; if (animation->GetVariable("incomingballdirection") != "") ballDir = GetVectorFromString(animation->GetVariable("incomingballdirection")) * -1.0; if (ballDir.GetLength() == 0) if (animation->GetVariable("balldirection") != "") ballDir = GetVectorFromString(animation->GetVariable("balldirection")); if (ballDir.GetLength() != 0) bluePilon->SetPosition(ballPosition + ballDir); } // 鼠标旋转镜头 // studioRot += UserEventManager::GetInstance().GetMouseRelativePos().coords[0] / 100.0; studioRot = 0.5f; // printf("studioRot: %f\n", studioRot); Quaternion rot; rot.SetAngleAxis(studioRot, Vector3(0, 0, 1)); cameraNode->SetRotation(rot); cameraNode->SetPosition(playerNode->GetPosition().Get2D() + Vector3(0, 0, 0.65)); // camera->SetPosition(Vector3(0, -3.4, 1.8 - 0.74) + playerNode->GetPosition()); } void Animator::PutPhase() { }
# Run the MPC controller binary from the build directory.
# Fail fast if the build directory is missing instead of silently
# executing ./mpc from the wrong working directory.
cd ./build || exit 1
./mpc
<reponame>lgoldstein/communitychest /* * */ package net.community.apps.tools.svn.wc; import java.io.File; import java.io.FileFilter; import javax.swing.filechooser.FileSystemView; import javax.swing.table.TableCellRenderer; import net.community.apps.tools.svn.resources.DefaultResourcesAnchor; import net.community.chest.svn.ui.filesmgr.SVNLocalCopyFileManagerModel; import net.community.chest.svnkit.SVNFoldersFilter; import net.community.chest.svnkit.core.wc.SVNLocalCopyData; import net.community.chest.ui.components.table.file.FilesTableColumns; import net.community.chest.util.compare.AbstractComparator; /** * <P>Copyright 2009 as per GPLv2</P> * * @author <NAME>. * @since Aug 5, 2009 3:16:11 PM */ public class WCLocalFilesModel extends SVNLocalCopyFileManagerModel { /** * */ private static final long serialVersionUID = 8517775317667782136L; private final WCMainFrame _f; public final WCMainFrame getFrame () { return _f; } public WCLocalFilesModel (WCMainFrame f, int initialSize) { super(initialSize); if (null == (_f=f)) throw new IllegalArgumentException("No " + WCMainFrame.class.getSimpleName() + " instance provided"); } public WCLocalFilesModel (WCMainFrame f) { this(f, 10); } private File _parentFolder; public File getParentFolder () { return _parentFolder; } public void setParentFolder (File d) { final File[] fa; if (d != null) { if (!WCLocationFileInputVerifier.DEFAULT.verifyFile(d)) return; fa = d.listFiles((FileFilter) SVNFoldersFilter.DEFAULT); } else { fa = File.listRoots(); } clear(false); if (d != null) add(new SVNLocalCopyData(d), false); // add parent entry if ((fa != null) && (fa.length > 0)) { for (final File f : fa) { if (!SVNFoldersFilter.DEFAULT.accept(f)) continue; add(new SVNLocalCopyData(f), false); } } _parentFolder = d; // null == top-level final WCMainFrame f=getFrame(); /* * NOTE !!! 
we rely on the fact that if this method was called from * "setWCLocation" then it will not be called again since the * reported location should match the one that we got as parameter */ f.setWCLocation(d, null, false); fireTableDataChanged(); } public boolean isParentFolder (File f) { return AbstractComparator.compareObjects(getParentFolder(), f); } /* * @see net.community.chest.svn.ui.filesmgr.SVNLocalCopyFileManagerModel#resolveColumnRenderer(net.community.chest.ui.components.table.file.FilesTableColumns, javax.swing.table.TableCellRenderer) */ @Override protected TableCellRenderer resolveColumnRenderer (FilesTableColumns colIndex, TableCellRenderer r) { if (FilesTableColumns.NAME.equals(colIndex) && (null == r)) return new WCLocalFileCellRenderer(this, FileSystemView.getFileSystemView(), DefaultResourcesAnchor.getInstance().getStatusIconsMap()); else if (FilesTableColumns.ATTRS.equals(colIndex) && (null == r)) return new WCFileAttrsCellRenderer(DefaultResourcesAnchor.getInstance().getStatusIconsMap()); else return super.resolveColumnRenderer(colIndex, r); } /* * @see net.community.chest.svn.ui.filesmgr.SVNLocalCopyFileManagerModel#getColumnClass(net.community.chest.ui.components.table.file.FilesTableColumns) */ @Override public Class<?> getColumnClass (FilesTableColumns colIndex) { if (FilesTableColumns.ATTRS.equals(colIndex)) return File.class; else return super.getColumnClass(colIndex); } /* * @see net.community.chest.svn.ui.filesmgr.SVNLocalCopyFileManagerModel#getColumnValue(int, net.community.chest.svnkit.core.wc.SVNLocalCopyData, net.community.chest.ui.components.table.file.FilesTableColumns) */ @Override public Object getColumnValue (int rowIndex, SVNLocalCopyData row, FilesTableColumns colIndex) { if (FilesTableColumns.ATTRS.equals(colIndex)) return row.getFile(); else return super.getColumnValue(rowIndex, row, colIndex); } }
# Habitat plan for shield-proxy: a meta/proxy package for the Shield
# backup-and-restore tool. It ships no sources of its own - it only
# declares runtime dependencies and service settings, so the build
# lifecycle callbacks below are deliberate no-ops.
pkg_name=shield-proxy
pkg_origin=core
pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>"
pkg_description="Proxy package for the Shield backup and restore tool"
pkg_license=('Apache-2.0')
pkg_version=0.10.8
# Service runs as root; group mirrors the user.
pkg_svc_user=root
pkg_svc_group="${pkg_svc_user}"
pkg_upstream_url=""
# Runtime dependencies pulled in for the proxied service.
pkg_deps=(
  core/nginx
  core/openssl
  core/bash
)

# Nothing to compile - package only wraps its dependencies.
do_build() {
  return 0
}

# No upstream artifact to fetch.
do_download() {
  return 0
}

# Nothing to install into the package path.
do_install() {
  return 0
}
package com.androidapp.mvp;

/**
 * Base MVP presenter contract: binds and releases a View and a Model.
 *
 * @param <View>  type of the view this presenter drives
 * @param <Model> type of the model this presenter reads from
 */
public interface MvpPresenter<View, Model> {

    /** Bind the view to this presenter. */
    void attachView(View view);

    /** Bind the model to this presenter. */
    void attachModel(Model model);

    /** Release the view reference (e.g. when the UI is destroyed, to avoid leaks). */
    void detachView();

    /** Release the model reference. */
    void detachModel();
}
<gh_stars>0 package helper import ( "errors" "fmt" "strings" "testing" "github.com/stretchr/testify/require" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" ) func TestError(t *testing.T) { errorMessage := "sentinel error" input := errors.New(errorMessage) inputGRPCCode := codes.Unauthenticated inputGRPC := status.Error(inputGRPCCode, errorMessage) for _, tc := range []struct { desc string errorf func(err error) error code codes.Code }{ { desc: "Canceled", errorf: ErrCanceled, code: codes.Canceled, }, { desc: "Internal", errorf: ErrInternal, code: codes.Internal, }, { desc: "InvalidArgument", errorf: ErrInvalidArgument, code: codes.InvalidArgument, }, { desc: "FailedPrecondition", errorf: ErrFailedPrecondition, code: codes.FailedPrecondition, }, { desc: "NotFound", errorf: ErrNotFound, code: codes.NotFound, }, { desc: "Unavailable", errorf: ErrUnavailable, code: codes.Unavailable, }, } { t.Run(tc.desc, func(t *testing.T) { // tc.code and our canary test code must not // clash! require.NotEqual(t, tc.code, inputGRPCCode) // When not re-throwing an error we get the // GRPC error code corresponding to the // function's name. err := tc.errorf(input) require.EqualError(t, err, errorMessage) require.False(t, errors.Is(err, inputGRPC)) require.Equal(t, tc.code, status.Code(err)) // When re-throwing an error an existing GRPC // error code will get preserved, instead of // the one corresponding to the function's // name. 
err = tc.errorf(inputGRPC) require.True(t, errors.Is(err, inputGRPC)) require.False(t, errors.Is(err, input)) require.Equal(t, inputGRPCCode, status.Code(err)) require.NotEqual(t, tc.code, status.Code(inputGRPC)) }) } } func TestErrorF_withVFormat(t *testing.T) { testErrorfFormat(t, "expected %v", "expected %v") } func TestErrorF_withWFormat(t *testing.T) { testErrorfFormat(t, "expected %w", "expected %s") } func testErrorfFormat(t *testing.T, errorFormat, errorFormatEqual string) { isFormatW := strings.Contains(errorFormat, "%w") errorMessage := "sentinel error" input := errors.New(errorMessage) inputGRPCCode := codes.Unauthenticated inputGRPC := status.Error(inputGRPCCode, errorMessage) inputGRPCFmt := status.Errorf(inputGRPCCode, errorFormat, errorMessage) for _, tc := range []struct { desc string errorf func(format string, a ...interface{}) error code codes.Code }{ { desc: "Internalf", errorf: ErrInternalf, code: codes.Internal, }, { desc: "InvalidArgumentf", errorf: ErrInvalidArgumentf, code: codes.InvalidArgument, }, { desc: "FailedPreconditionf", errorf: ErrFailedPreconditionf, code: codes.FailedPrecondition, }, { desc: "NotFoundf", errorf: ErrNotFoundf, code: codes.NotFound, }, { desc: "ErrUnavailablef", errorf: ErrUnavailablef, code: codes.Unavailable, }, } { t.Run(tc.desc, func(t *testing.T) { require.NotEqual(t, tc.code, inputGRPCCode, "canary test code and tc.code may not be the same") // When not re-throwing an error we get the GRPC error code corresponding to // the function's name. Just like the non-f functions. err := tc.errorf(errorFormat, input) require.EqualError(t, err, fmt.Sprintf(errorFormatEqual, errorMessage)) require.False(t, errors.Is(err, inputGRPC)) require.Equal(t, tc.code, status.Code(err)) // When wrapping an existing gRPC error, then the error code will stay the // same. 
err = tc.errorf(errorFormat, inputGRPCFmt) require.False(t, errors.Is(err, input)) if isFormatW { require.Equal(t, inputGRPCCode, status.Code(err)) } else { require.Equal(t, tc.code, status.Code(err)) } require.NotEqual(t, tc.code, status.Code(inputGRPC)) // The same as above, except that we test with an error returned by // `status.Error()`. errX := tc.errorf(errorFormat, inputGRPC) require.Equal(t, errors.Is(errX, inputGRPC), isFormatW) // .True() for non-f require.False(t, errors.Is(errX, input)) if isFormatW { require.Equal(t, inputGRPCCode, status.Code(errX)) } else { require.Equal(t, tc.code, status.Code(errX)) } require.Equal(t, inputGRPCCode, status.Code(inputGRPC)) }) } }
#pragma once #include "Renderer2D.h" #include "Mathman.h" #include "GraphNode.h" #include <vector> #include "Heap.h" #include "Renderer2D.h" #define GRID_WIDTH 50 #define GRID_HEIGHT 50 #define CELL_SIZE 15 #define CELL_PADDING 2 #define COST_SIDE 10 #define COST_DIAG 14 class Pathfinder { public: Pathfinder(); ~Pathfinder(); void ResetGrid(); GraphNode* GetNodeByPos(Vector2 vec); Vector2 GetPosFromNode(GraphNode* node); int CalculateHeuristic(GraphNode* startNode, GraphNode* endNode); bool DijkstrasPath(Vector2 start, Vector2 end, std::vector<Vector2>& finalPath); bool AStarPath(Vector2 start, Vector2 end, std::vector<Vector2>& finalPath); void Draw(aie::Renderer2D* renderer); private: GraphNode* grid[GRID_WIDTH][GRID_HEIGHT]; Heap openList; char closedList[GRID_WIDTH][GRID_HEIGHT]; int baseCosts[8] = { COST_DIAG, COST_SIDE, COST_DIAG, COST_SIDE, COST_SIDE, COST_DIAG, COST_SIDE, COST_DIAG }; };
/*
 * Copyright (c) 2019-2021. <NAME> and others.
 * https://github.com/mfvanek/pg-index-health
 *
 * This file is a part of "pg-index-health" - a Java library for
 * analyzing and maintaining indexes health in PostgreSQL databases.
 *
 * Licensed under the Apache License 2.0
 */

package io.github.mfvanek.pg.settings;

import io.github.mfvanek.pg.utils.Validators;

import javax.annotation.Nonnull;

/**
 * Immutable description of the hardware a database server runs on:
 * CPU core count, memory size and storage type (SSD or not).
 * Instances are created via {@link #builder()}.
 */
public class ServerSpecification {

    private final int cpuCoresAmount;
    private final long memoryAmountInBytes;
    private final boolean hasSSD;

    // Package-private on purpose: construction goes through ServerSpecificationBuilder.
    ServerSpecification(int cpuCoresAmount, long memoryAmountInBytes, boolean hasSSD) {
        // Validation only; the validated value is re-read from the parameter below
        // (unlike memoryAmountInBytes, which uses the validator's return value).
        Validators.valueIsPositive(cpuCoresAmount, "cpuCoresAmount");
        this.cpuCoresAmount = cpuCoresAmount;
        this.memoryAmountInBytes = Validators.valueIsPositive(memoryAmountInBytes, "memoryAmountInBytes");
        this.hasSSD = hasSSD;
    }

    /** @return number of CPU cores; validated to be positive at construction. */
    public int getCpuCoresAmount() {
        return cpuCoresAmount;
    }

    /** @return amount of memory in bytes; validated to be positive at construction. */
    public long getMemoryAmountInBytes() {
        return memoryAmountInBytes;
    }

    /** @return whether the server storage is SSD-backed. */
    public boolean hasSSD() {
        return hasSSD;
    }

    @Override
    public String toString() {
        return ServerSpecification.class.getSimpleName() + '{' +
                "cpuCoresAmount=" + cpuCoresAmount +
                ", memoryAmountInBytes=" + memoryAmountInBytes +
                ", hasSSD=" + hasSSD +
                '}';
    }

    /** @return a fresh builder for assembling a specification. */
    @Nonnull
    public static ServerSpecificationBuilder builder() {
        return new ServerSpecificationBuilder();
    }
}
def gradient_descent(x0, learning_rate, max_iterations, objective_function):
    """Minimize a 1-D objective with fixed-step gradient descent.

    The derivative is approximated with a forward finite difference
    (step 1e-4), so ``objective_function`` only needs to be callable.
    There is no convergence-based early stop: the full iteration budget
    is always consumed.

    Args:
        x0: Starting point.
        learning_rate: Step size applied to the (approximate) gradient.
        max_iterations: Number of update steps to perform.
        objective_function: Callable mapping a float to a float.

    Returns:
        Tuple ``(x_final, iterations_run, trajectory)`` where
        ``trajectory`` lists every iterate, beginning with ``x0``.
    """
    h = 0.0001  # finite-difference step
    trajectory = [x0]
    steps_taken = 0
    while steps_taken < max_iterations:
        current = trajectory[-1]
        # Forward-difference approximation of the derivative at `current`.
        slope = (objective_function(current + h) - objective_function(current)) / h
        trajectory.append(current - learning_rate * slope)
        steps_taken += 1
    if steps_taken == max_iterations:
        print("Maximum number of iterations reached")
    return trajectory[-1], steps_taken, trajectory
<gh_stars>1-10 import kmeans.Centroide; import kmeans.Elemento; import kmeans.Kmeans; import java.util.Arrays; import java.util.List; public class KmeansToStringConverter { public static final String convert(Kmeans kmeans) { return kmeans.getAgrupamentos() .entrySet() .stream() .map((entry) -> { var centroideString = KmeansToStringConverter.convert(entry.getKey()); var elementsString = KmeansToStringConverter.convert(entry.getValue()); return centroideString + "\n" + elementsString; }) .reduce((s1, s2) -> s1 + "\n" + s2) .orElse(""); } private static String convert(List<Elemento> elementos) { StringBuilder builder = new StringBuilder(); builder.append(String.format("Elementos%n")); for (var e: elementos) { builder.append(String.format("%s%n", Arrays.toString(e.getParametros().toArray()))); } return builder.toString(); } private static String convert(Centroide c) { return String.format("Centroide %s", Arrays.toString(c.getParametros().toArray())); } }
#!/bin/sh
# Regenerate the Python protobuf bindings for the orwell message set.

do_generate() {
    # Run from the directory containing this script so the relative proto
    # paths below resolve; abort instead of compiling from the wrong place.
    cd "$(dirname "$0")" || exit 1
    protoc --version
    protoc -I=. --python_out=orwell/messages common.proto controller.proto robot.proto server-game.proto server-web.proto
}

do_generate
// Site-wide UI wiring. Relies on page-level globals defined elsewhere:
// FLGERR (validation flag, 0 == no errors), SITEURL, showLoader/hideLoader,
// $.mxalert, and the slick carousel plugin.
$(document).ready(function(){

    // Toggle the hamburger icon and the slide-out menu.
    $("#nav-icon").click(function (e) {
        $(this).toggleClass("open"),
        $("div.menu-wrap").toggleClass("active")
    });

    // Main carousel.
    $('#mycarousel').slick({
        scroll: 1
    });

    // League table navigation: 2 slides, dropping to 1 below 480px.
    $('.league-table-nav').slick({
        slidesToShow: 2,
        slidesToScroll: 1,
        responsive: [
            {
                breakpoint: 480,
                settings: {
                    slidesToShow: 1,
                    slidesToScroll: 1
                }
            }
        ]
    });

    // Member login: posts the serialized form to the AJAX endpoint and,
    // on 'OK', redirects to #redirectMe (if set) or the site root.
    // NOTE(review): this handler compares raw `data` while later handlers
    // use $.trim(data) - confirm whether the endpoint pads its response.
    $("form.member-login").submit(function(){
        if(FLGERR == 0){
            var $frm = $(this);
            var aUrl = SITEURL+"/inc/site.inc.php?xAction=memberLogin&"+$("form#member-login").serialize();
            showLoader();
            $.ajax({
                type: 'post',
                url: aUrl,
                success: function(data){
                    if(data == 'OK'){
                        var redirectURL = "";
                        if($.trim($("form#member-login").find("#redirectMe").val()) != "")
                            redirectURL = SITEURL+"/"+$.trim($("form#member-login").find("#redirectMe").val());
                        else
                            redirectURL = SITEURL;
                        window.location = redirectURL;
                    }
                    else if(data == 'ERR'){
                        hideLoader();
                        $.mxalert({msg:"Please enter valid login credentials."});
                    }
                }
            });
        }
        return false;
    });

    /* Destroy the server session when logout is clicked. */
    $("a.memberLogout").click(function(){
        var aUrl = SITEURL+"/inc/site.inc.php?xAction=memberLogout";
        showLoader();
        $.ajax({
            type: 'post',
            url: aUrl,
            success: function(data){
                if(data == 'OK'){
                    window.location = SITEURL;
                }
                else if(data == 'ERR'){
                    hideLoader();
                    $.mxalert({msg:"Something went wrong"});
                }
            }
        });
        return false;
    });
    /* End of logout */

    // Newsletter ("stay connected") subscription form.
    $("form#stay-connected").submit(function(){
        if(FLGERR == 0){
            var $frm = $(this);
            var aUrl = SITEURL+"/inc/site.inc.php?xAction=stayConnectedEmailSave";
            showLoader();
            $.ajax({
                type: 'post',
                url: aUrl,
                data:$frm.serialize(),
                /*success: function(data){
                    hideLoader();
                    if(data == 'OK'){
                        $.mxalert({msg:"Email Send Successfully"});
                    }else{
                        $.mxalert({msg:"Email Not Send Successfully"});
                    }
                }*/
                success: function(data){
                    hideLoader();
                    // Endpoint responses: OK (new subscription),
                    // REGISTERED (duplicate email), anything else is an error.
                    if($.trim(data) == 'OK'){
                        $.mxalert(
                        {
                            msg: "You are subscription successfull."
                        });
                        $("form#stay-connected").trigger('reset');
                        return false;
                    }
                    else if($.trim(data) == 'REGISTERED'){
                        $.mxalert(
                        {
                            msg: "Mail ID alredy exists."
                        });
                    }
                    else {
                        $("form#stay-connected").trigger('reset');
                        $.mxalert({msg:"Something went wrong please try again."});
                        return false;
                    }
                }
            });
        }
        return false;
    });

    // Store the chosen city in the PHP session, then reload the page so
    // server-rendered content reflects the selection.
    $('select#cityPage,select#cityPopup,select#homeCity').change(function(){
        var aUrl = SITEURL+"/inc/site.inc.php?xAction=setCityInSession";
        showLoader();
        $.ajax({
            type:"POST",
            url:aUrl,
            data:{'cityID':$(this).val()},
            success:function(data){
                if(data=="OK"){
                    window.location.reload();
                }else{
                    hideLoader();
                    $.mxalert({msg:"Something went wrong"});
                }
            }
        })
    });

});
//////////////////////////////////////////////////////////////////////////////
import React from 'react'; import {Link} from 'gatsby'; import classNames from 'classnames'; import styles from './HeaderLink.module.scss'; export default class HeaderLink extends React.Component { componentDidMount() { if (this.props.isActive) { this.scrollIntoView(); } } scrollIntoView = () => { if (this.button.scrollIntoView) { this.button.scrollIntoView({ behaviour: 'smooth', inline: 'center', }); } }; render() { const {isActive, title, to} = this.props; return ( <Link to={to} className={classNames({ [styles.link]: true, [styles.link_active]: isActive, })} innerRef={node => (this.button = node)} > {title} </Link> ); } }
import axios from "../axios";

// One issue record as returned by the per-system issue endpoint.
interface Issue {
  ruleId: string;
  position: string;
  name: string;
  detail: string;
  ruleType: string;
  severity: string;
  fullName: string;
  source: string;
}

// Parsed source span of an issue.
// NOTE(review): declared but neither exported nor referenced in this file -
// presumably the decoded form of Issue.position; confirm before removing.
interface IssuePosition {
  startLine: number;
  startColumn: number;
  endLine: number;
  endColumn: number;
}

// Fetch every issue recorded for the system with the given id.
export function getAllIssue(id: number) {
  return axios<Issue[]>({
    url: `/api/systems/${id}/issue`,
    method: "GET",
  });
}
module Toyrobot
  # Command-line entry point: reads robot commands from a file and feeds
  # them, one line at a time, into a Simulation.
  class CLI
    def initialize(filename)
      @filename = filename
      @table = Table.new
      @simulation = Simulation.new(@table)
      @command = Command.new
    end

    # Execute every command in the input file, in order.
    def run
      # File.foreach streams line by line and closes the handle when done;
      # the previous File.open(...).each form leaked the file descriptor.
      File.foreach(@filename) { |line| run_command line }
    end

    private

    # Parse one text line into a simulation message and dispatch it.
    def run_command(line)
      @simulation.send(*@command.parse(line))
    end
  end
end
import { APYData } from "./types";

// Payload describing the APY entries recorded for one bank on one chain.
export interface SetBankAPY {
  // Identifier of the chain the bank lives on.
  chainId: number;
  // Bank address on that chain.
  address: string;
  // APY data points for the bank (shape defined in ./types).
  apys: APYData[];
}
def all_caps(words):
    """Return True if no word in ``words`` contains a lowercase character.

    Characters without a lowercase form (digits, punctuation, whitespace)
    never disqualify a word, so ``["ABC", "123"]`` counts as all-caps.
    An empty iterable yields True.

    Args:
        words: Iterable of strings.

    Returns:
        bool: False as soon as any lowercase character is found, else True.
    """
    for word in words:
        for char in word:
            if char.islower():
                return False
    return True


if __name__ == "__main__":
    # Demo guarded by __main__. FIX: the original module-level call used an
    # undefined name ``words`` and raised NameError on import.
    print(all_caps(["HELLO", "WORLD"]))
def convertToSpecificFormat(matrix):
    """Serialize a 2-D iterable as bracketed rows joined by semicolons.

    Example: [[1, 2], [3, 4]] -> "[1, 2];[3, 4]"
    """
    parts = []
    for row in matrix:
        cells = ", ".join(str(cell) for cell in row)
        parts.append("[" + cells + "]")
    return ";".join(parts)
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.tomcat.dbcp.dbcp.datasources;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import javax.sql.ConnectionEvent;
import javax.sql.ConnectionEventListener;
import javax.sql.ConnectionPoolDataSource;
import javax.sql.PooledConnection;

import org.apache.tomcat.dbcp.pool.KeyedObjectPool;
import org.apache.tomcat.dbcp.pool.KeyedPoolableObjectFactory;

/**
 * A {@link KeyedPoolableObjectFactory} that creates
 * {@link PooledConnectionAndInfo}s keyed by user credentials, and listens for
 * connection events so closed logical connections are returned to the pool.
 *
 * @author <NAME>
 */
class KeyedCPDSConnectionFactory
    implements KeyedPoolableObjectFactory<UserPassKey, PooledConnectionAndInfo>,
        ConnectionEventListener, PooledConnectionManager {

    private static final String NO_KEY_MESSAGE
            = "close() was called on a Connection, but "
            + "I have no record of the underlying PooledConnection.";

    private final ConnectionPoolDataSource _cpds;
    private final String _validationQuery;
    private final boolean _rollbackAfterValidation;
    private final KeyedObjectPool<UserPassKey, PooledConnectionAndInfo> _pool;

    /**
     * Map of PooledConnections for which close events are ignored.
     * Connections are muted when they are being validated, because validating
     * closes the logical connection, which fires a close event that must not
     * return the PooledConnection to the pool.
     */
    private final Set<PooledConnection> validatingSet = Collections.newSetFromMap(
            new ConcurrentHashMap<PooledConnection, Boolean>());

    /**
     * Map of PooledConnectionAndInfo instances, so the wrapper (and its pool
     * key) can be recovered from the raw PooledConnection in event callbacks.
     */
    private final Map<PooledConnection, PooledConnectionAndInfo> pcMap =
        new ConcurrentHashMap<PooledConnection, PooledConnectionAndInfo>();

    /**
     * Create a new <code>KeyedCPDSConnectionFactory</code>.
     * @param cpds the ConnectionPoolDataSource from which to obtain PooledConnection's
     * @param pool the {@link KeyedObjectPool} in which to pool those {@link Connection}s
     * @param validationQuery a query to use to {@link #validateObject validate} {@link Connection}s.
     * Should return at least one row. May be <code>null</code>
     */
    public KeyedCPDSConnectionFactory(ConnectionPoolDataSource cpds,
                                      KeyedObjectPool<UserPassKey, PooledConnectionAndInfo> pool,
                                      String validationQuery) {
        this(cpds , pool, validationQuery, false);
    }

    /**
     * Create a new <code>KeyedCPDSConnectionFactory</code>.
     * @param cpds the ConnectionPoolDataSource from which to obtain
     * PooledConnections
     * @param pool the {@link KeyedObjectPool} in which to pool those
     * {@link Connection}s
     * @param validationQuery a query to use to {@link #validateObject validate}
     * {@link Connection}s.  Should return at least one row.
     * May be <code>null</code>
     * @param rollbackAfterValidation whether a rollback should be issued after
     * {@link #validateObject validating} {@link Connection}s.
     */
    public KeyedCPDSConnectionFactory(ConnectionPoolDataSource cpds,
                                      KeyedObjectPool<UserPassKey, PooledConnectionAndInfo> pool,
                                      String validationQuery,
                                      boolean rollbackAfterValidation) {
        _cpds = cpds;
        _pool = pool;
        // Register this instance as the pool's factory so the pool calls back
        // into makeObject/destroyObject/validateObject.
        pool.setFactory(this);
        _validationQuery = validationQuery;
        _rollbackAfterValidation = rollbackAfterValidation;
    }

    /**
     * Returns the keyed object pool used to pool connections created by this factory.
     *
     * @return KeyedObjectPool managing pooled connections
     */
    public KeyedObjectPool<UserPassKey, PooledConnectionAndInfo> getPool() {
        return _pool;
    }

    /**
     * Creates a new {@link PooledConnectionAndInfo} from the given {@link UserPassKey}.
     *
     * @param upkey {@link UserPassKey} containing user credentials
     * @throws SQLException if the connection could not be created.
     * @see org.apache.tomcat.dbcp.pool.KeyedPoolableObjectFactory#makeObject(java.lang.Object)
     */
    @Override
    public synchronized PooledConnectionAndInfo makeObject(UserPassKey upkey)
            throws Exception {
        PooledConnectionAndInfo obj = null;

        PooledConnection pc = null;
        String username = upkey.getUsername();
        String password = upkey.getPassword();
        if (username == null) {
            pc = _cpds.getPooledConnection();
        } else {
            pc = _cpds.getPooledConnection(username, password);
        }

        if (pc == null) {
            throw new IllegalStateException("Connection pool data source returned null from getPooledConnection");
        }

        // should we add this object as a listener or the pool.
        // consider the validateObject method in decision
        pc.addConnectionEventListener(this);
        obj = new PooledConnectionAndInfo(pc, username, password);
        pcMap.put(pc, obj);

        return obj;
    }

    /**
     * Closes the PooledConnection and stops listening for events from it.
     */
    @Override
    public void destroyObject(UserPassKey key, PooledConnectionAndInfo obj)
            throws Exception {
        PooledConnection pc = obj.getPooledConnection();
        pc.removeConnectionEventListener(this);
        pcMap.remove(pc);
        pc.close();
    }

    /**
     * Validates a pooled connection by running the configured validation
     * query (or trivially succeeding when no query is configured).
     *
     * @param key ignored
     * @param obj {@link PooledConnectionAndInfo} containing the connection to validate
     * @return true if validation succeeds
     */
    @Override
    public boolean validateObject(UserPassKey key,
                                  PooledConnectionAndInfo obj) {
        boolean valid = false;
        PooledConnection pconn = obj.getPooledConnection();
        String query = _validationQuery;
        if (null != query) {
            Connection conn = null;
            Statement stmt = null;
            ResultSet rset = null;
            // logical Connection from the PooledConnection must be closed
            // before another one can be requested and closing it will
            // generate an event. Keep track so we know not to return
            // the PooledConnection
            validatingSet.add(pconn);
            try {
                conn = pconn.getConnection();
                stmt = conn.createStatement();
                rset = stmt.executeQuery(query);
                if (rset.next()) {
                    valid = true;
                } else {
                    valid = false;
                }
                if (_rollbackAfterValidation) {
                    conn.rollback();
                }
            } catch(Exception e) {
                valid = false;
            } finally {
                // Close in reverse order of acquisition, swallowing any
                // secondary failures so the validation verdict stands.
                if (rset != null) {
                    try {
                        rset.close();
                    } catch (Throwable t) {
                        // ignore
                    }
                }
                if (stmt != null) {
                    try {
                        stmt.close();
                    } catch (Throwable t) {
                        // ignore
                    }
                }
                if (conn != null) {
                    try {
                        conn.close();
                    } catch (Throwable t) {
                        // ignore
                    }
                }
                validatingSet.remove(pconn);
            }
        } else {
            valid = true;
        }
        return valid;
    }

    // No per-borrow / per-return work is needed for pooled connections.
    @Override
    public void passivateObject(UserPassKey key, PooledConnectionAndInfo obj) {
    }

    @Override
    public void activateObject(UserPassKey key, PooledConnectionAndInfo obj) {
    }

    // ***********************************************************************
    // java.sql.ConnectionEventListener implementation
    // ***********************************************************************

    /**
     * This will be called if the Connection returned by the getConnection
     * method came from a PooledConnection, and the user calls the close()
     * method of this connection object. What we need to do here is to
     * release this PooledConnection from our pool...
     */
    @Override
    public void connectionClosed(ConnectionEvent event) {
        PooledConnection pc = (PooledConnection)event.getSource();

        // if this event occurred because we were validating, or if this
        // connection has been marked for removal, ignore it
        // otherwise return the connection to the pool.
        if (!validatingSet.contains(pc)) {
            PooledConnectionAndInfo info = pcMap.get(pc);
            if (info == null) {
                throw new IllegalStateException(NO_KEY_MESSAGE);
            }

            try {
                _pool.returnObject(info.getUserPassKey(), info);
            } catch (Exception e) {
                System.err.println("CLOSING DOWN CONNECTION AS IT COULD " +
                "NOT BE RETURNED TO THE POOL");
                pc.removeConnectionEventListener(this);
                try {
                    _pool.invalidateObject(info.getUserPassKey(), info);
                } catch (Exception e3) {
                    System.err.println("EXCEPTION WHILE DESTROYING OBJECT " +
                            info);
                    e3.printStackTrace();
                }
            }
        }
    }

    /**
     * If a fatal error occurs, close the underlying physical connection so as
     * not to be returned in the future
     */
    @Override
    public void connectionErrorOccurred(ConnectionEvent event) {
        PooledConnection pc = (PooledConnection)event.getSource();
        if (null != event.getSQLException()) {
            System.err
                .println("CLOSING DOWN CONNECTION DUE TO INTERNAL ERROR (" +
                         event.getSQLException() + ")");
        }
        pc.removeConnectionEventListener(this);

        PooledConnectionAndInfo info = pcMap.get(pc);
        if (info == null) {
            throw new IllegalStateException(NO_KEY_MESSAGE);
        }
        try {
            _pool.invalidateObject(info.getUserPassKey(), info);
        } catch (Exception e) {
            System.err.println("EXCEPTION WHILE DESTROYING OBJECT " + info);
            e.printStackTrace();
        }
    }

    // ***********************************************************************
    // PooledConnectionManager implementation
    // ***********************************************************************

    /**
     * Invalidates the PooledConnection in the pool.  The KeyedCPDSConnectionFactory
     * closes the connection and pool counters are updated appropriately.
     * Also clears any idle instances associated with the username that was used
     * to create the PooledConnection.  Connections associated with this user
     * are not affected and they will not be automatically closed on return to the pool.
     */
    @Override
    public void invalidate(PooledConnection pc) throws SQLException {
        PooledConnectionAndInfo info = pcMap.get(pc);
        if (info == null) {
            throw new IllegalStateException(NO_KEY_MESSAGE);
        }
        UserPassKey key = info.getUserPassKey();
        try {
            _pool.invalidateObject(key, info);  // Destroy and update pool counters
            _pool.clear(key); // Remove any idle instances with this key
        } catch (Exception ex) {
            throw (SQLException) new SQLException("Error invalidating connection").initCause(ex);
        }
    }

    /**
     * Does nothing.  This factory does not cache user credentials.
     */
    @Override
    public void setPassword(String password) {
    }

    /**
     * This implementation does not fully close the KeyedObjectPool, as
     * this would affect all users.  Instead, it clears the pool associated
     * with the given user.  This method is not currently used.
     */
    @Override
    public void closePool(String username) throws SQLException {
        try {
            _pool.clear(new UserPassKey(username, null));
        } catch (Exception ex) {
            throw (SQLException) new SQLException("Error closing connection pool").initCause(ex);
        }
    }

}
// Demonstrates value vs. reference semantics for JavaScript variables.
var x = 2; // primitives are copied by value (declared for the demo; unused below)
var y = {name: 'Amin'}; // objects are held by reference
var w = y; // w now refers to the SAME object as y, not a copy

console.log(w === y); // true — identical references compare equal
console.log(y.name === w.name); // true — same property on the same object

y.name = 'Tom'; // mutate through y ...
console.log(w.name); // 'Tom' — ... and the change is visible through w
# Install fzf via Homebrew if the binary is not already on PATH.
if (( ! $+commands[fzf] )); then
  echo "Installing fzf"
  brew install fzf
fi

# Load fzf shell completion only in interactive shells; key bindings always.
[[ $- == *i* ]] && source "/usr/local/opt/fzf/shell/completion.zsh" 2> /dev/null
source "/usr/local/opt/fzf/shell/key-bindings.zsh"

# export FZF_DEFAULT_COMMAND='rg --files --no-ignore --hidden --follow -g "!{.git,node_modules}/*" 2> /dev/null'
# Use fd for default candidate listing; include hidden files and follow links.
export FZF_DEFAULT_COMMAND='fd --type f --hidden --follow'
# Typing "kk" then TAB triggers fuzzy completion.
export FZF_COMPLETION_TRIGGER='kk'
# Open the tmux popup as a bottom pane taking 40% of the window.
export FZF_TMUX_OPTS='-d 40%'

# Use fd (https://github.com/sharkdp/fd) instead of the default find
# command for listing path candidates.
# - The first argument to the function ($1) is the base path to start traversal
# - See the source code (completion.{bash,zsh}) for the details.
_fzf_compgen_path() {
  fd --hidden --follow --exclude ".git" . "$1"
}

# Use fd to generate the list for directory completion
_fzf_compgen_dir() {
  fd --type d --hidden --follow --exclude ".git" . "$1"
}

# (EXPERIMENTAL) Advanced customization of fzf options via _fzf_comprun function
# - The first argument to the function is the name of the command.
# - You should make sure to pass the rest of the arguments to fzf.
_fzf_comprun() {
  local command=$1
  shift

  case "$command" in
    # cd)           fzf "$@" --preview 'tree -C {} | head -200' ;;
    export|unset) fzf "$@" --preview "eval 'echo \$'{}" ;;
    ssh)          fzf "$@" --preview 'dig {}' ;;
    *)            fzf "$@" ;;
  esac
}

# Replace zsh's tab completion menu with an fzf-driven one.
source $ZPREZTODIR/modules/fzf/fzf-tab/fzf-tab.zsh
'use strict';

const should = require('chai').should(); // eslint-disable-line
const pathFn = require('path');
const fs = require('fs');
const rewire = require('rewire');

// Test suite for lib/spawn: wraps child_process spawning in a promise,
// optionally teeing stdout/stderr ("verbose") and decoding with an encoding.
describe('spawn', () => {
  const spawn = require('../../lib/spawn');
  const CacheStream = require('../../lib/cache_stream');
  const fixturePath = pathFn.join(__dirname, 'spawn_test.txt');
  const fixture = 'test content';

  before(done => {
    fs.writeFile(fixturePath, fixture, done);
  });

  after(done => {
    fs.unlink(fixturePath, done);
  });

  it('default', () => spawn('cat', [fixturePath]).then(content => {
    content.should.eql(fixture);
  }));

  it('command is required', () => {
    // BUG FIX: the original try/catch passed silently when spawn() did NOT
    // throw. chai's .throw invokes the function and fails on a missing error.
    spawn.should.throw('command is required!');
  });

  it('error', () =>
    // BUG FIX: the original only attached .catch, so a resolved promise
    // passed the test. Fail explicitly when no rejection occurs.
    spawn('cat', ['nothing']).then(
      () => should.fail('expected the promise to be rejected'),
      err => {
        err.message.trim().should.eql('cat: nothing: No such file or directory');
        err.code.should.eql(1);
      }
    ));

  it('verbose - stdout', () => {
    const spawn = rewire('../../lib/spawn');
    const stdoutCache = new CacheStream();
    const stderrCache = new CacheStream();
    const content = 'something';

    spawn.__set__('process', {
      stdout: stdoutCache,
      stderr: stderrCache
    });

    return spawn('echo', [content], {
      verbose: true
    }).then(() => {
      stdoutCache.getCache().toString('utf8').trim().should.eql(content);
    });
  });

  it('verbose - stderr', () => {
    const spawn = rewire('../../lib/spawn');
    const stdoutCache = new CacheStream();
    const stderrCache = new CacheStream();

    spawn.__set__('process', {
      stdout: stdoutCache,
      stderr: stderrCache
    });

    // BUG FIX: same silent-pass problem as 'error' above — assert rejection.
    return spawn('cat', ['nothing'], {
      verbose: true
    }).then(
      () => should.fail('expected the promise to be rejected'),
      () => {
        stderrCache.getCache().toString('utf8').trim().should
          .eql('cat: nothing: No such file or directory');
      }
    );
  });

  it('custom encoding', () => spawn('cat', [fixturePath], {encoding: 'hex'}).then(content => {
    content.should.eql(Buffer.from(fixture).toString('hex'));
  }));

  it('encoding = null', () => spawn('cat', [fixturePath], {encoding: null}).then(content => {
    content.should.eql(Buffer.from(fixture));
  }));

  it('stdio = inherit', () => spawn('echo', ['something'], {
    stdio: 'inherit'
  }));
});
#!/bin/bash
# Download the NetMHCpan 2.8 data archive into <repo>/source/data/netmhcpan-2.8a.

# BUG FIX: fail fast on any error. Without this, a failed mkdir/cd let wget
# run in whatever directory the shell happened to be in.
set -euo pipefail

# Repository root: three directories up from this script's resolved location.
BASE=$(dirname "$(dirname "$(dirname "$(readlink -f "${0}")")")")

mkdir -p "${BASE}/source/data/netmhcpan-2.8a"
cd "${BASE}/source/data/netmhcpan-2.8a"
wget http://www.cbs.dtu.dk/services/NetMHCpan-2.8/data.tar.gz
#!/bin/sh
# Rebuild the vendor/ directory from scratch with govendor, then drop
# interfaces we deliberately do not vendor.

# BUG FIX: abort on the first failure. Previously the final cleanup rm ran
# even when govendor init/add had failed, leaving a half-built vendor tree.
set -e

rm -rf vendor/
govendor init
govendor add +e

# don't vendor commonly used interfaces
rm -rf vendor/dgruber/drmaa2interface
package main import ( "errors" "fmt" "github.com/ops-class/test161" "os" "os/exec" "regexp" "strconv" "strings" ) type gitRepo struct { dir string remoteName string remoteRef string remoteURL string localRef string remoteUpdated bool gitSSHCommand string } var minGitVersion = test161.ProgramVersion{ Major: 2, Minor: 3, Revision: 0, } const GitUpgradeInst = ` Your version of Git must be at least 2.3.0 (you're running %v). To upgrade on Ubuntu, perform the following commands to add the Git stable ppa, and install the latest version of Git: sudo add-apt-repository ppa:git-core/ppa sudo apt-get update sudo apt-get install -y git ` const ( DoNotUseDeployKey = iota UseDeployKeyOnly TryDeployKey ) type gitCmdSpec struct { cmdline string allowEmpty bool debug bool deployKey int // Defaults to not use } func (git *gitRepo) setRemoteInfo(debug bool) error { // Infer the remote name and branch. We can get what we need if they're on a branch // and it's set up to track a remote. upstreamCmd := &gitCmdSpec{ cmdline: "git rev-parse --abbrev-ref --symbolic-full-name @{u}", debug: debug, } if remoteInfo, err := git.doOneCommand(upstreamCmd); err == nil { where := strings.Index(remoteInfo, "/") if where < 0 { // This shouldn't happen, but you never know return fmt.Errorf("git rev-parse not of format remote/branch: %v", remoteInfo) } git.remoteName = remoteInfo[0:where] git.remoteRef = remoteInfo[where+1:] // Get the URL of the remote urlCmd := &gitCmdSpec{ cmdline: fmt.Sprintf("git ls-remote --get-url %v", git.remoteName), debug: debug, } if url, err := git.doOneCommand(urlCmd); err != nil { fmt.Println(url, err) return err } else { git.remoteURL = url } } else { return err } return nil } const remoteErr = `Your current branch is not set up to track a remote, Use 'git branch -u <upstream>' to set the upstream for this branch, if one exists. If this is a new branch, use 'git push -u <remote> [<branch>]' to push the new branch to your remote. 
See 'man git branch' and 'man git push' for more information. ` const httpErr = `test161 will not accept submissions with http or https repository URLs. Please use 'git remote set-url <remote_name> <url>' to change your upstream, where <url> is the SSH URL of your repository (i.e. git@...). ` func (git *gitRepo) canSubmit() bool { if git.remoteURL == "" { fmt.Fprintf(os.Stderr, remoteErr) return false } else if strings.HasPrefix(git.remoteURL, "http") { fmt.Fprintf(os.Stderr, httpErr) return false } return true } // Get the commit corresponding to HEAD, and check for modifications, remote up-to-date, etc. func (git *gitRepo) commitFromHEAD(debug bool) (commit, ref string, err error) { ref = "" commit = "" var dirty, ok bool // Check for local modifications or untracked files if dirty, err = git.isLocalDirty(debug); err != nil { err = fmt.Errorf("Cannot determine local status: %v", err) return } else if dirty { err = errors.New("Submission not permitted while changes exist in your working directory.\nRun 'git status' to see what files have changed.") return } if git.localRef == "HEAD" { fmt.Fprintf(os.Stderr, "Warning: You are in a detached HEAD state, submitting HEAD commit\n") ref = "HEAD" } else if git.remoteName == "" || git.remoteRef == "" { fmt.Fprintf(os.Stderr, "Warning: No remote name or ref, submitting HEAD commit\n") ref = "HEAD" } else { // Try the deploy key, but don't fail if it doesn't exist. // We'll explicitly check later when before we build. // Check for changes with the remote ref = git.remoteName + "/" + git.remoteRef if ok, err = git.isRemoteUpToDate(debug, TryDeployKey); err != nil { err = fmt.Errorf("Cannot determine remote status: %v", err) return } else if !ok { err = errors.New("Your remote is not up-to-date with your local branch. 
Please push any changes or specify a commit id.") return } } // Finally, get the commit id from the ref commitCmd := &gitCmdSpec{ cmdline: "git rev-parse " + ref, debug: debug, } if commit, err = git.doOneCommand(commitCmd); err != nil { err = fmt.Errorf("Cannot rev-parse ref %v: %v", ref, err) } return } // Get the commit ID from a treeish string, which may be a hex commit id, tag, or branch. // It's OK if we have modifications, detached head, etc.; we just need to find the commit, // which we can do if its remote/branch or a tag on the tracked remote. func (git *gitRepo) commitFromTreeish(treeish string, debug bool) (commit, ref string, err error) { commit, ref = "", "" var ok bool // Break this down into remote/branch for where := strings.Index(treeish, "/") if where > 0 { git.remoteName = treeish[0:where] git.remoteRef = treeish[where+1:] } else { git.remoteRef = treeish } // First, figure out if this is a ref or a commit id if ok, err = regexp.MatchString("^[0-9a-f]+$", treeish); ok { // Done, it's just the commit it. commit = treeish } else { // See if we can actually find the ref. if ok, err = git.verifyLocalRef(treeish, debug); err != nil { err = fmt.Errorf("Error verifying local ref '%v': %v", treeish, err) return } else if !ok { err = fmt.Errorf("Unable to verify local ref '%v'", treeish) return } else if ok, err = git.verifyRemoteRef(git.remoteRef, debug, TryDeployKey); err != nil { err = fmt.Errorf("Error verifying remote ref '%v': %v", treeish, err) return } else if !ok { err = fmt.Errorf("Unable to verify remote ref '%v'", treeish) return } // Get the commit id ref = treeish commitCmd := &gitCmdSpec{ cmdline: "git rev-parse " + ref, debug: debug, } commit, err = git.doOneCommand(commitCmd) if err != nil { err = fmt.Errorf("Cannot rev-parse ref %v: %v", ref, err) } } return } // Infer all of the Git information we can from the source directory. Some of this // depends on how they set things up and if they are on a branch or detached. 
func gitRepoFromDir(src string, debug bool) (*gitRepo, error) { git := &gitRepo{} git.dir = src // Verify that we're in a git repo statusCmd := &gitCmdSpec{ cmdline: "git status", allowEmpty: true, debug: debug, } if res, err := git.doOneCommand(statusCmd); err != nil { return nil, fmt.Errorf("%v", res) } // This might fail, and if it does, we'll deal with it at submission time. if err := git.setRemoteInfo(debug); err != nil && debug { return nil, err } // Get the local branch (or HEAD if detached). We'll need this if submitting without // specifying the branch/tag/commit. branchCmd := &gitCmdSpec{ cmdline: "git rev-parse --abbrev-ref HEAD", debug: debug, } if branch, err := git.doOneCommand(branchCmd); err == nil { git.localRef = branch } // Finally, set the ssh command we'll use for Git git.gitSSHCommand = getGitSSHCommand() return git, nil } func getGitSSHCommand() string { users := []string{} for _, user := range clientConf.Users { users = append(users, user.Email) } if len(users) > 0 { return test161.GetDeployKeySSHCmd(users, KEYS_DIR) } else { return "" } } func (git *gitRepo) doOneCommand(gitCmd *gitCmdSpec) (string, error) { args := strings.Split(gitCmd.cmdline, " ") cmd := exec.Command(args[0], args[1:]...) 
cmd.Dir = git.dir if git.gitSSHCommand != "" && gitCmd.deployKey != DoNotUseDeployKey { cmd.Env = append(os.Environ(), git.gitSSHCommand) if gitCmd.debug { fmt.Println("Env:", git.gitSSHCommand) } } if gitCmd.debug { fmt.Println(gitCmd.cmdline) } output, err := cmd.CombinedOutput() if gitCmd.debug { fmt.Println(string(output)) } // Just trying, but fall back to local authentication for the command if err != nil && gitCmd.deployKey == TryDeployKey && git.gitSSHCommand != "" { if gitCmd.debug { fmt.Println("Git command failed using deployment key:", err) fmt.Println("Falling back to local authentication") } cmdCopy := *(gitCmd) cmdCopy.deployKey = DoNotUseDeployKey return git.doOneCommand(&cmdCopy) } else if err != nil { return "", fmt.Errorf(`Failed executing command "%v": %v`, gitCmd.cmdline, err) } else if len(output) == 0 && !gitCmd.allowEmpty { return "", fmt.Errorf(`No output from "%v"`, gitCmd.cmdline) } return strings.TrimSpace(string(output)), err } func (git *gitRepo) updateRemote(debug bool, deployKey int) error { // Update the local refs updateCmd := &gitCmdSpec{ cmdline: "git remote update " + git.remoteName, debug: debug, allowEmpty: true, deployKey: deployKey, } _, err := git.doOneCommand(updateCmd) if err != nil { git.remoteUpdated = true } return err } func (git *gitRepo) lookForRef(cmd, ref string, debug bool, deployKey int) (bool, error) { gitCmd := &gitCmdSpec{ cmdline: cmd, debug: debug, allowEmpty: true, deployKey: deployKey, } res, err := git.doOneCommand(gitCmd) if err != nil { return false, err } search := []string{ "refs/heads/", "refs/tags/", "refs/remotes/", } lines := strings.Split(res, "\n") for _, line := range lines { for _, s := range search { if strings.Contains(line, s+ref) { return true, nil } } } return false, nil } // Verfify a ref exists locally. Ref could be a branch head or tag. 
func (git *gitRepo) verifyLocalRef(ref string, debug bool) (bool, error) { return git.lookForRef("git show-ref", ref, debug, DoNotUseDeployKey) } // Verify a ref exists remotely. Ref could be a branch head or tag. func (git *gitRepo) verifyRemoteRef(ref string, debug bool, deployKey int) (bool, error) { return git.lookForRef("git ls-remote "+git.remoteName, ref, debug, deployKey) } // Determine if the working directory has uncommitted work func (git *gitRepo) isLocalDirty(debug bool) (bool, error) { // Just check if git status --porcelain outputs anything dirtyCmd := &gitCmdSpec{ cmdline: "git status --porcelain", allowEmpty: true, debug: debug, } if res, err := git.doOneCommand(dirtyCmd); err != nil { return false, err } else { return len(res) > 0, nil } } // Determine if the remote is up-to-date with the local. func (git *gitRepo) isRemoteUpToDate(debug bool, deployKey int) (bool, error) { if git.remoteName == "" { return false, errors.New("Cannot determine if your remote is up-to-date, undetermined remote name") } if !git.remoteUpdated { if err := git.updateRemote(debug, deployKey); err != nil { return false, err } } // Get our local commit gitCmd := &gitCmdSpec{ cmdline: "git rev-parse HEAD", debug: debug, } localCommit, err := git.doOneCommand(gitCmd) if err != nil { return false, err } // Get the remote commit gitCmd = &gitCmdSpec{ cmdline: fmt.Sprintf("git rev-parse %v/%v", git.remoteName, git.remoteRef), debug: debug, } remoteCommit, err := git.doOneCommand(gitCmd) if err != nil { return false, err } return localCommit == remoteCommit, nil } // Normally, this is 'git version M.m.r', but we've seen -rcN tacked on for // release candidates. 
var gitVersionRegexp *regexp.Regexp = regexp.MustCompile(`^git version (\d+)\.(\d+)\.(\d+).*$`) func gitVersion() (ver test161.ProgramVersion, err error) { var verText string git := &gitRepo{} if verText, err = git.doOneCommand(&gitCmdSpec{cmdline: "git version"}); err != nil { return } if res := gitVersionRegexp.FindStringSubmatch(verText); len(res) == 4 { maj, _ := strconv.Atoi(res[1]) min, _ := strconv.Atoi(res[2]) rev, _ := strconv.Atoi(res[3]) ver.Major = uint(maj) ver.Minor = uint(min) ver.Revision = uint(rev) } else { err = fmt.Errorf("`git version` does not match expected output: %v", verText) } return } // Compare the current version of git vs. our required version. Return true // if the current version meets our requirement, false otherwise. If the verison // is not recent enough, tell the user how to upgrade. func checkGitVersionAndComplain() (bool, error) { ver, err := gitVersion() if err != nil { return false, err } // At least min version if ver.CompareTo(minGitVersion) >= 0 { return true, nil } else { fmt.Printf(GitUpgradeInst, ver) return false, nil } } func (git *gitRepo) verifyDeploymentKey(debug bool) error { git.remoteUpdated = false //force return git.updateRemote(debug, UseDeployKeyOnly) }
#ifndef DYNAMIC_ARRAY_H
#define DYNAMIC_ARRAY_H

#include <cstddef>

// Minimal growable array. Every append reallocates and copies, so appends
// are O(n) each; acceptable for small collections.
template <typename Type>
class DynamicArray
{
private:
    Type* m_data;   // heap buffer of exactly m_size elements (or nullptr)
    size_t m_size;  // number of stored elements

public:
    DynamicArray() : m_data(nullptr), m_size(0)
    {
    }

    // BUG FIX (rule of three): the compiler-generated copy operations shared
    // m_data between copies, producing a double delete in the destructors.
    // Deep-copy the buffer instead.
    DynamicArray(const DynamicArray& other)
        : m_data(other.m_size ? new Type[other.m_size] : nullptr),
          m_size(other.m_size)
    {
        for (size_t i = 0; i < m_size; ++i) {
            m_data[i] = other.m_data[i];
        }
    }

    DynamicArray& operator=(const DynamicArray& other)
    {
        if (this != &other) {  // self-assignment guard
            Type* fresh = other.m_size ? new Type[other.m_size] : nullptr;
            for (size_t i = 0; i < other.m_size; ++i) {
                fresh[i] = other.m_data[i];
            }
            delete [] m_data;
            m_data = fresh;
            m_size = other.m_size;
        }
        return *this;
    }

    ~DynamicArray()
    {
        delete [] m_data;
    }

    // Append one element, growing the buffer by exactly one slot.
    void append(Type newData)
    {
        // BUG FIX: use size_t (not int) so the size arithmetic cannot narrow.
        size_t newDataSize = m_size + 1;
        Type* dataTemp = new Type[newDataSize];
        for (size_t i = 0; i < m_size; ++i) {
            dataTemp[i] = m_data[i];
        }
        dataTemp[newDataSize - 1] = newData;
        m_size++;
        delete [] m_data;
        m_data = dataTemp;
    }

    // Number of stored elements. Const-qualified so it works on const arrays.
    size_t size() const
    {
        return m_size;
    }

    // Unchecked element access; i must be in [0, size()).
    Type at(int i) const
    {
        return m_data[i];
    }
};

#endif
# frozen_string_literal: true

# Debugging and scraping dependencies for the gem.
require 'pry'
require 'nokogiri'
require 'open-uri'
require 'net/http'
require 'json'

# Gem components: version constant, Snkr model, API fetcher, and the CLI.
require_relative "UpcomingSnkrReleases/version"
require_relative './UpcomingSnkrReleases/Snkr.rb'
require_relative './UpcomingSnkrReleases/Get_API_Data.rb'
require_relative './UpcomingSnkrReleases/CLI.rb'

# Top-level namespace for the upcoming-sneaker-releases gem.
module UpcomingSnkrReleases
  # Base error class for gem-specific failures.
  class Error < StandardError; end
  # Your code goes here...
end
package com.md.appuserconnect.core.services.internal; import java.io.IOException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.json.JSONException; import com.google.appengine.api.users.User; import com.google.appengine.api.users.UserService; import com.google.appengine.api.users.UserServiceFactory; import com.md.appuserconnect.core.utils.JSONObject2; @SuppressWarnings("serial") public class LoginCheckService extends HttpServlet { public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException { UserService userService = UserServiceFactory.getUserService(); if (userService.isUserLoggedIn()) { User user = userService.getCurrentUser(); try { JSONObject2 json = new JSONObject2(); json.put("Status", "Y"); json.put("StatusText", "Logged In"); json.put("Email", user.getEmail()); json.put("UserID", user.getUserId()); json.put("Nickname", user.getNickname()); json.put("Admin", (userService.isUserAdmin() ? "Yes" : "No")); //json.append("LogoutURL", userService.createLogoutURL("/logincheck")); json.sendAsRepsonse(resp); } catch (JSONException e) { e.printStackTrace(); } } else { try { JSONObject2 json = new JSONObject2(); json.put("Status", "N"); json.put("StatusText", "Not Logged In"); //json.append("LoginURL", userService.createLoginURL("/logincheck")); json.sendAsRepsonse(resp); } catch (JSONException e) { e.printStackTrace(); } } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.sshd.server.shell;

import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

import org.apache.sshd.common.channel.PtyMode;
import org.apache.sshd.common.util.GenericUtils;
import org.apache.sshd.common.util.buffer.Buffer;
import org.apache.sshd.common.util.buffer.ByteArrayBuffer;

/**
 * Handles the input while taking into account the {@link PtyMode}s for handling CR / LF
 *
 * @author <a href="mailto:<EMAIL>">Apache MINA SSHD Project</a>
 */
public class TtyFilterInputStream extends FilterInputStream {
    /** The PTY modes this filter reacts to; all others are ignored. */
    public static final Set<PtyMode> INPUT_OPTIONS =
        Collections.unmodifiableSet(EnumSet.of(PtyMode.ONLCR, PtyMode.OCRNL, PtyMode.ONLRET, PtyMode.ONOCR));

    private final Set<PtyMode> ttyOptions;
    // Holds bytes queued via write(...) plus pushed-back translation output;
    // drained before the underlying stream is read.
    private Buffer buffer = new ByteArrayBuffer(Integer.SIZE, false);
    // Last character returned by read(); drives the CR-NL pairing in handleLF.
    private int lastChar = -1;

    public TtyFilterInputStream(InputStream in, Map<PtyMode, ?> modes) {
        this(in, PtyMode.resolveEnabledOptions(modes, INPUT_OPTIONS));
    }

    public TtyFilterInputStream(InputStream in, Collection<PtyMode> ttyOptions) {
        super(Objects.requireNonNull(in, "No input stream provided"));
        // we create a copy of the options so as to avoid concurrent modifications
        this.ttyOptions = GenericUtils.of(ttyOptions);
        // TODO validate non-conflicting options
    }

    // Queue one byte so it is returned ahead of data from the wrapped stream.
    public synchronized void write(int c) {
        buffer.putByte((byte) c);
    }

    // Bulk variant of write(int).
    public synchronized void write(byte[] buf, int off, int len) {
        if (len == 1) {
            write(buf[off] & 0xFF);
        } else {
            buffer.putBytes(buf, off, len);
        }
    }

    @Override
    public synchronized int available() throws IOException {
        // Queued/pushed-back bytes count as available in addition to the
        // wrapped stream's own bytes.
        return super.available() + buffer.available();
    }

    @Override
    public synchronized int read() throws IOException {
        int c = readRawInput();
        if (c == -1) {
            return c;
        }
        if (c == '\r') {
            c = handleCR();
        } else if (c == '\n') {
            c = handleLF();
        }
        lastChar = c;
        return c;
    }

    // Apply OCRNL translation to a carriage return.
    protected int handleCR() throws IOException {
        if (ttyOptions.contains(PtyMode.OCRNL)) {
            return '\n'; // Translate carriage return to newline
        } else {
            return '\r';
        }
    }

    protected int handleLF() throws IOException {
        // Map NL to CR-NL.
        if ((ttyOptions.contains(PtyMode.ONLCR) || ttyOptions.contains(PtyMode.ONOCR))
                && (lastChar != '\r')) {
            // Push the NL back into the buffer so it follows the CR returned now.
            buffer = insertCharacter(buffer, '\n');
            return '\r';
        } else if (ttyOptions.contains(PtyMode.ONLRET)) {
            // Newline performs a carriage return
            return '\r';
        } else {
            return '\n';
        }
    }

    // Insert c in front of the buffer's current read position, reallocating
    // only when there is no room before rpos.
    // TODO add 'insertXXX' methods to the Buffer class
    protected Buffer insertCharacter(Buffer org, int c) {
        int remaining = org.capacity();
        int readPos = org.rpos();
        // see if can accommodate the character in the original buffer
        if ((remaining > 0) && (readPos > 0)) {
            int writePos = org.wpos();
            org.wpos(readPos - 1);
            org.putByte((byte) c);
            org.wpos(writePos);
            org.rpos(readPos - 1);
            return org;
        } else {
            Buffer buf = new ByteArrayBuffer(org.available() + Byte.SIZE, false);
            buf.putByte((byte) c);
            buf.putBuffer(org);
            return buf;
        }
    }

    // Serve buffered/pushed-back bytes first, then the wrapped stream.
    protected int readRawInput() throws IOException {
        if (buffer.available() > 0) {
            return buffer.getUByte();
        } else {
            return this.in.read();
        }
    }

    @Override
    public synchronized int read(byte[] b, int off, int len) throws IOException {
        if (len == 1) {
            int c = read();
            if (c == -1) {
                return -1;
            }
            b[off] = (byte) c;
            return 1;
        }

        if (buffer.available() == 0) {
            // Bulk-read raw bytes, then re-deliver them one at a time through
            // read() so the per-character CR/LF translation still applies.
            buffer.compact();
            int nb = this.in.read(b, off, len);
            if (nb == -1) {
                return nb;
            }
            buffer.putRawBytes(b, off, nb);
        }

        int nb = 0;
        for (int curPos = off; (nb < len) && (buffer.available() > 0); nb++, curPos++) {
            b[curPos] = (byte) read();
        }
        return nb;
    }
}
// Re-export the Tooltip Svelte component as this package's public export.
// (Removed a stray "<gh_stars>" scraper artifact that made the file invalid JS.)
export { default as Tooltip } from "./Tooltip.svelte";
<reponame>Binotto/angular<filename>packages/core/src/render3/instructions/class_map_interpolation.ts /** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {getLView} from '../state'; import {NO_CHANGE} from '../tokens'; import {interpolation1, interpolation2, interpolation3, interpolation4, interpolation5, interpolation6, interpolation7, interpolation8, interpolationV} from './interpolation'; import {ɵɵclassMap} from './styling'; /** * * Update an interpolated class on an element with single bound value surrounded by text. * * Used when the value passed to a property has 1 interpolated value in it: * * ```html * <div class="prefix{{v0}}suffix"></div> * ``` * * Its compiled representation is: * * ```ts * ɵɵclassMapInterpolate1('prefix', v0, 'suffix'); * ``` * * @param prefix Static value used for concatenation only. * @param v0 Value checked for change. * @param suffix Static value used for concatenation only. * @codeGenApi */ export function ɵɵclassMapInterpolate1(prefix: string, v0: any, suffix: string): void { // TODO(FW-1340): Refactor to remove the use of other instructions here. const interpolatedValue = interpolation1(getLView(), prefix, v0, suffix); if (interpolatedValue !== NO_CHANGE) { ɵɵclassMap(interpolatedValue); } } /** * * Update an interpolated class on an element with 2 bound values surrounded by text. * * Used when the value passed to a property has 2 interpolated values in it: * * ```html * <div class="prefix{{v0}}-{{v1}}suffix"></div> * ``` * * Its compiled representation is: * * ```ts * ɵɵclassMapInterpolate2('prefix', v0, '-', v1, 'suffix'); * ``` * * @param prefix Static value used for concatenation only. * @param v0 Value checked for change. * @param i0 Static value used for concatenation only. * @param v1 Value checked for change. * @param suffix Static value used for concatenation only. 
* @codeGenApi */ export function ɵɵclassMapInterpolate2( prefix: string, v0: any, i0: string, v1: any, suffix: string): void { // TODO(FW-1340): Refactor to remove the use of other instructions here. const interpolatedValue = interpolation2(getLView(), prefix, v0, i0, v1, suffix); if (interpolatedValue !== NO_CHANGE) { ɵɵclassMap(interpolatedValue); } } /** * * Update an interpolated class on an element with 3 bound values surrounded by text. * * Used when the value passed to a property has 3 interpolated values in it: * * ```html * <div class="prefix{{v0}}-{{v1}}-{{v2}}suffix"></div> * ``` * * Its compiled representation is: * * ```ts * ɵɵclassMapInterpolate3( * 'prefix', v0, '-', v1, '-', v2, 'suffix'); * ``` * * @param prefix Static value used for concatenation only. * @param v0 Value checked for change. * @param i0 Static value used for concatenation only. * @param v1 Value checked for change. * @param i1 Static value used for concatenation only. * @param v2 Value checked for change. * @param suffix Static value used for concatenation only. * @codeGenApi */ export function ɵɵclassMapInterpolate3( prefix: string, v0: any, i0: string, v1: any, i1: string, v2: any, suffix: string): void { // TODO(FW-1340): Refactor to remove the use of other instructions here. const interpolatedValue = interpolation3(getLView(), prefix, v0, i0, v1, i1, v2, suffix); if (interpolatedValue !== NO_CHANGE) { ɵɵclassMap(interpolatedValue); } } /** * * Update an interpolated class on an element with 4 bound values surrounded by text. * * Used when the value passed to a property has 4 interpolated values in it: * * ```html * <div class="prefix{{v0}}-{{v1}}-{{v2}}-{{v3}}suffix"></div> * ``` * * Its compiled representation is: * * ```ts * ɵɵclassMapInterpolate4( * 'prefix', v0, '-', v1, '-', v2, '-', v3, 'suffix'); * ``` * * @param prefix Static value used for concatenation only. * @param v0 Value checked for change. * @param i0 Static value used for concatenation only. 
* @param v1 Value checked for change. * @param i1 Static value used for concatenation only. * @param v2 Value checked for change. * @param i2 Static value used for concatenation only. * @param v3 Value checked for change. * @param suffix Static value used for concatenation only. * @codeGenApi */ export function ɵɵclassMapInterpolate4( prefix: string, v0: any, i0: string, v1: any, i1: string, v2: any, i2: string, v3: any, suffix: string): void { // TODO(FW-1340): Refactor to remove the use of other instructions here. const interpolatedValue = interpolation4(getLView(), prefix, v0, i0, v1, i1, v2, i2, v3, suffix); if (interpolatedValue !== NO_CHANGE) { ɵɵclassMap(interpolatedValue); } } /** * * Update an interpolated class on an element with 5 bound values surrounded by text. * * Used when the value passed to a property has 5 interpolated values in it: * * ```html * <div class="prefix{{v0}}-{{v1}}-{{v2}}-{{v3}}-{{v4}}suffix"></div> * ``` * * Its compiled representation is: * * ```ts * ɵɵclassMapInterpolate5( * 'prefix', v0, '-', v1, '-', v2, '-', v3, '-', v4, 'suffix'); * ``` * * @param prefix Static value used for concatenation only. * @param v0 Value checked for change. * @param i0 Static value used for concatenation only. * @param v1 Value checked for change. * @param i1 Static value used for concatenation only. * @param v2 Value checked for change. * @param i2 Static value used for concatenation only. * @param v3 Value checked for change. * @param i3 Static value used for concatenation only. * @param v4 Value checked for change. * @param suffix Static value used for concatenation only. * @codeGenApi */ export function ɵɵclassMapInterpolate5( prefix: string, v0: any, i0: string, v1: any, i1: string, v2: any, i2: string, v3: any, i3: string, v4: any, suffix: string): void { // TODO(FW-1340): Refactor to remove the use of other instructions here. 
const interpolatedValue = interpolation5(getLView(), prefix, v0, i0, v1, i1, v2, i2, v3, i3, v4, suffix); if (interpolatedValue !== NO_CHANGE) { ɵɵclassMap(interpolatedValue); } } /** * * Update an interpolated class on an element with 6 bound values surrounded by text. * * Used when the value passed to a property has 6 interpolated values in it: * * ```html * <div class="prefix{{v0}}-{{v1}}-{{v2}}-{{v3}}-{{v4}}-{{v5}}suffix"></div> * ``` * * Its compiled representation is: * * ```ts * ɵɵclassMapInterpolate6( * 'prefix', v0, '-', v1, '-', v2, '-', v3, '-', v4, '-', v5, 'suffix'); * ``` * * @param prefix Static value used for concatenation only. * @param v0 Value checked for change. * @param i0 Static value used for concatenation only. * @param v1 Value checked for change. * @param i1 Static value used for concatenation only. * @param v2 Value checked for change. * @param i2 Static value used for concatenation only. * @param v3 Value checked for change. * @param i3 Static value used for concatenation only. * @param v4 Value checked for change. * @param i4 Static value used for concatenation only. * @param v5 Value checked for change. * @param suffix Static value used for concatenation only. * @codeGenApi */ export function ɵɵclassMapInterpolate6( prefix: string, v0: any, i0: string, v1: any, i1: string, v2: any, i2: string, v3: any, i3: string, v4: any, i4: string, v5: any, suffix: string): void { // TODO(FW-1340): Refactor to remove the use of other instructions here. const interpolatedValue = interpolation6(getLView(), prefix, v0, i0, v1, i1, v2, i2, v3, i3, v4, i4, v5, suffix); if (interpolatedValue !== NO_CHANGE) { ɵɵclassMap(interpolatedValue); } } /** * * Update an interpolated class on an element with 7 bound values surrounded by text. 
* * Used when the value passed to a property has 7 interpolated values in it: * * ```html * <div class="prefix{{v0}}-{{v1}}-{{v2}}-{{v3}}-{{v4}}-{{v5}}-{{v6}}suffix"></div> * ``` * * Its compiled representation is: * * ```ts * ɵɵclassMapInterpolate7( * 'prefix', v0, '-', v1, '-', v2, '-', v3, '-', v4, '-', v5, '-', v6, 'suffix'); * ``` * * @param prefix Static value used for concatenation only. * @param v0 Value checked for change. * @param i0 Static value used for concatenation only. * @param v1 Value checked for change. * @param i1 Static value used for concatenation only. * @param v2 Value checked for change. * @param i2 Static value used for concatenation only. * @param v3 Value checked for change. * @param i3 Static value used for concatenation only. * @param v4 Value checked for change. * @param i4 Static value used for concatenation only. * @param v5 Value checked for change. * @param i5 Static value used for concatenation only. * @param v6 Value checked for change. * @param suffix Static value used for concatenation only. * @codeGenApi */ export function ɵɵclassMapInterpolate7( prefix: string, v0: any, i0: string, v1: any, i1: string, v2: any, i2: string, v3: any, i3: string, v4: any, i4: string, v5: any, i5: string, v6: any, suffix: string): void { // TODO(FW-1340): Refactor to remove the use of other instructions here. const interpolatedValue = interpolation7( getLView(), prefix, v0, i0, v1, i1, v2, i2, v3, i3, v4, i4, v5, i5, v6, suffix); if (interpolatedValue !== NO_CHANGE) { ɵɵclassMap(interpolatedValue); } } /** * * Update an interpolated class on an element with 8 bound values surrounded by text. 
* * Used when the value passed to a property has 8 interpolated values in it: * * ```html * <div class="prefix{{v0}}-{{v1}}-{{v2}}-{{v3}}-{{v4}}-{{v5}}-{{v6}}-{{v7}}suffix"></div> * ``` * * Its compiled representation is: * * ```ts * ɵɵclassMapInterpolate8( * 'prefix', v0, '-', v1, '-', v2, '-', v3, '-', v4, '-', v5, '-', v6, '-', v7, 'suffix'); * ``` * * @param prefix Static value used for concatenation only. * @param v0 Value checked for change. * @param i0 Static value used for concatenation only. * @param v1 Value checked for change. * @param i1 Static value used for concatenation only. * @param v2 Value checked for change. * @param i2 Static value used for concatenation only. * @param v3 Value checked for change. * @param i3 Static value used for concatenation only. * @param v4 Value checked for change. * @param i4 Static value used for concatenation only. * @param v5 Value checked for change. * @param i5 Static value used for concatenation only. * @param v6 Value checked for change. * @param i6 Static value used for concatenation only. * @param v7 Value checked for change. * @param suffix Static value used for concatenation only. * @codeGenApi */ export function ɵɵclassMapInterpolate8( prefix: string, v0: any, i0: string, v1: any, i1: string, v2: any, i2: string, v3: any, i3: string, v4: any, i4: string, v5: any, i5: string, v6: any, i6: string, v7: any, suffix: string): void { // TODO(FW-1340): Refactor to remove the use of other instructions here. const interpolatedValue = interpolation8( getLView(), prefix, v0, i0, v1, i1, v2, i2, v3, i3, v4, i4, v5, i5, v6, i6, v7, suffix); if (interpolatedValue !== NO_CHANGE) { ɵɵclassMap(interpolatedValue); } } /** * Update an interpolated class on an element with 8 or more bound values surrounded by text. * * Used when the number of interpolated values exceeds 7. 
 *
 * ```html
 * <div
 * class="prefix{{v0}}-{{v1}}-{{v2}}-{{v3}}-{{v4}}-{{v5}}-{{v6}}-{{v7}}-{{v8}}-{{v9}}suffix"></div>
 * ```
 *
 * Its compiled representation is:
 *
 * ```ts
 * ɵɵclassMapInterpolateV(
 *     ['prefix', v0, '-', v1, '-', v2, '-', v3, '-', v4, '-', v5, '-', v6, '-', v7, '-', v8, '-',
 *     v9, 'suffix']);
 * ```
 *
 * @param values A collection of values and the strings in-between those values, beginning with
 * a string prefix and ending with a string suffix.
 * (e.g. `['prefix', value0, '-', value1, '-', value2, ..., value99, 'suffix']`)
 * @codeGenApi
 */
export function ɵɵclassMapInterpolateV(values: any[]): void {
  // TODO(FW-1340): Refactor to remove the use of other instructions here.
  const interpolatedValue = interpolationV(getLView(), values);
  if (interpolatedValue !== NO_CHANGE) {
    ɵɵclassMap(interpolatedValue);
  }
}
#! /bin/sh -e
# tup - A file-based build system
#
# Copyright (C) 2010-2018 Mike Shal <marfey@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

# Make sure moving a directory out of tup will successfully remove watches on
# all the subdirectories. Use the USR1 signal to have the monitor quit if it
# has a watch on any invalid tupid.
. ./tup.sh
check_monitor_supported
mkdir tuptest
cd tuptest
re_init
monitor

# Build a tiny project under the monitor's watch.
mkdir -p foo/bar
cd foo/bar
echo 'int main(void) {return 0;}' > foo.c
echo ': foreach *.c |> gcc %f -o %o |> %B' > Tupfile
cd ../..
update
signal_monitor

# Move the watched tree outside the tup root; the monitor must drop the
# watches on foo and its subdirectories.
mv foo ..
tup flush
signal_monitor
stop_monitor
update

# The directory must no longer exist in tup's database.
tup_object_no_exist . foo
eotup
def swap(arr, a, b):
    """Swap the elements at indices ``a`` and ``b`` of ``arr`` in place.

    Uses Python's tuple assignment instead of a temporary variable.
    Returns None, like the original.
    """
    arr[a], arr[b] = arr[b], arr[a]
#!/bin/sh
# Generate LaTeX sections (user guide + code organization) from Markdown
# sources, relative to this script's directory.
cd $(dirname $0)

pandoc --from markdown --to latex --output user_guide.tex ../../README.md
# Removes "doc/report" path from figures
sed -i s/doc\\/report\\///g user_guide.tex
# Convert links to footnotes
sed -i "s/\\\\href{\\([^}]*\\)}{\\([^}]*\\)}/\2\\\\footnote{\\\\url{\1}}/" user_guide.tex

pandoc --from markdown --to latex --output code_source_org.tex code_source_org.md
# Use non numbered section for code source organization
sed -i "s/section/section*/" code_source_org.tex
# Require gems
require 'rubygems'
require 'bundler/setup'
Bundler.require(:default)

# Require libraries
require 'set'
require 'benchmark'

# Require all ruby files
Dir["#{File.dirname(__FILE__)}/app/*.rb"].each { |f| require f }

include SuperMedian
include QuickSort

puts '=== SuperMedian Algorithm Analysis Script ==='
puts ''

# Benchmark each strategy over increasing random-input sizes.
array_sizes = [100, 1000, 10000, 100000, 1000000]
max_array_value = 10000

# Baseline: Ruby's built-in (C-implemented) sort.
puts 'Ruby (C) Quick Sort:'
array_sizes.each do |size|
  unsorted_list = Array.new(size) { rand(1..max_array_value) }
  total_time = Benchmark.realtime { unsorted_list.sort }
  puts total_time
end

# Pure-Ruby quicksort, then index the middle element as the median.
puts "\nRuby Quick Sort:"
array_sizes.each do |size|
  unsorted_list = Array.new(size) { rand(1..max_array_value) }
  total_time = Benchmark.realtime do
    sorted = QuickSort.quick_sort(unsorted_list, 0, size - 1)
    sorted[size / 2]
  end
  puts total_time
end

# Median selection via the project's SuperMedian implementation.
puts "\nSuper Median:"
array_sizes.each do |size|
  unsorted_list = Array.new(size) { rand(1..max_array_value) }
  total_time = Benchmark.realtime { SuperMedian.super_median(unsorted_list) }
  puts total_time
end
// Copyright 2015 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package omaha

import (
	"runtime"
)

// Translate GOARCH to Omaha's choice of names, because no two independent
// software projects *ever* use the same set of architecture names. ;-)
func LocalArch() string {
	switch runtime.GOARCH {
	case "386":
		return "x86"
	case "amd64":
		return "x64"
	case "amd64p32":
		// Not actually specified by Omaha but it follows the above.
		return "x32"
	case "arm":
		fallthrough
	default:
		// Nothing else is defined by Omaha so anything goes.
		return runtime.GOARCH
	}
}

// Translate GOOS to Omaha's platform names as best as we can.
func LocalPlatform() string {
	switch runtime.GOOS {
	case "darwin":
		return "mac" // or "ios"
	case "linux":
		return "linux" // or "android"
	case "windows":
		return "win"
	default:
		// Nothing else is defined by Omaha so anything goes.
		return runtime.GOOS
	}
}
#!/bin/bash
#
# patent_lim: Linguistically informed masking for representation learning in the patent domain
#
# Copyright (c) Siemens AG, 2020
#
# SPDX-License-Identifier: Apache-2.0
#
# Build BERT pretraining TFRecords for several data shards in parallel.

source ./bert_env8/bin/activate
export BERT_BASE_DIR=/home/ubuntu/PycharmProjects/patent/bert

# Shard ids to preprocess; each becomes one background job.
runList='667 668 669 670 672 673 674'

# Convert one shard of raw text into masked-LM TFRecords.
foo () {
    local run=$1
    python create_pretraining_data.py \
        --input_file=./data/part-000000000$run.txt \
        --output_file=./data/tf_examples$run.tfrecord \
        --vocab_file=$BERT_BASE_DIR/vocab.txt \
        --do_lower_case=False \
        --max_seq_length=128 \
        --max_predictions_per_seq=20 \
        --masked_lm_prob=0.15 \
        --random_seed=12345 \
        --dupe_factor=5
    # BUG FIX: the original echoed "$i", which is undefined inside foo().
    echo "Wrote tf records of sample $run"
}

for run in $runList; do
    foo "$run" &
done

# Block until every background job finishes, so the script's exit actually
# means all TFRecords were written (the original returned immediately).
wait
#!/bin/bash
# Uninstall the x11vnc systemd service: stop and disable it, remove the
# password file and unit file, then reload systemd.

#Require sudo
if [ $EUID != 0 ]; then
    sudo "$0" "$@"
    exit $?
fi

echo "removing service..."
systemctl stop x11vnc.service
systemctl disable x11vnc.service
echo "done"

echo "removing x11vnc password file /etc/x11vnc.passwd"
# -f keeps the script from failing when the file is already gone.
rm -f /etc/x11vnc.passwd
echo "done"

echo "removing service from /lib/systemd/system/..."
rm -f /lib/systemd/system/x11vnc.service
echo "done"

echo "reloading services"
systemctl daemon-reload
echo "done"

echo "x11vnc service uninstalled successfully!"
#!/usr/bin/bash
# Build the project's C/Cython extension modules in place (next to sources).
python Setup.py build_ext --inplace
#!/bin/sh
# Pull the latest master of MSc_Research_Providers on each provider VM.
#
# BUG FIX: the original ran a bare `ssh ubuntu@host` (opening an interactive
# session) followed by `cd` / `git pull` / `exit` — those commands executed
# on the LOCAL machine after each session ended, never on the remote host.
# The command must be passed as an argument to ssh to run remotely.
for host in 192.168.23.17 192.168.23.11 192.168.23.21 192.168.23.13; do
    ssh "ubuntu@$host" 'cd MSc_Research_Providers && git pull origin master'
done
const Person = require("../artifact/Person.js"); const Comparator = {}; const personLabel = (person) => { let answer; if (person.middle) { answer = `${person.last}, ${person.first} ${person.middle}`; } else { answer = `${person.last}, ${person.first}`; } return answer; }; const trimTitle = (item) => { let answer = null; if (item) { answer = item.title.trim(); if (answer.startsWith("A ")) { answer = answer.substring("A ".length); } else if (answer.startsWith("The ")) { answer = answer.substring("The ".length); } } return answer; }; // ///////////////////////////////////////////////////////////////////////////// Comparator.compareByCount = (keyToCount) => (keyA, keyB) => { const countA = keyToCount[keyA]; const countB = keyToCount[keyB]; let answer = Comparator.compareString(false)(countA, countB); if (answer === 0) { if (Person.properties[keyA]) { answer = Comparator.comparePersonKey(keyA, keyB); } else { answer = Comparator.compareString(true)(keyA, keyB); } } return answer; }; Comparator.compareByMeeting = (ascending) => (bookA, bookB) => { const meetingA = bookA.meeting || ""; const meetingB = bookB.meeting || ""; let answer = Comparator.compareString(ascending)(meetingA, meetingB); if (answer === 0) { answer = Comparator.compareByTitle(bookA, bookB); } return answer; }; Comparator.comparePersonKey = (keyA, keyB) => { const labelA = personLabel(Person.properties[keyA]); const labelB = personLabel(Person.properties[keyB]); return Comparator.compareString(true)(labelA, labelB); }; Comparator.compareString = (ascending) => (stringA, stringB) => { const factor = ascending ? 1 : -1; let answer = -factor; if (stringA === stringB) { answer = 0; } else if (stringA > stringB) { answer = factor; } return answer; }; Comparator.compareByTitle = (itemA, itemB) => { const titleA = trimTitle(itemA); const titleB = trimTitle(itemB); return Comparator.compareString(true)(titleA, titleB); }; Object.freeze(Comparator); module.exports = Comparator;
#! /bin/bash -x
#$ -cwd
#$ -V
# Count whole-word occurrences of each pattern in file $1 within file $2,
# printing "pattern<TAB>count" per line.
#
# Fixes vs the original:
#  - read patterns line-by-line instead of `for i in \`cat\`` (word splitting)
#  - drop the eval indirection (shell-injection risk on pattern content)
#  - use printf: plain `echo "\t"` (no -e) printed a literal backslash-t
## $1 is file containing patterns
## $2 is the file to search
while IFS= read -r pattern; do
    count=$(grep -w -c -- "$pattern" "$2")
    printf '%s\t%s\n' "$pattern" "$count"
done < "$1"
package db

import (
	"testing"

	"github.com/go-pg/pg/v10"
	"github.com/speedandfunction-russ/dev-toolkit/pkg/repository"
	"github.com/stretchr/testify/assert"
)

// testRepository accepts the shared repository.Repository interface; passing
// a value to it is effectively a compile-time conformance check.
func testRepository(repo repository.Repository) error {
	return nil
}

// TestRepository verifies that NewRepository's result satisfies the
// repository.Repository interface (the helper always returns nil).
func TestRepository(t *testing.T) {
	assert := assert.New(t)

	t.Run("repository interface match", func(t *testing.T) {
		assert.Nil(testRepository(NewRepository(new(pg.DB))))
	})
}
package edu.jluzh.test_layuimini.mapper;

import edu.jluzh.test_layuimini.bean.Car;
import edu.jluzh.test_layuimini.bean.CarImg;
import org.apache.ibatis.annotations.Mapper;

import java.util.List;

/**
 * MyBatis mapper for car image records.
 * (Comments translated from the original Chinese.)
 *
 * @author icecool
 * @version created 2021/5/26 19:23
 */
@Mapper
public interface ICarImgMapper {
    // Insert a car image record.
    void addCarImg(CarImg img);

    // Delete the image row(s); per the original note, callers run this
    // before deleting the car itself.
    void deleteImg(int id);

    // Update a car's image record.
    void updateImg(CarImg img);

    // Look up image info by car id.
    // NOTE(review): returns a single String although the (unused) List import
    // suggests multiple images may exist per car — confirm intended cardinality.
    String findCarImgById(int carId);
}
function std_dev = standard_deviation (values)
  % Population standard deviation of VALUES: sqrt(mean((x - mean(x)).^2)).
  % Note this normalises by N (population), unlike MATLAB's std() whose
  % default normalises by N-1 (sample).
  mu = mean(values);
  dev_squared = (values - mu) .^2;
  std_dev = sqrt(mean(dev_squared));
end
package com.wpisen.trace.server.service;

import java.util.List;

import com.wpisen.trace.server.service.entity.ClientSessionVo;

/**
 * Project/system management service.
 * Created by wpisen on 17/6/26.
 */
public interface ProjectSystemManage {
    /**
     * Get the currently active (online) client sessions for a project.
     *
     * @param proId project id
     * @return active client sessions for that project
     */
    public List<ClientSessionVo> getActiveSessions(Integer proId);
}
/**
 * @module meteoJS/events
 */

/**
 * Listen for a certain type of event.
 *
 * @abstract
 * @param {string} listener - Event type.
 * @param {callback} callback - Listener function.
 * @param {mixed} [thisArg] - Value used as `this` when invoking the callback.
 * @returns {string} Listener function key (random 9-character id).
 */
function on(listener, callback, thisArg) {
  if (!('listeners' in this) ||
      this.listeners === undefined)
    this.listeners = {};
  if (!(listener in this.listeners))
    this.listeners[listener] = {};
  // Adapted from https://gist.github.com/gordonbrander/2230317
  var result_key = Math.random().toString(36).substr(2, 9);
  this.listeners[listener][result_key] = {
    callback: callback,
    thisArg: thisArg
  };
  return result_key;
}

/**
 * Unlisten for a certain type of event.
 *
 * @abstract
 * @param {string} listener - Event type.
 * @param {string} key - Listener function key returned by on().
 */
function un(listener, key) {
  if ('listeners' in this &&
      this.listeners !== undefined &&
      listener in this.listeners &&
      key in this.listeners[listener])
    delete this.listeners[listener][key];
}

/**
 * Listen once for a certain type of event.
 *
 * @abstract
 * @param {string} listener - Event type.
 * @param {callback} callback - Listener function.
 * @param {mixed} [thisArg] - Value used as `this` when invoking the callback.
 */
function once(listener, callback, thisArg) {
  if (!('once_listeners' in this) ||
      this.once_listeners === undefined)
    this.once_listeners = {};
  if (!(listener in this.once_listeners) ||
      !('push' in this.once_listeners[listener]))
    this.once_listeners[listener] = [];
  this.once_listeners[listener].push({
    callback: callback,
    thisArg: thisArg
  });
}

/**
 * Returns whether any listener function (regular or once) is registered
 * for an event type.
 *
 * @abstract
 * @param {string} listener - Event type.
 * @returns {boolean}
 */
function hasListener(listener) {
  return ('listeners' in this &&
          this.listeners !== undefined &&
          listener in this.listeners &&
          Object.keys(this.listeners[listener]).length) ||
         ('once_listeners' in this &&
          listener in this.once_listeners &&
          Object.keys(this.once_listeners[listener]).length);
}

/**
 * Execute all listener functions for an event type. Any additional
 * arguments are forwarded to the listeners.
 *
 * @abstract
 * @param {string} listener - Event type.
 */
function trigger(listener) {
  let args = Array.prototype.slice.call(arguments);
  args.shift();
  if ('listeners' in this &&
      this.listeners !== undefined &&
      listener in this.listeners &&
      typeof this.listeners[listener] == 'object') {
    Object.keys(this.listeners[listener]).forEach(key => {
      this.listeners[listener][key].callback.apply(
        this.listeners[listener][key].thisArg === undefined
          ? this
          : this.listeners[listener][key].thisArg,
        args);
    });
  }
  if ('once_listeners' in this &&
      this.once_listeners !== undefined &&
      listener in this.once_listeners &&
      'forEach' in this.once_listeners[listener]) {
    // Swap out the once-list before invoking, so a once-listener registered
    // during a callback is kept for the next trigger instead of firing now.
    let once_listeners = this.once_listeners[listener];
    this.once_listeners[listener] = [];
    once_listeners.forEach(obj => {
      obj.callback.apply(obj.thisArg === undefined ? this : obj.thisArg, args);
    });
  }
}

/**
 * Adds all event functions (on/un/once/hasListener/trigger) to an object.
 *
 * @param {object} obj
 */
export function addEventFunctions(obj) {
  obj.on = on;
  obj.un = un;
  obj.once = once;
  obj.hasListener = hasListener;
  obj.trigger = trigger;
}
export default addEventFunctions;
#!/bin/bash
## Copyright (c) 2021 Oracle and/or its affiliates.
## Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/

# Build the db-log-exporter image with Maven and push it to $DOCKER_REGISTRY.

SCRIPT_DIR=$(dirname $0)
IMAGE_NAME=db-log-exporter
IMAGE_VERSION=0.1

echo DOCKER_REGISTRY is $DOCKER_REGISTRY
# Fall back to the workshop's saved state when the env var is unset.
if [ -z "$DOCKER_REGISTRY" ]; then
    echo "DOCKER_REGISTRY not set. Will get it with state_get"
    export DOCKER_REGISTRY=$(state_get DOCKER_REGISTRY)
fi

if [ -z "$DOCKER_REGISTRY" ]; then
    echo "Error: DOCKER_REGISTRY env variable needs to be set!"
    exit 1
fi

export IMAGE=${DOCKER_REGISTRY}/${IMAGE_NAME}:${IMAGE_VERSION}

mvn package
docker push $IMAGE
# Remove the local copy only if the push succeeded.
if [ $? -eq 0 ]; then
    docker rmi ${IMAGE}
fi
def fibonacci(n):
    """Return the n-th Fibonacci number (F(0) = 0, F(1) = 1).

    Args:
        n: non-negative index into the Fibonacci sequence.

    Returns:
        The n-th Fibonacci number.

    Raises:
        ValueError: if n is negative (the original printed a message and
            silently returned None, which hid the error from callers).
    """
    if n < 0:
        raise ValueError("Incorrect input: n must be non-negative")
    a, b = 0, 1
    # Iterative form covers n == 0 and n == 1 without special cases.
    for _ in range(n):
        a, b = b, a + b
    return a


# Driver program — guarded so the module is importable; the original called
# fibonacci(n) with an undefined name `n`, which raised NameError.
if __name__ == "__main__":
    print(fibonacci(9))
#!/usr/bin/env bash set -ex echo Installing driver dependencies curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add - curl https://packages.microsoft.com/config/ubuntu/16.04/prod.list | sudo tee /etc/apt/sources.list.d/mssql.list sudo apt-get update ACCEPT_EULA=Y sudo apt-get install -qy msodbcsql17 unixodbc unixodbc-dev libssl1.0.0
#!/bin/bash # # Jobscript for launching dcmip2012 test2-0 on a mac running Darwin # # usage: ./jobscript-... EXEC=../../../test_execs/preqx-nlev30-interp/preqx-nlev30-interp # set name of executable openmpiexec -n 6 $EXEC < ./namelist-lowres.nl # launch simulation
#!/bin/sh cd "$(dirname "$0")" groovy logs.groovy plain
#!/bin/bash medusa-dev --set-path-to-repo . cd integration-tests/api medusa-dev --force-install --scan-once yarn test
<gh_stars>1-10 ------------------------------------------------------------------------------- -- dict type ------------------------------------------------------------------------------- CREATE TABLE DICT_TYPE( ID BIGINT NOT NULL, NAME VARCHAR(200), TYPE VARCHAR(200), DESCN VARCHAR(200), CONSTRAINT PK_DICT_TYPE PRIMARY KEY(ID) ); COMMENT ON TABLE DICT_TYPE IS '数据字典类型'; COMMENT ON COLUMN DICT_TYPE.ID IS '唯一主键'; COMMENT ON COLUMN DICT_TYPE.TYPE IS '类型'; COMMENT ON COLUMN DICT_TYPE.NAME IS '名称'; COMMENT ON COLUMN DICT_TYPE.DESCN IS '描述';
import icon from './src/Icon'

// Re-export the component under a capitalized name for direct imports.
export const Icon = icon

// Vue plugin entry point: registers the icon component globally under the
// name declared on the component itself.
function install(vue) {
  vue.component(icon.name, icon)
}

export default { install }
#!/usr/bin/env bash kubectl delete job --all flekszible generate --print -t namefilter:include=test-runner -t run:args="bin/spark-shell --jars /opt/ozonefs/hadoop-ozone-filesystem-hadoop3.jar --packages io.delta:delta-core_2.12:0.7.0 --conf spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension --conf spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog -i /opt/testscripts/deltagenerate.scala" | kubectl apply -f - kubectl wait --timeout=300s -l job-name=test-runner --for=condition=complete job kubectl logs --tail=30 -l job-name=test-runner | tee -a results/wordcount.txt
#!/bin/sh set -e set -u export KUBE_NAMESPACE=prometheus export KUBE_CLUSTER=k8s-cluster export GCP_REGION=australia-southeast1-a export GCP_PROJECT=servicemeshlab export DATA_DIR=/prometheus/ export DATA_VOLUME=prometheus-storage-volume export SIDECAR_IMAGE_TAG=0.5.2 usage() { echo -e "Usage: $0 <deployment|statefulset> <name>\n" } if [ $# -le 1 ]; then usage exit 1 fi # kubectl create -f clusterRole.yml || true # kubectl create -f configMap.yml -n prometheus || true # kubectl create -f prometheus-deployment.yml -n prometheus || true # Override to use a different Docker image name for the sidecar. export SIDECAR_IMAGE_NAME=${SIDECAR_IMAGE_NAME:-'gcr.io/stackdriver-prometheus/stackdriver-prometheus-sidecar'} kubectl -n "${KUBE_NAMESPACE}" patch "$1" "$2" --type strategic --patch " spec: template: spec: containers: - name: sidecar image: ${SIDECAR_IMAGE_NAME}:${SIDECAR_IMAGE_TAG} imagePullPolicy: Always args: - \"--stackdriver.project-id=${GCP_PROJECT}\" - \"--stackdriver.kubernetes.location=${GCP_REGION}\" - \"--stackdriver.kubernetes.cluster-name=${KUBE_CLUSTER}\" - \"--prometheus.wal-directory=${DATA_DIR}/wal\" ports: - name: sidecar containerPort: 9091 volumeMounts: - name: ${DATA_VOLUME} mountPath: ${DATA_DIR} "
# on Fedora28 as root if [ "$#" -ne 1 ]; then echo "Usage: $0 <master IP>" exit 1 fi set -ex MASTER_IP=$1 # install and enable docker dnf update -y dnf install -y docker systemctl enable docker && systemctl start docker # permanently disable selinux setenforce 0 sed -i 's/^SELINUX=.*/SELINUX=permissive/' /etc/selinux/config # permanently disable swap file swapoff -a sed -i.bak '/ swap / s/^\(.*\)$/#\1/g' /etc/fstab # disable firewalld systemctl stop firewalld && systemctl disable firewalld # set k8s repo if [ ! -f /etc/yum.repos.d/kubernetes.repo ]; then cat <<EOF > /etc/yum.repos.d/kubernetes.repo [kubernetes] name=Kubernetes baseurl=https://packages.cloud.google.com/yum/repos/kubernetes-el7-x86_64 enabled=1 gpgcheck=1 repo_gpgcheck=1 gpgkey=https://packages.cloud.google.com/yum/doc/yum-key.gpg https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg EOF fi # install k8s dnf install -y kubelet kubeadm kubectl kubernetes-cni systemctl enable kubelet && systemctl start kubelet # set variables export TOKEN=abcdef.1234567890123456 export NODE_NAME=`ip -o addr show|grep -v docker |awk '{ print $4 }'|grep -v '^127' |grep -v '^fe80' |grep -v '^::' |head -n 1 |cut -f1 -d/` # join the k8s cluster # the actual values should be taken from the output of "kubeadm init" command on the master kubeadm join ${MASTER_IP}:6443 --token $TOKEN --node-name $NODE_NAME --discovery-token-unsafe-skip-ca-verification # install and run ovs/ovn dnf install -y openvswitch openvswitch-ovn-* systemctl start openvswitch && systemctl enable openvswitch # start the ovn controler on a k8s minion node /usr/share/openvswitch/scripts/ovn-ctl start_controller # build and install ovn-kubernetes from source dnf install -y git go make git clone https://github.com/AlonaKaplan/ovn-kubernetes cd ovn-kubernetes/go-controller make && make install # seems like a bug in parsing the default file, just truncate it cp /etc/openvswitch/ovn_k8s.conf /etc/openvswitch/ovn_k8s.conf.bak echo "" > 
/etc/openvswitch/ovn_k8s.conf # to run kubectl from inside the node copy config mkdir -p /$USER/.kube && scp root@${MASTER_IP}:/etc/kubernetes/admin.conf /$USER/.kube/config
#!/bin/bash -e ################################################################################ ## File: aws.sh ## Desc: Installs the AWS CLI, Session Manager plugin for the AWS CLI, and AWS SAM CLI ################################################################################ # Source the helpers for use with the script source $HELPER_SCRIPTS/os.sh source $HELPER_SCRIPTS/install.sh # Install the AWS CLI v1 on Ubuntu16 and Ubuntu18, and AWS CLI v2 on Ubuntu20 if isUbuntu20 ; then download_with_retries "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" "/tmp" "awscliv2.zip" unzip -qq /tmp/awscliv2.zip -d /tmp /tmp/aws/install -i /usr/local/aws-cli -b /usr/local/bin fi # The installation should be run after python3 is installed as aws-cli V1 dropped python2 support if isUbuntu16 || isUbuntu18 ; then download_with_retries "https://s3.amazonaws.com/aws-cli/awscli-bundle.zip" "/tmp" "awscli-bundle.zip" unzip -qq /tmp/awscli-bundle.zip -d /tmp python3 /tmp/awscli-bundle/install -i /usr/local/aws -b /usr/local/bin/aws fi download_with_retries "https://s3.amazonaws.com/session-manager-downloads/plugin/latest/ubuntu_64bit/session-manager-plugin.deb" "/tmp" "session-manager-plugin.deb" apt install /tmp/session-manager-plugin.deb # Download & install the latest aws sam cli release zipName="aws-sam-cli-linux-x86_64.zip" zipUrl="https://github.com/aws/aws-sam-cli/releases/latest/download/${zipName}" download_with_retries $zipUrl "/tmp" $zipName unzip /tmp/${zipName} -d /tmp /tmp/install invoke_tests "CLI.Tools" "AWS"
module load conda2/4.2.13 source activate /n/groups/lsp/cycif/cycif_pipeline/ python /n/groups/lsp/cycif/CyCif_Manager/O2/CyCif_Pipeline_O2_v1.py $1 conda deactivate
/**************************************************************************** ** ** Copyright (C) 2016 The Qt Company Ltd. ** Contact: https://www.qt.io/licensing/ ** ****************************************************************************/ // Copyright (c) 2014-2019, The Monero Project // // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, are // permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, this list of // conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright notice, this list // of conditions and the following disclaimer in the documentation and/or other // materials provided with the distribution. // // 3. Neither the name of the copyright holder nor the names of its contributors may be // used to endorse or promote products derived from this software without specific // prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF // MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL // THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF // THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#ifndef MONEROSETTINGS_H #define MONEROSETTINGS_H #include <memory> #include <QtQml/qqmlparserstatus.h> #include <QGuiApplication> #include <QClipboard> #include <QObject> #include <QDebug> #include <qsettings.h> static const int settingsWriteDelay = 500; // ms class MoneroSettings : public QObject, public QQmlParserStatus { Q_OBJECT Q_INTERFACES(QQmlParserStatus) Q_PROPERTY(QString fileName READ fileName WRITE setFileName FINAL) Q_PROPERTY(bool portable READ portable NOTIFY portableChanged) Q_PROPERTY(QString portableFolderName READ portableFolderName CONSTANT) public: explicit MoneroSettings(QObject *parent = nullptr); QString fileName() const; void setFileName(const QString &fileName); Q_INVOKABLE bool setPortable(bool enabled); Q_INVOKABLE void setWritable(bool enabled); static QString portableFolderName(); public slots: void _q_propertyChanged(); signals: void portableChanged() const; protected: void timerEvent(QTimerEvent *event) override; void classBegin() override; void componentComplete() override; private: QVariant readProperty(const QMetaProperty &property) const; void init(); void reset(); void load(); void store(); bool portable() const; bool portableConfigExists() const; QString portableFilePath() const; std::unique_ptr<QSettings> portableSettings() const; std::unique_ptr<QSettings> unportableSettings() const; void swap(std::unique_ptr<QSettings> newSettings); QHash<const char *, QVariant> m_changedProperties; std::unique_ptr<QSettings> m_settings; QString m_fileName = QString(""); bool m_initialized = false; bool m_writable = true; int m_timerId = 0; }; #endif // MONEROSETTINGS_H
const express = require('express');
const router = express.Router();

// In-memory contact store with seed data; `contactId` hands out unique ids.
let contacts = [
  {id: 1, name: 'John Doe', phone: '123-456-7890'},
  {id: 2, name: 'Jane Doe', phone: '098-765-4321'}
];
let contactId = 3;

// List every contact.
router.get('/', (req, res) => {
  res.json(contacts);
});

// Fetch a single contact by id; 404 with a message when the id is unknown.
router.get('/:id', (req, res) => {
  const contact = contacts.find((c) => c.id === parseInt(req.params.id));
  if (contact) {
    res.json(contact);
  } else {
    res.status(404).json({ message: 'Contact not found.' });
  }
});

// Create a contact from the posted name/phone fields and echo it back.
router.post('/', (req, res) => {
  const { name, phone } = req.body;
  const contact = { id: contactId++, name, phone };
  contacts.push(contact);
  res.json(contact);
});

// Remove a contact by id, returning the deleted record.
router.delete('/:id', (req, res) => {
  const index = contacts.findIndex((c) => c.id === parseInt(req.params.id));
  if (index === -1) {
    res.status(404).json({ message: 'Contact not found.' });
    return;
  }
  const [contact] = contacts.splice(index, 1);
  res.json(contact);
});

module.exports = router;
#include <stdio.h> #include <iostream> using namespace std; //{P == n >= 100000 } void casoDePrueba() { //Aqui has de escribir tu codigo int n; cin >> n; if(n <= 100000 && n >=0 ){ int v[100000]; for(int i = 0; i < n; i++) cin >> v[i]; int iz = 0; //indice izq int dc = 0; //indice dcha int maxiz = 0; int maxdc = 0; bool cadena = true; for(int i = 0; i < n; i++){ if(v[i]%2 == 0){ if(cadena == true){ dc++; if((dc - iz) >= (maxdc - maxiz)){ maxdc = dc; maxiz = iz; } } else{ iz = i; dc = i+1; cadena = true; } } else{ if(cadena == true) cadena = false; } } //QUE HACEMOS CUANDO N = 0? if(maxdc - maxiz == 0) cout <<"Vacio" << endl; else cout << maxdc - maxiz << " -> [" << maxiz <<"," << maxdc<< ")" << endl; } } // resuelve // Q == {} int main() { unsigned int numCasos, i; cin>> numCasos; for(i = 0; i < numCasos; ++i) { casoDePrueba(); } return 0; } // main
#!/bin/bash mkdir -p build cd build if [ -f /bin/cmake3 ]; then cmake3 .. else cmake .. fi make echo "" echo "Binary is written in ./build" echo ""
#!/usr/bin/env sh # SPDX-License-Identifier: MIT confidence="" case "$BANDIT_CONFIDENCE" in "HIGH") confidence="-iii" ;; "MEDIUM") confidence="-ii" ;; "LOW") confidence="-i" esac severity="" case "$BANDIT_SEVERITY" in "HIGH") severity="-lll" ;; "MEDIUM") severity="-ll" ;; "LOW") severity="-l" esac bandit --format sarif --ignore-nosec $severity $confidence --output "$PDS_JOB_RESULT_FILE" --recursive "$PDS_JOB_SOURCECODE_UNZIPPED_FOLDER/" exit 0
#!/bin/sh set -e set -u set -o pipefail function on_error { echo "$(realpath -mq "${0}"):$1: error: Unexpected failure" } trap 'on_error $LINENO' ERR if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy # frameworks to, so exit 0 (signalling the script phase was successful). exit 0 fi echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}" SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" # Used as a return value for each invocation of `strip_invalid_archs` function. STRIP_BINARY_RETVAL=0 # This protects against multiple targets copying the same framework dependency at the same time. The solution # was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????") # Copies and strips a vendored framework install_framework() { if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then local source="${BUILT_PRODUCTS_DIR}/$1" elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")" elif [ -r "$1" ]; then local source="$1" fi local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" if [ -L "${source}" ]; then echo "Symlinked..." source="$(readlink "${source}")" fi # Use filter instead of exclude so missing patterns don't throw errors. 
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}" local basename basename="$(basename -s .framework "$1")" binary="${destination}/${basename}.framework/${basename}" if ! [ -r "$binary" ]; then binary="${destination}/${basename}" elif [ -L "${binary}" ]; then echo "Destination binary is symlinked..." dirname="$(dirname "${binary}")" binary="${dirname}/$(readlink "${binary}")" fi # Strip invalid architectures so "fat" simulator / device frameworks work on device if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then strip_invalid_archs "$binary" fi # Resign the code if required by the build settings to avoid unstable apps code_sign_if_enabled "${destination}/$(basename "$1")" # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7. if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then local swift_runtime_libs swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u) for lib in $swift_runtime_libs; do echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\"" rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}" code_sign_if_enabled "${destination}/${lib}" done fi } # Copies and strips a vendored dSYM install_dsym() { local source="$1" if [ -r "$source" ]; then # Copy the dSYM into a the targets temp dir. 
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}" local basename basename="$(basename -s .framework.dSYM "$source")" binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}" # Strip invalid architectures so "fat" simulator / device frameworks work on device if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then strip_invalid_archs "$binary" fi if [[ $STRIP_BINARY_RETVAL == 1 ]]; then # Move the stripped file into its final destination. echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}" else # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing. 
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM" fi fi } # Signs a framework with the provided identity code_sign_if_enabled() { if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then # Use the current code_sign_identity echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}" local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'" if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then code_sign_cmd="$code_sign_cmd &" fi echo "$code_sign_cmd" eval "$code_sign_cmd" fi } # Strip invalid architectures strip_invalid_archs() { binary="$1" # Get architectures for current target binary binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)" # Intersect them with the architectures we are building for intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)" # If there are no archs supported by this binary then warn the user if [[ -z "$intersected_archs" ]]; then echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)." STRIP_BINARY_RETVAL=0 return fi stripped="" for arch in $binary_archs; do if ! 
[[ "${ARCHS}" == *"$arch"* ]]; then # Strip non-valid architectures in-place lipo -remove "$arch" -output "$binary" "$binary" stripped="$stripped $arch" fi done if [[ "$stripped" ]]; then echo "Stripped $binary of architectures:$stripped" fi STRIP_BINARY_RETVAL=1 } if [[ "$CONFIGURATION" == "Debug" ]]; then install_framework "${BUILT_PRODUCTS_DIR}/Presentables/Presentables.framework" install_framework "${BUILT_PRODUCTS_DIR}/SnapKit/SnapKit.framework" fi if [[ "$CONFIGURATION" == "Release" ]]; then install_framework "${BUILT_PRODUCTS_DIR}/Presentables/Presentables.framework" install_framework "${BUILT_PRODUCTS_DIR}/SnapKit/SnapKit.framework" fi if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then wait fi
<gh_stars>0 package com.packtpub.springrest.booking; import com.packtpub.springrest.DateRange; import org.apache.commons.lang.builder.ToStringBuilder; import org.apache.commons.lang.builder.ToStringStyle; import javax.validation.Valid; import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; import javax.validation.constraints.Size; /** * A booking request to be processed by the {@link BookingService}. * * @author <NAME> */ public class BookingRequest { @Min(1) private long roomId; @Valid @NotNull private DateRange dateRange; @Size(min = 1, max = 128) private String customerName; @Valid @NotNull private CreditCardDetails creditCardDetails; public BookingRequest() {} public BookingRequest(long roomId, DateRange dateRange, String customerName, CreditCardDetails creditCardDetails) { this.roomId = roomId; this.dateRange = dateRange; this.customerName = customerName; this.creditCardDetails = creditCardDetails; } public long getRoomId() { return roomId; } public DateRange getDateRange() { return dateRange; } public String getCustomerName() { return customerName; } public CreditCardDetails getCreditCardDetails() { return creditCardDetails; } @Override public String toString() { return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE); } }
cd src python main.py ctdet --exp_id bdd_resdcn18_224 --arch resdcn_18 --dataset bdd --batch_size 18 --lr 1e-4 --lr_step 90 --gpus 0 --num_workers 8 --input_res 224 --num_epochs 140 --save_all --resume cd ..
<gh_stars>1000+ import { createOvermind } from "overmind"; import { createHook } from "overmind-react"; export const useApp = createHook(); export const overmind = createOvermind( { state: { newItemName: "", items: ["nacho", "burrito", "hotdog"] }, actions: { setNewItemName({ state }, itemName) { state.newItemName = itemName; }, addItem({ state }) { state.items = [...state.items, state.newItemName]; state.newItemName = ""; }, clear({ state }) { state.items = []; } } }, { devtools: false } );
public class BinaryTree {

    Node root;

    /**
     * Returns the maximum depth (number of nodes on the longest
     * root-to-leaf path) of the subtree rooted at {@code node}, computed
     * with a recursive depth-first traversal.
     *
     * @param node subtree root; {@code null} yields depth 0
     * @return maximum depth of the subtree
     */
    public int getMaxDFS(Node node) {
        if (node == null) {
            return 0;
        }
        int maxLeft = getMaxDFS(node.left);
        int maxRight = getMaxDFS(node.right);
        // Bug fix: the current node must contribute one level. Without the
        // "+ 1" every recursive call bottomed out at 0, so the method
        // always returned 0 regardless of the tree's shape.
        return Math.max(maxLeft, maxRight) + 1;
    }

    class Node {
        Node left;
        Node right;
        int val;

        public Node(int val) {
            this.val = val;
        }
    }
}
import json
from typing import Any


class SerializationConfig:
    """Holds the active serialization format and (de)serializes data with it.

    Only the "json" format is fully implemented in this module; the "xml"
    and "binary" branches delegate to helpers (xml_serialize,
    binary_serialize, ...) that are not defined here — presumably supplied
    elsewhere; confirm before enabling those formats.
    """

    def __init__(self):
        # Default serialization format.
        self.format = "json"

    def set_format(self, format: str) -> None:
        """Select the serialization format ("json", "xml" or "binary")."""
        self.format = format

    def get_format(self) -> str:
        """Return the currently selected format name."""
        return self.format

    def serialize(self, data: Any) -> bytes:
        """Encode *data* to bytes using the configured format.

        Raises:
            ValueError: if the configured format is not supported.
        """
        # Bug fix: the original never imported json, so the first call
        # raised NameError instead of serializing.
        if self.format == "json":
            return json.dumps(data).encode("utf-8")
        if self.format == "xml":
            # NOTE(review): xml_serialize is not defined in this module.
            return xml_serialize(data).encode("utf-8")
        if self.format == "binary":
            # NOTE(review): binary_serialize is not defined in this module.
            return binary_serialize(data)
        raise ValueError("Unsupported serialization format")

    def deserialize(self, data: bytes) -> Any:
        """Decode *data* (bytes) back into Python objects.

        Raises:
            ValueError: if the configured format is not supported.
        """
        if self.format == "json":
            return json.loads(data.decode("utf-8"))
        if self.format == "xml":
            # NOTE(review): xml_deserialize is not defined in this module.
            return xml_deserialize(data.decode("utf-8"))
        if self.format == "binary":
            # NOTE(review): binary_deserialize is not defined in this module.
            return binary_deserialize(data)
        raise ValueError("Unsupported serialization format")


# Sample usage — guarded so importing this module has no side effects.
if __name__ == "__main__":
    config = SerializationConfig()
    config.set_format("json")
    print(config.get_format())                   # json
    data = {"key": "value"}
    serialized_data = config.serialize(data)
    print(serialized_data)                       # b'{"key": "value"}'
    print(config.deserialize(serialized_data))   # {'key': 'value'}
#!/usr/bin/env bash # Test block processing by hooking up indexer to preconfigured block datasets. set -e # This script only works when CWD is 'test' rootdir=`dirname $0` pushd $rootdir > /dev/null pwd source common.sh trap cleanup EXIT start_postgres ############### ## RUN TESTS ## ############### # Test 1 print_alert "Integration Test 1" kill_indexer start_indexer_with_blocks createdestroy blockdata/create_destroy.tar.bz2 wait_for_migrated create_delete_tests createdestroy
<reponame>adarshjv20/Mutation-Testing package core.shape_interface; import core.Model; import java.awt.event.MouseEvent; /** * * @author <NAME> */ public class FillInterface extends ActionInterface { public FillInterface(Model model) { this.model = model; } @Override protected void mouseDown(MouseEvent e) { model.performFill(e.getPoint()); } }
#!/bin/bash
# Library of file-indexing functions
# see: shiftup

# Print the digit sequence embedded in a string (letters and punctuation
# stripped). Indexes may be prefixed with '0'.
# $1: string containing an index, e.g. "file012.txt" -> "012"
function get_index {
    local path=$1
    local index
    index=$(echo "$path" | tr -d '[:alpha:][:punct:]')
    echo "$index"
}

# Increase the index embedded in a filename and rename the file.
# $1: path - filename
function up_index {
    local path=$1
    # TODO: extract the index with get_index, increment it (preserving any
    # zero padding) and rename the file accordingly.
    echo "todo: implement"
}

# Print the largest index >= the given start index among the files in $dir.
# NOTE(review): relies on the caller setting $dir — confirm.
# $1: start index
#
# Bug fixes vs. the original: `i=(get_index $path)` created an array
# instead of command-substituting; `index=get_index $path` executed $path
# with an environment variable; the maximum was stored into the wrong
# variable (`max_index=indexes`); and the final `echo $` printed nothing.
function get_last_index {
    local index_start=$1
    local index_max=$index_start
    local path index
    for path in "$dir"/*; do
        if [[ -d $path ]]; then
            continue
        fi
        index=$(get_index "$path")
        # Skip files with no digits at all.
        if [[ -z $index ]]; then
            continue
        fi
        # 10# forces base-10 so zero-padded indexes are not read as octal.
        if (( 10#$index < 10#$index_start )); then
            continue  # out of range
        fi
        if (( 10#$index > 10#$index_max )); then
            index_max=$index
        fi
    done
    echo "$index_max"
    return 0
}
#!/bin/sh set -e -x PYTHON=${PYTHON:=python} $PYTHON -mperf timeit -s'from gevent import spawn; from gevent.hub import xrange; g = spawn(lambda: 5); l = lambda: 5' 'for _ in xrange(1000): g.link(l)' $PYTHON -mperf timeit -s'from gevent import spawn; from gevent.hub import xrange; g = spawn(lambda: 5); l = lambda *args: 5' 'for _ in xrange(10): g.link(l);' 'g.join()' $PYTHON -mperf timeit -s'from gevent import spawn; from gevent.hub import xrange; g = spawn(lambda: 5); l = lambda *args: 5' 'for _ in xrange(100): g.link(l);' 'g.join()' $PYTHON -mperf timeit -s'from gevent import spawn; from gevent.hub import xrange; g = spawn(lambda: 5); l = lambda *args: 5' 'for _ in xrange(1000): g.link(l);' 'g.join()' $PYTHON -mperf timeit -s'from gevent import spawn; from gevent.hub import xrange; g = spawn(lambda: 5); l = lambda *args: 5' 'for _ in xrange(10000): g.link(l);' 'g.join()' $PYTHON -mperf timeit -s'from gevent import spawn; from gevent.hub import xrange; g = spawn(lambda: 5); l = lambda *args: 5' 'for _ in xrange(100000): g.link(l);' 'g.join()'
if [ -z "$ROOT" ]; then echo "ROOT must be set to the root of the end-to-end tests" >&2 exit 1 fi if [ -n "$MACHINE_READABLE" ]; then LINE_END="\n" else LINE_END="\r" fi step () { echo "==== $@" } initialize_cluster () { for namespace in $(kubectl get namespaces | egrep -v '^(NAME|kube-)' | awk ' { print $1 }'); do echo "Deleting everything in $namespace..." if [ "$namespace" = "default" ]; then kubectl delete pods,secrets,services,deployments,configmaps --all else kubectl delete namespace "$namespace" fi done } cluster_ip () { IP=$(kubectl get nodes -ojsonpath="{.items[0].status.addresses[?(@.type==\"ExternalIP\")].address}") if [ -z "$IP" ]; then IP=$(kubectl cluster-info | fgrep master | python -c 'import sys; print(sys.stdin.readlines()[0].split()[5].split(":")[1].lstrip("/"))') fi echo "$IP" } service_port() { instance=${2:-0} kubectl get services "$1" -ojsonpath="{.spec.ports[$instance].nodePort}" } demotest_pod() { kubectl get pods -l run=demotest -o 'jsonpath={.items[0].metadata.name}' } wait_for_pods () { namespace=${1:-default} attempts=60 running= while [ $attempts -gt 0 ]; do # pending=$(kubectl --namespace $namespace get pod -o json | grep phase | grep -c -v Running) pending=$(kubectl --namespace $namespace describe pods | grep '^Status:' | grep -c -v Running) if [ $pending -eq 0 ]; then printf "Pods running. \n" running=YES break fi printf "try %02d: %d not running${LINE_END}" $attempts $pending attempts=$(( $attempts - 1 )) sleep 2 done if [ -z "$running" ]; then echo 'Some pods have yet to start?' >&2 exit 1 fi } wait_for_ready () { baseurl=${1} attempts=60 ready= while [ $attempts -gt 0 ]; do OK=$(curl -k $baseurl/ambassador/v0/check_ready 2>&1 | grep -c 'readiness check OK') if [ $OK -gt 0 ]; then printf "ambassador ready \n" ready=YES break fi printf "try %02d: not ready${LINE_END}" $attempts attempts=$(( $attempts - 1 )) sleep 2 done if [ -z "$ready" ]; then echo 'Ambassador not yet ready?' 
>&2 kubectl get pods >&2 exit 1 fi } wait_for_extauth_running () { baseurl=${1} attempts=60 ready= while [ $attempts -gt 0 ]; do OK=$(curl -k -s $baseurl/example-auth/ready | egrep -c '^OK ') if [ $OK -gt 0 ]; then printf "extauth ready \n" ready=YES break fi printf "try %02d: not ready${LINE_END}" $attempts attempts=$(( $attempts - 1 )) sleep 5 done if [ -z "$ready" ]; then echo 'extauth not yet ready?' >&2 exit 1 fi } wait_for_extauth_enabled () { baseurl=${1} attempts=60 enabled= while [ $attempts -gt 0 ]; do OK=$(curl -k -s $baseurl/ambassador/v0/diag/?json=true | jget.py /filters/0/name 2>&1 | egrep -c 'extauth') if [ $OK -gt 0 ]; then printf "extauth enabled \n" enabled=YES break fi printf "try %02d: not enabled${LINE_END}" $attempts attempts=$(( $attempts - 1 )) sleep 5 done if [ -z "$enabled" ]; then echo 'extauth not yet enabled?' >&2 exit 1 fi } wait_for_demo_weights () { attempts=60 routed= while [ $attempts -gt 0 ]; do if checkweights.py "$@"; then routed=YES break fi printf "try %02d: misweighted${LINE_END}" $attempts attempts=$(( $attempts - 1 )) sleep 5 done if [ -z "$routed" ]; then echo 'weights still not correct?' >&2 exit 1 fi } check_diag () { baseurl=$1 index=$2 desc=$3 sleep 20 rc=1 curl -k -s ${baseurl}/ambassador/v0/diag/?json=true | jget.py /routes > check-$index.json if ! cmp -s check-$index.json diag-$index.json; then echo "check_diag $index: mismatch for $desc" if diag-diff.sh $index; then diag-fix.sh $index rc=0 fi else echo "check_diag $index: OK" rc=0 fi return $rc } istio_running () { kubectl get service istio-mixer >/dev/null 2>&1 } ambassador_pod () { kubectl get pod -l app=ambassador -o jsonpath='{.items[0].metadata.name}' } # ISTIOHOME=${ISTIOHOME:-${HERE}/istio-0.1.6} # source ${ISTIOHOME}/istio.VERSION # if [ \( "$1" = "--delete" \) -o \( "$1" = "-d" \) ]; then # ACTION="delete" # HRACTION="Tearing down" # shift # else # ACTION="apply" # HRACTION="Setting up" # fi # KUBEDIR=${HERE}/kube
<filename>app/src/main/java/com/flea/android/fleaandroid/activities/EventListActivity.java package com.flea.android.fleaandroid.activities; import android.os.Bundle; import com.flea.android.fleaandroid.R; import com.flea.android.fleaandroid.utils.BaseActivity; public class EventListActivity extends BaseActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_event_list); } }
package org.hisp.dhis.dataadmin.action.statistics;

/*
 * Copyright (c) 2004-2012, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * * Redistributions of source code must retain the above copyright notice, this
 *   list of conditions and the following disclaimer.
 * * Redistributions in binary form must reproduce the above copyright notice,
 *   this list of conditions and the following disclaimer in the documentation
 *   and/or other materials provided with the distribution.
 * * Neither the name of the HISP project nor the names of its contributors may
 *   be used to endorse or promote products derived from this software without
 *   specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import java.util.HashMap;
import java.util.Map;

import org.hisp.dhis.chart.ChartService;
import org.hisp.dhis.common.Objects;
import org.hisp.dhis.i18n.I18n;
import org.hisp.dhis.statistics.StatisticsProvider;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.axis.CategoryLabelPositions;
import org.jfree.chart.plot.PlotOrientation;

import com.opensymphony.xwork2.Action;

/**
 * Action that builds a horizontal bar chart of how many objects of each core
 * type (data elements, indicators, data sets, organisation units, periods)
 * exist in the system. The resulting {@link JFreeChart} is exposed via
 * {@link #getChart()} for the result renderer.
 *
 * @author <NAME>
 * @version $Id$
 */
public class GetStatisticsChartAction
    implements Action
{
    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    private StatisticsProvider statisticsProvider;

    public void setStatisticsProvider( StatisticsProvider statisticsProvider )
    {
        this.statisticsProvider = statisticsProvider;
    }

    private ChartService chartService;

    public void setChartService( ChartService chartService )
    {
        this.chartService = chartService;
    }

    private I18n i18n;

    public void setI18n( I18n i18n )
    {
        this.i18n = i18n;
    }

    // -------------------------------------------------------------------------
    // Output
    // -------------------------------------------------------------------------

    private JFreeChart chart;

    public JFreeChart getChart()
    {
        return chart;
    }

    // -------------------------------------------------------------------------
    // Action implementation
    // -------------------------------------------------------------------------

    public String execute()
    {
        Map<Objects, Integer> counts = statisticsProvider.getObjectCounts();

        Map<String, Double> categoryValues = new HashMap<String, Double>();

        categoryValues.put( i18n.getString( "data_elements" ), countAsDouble( counts, Objects.DATAELEMENT ) );
        categoryValues.put( i18n.getString( "indicators" ), countAsDouble( counts, Objects.INDICATOR ) );
        categoryValues.put( i18n.getString( "data_sets" ), countAsDouble( counts, Objects.DATASET ) );
        categoryValues.put( i18n.getString( "organisation_units" ), countAsDouble( counts, Objects.SOURCE ) );
        categoryValues.put( i18n.getString( "periods" ), countAsDouble( counts, Objects.PERIOD ) );

        chart = chartService.getJFreeChart( i18n.getString( "number_of_objects" ),
            PlotOrientation.HORIZONTAL, CategoryLabelPositions.STANDARD, categoryValues );

        return SUCCESS;
    }

    /**
     * Null-safe conversion of an object count to a double. The previous code
     * unboxed counts.get(...) directly, which throws a NullPointerException
     * when a count is missing from the map; a missing count now reads as 0.
     */
    private static double countAsDouble( Map<Objects, Integer> counts, Objects type )
    {
        Integer count = counts.get( type );
        return count != null ? count.doubleValue() : 0d;
    }
}
The maximum contiguous sub-array problem is the task of finding, within an array of integers, the contiguous sub-array with the largest sum. Kadane's algorithm solves it in a single left-to-right pass: at each element we compute the maximum sum of a sub-array that ends exactly at that element — either the element on its own, or the element added to the best sub-array ending at the previous position, whichever is larger. While scanning, we also keep a running record of the largest such "ending here" sum seen anywhere in the array. For example, in [-2, 1, -3, 4, -1, 2, 1], the best sub-array is [4, -1, 2, 1] with sum 6. The running maximum is returned at the end. Time complexity is O(n), since each element is examined once.
#!/bin/bash

# This has to be a separate file from scripts/make.sh so it can be called
# before menuconfig. (It's called again from scripts/make.sh just to be sure.)

mkdir -p generated

source configure

# Compile a C snippet from stdin with the configured cross toolchain,
# discarding the binary; used purely as a "does this compile?" predicate.
probecc() {
  ${CROSS_COMPILE}${CC} $CFLAGS -xc -o /dev/null $1 -
}

# Probe for a single config symbol with a "compiles or not" test.
# Symbol name is first argument, flags second, feed C file to stdin
probesymbol() {
  probecc $2 2>/dev/null && DEFAULT=y || DEFAULT=n
  rm a.out 2>/dev/null
  echo -e "config $1\n\tbool" || exit 1
  echo -e "\tdefault $DEFAULT\n" || exit 1
}

# Emit Kconfig entries for every feature the target toolchain/libc supports.
probeconfig() {
  > generated/cflags

  # llvm produces its own really stupid warnings about things that aren't
  # wrong, and although you can turn the warning off, gcc reacts badly to
  # command line arguments it doesn't understand. So probe.
  [ -z "$(probecc -Wno-string-plus-int <<< \#warn warn 2>&1 | grep string-plus-int)" ] && echo -Wno-string-plus-int >> generated/cflags

  # Probe for container support on target
  probesymbol TOYBOX_CONTAINER << EOF
#include <linux/sched.h>
int x=CLONE_NEWNS|CLONE_NEWUTS|CLONE_NEWIPC|CLONE_NEWNET;

int main(int argc, char *argv[]) { return unshare(x); }
EOF

  probesymbol TOYBOX_FIFREEZE -c << EOF
#include <linux/fs.h>
#ifndef FIFREEZE
#error nope
#endif
EOF

  # Work around some uClibc limitations
  probesymbol TOYBOX_ICONV -c << EOF
#include "iconv.h"
EOF

  probesymbol TOYBOX_FALLOCATE << EOF
#include <fcntl.h>

int main(int argc, char *argv[]) { return posix_fallocate(0,0,0); }
EOF

  # Android and some other platforms miss utmpx
  probesymbol TOYBOX_UTMPX -c << EOF
#include <utmpx.h>
#ifndef BOOT_TIME
#error nope
#endif

int main(int argc, char *argv[])
{
  struct utmpx *a;

  if (0 != (a = getutxent())) return 0;
  return 1;
}
EOF

  # Android is missing shadow.h
  probesymbol TOYBOX_SHADOW -c << EOF
#include <shadow.h>

int main(int argc, char *argv[])
{
  struct spwd *a = getspnam("root");
  return 0;
}
EOF

  # Some commands are android-specific
  probesymbol TOYBOX_ON_ANDROID -c << EOF
#ifndef __ANDROID__
#error nope
#endif
EOF

  # nommu support
  probesymbol TOYBOX_FORK << EOF
#include <unistd.h>

int main(int argc, char *argv[]) { return fork(); }
EOF
}

# Assemble generated/Config.in from the per-command config stanzas embedded
# in each toys/*/*.c source file, grouped into one menu per category.
genconfig() {
  # Reverse sort puts posix first, examples last.
  for j in $(ls toys/*/README | sort -r)
  do
    DIR="$(dirname "$j")"

    # Skip categories that contain only the README.
    [ $(ls "$DIR" | wc -l) -lt 2 ] && continue

    echo "menu \"$(head -n 1 $j)\""
    echo

    # extract config stanzas from each source file, in alphabetical order
    for i in $(ls -1 $DIR/*.c)
    do
      # Grab the config block for Config.in
      echo "# $i"
      sed -n '/^\*\//q;/^config [A-Z]/,$p' $i || return 1
      echo
    done

    echo endmenu
  done
}

probeconfig > generated/Config.probed || rm generated/Config.probed
genconfig > generated/Config.in || rm generated/Config.in
<filename>src/wizards/bay.ts<gh_stars>0 import { html, TemplateResult } from 'lit-html'; import { get, translate } from 'lit-translate'; import { updateNamingAction } from '../editors/substation/foundation.js'; import { createElement, EditorAction, getReference, getValue, Wizard, WizardActor, WizardInput, } from '../foundation.js'; function render(name: string | null, desc: string | null): TemplateResult[] { return [ html`<wizard-textfield label="name" .maybeValue=${name} helper="${translate('bay.wizard.nameHelper')}" required validationMessage="${translate('textfield.required')}" dialogInitialFocus ></wizard-textfield>`, html`<wizard-textfield label="desc" .maybeValue=${desc} nullable helper="${translate('bay.wizard.descHelper')}" ></wizard-textfield>`, ]; } export function createAction(parent: Element): WizardActor { return (inputs: WizardInput[]): EditorAction[] => { const name = getValue(inputs.find(i => i.label === 'name')!); const desc = getValue(inputs.find(i => i.label === 'desc')!); const element = createElement(parent.ownerDocument, 'Bay', { name, desc, }); const action = { new: { parent, element, reference: getReference(parent, 'Bay'), }, }; return [action]; }; } export function createBayWizard(parent: Element): Wizard { return [ { title: get('bay.wizard.title.add'), element: undefined, primary: { icon: '', label: get('add'), action: createAction(parent), }, content: render('', ''), }, ]; } export function editBayWizard(element: Element): Wizard { return [ { title: get('bay.wizard.title.edit'), element, primary: { icon: 'edit', label: get('save'), action: updateNamingAction(element), }, content: render( element.getAttribute('name'), element.getAttribute('desc') ), }, ]; }
package com.yingnuo.web.servlet.admin.handle;

import com.google.gson.Gson;
import com.yingnuo.domain.User;
import com.yingnuo.domain.VipRule;
import com.yingnuo.service.UserService;
import com.yingnuo.service.VipRuleService;

import javax.security.auth.login.LoginException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Looks up a member's discount by phone number and the order's gross amount,
 * returning (as JSON) their points, VIP rank, discount rate, and the
 * discounted amount to pay.
 * (Original comment: 根据手机号和订单实际金额获取折扣和优惠后金额)
 */
@WebServlet("/admin/getRebate")
public class GetRebateServlet extends HttpServlet {

    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        String phone = req.getParameter("phone");
        String actual_amount = req.getParameter("actual_amount");
        System.out.println("电话为:"+phone+"-订单金额为:"+actual_amount);

        UserService userService = new UserService();
        VipRuleService vipRuleService = new VipRuleService();

        int is_rank = 0;
        Double rebate = 0.0;
        int point = 0;
        double pay_amount = 0.0;

        // Find the user by phone, then walk the VIP tiers to derive their
        // rank, discount rate and discounted amount.
        try {
            User user = userService.findUserByPhone(phone);

            if (user != null) {
                point = Integer.parseInt(user.getPoint());
                List<VipRule> rules = vipRuleService.findAllVipRule();

                for (VipRule rule : rules) {
                    if (point <= rule.getMax_point()) {
                        // Matched this tier: take its rank/discount and stop.
                        is_rank = rule.getIs_rank();
                        rebate = rule.getRebate();
                        // Discount rate is expressed in tenths (e.g. 9 => 90%).
                        pay_amount = rebate * Integer.parseInt(actual_amount) / 10;
                        break;
                    } else {
                        // Above this tier's cap: provisionally use the top
                        // tier (rank 6) until a matching tier is found.
                        is_rank = 6;
                        rebate = vipRuleService.findRebateByRank(6);
                        pay_amount = rebate * Integer.parseInt(actual_amount) / 10;
                    }
                }
            } else {
                System.out.println("获取User或折扣失败");
            }
        } catch (LoginException e) {
            e.printStackTrace();
        }

        System.out.println("电话为:"+phone+"-订单金额为:"+actual_amount+"该用户积分为:"+point+"该用户等级为:"+is_rank+"所享受折扣:"+rebate+"实付金额为:"+ pay_amount);

        // Serialize the result map to JSON with Gson and write it back.
        PrintWriter out = resp.getWriter();
        Map<String, String> map = new HashMap<String, String>();
        map.put("msg", "true");
        map.put("point", String.valueOf(point));
        map.put("is_rank", String.valueOf(is_rank));
        map.put("rebate", String.valueOf(rebate));
        map.put("pay_amount", String.valueOf(pay_amount));

        Gson gson = new Gson();
        String json = gson.toJson(map);
        out.println(json);
        out.flush();
        out.close();
    }
}
// NOTE(review): this file appears to have lost its original line breaks in
// extraction — each physical line below holds many statements, and the first
// `//` comment on such a line swallows the rest of it. Restore the original
// formatting from version control before making behavioral edits here.
//
// Purpose (from the visible code): jQuery + Firebase "quick order" form.
// Wires up change handlers for color / monogram-style / symbol /
// personalization inputs, an autocomplete over products loaded from
// ./assets/json/ProductsJSON.json (with color, mono-style and symbol option
// lists from sibling JSON files chosen per product family via underscore's
// _.contains), and pushes each order line into a Firebase Realtime Database
// under orders/<key>, echoing the saved line into the #order-table.
var itemNo = 0; var key; // var recentKey; var uniqueIdentifier; var jsonData; var availableProducts = []; var colors = []; var monoStyles = []; var symbols = []; // Initialize Firebase var config = { apiKey: "<KEY>", authDomain: "jhd-quick-order-form.firebaseapp.com", databaseURL: "https://jhd-quick-order-form.firebaseio.com", projectId: "jhd-quick-order-form", storageBucket: "jhd-quick-order-form.appspot.com", messagingSenderId: "417642183917" }; firebase.initializeApp(config); var orderData = firebase.database(); var traditionalSelected = false; $(document).ready(function () { // Color Dropdown $("#input-color").on("change", function () { var itemCodeColor = $("#input-color").val().split(","); const currentItemCode = $("#input-code").val(); $("#input-code").empty(); $("#input-code").val(currentItemCode + itemCodeColor[1]); console.log(availableProducts); }); // Monogram Style Dropdown =-=-=-=-=-=-=-=- $("#input-monoStyle").on("change select", function () { var optionValue = $(this).val(); if (optionValue == 304 || optionValue == 305 || optionValue == 306) { traditionalSelected = true; $("#input-personalization").attr("maxlength", 3).prop("disabled", false); $("#input-personalization").val(""); $("#input-symbol").prop("disabled", false); } else if (optionValue == 394) { console.log("EVENT - 'NONE' Mono Style Selected"); traditionalSelected = false; $("#input-personalization").val(""); $("#input-personalization").prop("disabled", true); $("#input-symbol").prop("disabled", true); } else { traditionalSelected = false; $("#input-personalization").attr("maxlength", 3).prop("disabled", false); $("#input-personalization").val(""); $("#input-symbol").prop("disabled", false); } }); // Symbol Input =-=-=-=-=-=-=-=--=-=-=-=-==-=- $("#input-symbol").on("change", function () { var optionValue = $(this).val(); var monoStyleOption = $("#input-monoStyle").val(); var personalizationText = $("#input-personalization").val(); if (optionValue == 415 && monoStyleOption != 394) {
$("#input-personalization").prop("disabled", false); } else { $("#input-personalization").val(""); $("#input-personalization").prop("disabled", true); } }); // Personalization Text Input =-=-=-=-=-=-=-=- $("#input-personalization").on("change keyup", function () { var optionValue = $(this).val(); var lettersUsed = parseInt(optionValue.length); if (lettersUsed > 0) { console.log(lettersUsed); $("#input-symbol").prop("disabled", true); if (traditionalSelected === true) { $("#input-personalization").val(optionValue.toUpperCase()); } } else { $("#input-symbol").prop("disabled", false); } // console.log(lettersUsed + " letters used"); // var UPs = optionValue.replace(/[^A-Z]/g, "").length; // var size = (UPs > 2 ? sizes[1] : sizes[0]); }); $("#input-item").on("change", function () { var itemName = $(this).val(); lookupItemCode(itemName); }); $("#quick-order-reset").on("click", function () { loadDefaultOptions(); }); $("#step2").hide(); $("#step1-submit").on("click", function () { if ($("#input-companyName").val() && $("#input-PO").val()) { const randomNumber = Math.floor(Math.random() * 1000000); var companyName = $("#input-companyName").val(); var poNumber = $("#input-PO").val(); uniqueIdentifier = companyName + "-" + poNumber + "-" + randomNumber; var newPO = { uniqueIdentifier: uniqueIdentifier, companyName: companyName, poNumber: poNumber }; var newDB = orderData.ref().child("orders").push(newPO); key = newDB.key; console.log("KEY: " + newDB.key); $("#step1").hide(); $("#step2").show(); loadDefaultOptions(); getFirstJSON(); } else { alert("Please input Company Name and PO Number to continue."); } }); $("#quick-order-add").on("click", function () { itemNo++; var item = $("#input-item").val(); var itemCode = $("#input-code").val(); var itemPrice = $("#input-price").val(); var itemColor = $("#input-color option:selected").text(); var itemMonoStyle = $("#input-monoStyle option:selected").text(); var itemSymbol = $("#input-symbol option:selected").text(); var itemText =
$("#input-personalization").val(); var itemQuantity = $("#input-quantity").val(); var calcPrice = parseInt(itemPrice.split("$")[1]) * parseInt(itemQuantity); console.log("itemColor: " + itemColor); console.log("itemMonoStyle: " + itemMonoStyle); console.log("itemSymbol: " + itemSymbol); var newOrder = { itemNo: itemNo, itemName: item, itemCode: itemCode, itemPrice: itemPrice, itemColor: itemColor, monoStyle: itemMonoStyle, itemSymbol: itemSymbol, itemText: itemText, quantity: itemQuantity, totalLinePrice: calcPrice }; var newOrderObject = orderData.ref("orders/" + key).push(newOrder); var recentKey = newOrderObject.key; getFirebaseData(recentKey); // console.log(newOrder.itemName); // console.log(newOrder.itemCode); // console.log(newOrder.itemPrice); // console.log(newOrder.itemColor); // console.log(newOrder.monoStyle); // console.log(newOrder.itemSymbol); // console.log(newOrder.itemText); // console.log(newOrder.quantity); alert("Item Successfully Added"); loadDefaultOptions(); return false; }); function loadDefaultOptions() { $("#input-item").val(""); $("#input-code").val(""); $("#input-code").prop("disabled", true); $("#input-price").val(""); $("#input-price").prop("disabled", true); $("#input-color").val(""); $("#input-monoStyle").val(""); $("#input-symbol").val(""); $("#input-symbol").prop("disabled", true); $("#input-personalization").val(""); $("#input-personalization").prop("disabled", true); $("#input-quantity").val(""); } function lookupItemCode(itemName) { var PCode; var PCodeLastLetters; var PColorCalculated; var PColors; var PMonoStyles; var PSymbols; for (let i = 0; i < jsonData.Products.length; i++) { if (jsonData.Products[i].productname === itemName) { // console.log("Match!: " + jsonData.Products[i].productcode); PCode = jsonData.Products[i].productcode; PPrice = jsonData.Products[i].discountedprice_level1; $("#input-code").val(PCode); $("#input-price").val("$ " + PPrice + ".00"); PCodeLastLetters = PCode[PCode.length - 2] + PCode[PCode.length -
1]; PColorCalculated = jsonData.Products[i].productname.split(" - "); console.log(PColorCalculated.length); if (PColorCalculated.length === 2) { $("#input-color").empty(); // $("#input-color").append($('<option>', { // value: PColorCalculated[1] // })); $('#input-color').append('<option>' + PColorCalculated[1] + '</option>'); $("#input-color").prop("disabled", true); } else { $("#input-color").val("").prop("disabled", false); lookupColors(itemName); } lookupMonoStyles(itemName); lookupSymbols(itemName); } } } function lookupColors(itemName) { $.getJSON("./assets/json/Colors.json", function (data) { var womensLeatherColorsArray = ["Lola", "King's Pad", "Everyday Tote", "Britton Backpack"]; var allLeatherColorsArray = ["Passport Cover", "Kenedy", "Clear Mini Makeup Case", "Clear Becky", "Airport Chico", "Luggage Tag", "Game Day Tote", "Clear Grande", "Large Laundry Bag"]; var bocaChicaColorsArray = ["Boca Chica"]; var huntingItemColorsArray = ["Bird Bag", "Sidekick", "Shotgun Cover", "Rifle Cover", "Small Revolver Case", "Large Revolver Case"]; var clearBackpackColorsArray = ["Clear Backpack"]; var JHDoppKitColorsArray = ["JH Dopp Kit"]; var bridleMahoganyColorsArray = ["McClip"]; var highlandParkToteColorsArray = ["Highland Park Tote"]; var sandPrintedColorsArray = ["Diaper Pad"]; var blackPrintedColorsArray = ["Makeup Case Organizer"]; var splitItemName = itemName.split(" - ")[0]; if (_.contains(womensLeatherColorsArray, splitItemName)) { console.log("Women's Leather Colors"); colors = data.Colors[0].womensLeatherColors; console.log(colors); $("#input-color").empty(); $.each(colors, function (index, value) { $("#input-color").append($('<option>', { value: value, text: index })); }); } else if (_.contains(allLeatherColorsArray, splitItemName)) { console.log("All Leather Colors"); colors = data.Colors[0].leatherColors; $("#input-color").empty(); $.each(colors, function (index, value) { $("#input-color").append($('<option>', { value: value, text: index })); }); } else
if (_.contains(bocaChicaColorsArray, splitItemName)) { console.log("Boca Chica Colors"); colors = data.Colors[0].bocaChicaColors; $("#input-color").empty(); $.each(colors, function (index, value) { $("#input-color").append($('<option>', { value: value, text: index })); }); } else if (_.contains(huntingItemColorsArray, splitItemName)) { console.log("Hunting Colors"); colors = data.Colors[0].huntingItemColors; $("#input-color").empty(); $.each(colors, function (index, value) { $("#input-color").append($('<option>', { value: value, text: index })); }); } else if (_.contains(clearBackpackColorsArray, splitItemName)) { console.log("Clear Backpack Colors"); colors = data.Colors[0].clearBackpackColors; $("#input-color").empty(); $.each(colors, function (index, value) { $("#input-color").append($('<option>', { value: value, text: index })); }); } else if (_.contains(JHDoppKitColorsArray, splitItemName)) { console.log("JH Dopp Kit Colors"); colors = data.Colors[0].JHDoppKitColorsLeather; $("#input-color").empty(); $.each(colors, function (index, value) { $("#input-color").append($('<option>', { value: value, text: index })); }); } else if (_.contains(bridleMahoganyColorsArray, splitItemName)) { console.log("Bridle or Mahogany"); colors = data.Colors[0].bridleMahogany; $("#input-color").empty(); $.each(colors, function (index, value) { $("#input-color").append($('<option>', { value: value, text: index })); }); } else if (_.contains(highlandParkToteColorsArray, splitItemName)) { console.log("Highland Park Tote Colors"); colors = data.Colors[0].highlandParkToteColors; $("#input-color").empty(); $.each(colors, function (index, value) { $("#input-color").append($('<option>', { value: value, text: index })); }); } else if (_.contains(sandPrintedColorsArray, splitItemName)) { console.log("Sand Printed Lining"); colors = data.Colors[0].sandPrintedLiningColor; $("#input-color").empty(); $.each(colors, function (index, value) { $("#input-color").append($('<option>', { value: value,
text: index })); }); } else if (_.contains(blackPrintedColorsArray, splitItemName)) { console.log("Black Printed Lining"); colors = data.Colors[0].blackLiningColor; $("#input-color").empty(); $.each(colors, function (index, value) { $("#input-color").append($('<option>', { value: value, text: index })); }); } else { console.log("All Vinyl Colors"); colors = data.Colors[0].vinylColors; $("#input-color").empty(); $.each(colors, function (index, value) { $("#input-color").append($('<option>', { value: value, text: index })); }); } }); } function lookupMonoStyles(itemName) { $.getJSON("./assets/json/MonoStyles.json", function (data) { var onlyHotStampArray = ["JH Card Case", "JH Key Strap", "JH Duffel", "JH Dopp Kit", "Joe Duffel", "Mercado", "Bird Bag", "Sidekick", "Shotgun Cover", "Revolver Case"]; var noHotStampArray = ["Lola", "King's Pad", "Everyday Tote", "Britton Backpack", "Clear Mini Makeup Case", "Luggage Tag", "Easyview Organizer"]; var bocaChicaArray = ["Boca Chica"]; var splitItemName = itemName.split(" - ")[0]; // console.log(splitItemName); if (_.contains(onlyHotStampArray, splitItemName)) { console.log("Only Hot Stamps"); monoStyles = data.MonoStyles[0].onlyHotStamps; $("#input-monoStyle").empty(); $.each(monoStyles, function (index, value) { $("#input-monoStyle").append($('<option>', { value: value, text: index })); }); } else if (_.contains(noHotStampArray, splitItemName)) { console.log("NO Hot Stamps"); monoStyles = data.MonoStyles[0].noHotStamps; $("#input-monoStyle").empty(); $.each(monoStyles, function (index, value) { $("#input-monoStyle").append($('<option>', { value: value, text: index })); }); } else if (_.contains(bocaChicaArray, splitItemName)) { console.log("Boca Chica!"); monoStyles = data.MonoStyles[0].bocaChica; $("#input-monoStyle").empty(); $.each(monoStyles, function (index, value) { $("#input-monoStyle").append($('<option>', { value: value, text: index })); }); } else { console.log("All Mono Styles"); monoStyles =
data.MonoStyles[0].allMonoStyles; $("#input-monoStyle").empty(); $.each(monoStyles, function (index, value) { $("#input-monoStyle").append($('<option>', { value: value, text: index })); }); } }); } function lookupSymbols(itemName) { $.getJSON("./assets/json/Symbols.json", function (symbolsData) { console.log(symbolsData.Symbols[0].noSymbols); var noSymbolsArray = ["Wristlet", "Passport Cover", "McClip", "Detachable Shoulder Strap", "Lola", "King's Pad", "Everyday Tote", "Britton Backpack", "Boca Chica", "Kenedy", "JH Card Case", "JH Key Strap", "JH Duffel", "JH Dopp Kit", "Joe Duffel", "Daisy", "Zippered Agenda Refill", "King's Pad Refill", "Diaper Pad", "Cool It Insert", "Makeup Case Organizer", "Cover for 360 Carry On Wheels", "Cover for 360 Large Wheels", "Cover for 360 Super Wheels", "Catalog"]; var logoOnlyArray = ["Clear Mini Makeup Case", "Becky", "Clear Becky", "ID Wallet", "Airport Chico", "Junior Shave Kit", "Mini Makeup Case", "El Mercado", "Bird Bag", "Sidekick", "Shotgun Cover", "Rifle Cover", "Small Revolver Case", "Large Revolver Case"]; var splitItemName = itemName.split(" - ")[0]; // console.log(splitItemName); if (_.contains(noSymbolsArray, splitItemName)) { console.log("No Symbols Available for this product"); symbols = symbolsData.Symbols[0].noSymbols; $("#input-symbol").empty(); $.each(symbols, function (index, value) { $("#input-symbol").append($('<option>', { value: value, text: index })); }); } else if (_.contains(logoOnlyArray, splitItemName)) { console.log("JH Jon Hart Logo - Only Stamp Available"); symbols = symbolsData.Symbols[0].logoOnly; $("#input-symbol").empty(); $.each(symbols, function (index, value) { $("#input-symbol").append($('<option>', { value: value, text: index })); }); } else { console.log("All Symbols"); symbols = symbolsData.Symbols[0].allSymbols; $("#input-symbol").empty(); $.each(symbols, function (index, value) { $("#input-symbol").append($('<option>', { value: value, text: index })); }); } }); } function
getFirstJSON() { // retrieve JSon from external url and load the data inside an array : $.getJSON("./assets/json/ProductsJSON.json", function (data) { // console.log(data.Products); jsonData = data; for (let i = 0; i < data.Products.length; i++) { availableProducts.push({ "productName": data.Products[i].productname, "productCode": data.Products[i].productcode }); } // $('#autocomplete').autocomplete({ // source: valuesArray, // focus: function( event, ui ) { // $('#div-id').val( ui.item.label ); // return false; // }, // select: function( event, ui ) { // //add your own action on item select! // $('#add-friend').val(''); // return false; // } // }) // .data( "autocomplete" )._renderItem = function( ul, item ) { // //define renderer for list // return $( "<li></li>" ) // .data( "item.autocomplete", item ) // .append( "<a>" + item.label + "</a>" ) // .appendTo( ul ); // }; console.log(availableProducts); $("#input-item").autocomplete({ // source: availableProducts.productName source: function (request, response) { response($.map(availableProducts, function (value, key) { // console.log("response: " + value.productName); return { label: value.productName }; })); }, focus: function(event, ui) { $('#input-item').val(ui.value.label); return false; }, // Once a value in the drop down list is selected, do the following: select: function (event, ui) { console.log("ui: " + ui.item.label); $("#input-item").val(ui.item.label); $("#input-item").trigger("change"); // place the person.given_name value into the textfield called 'select_origin'... // $('#search').val(ui.item.first_name); // and place the person.id into the hidden textfield called 'link_origin_id'.
// $('#link_origin_id').val(ui.item.id); return false; } }); }); } function getFirebaseData(recentKey) { orderData.ref("orders/" + key + "/" + recentKey).once("value", function (childSnapshot) { console.log(recentKey); console.log(childSnapshot.val()); // Store everything into a variable var iNo = childSnapshot.val().itemNo; var oName = childSnapshot.val().itemName; var oCode = childSnapshot.val().itemCode; var oPrice = childSnapshot.val().totalLinePrice; var oColor = childSnapshot.val().itemColor; var oMonoStyle = childSnapshot.val().monoStyle; var oSymbol = childSnapshot.val().itemSymbol; var oText = childSnapshot.val().itemText; var oQuantity = childSnapshot.val().quantity; $("#order-table > tbody").append("<tr><td>" + iNo + "</td><td>" + oName + "</td><td>" + oCode + "</td><td>" + oColor + "</td><td>" + oMonoStyle + "</td><td>" + oSymbol + "</td><td>" + oText + "</td><td>" + oQuantity + "</td><td>$ " + oPrice + ".00</td></tr>"); }); } });
import json

import websockets


class WSServer:
    """Small helper for sending JSON-encoded messages over a websocket."""

    def __init__(self, logger):
        # Logger used for debug tracing of outgoing payloads.
        self.logger = logger

    async def send_message(self, websocket, message_dict):
        """Serialize ``message_dict`` to JSON and send it on ``websocket``.

        The payload is logged at debug level before sending.
        """
        payload = json.dumps(message_dict)
        self.logger.debug(f"WSServer: Send to : {websocket} " + payload)
        await websocket.send(payload)
<!DOCTYPE html>
<html>
<head>
  <title>Digital Clock</title>
  <style>
    #clock {
      font-size: 50px;
      font-weight: bold;
      color: #0066ff;
    }
  </style>
</head>
<body>
  <div id="clock"></div>
  <script>
    // Renders the current local time as hh:mm:ss AM/PM, refreshed once per second.
    function showTime() {
      var date = new Date();
      var h = date.getHours();
      var m = date.getMinutes();
      var s = date.getSeconds();
      var session = "AM";

      // Convert the 24-hour clock to a 12-hour clock with an AM/PM marker.
      if (h == 0) {
        h = 12;
      }
      if (h > 12) {
        h = h - 12;
        session = "PM";
      }

      // Zero-pad each component to two digits.
      h = (h < 10) ? "0" + h : h;
      m = (m < 10) ? "0" + m : m;
      s = (s < 10) ? "0" + s : s;

      var time = h + ":" + m + ":" + s + " " + session;

      // Fix: the original wrote the element twice (innerText, then
      // textContent, each after its own getElementById lookup). A single
      // textContent assignment is equivalent and avoids the redundant
      // DOM lookup and the layout-forcing innerText write.
      document.getElementById("clock").textContent = time;

      // Schedule the next refresh.
      setTimeout(showTime, 1000);
    }
    showTime();
  </script>
</body>
</html>
def find_combinations(a, b, c):
    """Return every ordered triple ``[x, y, z]`` drawn from ``a``, ``b`` and ``c``.

    This is the full cartesian product: the first list varies slowest and the
    third varies fastest, matching a triple nested loop over the inputs.
    """
    return [[x, y, z] for x in a for y in b for z in c]
const csv = require('csvtojson');
const data = require("./volunteers");

/**
 * Parse ./data.csv and return an array of Google Calendar event objects,
 * one per CSV row. Resolves after csvtojson has consumed the whole file.
 */
async function parseCsv() {
    let events = [];
    const csvFilePath = './data.csv'
    await csv()
        .fromFile(csvFilePath)
        .on('data', (chunk) => {
            // chunk is a Buffer holding one parsed row serialised as JSON.
            // (Renamed from `data` to stop shadowing the volunteers module above.)
            const row = JSON.parse(chunk.toString('utf8'));
            events.push(buildEvent(row));
        })
        .on('done', (error) => {
            if (error) {
                console.error("ERROR! Message: ", error);
            } else {
                console.log("Finished parsing csv!");
            }
        })
    return events;
}

// FIX: buildEvent / buildAttendees / findVolunteerEmail were assigned without
// any declaration, creating implicit globals (a ReferenceError under strict
// mode). They are now proper module-local consts.

/** Build one calendar event object from a parsed CSV row. */
const buildEvent = (row) => {
    const {
        date,
        bibleText,
        musicLead,
        guitar1,
        guitar2,
        piano,
        drums,
        vocalist,
        computer,
        preschool1,
        preschool2,
        nursery1,
        nursery2,
    } = row;

    const attendees = buildAttendees(row);

    let description = `Thank you so much for serving! Please see below for who is serving where:
        <b>Bible Text</b>: ${bibleText}
        <b>Preschool</b>: ${preschool1} & ${preschool2}
        <b>Nursery</b>: ${nursery1} & ${nursery2}
        <b>Computer</b>: ${computer}
        <b>Music Lead</b>: ${musicLead}\n`;
    // Optional roles are only listed when the CSV row fills them in.
    if (guitar1) description += `        <b>Guitar</b>: ${guitar1}\n`;
    if (guitar2) description += `        <b>Additional Guitar</b>: ${guitar2}\n`;
    if (piano) description += `        <b>Piano</b>: ${piano}\n`;
    if (drums) description += `        <b>Drums</b>: ${drums}\n`;
    if (vocalist) description += `        <b>Vocalist</b>: ${vocalist}\n`;

    return {
        'summary': 'Sunday Serving',
        'description': description,
        'start': {
            'dateTime': `${date}T08:30:00-04:00`,
        },
        'end': {
            'dateTime': `${date}T12:00:00-04:00`,
        },
        'attendees': attendees,
        'reminders': {
            'useDefault': false,
            'overrides': [
                { 'method': 'email', 'minutes': 24 * 60 * 6 }, // 6 days
                { 'method': 'popup', 'minutes': 24 * 60 },     // 1 day
            ],
        }
    };
};

/** Collect attendee entries for every role filled in on the row. */
const buildAttendees = (row) => {
    // Same order as the original chain of `if (role) attendees.push(...)`;
    // empty/missing roles are skipped.
    const roles = [
        row.guitar1, row.guitar2, row.piano, row.drums, row.vocalist,
        row.computer, row.preschool1, row.preschool2, row.nursery1, row.nursery2,
    ];
    return roles.filter(Boolean).map((name) => findVolunteerEmail(name));
};

/** Look up a volunteer's email address by display name in ./volunteers. */
const findVolunteerEmail = (name) => {
    const volunteer = data.volunteers.find((v) => v.name === name);
    // FIX: an unknown name used to crash with a TypeError on `volunteer.email`;
    // warn and emit an attendee without an email so one typo in the CSV does
    // not abort the whole run.
    if (!volunteer) {
        console.warn(`No volunteer entry found for "${name}"`);
        return { displayName: name };
    }
    return {
        displayName: name,
        email: volunteer.email
    };
};

module.exports = parseCsv;
#!/bin/bash
# Package the Datadog .NET tracer native profiler for Linux with fpm.
# Expects PKGTYPES (space-separated fpm target types, e.g. "deb rpm tar")
# and optionally MUSL (non-empty selects the musl-suffixed tarball name)
# in the environment.
set -euxo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
VERSION=1.18.4

# Fail fast with a clear message if the caller forgot to export PKGTYPES
# (set -u would otherwise abort with an opaque "unbound variable" error).
: "${PKGTYPES:?PKGTYPES must list the fpm package types to build}"

# Build output the packages are assembled from.
BUILD_DIR="$DIR/../src/Datadog.Trace.ClrProfiler.Native/bin/Debug/x64"

# FIX: path expansions are now quoted so the script survives spaces in paths.
mkdir -p "$DIR/../deploy/linux"

cp "$DIR/../integrations.json" "$BUILD_DIR/"
cp "$DIR/../createLogPath.sh" "$BUILD_DIR/"

cd "$DIR/../deploy/linux"

# $PKGTYPES is intentionally unquoted: word-splitting yields one type per iteration.
for pkgtype in $PKGTYPES ; do
    # The command substitution is intentionally unquoted so it expands to either
    # nothing or the two words "--prefix /opt/datadog".
    fpm \
        -f \
        -s dir \
        -t "$pkgtype" \
        -n datadog-dotnet-apm \
        -v "$VERSION" \
        $(if [ "$pkgtype" != 'tar' ] ; then echo --prefix /opt/datadog ; fi) \
        --chdir "$BUILD_DIR" \
        netstandard2.0/ \
        Datadog.Trace.ClrProfiler.Native.so \
        integrations.json \
        createLogPath.sh
done

# NOTE(review): assumes 'tar' is among PKGTYPES; with set -e this aborts
# otherwise because datadog-dotnet-apm.tar will not exist — confirm callers.
gzip -f datadog-dotnet-apm.tar

if [ -z "${MUSL-}" ]; then
    mv datadog-dotnet-apm.tar.gz "datadog-dotnet-apm-$VERSION.tar.gz"
else
    mv datadog-dotnet-apm.tar.gz "datadog-dotnet-apm-$VERSION-musl.tar.gz"
fi
package com.easy.springboot.demo_springboot_with_mybatis.model;

import java.util.Date;

/**
 * Plain model (POJO) for an article row, with audit columns.
 *
 * <p>Fields mirror the table: surrogate id, creation/modification timestamps
 * and a soft-delete flag stored as an integer.</p>
 */
// FIX: removed the invalid "<reponame>.../<gh_stars>" marker text that
// preceded the package declaration and made the file uncompilable.
public class Article {
    /** Primary key. */
    private Long id;
    /** Row creation timestamp (gmt_create). */
    private Date gmtCreate;
    /** Last modification timestamp (gmt_modify). */
    private Date gmtModify;
    /** Soft-delete flag (is_deleted); integer-typed, semantics defined by callers. */
    private Integer isDeleted;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Date getGmtCreate() {
        return gmtCreate;
    }

    public void setGmtCreate(Date gmtCreate) {
        this.gmtCreate = gmtCreate;
    }

    public Date getGmtModify() {
        return gmtModify;
    }

    public void setGmtModify(Date gmtModify) {
        this.gmtModify = gmtModify;
    }

    public Integer getIsDeleted() {
        return isDeleted;
    }

    public void setIsDeleted(Integer isDeleted) {
        this.isDeleted = isDeleted;
    }
}