code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
package com.planet_ink.coffee_mud.core.intermud.packets; import com.planet_ink.coffee_mud.core.intermud.server.*; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.Vector; /** * Copyright (c) 2008-2010 Bo Zimmerman * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* */ @SuppressWarnings("unchecked") public class ChannelUserReply extends Packet { public String userRequested=null; public String userVisibleName=null; public char gender = 'N'; public ChannelUserReply() { super(); type = Packet.CHAN_USER_REP; } public ChannelUserReply(Vector v) throws InvalidPacketException { super(v); try { type = Packet.CHAN_USER_REP; try{ userRequested = (String)v.elementAt(6); userVisibleName = (String)v.elementAt(7); int gend = CMath.s_int(v.elementAt(8).toString()); switch(gend) { case 0: gender='M'; break; case 1: gender='F'; break; case 2: gender='N'; break; } }catch(Exception e){} } catch( ClassCastException e ) { throw new InvalidPacketException(); } } public void send() throws InvalidPacketException { if( userRequested == null || userVisibleName == null ) { throw new InvalidPacketException(); } super.send(); } public String toString() { int genderCode = 0; switch(gender) { case 'M': genderCode=0; break; case 'F': genderCode=1; break; case 'N': genderCode=2; break; } String str="({\"chan-user-req\",5,\"" + Server.getMudName() + "\",0,\"" + target_mud + "\",0,\"" + userRequested + "\",\"" + userVisibleName + "\"," + genderCode + ",})"; return str; } }
welterde/ewok
com/planet_ink/coffee_mud/core/intermud/packets/ChannelUserReply.java
Java
apache-2.0
2,969
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * Package docs.
 *
 * <p>The entire package is marked deprecated. (The javadoc comment must
 * precede the package declaration to be attached as package documentation;
 * it previously trailed it and was ignored by tooling.)
 */
@Deprecated
package demo;
basepom/duplicate-finder-maven-plugin
src/it/setup-it/class-jars/second-class-jar/src/main/java/demo/package-info.java
Java
apache-2.0
610
/*
 * Copyright (C) 2008 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.collect;

import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.concurrent.LazyInit;
import com.google.j2objc.annotations.RetainedWith;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Collection;
import java.util.Comparator;
import java.util.Map;
import java.util.Map.Entry;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;

/**
 * A {@link ListMultimap} whose contents will never change, with many other important properties
 * detailed at {@link ImmutableCollection}.
 *
 * <p>See the Guava User Guide article on <a href=
 * "https://github.com/google/guava/wiki/ImmutableCollectionsExplained"> immutable collections</a>.
 *
 * @author Jared Levy
 * @since 2.0
 */
@GwtCompatible(serializable = true, emulated = true)
public class ImmutableListMultimap<K, V> extends ImmutableMultimap<K, V>
    implements ListMultimap<K, V> {

  /** Returns the empty multimap. */
  // Casting is safe because the multimap will never hold any elements.
  @SuppressWarnings("unchecked")
  public static <K, V> ImmutableListMultimap<K, V> of() {
    return (ImmutableListMultimap<K, V>) EmptyImmutableListMultimap.INSTANCE;
  }

  /** Returns an immutable multimap containing a single entry. */
  public static <K, V> ImmutableListMultimap<K, V> of(K k1, V v1) {
    ImmutableListMultimap.Builder<K, V> builder = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    return builder.build();
  }

  /** Returns an immutable multimap containing the given entries, in order. */
  public static <K, V> ImmutableListMultimap<K, V> of(K k1, V v1, K k2, V v2) {
    ImmutableListMultimap.Builder<K, V> builder = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    return builder.build();
  }

  /** Returns an immutable multimap containing the given entries, in order. */
  public static <K, V> ImmutableListMultimap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3) {
    ImmutableListMultimap.Builder<K, V> builder = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    builder.put(k3, v3);
    return builder.build();
  }

  /** Returns an immutable multimap containing the given entries, in order. */
  public static <K, V> ImmutableListMultimap<K, V> of(
      K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) {
    ImmutableListMultimap.Builder<K, V> builder = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    builder.put(k3, v3);
    builder.put(k4, v4);
    return builder.build();
  }

  /** Returns an immutable multimap containing the given entries, in order. */
  public static <K, V> ImmutableListMultimap<K, V> of(
      K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) {
    ImmutableListMultimap.Builder<K, V> builder = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    builder.put(k3, v3);
    builder.put(k4, v4);
    builder.put(k5, v5);
    return builder.build();
  }

  // looking for of() with > 5 entries? Use the builder instead.

  /**
   * Returns a new builder. The generated builder is equivalent to the builder created by the
   * {@link Builder} constructor.
   */
  public static <K, V> Builder<K, V> builder() {
    return new Builder<>();
  }

  /**
   * A builder for creating immutable {@code ListMultimap} instances, especially {@code public
   * static final} multimaps ("constant multimaps"). Example:
   *
   * <pre>{@code
   * static final Multimap<String, Integer> STRING_TO_INTEGER_MULTIMAP =
   *     new ImmutableListMultimap.Builder<String, Integer>()
   *         .put("one", 1)
   *         .putAll("several", 1, 2, 3)
   *         .putAll("many", 1, 2, 3, 4, 5)
   *         .build();
   * }</pre>
   *
   * <p>Builder instances can be reused; it is safe to call {@link #build} multiple times to build
   * multiple multimaps in series. Each multimap contains the key-value mappings in the previously
   * created multimaps.
   *
   * @since 2.0
   */
  public static final class Builder<K, V> extends ImmutableMultimap.Builder<K, V> {
    /**
     * Creates a new builder. The returned builder is equivalent to the builder generated by {@link
     * ImmutableListMultimap#builder}.
     */
    public Builder() {}

    @CanIgnoreReturnValue
    @Override
    public Builder<K, V> put(K key, V value) {
      super.put(key, value);
      return this;
    }

    /**
     * {@inheritDoc}
     *
     * @since 11.0
     */
    @CanIgnoreReturnValue
    @Override
    public Builder<K, V> put(Entry<? extends K, ? extends V> entry) {
      super.put(entry);
      return this;
    }

    /**
     * {@inheritDoc}
     *
     * @since 19.0
     */
    @CanIgnoreReturnValue
    @Beta
    @Override
    public Builder<K, V> putAll(Iterable<? extends Entry<? extends K, ? extends V>> entries) {
      super.putAll(entries);
      return this;
    }

    @CanIgnoreReturnValue
    @Override
    public Builder<K, V> putAll(K key, Iterable<? extends V> values) {
      super.putAll(key, values);
      return this;
    }

    @CanIgnoreReturnValue
    @Override
    public Builder<K, V> putAll(K key, V... values) {
      super.putAll(key, values);
      return this;
    }

    @CanIgnoreReturnValue
    @Override
    public Builder<K, V> putAll(Multimap<? extends K, ? extends V> multimap) {
      super.putAll(multimap);
      return this;
    }

    /**
     * {@inheritDoc}
     *
     * @since 8.0
     */
    @CanIgnoreReturnValue
    @Override
    public Builder<K, V> orderKeysBy(Comparator<? super K> keyComparator) {
      super.orderKeysBy(keyComparator);
      return this;
    }

    /**
     * {@inheritDoc}
     *
     * @since 8.0
     */
    @CanIgnoreReturnValue
    @Override
    public Builder<K, V> orderValuesBy(Comparator<? super V> valueComparator) {
      super.orderValuesBy(valueComparator);
      return this;
    }

    /** Returns a newly-created immutable list multimap. */
    @Override
    public ImmutableListMultimap<K, V> build() {
      // Safe: the superclass builder was populated only through this subclass.
      return (ImmutableListMultimap<K, V>) super.build();
    }
  }

  /**
   * Returns an immutable multimap containing the same mappings as {@code multimap}. The generated
   * multimap's key and value orderings correspond to the iteration ordering of the {@code
   * multimap.asMap()} view.
   *
   * <p>Despite the method name, this method attempts to avoid actually copying the data when it is
   * safe to do so. The exact circumstances under which a copy will or will not be performed are
   * undocumented and subject to change.
   *
   * @throws NullPointerException if any key or value in {@code multimap} is null
   */
  public static <K, V> ImmutableListMultimap<K, V> copyOf(
      Multimap<? extends K, ? extends V> multimap) {
    if (multimap.isEmpty()) {
      return of();
    }

    // TODO(lowasser): copy ImmutableSetMultimap by using asList() on the sets
    if (multimap instanceof ImmutableListMultimap) {
      @SuppressWarnings("unchecked") // safe since multimap is not writable
      ImmutableListMultimap<K, V> kvMultimap = (ImmutableListMultimap<K, V>) multimap;
      if (!kvMultimap.isPartialView()) {
        return kvMultimap;
      }
    }

    return fromMapEntries(multimap.asMap().entrySet(), null);
  }

  /** Creates an ImmutableListMultimap from an asMap.entrySet. */
  static <K, V> ImmutableListMultimap<K, V> fromMapEntries(
      Collection<? extends Map.Entry<? extends K, ? extends Collection<? extends V>>> mapEntries,
      @NullableDecl Comparator<? super V> valueComparator) {
    if (mapEntries.isEmpty()) {
      return of();
    }
    ImmutableMap.Builder<K, ImmutableList<V>> builder =
        new ImmutableMap.Builder<>(mapEntries.size());
    int size = 0;

    for (Entry<? extends K, ? extends Collection<? extends V>> entry : mapEntries) {
      K key = entry.getKey();
      Collection<? extends V> values = entry.getValue();
      ImmutableList<V> list =
          (valueComparator == null)
              ? ImmutableList.copyOf(values)
              : ImmutableList.sortedCopyOf(valueComparator, values);
      // Empty value collections are dropped entirely; size counts only kept values.
      if (!list.isEmpty()) {
        builder.put(key, list);
        size += list.size();
      }
    }

    return new ImmutableListMultimap<>(builder.build(), size);
  }

  /**
   * Returns an immutable multimap containing the specified entries. The returned multimap iterates
   * over keys in the order they were first encountered in the input, and the values for each key
   * are iterated in the order they were encountered.
   *
   * @throws NullPointerException if any key, value, or entry is null
   * @since 19.0
   */
  @Beta
  public static <K, V> ImmutableListMultimap<K, V> copyOf(
      Iterable<? extends Entry<? extends K, ? extends V>> entries) {
    return new Builder<K, V>().putAll(entries).build();
  }

  ImmutableListMultimap(ImmutableMap<K, ImmutableList<V>> map, int size) {
    super(map, size);
  }

  // views

  /**
   * Returns an immutable list of the values for the given key. If no mappings in the multimap have
   * the provided key, an empty immutable list is returned. The values are in the same order as the
   * parameters used to build this multimap.
   */
  @Override
  public ImmutableList<V> get(@NullableDecl K key) {
    // This cast is safe as its type is known in constructor.
    ImmutableList<V> list = (ImmutableList<V>) map.get(key);
    return (list == null) ? ImmutableList.<V>of() : list;
  }

  // Lazily computed, memoized inverse; @RetainedWith keeps the back-reference
  // alive under j2objc reference counting.
  @LazyInit @RetainedWith private transient ImmutableListMultimap<V, K> inverse;

  /**
   * {@inheritDoc}
   *
   * <p>Because an inverse of a list multimap can contain multiple pairs with the same key and
   * value, this method returns an {@code ImmutableListMultimap} rather than the {@code
   * ImmutableMultimap} specified in the {@code ImmutableMultimap} class.
   *
   * @since 11.0
   */
  @Override
  public ImmutableListMultimap<V, K> inverse() {
    ImmutableListMultimap<V, K> result = inverse;
    return (result == null) ? (inverse = invert()) : result;
  }

  private ImmutableListMultimap<V, K> invert() {
    Builder<V, K> builder = builder();
    for (Entry<K, V> entry : entries()) {
      builder.put(entry.getValue(), entry.getKey());
    }
    ImmutableListMultimap<V, K> invertedMultimap = builder.build();
    // Link the inverse back to this instance so inverse().inverse() is free.
    invertedMultimap.inverse = this;
    return invertedMultimap;
  }

  /**
   * Guaranteed to throw an exception and leave the multimap unmodified.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @CanIgnoreReturnValue
  @Deprecated
  @Override
  public ImmutableList<V> removeAll(Object key) {
    throw new UnsupportedOperationException();
  }

  /**
   * Guaranteed to throw an exception and leave the multimap unmodified.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @CanIgnoreReturnValue
  @Deprecated
  @Override
  public ImmutableList<V> replaceValues(K key, Iterable<? extends V> values) {
    throw new UnsupportedOperationException();
  }

  /**
   * @serialData number of distinct keys, and then for each distinct key: the key, the number of
   *     values for that key, and the key's values
   */
  @GwtIncompatible // java.io.ObjectOutputStream
  private void writeObject(ObjectOutputStream stream) throws IOException {
    stream.defaultWriteObject();
    Serialization.writeMultimap(this, stream);
  }

  @GwtIncompatible // java.io.ObjectInputStream
  private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
    stream.defaultReadObject();
    int keyCount = stream.readInt();
    if (keyCount < 0) {
      throw new InvalidObjectException("Invalid key count " + keyCount);
    }
    ImmutableMap.Builder<Object, ImmutableList<Object>> builder = ImmutableMap.builder();
    int tmpSize = 0;

    for (int i = 0; i < keyCount; i++) {
      Object key = stream.readObject();
      int valueCount = stream.readInt();
      if (valueCount <= 0) {
        throw new InvalidObjectException("Invalid value count " + valueCount);
      }

      ImmutableList.Builder<Object> valuesBuilder = ImmutableList.builder();
      for (int j = 0; j < valueCount; j++) {
        valuesBuilder.add(stream.readObject());
      }
      builder.put(key, valuesBuilder.build());
      tmpSize += valueCount;
    }

    ImmutableMap<Object, ImmutableList<Object>> tmpMap;
    try {
      tmpMap = builder.build();
    } catch (IllegalArgumentException e) {
      // Duplicate keys in the stream indicate a corrupt/forged payload.
      throw (InvalidObjectException) new InvalidObjectException(e.getMessage()).initCause(e);
    }

    FieldSettersHolder.MAP_FIELD_SETTER.set(this, tmpMap);
    FieldSettersHolder.SIZE_FIELD_SETTER.set(this, tmpSize);
  }

  @GwtIncompatible // Not needed in emulated source
  private static final long serialVersionUID = 0;
}
berndhopp/guava
android/guava/src/com/google/common/collect/ImmutableListMultimap.java
Java
apache-2.0
13,676
package edu.cmu.cs.cloudlet.android.application.graphics;

import java.io.DataInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;

import org.apache.http.util.ByteArrayBuffer;
import org.json.JSONException;
import org.teleal.common.util.ByteArray;

import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;

/**
 * Background thread that reads simulation frames from a server socket,
 * tracks FPS / latency statistics per frame, and forwards each frame's
 * payload plus a status string to the UI via a {@link Handler}.
 */
public class GNetworkClientReceiver extends Thread {
	private Handler mHandler;
	private DataInputStream networkReader;
	// Flag polled by run(); cleared by close() to stop the receive loop.
	private boolean isThreadRun = true;
	private int messageCounter = 0;

	protected byte[] recvByte = null;
	ArrayList<Particle> particleList = new ArrayList<Particle>();
	// Frame IDs as reported by the server; startFrameID captures the first one seen.
	private int startFrameID = 0;
	private int currentFrameID = 0;
	private int clientID = 0;

	// clientID -> first receive timestamp; duplicates are counted instead of stored.
	private TreeMap<Integer, Long> receiver_stamps = new TreeMap<Integer, Long>();
	private ArrayList<Long> reciver_time_list = new ArrayList<Long>();
	private int duplicated_client_id;

	// accIndex -> send timestamp, consumed once by getSentTime() to compute latency.
	private HashMap<Integer, Long> latencyRecords = new HashMap<Integer, Long>();
	private long totalLatency = 0;

	public GNetworkClientReceiver(DataInputStream dataInputStream, Handler mHandler) {
		this.networkReader = dataInputStream;
		this.mHandler = mHandler;
	}

	public TreeMap<Integer, Long> getReceiverStamps() {
		return this.receiver_stamps;
	}

	public ArrayList<Long> getReceivedTimeList() {
		return this.reciver_time_list;
	}

	public int getDuplicatedAcc() {
		return this.duplicated_client_id;
	}

	@Override
	public void run() {
		// NOTE(review): isThreadRun is initialized to true, so this loop is
		// normally a no-op; if close() ran before start() it would spin
		// forever since nothing ever sets the flag back. Looks like an
		// unfinished "wait until started" gate -- confirm intent.
		while (isThreadRun == false) {
			try {
				Thread.sleep(1);
			} catch (InterruptedException e) {
			}
		}

		// Recv initial simulation information (container dimensions).
		try {
			int containerWidth = networkReader.readInt();
			int containerHeight = networkReader.readInt();
			Log.d("krha", "container size : " + containerWidth + ", " + containerHeight);
			VisualizationStaticInfo.containerWidth = containerWidth;
			VisualizationStaticInfo.containerHeight = containerHeight;
		} catch (IOException e1) {
			e1.printStackTrace();
		}

		long startTime = System.currentTimeMillis();
		while (isThreadRun == true) {
			int recvSize = 0;
			try {
				recvSize = this.receiveMsg(networkReader);
				long currentTime = System.currentTimeMillis();
				long duration = currentTime - startTime;
				// getSentTime() returns Long.MAX_VALUE for unknown IDs, which
				// yields a negative latency and is filtered out below.
				long latency = currentTime - this.getSentTime(this.clientID);
				if (latency > 0)
					totalLatency += latency;
				int totalFrameNumber = this.getLastFrameID() - this.startFrameID;
				if (totalFrameNumber > 0 && latency > 0) {
					String message = "FPS: " + this.roundDigit(1000.0 * totalFrameNumber / duration)
							+ ", ACC: " + this.roundDigit(1000.0 * this.clientID / duration)
							+ ", Latency: " + this.roundDigit(1.0 * totalLatency / totalFrameNumber)
							+ " / " + latency;
					this.notifyStatus(GNetworkClient.PROGRESS_MESSAGE, message, recvByte);
				}
			} catch (IOException e) {
				Log.e("krha", e.toString());
				// this.notifyStatus(GNetworkClient.NETWORK_ERROR, e.toString(), null);
				break;
			}
		}
	}

	/**
	 * Reads one framed message: clientID, frameID, payload length, then the
	 * payload itself (looping until retLength bytes arrive or the stream ends).
	 *
	 * @return the number of payload bytes actually read
	 * @throws IOException on any stream failure
	 */
	private int receiveMsg(DataInputStream reader) throws IOException {
		this.clientID = reader.readInt();
		this.currentFrameID = reader.readInt();
		int retLength = reader.readInt();
		if (this.startFrameID == 0)
			this.startFrameID = this.currentFrameID;

		// Grow (never shrink) the receive buffer to fit this payload.
		if (recvByte == null || recvByte.length < retLength) {
			recvByte = new byte[retLength];
		}
		int readSize = 0;
		while (readSize < retLength) {
			int ret = reader.read(this.recvByte, readSize, retLength - readSize);
			if (ret <= 0) {
				break;
			}
			readSize += ret;
		}

		long currentTime = System.currentTimeMillis();
		if (this.receiver_stamps.get(this.clientID) == null) {
			this.receiver_stamps.put(this.clientID, currentTime);
			// Log.d("krha", "Save Client ID : " + this.clientID);
		} else {
			duplicated_client_id++;
		}
		this.reciver_time_list.add(currentTime);
		return readSize;
	}

	/**
	 * Packages the payload (endian-switched into a little-endian ByteBuffer)
	 * and the status string into a Message for the UI handler.
	 */
	private void notifyStatus(int command, String string, byte[] recvData) {
		// Copy data with endian switching
		ByteBuffer buf = ByteBuffer.allocate(recvData.length);
		buf.order(ByteOrder.LITTLE_ENDIAN);
		buf.put(recvData);
		buf.flip();
		buf.compact();

		Message msg = Message.obtain();
		msg.what = command;
		msg.obj = buf;
		Bundle data = new Bundle();
		data.putString("message", string);
		msg.setData(data);
		this.mHandler.sendMessage(msg);
	}

	/** Stops the receive loop and closes the input stream. */
	public void close() {
		this.isThreadRun = false;
		try {
			if (this.networkReader != null)
				this.networkReader.close();
		} catch (IOException e) {
			Log.e("krha", e.toString());
		}
	}

	public int getLastFrameID() {
		return this.currentFrameID;
	}

	/**
	 * Records when the message with the given index was sent.
	 * Fix: store the caller-supplied timestamp instead of re-reading the
	 * clock here -- the parameter was previously ignored, skewing latency
	 * by however long the call took to arrive.
	 */
	public void recordSentTime(int accIndex, long currentTimeMillis) {
		this.latencyRecords.put(accIndex, currentTimeMillis);
	}

	/**
	 * Pops and returns the recorded send time for accID, or Long.MAX_VALUE
	 * when none was recorded (callers treat the resulting negative latency
	 * as "unknown").
	 */
	public long getSentTime(int accID) {
		if (this.latencyRecords.containsKey(accID) == false) {
			return Long.MAX_VALUE;
		} else {
			long sentTime = this.latencyRecords.remove(accID);
			return sentTime;
		}
	}

	/** Formats a value with two decimal places for status display. */
	public static String roundDigit(double paramFloat) {
		return String.format("%.2f", paramFloat);
	}
}
cmusatyalab/elijah-provisioning
android/android_fluid/src/edu/cmu/cs/cloudlet/android/application/graphics/GNetworkClientReceiver.java
Java
apache-2.0
5,437
/*
** Copyright [2013-2015] [Megam Systems]
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
package ubuntu

import (
	"fmt"
	"os"
	"strings"

	"github.com/megamsys/megdc/templates"
	"github.com/megamsys/urknall"
	//"github.com/megamsys/libgo/cmd"
)

const (
	// Keys looked up in the parsed template options/maps.
	CephUser       = "CephUser"
	Osd            = "Osd"
	Phydev         = "PhyDev"
	UserHomePrefix = "/home/"

	// ssh client settings written for the ceph user so ceph-deploy can
	// connect to localhost without interactive host-key prompts.
	// NOTE(review): line breaks inside these raw strings were reconstructed
	// from a collapsed dump -- confirm against the original file.
	StrictHostKey = `
ConnectTimeout 5
Host *
StrictHostKeyChecking no
`
	SSHHostConfig = `
Host %s
Hostname %s
User %s
`
	// Template for a minimal ceph.conf (pool size, public/cluster networks).
	// NOTE(review): appears unused by Render below, which echoes the same
	// settings line-by-line instead.
	CephConf = `osd crush chooseleaf type = 0
osd_pool_default_size = %d
public network = %s
cluster network = %s
mon_pg_warn_max_per_osd = 0
`
)

var ubuntucephinstall *UbuntuCephInstall

// init registers this template under "UbuntuCephInstall" so the megdc
// template registry can instantiate it by name.
func init() {
	ubuntucephinstall = &UbuntuCephInstall{}
	templates.Register("UbuntuCephInstall", ubuntucephinstall)
}

// UbuntuCephInstall holds the user-facing options for a single-host Ceph
// install on Ubuntu: OSD directory suffixes, the ceph admin user, and the
// physical network device whose IP is used for /etc/hosts.
type UbuntuCephInstall struct {
	osds     []string
	cephuser string
	phydev   string
}

// Options copies the Osd, CephUser and PhyDev settings from the parsed
// template; fields keep their zero values when a setting is absent.
func (tpl *UbuntuCephInstall) Options(t *templates.Template) {
	if osds, ok := t.Maps[Osd]; ok {
		tpl.osds = osds
	}
	if cephuser, ok := t.Options[CephUser]; ok {
		tpl.cephuser = cephuser
	}
	if phydev, ok := t.Options[Phydev]; ok {
		tpl.phydev = phydev
	}
}

// Render registers the concrete command template, deriving the ceph user's
// home directory from UserHomePrefix.
func (tpl *UbuntuCephInstall) Render(p urknall.Package) {
	p.AddTemplate("ceph", &UbuntuCephInstallTemplate{
		osds:     tpl.osds,
		cephuser: tpl.cephuser,
		cephhome: UserHomePrefix + tpl.cephuser,
		phydev:   tpl.phydev,
	})
}

// Run executes this template against the given urknall target.
func (tpl *UbuntuCephInstall) Run(target urknall.Target) error {
	return urknall.Run(target, &UbuntuCephInstall{
		osds:     tpl.osds,
		cephuser: tpl.cephuser,
		phydev:   tpl.phydev,
	})
}

// UbuntuCephInstallTemplate is the renderable form of UbuntuCephInstall,
// with the ceph user's home directory precomputed.
type UbuntuCephInstallTemplate struct {
	osds     []string
	cephuser string
	cephhome string
	phydev   string
}

// Render emits the ordered shell-command groups that install and bootstrap
// a single-node Ceph cluster: package install, ceph user + sudoers setup,
// passwordless ssh to self, OSD directories, ceph-deploy bootstrap, and
// finally copying the generated keyrings into /etc/ceph.
// The command order is significant; do not reorder groups.
func (m *UbuntuCephInstallTemplate) Render(pkg urknall.Package) {
	host, _ := os.Hostname()
	ip := IP(m.phydev)
	// e.g. osds=["1","2"] -> osddir="/1/osd,/2/osd"-style joined paths and
	// hostosd="<host>:/1/osd ..." targets for ceph-deploy (see ArraytoString).
	osddir := ArraytoString("/", "/osd", m.osds)
	hostosd := ArraytoString(host+":/", "/osd", m.osds)
	CephUser := m.cephuser
	CephHome := m.cephhome

	// Add the upstream ceph apt repo and install everything needed.
	pkg.AddCommands("cephinstall",
		Shell("echo deb https://download.ceph.com/debian-infernalis/ $(lsb_release -sc) main | tee /etc/apt/sources.list.d/ceph.list"),
		Shell("wget -q -O- 'https://download.ceph.com/keys/release.asc' | apt-key add -"),
		InstallPackages("apt-transport-https sudo"),
		UpdatePackagesOmitError(),
		InstallPackages("ceph-deploy ceph-common ceph-mds dnsmasq openssh-server ntp sshpass ceph ceph-mds ceph-deploy radosgw"),
	)
	// Dedicated ceph user with passwordless sudo (required by ceph-deploy).
	pkg.AddCommands("cephuser_add",
		AddUser(CephUser, false),
	)
	pkg.AddCommands("cephuser_sudoer",
		Shell("echo '"+CephUser+" ALL = (root) NOPASSWD:ALL' | sudo tee /etc/sudoers.d/"+CephUser+""),
	)
	pkg.AddCommands("chmod_sudoer",
		Shell("sudo chmod 0440 /etc/sudoers.d/"+CephUser+""),
	)
	// Make the hostname resolvable to the phydev IP.
	pkg.AddCommands("etchost",
		Shell("echo '"+ip+" "+host+"' >> /etc/hosts"),
	)
	// Passwordless ssh from the ceph user to itself (ceph-deploy uses ssh).
	pkg.AddCommands("ssh-keygen",
		Mkdir(CephHome+"/.ssh", CephUser, 0700),
		AsUser(CephUser, Shell("ssh-keygen -N '' -t rsa -f "+CephHome+"/.ssh/id_rsa")),
		AsUser(CephUser, Shell("cp "+CephHome+"/.ssh/id_rsa.pub "+CephHome+"/.ssh/authorized_keys")),
	)
	pkg.AddCommands("ssh_known_hosts",
		WriteFile(CephHome+"/.ssh/ssh_config", StrictHostKey, CephUser, 0755),
		WriteFile(CephHome+"/.ssh/config", fmt.Sprintf(SSHHostConfig, host, host, CephUser), CephUser, 0755),
	)
	// Create and chown the OSD data directories.
	pkg.AddCommands("mkdir_osd",
		Mkdir(osddir, "", 0755),
		Shell("sudo chown -R "+CephUser+":"+CephUser+" "+osddir),
	)
	// Bootstrap the cluster with ceph-deploy, then resize the default rbd
	// pool. The "sleep 180" pauses give the monitors/OSDs time to settle.
	pkg.AddCommands("write_cephconf",
		AsUser(CephUser, Shell("mkdir "+CephHome+"/ceph-cluster")),
		AsUser(CephUser, Shell("cd "+CephHome+"/ceph-cluster")),
		AsUser(CephUser, Shell("cd "+CephHome+"/ceph-cluster;ceph-deploy new "+host+" ")),
		AsUser(CephUser, Shell("echo 'osd crush chooseleaf type = 0' >> "+CephHome+"/ceph-cluster/ceph.conf")),
		AsUser(CephUser, Shell("echo 'osd_pool_default_size = 2' >> "+CephHome+"/ceph-cluster/ceph.conf")),
		AsUser(CephUser, Shell("echo 'mon_pg_warn_max_per_osd = 0' >> "+CephHome+"/ceph-cluster/ceph.conf")),
		AsUser(CephUser, Shell("cd "+CephHome+"/ceph-cluster;ceph-deploy install "+host+"")),
		AsUser(CephUser, Shell("cd "+CephHome+"/ceph-cluster;ceph-deploy mon create-initial")),
		AsUser(CephUser, Shell("cd "+CephHome+"/ceph-cluster;ceph-deploy osd prepare "+hostosd)),
		AsUser(CephUser, Shell("cd "+CephHome+"/ceph-cluster;ceph-deploy osd activate "+hostosd)),
		AsUser(CephUser, Shell("cd "+CephHome+"/ceph-cluster;ceph-deploy admin "+host+"")),
		AsUser(CephUser, Shell("sudo chmod +r /etc/ceph/ceph.client.admin.keyring")),
		AsUser(CephUser, Shell("sleep 180")),
		AsUser(CephUser, Shell("ceph osd pool set rbd pg_num 100")),
		AsUser(CephUser, Shell("sleep 180")),
		AsUser(CephUser, Shell("ceph osd pool set rbd pgp_num 100")),
	)
	pkg.AddCommands("copy_keyring",
		Shell("cp "+CephHome+"/ceph-cluster/*.keyring /etc/ceph/"),
	)
}

// noOfIpsFromMask returns the prefix length (mask bits) of the network
// attached to the configured physical device.
func (m *UbuntuCephInstallTemplate) noOfIpsFromMask() int {
	si, _ := IPNet(m.phydev).Mask.Size() // from your network
	return si
}

// slashIp rebuilds the device address as a CIDR string with the last
// octet zeroed, e.g. "10.0.0.7" -> "10.0.0.0/24".
func (m *UbuntuCephInstallTemplate) slashIp() string {
	s := strings.Split(IP(m.phydev), ".")
	p := s[0 : len(s)-1]
	p = append(p, "0")
	return fmt.Sprintf("%s/%d", strings.Join(p, "."), m.noOfIpsFromMask())
}

// osdPoolSize reports the number of OSDs passed in. Note it counts its
// variadic argument, not the receiver's osds field.
func (m *UbuntuCephInstallTemplate) osdPoolSize(osds ...string) int {
	return len(osds)
}
rajthilakmca/megdc
templates/ubuntu/tpl_ceph_install.go
GO
apache-2.0
5,706
/**
 * @brief Tests a slow version of MCMC for Occupancy grids
 * @author Brian Peasley
 * @date ?
 */
#include "OccupancyGrid/MCMC.h"
#include "OccupancyGrid/cvmat_serialization.h"
#include "OccupancyGrid/visualiser.h"
#include "OccupancyGrid/loadOccupancyGrid.h"

#include <opencv2/opencv.hpp>
#include <boost/program_options.hpp>

#include <cstdio>   // fopen/fprintf/perror (previously pulled in transitively)
#include <ctime>    // CLOCKS_PER_SEC
#include <sstream>  // std::stringstream

namespace po = boost::program_options;
using namespace std;
using namespace gtsam;

Visualiser global_vis_;

/// Main: parses options, runs the slow Metropolis sampler on the loaded
/// occupancy grid, writes marginals to Data/Metropolis_Marginals.txt and
/// saves the visualiser output as a PNG in the data directory.
int main(int argc, char *argv[]) {
  cv::namedWindow("c", cv::WINDOW_NORMAL);

  // parse arguments ///////////////////////////////////////////
  // Declare the supported options.
  po::options_description desc("Run dual decomposition");
  desc.add_options()
    ("help", "produce help message")
    ("width", po::value<double>(), "Width")
    ("height", po::value<double>(), "Height")
    ("resolution", po::value<double>()->required(), "Size of square cell in the map")
    ("dir", po::value<std::string>()->default_value("Data/player_sim_with_reflectance"), "Data directory")
    ("clock", po::value<double>()->default_value(400), "Max clock")
    ;
  po::positional_options_description pos;
  pos.add("resolution", 1);
  po::variables_map vm;
  po::store(po::command_line_parser(argc, argv).options(desc).positional(pos).run(), vm);
  po::notify(vm);
  std::string directory = vm["dir"].as<std::string>();
  // "clock" is given in seconds of CPU time; convert to clock ticks.
  double max_clock = CLOCKS_PER_SEC * vm["clock"].as<double>();
  // end of parse arguments ////////////////////////////////////

  OccupancyGrid occupancyGrid = loadOccupancyGrid(vm);
  global_vis_.init(occupancyGrid.height(), occupancyGrid.width());

  // run metropolis
  OccupancyGrid::Marginals occupancyMarginals =
      runSlowMetropolis(occupancyGrid, 300000, max_clock);

  // Write the result. Use snprintf (bounded) and check fopen so a missing
  // Data/ directory fails with a clear error instead of fprintf(NULL, ...).
  char marginalsOutput[1000];
  snprintf(marginalsOutput, sizeof(marginalsOutput), "Data/Metropolis_Marginals.txt");
  FILE* fptr = fopen(marginalsOutput, "w");
  if (fptr == NULL) {
    perror(marginalsOutput);
    return 1;
  }
  fprintf(fptr, "%lu %lu\n", occupancyGrid.width(), occupancyGrid.height());
  for (size_t i = 0; i < occupancyMarginals.size(); i++) {
    fprintf(fptr, "%lf ", occupancyMarginals[i]);
  }
  fclose(fptr);

  std::stringstream ss;
  ss << directory << "/SICKSlowMetropolis.png";
  global_vis_.save(ss.str());
}
wecacuee/modern-occupancy-grid
tests/testSICKSlowMetropolis.cpp
C++
apache-2.0
2,216
<?php
namespace Scalr\Service\Aws\Ec2\Handler;

use Scalr\Service\Aws\Ec2\DataType\InternetGatewayFilterList;
use Scalr\Service\Aws\Ec2\DataType\InternetGatewayList;
use Scalr\Service\Aws\Ec2\DataType\InternetGatewayData;
use Scalr\Service\Aws\DataType\ListDataType;
use Scalr\Service\Aws\Client\ClientException;
use Scalr\Service\Aws\Ec2Exception;
use Scalr\Service\Aws\Ec2\AbstractEc2Handler;

/**
 * InternetGatewayHandler
 *
 * Thin convenience wrapper around the EC2 API handler for the Internet
 * gateway family of calls (describe / create / delete / attach / detach),
 * plus a cache lookup through the entity manager.
 *
 * @author Vitaliy Demidov <vitaliy@scalr.com>
 * @since 03.04.2013
 */
class InternetGatewayHandler extends AbstractEc2Handler
{

    /**
     * Fetches an InternetGatewayData object from the EntityManager cache.
     * Bear in mind that the entity manager is turned off by default.
     *
     * @param string $internetGatewayId Unique Identifier.
     * @return InternetGatewayData|null The cached entity, or NULL when absent.
     */
    public function get($internetGatewayId)
    {
        $em = $this->getEc2()->getEntityManager();

        return $em->getRepository('Ec2:InternetGateway')->find($internetGatewayId);
    }

    /**
     * DescribeInternetGateways
     *
     * Describes one or more of your Internet gateways. Scalar or array
     * arguments are normalized into their list data types before the call.
     *
     * @param ListDataType|array|string $internetGatewayIdList optional
     *        The list of Internet gateway identifiers.
     * @param InternetGatewayFilterList|InternetGatewayFilterData|array $filter optional
     *        The filter list.
     * @return InternetGatewayList Returns InternetGatewayList on success
     * @throws ClientException
     * @throws Ec2Exception
     */
    public function describe($internetGatewayIdList = null, $filter = null)
    {
        if (null !== $internetGatewayIdList && !($internetGatewayIdList instanceof ListDataType)) {
            $internetGatewayIdList = new ListDataType($internetGatewayIdList);
        }
        if (null !== $filter && !($filter instanceof InternetGatewayFilterList)) {
            $filter = new InternetGatewayFilterList($filter);
        }
        $api = $this->getEc2()->getApiHandler();

        return $api->describeInternetGateways($internetGatewayIdList, $filter);
    }

    /**
     * CreateInternetGateway action
     *
     * Creates a new Internet gateway for use with a VPC.
     *
     * @return InternetGatewayData Returns InternetGatewayData on success
     * @throws ClientException
     * @throws Ec2Exception
     */
    public function create()
    {
        return $this->getEc2()->getApiHandler()->createInternetGateway();
    }

    /**
     * DeleteInternetGateway action
     *
     * Deletes an Internet gateway from your AWS account.
     * The gateway must not be attached to a VPC.
     *
     * @param string $internetGatewayId The ID of the Internet Gateway
     * @return bool Returns TRUE on success
     * @throws ClientException
     * @throws Ec2Exception
     */
    public function delete($internetGatewayId)
    {
        return $this->getEc2()->getApiHandler()->deleteInternetGateway($internetGatewayId);
    }

    /**
     * AttachInternetGateway action
     *
     * Attaches an Internet gateway to a VPC, enabling connectivity
     * between the Internet and the VPC.
     *
     * @param string $internetGatewayId The ID of the internet gateway
     * @param string $vpcId The ID of the VPC
     * @return bool Returns TRUE on success
     * @throws ClientException
     * @throws Ec2Exception
     */
    public function attach($internetGatewayId, $vpcId)
    {
        $api = $this->getEc2()->getApiHandler();

        return $api->attachInternetGateway($internetGatewayId, $vpcId);
    }

    /**
     * DetachInternetGateway action
     *
     * Detaches an Internet gateway from a VPC, disabling connectivity
     * between the Internet and the VPC.
     *
     * @param string $internetGatewayId The ID of the internet gateway
     * @param string $vpcId The ID of the VPC
     * @return bool Returns TRUE on success
     * @throws ClientException
     * @throws Ec2Exception
     */
    public function detach($internetGatewayId, $vpcId)
    {
        $api = $this->getEc2()->getApiHandler();

        return $api->detachInternetGateway($internetGatewayId, $vpcId);
    }
}
AlphaStaxLLC/scalr
app/src/Scalr/Service/Aws/Ec2/Handler/InternetGatewayHandler.php
PHP
apache-2.0
4,237
/* eslint-env jest */ import { actionReducer } from './utils' describe('actionReducer', () => { const counter = actionReducer(0, { INCREMENT (state, action) { return state + 1 }, DECREMENT (state, action) { return state - 1 }, SET_VALUE (state, action) { return action.value }, }) it('returns initial state on no action', () => { expect(counter(undefined, {})).toEqual(0) }) it('utilizes initial state for known action', () => { expect(counter(undefined, { type: 'INCREMENT' })).toEqual(1) }) it('utilizes current state for known action', () => { expect(counter(1, { type: 'INCREMENT' })).toEqual(2) expect(counter(4, { type: 'DECREMENT' })).toEqual(3) }) it('preserves state on unknown actions', () => { expect(counter(42, { type: 'UNKNOWN' })).toEqual(42) }) it('enables action handlers to consume action parameters', () => { expect(counter(5, { type: 'SET_VALUE', value: 7 })).toEqual(7) }) })
mareklibra/userportal
src/reducers/utils.test.js
JavaScript
apache-2.0
994
// <copyright file="Quorum.cs" company="Basho Technologies, Inc."> // Copyright 2011 - OJ Reeves & Jeremiah Peschka // Copyright 2014 - Basho Technologies, Inc. // // This file is provided to you under the Apache License, // Version 2.0 (the "License"); you may not use this file // except in compliance with the License. You may obtain // a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. // </copyright> namespace RiakClient { using System; using System.Collections.Generic; /// <summary> /// Represents the possible values for Riak operation parameters such as R, W, PR, PW, DW, and RW. /// </summary> public class Quorum : IEquatable<Quorum> { private const int OneAsInt = -2; private const int QuorumAsInt = -3; private const int AllAsInt = -4; private const int DefaultAsInt = -5; private static readonly IDictionary<string, int> QuorumStrMap = new Dictionary<string, int> { { "one", OneAsInt }, { "quorum", QuorumAsInt }, { "all", AllAsInt }, { "default", DefaultAsInt } }; private static readonly IDictionary<int, string> QuorumIntMap = new Dictionary<int, string> { { OneAsInt, "one" }, { QuorumAsInt, "quorum" }, { AllAsInt, "all" }, { DefaultAsInt, "default" } }; private readonly int quorumValue = 0; /// <summary> /// Initializes a new instance of the <see cref="Quorum"/> class. /// </summary> /// <param name="quorum">A well known quorum value string, such as "one", "quorum", "all", or "default".</param> /// <exception cref="ArgumentNullException"> /// The value of <paramref name="quorum"/> cannot be null, empty, or whitespace. 
/// </exception> /// <exception cref="ArgumentOutOfRangeException"> /// The value of <paramref name="quorum"/> must be well known quorum value. /// Valid values are "one", "quorum", "all", and "default". /// </exception> public Quorum(string quorum) { if (string.IsNullOrWhiteSpace(quorum)) { throw new ArgumentNullException("quorum"); } int tmp; if (QuorumStrMap.TryGetValue(quorum.ToLowerInvariant(), out tmp)) { quorumValue = tmp; } else { throw new ArgumentOutOfRangeException("quorum"); } } /// <summary> /// Initializes a new instance of the <see cref="Quorum"/> class. /// </summary> /// <param name="quorum">An integer, representing the number of nodes to use for the quorum.</param> /// <exception cref="ArgumentOutOfRangeException"> /// The quorum value must be either a positive integer, 0, or between [-5,-2] for special cases. /// </exception> public Quorum(int quorum) { if (quorum >= 0) { quorumValue = quorum; } else { if (quorum >= -5 && quorum <= -2) { quorumValue = quorum; } else { throw new ArgumentOutOfRangeException("quorum"); } } } #pragma warning disable 3019 [CLSCompliant(false)] internal Quorum(uint quorum) : this((int)quorum) { } #pragma warning restore 3019 /// <summary> /// Cast the value of this <see cref="Quorum"/> to an <see cref="Int32"/>. /// </summary> /// <param name="quorum">The <see cref="Quorum"/> value to cast to an <see cref="Int32"/>.</param> /// <returns>An <see cref="Int32"/> based on the value of the this <see cref="Quorum"/>.</returns> public static implicit operator int(Quorum quorum) { return (int)quorum.quorumValue; } /// <summary> /// Cast the value of this <see cref="Int32"/> to a <see cref="Quorum"/>. 
/// </summary> /// <param name="quorum">The <see cref="Int32"/> value to cast to a <see cref="Quorum"/>.</param> /// <returns>A <see cref="Quorum"/> based on the value of the this <see cref="Int32"/>.</returns> public static explicit operator Quorum(int quorum) { return new Quorum(quorum); } /// <summary> /// Cast the value of this <see cref="Quorum"/> to a <see cref="String"/>. /// </summary> /// <param name="quorum">The <see cref="Quorum"/> value to cast to a <see cref="String"/>.</param> /// <returns>A <see cref="String"/> based on the value of the this <see cref="Quorum"/>.</returns> public static implicit operator string(Quorum quorum) { return quorum.ToString(); } /// <summary> /// Cast the value of this <see cref="String"/> to a <see cref="Quorum"/>. /// </summary> /// <param name="quorum">The <see cref="String"/> value to cast to a <see cref="Quorum"/>.</param> /// <returns>A <see cref="Quorum"/> based on the value of the this <see cref="String"/>.</returns> public static explicit operator Quorum(string quorum) { return new Quorum(quorum); } /// <summary> /// Cast the value of this <see cref="Quorum"/> to a <see cref="UInt32"/>. /// </summary> /// <param name="quorum">The <see cref="Quorum"/> value to cast to a <see cref="UInt32"/>.</param> /// <returns>A <see cref="UInt32"/> based on the value of the this <see cref="Quorum"/>.</returns> [CLSCompliant(false)] public static implicit operator uint(Quorum quorum) { /* * NB: this is the default since the defaultValue attribute for quorum values * is default(uint) as well. * See DtUpdateReq, for instance. */ uint rv = default(uint); if (quorum != null) { rv = (uint)quorum.quorumValue; } return rv; } /// <summary> /// Cast the value of this <see cref="UInt32"/> to a <see cref="Quorum"/>. 
/// </summary> /// <param name="quorum">The <see cref="UInt32"/> value to cast to a <see cref="Quorum"/>.</param> /// <returns>A <see cref="Quorum"/> based on the value of the this <see cref="UInt32"/>.</returns> [CLSCompliant(false)] public static explicit operator Quorum(uint quorum) { return new Quorum(quorum); } /// <summary> /// Returns a string that represents the Quorum value. /// </summary> /// <returns> /// A string that represents the Quorum value. /// Well known strings such as "one", "quorum", "all", and "default" are returned if possible. /// If value is not a well known string, it's <see cref="Int32.ToString()"/> value will be used. /// </returns> public override string ToString() { string tmp; if (QuorumIntMap.TryGetValue(quorumValue, out tmp)) { return tmp; } else { return quorumValue.ToString(); } } /// <summary> /// Determines whether the specified object is equal to the current object. /// </summary> /// <param name="obj">The object to compare with the current object.</param> /// <returns><b>true</b> if the specified object is equal to the current object, otherwise, <b>false</b>.</returns> public override bool Equals(object obj) { return Equals(obj as Quorum); } /// <summary> /// Determines whether the specified object is equal to the current object. /// </summary> /// <param name="other">The object to compare with the current object.</param> /// <returns><b>true</b> if the specified object is equal to the current object, otherwise, <b>false</b>.</returns> public bool Equals(Quorum other) { if (object.ReferenceEquals(other, null)) { return false; } if (object.ReferenceEquals(other, this)) { return true; } return this.GetHashCode() == other.GetHashCode(); } /// <summary> /// Returns a hash code for the current object. 
/// </summary> /// <returns>A hash code for the current object.</returns> public override int GetHashCode() { return quorumValue.GetHashCode(); } /// <summary> /// A collection of well known static quorum values, pre-initialized for use. /// </summary> public static class WellKnown { private static readonly Quorum OneStatic = new Quorum(Quorum.OneAsInt); private static readonly Quorum QuorumStatic = new Quorum(Quorum.QuorumAsInt); private static readonly Quorum AllStatic = new Quorum(Quorum.AllAsInt); private static readonly Quorum DefaultStatic = new Quorum(Quorum.DefaultAsInt); /// <summary> /// The "one" Quorum instance. /// Only one replica must respond to a read or write request before it is considered successful. /// </summary> public static Quorum One { get { return OneStatic; } } /// <summary> /// The "quorum" Quorum instance. /// A majority of replicas must respond to a read or write request before it is considered successful. /// </summary> public static Quorum Quorum { get { return QuorumStatic; } } /// <summary> /// The "all" Quorum instance. /// All replicas that must respond to a read or write request before it is considered successful. /// </summary> public static Quorum All { get { return AllStatic; } } /// <summary> /// The "default" Quorum instance. /// The default number of replicas must respond to a read or write request before it is considered successful. /// Riak will use the bucket (or global) default value if this <see cref="Quorum" /> is used. /// The true default value can be found in a bucket's properties, and varies for different parameters. /// </summary> public static Quorum Default { get { return DefaultStatic; } } } } }
rob-somerville/riak-dotnet-client
src/RiakClient/Quorum.cs
C#
apache-2.0
11,310
package com.dankideacentral.dic.activities; import android.content.res.Configuration; import android.os.Bundle; import android.preference.PreferenceActivity; import android.support.annotation.LayoutRes; import android.support.annotation.Nullable; import android.support.v7.app.ActionBar; import android.support.v7.app.AppCompatDelegate; import android.support.v7.widget.Toolbar; import android.view.MenuInflater; import android.view.View; import android.view.ViewGroup; /** * A {@link android.preference.PreferenceActivity} which implements and proxies the necessary calls * to be used with AppCompat. */ public abstract class AppCompatPreferenceActivity extends PreferenceActivity { private AppCompatDelegate mDelegate; @Override protected void onCreate(Bundle savedInstanceState) { getDelegate().installViewFactory(); getDelegate().onCreate(savedInstanceState); super.onCreate(savedInstanceState); } @Override protected void onPostCreate(Bundle savedInstanceState) { super.onPostCreate(savedInstanceState); getDelegate().onPostCreate(savedInstanceState); } public ActionBar getSupportActionBar() { return getDelegate().getSupportActionBar(); } public void setSupportActionBar(@Nullable Toolbar toolbar) { getDelegate().setSupportActionBar(toolbar); } @Override public MenuInflater getMenuInflater() { return getDelegate().getMenuInflater(); } @Override public void setContentView(@LayoutRes int layoutResID) { getDelegate().setContentView(layoutResID); } @Override public void setContentView(View view) { getDelegate().setContentView(view); } @Override public void setContentView(View view, ViewGroup.LayoutParams params) { getDelegate().setContentView(view, params); } @Override public void addContentView(View view, ViewGroup.LayoutParams params) { getDelegate().addContentView(view, params); } @Override protected void onPostResume() { super.onPostResume(); getDelegate().onPostResume(); } @Override protected void onTitleChanged(CharSequence title, int color) { super.onTitleChanged(title, 
color); getDelegate().setTitle(title); } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); getDelegate().onConfigurationChanged(newConfig); } @Override protected void onStop() { super.onStop(); getDelegate().onStop(); } @Override protected void onDestroy() { super.onDestroy(); getDelegate().onDestroy(); } public void invalidateOptionsMenu() { getDelegate().invalidateOptionsMenu(); } private AppCompatDelegate getDelegate() { if (mDelegate == null) { mDelegate = AppCompatDelegate.create(this, null); } return mDelegate; } }
srowhani/migrate-app
app/src/main/java/com/dankideacentral/dic/activities/AppCompatPreferenceActivity.java
Java
apache-2.0
3,008
/* * Copyright (c) 2012 Binson Zhang. All rights reserved. * * @author Binson Zhang <bin183cs@gmail.com> * @date 2012-12-11 */ #include <iostream> #include <vector> #include <algorithm> using namespace std; class Solution { public: struct TwoSum { int sum; size_t a; size_t b; TwoSum(int sum = 0, size_t a = 0, size_t b = 0): sum(sum), a(a), b(b) {} bool operator<(const TwoSum& other) const { if (sum == other.sum) { if (a == other.a) { return b < other.b; } return a < other.a; } return sum < other.sum; } }; struct SumABCmp { bool operator()(const TwoSum& lhs, const TwoSum& rhs) const { if (lhs.sum == rhs.sum) { if (lhs.a == rhs.a) { return lhs.b < rhs.b; } return lhs.a < rhs.a; } return lhs.sum < rhs.sum; } } sumabcmp; struct SumCmp { bool operator()(const TwoSum& lhs, const TwoSum& rhs) const { return lhs.sum < rhs.sum; } } sumcmp; vector<vector<int> > fourSum(vector<int> &num, int target) { sort(num.begin(), num.end()); size_t n = num.size(); vector<TwoSum> sums; for (size_t i = 0; i < n; ++i) { for (size_t j = i + 1; j < n; ++j) { sums.push_back(TwoSum(num[i] + num[j], i, j)); } } sort(sums.begin(), sums.end(), sumabcmp); vector<vector<int> > vecs; TwoSum val; for (size_t i = 0; i < n; ++i) { if (i > 0 && num[i - 1] == num[i]) continue; for (size_t j = i + 1; j + 2 < n; ++j) { if (j > i + 1 && num[j - 1] == num[j]) continue; val.sum = target - (num[i] + num[j]); pair<vector<TwoSum>::iterator,vector<TwoSum>::iterator> bounds = equal_range(sums.begin(), sums.end(), val, sumcmp); bool prv = false; int prv_third = 0; for (vector<TwoSum>::iterator it = bounds.first; it != bounds.second; ++it) { if (it->a <= j) continue; if (!prv) { prv = true; } else if (prv_third == num[it->a]) { continue; } prv_third = num[it->a]; vecs.push_back(vector<int>()); vector<int>& v = vecs.back(); v.push_back(num[i]); v.push_back(num[j]); v.push_back(num[it->a]); v.push_back(num[it->b]); } } } return vecs; } }; #include "util.h" int main(int argc, char **argv) { std::cout << "------" << 
argv[0] << "------" << std::endl; // int arr[] = {1, 0, -1, 0, -2, 2}; // int arr[] = {-2, -1, 0, 0, 1, 2}; // int arr[] = {0}; int arr[] = {0, 0, 0, 0}; int sz = sizeof(arr) / sizeof(arr[0]); vector<int> num(arr, arr + sz); int target = 0; std::cout << "Input:\n"; Output(num); Solution s; vector<vector<int> > vecs = s.fourSum(num, target); std::cout << "Output:\n"; Output(vecs); return 0; }
bin3/challenge
src/leetcode/4Sum.cpp
C++
apache-2.0
2,884
package ${topLevelDomain}.${companyName}.${productName}.model.repository.user; import ${topLevelDomain}.${companyName}.${productName}.model.data.UserToken; import ${topLevelDomain}.${companyName}.${productName}.model.repository.ApplicationTestCase; import org.junit.Assert; import org.junit.Test; import javax.annotation.Resource; import java.sql.Timestamp; import java.util.UUID; public class TestUserToken extends ApplicationTestCase { @Resource(name = "UserTokenRepository") private UserTokenRepository userTokenRepository; @Test public void testUpdateByEmail() { UserToken data = new UserToken(); data.setCreateTs(new Timestamp(System.currentTimeMillis())); data.setEmailAddress("from@gmail.com"); data.setTokenUuid(UUID.randomUUID().toString()); data.setUserUuid("user1"); userTokenRepository.add(data); UserToken retr = userTokenRepository.retrieve(UserToken.class, data.getTokenUuid()); Assert.assertNotNull(retr); int rows = userTokenRepository.updateByEmail("from@gmail.com", "to@gmail.com"); Assert.assertEquals(1, rows); retr = userTokenRepository.retrieve(UserToken.class, data.getTokenUuid()); Assert.assertNotNull(retr); Assert.assertEquals("to@gmail.com", retr.getEmailAddress()); } }
dgestep/big-code-bang
src/main/resources/data-repository-test-user-templates/TestUserToken.java
Java
apache-2.0
1,331
""" Provides functionality to emulate keyboard presses on host machine. For more details about this component, please refer to the documentation at https://home-assistant.io/components/keyboard/ """ import voluptuous as vol from homeassistant.const import ( SERVICE_MEDIA_NEXT_TRACK, SERVICE_MEDIA_PLAY_PAUSE, SERVICE_MEDIA_PREVIOUS_TRACK, SERVICE_VOLUME_DOWN, SERVICE_VOLUME_MUTE, SERVICE_VOLUME_UP) REQUIREMENTS = ['pyuserinput==0.1.11'] DOMAIN = 'keyboard' TAP_KEY_SCHEMA = vol.Schema({}) def setup(hass, config): """Listen for keyboard events.""" import pykeyboard # pylint: disable=import-error keyboard = pykeyboard.PyKeyboard() keyboard.special_key_assignment() hass.services.register(DOMAIN, SERVICE_VOLUME_UP, lambda service: keyboard.tap_key(keyboard.volume_up_key), schema=TAP_KEY_SCHEMA) hass.services.register(DOMAIN, SERVICE_VOLUME_DOWN, lambda service: keyboard.tap_key(keyboard.volume_down_key), schema=TAP_KEY_SCHEMA) hass.services.register(DOMAIN, SERVICE_VOLUME_MUTE, lambda service: keyboard.tap_key(keyboard.volume_mute_key), schema=TAP_KEY_SCHEMA) hass.services.register(DOMAIN, SERVICE_MEDIA_PLAY_PAUSE, lambda service: keyboard.tap_key(keyboard.media_play_pause_key), schema=TAP_KEY_SCHEMA) hass.services.register(DOMAIN, SERVICE_MEDIA_NEXT_TRACK, lambda service: keyboard.tap_key(keyboard.media_next_track_key), schema=TAP_KEY_SCHEMA) hass.services.register(DOMAIN, SERVICE_MEDIA_PREVIOUS_TRACK, lambda service: keyboard.tap_key(keyboard.media_prev_track_key), schema=TAP_KEY_SCHEMA) return True
PetePriority/home-assistant
homeassistant/components/keyboard/__init__.py
Python
apache-2.0
2,078
/* * Spatial Computation - Manifestation of Spatial computing. * Copyright © 2014 Pranav Kant * * This code is available to you under Apache License Version 2.0, January * 2014. You can grab a copy of license from the same repository from where you * fetched this code. */ #include "llvm/ADT/DenseMap.h" #include "llvm/Analysis/CFGPrinter.h" #include "llvm/ADT/PostOrderIterator.h" #include "llvm/ADT/SCCIterator.h" #include "llvm/ADT/SmallVector.h" #include "llvm/Analysis/CFG.h" #include "llvm/IR/DataLayout.h" #include "llvm/IR/Instruction.h" #include "llvm/IR/Instructions.h" #include "llvm/IR/LLVMContext.h" #include "llvm/IR/Module.h" #include "llvm/IRReader/IRReader.h" #include "llvm/Pass.h" #include "llvm/PassManager.h" #include "llvm/Support/raw_ostream.h" #include "llvm/Support/SourceMgr.h" #include "llvm/Support/InstIterator.h" #include "llvm/Support/GraphWriter.h" #include "waves.hpp" using namespace llvm; /* Wave class for waves pass. The main task of this pass is to annotate waves in a control flow graph. */ struct Waves : public FunctionPass, public SmallVectorImpl <std::pair<const BasicBlock*, const BasicBlock*> > { static char ID; Waves() : FunctionPass(ID), SmallVectorImpl(10){} bool runOnFunction(Function &F) { if (F.getName() == "main") return false; SmallVectorImpl<std::pair<const BasicBlock*, const BasicBlock*> > *res = this; WaveScalar obj; obj.annotateWaves(F, res); return false; } }; char Waves::ID = 0; static RegisterPass<Waves> X("annotateWaves", "Annotate Waves in Control Flow Graph", false, false);
pranavk/spatial-computing
wavesPass.cpp
C++
apache-2.0
1,676
// ---------------------------------------------------------------------------- // Copyright 2006-2010, GeoTelematic Solutions, Inc. // All rights reserved // ---------------------------------------------------------------------------- // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // ---------------------------------------------------------------------------- // Change History: // 2010/04/25 Martin D. Flynn // -Initial release // ---------------------------------------------------------------------------- /** *** Contains the ICare device communication server support. **/ package org.opengts.servers.icare;
aldridged/gtg-gts-bhp
src/org/opengts/servers/icare/package-info.java
Java
apache-2.0
1,133
package tech.gusavila92.websocketclient.exceptions; /** * Exception which indicates that the received schema is invalid * * @author Gustavo Avila * */ public class IllegalSchemeException extends IllegalArgumentException { public IllegalSchemeException(String message) { super(message); } }
gusavila92/java-android-websocket-client
src/main/java/tech/gusavila92/websocketclient/exceptions/IllegalSchemeException.java
Java
apache-2.0
301
// Backbone.Syphon.KeySplitter // --------------------------- // This function is used to split DOM element keys in to an array // of parts, which are then used to create a nested result structure. // returning `["foo", "bar"]` results in `{foo: { bar: "value" }}`. // // Override this method to use a custom key splitter, such as: // `<input name="foo.bar.baz">`, `return key.split(".")` Backbone.Syphon.KeySplitter = function(key){ var matches = key.match(/[^\[\]]+/g); if (key.indexOf("[]") === key.length - 2){ lastKey = matches.pop(); matches.push([lastKey]); } return matches; }
CenturyLinkCloud/EstimatorTCO
source/js/libs/backbone.syphon/src/backbone.syphon.keysplitter.js
JavaScript
apache-2.0
604
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using Lucene.Net.Index; namespace Lucene.Net.Analysis.Payloads { /// <summary> /// Mainly for use with the DelimitedPayloadTokenFilter, converts char buffers to Payload /// <p/> /// NOTE: this interface is subject to change /// </summary> public interface PayloadEncoder { Payload Encode(char[] buffer); /// <summary> /// Convert a char array to a <see cref="Payload"/> /// </summary> /// <returns>An encoded <see cref="Payload"/></returns> Payload Encode(char[] buffer, int offset, int length); } }
Anomalous-Software/Lucene.NET
src/contrib/Analyzers/Payloads/PayloadEncoder.cs
C#
apache-2.0
1,425
import * as THREE from 'three'; import STORE from 'store'; import { MAP_WS } from 'store/websocket'; import _ from 'lodash'; import { drawSegmentsFromPoints, drawDashedLineFromPoints, drawShapeFromPoints, } from 'utils/draw'; import Text3D, { TEXT_ALIGN } from 'renderer/text3d'; import TrafficSigns from 'renderer/traffic_controls/traffic_signs'; import TrafficSignals from 'renderer/traffic_controls/traffic_signals'; import stopSignMaterial from 'assets/models/stop_sign.mtl'; import stopSignObject from 'assets/models/stop_sign.obj'; import yieldSignMaterial from 'assets/models/yield_sign.mtl'; import yieldSignObject from 'assets/models/yield_sign.obj'; const STOP_SIGN_SCALE = 0.01; const YIELD_SIGN_SCALE = 1.5; const colorMapping = { YELLOW: 0XDAA520, WHITE: 0xCCCCCC, CORAL: 0xFF7F50, RED: 0xFF6666, GREEN: 0x006400, BLUE: 0x30A5FF, PURE_WHITE: 0xFFFFFF, DEFAULT: 0xC0C0C0, }; export default class Map { constructor() { this.textRender = new Text3D(); this.hash = -1; this.data = {}; this.initialized = false; this.elementKindsDrawn = ''; this.trafficSignals = new TrafficSignals(); this.stopSigns = new TrafficSigns( stopSignMaterial, stopSignObject, STOP_SIGN_SCALE, ); this.yieldSigns = new TrafficSigns( yieldSignMaterial, yieldSignObject, YIELD_SIGN_SCALE, ); this.zOffsetFactor = 1; } // The result will be the all the elements in current but not in data. diffMapElements(elementIds, data) { const result = {}; let empty = true; for (const kind in elementIds) { if (!this.shouldDrawObjectOfThisElementKind(kind)) { continue; } result[kind] = []; const newIds = elementIds[kind]; const oldData = data[kind]; for (let i = 0; i < newIds.length; ++i) { const found = oldData ? oldData.find((old) => old.id.id === newIds[i]) : false; if (!found) { empty = false; result[kind].push(newIds[i]); } } } return empty ? 
{} : result; } addLaneMesh(laneType, points) { switch (laneType) { case 'DOTTED_YELLOW': return drawDashedLineFromPoints( points, colorMapping.YELLOW, 4, 3, 3, this.zOffsetFactor, 1, false, ); case 'DOTTED_WHITE': return drawDashedLineFromPoints( points, colorMapping.WHITE, 2, 0.5, 0.25, this.zOffsetFactor, 0.4, false, ); case 'SOLID_YELLOW': return drawSegmentsFromPoints( points, colorMapping.YELLOW, 3, this.zOffsetFactor, false, ); case 'SOLID_WHITE': return drawSegmentsFromPoints( points, colorMapping.WHITE, 3, this.zOffsetFactor, false, ); case 'DOUBLE_YELLOW': const left = drawSegmentsFromPoints( points, colorMapping.YELLOW, 2, this.zOffsetFactor, false, ); const right = drawSegmentsFromPoints( points.map((point) => new THREE.Vector3(point.x + 0.3, point.y + 0.3, point.z)), colorMapping.YELLOW, 3, this.zOffsetFactor, false, ); left.add(right); return left; case 'CURB': return drawSegmentsFromPoints( points, colorMapping.CORAL, 3, this.zOffsetFactor, false, ); default: return drawSegmentsFromPoints( points, colorMapping.DEFAULT, 3, this.zOffsetFactor, false, ); } } addLane(lane, coordinates, scene) { const drewObjects = []; const centralLine = lane.centralCurve.segment; centralLine.forEach((segment) => { const points = coordinates.applyOffsetToArray(segment.lineSegment.point); const centerLine = drawSegmentsFromPoints( points, colorMapping.GREEN, 1, this.zOffsetFactor, false); centerLine.name = `CentralLine-${lane.id.id}`; scene.add(centerLine); drewObjects.push(centerLine); }); const rightLaneType = lane.rightBoundary.boundaryType[0].types[0]; // TODO: this is a temp. fix for repeated boundary types. 
lane.rightBoundary.curve.segment.forEach((segment, index) => { const points = coordinates.applyOffsetToArray(segment.lineSegment.point); const boundary = this.addLaneMesh(rightLaneType, points); boundary.name = `RightBoundary-${lane.id.id}`; scene.add(boundary); drewObjects.push(boundary); }); const leftLaneType = lane.leftBoundary.boundaryType[0].types[0]; lane.leftBoundary.curve.segment.forEach((segment, index) => { const points = coordinates.applyOffsetToArray(segment.lineSegment.point); const boundary = this.addLaneMesh(leftLaneType, points); boundary.name = `LeftBoundary-${lane.id.id}`; scene.add(boundary); drewObjects.push(boundary); }); return drewObjects; } addLaneId(lane, coordinates, scene) { const centralLine = lane.centralCurve.segment; let position = _.get(centralLine, '[0].startPosition'); if (position) { position.z = 0.04; position = coordinates.applyOffset(position); } const rotation = { x: 0.0, y: 0.0, z: 0.0 }; const points = _.get(centralLine, '[0].lineSegment.point', []); if (points.length >= 2) { const p1 = points[0]; const p2 = points[1]; rotation.z = Math.atan2(p2.y - p1.y, p2.x - p1.x); } const text = this.textRender.drawText( lane.id.id, scene, colorMapping.WHITE, TEXT_ALIGN.LEFT, ); if (text) { const textPosition = position || _.get(points, '[0]'); if (textPosition) { text.position.set(textPosition.x, textPosition.y, textPosition.z); text.rotation.set(rotation.x, rotation.y, rotation.z); } text.visible = false; scene.add(text); } return text; } addRoad(road, coordinates, scene) { const drewObjects = []; road.section.forEach((section) => { section.boundary.outerPolygon.edge.forEach((edge) => { edge.curve.segment.forEach((segment, index) => { const points = coordinates.applyOffsetToArray(segment.lineSegment.point); const boundary = this.addLaneMesh('CURB', points); boundary.name = `Road-${road.id.id}`; scene.add(boundary); drewObjects.push(boundary); }); }); }); return drewObjects; } addBorder(borderPolygon, color, coordinates, scene) { 
const drewObjects = []; const border = coordinates.applyOffsetToArray(borderPolygon.polygon.point); border.push(border[0]); const mesh = drawSegmentsFromPoints( border, color, 2, this.zOffsetFactor, true, false, 1.0, ); scene.add(mesh); drewObjects.push(mesh); return drewObjects; } addParkingSpaceId(parkingSpace, coordinates, scene) { const text = this.textRender.drawText(parkingSpace.id.id, scene, colorMapping.WHITE); const points = _.get(parkingSpace, 'polygon.point'); if (points && points.length >= 3 && text) { const point1 = points[0]; const point2 = points[1]; const point3 = points[2]; let textPosition = { x: (point1.x + point3.x) / 2, y: (point1.y + point3.y) / 2, z: 0.04, }; textPosition = coordinates.applyOffset(textPosition); const textRotationZ = Math.atan2(point2.y - point1.y, point2.x - point1.x); text.position.set(textPosition.x, textPosition.y, textPosition.z); text.rotation.set(0, 0, textRotationZ); text.visible = false; scene.add(text); } return text; } addZone(zone, color, coordinates, scene) { const drewObjects = []; const border = coordinates.applyOffsetToArray(zone.polygon.point); border.push(border[0]); const zoneMaterial = new THREE.MeshBasicMaterial({ color, transparent: true, opacity: 0.15, }); const zoneShape = drawShapeFromPoints( border, zoneMaterial, false, this.zOffsetFactor * 3, false, ); scene.add(zoneShape); drewObjects.push(zoneShape); const mesh = drawSegmentsFromPoints( border, color, 2, this.zOffsetFactor, true, false, 1.0, ); scene.add(mesh); drewObjects.push(mesh); return drewObjects; } addCurve(lines, color, coordinates, scene) { const drewObjects = []; lines.forEach((line) => { line.segment.forEach((segment) => { const points = coordinates.applyOffsetToArray(segment.lineSegment.point); const mesh = drawSegmentsFromPoints( points, color, 5, this.zOffsetFactor * 2, false, ); scene.add(mesh); drewObjects.push(mesh); }); }); return drewObjects; } addStopLine(stopLine, coordinates, scene) { const drewObjects = this.addCurve( 
stopLine, colorMapping.PURE_WHITE, coordinates, scene, ); return drewObjects; } removeDrewText(textMesh, scene) { if (textMesh) { textMesh.children.forEach((c) => c.visible = false); scene.remove(textMesh); } } removeDrewObjects(drewObjects, scene) { if (drewObjects) { drewObjects.forEach((object) => { scene.remove(object); if (object.geometry) { object.geometry.dispose(); } if (object.material) { object.material.dispose(); } }); } } removeAllElements(scene) { this.removeExpiredElements([], scene); this.trafficSignals.removeAll(scene); this.stopSigns.removeAll(scene); this.yieldSigns.removeAll(scene); } removeExpiredElements(elementIds, scene) { const newData = {}; for (const kind in this.data) { const drawThisKind = this.shouldDrawObjectOfThisElementKind(kind); newData[kind] = []; const oldDataOfThisKind = this.data[kind]; const currentIds = elementIds[kind]; oldDataOfThisKind.forEach((oldData) => { if (drawThisKind && currentIds && currentIds.includes(oldData.id.id)) { newData[kind].push(oldData); } else { this.removeDrewObjects(oldData.drewObjects, scene); this.removeDrewText(oldData.text, scene); } }); } this.data = newData; } // I do not want to do premature optimization either. Should the // performance become an issue, all the diff should be done at the server // side. This also means that the server should maintain a state of // (possibly) visible elements, presummably in the global store. 
appendMapData(newData, coordinates, scene) { for (const kind in newData) { if (!newData[kind]) { continue; } if (!this.data[kind]) { this.data[kind] = []; } for (let i = 0; i < newData[kind].length; ++i) { switch (kind) { case 'lane': const lane = newData[kind][i]; this.data[kind].push(Object.assign(newData[kind][i], { drewObjects: this.addLane(lane, coordinates, scene), text: this.addLaneId(lane, coordinates, scene), })); break; case 'clearArea': this.data[kind].push(Object.assign(newData[kind][i], { drewObjects: this.addZone( newData[kind][i], colorMapping.YELLOW, coordinates, scene, ), })); break; case 'crosswalk': this.data[kind].push(Object.assign(newData[kind][i], { drewObjects: this.addZone( newData[kind][i], colorMapping.PURE_WHITE, coordinates, scene, ), })); break; case 'junction': this.data[kind].push(Object.assign(newData[kind][i], { drewObjects: this.addBorder( newData[kind][i], colorMapping.BLUE, coordinates, scene, ), })); break; case 'pncJunction': this.data[kind].push(Object.assign(newData[kind][i], { drewObjects: this.addZone( newData[kind][i], colorMapping.BLUE, coordinates, scene, ), })); break; case 'signal': this.data[kind].push(Object.assign(newData[kind][i], { drewObjects: this.addStopLine( newData[kind][i].stopLine, coordinates, scene, ), })); this.trafficSignals.add([newData[kind][i]], coordinates, scene); break; case 'stopSign': this.data[kind].push(Object.assign(newData[kind][i], { drewObjects: this.addStopLine( newData[kind][i].stopLine, coordinates, scene, ), })); this.stopSigns.add([newData[kind][i]], coordinates, scene); break; case 'yield': this.data[kind].push(Object.assign(newData[kind][i], { drewObjects: this.addStopLine( newData[kind][i].stopLine, coordinates, scene, ), })); this.yieldSigns.add([newData[kind][i]], coordinates, scene); break; case 'road': const road = newData[kind][i]; this.data[kind].push(Object.assign(newData[kind][i], { drewObjects: this.addRoad(road, coordinates, scene), })); break; case 'parkingSpace': 
this.data[kind].push(Object.assign(newData[kind][i], { drewObjects: this.addBorder( newData[kind][i], colorMapping.YELLOW, coordinates, scene, ), text: this.addParkingSpaceId(newData[kind][i], coordinates, scene), })); break; case 'speedBump': this.data[kind].push(Object.assign(newData[kind][i], { drewObjects: this.addCurve( newData[kind][i].position, colorMapping.RED, coordinates, scene, ), })); break; default: this.data[kind].push(newData[kind][i]); break; } } } } shouldDrawObjectOfThisElementKind(kind) { // Ex: mapping 'lane' to 'showMapLane' option const optionName = `showMap${kind[0].toUpperCase()}${kind.slice(1)}`; // NOTE: return true if the option is not found return STORE.options[optionName] !== false; } shouldDrawTextOfThisElementKind(kind) { // showMapLaneId option controls both laneId and parkingSpaceId return STORE.options.showMapLaneId && ['parkingSpace', 'lane'].includes(kind); } updateText() { for (const kind in this.data) { const isVisible = this.shouldDrawTextOfThisElementKind(kind); this.data[kind].forEach((element) => { if (element.text) { element.text.visible = isVisible; } }); } } updateIndex(hash, elementIds, scene) { if (STORE.hmi.inNavigationMode) { MAP_WS.requestRelativeMapData(); } else { this.updateText(); let newElementKindsDrawn = ''; for (const kind of Object.keys(elementIds).sort()) { if (this.shouldDrawObjectOfThisElementKind(kind)) { newElementKindsDrawn += kind; } } if (hash !== this.hash || this.elementKindsDrawn !== newElementKindsDrawn) { this.hash = hash; this.elementKindsDrawn = newElementKindsDrawn; const diff = this.diffMapElements(elementIds, this.data); if (!_.isEmpty(diff) || !this.initialized) { MAP_WS.requestMapData(diff); this.initialized = true; } this.removeExpiredElements(elementIds, scene); if (!this.shouldDrawObjectOfThisElementKind('signal')) { this.trafficSignals.removeAll(scene); } else { this.trafficSignals.removeExpired(elementIds.signal, scene); } if (!this.shouldDrawObjectOfThisElementKind('stopSign')) { 
this.stopSigns.removeAll(scene); } else { this.stopSigns.removeExpired(elementIds.stopSign, scene); } if (!this.shouldDrawObjectOfThisElementKind('yield')) { this.yieldSigns.removeAll(scene); } else { this.yieldSigns.removeExpired(elementIds.yield, scene); } } } // Do not set zOffset in camera view, since zOffset will affect the accuracy of matching // between hdmap and camera image this.zOffsetFactor = STORE.options.showCameraView ? 0 : 1; } update(world) { this.trafficSignals.updateTrafficLightStatus(world.perceivedSignal); } }
xiaoxq/apollo
modules/dreamview/frontend/src/renderer/map.js
JavaScript
apache-2.0
16,450
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. #nullable disable using System; using System.Collections.Immutable; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.CodeGeneration; using Microsoft.CodeAnalysis.FindSymbols; using Microsoft.CodeAnalysis.LanguageServices; using Microsoft.CodeAnalysis.PooledObjects; using Microsoft.CodeAnalysis.Shared.Extensions; using Roslyn.Utilities; namespace Microsoft.CodeAnalysis.GenerateMember.GenerateVariable { internal abstract partial class AbstractGenerateVariableService<TService, TSimpleNameSyntax, TExpressionSyntax> { private partial class State { public INamedTypeSymbol ContainingType { get; private set; } public INamedTypeSymbol TypeToGenerateIn { get; private set; } public IMethodSymbol ContainingMethod { get; private set; } public bool IsStatic { get; private set; } public bool IsConstant { get; private set; } public bool IsIndexer { get; private set; } public bool IsContainedInUnsafeType { get; private set; } public ImmutableArray<IParameterSymbol> Parameters { get; private set; } // Just the name of the method. i.e. "Goo" in "Goo" or "X.Goo" public SyntaxToken IdentifierToken { get; private set; } // The entire expression containing the name. i.e. 
"X.Goo" public TExpressionSyntax SimpleNameOrMemberAccessExpressionOpt { get; private set; } public ITypeSymbol TypeMemberType { get; private set; } public ITypeSymbol LocalType { get; private set; } public bool OfferReadOnlyFieldFirst { get; private set; } public bool IsWrittenTo { get; private set; } public bool IsOnlyWrittenTo { get; private set; } public bool IsInConstructor { get; private set; } public bool IsInRefContext { get; private set; } public bool IsInInContext { get; private set; } public bool IsInOutContext { get; private set; } public bool IsInMemberContext { get; private set; } public bool IsInExecutableBlock { get; private set; } public bool IsInConditionalAccessExpression { get; private set; } public Location AfterThisLocation { get; private set; } public Location BeforeThisLocation { get; private set; } public static async Task<State> GenerateAsync( TService service, SemanticDocument document, SyntaxNode interfaceNode, CancellationToken cancellationToken) { var state = new State(); if (!await state.TryInitializeAsync(service, document, interfaceNode, cancellationToken).ConfigureAwait(false)) { return null; } return state; } private async Task<bool> TryInitializeAsync( TService service, SemanticDocument document, SyntaxNode node, CancellationToken cancellationToken) { if (service.IsIdentifierNameGeneration(node)) { // Cases that we deal with currently: // // 1) expr.Goo // 2) expr->Goo // 3) Goo if (!TryInitializeSimpleName(service, document, (TSimpleNameSyntax)node, cancellationToken)) { return false; } } else if (service.IsExplicitInterfaceGeneration(node)) { // 4) bool IGoo.NewProp if (!TryInitializeExplicitInterface(service, document, node, cancellationToken)) { return false; } } else { return false; } // Ok. It either didn't bind to any symbols, or it bound to a symbol but with // errors. In the former case we definitely want to offer to generate a field. 
In // the latter case, we want to generate a field *unless* there's an existing member // with the same name. Note: it's ok if there's a method with the same name. var existingMembers = TypeToGenerateIn.GetMembers(IdentifierToken.ValueText) .Where(m => m.Kind != SymbolKind.Method); if (existingMembers.Any()) { // TODO: Code coverage // There was an existing method that the new method would clash with. return false; } if (cancellationToken.IsCancellationRequested) { return false; } TypeToGenerateIn = await SymbolFinder.FindSourceDefinitionAsync(TypeToGenerateIn, document.Project.Solution, cancellationToken).ConfigureAwait(false) as INamedTypeSymbol; if (!ValidateTypeToGenerateIn(TypeToGenerateIn, IsStatic, ClassInterfaceModuleStructTypes)) { return false; } IsContainedInUnsafeType = service.ContainingTypesOrSelfHasUnsafeKeyword(TypeToGenerateIn); return CanGenerateLocal() || CodeGenerator.CanAdd(document.Project.Solution, TypeToGenerateIn, cancellationToken); } internal bool CanGeneratePropertyOrField() { return ContainingType is { IsImplicitClass: false, Name: not WellKnownMemberNames.TopLevelStatementsEntryPointTypeName }; } internal bool CanGenerateLocal() { // !this.IsInMemberContext prevents us offering this fix for `x.goo` where `goo` does not exist return !IsInMemberContext && IsInExecutableBlock; } internal bool CanGenerateParameter() { // !this.IsInMemberContext prevents us offering this fix for `x.goo` where `goo` does not exist // Workaround: The compiler returns IsImplicitlyDeclared = false for <Main>$. 
return ContainingMethod is { IsImplicitlyDeclared: false, Name: not WellKnownMemberNames.TopLevelStatementsEntryPointMethodName } && !IsInMemberContext && !IsConstant; } private bool TryInitializeExplicitInterface( TService service, SemanticDocument document, SyntaxNode propertyDeclaration, CancellationToken cancellationToken) { if (!service.TryInitializeExplicitInterfaceState( document, propertyDeclaration, cancellationToken, out var identifierToken, out var propertySymbol, out var typeToGenerateIn)) { return false; } IdentifierToken = identifierToken; TypeToGenerateIn = typeToGenerateIn; if (propertySymbol.ExplicitInterfaceImplementations.Any()) { return false; } cancellationToken.ThrowIfCancellationRequested(); var semanticModel = document.SemanticModel; ContainingType = semanticModel.GetEnclosingNamedType(IdentifierToken.SpanStart, cancellationToken); if (ContainingType == null) { return false; } if (!ContainingType.Interfaces.OfType<INamedTypeSymbol>().Contains(TypeToGenerateIn)) { return false; } IsIndexer = propertySymbol.IsIndexer; Parameters = propertySymbol.Parameters; TypeMemberType = propertySymbol.Type; // By default, make it readonly, unless there's already an setter defined. 
IsWrittenTo = propertySymbol.SetMethod != null; return true; } private bool TryInitializeSimpleName( TService service, SemanticDocument semanticDocument, TSimpleNameSyntax simpleName, CancellationToken cancellationToken) { if (!service.TryInitializeIdentifierNameState( semanticDocument, simpleName, cancellationToken, out var identifierToken, out var simpleNameOrMemberAccessExpression, out var isInExecutableBlock, out var isInConditionalAccessExpression)) { return false; } if (string.IsNullOrWhiteSpace(identifierToken.ValueText)) { return false; } IdentifierToken = identifierToken; SimpleNameOrMemberAccessExpressionOpt = simpleNameOrMemberAccessExpression; IsInExecutableBlock = isInExecutableBlock; IsInConditionalAccessExpression = isInConditionalAccessExpression; // If we're in a type context then we shouldn't offer to generate a field or // property. var syntaxFacts = semanticDocument.Document.GetLanguageService<ISyntaxFactsService>(); if (syntaxFacts.IsInNamespaceOrTypeContext(SimpleNameOrMemberAccessExpressionOpt)) { return false; } IsConstant = syntaxFacts.IsInConstantContext(SimpleNameOrMemberAccessExpressionOpt); // If we're not in a type, don't even bother. NOTE(cyrusn): We'll have to rethink this // for C# Script. cancellationToken.ThrowIfCancellationRequested(); var semanticModel = semanticDocument.SemanticModel; ContainingType = semanticModel.GetEnclosingNamedType(IdentifierToken.SpanStart, cancellationToken); if (ContainingType == null) { return false; } // Now, try to bind the invocation and see if it succeeds or not. if it succeeds and // binds uniquely, then we don't need to offer this quick fix. cancellationToken.ThrowIfCancellationRequested(); var semanticInfo = semanticModel.GetSymbolInfo(SimpleNameOrMemberAccessExpressionOpt, cancellationToken); cancellationToken.ThrowIfCancellationRequested(); if (semanticInfo.Symbol != null) { return false; } // Either we found no matches, or this was ambiguous. 
Either way, we might be able // to generate a method here. Determine where the user wants to generate the method // into, and if it's valid then proceed. cancellationToken.ThrowIfCancellationRequested(); if (!TryDetermineTypeToGenerateIn(semanticDocument, ContainingType, SimpleNameOrMemberAccessExpressionOpt, cancellationToken, out var typeToGenerateIn, out var isStatic)) { return false; } TypeToGenerateIn = typeToGenerateIn; IsStatic = isStatic; DetermineFieldType(semanticDocument, cancellationToken); var semanticFacts = semanticDocument.Document.GetLanguageService<ISemanticFactsService>(); IsInRefContext = semanticFacts.IsInRefContext(semanticModel, SimpleNameOrMemberAccessExpressionOpt, cancellationToken); IsInInContext = semanticFacts.IsInInContext(semanticModel, SimpleNameOrMemberAccessExpressionOpt, cancellationToken); IsInOutContext = semanticFacts.IsInOutContext(semanticModel, SimpleNameOrMemberAccessExpressionOpt, cancellationToken); IsWrittenTo = semanticFacts.IsWrittenTo(semanticModel, SimpleNameOrMemberAccessExpressionOpt, cancellationToken); IsOnlyWrittenTo = semanticFacts.IsOnlyWrittenTo(semanticModel, SimpleNameOrMemberAccessExpressionOpt, cancellationToken); IsInConstructor = DetermineIsInConstructor(semanticDocument, simpleName); IsInMemberContext = simpleName != SimpleNameOrMemberAccessExpressionOpt || syntaxFacts.IsObjectInitializerNamedAssignmentIdentifier(SimpleNameOrMemberAccessExpressionOpt); ContainingMethod = semanticModel.GetEnclosingSymbol<IMethodSymbol>(IdentifierToken.SpanStart, cancellationToken); CheckSurroundingContext(semanticDocument, SymbolKind.Field, cancellationToken); CheckSurroundingContext(semanticDocument, SymbolKind.Property, cancellationToken); return true; } private void CheckSurroundingContext( SemanticDocument semanticDocument, SymbolKind symbolKind, CancellationToken cancellationToken) { // See if we're being assigned to. If so, look at the before/after statements // to see if either is an assignment. 
If so, we can use that to try to determine // user patterns that can be used when generating the member. For example, // if the sibling assignment is to a readonly field, then we want to offer to // generate a readonly field vs a writable field. // // Also, because users often like to keep members/assignments in the same order // we can pick a good place for the new member based on the surrounding assignments. var syntaxFacts = semanticDocument.Document.GetLanguageService<ISyntaxFactsService>(); var simpleName = SimpleNameOrMemberAccessExpressionOpt; if (syntaxFacts.IsLeftSideOfAssignment(simpleName)) { var assignmentStatement = simpleName.Ancestors().FirstOrDefault(syntaxFacts.IsSimpleAssignmentStatement); if (assignmentStatement != null) { syntaxFacts.GetPartsOfAssignmentStatement( assignmentStatement, out var left, out var right); if (left == simpleName) { var block = assignmentStatement.Parent; var children = block.ChildNodesAndTokens(); var statementindex = GetStatementIndex(children, assignmentStatement); var previousAssignedSymbol = TryGetAssignedSymbol(semanticDocument, symbolKind, children, statementindex - 1, cancellationToken); var nextAssignedSymbol = TryGetAssignedSymbol(semanticDocument, symbolKind, children, statementindex + 1, cancellationToken); if (symbolKind == SymbolKind.Field) { OfferReadOnlyFieldFirst = FieldIsReadOnly(previousAssignedSymbol) || FieldIsReadOnly(nextAssignedSymbol); } AfterThisLocation ??= previousAssignedSymbol?.Locations.FirstOrDefault(); BeforeThisLocation ??= nextAssignedSymbol?.Locations.FirstOrDefault(); } } } } private ISymbol TryGetAssignedSymbol( SemanticDocument semanticDocument, SymbolKind symbolKind, ChildSyntaxList children, int index, CancellationToken cancellationToken) { var syntaxFacts = semanticDocument.Document.GetLanguageService<ISyntaxFactsService>(); if (index >= 0 && index < children.Count) { var sibling = children[index]; if (sibling.IsNode) { var siblingNode = sibling.AsNode(); if 
(syntaxFacts.IsSimpleAssignmentStatement(siblingNode)) { syntaxFacts.GetPartsOfAssignmentStatement( siblingNode, out var left, out _); var symbol = semanticDocument.SemanticModel.GetSymbolInfo(left, cancellationToken).Symbol; if (symbol?.Kind == symbolKind && symbol.ContainingType.Equals(ContainingType)) { return symbol; } } } } return null; } private static bool FieldIsReadOnly(ISymbol symbol) => symbol is IFieldSymbol field && field.IsReadOnly; private static int GetStatementIndex(ChildSyntaxList children, SyntaxNode statement) { var index = 0; foreach (var child in children) { if (child == statement) { return index; } index++; } throw ExceptionUtilities.Unreachable; } private void DetermineFieldType( SemanticDocument semanticDocument, CancellationToken cancellationToken) { var typeInference = semanticDocument.Document.GetLanguageService<ITypeInferenceService>(); var inferredType = typeInference.InferType( semanticDocument.SemanticModel, SimpleNameOrMemberAccessExpressionOpt, objectAsDefault: true, name: IdentifierToken.ValueText, cancellationToken: cancellationToken); var compilation = semanticDocument.SemanticModel.Compilation; inferredType = inferredType.SpecialType == SpecialType.System_Void ? compilation.ObjectType : inferredType; if (IsInConditionalAccessExpression) { inferredType = inferredType.RemoveNullableIfPresent(); } if (inferredType.IsDelegateType() && !inferredType.CanBeReferencedByName) { var namedDelegateType = inferredType.GetDelegateType(compilation)?.DelegateInvokeMethod?.ConvertToType(compilation); if (namedDelegateType != null) { inferredType = namedDelegateType; } } // Substitute 'object' for all captured method type parameters. Note: we may need to // do this for things like anonymous types, as well as captured type parameters that // aren't in scope in the destination type. 
var capturedMethodTypeParameters = inferredType.GetReferencedMethodTypeParameters(); var mapping = capturedMethodTypeParameters.ToDictionary(tp => tp, tp => compilation.ObjectType); TypeMemberType = inferredType.SubstituteTypes(mapping, compilation); var availableTypeParameters = TypeToGenerateIn.GetAllTypeParameters(); TypeMemberType = TypeMemberType.RemoveUnavailableTypeParameters( compilation, availableTypeParameters); var enclosingMethodSymbol = semanticDocument.SemanticModel.GetEnclosingSymbol<IMethodSymbol>(SimpleNameOrMemberAccessExpressionOpt.SpanStart, cancellationToken); if (enclosingMethodSymbol != null && enclosingMethodSymbol.TypeParameters != null && enclosingMethodSymbol.TypeParameters.Length != 0) { using var _ = ArrayBuilder<ITypeParameterSymbol>.GetInstance(out var combinedTypeParameters); combinedTypeParameters.AddRange(availableTypeParameters); combinedTypeParameters.AddRange(enclosingMethodSymbol.TypeParameters); LocalType = inferredType.RemoveUnavailableTypeParameters(compilation, combinedTypeParameters); } else { LocalType = TypeMemberType; } } private bool DetermineIsInConstructor(SemanticDocument semanticDocument, SyntaxNode simpleName) { if (!ContainingType.OriginalDefinition.Equals(TypeToGenerateIn.OriginalDefinition)) return false; // If we're in an lambda/local function we're not actually 'in' the constructor. // i.e. we can't actually write to read-only fields here. var syntaxFacts = semanticDocument.Document.GetRequiredLanguageService<ISyntaxFactsService>(); if (simpleName.AncestorsAndSelf().Any(n => syntaxFacts.IsAnonymousOrLocalFunction(n))) return false; return syntaxFacts.IsInConstructor(simpleName); } } } }
brettfo/roslyn
src/Features/Core/Portable/GenerateMember/GenerateVariable/AbstractGenerateVariableService.State.cs
C#
apache-2.0
21,815
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.xml.security.stax.impl.processor.input; import java.security.InvalidAlgorithmParameterException; import java.security.InvalidKeyException; import java.security.Key; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.security.NoSuchProviderException; import java.security.spec.MGF1ParameterSpec; import java.util.Base64; import java.util.Deque; import javax.crypto.Cipher; import javax.crypto.NoSuchPaddingException; import javax.crypto.spec.OAEPParameterSpec; import javax.crypto.spec.PSource; import javax.crypto.spec.SecretKeySpec; import javax.xml.bind.JAXBElement; import org.apache.xml.security.binding.xmldsig.DigestMethodType; import org.apache.xml.security.binding.xmldsig.KeyInfoType; import org.apache.xml.security.binding.xmlenc.CipherValueType; import org.apache.xml.security.binding.xmlenc.EncryptedKeyType; import org.apache.xml.security.binding.xmlenc11.MGFType; import org.apache.xml.security.binding.xop.Include; import org.apache.xml.security.exceptions.XMLSecurityException; import org.apache.xml.security.stax.config.JCEAlgorithmMapper; import org.apache.xml.security.stax.ext.AbstractInputSecurityHeaderHandler; import 
org.apache.xml.security.stax.ext.InboundSecurityContext; import org.apache.xml.security.stax.ext.InputProcessorChain; import org.apache.xml.security.stax.ext.XMLSecurityConstants; import org.apache.xml.security.stax.ext.XMLSecurityProperties; import org.apache.xml.security.stax.ext.XMLSecurityUtils; import org.apache.xml.security.stax.ext.stax.XMLSecEvent; import org.apache.xml.security.stax.impl.securityToken.AbstractInboundSecurityToken; import org.apache.xml.security.stax.impl.util.IDGenerator; import org.apache.xml.security.stax.securityEvent.AlgorithmSuiteSecurityEvent; import org.apache.xml.security.stax.securityEvent.EncryptedKeyTokenSecurityEvent; import org.apache.xml.security.stax.securityToken.InboundSecurityToken; import org.apache.xml.security.stax.securityToken.SecurityTokenConstants; import org.apache.xml.security.stax.securityToken.SecurityTokenFactory; import org.apache.xml.security.stax.securityToken.SecurityTokenProvider; import org.apache.xml.security.utils.XMLUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * An input handler for the EncryptedKey XML Structure * */ public class XMLEncryptedKeyInputHandler extends AbstractInputSecurityHeaderHandler { private static final transient Logger LOG = LoggerFactory.getLogger(XMLEncryptedKeyInputHandler.class); @Override public void handle(final InputProcessorChain inputProcessorChain, final XMLSecurityProperties securityProperties, final Deque<XMLSecEvent> eventQueue, final Integer index) throws XMLSecurityException { @SuppressWarnings("unchecked") final EncryptedKeyType encryptedKeyType = ((JAXBElement<EncryptedKeyType>) parseStructure(eventQueue, index, securityProperties)).getValue(); final XMLSecEvent responsibleXMLSecStartXMLEvent = getResponsibleStartXMLEvent(eventQueue, index); handle(inputProcessorChain, encryptedKeyType, responsibleXMLSecStartXMLEvent, securityProperties); } public void handle(final InputProcessorChain inputProcessorChain, final EncryptedKeyType 
encryptedKeyType, final XMLSecEvent responsibleXMLSecStartXMLEvent, final XMLSecurityProperties securityProperties) throws XMLSecurityException { if (encryptedKeyType.getEncryptionMethod() == null) { throw new XMLSecurityException("stax.encryption.noEncAlgo"); } if (encryptedKeyType.getId() == null) { encryptedKeyType.setId(IDGenerator.generateID(null)); } final InboundSecurityContext inboundSecurityContext = inputProcessorChain.getSecurityContext(); final SecurityTokenProvider<InboundSecurityToken> securityTokenProvider = new SecurityTokenProvider<InboundSecurityToken>() { private AbstractInboundSecurityToken securityToken; @Override public InboundSecurityToken getSecurityToken() throws XMLSecurityException { if (this.securityToken != null) { return this.securityToken; } this.securityToken = new AbstractInboundSecurityToken( inboundSecurityContext, encryptedKeyType.getId(), SecurityTokenConstants.KeyIdentifier_EncryptedKey, true) { private byte[] decryptedKey; @Override public Key getKey(String algorithmURI, XMLSecurityConstants.AlgorithmUsage algorithmUsage, String correlationID) throws XMLSecurityException { Key key = getSecretKey().get(algorithmURI); if (key != null) { return key; } String algoFamily = JCEAlgorithmMapper.getJCEKeyAlgorithmFromURI(algorithmURI); key = new SecretKeySpec(getSecret(this, correlationID, algorithmURI), algoFamily); setSecretKey(algorithmURI, key); return key; } @Override public InboundSecurityToken getKeyWrappingToken() throws XMLSecurityException { return getWrappingSecurityToken(this); } @Override public SecurityTokenConstants.TokenType getTokenType() { return SecurityTokenConstants.EncryptedKeyToken; } private InboundSecurityToken wrappingSecurityToken; private InboundSecurityToken getWrappingSecurityToken(InboundSecurityToken wrappedSecurityToken) throws XMLSecurityException { if (wrappingSecurityToken != null) { return this.wrappingSecurityToken; } KeyInfoType keyInfoType = encryptedKeyType.getKeyInfo(); 
this.wrappingSecurityToken = SecurityTokenFactory.getInstance().getSecurityToken( keyInfoType, SecurityTokenConstants.KeyUsage_Decryption, securityProperties, inboundSecurityContext ); this.wrappingSecurityToken.addWrappedToken(wrappedSecurityToken); return this.wrappingSecurityToken; } private byte[] getSecret(InboundSecurityToken wrappedSecurityToken, String correlationID, String symmetricAlgorithmURI) throws XMLSecurityException { if (this.decryptedKey != null) { return this.decryptedKey; } String algorithmURI = encryptedKeyType.getEncryptionMethod().getAlgorithm(); if (algorithmURI == null) { throw new XMLSecurityException("stax.encryption.noEncAlgo"); } String jceName = JCEAlgorithmMapper.translateURItoJCEID(algorithmURI); String jceProvider = JCEAlgorithmMapper.getJCEProviderFromURI(algorithmURI); if (jceName == null) { throw new XMLSecurityException("algorithms.NoSuchMap", new Object[] {algorithmURI}); } final InboundSecurityToken wrappingSecurityToken = getWrappingSecurityToken(wrappedSecurityToken); Cipher cipher; try { XMLSecurityConstants.AlgorithmUsage algorithmUsage; if (wrappingSecurityToken.isAsymmetric()) { algorithmUsage = XMLSecurityConstants.Asym_Key_Wrap; } else { algorithmUsage = XMLSecurityConstants.Sym_Key_Wrap; } if (jceProvider == null) { cipher = Cipher.getInstance(jceName); } else { cipher = Cipher.getInstance(jceName, jceProvider); } if (XMLSecurityConstants.NS_XENC11_RSAOAEP.equals(algorithmURI) || XMLSecurityConstants.NS_XENC_RSAOAEPMGF1P.equals(algorithmURI)) { final DigestMethodType digestMethodType = XMLSecurityUtils.getQNameType(encryptedKeyType.getEncryptionMethod().getContent(), XMLSecurityConstants.TAG_dsig_DigestMethod); String jceDigestAlgorithm = "SHA-1"; if (digestMethodType != null) { AlgorithmSuiteSecurityEvent algorithmSuiteSecurityEvent = new AlgorithmSuiteSecurityEvent(); algorithmSuiteSecurityEvent.setAlgorithmURI(digestMethodType.getAlgorithm()); 
algorithmSuiteSecurityEvent.setAlgorithmUsage(XMLSecurityConstants.EncDig); algorithmSuiteSecurityEvent.setCorrelationID(correlationID); inboundSecurityContext.registerSecurityEvent(algorithmSuiteSecurityEvent); jceDigestAlgorithm = JCEAlgorithmMapper.translateURItoJCEID(digestMethodType.getAlgorithm()); } PSource.PSpecified pSource = PSource.PSpecified.DEFAULT; final byte[] oaepParams = XMLSecurityUtils.getQNameType(encryptedKeyType.getEncryptionMethod().getContent(), XMLSecurityConstants.TAG_xenc_OAEPparams); if (oaepParams != null) { pSource = new PSource.PSpecified(oaepParams); } MGF1ParameterSpec mgfParameterSpec = new MGF1ParameterSpec("SHA-1"); final MGFType mgfType = XMLSecurityUtils.getQNameType(encryptedKeyType.getEncryptionMethod().getContent(), XMLSecurityConstants.TAG_xenc11_MGF); if (mgfType != null) { String jceMGFAlgorithm = JCEAlgorithmMapper.translateURItoJCEID(mgfType.getAlgorithm()); mgfParameterSpec = new MGF1ParameterSpec(jceMGFAlgorithm); } OAEPParameterSpec oaepParameterSpec = new OAEPParameterSpec(jceDigestAlgorithm, "MGF1", mgfParameterSpec, pSource); cipher.init(Cipher.UNWRAP_MODE, wrappingSecurityToken.getSecretKey(algorithmURI, algorithmUsage, correlationID), oaepParameterSpec); } else { cipher.init(Cipher.UNWRAP_MODE, wrappingSecurityToken.getSecretKey(algorithmURI, algorithmUsage, correlationID)); } if (encryptedKeyType.getCipherData() == null || encryptedKeyType.getCipherData().getCipherValue() == null || encryptedKeyType.getCipherData().getCipherValue().getContent() == null || encryptedKeyType.getCipherData().getCipherValue().getContent().isEmpty()) { throw new XMLSecurityException("stax.encryption.noCipherValue"); } } catch (NoSuchPaddingException | NoSuchAlgorithmException | InvalidAlgorithmParameterException | InvalidKeyException | NoSuchProviderException e) { throw new XMLSecurityException(e); } byte[] encryptedBytes = getEncryptedBytes(encryptedKeyType.getCipherData().getCipherValue()); byte[] sha1Bytes = 
generateDigest(encryptedBytes); String sha1Identifier = XMLUtils.encodeToString(sha1Bytes); super.setSha1Identifier(sha1Identifier); try { Key key = cipher.unwrap(encryptedBytes, jceName, Cipher.SECRET_KEY); return this.decryptedKey = key.getEncoded(); } catch (IllegalStateException e) { throw new XMLSecurityException(e); } catch (Exception e) { LOG.warn("Unwrapping of the encrypted key failed with error: " + e.getMessage() + ". " + "Generating a faked one to mitigate timing attacks."); int keyLength = JCEAlgorithmMapper.getKeyLengthFromURI(symmetricAlgorithmURI); this.decryptedKey = XMLSecurityConstants.generateBytes(keyLength / 8); return this.decryptedKey; } } }; this.securityToken.setElementPath(responsibleXMLSecStartXMLEvent.getElementPath()); this.securityToken.setXMLSecEvent(responsibleXMLSecStartXMLEvent); return this.securityToken; } private byte[] getEncryptedBytes(CipherValueType cipherValue) throws XMLSecurityException { StringBuilder sb = new StringBuilder(); for (Object obj : cipherValue.getContent()) { if (obj instanceof String) { sb.append((String)obj); } else if (obj instanceof JAXBElement<?>) { JAXBElement<?> element = (JAXBElement<?>)obj; if (XMLSecurityConstants.TAG_XOP_INCLUDE.equals(element.getName())) { Include include = (Include)element.getValue(); if (include != null && include.getHref() != null && include.getHref().startsWith("cid:")) { return getBytesFromAttachment(include.getHref(), securityProperties); } } } } return Base64.getMimeDecoder().decode(sb.toString()); } @Override public String getId() { return encryptedKeyType.getId(); } }; //register the key token for decryption: inboundSecurityContext.registerSecurityTokenProvider(encryptedKeyType.getId(), securityTokenProvider); //fire a tokenSecurityEvent EncryptedKeyTokenSecurityEvent tokenSecurityEvent = new EncryptedKeyTokenSecurityEvent(); tokenSecurityEvent.setSecurityToken(securityTokenProvider.getSecurityToken()); tokenSecurityEvent.setCorrelationID(encryptedKeyType.getId()); 
inboundSecurityContext.registerSecurityEvent(tokenSecurityEvent); //if this EncryptedKey structure contains a reference list, delegate it to a subclass if (encryptedKeyType.getReferenceList() != null) { handleReferenceList(inputProcessorChain, encryptedKeyType, securityProperties); } } private byte[] generateDigest(byte[] inputBytes) throws XMLSecurityException { try { return MessageDigest.getInstance("SHA-1").digest(inputBytes); } catch (NoSuchAlgorithmException e) { throw new XMLSecurityException(e); } } protected void handleReferenceList(final InputProcessorChain inputProcessorChain, final EncryptedKeyType encryptedKeyType, final XMLSecurityProperties securityProperties) throws XMLSecurityException { // do nothing } protected byte[] getBytesFromAttachment(String xopUri, final XMLSecurityProperties securityProperties) throws XMLSecurityException { throw new XMLSecurityException("errorMessages.NotYetImplementedException"); } /* <xenc:EncryptedKey xmlns:xenc="http://www.w3.org/2001/04/xmlenc#" Id="EncKeyId-1483925398"> <xenc:EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-1_5" /> <ds:KeyInfo xmlns:ds="http://www.w3.org/2000/09/xmldsig#"> <wsse:SecurityTokenReference xmlns:wsse="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd"> <wsse:KeyIdentifier EncodingType="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-soap-message-security-1.0#Base64Binary" ValueType="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-x509-token-profile-1.0#X509SubjectKeyIdentifier">pHoiKNGY2YsLBKxwIV+jURt858M=</wsse:KeyIdentifier> </wsse:SecurityTokenReference> </ds:KeyInfo> <xenc:CipherData> <xenc:CipherValue>Khsa9SN3ALNXOgGDKOqihvfwGsXb9QN/q4Fpi9uuThgz+3D4oRSMkrGSPCqwG13vddvHywGAA/XNbWNT+5Xivz3lURCDCc2H/92YlXXo/crQNJnPlLrLZ81bGOzbNo7lnYQBLp/77K7b1bhldZAeV9ZfEW7DjbOMZ+k1dnDCu3A=</xenc:CipherValue> </xenc:CipherData> <xenc:ReferenceList> <xenc:DataReference URI="#EncDataId-1612925417" /> </xenc:ReferenceList> 
</xenc:EncryptedKey> */ }
apache/santuario-java
src/main/java/org/apache/xml/security/stax/impl/processor/input/XMLEncryptedKeyInputHandler.java
Java
apache-2.0
18,812
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ibm.sparktc.sparkbench.datageneration.mlgenerator; /** * This file is copied from Hadoop package org.apache.hadoop.examples.terasort. */ /** * This class implements a 128-bit linear congruential generator. * Specifically, if X0 is the most recently issued 128-bit random * number (or a seed of 0 if no random number has already been generated, * the next number to be generated, X1, is equal to: * X1 = (a * X0 + c) mod 2**128 * where a is 47026247687942121848144207491837523525 * or 0x2360ed051fc65da44385df649fccf645 * and c is 98910279301475397889117759788405497857 * or 0x4a696d47726179524950202020202001 * The coefficient "a" is suggested by: * Pierre L'Ecuyer, "Tables of linear congruential generators of different * sizes and good lattice structure", Mathematics of Computation, 68 * pp. 249 - 260 (1999) * http://www.ams.org/mcom/1999-68-225/S0025-5718-99-00996-5/S0025-5718-99-00996-5.pdf * The constant "c" meets the simple suggestion by the same reference that * it be odd. * * There is also a facility for quickly advancing the state of the * generator by a fixed number of steps - this facilitates parallel * generation. 
* * This is based on 1.0 of rand16.c from Chris Nyberg * <chris.nyberg@ordinal.com>. */ public class Random16 { /** * The "Gen" array contain powers of 2 of the linear congruential generator. * The index 0 struct contain the "a" coefficient and "c" constant for the * generator. That is, the generator is: * f(x) = (Gen[0].a * x + Gen[0].c) mod 2**128 * * All structs after the first contain an "a" and "c" that * comprise the square of the previous function. * * f**2(x) = (Gen[1].a * x + Gen[1].c) mod 2**128 * f**4(x) = (Gen[2].a * x + Gen[2].c) mod 2**128 * f**8(x) = (Gen[3].a * x + Gen[3].c) mod 2**128 * ... */ private static class RandomConstant { final Unsigned16 a; final Unsigned16 c; public RandomConstant(String left, String right) { a = new Unsigned16(left); c = new Unsigned16(right); } } private static final RandomConstant[] genArray = new RandomConstant[]{ /* [ 0] */ new RandomConstant("2360ed051fc65da44385df649fccf645", "4a696d47726179524950202020202001"), /* [ 1] */ new RandomConstant("17bce35bdf69743c529ed9eb20e0ae99", "95e0e48262b3edfe04479485c755b646"), /* [ 2] */ new RandomConstant("f4dd417327db7a9bd194dfbe42d45771", "882a02c315362b60765f100068b33a1c"), /* [ 3] */ new RandomConstant("6347af777a7898f6d1a2d6f33505ffe1", "5efc4abfaca23e8ca8edb1f2dfbf6478"), /* [ 4] */ new RandomConstant("b6a4239f3b315f84f6ef6d3d288c03c1", "f25bd15439d16af594c1b1bafa6239f0"), /* [ 5] */ new RandomConstant("2c82901ad1cb0cd182b631ba6b261781", "89ca67c29c9397d59c612596145db7e0"), /* [ 6] */ new RandomConstant("dab03f988288676ee49e66c4d2746f01", "8b6ae036713bd578a8093c8eae5c7fc0"), /* [ 7] */ new RandomConstant("602167331d86cf5684fe009a6d09de01", "98a2542fd23d0dbdff3b886cdb1d3f80"), /* [ 8] */ new RandomConstant("61ecb5c24d95b058f04c80a23697bc01", "954db923fdb7933e947cd1edcecb7f00"), /* [ 9] */ new RandomConstant("4a5c31e0654c28aa60474e83bf3f7801", "00be4a36657c98cd204e8c8af7dafe00"), /* [ 10] */ new RandomConstant("ae4f079d54fbece1478331d3c6bef001", 
"991965329dccb28d581199ab18c5fc00"), /* [ 11] */ new RandomConstant("101b8cb830c7cb927ff1ed50ae7de001", "e1a8705b63ad5b8cd6c3d268d5cbf800"), /* [ 12] */ new RandomConstant("f54a27fc056b00e7563f3505e0fbc001", "2b657bbfd6ed9d632079e70c3c97f000"), /* [ 13] */ new RandomConstant("df8a6fc1a833d201f98d719dd1f78001", "59b60ee4c52fa49e9fe90682bd2fe000"), /* [ 14] */ new RandomConstant("5480a5015f101a4ea7e3f183e3ef0001", "cc099c88030679464fe86aae8a5fc000"), /* [ 15] */ new RandomConstant("a498509e76e5d7925f539c28c7de0001", "06b9abff9f9f33dd30362c0154bf8000"), /* [ 16] */ new RandomConstant("0798a3d8b10dc72e60121cd58fbc0001", "e296707121688d5a0260b293a97f0000"), /* [ 17] */ new RandomConstant("1647d1e78ec02e665fafcbbb1f780001", "189ffc4701ff23cb8f8acf6b52fe0000"), /* [ 18] */ new RandomConstant("a7c982285e72bf8c0c8ddfb63ef00001", "5141110ab208fb9d61fb47e6a5fc0000"), /* [ 19] */ new RandomConstant("3eb78ee8fb8c56dbc5d4e06c7de00001", "3c97caa62540f2948d8d340d4bf80000"), /* [ 20] */ new RandomConstant("72d03b6f4681f2f9fe8e44d8fbc00001", "1b25cb9cfe5a0c963174f91a97f00000"), /* [ 21] */ new RandomConstant("ea85f81e4f502c9bc8ae99b1f7800001", "0c644570b4a487103c5436352fe00000"), /* [ 22] */ new RandomConstant("629c320db08b00c6bfa57363ef000001", "3d0589c28869472bde517c6a5fc00000"), /* [ 23] */ new RandomConstant("c5c4b9ce268d074a386be6c7de000001", "bc95e5ab36477e65534738d4bf800000"), /* [ 24] */ new RandomConstant("f30bbbbed1596187555bcd8fbc000001", "ddb02ff72a031c01011f71a97f000000"), /* [ 25] */ new RandomConstant("4a1000fb26c9eeda3cc79b1f78000001", "2561426086d9acdb6c82e352fe000000"), /* [ 26] */ new RandomConstant("89fb5307f6bf8ce2c1cf363ef0000001", "64a788e3c118ed1c8215c6a5fc000000"), /* [ 27] */ new RandomConstant("830b7b3358a5d67ea49e6c7de0000001", "e65ea321908627cfa86b8d4bf8000000"), /* [ 28] */ new RandomConstant("fd8a51da91a69fe1cd3cd8fbc0000001", "53d27225604d85f9e1d71a97f0000000"), /* [ 29] */ new RandomConstant("901a48b642b90b55aa79b1f780000001", 
"ca5ec7a3ed1fe55e07ae352fe0000000"), /* [ 30] */ new RandomConstant("118cdefdf32144f394f363ef00000001", "4daebb2e085330651f5c6a5fc0000000"), /* [ 31] */ new RandomConstant("0a88c0a91cff430829e6c7de00000001", "9d6f1a00a8f3f76e7eb8d4bf80000000"), /* [ 32] */ new RandomConstant("433bef4314f16a9453cd8fbc00000001", "158c62f2b31e496dfd71a97f00000000"), /* [ 33] */ new RandomConstant("c294b02995ae6738a79b1f7800000001", "290e84a2eb15fd1ffae352fe00000000"), /* [ 34] */ new RandomConstant("913575e0da8b16b14f363ef000000001", "e3dc1bfbe991a34ff5c6a5fc00000000"), /* [ 35] */ new RandomConstant("2f61b9f871cf4e629e6c7de000000001", "ddf540d020b9eadfeb8d4bf800000000"), /* [ 36] */ new RandomConstant("78d26ccbd68320c53cd8fbc000000001", "8ee4950177ce66bfd71a97f000000000"), /* [ 37] */ new RandomConstant("8b7ebd037898518a79b1f78000000001", "39e0f787c907117fae352fe000000000"), /* [ 38] */ new RandomConstant("0b5507b61f78e314f363ef0000000001", "659d2522f7b732ff5c6a5fc000000000"), /* [ 39] */ new RandomConstant("4f884628f812c629e6c7de0000000001", "9e8722938612a5feb8d4bf8000000000"), /* [ 40] */ new RandomConstant("be896744d4a98c53cd8fbc0000000001", "e941a65d66b64bfd71a97f0000000000"), /* [ 41] */ new RandomConstant("daf63a553b6318a79b1f780000000001", "7b50d19437b097fae352fe0000000000"), /* [ 42] */ new RandomConstant("2d7a23d8bf06314f363ef00000000001", "59d7b68e18712ff5c6a5fc0000000000"), /* [ 43] */ new RandomConstant("392b046a9f0c629e6c7de00000000001", "4087bab2d5225feb8d4bf80000000000"), /* [ 44] */ new RandomConstant("eb30fbb9c218c53cd8fbc00000000001", "b470abc03b44bfd71a97f00000000000"), /* [ 45] */ new RandomConstant("b9cdc30594318a79b1f7800000000001", "366630eaba897fae352fe00000000000"), /* [ 46] */ new RandomConstant("014ab453686314f363ef000000000001", "a2dfc77e8512ff5c6a5fc00000000000"), /* [ 47] */ new RandomConstant("395221c7d0c629e6c7de000000000001", "1e0d25a14a25feb8d4bf800000000000"), /* [ 48] */ new RandomConstant("4d972813a18c53cd8fbc000000000001", 
"9d50a5d3944bfd71a97f000000000000"), /* [ 49] */ new RandomConstant("06f9e2374318a79b1f78000000000001", "bf7ab5eb2897fae352fe000000000000"), /* [ 50] */ new RandomConstant("bd220cae86314f363ef0000000000001", "925b14e6512ff5c6a5fc000000000000"), /* [ 51] */ new RandomConstant("36fd3a5d0c629e6c7de0000000000001", "724cce0ca25feb8d4bf8000000000000"), /* [ 52] */ new RandomConstant("60def8ba18c53cd8fbc0000000000001", "1af42d1944bfd71a97f0000000000000"), /* [ 53] */ new RandomConstant("8d500174318a79b1f780000000000001", "0f529e32897fae352fe0000000000000"), /* [ 54] */ new RandomConstant("48e842e86314f363ef00000000000001", "844e4c6512ff5c6a5fc0000000000000"), /* [ 55] */ new RandomConstant("4af185d0c629e6c7de00000000000001", "9f40d8ca25feb8d4bf80000000000000"), /* [ 56] */ new RandomConstant("7a670ba18c53cd8fbc00000000000001", "9912b1944bfd71a97f00000000000000"), /* [ 57] */ new RandomConstant("86de174318a79b1f7800000000000001", "9c69632897fae352fe00000000000000"), /* [ 58] */ new RandomConstant("55fc2e86314f363ef000000000000001", "e1e2c6512ff5c6a5fc00000000000000"), /* [ 59] */ new RandomConstant("ccf85d0c629e6c7de000000000000001", "68058ca25feb8d4bf800000000000000"), /* [ 60] */ new RandomConstant("1df0ba18c53cd8fbc000000000000001", "610b1944bfd71a97f000000000000000"), /* [ 61] */ new RandomConstant("4be174318a79b1f78000000000000001", "061632897fae352fe000000000000000"), /* [ 62] */ new RandomConstant("d7c2e86314f363ef0000000000000001", "1c2c6512ff5c6a5fc000000000000000"), /* [ 63] */ new RandomConstant("af85d0c629e6c7de0000000000000001", "7858ca25feb8d4bf8000000000000000"), /* [ 64] */ new RandomConstant("5f0ba18c53cd8fbc0000000000000001", "f0b1944bfd71a97f0000000000000000"), /* [ 65] */ new RandomConstant("be174318a79b1f780000000000000001", "e1632897fae352fe0000000000000000"), /* [ 66] */ new RandomConstant("7c2e86314f363ef00000000000000001", "c2c6512ff5c6a5fc0000000000000000"), /* [ 67] */ new RandomConstant("f85d0c629e6c7de00000000000000001", 
"858ca25feb8d4bf80000000000000000"), /* [ 68] */ new RandomConstant("f0ba18c53cd8fbc00000000000000001", "0b1944bfd71a97f00000000000000000"), /* [ 69] */ new RandomConstant("e174318a79b1f7800000000000000001", "1632897fae352fe00000000000000000"), /* [ 70] */ new RandomConstant("c2e86314f363ef000000000000000001", "2c6512ff5c6a5fc00000000000000000"), /* [ 71] */ new RandomConstant("85d0c629e6c7de000000000000000001", "58ca25feb8d4bf800000000000000000"), /* [ 72] */ new RandomConstant("0ba18c53cd8fbc000000000000000001", "b1944bfd71a97f000000000000000000"), /* [ 73] */ new RandomConstant("174318a79b1f78000000000000000001", "632897fae352fe000000000000000000"), /* [ 74] */ new RandomConstant("2e86314f363ef0000000000000000001", "c6512ff5c6a5fc000000000000000000"), /* [ 75] */ new RandomConstant("5d0c629e6c7de0000000000000000001", "8ca25feb8d4bf8000000000000000000"), /* [ 76] */ new RandomConstant("ba18c53cd8fbc0000000000000000001", "1944bfd71a97f0000000000000000000"), /* [ 77] */ new RandomConstant("74318a79b1f780000000000000000001", "32897fae352fe0000000000000000000"), /* [ 78] */ new RandomConstant("e86314f363ef00000000000000000001", "6512ff5c6a5fc0000000000000000000"), /* [ 79] */ new RandomConstant("d0c629e6c7de00000000000000000001", "ca25feb8d4bf80000000000000000000"), /* [ 80] */ new RandomConstant("a18c53cd8fbc00000000000000000001", "944bfd71a97f00000000000000000000"), /* [ 81] */ new RandomConstant("4318a79b1f7800000000000000000001", "2897fae352fe00000000000000000000"), /* [ 82] */ new RandomConstant("86314f363ef000000000000000000001", "512ff5c6a5fc00000000000000000000"), /* [ 83] */ new RandomConstant("0c629e6c7de000000000000000000001", "a25feb8d4bf800000000000000000000"), /* [ 84] */ new RandomConstant("18c53cd8fbc000000000000000000001", "44bfd71a97f000000000000000000000"), /* [ 85] */ new RandomConstant("318a79b1f78000000000000000000001", "897fae352fe000000000000000000000"), /* [ 86] */ new RandomConstant("6314f363ef0000000000000000000001", 
"12ff5c6a5fc000000000000000000000"), /* [ 87] */ new RandomConstant("c629e6c7de0000000000000000000001", "25feb8d4bf8000000000000000000000"), /* [ 88] */ new RandomConstant("8c53cd8fbc0000000000000000000001", "4bfd71a97f0000000000000000000000"), /* [ 89] */ new RandomConstant("18a79b1f780000000000000000000001", "97fae352fe0000000000000000000000"), /* [ 90] */ new RandomConstant("314f363ef00000000000000000000001", "2ff5c6a5fc0000000000000000000000"), /* [ 91] */ new RandomConstant("629e6c7de00000000000000000000001", "5feb8d4bf80000000000000000000000"), /* [ 92] */ new RandomConstant("c53cd8fbc00000000000000000000001", "bfd71a97f00000000000000000000000"), /* [ 93] */ new RandomConstant("8a79b1f7800000000000000000000001", "7fae352fe00000000000000000000000"), /* [ 94] */ new RandomConstant("14f363ef000000000000000000000001", "ff5c6a5fc00000000000000000000000"), /* [ 95] */ new RandomConstant("29e6c7de000000000000000000000001", "feb8d4bf800000000000000000000000"), /* [ 96] */ new RandomConstant("53cd8fbc000000000000000000000001", "fd71a97f000000000000000000000000"), /* [ 97] */ new RandomConstant("a79b1f78000000000000000000000001", "fae352fe000000000000000000000000"), /* [ 98] */ new RandomConstant("4f363ef0000000000000000000000001", "f5c6a5fc000000000000000000000000"), /* [ 99] */ new RandomConstant("9e6c7de0000000000000000000000001", "eb8d4bf8000000000000000000000000"), /* [100] */ new RandomConstant("3cd8fbc0000000000000000000000001", "d71a97f0000000000000000000000000"), /* [101] */ new RandomConstant("79b1f780000000000000000000000001", "ae352fe0000000000000000000000000"), /* [102] */ new RandomConstant("f363ef00000000000000000000000001", "5c6a5fc0000000000000000000000000"), /* [103] */ new RandomConstant("e6c7de00000000000000000000000001", "b8d4bf80000000000000000000000000"), /* [104] */ new RandomConstant("cd8fbc00000000000000000000000001", "71a97f00000000000000000000000000"), /* [105] */ new RandomConstant("9b1f7800000000000000000000000001", 
"e352fe00000000000000000000000000"), /* [106] */ new RandomConstant("363ef000000000000000000000000001", "c6a5fc00000000000000000000000000"), /* [107] */ new RandomConstant("6c7de000000000000000000000000001", "8d4bf800000000000000000000000000"), /* [108] */ new RandomConstant("d8fbc000000000000000000000000001", "1a97f000000000000000000000000000"), /* [109] */ new RandomConstant("b1f78000000000000000000000000001", "352fe000000000000000000000000000"), /* [110] */ new RandomConstant("63ef0000000000000000000000000001", "6a5fc000000000000000000000000000"), /* [111] */ new RandomConstant("c7de0000000000000000000000000001", "d4bf8000000000000000000000000000"), /* [112] */ new RandomConstant("8fbc0000000000000000000000000001", "a97f0000000000000000000000000000"), /* [113] */ new RandomConstant("1f780000000000000000000000000001", "52fe0000000000000000000000000000"), /* [114] */ new RandomConstant("3ef00000000000000000000000000001", "a5fc0000000000000000000000000000"), /* [115] */ new RandomConstant("7de00000000000000000000000000001", "4bf80000000000000000000000000000"), /* [116] */ new RandomConstant("fbc00000000000000000000000000001", "97f00000000000000000000000000000"), /* [117] */ new RandomConstant("f7800000000000000000000000000001", "2fe00000000000000000000000000000"), /* [118] */ new RandomConstant("ef000000000000000000000000000001", "5fc00000000000000000000000000000"), /* [119] */ new RandomConstant("de000000000000000000000000000001", "bf800000000000000000000000000000"), /* [120] */ new RandomConstant("bc000000000000000000000000000001", "7f000000000000000000000000000000"), /* [121] */ new RandomConstant("78000000000000000000000000000001", "fe000000000000000000000000000000"), /* [122] */ new RandomConstant("f0000000000000000000000000000001", "fc000000000000000000000000000000"), /* [123] */ new RandomConstant("e0000000000000000000000000000001", "f8000000000000000000000000000000"), /* [124] */ new RandomConstant("c0000000000000000000000000000001", 
"f0000000000000000000000000000000"),
    /* [125] */ new RandomConstant("80000000000000000000000000000001",
                                   "e0000000000000000000000000000000"),
    /* [126] */ new RandomConstant("00000000000000000000000000000001",
                                   "c0000000000000000000000000000000"),
    /* [127] */ new RandomConstant("00000000000000000000000000000001",
                                   "80000000000000000000000000000000")};

  /**
   * Generate the random number that is "advance" steps from an initial
   * random number of 0. This is done by starting with 0 and composing the
   * pre-computed powers f**(2**i) of the linear congruential generator,
   * one multiply/add per set bit of "advance" — i.e. the skip costs
   * O(log(advance)) generator steps instead of O(advance). This is what
   * makes parallel generation possible: each worker can jump directly to
   * its own starting offset.
   */
  public static Unsigned16 skipAhead(Unsigned16 advance) {
    Unsigned16 result = new Unsigned16();
    long bit_map;

    // Low 64 bits of the skip distance: for each set bit i, apply the
    // pre-squared generator f**(2**i) from genArray[i].
    bit_map = advance.getLow8();
    for (int i = 0; bit_map != 0 && i < 64; i++) {
      if ((bit_map & (1L << i)) != 0) {
        /* advance random number by f**(2**i) (x) */
        result.multiply(genArray[i].a);
        result.add(genArray[i].c);
        // Clear the bit so the loop can terminate early once no bits remain.
        bit_map &= ~(1L << i);
      }
    }
    // High 64 bits of the skip distance: same idea, with the table entries
    // offset by 64 (genArray[64..127] hold f**(2**64) .. f**(2**127)).
    bit_map = advance.getHigh8();
    for (int i = 0; bit_map != 0 && i < 64; i++) {
      if ((bit_map & (1L << i)) != 0) {
        /* advance random number by f**(2**(i + 64)) (x) */
        result.multiply(genArray[i+64].a);
        result.add(genArray[i+64].c);
        bit_map &= ~(1L << i);
      }
    }
    return result;
  }

  /**
   * Generate the next 16 byte random number, mutating {@code rand} in
   * place: one step of the linear congruential generator,
   * rand = (a * rand + c) mod 2**128, using the base coefficients in
   * genArray[0].
   */
  public static void nextRand(Unsigned16 rand) {
    /* advance the random number forward once using the linear congruential
     * generator, and then return the new random number */
    rand.multiply(genArray[0].a);
    rand.add(genArray[0].c);
  }
}
ecurtin/spark-bench
cli/src/main/java/com/ibm/sparktc/sparkbench/datageneration/mlgenerator/Random16.java
Java
apache-2.0
18,912
using System.Windows.Controls;

namespace Meridian.View.Settings
{
    /// <summary>
    /// Interaction logic for SettingsRemotePlayView.xaml.
    /// NOTE(review): the original comment said "SettingsUpdatesView.xaml",
    /// which does not match this class — a copy/paste leftover.
    /// </summary>
    public partial class SettingsRemotePlayView : Page
    {
        public SettingsRemotePlayView()
        {
            // Loads and wires up the XAML-defined UI for this settings page.
            InitializeComponent();
        }
    }
}
Mavamaarten/meridian-with-downloading
Meridian/View/Settings/SettingsRemotePlayView.xaml.cs
C#
apache-2.0
326
package org.openkilda.atdd.floodlight;

/**
 * Checked exception for Kafka-related failures in the ATDD Floodlight test
 * helpers.
 *
 * NOTE(review): semantics inferred from the name only (a failure while
 * "breaking" the Kafka link in a test scenario) — confirm against callers.
 */
public class KafkaBreakException extends Exception {
    /**
     * @param s detail message describing the failure
     */
    public KafkaBreakException(String s) {
        super(s);
    }

    /**
     * @param s         detail message describing the failure
     * @param throwable root cause, preserved for the stack trace
     */
    public KafkaBreakException(String s, Throwable throwable) {
        super(s, throwable);
    }
}
nikitamarchenko/open-kilda
services/src/atdd/src/test/java/org/openkilda/atdd/floodlight/KafkaBreakException.java
Java
apache-2.0
262
/**
 * OLAT - Online Learning and Training<br>
 * http://www.olat.org
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); <br>
 * you may not use this file except in compliance with the License.<br>
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing,<br>
 * software distributed under the License is distributed on an "AS IS" BASIS, <br>
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
 * See the License for the specific language governing permissions and <br>
 * limitations under the License.
 * <p>
 * Copyright (c) frentix GmbH<br>
 * http://www.frentix.com<br>
 * <p>
 */
package org.olat.presentation.framework.core.components.form.flexible;

/**
 * <h3>Description:</h3>
 * <p>
 * This is a marker interface to tell the flexi form that this item needs a multipart request
 * <p>
 * (Not a pure marker: implementors must also report their upload limit via
 * {@link #getMaxUploadSizeKB()}.)
 * <p>
 * Initial Date: 08.12.2008 <br>
 *
 * @author Florian Gnaegi, frentix GmbH, http://www.frentix.com
 */
public interface FormMultipartItem extends FormItem {

    /**
     * Upper bound for uploads handled by this form item.
     *
     * @return the maximum upload size in kilobytes — presumably enforced by
     *         the multipart request handling; confirm against implementors
     */
    public int getMaxUploadSizeKB();
}
huihoo/olat
olat7.8/src/main/java/org/olat/presentation/framework/core/components/form/flexible/FormMultipartItem.java
Java
apache-2.0
1,157
package javacommon.base; import java.math.BigDecimal; import java.math.BigInteger; import java.text.SimpleDateFormat; import java.util.Date; import javacommon.util.ConvertRegisterHelper; import javacommon.util.PageRequestFactory; import javax.servlet.http.HttpServletRequest; import org.springframework.beans.propertyeditors.CustomDateEditor; import org.springframework.beans.propertyeditors.CustomNumberEditor; import org.springframework.ui.ModelMap; import org.springframework.util.Assert; import org.springframework.util.ReflectionUtils; import org.springframework.web.bind.ServletRequestDataBinder; import org.springframework.web.servlet.mvc.multiaction.MultiActionController; import cn.org.rapid_framework.beanutils.BeanUtils; import cn.org.rapid_framework.page.Page; import cn.org.rapid_framework.page.PageRequest; import com.sun.jmx.snmp.Timestamp; public class BaseSpringController extends MultiActionController{ protected final static String CREATED_SUCCESS = "创建成功"; protected final static String UPDATE_SUCCESS = "更新成功"; protected final static String DELETE_SUCCESS = "删除成功"; static { //注册converters ConvertRegisterHelper.registerConverters(); } public static void copyProperties(Object target,Object source) { BeanUtils.copyProperties(target, source); } public static <T> T copyProperties(Class<T> destClass,Object orig) { return BeanUtils.copyProperties(destClass, orig); } /** * 初始化binder的回调函数. 
* * @see MultiActionController#createBinder(HttpServletRequest,Object) */ protected void initBinder(HttpServletRequest request, ServletRequestDataBinder binder) { binder.registerCustomEditor(Short.class, new CustomNumberEditor(Short.class, true)); binder.registerCustomEditor(Integer.class, new CustomNumberEditor(Integer.class, true)); binder.registerCustomEditor(Long.class, new CustomNumberEditor(Long.class, true)); binder.registerCustomEditor(Float.class, new CustomNumberEditor(Float.class, true)); binder.registerCustomEditor(Double.class, new CustomNumberEditor(Double.class, true)); binder.registerCustomEditor(BigDecimal.class, new CustomNumberEditor(BigDecimal.class, true)); binder.registerCustomEditor(BigInteger.class, new CustomNumberEditor(BigInteger.class, true)); binder.registerCustomEditor(java.util.Date.class, new CustomDateEditor(new SimpleDateFormat("yyyy-MM-dd"), true)); } public static ModelMap toModelMap(Page page,PageRequest pageRequest) { return toModelMap("",page, pageRequest); } public static ModelMap toModelMap(String tableId,Page page,PageRequest pageRequest) { ModelMap model = new ModelMap(); saveIntoModelMap(tableId,page,pageRequest,model); return model; } /** * 用于一个页面有多个extremeTable是使用 * @param tableId 等于extremeTable的tableId属性 */ public static void saveIntoModelMap(String tableId,Page page,PageRequest pageRequest,ModelMap model){ Assert.notNull(tableId,"tableId must be not null"); Assert.notNull(page,"page must be not null"); model.addAttribute(tableId+"page", page); model.addAttribute(tableId+"totalRows", new Integer(page.getTotalCount())); model.addAttribute(tableId+"pageRequest", pageRequest); model.addAttribute(tableId+"query", pageRequest); } public static PageRequest bindPageRequest(HttpServletRequest request,PageRequest pageRequest,String defaultSortColumns){ return PageRequestFactory.bindPageRequest(pageRequest,request, defaultSortColumns); } public static <T> T getOrCreateRequestAttribute(HttpServletRequest request, String 
key,Class<T> clazz) { Object value = request.getAttribute(key); if(value == null) { try { value = clazz.newInstance(); } catch (Exception e) { ReflectionUtils.handleReflectionException(e); } request.setAttribute(key, value); } return (T)value; } }
zhangwenzhuo/Rapid
plugins/springmvc/java_src/javacommon/base/BaseSpringController.java
Java
apache-2.0
4,053
/*
// Licensed to DynamoBI Corporation (DynamoBI) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  DynamoBI licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
*/
package net.sf.farrago.runtime;

import java.sql.*;

import net.sf.farrago.session.*;

import org.eigenbase.enki.mdr.*;

/**
 * FarragoUdrInvocationFrame represents one entry on the routine invocation
 * stack for a given thread.
 *
 * Plain package-private data holder: no behavior, fields are read/written
 * directly by the runtime. Field roles below are inferred from type and
 * field names only — NOTE(review): confirm against the code that populates
 * these frames.
 *
 * @author John V. Sichi
 * @version $Id$
 */
class FarragoUdrInvocationFrame
{
    //~ Instance fields --------------------------------------------------------

    // Runtime context of the statement execution that invoked the routine.
    FarragoRuntimeContext context;

    // Repository (Enki/MDR) session associated with this invocation, if any.
    EnkiMDSession reposSession;

    // Per-invocation UDR context handed to the routine.
    FarragoSessionUdrContext udrContext;

    // Presumably whether the routine may issue SQL (e.g. declared
    // READS/MODIFIES SQL DATA) — TODO confirm.
    boolean allowSql;

    // JDBC connection available to the routine (likely a loopback
    // connection when allowSql is set — confirm).
    Connection connection;

    // Authorization identifiers of the invoker.
    String invokingUser;
    String invokingRole;
}

// End FarragoUdrInvocationFrame.java
LucidDB/luciddb
farrago/src/net/sf/farrago/runtime/FarragoUdrInvocationFrame.java
Java
apache-2.0
1,449
/**
 * Licensed to Apereo under one or more contributor license
 * agreements. See the NOTICE file distributed with this work
 * for additional information regarding copyright ownership.
 * Apereo licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License.  You may obtain a
 * copy of the License at the following location:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apereo.inspektr.audit.spi.support;

import org.apereo.inspektr.audit.AuditTrailManager;

import java.util.ArrayList;
import java.util.List;

/**
 * Returns the parameters as an array of strings.
 *
 * @author Scott Battaglia
 * @since 1.0.0
 */
public class ParametersAsStringResourceResolver extends AbstractAuditResourceResolver {

    /**
     * Renders each audited-method argument via {@link #toResourceString(Object)}.
     *
     * @param args the audited method's arguments; elements are assumed
     *             non-null (a null element would NPE in the non-JSON path
     *             of {@code toResourceString})
     * @return one resource string per argument, in the original order
     */
    @Override
    protected String[] createResource(final Object[] args) {
        // Pre-size: the result has exactly one entry per argument.
        final List<String> stringArgs = new ArrayList<>(args.length);
        for (final Object arg : args) {
            stringArgs.add(toResourceString(arg));
        }
        // toArray(new String[0]) is the preferred idiom over pre-sizing the
        // destination array (and faster on modern HotSpot).
        return stringArgs.toArray(new String[0]);
    }

    /**
     * Converts a single argument to its audit representation: JSON when the
     * configured audit format is JSON, plain {@code toString()} otherwise.
     *
     * @param arg the value to render; must be non-null for the
     *            {@code toString()} path
     * @return the rendered resource string
     */
    public String toResourceString(final Object arg) {
        if (auditFormat == AuditTrailManager.AuditFormats.JSON) {
            return AuditTrailManager.toJson(arg);
        }
        return arg.toString();
    }
}
Jasig/inspektr
inspektr-audit/src/main/java/org/apereo/inspektr/audit/spi/support/ParametersAsStringResourceResolver.java
Java
apache-2.0
1,683
/* Copyright 2021 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package sidecar import ( "archive/tar" "compress/gzip" "context" "encoding/json" "fmt" "io" "io/ioutil" "net/http" "os" "path/filepath" "strings" "sync" "github.com/mattn/go-zglob" "github.com/sirupsen/logrus" "golang.org/x/sync/semaphore" "gopkg.in/ini.v1" kerrors "k8s.io/apimachinery/pkg/util/errors" "k8s.io/test-infra/prow/secretutil" ) // defaultBufferSize is the default buffer size, 10MiB. const defaultBufferSize = 10 * 1024 * 1024 func (o Options) censor() error { var concurrency int64 if o.CensoringOptions.CensoringConcurrency == nil { concurrency = int64(10) } else { concurrency = *o.CensoringOptions.CensoringConcurrency } logrus.WithField("concurrency", concurrency).Debug("Censoring artifacts.") sem := semaphore.NewWeighted(concurrency) wg := &sync.WaitGroup{} errors := make(chan error) var errs []error errLock := &sync.Mutex{} go func() { errLock.Lock() for err := range errors { errs = append(errs, err) } errLock.Unlock() }() secrets, err := loadSecrets(o.CensoringOptions.SecretDirectories, o.CensoringOptions.IniFilenames) if err != nil { return fmt.Errorf("could not load secrets: %w", err) } logrus.WithField("secrets", len(secrets)).Debug("Loaded secrets to censor.") censorer := secretutil.NewCensorer() censorer.RefreshBytes(secrets...) 
bufferSize := defaultBufferSize if o.CensoringOptions.CensoringBufferSize != nil { bufferSize = *o.CensoringOptions.CensoringBufferSize } if largest := censorer.LargestSecret(); 2*largest > bufferSize { bufferSize = 2 * largest } logrus.WithField("buffer_size", bufferSize).Debug("Determined censoring buffer size.") censorFile := fileCensorer(sem, errors, censorer, bufferSize) censor := func(file string) { censorFile(wg, file) } for _, entry := range o.Entries { logPath := entry.ProcessLog censor(logPath) } for _, item := range o.GcsOptions.Items { if err := filepath.Walk(item, func(absPath string, info os.FileInfo, err error) error { if err != nil { return err } if info.IsDir() || info.Mode()&os.ModeSymlink == os.ModeSymlink { return nil } logger := logrus.WithField("path", absPath) relpath, shouldNotErr := filepath.Rel(item, absPath) if shouldNotErr != nil { logrus.WithError(shouldNotErr).Warnf("filepath.Rel returned an error, but we assumed there must be a relative path between %s and %s", item, absPath) } should, err := shouldCensor(*o.CensoringOptions, relpath) if err != nil { return fmt.Errorf("could not determine if we should censor path: %w", err) } if !should { return nil } contentType, err := determineContentType(absPath) if err != nil { return fmt.Errorf("could not determine content type of %s: %w", absPath, err) } switch contentType { case "application/x-gzip", "application/zip": logger.Debug("Censoring archive.") if err := handleArchive(absPath, censorFile); err != nil { return fmt.Errorf("could not censor archive %s: %w", absPath, err) } default: logger.Debug("Censoring file.") censor(absPath) } return nil }); err != nil { return fmt.Errorf("could not walk items to censor them: %w", err) } } wg.Wait() close(errors) errLock.Lock() return kerrors.NewAggregate(errs) } func shouldCensor(options CensoringOptions, path string) (bool, error) { for _, glob := range options.ExcludeDirectories { found, err := zglob.Match(glob, path) if err != nil { return false, 
err } if found { return false, nil // when explicitly excluded, do not censor } } for _, glob := range options.IncludeDirectories { found, err := zglob.Match(glob, path) if err != nil { return false, err } if found { return true, nil // when explicitly included, censor } } return len(options.IncludeDirectories) == 0, nil // censor if no explicit includes exist } // fileCensorer returns a closure over all of our synchronization for a clean handler signature func fileCensorer(sem *semaphore.Weighted, errors chan<- error, censorer secretutil.Censorer, bufferSize int) func(wg *sync.WaitGroup, file string) { return func(wg *sync.WaitGroup, file string) { wg.Add(1) go func() { if err := sem.Acquire(context.Background(), 1); err != nil { errors <- err return } defer sem.Release(1) defer wg.Done() errors <- handleFile(file, censorer, bufferSize) }() } } // determineContentType determines the content type of the file func determineContentType(path string) (string, error) { file, err := os.Open(path) if err != nil { return "", fmt.Errorf("could not open file to check content type: %w", err) } defer func() { if err := file.Close(); err != nil { logrus.WithError(err).Warn("Could not close input file while detecting content type.") } }() header := make([]byte, 512) if _, err := file.Read(header); err != nil && err != io.EOF { return "", fmt.Errorf("could not read file to check content type: %w", err) } return http.DetectContentType(header), nil } // handleArchive unravels the archive in order to censor data in the files that were added to it. 
// This is mostly stolen from build/internal/untar/untar.go func handleArchive(archivePath string, censor func(wg *sync.WaitGroup, file string)) error { outputDir, err := ioutil.TempDir("", "tmp-unpack") if err != nil { return fmt.Errorf("could not create temporary dir for unpacking: %w", err) } defer func() { if err := os.RemoveAll(outputDir); err != nil { logrus.WithError(err).Warn("Failed to clean up temporary directory for archive") } }() if err := unarchive(archivePath, outputDir); err != nil { return fmt.Errorf("could not unpack archive: %w", err) } children := &sync.WaitGroup{} if err := filepath.Walk(outputDir, func(absPath string, info os.FileInfo, err error) error { if info.IsDir() { return nil } censor(children, absPath) return nil }); err != nil { return fmt.Errorf("could not walk unpacked archive to censor them: %w", err) } children.Wait() if err := archive(outputDir, archivePath); err != nil { return fmt.Errorf("could not re-pack archive: %w", err) } return nil } // unarchive unpacks the archive into the destination func unarchive(archivePath, destPath string) error { input, err := os.Open(archivePath) if err != nil { return fmt.Errorf("could not open archive for unpacking: %w", err) } zipReader, err := gzip.NewReader(input) if err != nil { return fmt.Errorf("could not read archive: %w", err) } tarReader := tar.NewReader(zipReader) defer func() { if err := zipReader.Close(); err != nil { logrus.WithError(err).Warn("Could not close zip reader after unarchiving.") } if err := input.Close(); err != nil { logrus.WithError(err).Warn("Could not close input file after unarchiving.") } }() for { entry, err := tarReader.Next() if err == io.EOF { break } if err != nil { return fmt.Errorf("could not read archive: %w", err) } if !validRelPath(entry.Name) { return fmt.Errorf("tar contained invalid name error %q", entry.Name) } rel := filepath.FromSlash(entry.Name) abs := filepath.Join(destPath, rel) mode := entry.FileInfo().Mode() switch { case mode.IsDir(): if 
err := os.MkdirAll(abs, 0755); err != nil { return fmt.Errorf("could not create directory while unpacking archive: %w", err) } case mode.IsRegular(): file, err := os.OpenFile(abs, os.O_RDWR|os.O_CREATE|os.O_TRUNC, mode.Perm()) if err != nil { return err } n, err := io.Copy(file, tarReader) if closeErr := file.Close(); closeErr != nil && err == nil { return fmt.Errorf("error closing %s: %w", abs, closeErr) } if err != nil { return fmt.Errorf("error writing to %s: %w", abs, err) } if n != entry.Size { return fmt.Errorf("only wrote %d bytes to %s; expected %d", n, abs, entry.Size) } } } return nil } func validRelPath(p string) bool { if p == "" || strings.Contains(p, `\`) || strings.HasPrefix(p, "/") || strings.Contains(p, "../") { return false } return true } // archive re-packs the dir into the destination func archive(srcDir, destArchive string) error { // we want the temporary file we use for output to be in the same directory as the real destination, so // we can be certain that our final os.Rename() call will not have to operate across a device boundary output, err := ioutil.TempFile(filepath.Dir(destArchive), "tmp-archive") if err != nil { return fmt.Errorf("failed to create temporary file for archive: %w", err) } zipWriter := gzip.NewWriter(output) tarWriter := tar.NewWriter(zipWriter) defer func() { if err := tarWriter.Close(); err != nil { logrus.WithError(err).Warn("Could not close tar writer after archiving.") } if err := zipWriter.Close(); err != nil { logrus.WithError(err).Warn("Could not close zip writer after archiving.") } if err := output.Close(); err != nil { logrus.WithError(err).Warn("Could not close output file after archiving.") } }() if err := filepath.Walk(srcDir, func(absPath string, info os.FileInfo, err error) error { if err != nil { return err } // Handle symlinks. See https://stackoverflow.com/a/40003617. 
var link string if info.Mode()&os.ModeSymlink == os.ModeSymlink { if link, err = os.Readlink(absPath); err != nil { return err } } // "link" is only used by FileInfoHeader if "info" here is a symlink. // See https://pkg.go.dev/archive/tar#FileInfoHeader. header, err := tar.FileInfoHeader(info, link) if err != nil { return fmt.Errorf("could not create tar header: %w", err) } // the header won't get nested paths right relpath, shouldNotErr := filepath.Rel(srcDir, absPath) if shouldNotErr != nil { logrus.WithError(shouldNotErr).Warnf("filepath.Rel returned an error, but we assumed there must be a relative path between %s and %s", srcDir, absPath) } header.Name = relpath if err := tarWriter.WriteHeader(header); err != nil { return fmt.Errorf("could not write tar header: %w", err) } if info.IsDir() { return nil } // Nothing more to do for non-regular files (symlinks). if !info.Mode().IsRegular() { return nil } file, err := os.Open(absPath) if err != nil { return fmt.Errorf("could not open source file: %w", err) } n, err := io.Copy(tarWriter, file) if err != nil { return fmt.Errorf("could not tar file: %w", err) } if n != info.Size() { return fmt.Errorf("only wrote %d bytes from %s; expected %d", n, absPath, info.Size()) } if err := file.Close(); err != nil { return fmt.Errorf("could not close source file: %w", err) } return nil }); err != nil { return fmt.Errorf("could not walk source files to archive them: %w", err) } if err := os.Rename(output.Name(), destArchive); err != nil { return fmt.Errorf("could not overwrite archive: %w", err) } return nil } // handleFile censors the content of a file by streaming it to a new location, then overwriting the previous // location, to make it seem like this happened in place on the filesystem func handleFile(path string, censorer secretutil.Censorer, bufferSize int) error { input, err := os.Open(path) if err != nil { return fmt.Errorf("could not open file for censoring: %w", err) } // we want the temporary file we use for output 
to be in the same directory as the real destination, so // we can be certain that our final os.Rename() call will not have to operate across a device boundary output, err := ioutil.TempFile(filepath.Dir(path), "tmp-censor") if err != nil { return fmt.Errorf("could not create temporary file for censoring: %w", err) } if err := censor(input, output, censorer, bufferSize); err != nil { return fmt.Errorf("could not censor file: %w", err) } if err := os.Rename(output.Name(), path); err != nil { return fmt.Errorf("could not overwrite file after censoring: %w", err) } return nil } // censor censors input data and streams it to the output. We have a memory footprint of bufferSize bytes. func censor(input io.ReadCloser, output io.WriteCloser, censorer secretutil.Censorer, bufferSize int) error { if bufferSize%2 != 0 { return fmt.Errorf("frame size must be even, not %d", bufferSize) } defer func() { if err := input.Close(); err != nil { logrus.WithError(err).Warn("Could not close input file after censoring.") } if err := output.Close(); err != nil { logrus.WithError(err).Warn("Could not close output file after censoring.") } }() buffer := make([]byte, bufferSize) frameSize := bufferSize / 2 // bootstrap the algorithm by reading in the first half-frame numInitialized, initializeErr := input.Read(buffer[:frameSize]) // handle read errors - if we read everything in this init step, the next read will return 0, EOF and // we can flush appropriately as part of the process loop if initializeErr != nil && initializeErr != io.EOF { return fmt.Errorf("could not read data from input file before censoring: %w", initializeErr) } frameSize = numInitialized // this will normally be bufferSize/2 but will be smaller at the end of the file for { // populate the second half of the buffer with new data numRead, readErr := input.Read(buffer[frameSize:]) if readErr != nil && readErr != io.EOF { return fmt.Errorf("could not read data from input file before censoring: %w", readErr) } // censor the 
full buffer and flush the first half to the output censorer.Censor(&buffer) numWritten, writeErr := output.Write(buffer[:frameSize]) if writeErr != nil { return fmt.Errorf("could not write data to output file after censoring: %w", writeErr) } if numWritten != frameSize { // TODO: we could retry here I guess? When would a filesystem write less than expected and not error? return fmt.Errorf("only wrote %d out of %d bytes after censoring", numWritten, frameSize) } // shift the buffer over and get ready to repopulate the rest with new data copy(buffer[:numRead], buffer[frameSize:frameSize+numRead]) frameSize = numRead if readErr == io.EOF { break } } return nil } // loadSecrets loads all files under the paths into memory func loadSecrets(paths, iniFilenames []string) ([][]byte, error) { var secrets [][]byte for _, path := range paths { if err := filepath.Walk(path, func(path string, info os.FileInfo, err error) error { if err != nil { return err } if strings.HasPrefix(info.Name(), "..") { // kubernetes volumes also include files we // should not look be looking into for keys if info.IsDir() { return filepath.SkipDir } return nil } if info.IsDir() { return nil } raw, err := ioutil.ReadFile(path) if err != nil { return err } secrets = append(secrets, raw) // In many cases, a secret file contains much more than just the sensitive data. For instance, // container registry credentials files are JSON formatted, so there are only a couple of fields // that are truly secret, the rest is formatting and whitespace. The implication here is that // a censoring approach that only looks at the full, uninterrupted secret value will not be able // to censor anything if that value is reformatted, truncated, etc. 
When the secrets we are asked // to censor are container registry credentials, we can know the format of these files and extract // the subsets of data that are sensitive, allowing us not only to censor the full file's contents // but also any individual fields that exist in the output, whether they're there due to a user // extracting the fields or output being truncated, etc. var parser = func(bytes []byte) ([]string, error) { return nil, nil } if info.Name() == ".dockercfg" { parser = loadDockercfgAuths } if info.Name() == ".dockerconfigjson" { parser = loadDockerconfigJsonAuths } for _, filename := range iniFilenames { if info.Name() == filename { parser = loadIniData break } } extra, parseErr := parser(raw) if parseErr != nil { return fmt.Errorf("could not read %s as a docker secret: %w", path, parseErr) } // It is important that these are added to the list of secrets *after* their parent data // as we will censor in order and this will give a reasonable guarantee that the parent // data (a superset of any of these fields) will be censored in its entirety, first. It // remains possible that the sliding window used to censor pulls in only part of the // superset and some small part of it is censored first, making the larger superset no // longer match the file being censored. 
for _, item := range extra { secrets = append(secrets, []byte(item)) } return nil }); err != nil { return nil, err } } return secrets, nil } // loadDockercfgAuths parses auth values from a kubernetes.io/dockercfg secret func loadDockercfgAuths(content []byte) ([]string, error) { var data map[string]authEntry if err := json.Unmarshal(content, &data); err != nil { return nil, err } var entries []authEntry for _, entry := range data { entries = append(entries, entry) } return collectSecretsFrom(entries), nil } // loadDockerconfigJsonAuths parses auth values from a kubernetes.io/dockercfgjson secret func loadDockerconfigJsonAuths(content []byte) ([]string, error) { var data = struct { Auths map[string]authEntry `json:"auths"` }{} if err := json.Unmarshal(content, &data); err != nil { return nil, err } var entries []authEntry for _, entry := range data.Auths { entries = append(entries, entry) } return collectSecretsFrom(entries), nil } // authEntry holds credentials for authentication to registries type authEntry struct { Password string `json:"password"` Auth string `json:"auth"` } func collectSecretsFrom(entries []authEntry) []string { var auths []string for _, entry := range entries { if entry.Auth != "" { auths = append(auths, entry.Auth) } if entry.Password != "" { auths = append(auths, entry.Password) } } return auths } func handleSection(section *ini.Section, extra []string) []string { for _, subsection := range section.ChildSections() { extra = handleSection(subsection, extra) } for _, key := range section.Keys() { extra = append(extra, key.Value()) } return extra } // loadIniData parses key-value data from an INI file func loadIniData(content []byte) ([]string, error) { cfg, err := ini.Load(content) if err != nil { return nil, err } var extra []string for _, section := range cfg.Sections() { extra = handleSection(section, extra) } return extra, nil }
cblecker/test-infra
prow/sidecar/censor.go
GO
apache-2.0
19,195
# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Metadata request handler.""" import hashlib import hmac import os from oslo_config import cfg from oslo_log import log as logging import six import webob.dec import webob.exc from nova.api.metadata import base from nova import conductor from nova import exception from nova.i18n import _ from nova.i18n import _LE from nova.i18n import _LW from nova.openstack.common import memorycache from nova import utils from nova import wsgi CACHE_EXPIRATION = 15 # in seconds CONF = cfg.CONF CONF.import_opt('use_forwarded_for', 'nova.api.auth') metadata_proxy_opts = [ cfg.BoolOpt( 'service_metadata_proxy', default=False, help='Set flag to indicate Neutron will proxy metadata requests and ' 'resolve instance ids.'), cfg.StrOpt( 'metadata_proxy_shared_secret', default='', secret=True, help='Shared secret to validate proxies Neutron metadata requests'), ] CONF.register_opts(metadata_proxy_opts, 'neutron') LOG = logging.getLogger(__name__) class MetadataRequestHandler(wsgi.Application): """Serve metadata.""" def __init__(self): self._cache = memorycache.get_client() self.conductor_api = conductor.API() def get_metadata_by_remote_address(self, address): if not address: raise exception.FixedIpNotFoundForAddress(address=address) cache_key = 'metadata-%s' % address data = self._cache.get(cache_key) 
if data: return data try: data = base.get_metadata_by_address(self.conductor_api, address) except exception.NotFound: return None self._cache.set(cache_key, data, CACHE_EXPIRATION) return data def get_metadata_by_instance_id(self, instance_id, address): cache_key = 'metadata-%s' % instance_id data = self._cache.get(cache_key) if data: return data try: data = base.get_metadata_by_instance_id(self.conductor_api, instance_id, address) except exception.NotFound: return None self._cache.set(cache_key, data, CACHE_EXPIRATION) return data @webob.dec.wsgify(RequestClass=wsgi.Request) def __call__(self, req): if os.path.normpath(req.path_info) == "/": resp = base.ec2_md_print(base.VERSIONS + ["latest"]) req.response.body = resp req.response.content_type = base.MIME_TYPE_TEXT_PLAIN return req.response if CONF.neutron.service_metadata_proxy: meta_data = self._handle_instance_id_request(req) else: if req.headers.get('X-Instance-ID'): LOG.warning( _LW("X-Instance-ID present in request headers. The " "'service_metadata_proxy' option must be " "enabled to process this header.")) meta_data = self._handle_remote_ip_request(req) if meta_data is None: raise webob.exc.HTTPNotFound() try: data = meta_data.lookup(req.path_info) except base.InvalidMetadataPath: raise webob.exc.HTTPNotFound() if callable(data): return data(req, meta_data) resp = base.ec2_md_print(data) if isinstance(resp, six.text_type): req.response.text = resp else: req.response.body = resp req.response.content_type = meta_data.get_mimetype() return req.response def _handle_remote_ip_request(self, req): remote_address = req.remote_addr if CONF.use_forwarded_for: remote_address = req.headers.get('X-Forwarded-For', remote_address) try: meta_data = self.get_metadata_by_remote_address(remote_address) except Exception: LOG.exception(_LE('Failed to get metadata for ip: %s'), remote_address) msg = _('An unknown error has occurred. 
' 'Please try your request again.') raise webob.exc.HTTPInternalServerError( explanation=six.text_type(msg)) if meta_data is None: LOG.error(_LE('Failed to get metadata for ip: %s'), remote_address) return meta_data def _handle_instance_id_request(self, req): instance_id = req.headers.get('X-Instance-ID') tenant_id = req.headers.get('X-Tenant-ID') signature = req.headers.get('X-Instance-ID-Signature') remote_address = req.headers.get('X-Forwarded-For') # Ensure that only one header was passed if instance_id is None: msg = _('X-Instance-ID header is missing from request.') elif signature is None: msg = _('X-Instance-ID-Signature header is missing from request.') elif tenant_id is None: msg = _('X-Tenant-ID header is missing from request.') elif not isinstance(instance_id, six.string_types): msg = _('Multiple X-Instance-ID headers found within request.') elif not isinstance(tenant_id, six.string_types): msg = _('Multiple X-Tenant-ID headers found within request.') else: msg = None if msg: raise webob.exc.HTTPBadRequest(explanation=msg) expected_signature = hmac.new( CONF.neutron.metadata_proxy_shared_secret, instance_id, hashlib.sha256).hexdigest() if not utils.constant_time_compare(expected_signature, signature): if instance_id: LOG.warning(_LW('X-Instance-ID-Signature: %(signature)s does ' 'not match the expected value: ' '%(expected_signature)s for id: ' '%(instance_id)s. Request From: ' '%(remote_address)s'), {'signature': signature, 'expected_signature': expected_signature, 'instance_id': instance_id, 'remote_address': remote_address}) msg = _('Invalid proxy request signature.') raise webob.exc.HTTPForbidden(explanation=msg) try: meta_data = self.get_metadata_by_instance_id(instance_id, remote_address) except Exception: LOG.exception(_LE('Failed to get metadata for instance id: %s'), instance_id) msg = _('An unknown error has occurred. 
' 'Please try your request again.') raise webob.exc.HTTPInternalServerError( explanation=six.text_type(msg)) if meta_data is None: LOG.error(_LE('Failed to get metadata for instance id: %s'), instance_id) elif meta_data.instance.project_id != tenant_id: LOG.warning(_LW("Tenant_id %(tenant_id)s does not match tenant_id " "of instance %(instance_id)s."), {'tenant_id': tenant_id, 'instance_id': instance_id}) # causes a 404 to be raised meta_data = None return meta_data
cloudbase/nova-virtualbox
nova/api/metadata/handler.py
Python
apache-2.0
8,152
/*** Copyright (c) 2012 CommonsWare, LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0. Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Covered in detail in the book _The Busy Coder's Guide to Android Development_ https://commonsware.com/Android */ package com.commonsware.android.webbeam; import android.annotation.SuppressLint; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.webkit.WebView; import android.webkit.WebViewClient; public class BeamFragment extends WebViewFragment { @Override public void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); setHasOptionsMenu(true); } @SuppressLint("SetJavaScriptEnabled") @Override public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); getWebView().setWebViewClient(new BeamClient()); getWebView().getSettings().setJavaScriptEnabled(true); loadUrl("https://google.com"); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { if (getContract().hasNFC()) { inflater.inflate(R.menu.actions, menu); } super.onCreateOptionsMenu(menu, inflater); } @Override public boolean onOptionsItemSelected(MenuItem item) { if (item.getItemId() == R.id.beam) { getContract().enablePush(); return(true); } return(super.onOptionsItemSelected(item)); } WebBeamActivity getContract() { 
return((WebBeamActivity)getActivity()); } String getUrl() { return(getWebView().getUrl()); } void loadUrl(String url) { android.util.Log.d(getClass().getSimpleName(), url); getWebView().stopLoading(); getWebView().loadUrl(url); } class BeamClient extends WebViewClient { @Override public boolean shouldOverrideUrlLoading(WebView wv, String url) { wv.loadUrl(url); return(true); } } }
commonsguy/cw-omnibus
NFC/WebBeam/app/src/main/java/com/commonsware/android/webbeam/BeamFragment.java
Java
apache-2.0
2,587
package com.qiaoba.qbcp.db; import com.lidroid.xutils.db.sqlite.WhereBuilder; import com.lidroid.xutils.exception.DbException; import com.qiaoba.qbcp.QbcpApplication; public abstract class AbsDbOperation implements IDbOperation{ DbManager mDbManager = QbcpApplication.mApp.mQbDbManager; @Override public boolean saveData(EntityBase mEntity) { try { mDbManager.getContentDb().save(mEntity); return true; } catch (DbException e) { return false; } } public abstract String getDbName (); @Override public boolean deleteDataFromDb(String sql) { try { mDbManager.getContentDb().execNonQuery(sql); return true; } catch (DbException e) { return false; } } @Override public boolean updateDataFromDb(String sql) { try { mDbManager.getContentDb().execNonQuery(sql); return true; } catch (DbException e) { return false; } } public DbManager getDBDbManager(){ return mDbManager; } public void clearDbData() { try { mDbManager.getContentDb().execNonQuery("delete from "+ getDbName()); } catch (DbException e) { e.printStackTrace(); } } public void insertOrUpdate(EntityBase mUser,WhereBuilder mWhereBuilder){ boolean isInsertSuccess = saveData(mUser); if (!isInsertSuccess) { try { getDBDbManager().getContentDb() .update(mUser, mWhereBuilder); } catch (DbException e) { } } } }
zhujohnle/qbcp
QBcp/src/com/qiaoba/qbcp/db/AbsDbOperation.java
Java
apache-2.0
1,523
package eu.humanbrainproject.mip.algorithms.serializers.pfa; import eu.humanbrainproject.mip.algorithms.SimpleAlgorithm; import java.util.Arrays; import java.util.List; public class NumericalInputDescription extends InputDescription<SimpleAlgorithm> { public NumericalInputDescription(SimpleAlgorithm algorithm) { super(algorithm); } @Override protected VariableType getType(String variable) throws Exception { return VariableType.REAL; } @Override protected String getQuery() { return "SELECT input data"; } @Override protected int getDataSize() throws Exception { return 10; } @Override protected String[] getVariables() { return new String[] {"var1"}; } @Override protected String[] getCovariables() { return new String[] {"num1", "num2", "num3", "num4"}; } }
woken-ml/java-base-docker-images
java-mip/src/test/java/eu/humanbrainproject/mip/algorithms/serializers/pfa/NumericalInputDescription.java
Java
apache-2.0
889
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.management.internal.cli.util; import static java.util.stream.Collectors.toList; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintWriter; import java.nio.file.Path; import java.nio.file.Paths; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.io.FileUtils; import org.apache.logging.log4j.Logger; import org.apache.geode.internal.logging.MergeLogFiles; import org.apache.geode.logging.internal.log4j.api.LogService; import org.apache.geode.management.internal.cli.GfshParser; import org.apache.geode.management.internal.i18n.CliStrings; /** * @since GemFire 7.0 */ public class MergeLogs { private static final Logger logger = LogService.getLogger(); public static void main(String[] args) { if (args.length < 1 || args.length > 1) { throw new IllegalArgumentException("Requires only 1 arguments : <targetDirName>"); } try { String result = mergeLogFile(args[0]).getCanonicalPath(); System.out.println("Merged logs to: " + result); } 
catch (Exception e) { System.out.println(e.getMessage()); } } public static void mergeLogsInNewProcess(Path logDirectory) { // create a new process for merging logger.info("Exporting logs merging logs" + logDirectory); List<String> commandList = new ArrayList<String>(); commandList.add( System.getProperty("java.home") + File.separatorChar + "bin" + File.separatorChar + "java"); commandList.add("-classpath"); commandList.add(System.getProperty("java.class.path", ".")); commandList.add(MergeLogs.class.getName()); commandList.add(logDirectory.toAbsolutePath().toString()); ProcessBuilder procBuilder = new ProcessBuilder(commandList); StringBuilder output = new StringBuilder(); try { logger.info("Exporting logs now merging logs"); Process mergeProcess = procBuilder.redirectErrorStream(true).start(); mergeProcess.waitFor(); InputStream inputStream = mergeProcess.getInputStream(); BufferedReader br = new BufferedReader(new InputStreamReader(inputStream)); String line = null; while ((line = br.readLine()) != null) { output.append(line).append(GfshParser.LINE_SEPARATOR); } mergeProcess.destroy(); } catch (Exception e) { logger.error(e.getMessage()); } if (output.toString().contains("Merged logs to: ")) { logger.info("Exporting logs successfully merged logs"); } else { logger.error("Could not merge"); } } protected static List<File> findLogFilesToMerge(File dir) { return FileUtils.listFiles(dir, new String[] {"log"}, true).stream().collect(toList()); } static File mergeLogFile(String dirName) throws Exception { Path dir = Paths.get(dirName); List<File> logsToMerge = findLogFilesToMerge(dir.toFile()); Map<String, InputStream> logFiles = new HashMap<>(); for (int i = 0; i < logsToMerge.size(); i++) { try { logFiles.put(dir.relativize(logsToMerge.get(i).toPath()).toString(), new FileInputStream(logsToMerge.get(i))); } catch (FileNotFoundException e) { throw new Exception( logsToMerge.get(i) + " " + CliStrings.EXPORT_LOGS__MSG__FILE_DOES_NOT_EXIST); } } PrintWriter mergedLog = 
null; File mergedLogFile = null; try { String mergeLog = dirName + File.separator + "merge_" + new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss").format(new java.util.Date()) + ".log"; mergedLogFile = new File(mergeLog); mergedLog = new PrintWriter(mergedLogFile); MergeLogFiles.mergeLogFiles(logFiles, mergedLog); } catch (FileNotFoundException e) { throw new Exception( "FileNotFoundException in creating PrintWriter in MergeLogFiles" + e.getMessage()); } catch (Exception e) { throw new Exception("Exception in creating PrintWriter in MergeLogFiles" + e.getMessage()); } return mergedLogFile; } }
davinash/geode
geode-gfsh/src/main/java/org/apache/geode/management/internal/cli/util/MergeLogs.java
Java
apache-2.0
5,006
// Copyright 2015 The Project Buendia Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at: http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distrib-
// uted under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
// OR CONDITIONS OF ANY KIND, either express or implied. See the License for
// specific language governing permissions and limitations under the License.

package org.projectbuendia.client.user;

import android.content.OperationApplicationException;
import android.os.AsyncTask;
import android.os.RemoteException;

import com.android.volley.VolleyError;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

import org.projectbuendia.client.events.user.ActiveUserSetEvent;
import org.projectbuendia.client.events.user.ActiveUserUnsetEvent;
import org.projectbuendia.client.events.user.KnownUsersLoadFailedEvent;
import org.projectbuendia.client.events.user.KnownUsersLoadedEvent;
import org.projectbuendia.client.events.user.KnownUsersSyncFailedEvent;
import org.projectbuendia.client.events.user.KnownUsersSyncedEvent;
import org.projectbuendia.client.events.user.UserAddFailedEvent;
import org.projectbuendia.client.events.user.UserAddedEvent;
import org.projectbuendia.client.json.JsonNewUser;
import org.projectbuendia.client.json.JsonUser;
import org.projectbuendia.client.utils.AsyncTaskRunner;
import org.projectbuendia.client.utils.EventBusInterface;
import org.projectbuendia.client.utils.Logger;

import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ExecutionException;

import javax.annotation.Nullable;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * Manages the available logins and the currently logged-in user.
 * <p/>
 * <p>All classes that care about the current active user should be able to gracefully handle the
 * following event bus events:
 * <ul>
 * <li>{@link ActiveUserSetEvent}
 * <li>{@link ActiveUserUnsetEvent}
 * </ul>
 * <p/>
 * <p>All classes that care about all known users should additionally be able to gracefully handle
 * the following event bus events:
 * <ul>
 * <li>{@link KnownUsersLoadedEvent}
 * <li>{@link KnownUsersLoadFailedEvent}
 * <li>{@link KnownUsersSyncedEvent}
 * <li>{@link KnownUsersSyncFailedEvent}
 * </ul>
 * <p/>
 * <p>All classes that care about being able to add and delete users should additionally be able
 * gracefully handle the following event bus events:
 * <ul>
 * <li>{@link UserAddedEvent}
 * <li>{@link UserAddFailedEvent}
 * </ul>
 * <p/>
 * <p>All methods should be called on the main thread.
 */
public class UserManager {

    private static final Logger LOG = Logger.create();

    private final UserStore mUserStore;
    private final EventBusInterface mEventBus;
    private final AsyncTaskRunner mAsyncTaskRunner;

    // All users currently known to the app; populated by load/sync tasks.
    private final Set<JsonUser> mKnownUsers = new HashSet<>();
    // True once the local cache has been consulted (see loadKnownUsers()).
    private boolean mSynced = false;
    private boolean mAutoCancelEnabled = false;
    private boolean mIsDirty = false;
    // Raw AsyncTask kept as in the original; the most recently started load task.
    @Nullable private AsyncTask mLastTask;
    @Nullable private JsonUser mActiveUser;

    /**
     * Utility function for automatically canceling user load tasks to simulate network connectivity
     * issues.
     * TODO: Move to a fake or mock out when daggered.
     */
    public void setAutoCancelEnabled(boolean autoCancelEnabled) {
        mAutoCancelEnabled = autoCancelEnabled;
    }

    /** Resets the UserManager to its initial empty state. */
    public void reset() {
        mKnownUsers.clear();
        mSynced = false;
    }

    /**
     * If true, users have been recently updated and any data relying on a specific view of users
     * may be out of sync.
     */
    public boolean isDirty() {
        return mIsDirty;
    }

    /** Sets whether or not users have been recently updated. */
    public void setDirty(boolean shouldInvalidateFormCache) {
        mIsDirty = shouldInvalidateFormCache;
    }

    /**
     * Loads the set of all users known to the application from local cache.
     * <p/>
     * <p>This method will post a {@link KnownUsersLoadedEvent} if the known users were
     * successfully loaded and a {@link KnownUsersLoadFailedEvent} otherwise.
     * <p/>
     * <p>This method will only perform a local cache lookup once per application lifetime.
     */
    public void loadKnownUsers() {
        if (!mSynced) {
            mLastTask = new LoadKnownUsersTask();
            mAsyncTaskRunner.runTask(mLastTask);
        } else {
            // Cache already consulted: answer immediately with a defensive copy.
            mEventBus.post(new KnownUsersLoadedEvent(ImmutableSet.copyOf(mKnownUsers)));
        }
    }

    /** Sync users synchronously. Blocks until the list of users is synced, or interrupted. */
    public void syncKnownUsersSynchronously()
        throws InterruptedException, ExecutionException, RemoteException,
        OperationApplicationException, UserSyncException {
        onUsersSynced(mUserStore.syncKnownUsers());
    }

    /**
     * Called when users are retrieved from the server, in order to send events and update user
     * state as necessary.
     *
     * @throws UserSyncException if the server returned no users at all
     */
    private void onUsersSynced(Set<JsonUser> syncedUsers) throws UserSyncException {
        if (syncedUsers == null || syncedUsers.isEmpty()) {
            throw new UserSyncException("Set of users retrieved from server is null or empty.");
        }

        // Diff the server's view against ours before replacing it.
        ImmutableSet<JsonUser> addedUsers =
            ImmutableSet.copyOf(Sets.difference(syncedUsers, mKnownUsers));
        ImmutableSet<JsonUser> deletedUsers =
            ImmutableSet.copyOf(Sets.difference(mKnownUsers, syncedUsers));
        mKnownUsers.clear();
        mKnownUsers.addAll(syncedUsers);
        mEventBus.post(new KnownUsersSyncedEvent(addedUsers, deletedUsers));

        if (mActiveUser != null && deletedUsers.contains(mActiveUser)) {
            // TODO: Potentially clear mActiveUser here.
            mEventBus.post(new ActiveUserUnsetEvent(
                mActiveUser, ActiveUserUnsetEvent.REASON_USER_DELETED));
        }

        // If at least one user was added or deleted, the set of known users has changed.
        if (!addedUsers.isEmpty() || !deletedUsers.isEmpty()) {
            setDirty(true);
        }
    }

    /** Returns the current active user or {@code null} if no user is active. */
    @Nullable public JsonUser getActiveUser() {
        return mActiveUser;
    }

    /**
     * Sets the current active user or unsets it if {@code activeUser} is {@code null}, returning
     * whether the operation succeeded.
     * <p/>
     * <p>This method will fail if the specified user is not known to the application.
     * <p/>
     * <p>This method will post an {@link ActiveUserSetEvent} if the active user was successfully
     * set and an {@link ActiveUserUnsetEvent} if the active user was unset successfully; these
     * events will be posted even if the active user did not change.
     */
    public boolean setActiveUser(@Nullable JsonUser activeUser) {
        @Nullable JsonUser previousActiveUser = mActiveUser;
        if (activeUser == null) {
            mActiveUser = null;
            mEventBus.post(new ActiveUserUnsetEvent(
                previousActiveUser, ActiveUserUnsetEvent.REASON_UNSET_INVOKED));
            return true;
        }

        if (!mKnownUsers.contains(activeUser)) {
            LOG.e("Couldn't switch user -- new user is not known");
            return false;
        }

        mActiveUser = activeUser;
        mEventBus.post(new ActiveUserSetEvent(previousActiveUser, activeUser));
        return true;
    }

    /**
     * Adds a user to the set of known users, both locally and on the server.
     * <p/>
     * <p>This method will post a {@link UserAddedEvent} if the user was added successfully and a
     * {@link UserAddFailedEvent} otherwise.
     */
    public void addUser(JsonNewUser user) {
        checkNotNull(user);
        // TODO: Validate user.
        mAsyncTaskRunner.runTask(new AddUserTask(user));
    }

    /** Thrown when an error occurs syncing users from server. */
    public static class UserSyncException extends Exception {
        // FIX: extends Exception instead of Throwable. Direct Throwable subclasses are
        // neither Exceptions nor Errors, which defeats conventional exception handling
        // (Effective Java; JLS ch. 11). As a checked type, every existing caller already
        // names UserSyncException explicitly, so this change is caller-compatible.
        public UserSyncException(String s) {
            super(s);
        }
    }

    /**
     * Creates a UserManager. All collaborators are required.
     *
     * @param userStore       persistence/sync backend for users
     * @param eventBus        bus on which all user events are posted
     * @param asyncTaskRunner executor abstraction for the load/add tasks
     */
    UserManager(
        UserStore userStore,
        EventBusInterface eventBus,
        AsyncTaskRunner asyncTaskRunner) {
        mAsyncTaskRunner = checkNotNull(asyncTaskRunner);
        mEventBus = checkNotNull(eventBus);
        mUserStore = checkNotNull(userStore);
    }

    /**
     * Loads known users from the database into memory.
     * <p/>
     * <p>Forces a network sync if the database has not been downloaded yet.
     */
    private class LoadKnownUsersTask extends AsyncTask<Object, Void, Set<JsonUser>> {
        @Override protected Set<JsonUser> doInBackground(Object... unusedObjects) {
            if (mAutoCancelEnabled) {
                // Test hook: simulate a connectivity failure by self-canceling.
                cancel(true);
                return null;
            }
            try {
                return mUserStore.loadKnownUsers();
            } catch (Exception e) {
                // TODO: Figure out type of exception to throw.
                LOG.e(e, "Load users task failed");
                // NOTE(review): posted from the background thread — confirm the bus
                // implementation tolerates off-main-thread posts.
                mEventBus.post(
                    new KnownUsersLoadFailedEvent(KnownUsersLoadFailedEvent.REASON_UNKNOWN));
                return null;
            }
        }

        @Override protected void onCancelled() {
            LOG.w("Load users task cancelled");
            mEventBus.post(
                new KnownUsersLoadFailedEvent(KnownUsersLoadFailedEvent.REASON_CANCELLED));
        }

        @Override protected void onPostExecute(Set<JsonUser> knownUsers) {
            mKnownUsers.clear();
            if (knownUsers != null) {
                mKnownUsers.addAll(knownUsers);
            }
            // Mark the cache as consulted even on failure so we don't retry forever.
            mSynced = true;
            mEventBus.post(new KnownUsersLoadedEvent(ImmutableSet.copyOf(mKnownUsers)));
        }
    }

    /** Adds a user to the database asynchronously. */
    private final class AddUserTask extends AsyncTask<Void, Void, JsonUser> {
        private final JsonNewUser mUser;

        // Failure modes detected in doInBackground, consumed in onPostExecute.
        private boolean mAlreadyExists;
        private boolean mFailedToConnect;

        public AddUserTask(JsonNewUser user) {
            mUser = checkNotNull(user);
        }

        @Override protected JsonUser doInBackground(Void... voids) {
            try {
                return mUserStore.addUser(mUser);
            } catch (VolleyError e) {
                // Classify the failure by message text; there is no structured error code.
                if (e.getMessage() != null) {
                    if (e.getMessage().contains("already in use")) {
                        mAlreadyExists = true;
                    } else if (e.getMessage().contains("failed to connect")) {
                        mFailedToConnect = true;
                    }
                }
                return null;
            }
        }

        @Override protected void onPostExecute(JsonUser addedUser) {
            if (addedUser != null) {
                mKnownUsers.add(addedUser);
                mEventBus.post(new UserAddedEvent(addedUser));

                // Set of known users has changed.
                setDirty(true);
            } else if (mAlreadyExists) {
                mEventBus.post(new UserAddFailedEvent(
                    mUser, UserAddFailedEvent.REASON_USER_EXISTS_ON_SERVER));
            } else if (mFailedToConnect) {
                mEventBus.post(new UserAddFailedEvent(
                    mUser, UserAddFailedEvent.REASON_CONNECTION_ERROR));
            } else {
                mEventBus.post(new UserAddFailedEvent(mUser, UserAddFailedEvent.REASON_UNKNOWN));
            }
        }
    }
}
llvasconcellos/client
app/src/main/java/org/projectbuendia/client/user/UserManager.java
Java
apache-2.0
11,751
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
*  http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/iam/model/ListServerCertificatesResult.h>
#include <aws/core/utils/xml/XmlSerializer.h>
#include <aws/core/AmazonWebServiceResult.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/logging/LogMacros.h>

#include <utility>

using namespace Aws::IAM::Model;
using namespace Aws::Utils::Xml;
using namespace Aws::Utils::Logging;
using namespace Aws::Utils;
using namespace Aws;

ListServerCertificatesResult::ListServerCertificatesResult() :
    m_isTruncated(false)
{
}

ListServerCertificatesResult::ListServerCertificatesResult(const AmazonWebServiceResult<XmlDocument>& result) :
    m_isTruncated(false)
{
  // Delegate to the assignment operator, which performs the XML deserialization.
  *this = result;
}

// Deserializes the ListServerCertificates response payload.
ListServerCertificatesResult& ListServerCertificatesResult::operator =(const AmazonWebServiceResult<XmlDocument>& result)
{
  const XmlDocument& xmlDocument = result.GetPayload();
  XmlNode rootNode = xmlDocument.GetRootElement();

  // The interesting content may be the root itself or nested one level down.
  XmlNode resultNode = rootNode;
  if (rootNode.GetName() != "ListServerCertificatesResult")
  {
    resultNode = rootNode.FirstChild("ListServerCertificatesResult");
  }

  if(!resultNode.IsNull())
  {
    XmlNode metadataListNode = resultNode.FirstChild("ServerCertificateMetadataList");
    if(!metadataListNode.IsNull())
    {
      // Collect every <member> child of the metadata list.
      for(XmlNode memberNode = metadataListNode.FirstChild("member");
          !memberNode.IsNull();
          memberNode = memberNode.NextNode("member"))
      {
        m_serverCertificateMetadataList.push_back(memberNode);
      }
    }

    XmlNode truncatedNode = resultNode.FirstChild("IsTruncated");
    if(!truncatedNode.IsNull())
    {
      m_isTruncated = StringUtils::ConvertToBool(StringUtils::Trim(truncatedNode.GetText().c_str()).c_str());
    }

    XmlNode markerNode = resultNode.FirstChild("Marker");
    if(!markerNode.IsNull())
    {
      m_marker = StringUtils::Trim(markerNode.GetText().c_str());
    }
  }

  // Response metadata lives next to the result node, under the root.
  XmlNode responseMetadataNode = rootNode.FirstChild("ResponseMetadata");
  m_responseMetadata = responseMetadataNode;
  AWS_LOGSTREAM_DEBUG("Aws::IAM::Model::ListServerCertificatesResult", "x-amzn-request-id: " << m_responseMetadata.GetRequestId() );

  return *this;
}
kahkeng/aws-sdk-cpp
aws-cpp-sdk-iam/source/model/ListServerCertificatesResult.cpp
C++
apache-2.0
2,877
package org.toobs.framework.pres.util;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;

import org.apache.commons.jxpath.JXPathContext;
import org.apache.commons.jxpath.JXPathException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.web.util.WebUtils;
import org.toobs.framework.exception.ParameterException;
import org.toobs.framework.pres.component.config.Parameter;
import org.toobs.framework.pres.doit.config.Forward;
import org.toobs.framework.util.Configuration;

/**
 * Static helpers for building parameter maps from servlet requests and for
 * mapping configured {@link Parameter} definitions in and out of those maps
 * using JXPath expressions.
 */
@SuppressWarnings("unchecked")
public class ParameterUtil {

  private static Log log = LogFactory.getLog(ParameterUtil.class);

  // Request/session attribute names that are never copied into a parameter map.
  private static List excludedParameters;
  // Environment values addressable from parameter expressions via the '#' prefix
  // (see resolveParam).
  private static Map envParameters;

  static {
    excludedParameters = new ArrayList();
    excludedParameters.add("org.springframework.web.servlet.DispatcherServlet.THEME");
    excludedParameters.add("org.springframework.web.servlet.DispatcherServlet.THEME_RESOLVER");
    excludedParameters.add("org.springframework.web.servlet.DispatcherServlet.THEME_SOURCE");
    excludedParameters.add("org.springframework.web.servlet.DispatcherServlet.CONTEXT");
    excludedParameters.add("org.springframework.web.servlet.DispatcherServlet.LOCALE");
    excludedParameters.add("org.springframework.web.servlet.DispatcherServlet.LOCALE_RESOLVER");
    excludedParameters.add("org.springframework.web.servlet.HandlerMapping.pathWithinHandlerMapping");
    excludedParameters.add("hibernateFilter.FILTERED");

    envParameters = new HashMap();
    envParameters.put("host", Configuration.getInstance().getMainHost() );
    envParameters.put("toobs.debug", Configuration.getInstance().getProperty("toobs.debug", "false") );
  }

  /**
   * Extract the URL filename from the given request URI.
   * Delegates to <code>WebUtils.extractViewNameFromUrlPath(String)</code>.
   * @param uri the request URI (e.g. "/index.html")
   * @return the extracted URI filename (e.g. "index")
   * @see org.springframework.web.util.WebUtils#extractFilenameFromUrlPath
   */
  public static String extractViewNameFromUrlPath(String uri) {
    return WebUtils.extractFilenameFromUrlPath(uri);
  }

  /**
   * Returns the extension of the given URI, or "" if it has none.
   * NOTE(review): despite the variable name, indexOf finds the FIRST dot, so
   * "/a.b/c.html" yields "b/c.html" — confirm whether lastIndexOf was intended.
   */
  public static String extractExtensionFromUrlPath(String uri) {
    int lastDot = uri.indexOf(".");
    if (lastDot != -1) {
      return uri.substring(lastDot+1);
    }
    return "";
  }

  /** Returns the first path segment of the URI (the web-app context), or "". */
  public static String extractContextPathFromUrlPath(String uri) {
    int midSlash = uri.indexOf("/",1);
    if (midSlash != -1) {
      return uri.substring(1,midSlash);
    }
    return "";
  }

  /**
   * Resolves a Forward definition's URI against the parameter map, optionally
   * prefixing the context path taken from {@code urlPath}.
   * (The "resove" typo is part of the public API and must not be renamed.)
   */
  public static String resoveForwardPath(Forward forwardDef, Map parameters, String urlPath) {
    String forwardPath = null;
    forwardPath = ((String[])ParameterUtil.resolveParam(forwardDef.getUri(), parameters))[0];
    if (forwardPath != null && forwardDef.getUseContext()) {
      String contextPath = ParameterUtil.extractContextPathFromUrlPath(urlPath);
      forwardPath = (contextPath.length()>0 ? "/" + contextPath + "/" : "") + forwardPath;
    }
    return forwardPath;
  }

  /** Builds a parameter map for a non-component call. See the two-arg overload. */
  public static Map buildParameterMap(HttpServletRequest request) {
    return buildParameterMap(request, false);
  }

  /**
   * Flattens session attributes, request parameters and request attributes into a
   * single map. Precedence (lowest to highest): session attributes, request
   * parameters, request attributes. Also stores the query string under
   * "httpQueryString"; for component POST calls the query string is rebuilt from
   * the parameter map instead.
   */
  public static Map buildParameterMap(HttpServletRequest request, boolean compCall) {
    Map params = new HashMap();

    HttpSession session = request.getSession();
    Enumeration attributes = session.getAttributeNames();
    // Session has lowest priority
    while(attributes.hasMoreElements()){
      String thisAttribute = (String) attributes.nextElement();
      //if (session.getAttribute(thisAttribute) instanceof String) {
        params.put(thisAttribute, session.getAttribute(thisAttribute));
      //}
    }
    // Parameters next highest
    params.putAll(request.getParameterMap());
    // Attributes rule all
    attributes = request.getAttributeNames();
    while(attributes.hasMoreElements()){
      String thisAttribute = (String) attributes.nextElement();
      if (!excludedParameters.contains(thisAttribute)) {
        if (log.isDebugEnabled()) {
          log.debug("Putting " + thisAttribute + " As " + request.getAttribute(thisAttribute));
        }
        params.put(thisAttribute, request.getAttribute(thisAttribute));
      }
    }
    params.put("httpQueryString", request.getQueryString());
    if (compCall && request.getMethod().equals("POST")) {
      // For POSTed component calls, synthesize a query string from the parameters.
      StringBuffer qs = new StringBuffer();
      Iterator iter = request.getParameterMap().entrySet().iterator();
      int i = 0;
      while (iter.hasNext()) {
        Map.Entry entry = (Map.Entry)iter.next();
        String key = (String)entry.getKey();
        String[] value = (String[])entry.getValue();
        for (int j = 0; j < value.length; j++) {
          if (i > 0) qs.append("&");
          qs.append(key).append("=").append(value[j]);
          i++;
        }
      }
      params.put("httpQueryString", qs.toString());
    }
    return params;
  }

  /** Convenience overload of the six-arg mapParameters with no object list. */
  public static void mapParameters(String callingContext, Parameter[] paramMap, Map inParams, Map outParams, String scopeId) throws ParameterException {
    mapParameters(callingContext, paramMap, inParams, outParams, scopeId, null);
  }

  /**
   * Evaluates each Parameter definition against {@code inParams} (and optionally
   * against objects in {@code objectList}) via JXPath and stores the result in
   * {@code outParams}.
   *
   * @param callingContext identifier used in ParameterException messages
   * @param paramMap       parameter definitions to evaluate
   * @param inParams       source map (JXPath context root)
   * @param outParams      destination map for resolved values
   * @param scopeId        only definitions whose scope is null, "all" or equal to
   *                       this id are processed
   * @param objectList     optional objects addressed by getObjectIndex()
   * @throws ParameterException if a value is null and the definition does not set
   *                            ignoreNull, or any evaluation error occurs
   */
  public static void mapParameters(String callingContext, Parameter[] paramMap, Map inParams, Map outParams, String scopeId, ArrayList objectList) throws ParameterException {
    JXPathContext context = JXPathContext.newContext(inParams);
    for(int j = 0; j < paramMap.length; j++){
      Parameter thisParam = paramMap[j];
      Object value = null;
      String thisPath = null;
      String thisName = null;
      try {
        // Skip definitions outside the requested scope.
        if (thisParam.getScope() != null
            && !thisParam.getScope().equalsIgnoreCase("all")
            && !thisParam.getScope().equalsIgnoreCase(scopeId) ) {
          continue;
        }
        // Honor overwriteExisting=false when the input already carries a value.
        if(!thisParam.getOverwriteExisting() && inParams.get(thisParam.getName()) != null) {
          continue;
        }
        // Name and path may themselves be $/#/% expressions (see resolveParam).
        thisName = resolveParam(thisParam.getName(), inParams)[0];
        thisPath = resolveParam(thisParam.getPath(), inParams)[0];

        // Optional boolean guard expression; non-Boolean results leave it true.
        boolean condition = true;
        if (thisParam.getCondition() != null) {
          Object condObj = context.getValue(thisParam.getCondition());
          if (log.isDebugEnabled()) {
            log.debug("Condition Object: " + condObj);
          }
          if (condObj != null && condObj instanceof Boolean) {
            condition = (Boolean)condObj;
          }
        }
        if (condition) {
          if (thisParam.getIsStatic()) {
            // Static: the (resolved) path IS the value.
            value = thisPath;
          } else if (thisParam.getIsObject()) {
            // Evaluate against one of the supplied objects instead of inParams.
            if((objectList == null) || (objectList != null && thisParam.getObjectIndex() >= objectList.size())){
              continue;
            }
            JXPathContext objContext = JXPathContext.newContext(objectList.get(thisParam.getObjectIndex()));
            if (thisParam.getIsList()) {
              Iterator iter = objContext.iterate(thisPath);
              value = new ArrayList();
              while (iter.hasNext()) {
                ((ArrayList)value).add(iter.next());
              }
              if (((ArrayList)value).size() == 0 && thisParam.getDefault() != null) {
                ((ArrayList)value).add(thisParam.getDefault());
              }
            } else {
              value = objContext.getValue(thisPath);
            }
          } else if (thisParam.getIsList()) {
            // Accumulate into an existing list from inParams/outParams if present.
            Object newList = inParams.get(thisName);
            if (newList == null) newList = outParams.get(thisName);
            if (newList != null && !(newList instanceof ArrayList)) {
              newList = new ArrayList();
              // NOTE(review): 'value' is still null at this point, so a null entry is
              // added here; possibly the previous scalar was meant — confirm.
              ((ArrayList)newList).add(value);
            }
            if (newList == null) newList = new ArrayList();
            value = context.getValue(thisPath);
            if(value != null && value.getClass().isArray()){
              Object[] valueArray = (Object[])value;
              if (valueArray.length > 1) {
                // Multi-valued: append every non-empty element, then clear value.
                for (int i = 0; i < valueArray.length; i++) {
                  if (valueArray[i] != null && ((String)valueArray[i]).length() > 0)
                    ((ArrayList)newList).add(valueArray[i]);
                }
                value = null;
              } else {
                value = valueArray[0];
              }
            }
            if (value != null && !"".equals(value)) ((ArrayList)newList).add(value);
            value = newList;
          } else {
            // Plain scalar lookup; arrays collapse to their single element.
            value = context.getValue(thisPath);
            if(value != null && value.getClass().isArray()){
              Object[] valueArray = (Object[])value;
              if (valueArray.length > 1) {
                value = valueArray;
              } else {
                value = valueArray[0];
              }
            } else if (value == null && thisParam.getSessionPath() != null) {
              // Fall back to the session path when the primary path was empty.
              value = context.getValue(thisParam.getSessionPath());
            }
          }

          // Store the value, falling back to the default, or fail on null.
          if (value != null && value.getClass().isArray() && thisParam.getIsList()) {
            outParams.put(thisName, value);
          } else if (value != null && value.getClass().isArray()) {
            outParams.put(thisName, ((String[])value)[0]);
          } else if (value != null && value instanceof ArrayList && ((ArrayList)value).size()>0) {
            outParams.put(thisName, value);
          } else if (value != null && !(value instanceof ArrayList) && String.valueOf(value).length() > 0) {
            outParams.put(thisName, String.valueOf(value));
          } else if (thisParam.getDefault() != null) {
            String [] defVal = resolveParam(thisParam.getDefault(), inParams);
            if (defVal != null) {
              outParams.put(thisName, defVal[0]);
            }
          } else if (!thisParam.getIgnoreNull()) {
            throw new ParameterException(callingContext, thisName, thisPath);
          } else if (log.isDebugEnabled()){
            log.debug("Param " + thisName + " evaluated to null");
          }
        }
      } catch (Exception e) {
        // Any evaluation failure is surfaced uniformly as a ParameterException.
        log.error("mapParameters - exception [name:" + thisName + " path:" + thisPath + " value:" + value + "]");
        throw new ParameterException(callingContext, thisName, thisPath);
      }
    }
  }

  /**
   * Evaluates each Parameter definition against the supplied result objects and
   * writes the values back into {@code paramsIn}. An objectIndex of -1 on a list
   * parameter gathers the path's value from every object.
   */
  public static void mapOutputParameters(Parameter[] paramMap, Map paramsIn, String scopeId, ArrayList objects) {
    for(int j = 0; j < paramMap.length; j++){
      Parameter thisParam = paramMap[j];
      // Same scope / overwrite gating as mapParameters.
      if (thisParam.getScope() != null
          && !thisParam.getScope().equalsIgnoreCase("all")
          && !thisParam.getScope().equalsIgnoreCase(scopeId) ) {
        continue;
      }
      if(!thisParam.getOverwriteExisting() && paramsIn.get(thisParam.getName()) != null) {
        continue;
      }
      if(thisParam.getObjectIndex() >= objects.size()){
        continue;
      }
      JXPathContext context = null;
      Object value = null;
      String paramName = resolveParam(thisParam.getName(), paramsIn)[0];
      try {
        String thisPath = resolveParam(thisParam.getPath(), paramsIn)[0];
        if(thisParam.getIsStatic()){
          value = thisPath;
        } else {
          if (thisParam.getIsList()) {
            value = new ArrayList();
            if (thisParam.getObjectIndex() == -1) {
              // -1 means: take the value from every object in turn.
              for (int i = 0; i < objects.size(); i++) {
                context = JXPathContext.newContext(objects.get(i));
                ((ArrayList)value).add(context.getValue(thisPath));
              }
            } else {
              context = JXPathContext.newContext(objects.get(thisParam.getObjectIndex()));
              Iterator iter = context.iterate(thisPath);
              while (iter.hasNext()) {
                ((ArrayList)value).add(iter.next());
              }
            }
            if (((ArrayList)value).size() == 0) {
              if (thisParam.getDefault() != null) {
                // Defaults that parse as integers are stored as Integer.
                try {
                  ((ArrayList)value).add(Integer.parseInt(thisParam.getDefault()));
                } catch (NumberFormatException nfe) {
                  ((ArrayList)value).add(thisParam.getDefault());
                }
              } else {
                value = null;
              }
            }
          } else {
            context = JXPathContext.newContext(objects.get(thisParam.getObjectIndex()));
            value = context.getValue(thisPath);
          }
        }
        // NOTE(review): this branch appears unreachable for ArrayList values (an
        // empty list with a default was already populated above) — confirm intent.
        if(value != null && List.class.isAssignableFrom(value.getClass()) && ((List)value).size() == 0 && thisParam.getDefault() != null){
          ((List)value).add(thisPath);
        }
        paramsIn.put(paramName, value);
      } catch (JXPathException e) {
        // A bad path falls back to the default; otherwise rethrow unless ignorable.
        if (thisParam.getDefault() != null) {
          String[] def = resolveParam(thisParam.getDefault(), paramsIn);
          if (def != null && def.length > 0) {
            paramsIn.put(paramName, def[0]);
          }
        } else if (!thisParam.getIgnoreNull()) {
          log.error("JXPathException for parameter " + paramName + " in scope " + scopeId);
          // TODO This should be a BaseException
          throw e;
        }
      }
    }
  }

  /**
   * Maps DoIt input parameter definitions from {@code paramsIn} to
   * {@code paramsOut}.
   * NOTE(review): when {@code useJXPathContext} is false, any non-static
   * definition dereferences the null context and throws NPE — confirm callers
   * only pass false with all-static maps.
   */
  public static void mapDoItInputParameters(Parameter[] paramMap, Map paramsIn, Map paramsOut, boolean useJXPathContext) {
    JXPathContext context = null;
    if(useJXPathContext)
      context = JXPathContext.newContext(paramsIn);
    for(int j = 0; j < paramMap.length; j++){
      Parameter thisParam = paramMap[j];
      Object value = null;
      if(thisParam.getIsStatic()) {
        // Static values are wrapped in a one-element String array.
        String [] valueAry = new String[1];
        valueAry[0] = resolveParam(thisParam.getPath(), paramsIn)[0];
        value = valueAry;
      } else {
        value = context.getValue(resolveParam(thisParam.getPath(), paramsIn)[0]);
        if (value != null && value.getClass().isArray() && ((Object[])value).length == 1) {
          value = ((Object[])value)[0];
        } else if (value == null && thisParam.getDefault() != null) {
          value = thisParam.getDefault();
        }
      }
      paramsOut.put(resolveParam(thisParam.getName(), paramsIn)[0], value);
    }
  }

  /** Two-arg overload of resolveParam with no default value. */
  public static String[] resolveParam(Object input, Map params) {
    return resolveParam(input, params, null);
  }

  /**
   * Resolves a parameter expression to a String array.
   * Prefix semantics: "$name" looks up {@code params}, "#name" looks up the
   * static environment map, "%now" yields the current time in millis; anything
   * else is returned literally.
   *
   * @return the resolved values, or null when a "$" lookup misses and no
   *         defaultValue was supplied
   */
  public static String[] resolveParam(Object input, Map params, Object defaultValue) {
    String[] output;
    if (input != null && input.getClass().isArray()) {
      output = (String[])input;
    } else {
      output = new String[] {(String)input};
    }
    if (input != null && input instanceof String && !"".equals(input)) {
      char ind = ((String)input).charAt(0);
      Object value;
      switch (ind) {
      case '$':
        // Indirect lookup in the supplied parameter map.
        value = params.get(((String)input).substring(1));
        if (value == null) {
          if (defaultValue != null) {
            value = defaultValue;
          } else {
            log.warn("Input variable with name " + input + " resolved to null");
            return null;
          }
        }
        if (value.getClass().isArray()) {
          output = ((String[])value);
        } else {
          output = new String[1];
          output[0] = (String)value;
        }
        if (log.isDebugEnabled()) {
          log.debug("Input variable with name " + input + " resolved to " + output[0]);
        }
        break;
      case '#':
        // Lookup in the static environment map; misses keep the literal input.
        value = envParameters.get(((String)input).substring(1));
        if (value != null) {
          if (value.getClass().isArray()) {
            output = ((String[])value);
          } else {
            output = new String[1];
            output[0] = (String)value;
          }
        }
        break;
      case '%':
        // Only "%now" is recognized; last case, so no break is needed.
        if (((String)input).equalsIgnoreCase("%now")) {
          output = new String[1];
          output[0] = String.valueOf(new Date().getTime());
        }
      }
    } else if (defaultValue != null) {
      output = new String[1];
      output[0] = (String)defaultValue;
    }
    return output;
  }

  /** Copies every entry of {@code params} into {@code paramsIn}. */
  public static void mapScriptParams(Map params, Map paramsIn) {
    Iterator iter = params.keySet().iterator();
    while (iter.hasNext()) {
      Object key = iter.next();
      paramsIn.put(key, params.get(key));
    }
  }
}
parabuzzle/toobs
legacy/toobs-0.1/Platform/PresFramework/src/org/toobs/framework/pres/util/ParameterUtil.java
Java
apache-2.0
16,514
/** * Copyright IBM Corp. 2016, 2018 * * This source code is licensed under the Apache-2.0 license found in the * LICENSE file in the root directory of this source tree. */ import React, { Component } from 'react'; import PropTypes from 'prop-types'; import { settings } from 'carbon-components'; import ListBulleted16 from '@carbon/icons-react/lib/list--bulleted/16'; import Grid16 from '@carbon/icons-react/lib/grid/16'; const { prefix } = settings; /** * The layout button for `<Search>`. */ class SearchLayoutButton extends Component { state = { format: 'list' }; static propTypes = { /** * The layout. */ format: PropTypes.oneOf(['list', 'grid']), /** * The a11y label text. */ labelText: PropTypes.string, /** * The description for the "list" icon. */ iconDescriptionList: PropTypes.string, /** * The description for the "grid" icon. */ iconDescriptionGrid: PropTypes.string, /** * The callback called when layout switches. */ onChangeFormat: PropTypes.func, }; static defaultProps = { labelText: 'Filter', iconDescriptionList: 'list', iconDescriptionGrid: 'grid', }; static getDerivedStateFromProps({ format }, state) { const { prevFormat } = state; return prevFormat === format ? null : { format: format || 'list', prevFormat: format, }; } /** * Toggles the button state upon user-initiated event. */ toggleLayout = () => { const format = this.state.format === 'list' ? 
'grid' : 'list'; this.setState({ format }, () => { const { onChangeFormat } = this.props; if (typeof onChangeFormat === 'function') { onChangeFormat({ format }); } }); }; render() { const { labelText, iconDescriptionList, iconDescriptionGrid } = this.props; const SearchLayoutButtonIcon = () => { if (this.state.format === 'list') { return ( <ListBulleted16 className={`${prefix}--search-view`} aria-label={iconDescriptionList} /> ); } return ( <Grid16 className={`${prefix}--search-view`} aria-label={iconDescriptionGrid} /> ); }; return ( <button className={`${prefix}--search-button`} type="button" onClick={this.toggleLayout} aria-label={labelText} title={labelText}> <div className={`${prefix}--search__toggle-layout__container`}> <SearchLayoutButtonIcon /> </div> </button> ); } } export default SearchLayoutButton;
carbon-design-system/carbon-components-react
src/components/SearchLayoutButton/SearchLayoutButton.js
JavaScript
apache-2.0
2,657
/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */

#include <aws/events/model/Connection.h>
#include <aws/core/utils/json/JsonSerializer.h>

#include <utility>

using namespace Aws::Utils::Json;
using namespace Aws::Utils;

namespace Aws
{
namespace CloudWatchEvents
{
namespace Model
{

// Generated model for the EventBridge (CloudWatch Events) Connection shape.
// Every field carries a *HasBeenSet flag so Jsonize() only emits members that
// were actually present when the object was deserialized or explicitly set.

Connection::Connection() : 
    m_connectionArnHasBeenSet(false),
    m_nameHasBeenSet(false),
    m_connectionState(ConnectionState::NOT_SET),
    m_connectionStateHasBeenSet(false),
    m_stateReasonHasBeenSet(false),
    m_authorizationType(ConnectionAuthorizationType::NOT_SET),
    m_authorizationTypeHasBeenSet(false),
    m_creationTimeHasBeenSet(false),
    m_lastModifiedTimeHasBeenSet(false),
    m_lastAuthorizedTimeHasBeenSet(false)
{
}

Connection::Connection(JsonView jsonValue) : 
    m_connectionArnHasBeenSet(false),
    m_nameHasBeenSet(false),
    m_connectionState(ConnectionState::NOT_SET),
    m_connectionStateHasBeenSet(false),
    m_stateReasonHasBeenSet(false),
    m_authorizationType(ConnectionAuthorizationType::NOT_SET),
    m_authorizationTypeHasBeenSet(false),
    m_creationTimeHasBeenSet(false),
    m_lastModifiedTimeHasBeenSet(false),
    m_lastAuthorizedTimeHasBeenSet(false)
{
  // Deserialization is delegated to operator=.
  *this = jsonValue;
}

// Populates each member (and its presence flag) from the fields that exist in
// the incoming JSON document; absent fields leave the member untouched.
Connection& Connection::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("ConnectionArn"))
  {
    m_connectionArn = jsonValue.GetString("ConnectionArn");

    m_connectionArnHasBeenSet = true;
  }

  if(jsonValue.ValueExists("Name"))
  {
    m_name = jsonValue.GetString("Name");

    m_nameHasBeenSet = true;
  }

  if(jsonValue.ValueExists("ConnectionState"))
  {
    // Enum values arrive as strings and are mapped via the generated mapper.
    m_connectionState = ConnectionStateMapper::GetConnectionStateForName(jsonValue.GetString("ConnectionState"));

    m_connectionStateHasBeenSet = true;
  }

  if(jsonValue.ValueExists("StateReason"))
  {
    m_stateReason = jsonValue.GetString("StateReason");

    m_stateReasonHasBeenSet = true;
  }

  if(jsonValue.ValueExists("AuthorizationType"))
  {
    m_authorizationType = ConnectionAuthorizationTypeMapper::GetConnectionAuthorizationTypeForName(jsonValue.GetString("AuthorizationType"));

    m_authorizationTypeHasBeenSet = true;
  }

  if(jsonValue.ValueExists("CreationTime"))
  {
    // Timestamps are transported as fractional seconds since the epoch.
    m_creationTime = jsonValue.GetDouble("CreationTime");

    m_creationTimeHasBeenSet = true;
  }

  if(jsonValue.ValueExists("LastModifiedTime"))
  {
    m_lastModifiedTime = jsonValue.GetDouble("LastModifiedTime");

    m_lastModifiedTimeHasBeenSet = true;
  }

  if(jsonValue.ValueExists("LastAuthorizedTime"))
  {
    m_lastAuthorizedTime = jsonValue.GetDouble("LastAuthorizedTime");

    m_lastAuthorizedTimeHasBeenSet = true;
  }

  return *this;
}

// Serializes only the members whose presence flags are set, mirroring the
// deserialization above.
JsonValue Connection::Jsonize() const
{
  JsonValue payload;

  if(m_connectionArnHasBeenSet)
  {
   payload.WithString("ConnectionArn", m_connectionArn);

  }

  if(m_nameHasBeenSet)
  {
   payload.WithString("Name", m_name);

  }

  if(m_connectionStateHasBeenSet)
  {
   payload.WithString("ConnectionState", ConnectionStateMapper::GetNameForConnectionState(m_connectionState));
  }

  if(m_stateReasonHasBeenSet)
  {
   payload.WithString("StateReason", m_stateReason);

  }

  if(m_authorizationTypeHasBeenSet)
  {
   payload.WithString("AuthorizationType", ConnectionAuthorizationTypeMapper::GetNameForConnectionAuthorizationType(m_authorizationType));
  }

  if(m_creationTimeHasBeenSet)
  {
   // Emit timestamps as seconds-with-milliseconds, matching the wire format.
   payload.WithDouble("CreationTime", m_creationTime.SecondsWithMSPrecision());
  }

  if(m_lastModifiedTimeHasBeenSet)
  {
   payload.WithDouble("LastModifiedTime", m_lastModifiedTime.SecondsWithMSPrecision());
  }

  if(m_lastAuthorizedTimeHasBeenSet)
  {
   payload.WithDouble("LastAuthorizedTime", m_lastAuthorizedTime.SecondsWithMSPrecision());
  }

  return payload;
}

} // namespace Model
} // namespace CloudWatchEvents
} // namespace Aws
aws/aws-sdk-cpp
aws-cpp-sdk-events/source/model/Connection.cpp
C++
apache-2.0
3,877
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.webbeans.test.injection.injectionpoint.beans;

import javax.enterprise.inject.Instance;
import javax.inject.Inject;

/**
 * Test bean that receives a CDI {@link Instance} of
 * {@link ProducerMethodInjectionPointOwner} via field injection and exposes
 * it through an accessor for test assertions.
 */
public class ProducerInjectionPointInstanceOwner
{
    /** Lazily-resolvable handle to the producer-method-created owner bean. */
    @Inject
    private Instance<ProducerMethodInjectionPointOwner> ownerInstance;

    public Instance<ProducerMethodInjectionPointOwner> getIpOwnerInstance()
    {
        return ownerInstance;
    }
}
apache/openwebbeans
webbeans-impl/src/test/java/org/apache/webbeans/test/injection/injectionpoint/beans/ProducerInjectionPointInstanceOwner.java
Java
apache-2.0
1,198
/*
 * =============================================================================
 *
 *   Copyright (c) 2011-2014, The THYMELEAF team (http://www.thymeleaf.org)
 *
 *   Licensed under the Apache License, Version 2.0 (the "License");
 *   you may not use this file except in compliance with the License.
 *   You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *   Unless required by applicable law or agreed to in writing, software
 *   distributed under the License is distributed on an "AS IS" BASIS,
 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   See the License for the specific language governing permissions and
 *   limitations under the License.
 *
 * =============================================================================
 */
package org.thymeleaf.standard.expression;

import java.io.Serializable;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

import org.thymeleaf.util.Validate;

/**
 * Immutable, ordered sequence of {@link IStandardExpression} objects.
 * Iterable, serializable, and rendered as a comma-separated string.
 *
 * @author Daniel Fern&aacute;ndez
 *
 * @since 1.1
 */
public final class ExpressionSequence implements Iterable<IStandardExpression>, Serializable {

    private static final long serialVersionUID = -6069208208568731809L;

    // Unmodifiable view over the caller-supplied list (not a defensive copy,
    // matching the class's original contract).
    private final List<IStandardExpression> expressions;

    public ExpressionSequence(final List<? extends IStandardExpression> expressions) {
        Validate.notNull(expressions, "Expression list cannot be null");
        Validate.containsNoNulls(expressions, "Expression list cannot contain any nulls");
        this.expressions = Collections.unmodifiableList(expressions);
    }

    public List<IStandardExpression> getExpressions() {
        return this.expressions;
    }

    public int size() {
        return this.expressions.size();
    }

    @Override
    public Iterator<IStandardExpression> iterator() {
        return this.expressions.iterator();
    }

    /**
     * Renders the sequence as the expressions' string forms joined by commas.
     */
    public String getStringRepresentation() {
        final StringBuilder out = new StringBuilder();
        boolean first = true;
        for (final IStandardExpression expression : this.expressions) {
            if (!first) {
                out.append(',');
            }
            out.append(expression);
            first = false;
        }
        return out.toString();
    }

    @Override
    public String toString() {
        return getStringRepresentation();
    }

}
magat/thymeleaf
src/main/java/org/thymeleaf/standard/expression/ExpressionSequence.java
Java
apache-2.0
2,465
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # coding: utf-8 # pylint: disable=no-member, too-many-lines """Online evaluation metric module.""" from __future__ import absolute_import import math from collections import OrderedDict import numpy from .base import numeric_types, string_types from . import ndarray from . import registry def check_label_shapes(labels, preds, wrap=False, shape=False): """Helper function for checking shape of label and prediction Parameters ---------- labels : list of `NDArray` The labels of the data. preds : list of `NDArray` Predicted values. wrap : boolean If True, wrap labels/preds in a list if they are single NDArray shape : boolean If True, check the shape of labels and preds; Otherwise only check their length. """ if not shape: label_shape, pred_shape = len(labels), len(preds) else: label_shape, pred_shape = labels.shape, preds.shape if label_shape != pred_shape: raise ValueError("Shape of labels {} does not match shape of " "predictions {}".format(label_shape, pred_shape)) if wrap: if isinstance(labels, ndarray.ndarray.NDArray): labels = [labels] if isinstance(preds, ndarray.ndarray.NDArray): preds = [preds] return labels, preds class EvalMetric(object): """Base class for all evaluation metrics. .. 
note:: This is a base class that provides common metric interfaces. One should not use this class directly, but instead create new metric classes that extend it. Parameters ---------- name : str Name of this metric instance for display. output_names : list of str, or None Name of predictions that should be used when updating with update_dict. By default include all predictions. label_names : list of str, or None Name of labels that should be used when updating with update_dict. By default include all labels. """ def __init__(self, name, output_names=None, label_names=None, **kwargs): self.name = str(name) self.output_names = output_names self.label_names = label_names self._kwargs = kwargs self.reset() def __str__(self): return "EvalMetric: {}".format(dict(self.get_name_value())) def get_config(self): """Save configurations of metric. Can be recreated from configs with metric.create(**config) """ config = self._kwargs.copy() config.update({ 'metric': self.__class__.__name__, 'name': self.name, 'output_names': self.output_names, 'label_names': self.label_names}) return config def update_dict(self, label, pred): """Update the internal evaluation with named label and pred Parameters ---------- labels : OrderedDict of str -> NDArray name to array mapping for labels. preds : OrderedDict of str -> NDArray name to array mapping of predicted outputs. """ if self.output_names is not None: pred = [pred[name] for name in self.output_names] else: pred = list(pred.values()) if self.label_names is not None: label = [label[name] for name in self.label_names] else: label = list(label.values()) self.update(label, pred) def update(self, labels, preds): """Updates the internal evaluation result. Parameters ---------- labels : list of `NDArray` The labels of the data. preds : list of `NDArray` Predicted values. 
""" raise NotImplementedError() def reset(self): """Resets the internal evaluation result to initial state.""" self.num_inst = 0 self.sum_metric = 0.0 def get(self): """Gets the current evaluation result. Returns ------- names : list of str Name of the metrics. values : list of float Value of the evaluations. """ if self.num_inst == 0: return (self.name, float('nan')) else: return (self.name, self.sum_metric / self.num_inst) def get_name_value(self): """Returns zipped name and value pairs. Returns ------- list of tuples A (name, value) tuple list. """ name, value = self.get() if not isinstance(name, list): name = [name] if not isinstance(value, list): value = [value] return list(zip(name, value)) # pylint: disable=invalid-name register = registry.get_register_func(EvalMetric, 'metric') alias = registry.get_alias_func(EvalMetric, 'metric') _create = registry.get_create_func(EvalMetric, 'metric') # pylint: enable=invalid-name def create(metric, *args, **kwargs): """Creates evaluation metric from metric names or instances of EvalMetric or a custom metric function. Parameters ---------- metric : str or callable Specifies the metric to create. This argument must be one of the below: - Name of a metric. - An instance of `EvalMetric`. - A list, each element of which is a metric or a metric name. - An evaluation function that computes custom metric for a given batch of labels and predictions. *args : list Additional arguments to metric constructor. Only used when metric is str. **kwargs : dict Additional arguments to metric constructor. Only used when metric is str Examples -------- >>> def custom_metric(label, pred): ... return np.mean(np.abs(label - pred)) ... 
>>> metric1 = mx.metric.create('acc') >>> metric2 = mx.metric.create(custom_metric) >>> metric3 = mx.metric.create([metric1, metric2, 'rmse']) """ if callable(metric): return CustomMetric(metric, *args, **kwargs) elif isinstance(metric, list): composite_metric = CompositeEvalMetric() for child_metric in metric: composite_metric.add(create(child_metric, *args, **kwargs)) return composite_metric return _create(metric, *args, **kwargs) @register @alias('composite') class CompositeEvalMetric(EvalMetric): """Manages multiple evaluation metrics. Parameters ---------- metrics : list of EvalMetric List of child metrics. name : str Name of this metric instance for display. output_names : list of str, or None Name of predictions that should be used when updating with update_dict. By default include all predictions. label_names : list of str, or None Name of labels that should be used when updating with update_dict. By default include all labels. Examples -------- >>> predicts = [mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])] >>> labels = [mx.nd.array([0, 1, 1])] >>> eval_metrics_1 = mx.metric.Accuracy() >>> eval_metrics_2 = mx.metric.F1() >>> eval_metrics = mx.metric.CompositeEvalMetric() >>> for child_metric in [eval_metrics_1, eval_metrics_2]: >>> eval_metrics.add(child_metric) >>> eval_metrics.update(labels = labels, preds = predicts) >>> print eval_metrics.get() (['accuracy', 'f1'], [0.6666666666666666, 0.8]) """ def __init__(self, metrics=None, name='composite', output_names=None, label_names=None): super(CompositeEvalMetric, self).__init__( 'composite', output_names=output_names, label_names=label_names) if metrics is None: metrics = [] self.metrics = [create(i) for i in metrics] def add(self, metric): """Adds a child metric. Parameters ---------- metric A metric instance. """ self.metrics.append(create(metric)) def get_metric(self, index): """Returns a child metric. Parameters ---------- index : int Index of child metric in the list of metrics. 
""" try: return self.metrics[index] except IndexError: return ValueError("Metric index {} is out of range 0 and {}".format( index, len(self.metrics))) def update_dict(self, labels, preds): # pylint: disable=arguments-differ if self.label_names is not None: labels = OrderedDict([i for i in labels.items() if i[0] in self.label_names]) if self.output_names is not None: preds = OrderedDict([i for i in preds.items() if i[0] in self.output_names]) for metric in self.metrics: metric.update_dict(labels, preds) def update(self, labels, preds): """Updates the internal evaluation result. Parameters ---------- labels : list of `NDArray` The labels of the data. preds : list of `NDArray` Predicted values. """ for metric in self.metrics: metric.update(labels, preds) def reset(self): """Resets the internal evaluation result to initial state.""" try: for metric in self.metrics: metric.reset() except AttributeError: pass def get(self): """Returns the current evaluation result. Returns ------- names : list of str Name of the metrics. values : list of float Value of the evaluations. """ names = [] values = [] for metric in self.metrics: name, value = metric.get() if isinstance(name, string_types): name = [name] if isinstance(value, numeric_types): value = [value] names.extend(name) values.extend(value) return (names, values) def get_config(self): config = super(CompositeEvalMetric, self).get_config() config.update({'metrics': [i.get_config() for i in self.metrics]}) return config ######################## # CLASSIFICATION METRICS ######################## @register @alias('acc') class Accuracy(EvalMetric): """Computes accuracy classification score. The accuracy score is defined as .. math:: \\text{accuracy}(y, \\hat{y}) = \\frac{1}{n} \\sum_{i=0}^{n-1} \\text{1}(\\hat{y_i} == y_i) Parameters ---------- axis : int, default=1 The axis that represents classes name : str Name of this metric instance for display. 
output_names : list of str, or None Name of predictions that should be used when updating with update_dict. By default include all predictions. label_names : list of str, or None Name of labels that should be used when updating with update_dict. By default include all labels. Examples -------- >>> predicts = [mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])] >>> labels = [mx.nd.array([0, 1, 1])] >>> acc = mx.metric.Accuracy() >>> acc.update(preds = predicts, labels = labels) >>> print acc.get() ('accuracy', 0.6666666666666666) """ def __init__(self, axis=1, name='accuracy', output_names=None, label_names=None): super(Accuracy, self).__init__( name, axis=axis, output_names=output_names, label_names=label_names) self.axis = axis def update(self, labels, preds): """Updates the internal evaluation result. Parameters ---------- labels : list of `NDArray` The labels of the data with class indices as values, one per sample. preds : list of `NDArray` Prediction values for samples. Each prediction value can either be the class index, or a vector of likelihoods for all classes. """ labels, preds = check_label_shapes(labels, preds, True) for label, pred_label in zip(labels, preds): if pred_label.shape != label.shape: pred_label = ndarray.argmax(pred_label, axis=self.axis) pred_label = pred_label.asnumpy().astype('int32') label = label.asnumpy().astype('int32') # flatten before checking shapes to avoid shape miss match label = label.flat pred_label = pred_label.flat labels, preds = check_label_shapes(label, pred_label) self.sum_metric += (pred_label == label).sum() self.num_inst += len(pred_label) @register @alias('top_k_accuracy', 'top_k_acc') class TopKAccuracy(EvalMetric): """Computes top k predictions accuracy. `TopKAccuracy` differs from Accuracy in that it considers the prediction to be ``True`` as long as the ground truth label is in the top K predicated labels. If `top_k` = ``1``, then `TopKAccuracy` is identical to `Accuracy`. 
Parameters ---------- top_k : int Whether targets are in top k predictions. name : str Name of this metric instance for display. output_names : list of str, or None Name of predictions that should be used when updating with update_dict. By default include all predictions. label_names : list of str, or None Name of labels that should be used when updating with update_dict. By default include all labels. Examples -------- >>> np.random.seed(999) >>> top_k = 3 >>> labels = [mx.nd.array([2, 6, 9, 2, 3, 4, 7, 8, 9, 6])] >>> predicts = [mx.nd.array(np.random.rand(10, 10))] >>> acc = mx.metric.TopKAccuracy(top_k=top_k) >>> acc.update(labels, predicts) >>> print acc.get() ('top_k_accuracy', 0.3) """ def __init__(self, top_k=1, name='top_k_accuracy', output_names=None, label_names=None): super(TopKAccuracy, self).__init__( name, top_k=top_k, output_names=output_names, label_names=label_names) self.top_k = top_k assert(self.top_k > 1), 'Please use Accuracy if top_k is no more than 1' self.name += '_%d' % self.top_k def update(self, labels, preds): """Updates the internal evaluation result. Parameters ---------- labels : list of `NDArray` The labels of the data. preds : list of `NDArray` Predicted values. 
""" labels, preds = check_label_shapes(labels, preds, True) for label, pred_label in zip(labels, preds): assert(len(pred_label.shape) <= 2), 'Predictions should be no more than 2 dims' pred_label = numpy.argsort(pred_label.asnumpy().astype('float32'), axis=1) label = label.asnumpy().astype('int32') check_label_shapes(label, pred_label) num_samples = pred_label.shape[0] num_dims = len(pred_label.shape) if num_dims == 1: self.sum_metric += (pred_label.flat == label.flat).sum() elif num_dims == 2: num_classes = pred_label.shape[1] top_k = min(num_classes, self.top_k) for j in range(top_k): self.sum_metric += (pred_label[:, num_classes - 1 - j].flat == label.flat).sum() self.num_inst += num_samples class _BinaryClassificationMetrics(object): """ Private container class for classification metric statistics. True/false positive and true/false negative counts are sufficient statistics for various classification metrics. This class provides the machinery to track those statistics across mini-batches of (label, prediction) pairs. """ def __init__(self): self.true_positives = 0 self.false_negatives = 0 self.false_positives = 0 self.true_negatives = 0 def update_binary_stats(self, label, pred): """ Update various binary classification counts for a single (label, pred) pair. Parameters ---------- label : `NDArray` The labels of the data. pred : `NDArray` Predicted values. """ pred = pred.asnumpy() label = label.asnumpy().astype('int32') pred_label = numpy.argmax(pred, axis=1) check_label_shapes(label, pred) if len(numpy.unique(label)) > 2: raise ValueError("%s currently only supports binary classification." 
% self.__class__.__name__) pred_true = (pred_label == 1) pred_false = 1 - pred_true label_true = (label == 1) label_false = 1 - label_true self.true_positives += (pred_true * label_true).sum() self.false_positives += (pred_true * label_false).sum() self.false_negatives += (pred_false * label_true).sum() self.true_negatives += (pred_false * label_false).sum() @property def precision(self): if self.true_positives + self.false_positives > 0: return float(self.true_positives) / (self.true_positives + self.false_positives) else: return 0. @property def recall(self): if self.true_positives + self.false_negatives > 0: return float(self.true_positives) / (self.true_positives + self.false_negatives) else: return 0. @property def fscore(self): if self.precision + self.recall > 0: return 2 * self.precision * self.recall / (self.precision + self.recall) else: return 0. @property def total_examples(self): return self.false_negatives + self.false_positives + \ self.true_negatives + self.true_positives def reset_stats(self): self.false_positives = 0 self.false_negatives = 0 self.true_positives = 0 self.true_negatives = 0 @register class F1(EvalMetric): """Computes the F1 score of a binary classification problem. The F1 score is equivalent to weighted average of the precision and recall, where the best value is 1.0 and the worst value is 0.0. The formula for F1 score is:: F1 = 2 * (precision * recall) / (precision + recall) The formula for precision and recall is:: precision = true_positives / (true_positives + false_positives) recall = true_positives / (true_positives + false_negatives) .. note:: This F1 score only supports binary classification. Parameters ---------- name : str Name of this metric instance for display. output_names : list of str, or None Name of predictions that should be used when updating with update_dict. By default include all predictions. label_names : list of str, or None Name of labels that should be used when updating with update_dict. 
By default include all labels. average : str, default 'macro' Strategy to be used for aggregating across mini-batches. "macro": average the F1 scores for each batch. "micro": compute a single F1 score across all batches. Examples -------- >>> predicts = [mx.nd.array([[0.3, 0.7], [0., 1.], [0.4, 0.6]])] >>> labels = [mx.nd.array([0., 1., 1.])] >>> f1 = mx.metric.F1() >>> f1.update(preds = predicts, labels = labels) >>> print f1.get() ('f1', 0.8) """ def __init__(self, name='f1', output_names=None, label_names=None, average="macro"): self.average = average self.metrics = _BinaryClassificationMetrics() EvalMetric.__init__(self, name=name, output_names=output_names, label_names=label_names) def update(self, labels, preds): """Updates the internal evaluation result. Parameters ---------- labels : list of `NDArray` The labels of the data. preds : list of `NDArray` Predicted values. """ labels, preds = check_label_shapes(labels, preds, True) for label, pred in zip(labels, preds): self.metrics.update_binary_stats(label, pred) if self.average == "macro": self.sum_metric += self.metrics.fscore self.num_inst += 1 self.metrics.reset_stats() else: self.sum_metric = self.metrics.fscore * self.metrics.total_examples self.num_inst = self.metrics.total_examples def reset(self): """Resets the internal evaluation result to initial state.""" self.sum_metric = 0. self.num_inst = 0. self.metrics.reset_stats() @register class Perplexity(EvalMetric): """Computes perplexity. Perplexity is a measurement of how well a probability distribution or model predicts a sample. A low perplexity indicates the model is good at predicting the sample. The perplexity of a model q is defined as .. math:: b^{\\big(-\\frac{1}{N} \\sum_{i=1}^N \\log_b q(x_i) \\big)} = \\exp \\big(-\\frac{1}{N} \\sum_{i=1}^N \\log q(x_i)\\big) where we let `b = e`. :math:`q(x_i)` is the predicted value of its ground truth label on sample :math:`x_i`. 
For example, we have three samples :math:`x_1, x_2, x_3` and their labels are :math:`[0, 1, 1]`. Suppose our model predicts :math:`q(x_1) = p(y_1 = 0 | x_1) = 0.3` and :math:`q(x_2) = 1.0`, :math:`q(x_3) = 0.6`. The perplexity of model q is :math:`exp\\big(-(\\log 0.3 + \\log 1.0 + \\log 0.6) / 3\\big) = 1.77109762852`. Parameters ---------- ignore_label : int or None Index of invalid label to ignore when counting. By default, sets to -1. If set to `None`, it will include all entries. axis : int (default -1) The axis from prediction that was used to compute softmax. By default use the last axis. name : str Name of this metric instance for display. output_names : list of str, or None Name of predictions that should be used when updating with update_dict. By default include all predictions. label_names : list of str, or None Name of labels that should be used when updating with update_dict. By default include all labels. Examples -------- >>> predicts = [mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])] >>> labels = [mx.nd.array([0, 1, 1])] >>> perp = mx.metric.Perplexity(ignore_label=None) >>> perp.update(labels, predicts) >>> print perp.get() ('Perplexity', 1.7710976285155853) """ def __init__(self, ignore_label, axis=-1, name='perplexity', output_names=None, label_names=None): super(Perplexity, self).__init__( name, ignore_label=ignore_label, output_names=output_names, label_names=label_names) self.ignore_label = ignore_label self.axis = axis def update(self, labels, preds): """Updates the internal evaluation result. Parameters ---------- labels : list of `NDArray` The labels of the data. preds : list of `NDArray` Predicted values. """ assert len(labels) == len(preds) loss = 0. num = 0 for label, pred in zip(labels, preds): assert label.size == pred.size/pred.shape[-1], \ "shape mismatch: %s vs. 
%s"%(label.shape, pred.shape) label = label.as_in_context(pred.context).reshape((label.size,)) pred = ndarray.pick(pred, label.astype(dtype='int32'), axis=self.axis) if self.ignore_label is not None: ignore = (label == self.ignore_label).astype(pred.dtype) num -= ndarray.sum(ignore).asscalar() pred = pred*(1-ignore) + ignore loss -= ndarray.sum(ndarray.log(ndarray.maximum(1e-10, pred))).asscalar() num += pred.size self.sum_metric += loss self.num_inst += num def get(self): """Returns the current evaluation result. Returns ------- Tuple of (str, float) Representing name of the metric and evaluation result. """ return (self.name, math.exp(self.sum_metric/self.num_inst)) #################### # REGRESSION METRICS #################### @register class MAE(EvalMetric): """Computes Mean Absolute Error (MAE) loss. The mean absolute error is given by .. math:: \\frac{\\sum_i^n |y_i - \\hat{y}_i|}{n} Parameters ---------- name : str Name of this metric instance for display. output_names : list of str, or None Name of predictions that should be used when updating with update_dict. By default include all predictions. label_names : list of str, or None Name of labels that should be used when updating with update_dict. By default include all labels. Examples -------- >>> predicts = [mx.nd.array(np.array([3, -0.5, 2, 7]).reshape(4,1))] >>> labels = [mx.nd.array(np.array([2.5, 0.0, 2, 8]).reshape(4,1))] >>> mean_absolute_error = mx.metric.MAE() >>> mean_absolute_error.update(labels = labels, preds = predicts) >>> print mean_absolute_error.get() ('mae', 0.5) """ def __init__(self, name='mae', output_names=None, label_names=None): super(MAE, self).__init__( name, output_names=output_names, label_names=label_names) def update(self, labels, preds): """Updates the internal evaluation result. Parameters ---------- labels : list of `NDArray` The labels of the data. preds : list of `NDArray` Predicted values. 
""" labels, preds = check_label_shapes(labels, preds, True) for label, pred in zip(labels, preds): label = label.asnumpy() pred = pred.asnumpy() if len(label.shape) == 1: label = label.reshape(label.shape[0], 1) if len(pred.shape) == 1: pred = pred.reshape(pred.shape[0], 1) self.sum_metric += numpy.abs(label - pred).mean() self.num_inst += 1 # numpy.prod(label.shape) @register class MSE(EvalMetric): """Computes Mean Squared Error (MSE) loss. The mean squared error is given by .. math:: \\frac{\\sum_i^n (y_i - \\hat{y}_i)^2}{n} Parameters ---------- name : str Name of this metric instance for display. output_names : list of str, or None Name of predictions that should be used when updating with update_dict. By default include all predictions. label_names : list of str, or None Name of labels that should be used when updating with update_dict. By default include all labels. Examples -------- >>> predicts = [mx.nd.array(np.array([3, -0.5, 2, 7]).reshape(4,1))] >>> labels = [mx.nd.array(np.array([2.5, 0.0, 2, 8]).reshape(4,1))] >>> mean_squared_error = mx.metric.MSE() >>> mean_squared_error.update(labels = labels, preds = predicts) >>> print mean_squared_error.get() ('mse', 0.375) """ def __init__(self, name='mse', output_names=None, label_names=None): super(MSE, self).__init__( name, output_names=output_names, label_names=label_names) def update(self, labels, preds): """Updates the internal evaluation result. Parameters ---------- labels : list of `NDArray` The labels of the data. preds : list of `NDArray` Predicted values. """ labels, preds = check_label_shapes(labels, preds, True) for label, pred in zip(labels, preds): label = label.asnumpy() pred = pred.asnumpy() if len(label.shape) == 1: label = label.reshape(label.shape[0], 1) if len(pred.shape) == 1: pred = pred.reshape(pred.shape[0], 1) self.sum_metric += ((label - pred)**2.0).mean() self.num_inst += 1 # numpy.prod(label.shape) @register class RMSE(EvalMetric): """Computes Root Mean Squred Error (RMSE) loss. 
The root mean squared error is given by .. math:: \\sqrt{\\frac{\\sum_i^n (y_i - \\hat{y}_i)^2}{n}} Parameters ---------- name : str Name of this metric instance for display. output_names : list of str, or None Name of predictions that should be used when updating with update_dict. By default include all predictions. label_names : list of str, or None Name of labels that should be used when updating with update_dict. By default include all labels. Examples -------- >>> predicts = [mx.nd.array(np.array([3, -0.5, 2, 7]).reshape(4,1))] >>> labels = [mx.nd.array(np.array([2.5, 0.0, 2, 8]).reshape(4,1))] >>> root_mean_squared_error = mx.metric.RMSE() >>> root_mean_squared_error.update(labels = labels, preds = predicts) >>> print root_mean_squared_error.get() ('rmse', 0.612372457981) """ def __init__(self, name='rmse', output_names=None, label_names=None): super(RMSE, self).__init__( name, output_names=output_names, label_names=label_names) def update(self, labels, preds): """Updates the internal evaluation result. Parameters ---------- labels : list of `NDArray` The labels of the data. preds : list of `NDArray` Predicted values. """ labels, preds = check_label_shapes(labels, preds, True) for label, pred in zip(labels, preds): label = label.asnumpy() pred = pred.asnumpy() if len(label.shape) == 1: label = label.reshape(label.shape[0], 1) if len(pred.shape) == 1: pred = pred.reshape(pred.shape[0], 1) self.sum_metric += numpy.sqrt(((label - pred)**2.0).mean()) self.num_inst += 1 @register @alias('ce') class CrossEntropy(EvalMetric): """Computes Cross Entropy loss. The cross entropy over a batch of sample size :math:`N` is given by .. math:: -\\sum_{n=1}^{N}\\sum_{k=1}^{K}t_{nk}\\log (y_{nk}), where :math:`t_{nk}=1` if and only if sample :math:`n` belongs to class :math:`k`. :math:`y_{nk}` denotes the probability of sample :math:`n` belonging to class :math:`k`. 
Parameters ---------- eps : float Cross Entropy loss is undefined for predicted value is 0 or 1, so predicted values are added with the small constant. name : str Name of this metric instance for display. output_names : list of str, or None Name of predictions that should be used when updating with update_dict. By default include all predictions. label_names : list of str, or None Name of labels that should be used when updating with update_dict. By default include all labels. Examples -------- >>> predicts = [mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])] >>> labels = [mx.nd.array([0, 1, 1])] >>> ce = mx.metric.CrossEntropy() >>> ce.update(labels, predicts) >>> print ce.get() ('cross-entropy', 0.57159948348999023) """ def __init__(self, eps=1e-12, name='cross-entropy', output_names=None, label_names=None): super(CrossEntropy, self).__init__( name, eps=eps, output_names=output_names, label_names=label_names) self.eps = eps def update(self, labels, preds): """Updates the internal evaluation result. Parameters ---------- labels : list of `NDArray` The labels of the data. preds : list of `NDArray` Predicted values. """ labels, preds = check_label_shapes(labels, preds, True) for label, pred in zip(labels, preds): label = label.asnumpy() pred = pred.asnumpy() label = label.ravel() assert label.shape[0] == pred.shape[0] prob = pred[numpy.arange(label.shape[0]), numpy.int64(label)] self.sum_metric += (-numpy.log(prob + self.eps)).sum() self.num_inst += label.shape[0] @register @alias('nll_loss') class NegativeLogLikelihood(EvalMetric): """Computes the negative log-likelihood loss. The negative log-likelihoodd loss over a batch of sample size :math:`N` is given by .. math:: -\\sum_{n=1}^{N}\\sum_{k=1}^{K}t_{nk}\\log (y_{nk}), where :math:`K` is the number of classes, :math:`y_{nk}` is the prediceted probability for :math:`k`-th class for :math:`n`-th sample. :math:`t_{nk}=1` if and only if sample :math:`n` belongs to class :math:`k`. 
Parameters ---------- eps : float Negative log-likelihood loss is undefined for predicted value is 0, so predicted values are added with the small constant. name : str Name of this metric instance for display. output_names : list of str, or None Name of predictions that should be used when updating with update_dict. By default include all predictions. label_names : list of str, or None Name of labels that should be used when updating with update_dict. By default include all labels. Examples -------- >>> predicts = [mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])] >>> labels = [mx.nd.array([0, 1, 1])] >>> nll_loss = mx.metric.NegativeLogLikelihood() >>> nll_loss.update(labels, predicts) >>> print nll_loss.get() ('nll-loss', 0.57159948348999023) """ def __init__(self, eps=1e-12, name='nll-loss', output_names=None, label_names=None): super(NegativeLogLikelihood, self).__init__( name, eps=eps, output_names=output_names, label_names=label_names) self.eps = eps def update(self, labels, preds): """Updates the internal evaluation result. Parameters ---------- labels : list of `NDArray` The labels of the data. preds : list of `NDArray` Predicted values. """ labels, preds = check_label_shapes(labels, preds, True) for label, pred in zip(labels, preds): label = label.asnumpy() pred = pred.asnumpy() label = label.ravel() num_examples = pred.shape[0] assert label.shape[0] == num_examples, (label.shape[0], num_examples) prob = pred[numpy.arange(num_examples, dtype=numpy.int64), numpy.int64(label)] self.sum_metric += (-numpy.log(prob + self.eps)).sum() self.num_inst += num_examples @register @alias('pearsonr') class PearsonCorrelation(EvalMetric): """Computes Pearson correlation. The pearson correlation is given by .. math:: \\frac{cov(y, \\hat{y})}{\\sigma{y}\\sigma{\\hat{y}}} Parameters ---------- name : str Name of this metric instance for display. output_names : list of str, or None Name of predictions that should be used when updating with update_dict. 
By default include all predictions. label_names : list of str, or None Name of labels that should be used when updating with update_dict. By default include all labels. Examples -------- >>> predicts = [mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])] >>> labels = [mx.nd.array([[1, 0], [0, 1], [0, 1]])] >>> pr = mx.metric.PearsonCorrelation() >>> pr.update(labels, predicts) >>> print pr.get() ('pearson-correlation', 0.42163704544016178) """ def __init__(self, name='pearsonr', output_names=None, label_names=None): super(PearsonCorrelation, self).__init__( name, output_names=output_names, label_names=label_names) def update(self, labels, preds): """Updates the internal evaluation result. Parameters ---------- labels : list of `NDArray` The labels of the data. preds : list of `NDArray` Predicted values. """ labels, preds = check_label_shapes(labels, preds, True) for label, pred in zip(labels, preds): check_label_shapes(label, pred, False, True) label = label.asnumpy() pred = pred.asnumpy() self.sum_metric += numpy.corrcoef(pred.ravel(), label.ravel())[0, 1] self.num_inst += 1 @register class Loss(EvalMetric): """Dummy metric for directly printing loss. Parameters ---------- name : str Name of this metric instance for display. output_names : list of str, or None Name of predictions that should be used when updating with update_dict. By default include all predictions. label_names : list of str, or None Name of labels that should be used when updating with update_dict. By default include all labels. 
""" def __init__(self, name='loss', output_names=None, label_names=None): super(Loss, self).__init__( name, output_names=output_names, label_names=label_names) def update(self, _, preds): for pred in preds: self.sum_metric += ndarray.sum(pred).asscalar() self.num_inst += pred.size @register class Torch(Loss): """Dummy metric for torch criterions.""" def __init__(self, name='torch', output_names=None, label_names=None): super(Torch, self).__init__( name, output_names=output_names, label_names=label_names) @register class Caffe(Loss): """Dummy metric for caffe criterions.""" def __init__(self, name='caffe', output_names=None, label_names=None): super(Caffe, self).__init__( name, output_names=output_names, label_names=label_names) @register class CustomMetric(EvalMetric): """Computes a customized evaluation metric. The `feval` function can return a `tuple` of (sum_metric, num_inst) or return an `int` sum_metric. Parameters ---------- feval : callable(label, pred) Customized evaluation function. name : str, optional The name of the metric. (the default is None). allow_extra_outputs : bool, optional If true, the prediction outputs can have extra outputs. This is useful in RNN, where the states are also produced in outputs for forwarding. (the default is False). name : str Name of this metric instance for display. output_names : list of str, or None Name of predictions that should be used when updating with update_dict. By default include all predictions. label_names : list of str, or None Name of labels that should be used when updating with update_dict. By default include all labels. 
Examples -------- >>> predicts = [mx.nd.array(np.array([3, -0.5, 2, 7]).reshape(4,1))] >>> labels = [mx.nd.array(np.array([2.5, 0.0, 2, 8]).reshape(4,1))] >>> feval = lambda x, y : (x + y).mean() >>> eval_metrics = mx.metric.CustomMetric(feval=feval) >>> eval_metrics.update(labels, predicts) >>> print eval_metrics.get() ('custom(<lambda>)', 6.0) """ def __init__(self, feval, name=None, allow_extra_outputs=False, output_names=None, label_names=None): if name is None: name = feval.__name__ if name.find('<') != -1: name = 'custom(%s)' % name super(CustomMetric, self).__init__( name, feval=feval, allow_extra_outputs=allow_extra_outputs, output_names=output_names, label_names=label_names) self._feval = feval self._allow_extra_outputs = allow_extra_outputs def update(self, labels, preds): """Updates the internal evaluation result. Parameters ---------- labels : list of `NDArray` The labels of the data. preds : list of `NDArray` Predicted values. """ if not self._allow_extra_outputs: labels, preds = check_label_shapes(labels, preds, True) for pred, label in zip(preds, labels): label = label.asnumpy() pred = pred.asnumpy() reval = self._feval(label, pred) if isinstance(reval, tuple): (sum_metric, num_inst) = reval self.sum_metric += sum_metric self.num_inst += num_inst else: self.sum_metric += reval self.num_inst += 1 def get_config(self): raise NotImplementedError("CustomMetric cannot be serialized") # pylint: disable=invalid-name def np(numpy_feval, name=None, allow_extra_outputs=False): """Creates a custom evaluation metric that receives its inputs as numpy arrays. Parameters ---------- numpy_feval : callable(label, pred) Custom evaluation function that receives labels and predictions for a minibatch as numpy arrays and returns the corresponding custom metric as a floating point number. name : str, optional Name of the custom metric. allow_extra_outputs : bool, optional Whether prediction output is allowed to have extra outputs. 
This is useful in cases like RNN where states are also part of output which can then be fed back to the RNN in the next step. By default, extra outputs are not allowed. Returns ------- float Custom metric corresponding to the provided labels and predictions. Example ------- >>> def custom_metric(label, pred): ... return np.mean(np.abs(label-pred)) ... >>> metric = mx.metric.np(custom_metric) """ def feval(label, pred): """Internal eval function.""" return numpy_feval(label, pred) feval.__name__ = numpy_feval.__name__ return CustomMetric(feval, name, allow_extra_outputs) # pylint: enable=invalid-name
TuSimple/mxnet
python/mxnet/metric.py
Python
apache-2.0
43,258
/* * Copyright 2012-2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.actuate.trace.http; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedList; import java.util.List; /** * In-memory implementation of {@link HttpTraceRepository}. * * @author Dave Syer * @author Olivier Bourgain * @since 2.0.0 */ public class InMemoryHttpTraceRepository implements HttpTraceRepository { private int capacity = 100; private boolean reverse = true; private final List<HttpTrace> traces = new LinkedList<>(); /** * Flag to say that the repository lists traces in reverse order. * @param reverse flag value (default true) */ public void setReverse(boolean reverse) { synchronized (this.traces) { this.reverse = reverse; } } /** * Set the capacity of the in-memory repository. * @param capacity the capacity */ public void setCapacity(int capacity) { synchronized (this.traces) { this.capacity = capacity; } } @Override public List<HttpTrace> findAll() { synchronized (this.traces) { return Collections.unmodifiableList(new ArrayList<>(this.traces)); } } @Override public void add(HttpTrace trace) { synchronized (this.traces) { while (this.traces.size() >= this.capacity) { this.traces.remove(this.reverse ? this.capacity - 1 : 0); } if (this.reverse) { this.traces.add(0, trace); } else { this.traces.add(trace); } } } }
lburgazzoli/spring-boot
spring-boot-project/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/trace/http/InMemoryHttpTraceRepository.java
Java
apache-2.0
2,019
#include <stdio.h>
#include "ali_api_core.h"
#include "ali_string_utils.h"
#include "ali_alert.h"
#include "json/value.h"
#include "json/reader.h"

using namespace aliyun;

namespace {

// Forward declarations so Json2Array can dispatch on the element type.
void Json2Type(const Json::Value& value, std::string* item);
void Json2Type(const Json::Value& value, AlertGetDBMetricResponseType* item);

// Converts a JSON array into a std::vector<T>, one Json2Type call per
// element. Non-array values are silently ignored (vec is left untouched).
template<typename T>
class Json2Array {
 public:
  Json2Array(const Json::Value& value, std::vector<T>* vec) {
    if(!value.isArray()) {
      return;
    }
    for(int i = 0; i < value.size(); i++) {
      T val;
      Json2Type(value[i], &val);
      vec->push_back(val);
    }
  }
};

// Scalar conversion: any JSON value to its string form.
void Json2Type(const Json::Value& value, std::string* item) {
  *item = value.asString();
}

// Maps the JSON payload onto the typed response struct. Members that are
// absent from the JSON simply leave the corresponding field unchanged.
void Json2Type(const Json::Value& value, AlertGetDBMetricResponseType* item) {
  if(value.isMember("code")) {
    item->code = value["code"].asString();
  }
  if(value.isMember("message")) {
    item->message = value["message"].asString();
  }
  if(value.isMember("success")) {
    item->success = value["success"].asString();
  }
  if(value.isMember("traceId")) {
    item->trace_id = value["traceId"].asString();
  }
  if(value.isMember("result")) {
    item->result = value["result"].asString();
  }
}

}  // anonymous namespace

// Issues GET <scheme>://<host>/projects/<project>/dbMetrics/<metric> and
// parses the JSON reply into *response (when HTTP 200). Returns the HTTP
// status code on success, or -1 on connect/parse failure; error_info, when
// non-NULL, receives failure details.
int Alert::GetDBMetric(const AlertGetDBMetricRequestType& req,
                       AlertGetDBMetricResponseType* response,
                       AlertErrorInfo* error_info) {
  std::string str_response;
  int status_code;
  int ret = 0;
  bool parse_success = false;
  Json::Value val;
  Json::Reader reader;
  // NOTE(review): "secheme" looks like a typo for "scheme"; left unchanged
  // here since this is a documentation-only pass.
  std::string secheme = this->use_tls_ ? "https" : "http";
  std::string url = secheme + "://" + host_
      + get_format_string("/projects/%s/dbMetrics/%s",
                          req.project_name.c_str(),
                          req.metric_name.c_str());
  AliRoaRequest* req_rpc = new AliRoaRequest(version_,
                                             appid_,
                                             secret_,
                                             url);
  // Proxy is only honoured for plain HTTP connections.
  if((!this->use_tls_) && this->proxy_host_ && this->proxy_host_[0]) {
    req_rpc->SetHttpProxy( this->proxy_host_);
  }
  req_rpc->setRequestMethod("GET");
  if(req_rpc->CommitRequest() != 0) {
    if(error_info) {
      error_info->code = "connect to host failed";
    }
    ret = -1;
    goto out;
  }
  status_code = req_rpc->WaitResponseHeaderComplete();
  req_rpc->ReadResponseBody(str_response);
  // Only attempt to parse when we actually got a status and a body.
  if(status_code > 0 && !str_response.empty()){
    parse_success = reader.parse(str_response, val);
  }
  if(!parse_success) {
    if(error_info) {
      error_info->code = "parse response failed";
    }
    ret = -1;
    goto out;
  }
  // Non-200: surface the service-reported error fields, if present.
  if(status_code!= 200 && error_info) {
    error_info->request_id = val.isMember("RequestId") ? val["RequestId"].asString(): "";
    error_info->code = val.isMember("Code") ? val["Code"].asString(): "";
    error_info->host_id = val.isMember("HostId") ? val["HostId"].asString(): "";
    error_info->message = val.isMember("Message") ? val["Message"].asString(): "";
  }
  if(status_code== 200 && response) {
    Json2Type(val, response);
  }
  ret = status_code;
out:
  // All exit paths funnel through here so req_rpc is always released.
  delete req_rpc;
  return ret;
}
aliyun-beta/aliyun-openapi-cpp-sdk
aliyun-api-alert/2015-08-15/src/ali_alert_get_db_metric.cc
C++
apache-2.0
3,130
package org.michenux.drodrolib.network.volley; import android.location.Location; import com.android.volley.AuthFailureError; import com.android.volley.NetworkResponse; import com.android.volley.ParseError; import com.android.volley.Request; import com.android.volley.Response; import com.android.volley.Response.ErrorListener; import com.android.volley.Response.Listener; import com.android.volley.toolbox.HttpHeaderParser; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonSyntaxException; import org.michenux.drodrolib.network.json.LocationDeserializer; import org.michenux.drodrolib.network.json.TimestampDeserializer; import java.io.UnsupportedEncodingException; import java.sql.Timestamp; import java.util.Map; public class GsonRequest<T> extends Request<T> { private final Gson gson ; private final Class<T> clazz; private final Map<String, String> headers; private final Listener<T> listener; public GsonRequest(int method, String url, Class<T> clazz, Map<String, String> headers, Listener<T> listener, ErrorListener errorListener) { super(method, url, errorListener); GsonBuilder gsonBuilder = new GsonBuilder(); gsonBuilder.registerTypeAdapter(Timestamp.class, new TimestampDeserializer()); gsonBuilder.registerTypeAdapter(Location.class, new LocationDeserializer()); this.gson = gsonBuilder.create(); this.clazz = clazz; this.headers = headers; this.listener = listener; } @Override public Map<String, String> getHeaders() throws AuthFailureError { return headers != null ? 
headers : super.getHeaders(); } @Override protected void deliverResponse(T response) { listener.onResponse(response); } @Override protected Response<T> parseNetworkResponse(NetworkResponse response) { try { String json = new String( response.data, HttpHeaderParser.parseCharset(response.headers)); return Response.success( gson.fromJson(json, clazz), HttpHeaderParser.parseCacheHeaders(response)); } catch (UnsupportedEncodingException | JsonSyntaxException e) { return Response.error(new ParseError(e)); } } }
alessandrogurgel/pedefacil
drodrolib/src/main/java/org/michenux/drodrolib/network/volley/GsonRequest.java
Java
apache-2.0
2,302
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.om.pointables.printer.json.clean; import java.io.IOException; import java.io.PrintStream; import java.util.List; import org.apache.asterix.common.exceptions.AsterixException; import org.apache.asterix.om.pointables.ARecordVisitablePointable; import org.apache.asterix.om.pointables.base.IVisitablePointable; import org.apache.asterix.om.types.ATypeTag; import org.apache.asterix.om.types.EnumDeserializer; import org.apache.hyracks.algebricks.common.utils.Pair; /** * This class is to print the content of a record. It is ONLY visible to * APrintVisitor. 
*/ class ARecordPrinter { private static String LEFT_PAREN = "{ "; private static String RIGHT_PAREN = " }"; private static String COMMA = ", "; private static String COLON = ": "; private final Pair<PrintStream, ATypeTag> nameVisitorArg = new Pair<PrintStream, ATypeTag>(null, ATypeTag.STRING); private final Pair<PrintStream, ATypeTag> itemVisitorArg = new Pair<PrintStream, ATypeTag>(null, null); public ARecordPrinter() { } public void printRecord(ARecordVisitablePointable recordAccessor, PrintStream ps, APrintVisitor visitor) throws IOException, AsterixException { List<IVisitablePointable> fieldNames = recordAccessor.getFieldNames(); List<IVisitablePointable> fieldTags = recordAccessor.getFieldTypeTags(); List<IVisitablePointable> fieldValues = recordAccessor.getFieldValues(); nameVisitorArg.first = ps; itemVisitorArg.first = ps; // print the beginning part ps.print(LEFT_PAREN); // print field 0 to n-2 for (int i = 0; i < fieldNames.size() - 1; i++) { printField(ps, visitor, fieldNames, fieldTags, fieldValues, i); // print the comma ps.print(COMMA); } // print field n-1 if (fieldValues.size() > 0) { printField(ps, visitor, fieldNames, fieldTags, fieldValues, fieldValues.size() - 1); } // print the end part ps.print(RIGHT_PAREN); } private void printField(PrintStream ps, APrintVisitor visitor, List<IVisitablePointable> fieldNames, List<IVisitablePointable> fieldTags, List<IVisitablePointable> fieldValues, int i) throws AsterixException { IVisitablePointable itemTypeTag = fieldTags.get(i); IVisitablePointable item = fieldValues.get(i); ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(itemTypeTag.getByteArray()[itemTypeTag .getStartOffset()]); itemVisitorArg.second = item.getLength() <= 1 ? ATypeTag.NULL : typeTag; // print field name fieldNames.get(i).accept(visitor, nameVisitorArg); ps.print(COLON); // print field value item.accept(visitor, itemVisitorArg); } }
kisskys/incubator-asterixdb
asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/printer/json/clean/ARecordPrinter.java
Java
apache-2.0
3,650
/*
 * Copyright 2015 Hippo Seven
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hippo.widget;

import android.os.Handler;
import android.os.Looper;
import android.os.Message;

import java.lang.ref.WeakReference;

/**
 * Coordinates snackbar display: at most one snackbar is "current" and at most
 * one is queued as "next". Timeout messages are delivered on the main-looper
 * Handler, and callbacks are held through WeakReference so a dismissed UI
 * cannot be leaked by a pending record.
 */
public class SnackbarManager {

    private static final int MSG_TIMEOUT = 0;

    // Millisecond durations used for the standard short/long lengths.
    private static final int SHORT_DURATION_MS = 1500;
    private static final int LONG_DURATION_MS = 2750;

    // Lazily created singleton.
    // NOTE(review): getInstance() is not synchronized -- presumably this is
    // only ever called from the main thread; confirm before using it from
    // background threads.
    private static SnackbarManager sSnackbarManager;

    private final Object mLock;
    private final Handler mHandler;

    // The snackbar currently on screen (if any) and the one queued behind it.
    private SnackbarRecord mCurrentSnackbar;
    private SnackbarRecord mNextSnackbar;

    static SnackbarManager getInstance() {
        if (sSnackbarManager == null) {
            sSnackbarManager = new SnackbarManager();
        }
        return sSnackbarManager;
    }

    private SnackbarManager() {
        mLock = new Object();
        // Timeout messages always arrive on the main looper.
        mHandler = new Handler(Looper.getMainLooper(), new Handler.Callback() {
            @Override
            public boolean handleMessage(Message message) {
                switch (message.what) {
                    case MSG_TIMEOUT:
                        handleTimeout((SnackbarManager.SnackbarRecord)message.obj);
                        return true;
                }
                return false;
            }
        });
    }

    /**
     * Requests that the snackbar identified by {@code callback} be shown for
     * {@code duration}. If it is already showing, only its timeout is reset;
     * otherwise it becomes (or updates) the queued record, and the current
     * snackbar -- if any -- is dismissed to make room.
     */
    public void show(int duration, Callback callback) {
        synchronized (mLock) {
            if (isCurrentSnackbar(callback)) {
                // Already on screen: just restart its timeout with the new duration.
                mCurrentSnackbar.duration = duration;
                mHandler.removeCallbacksAndMessages(mCurrentSnackbar);
                scheduleTimeoutLocked(mCurrentSnackbar);
                return;
            }

            if (isNextSnackbar(callback)) {
                mNextSnackbar.duration = duration;
            } else {
                mNextSnackbar = new SnackbarRecord(duration, callback);
            }

            // If something is showing, ask it to dismiss; the queued record
            // will then be shown from onDismissed(). Only fall through when
            // the current record's callback was already collected.
            if ((mCurrentSnackbar != null) &&
                    (cancelSnackbarLocked(mCurrentSnackbar, Snackbar.Callback.DISMISS_EVENT_CONSECUTIVE))) {
                return;
            }
            mCurrentSnackbar = null;
            showNextSnackbarLocked();
        }
    }

    /** Dismisses the given snackbar (current or queued) with the given event. */
    public void dismiss(Callback callback, int event) {
        synchronized (mLock) {
            if (isCurrentSnackbar(callback)) {
                cancelSnackbarLocked(mCurrentSnackbar, event);
            } else if (isNextSnackbar(callback)) {
                cancelSnackbarLocked(mNextSnackbar, event);
            }
        }
    }

    /**
     * Must be called by the snackbar UI once its dismiss animation finished;
     * promotes the queued snackbar, if any.
     */
    public void onDismissed(Callback callback) {
        synchronized (mLock) {
            if (isCurrentSnackbar(callback)) {
                mCurrentSnackbar = null;
                if (mNextSnackbar != null) {
                    showNextSnackbarLocked();
                }
            }
        }
    }

    /** Must be called by the snackbar UI once it is visible; arms the timeout. */
    public void onShown(Callback callback) {
        synchronized (mLock) {
            if (isCurrentSnackbar(callback)) {
                scheduleTimeoutLocked(mCurrentSnackbar);
            }
        }
    }

    /** Suspends the auto-dismiss timeout (e.g. while the user touches the bar). */
    public void cancelTimeout(Callback callback) {
        synchronized (mLock) {
            if (isCurrentSnackbar(callback)) {
                mHandler.removeCallbacksAndMessages(mCurrentSnackbar);
            }
        }
    }

    /** Re-arms the auto-dismiss timeout after {@link #cancelTimeout}. */
    public void restoreTimeout(Callback callback) {
        synchronized (mLock) {
            if (isCurrentSnackbar(callback)) {
                scheduleTimeoutLocked(mCurrentSnackbar);
            }
        }
    }

    /** Pairs a requested duration with a weakly-held UI callback. */
    private static class SnackbarRecord {
        private final WeakReference<Callback> callback;
        private int duration;

        SnackbarRecord(int duration, SnackbarManager.Callback callback) {
            this.callback = new WeakReference<>(callback);
            this.duration = duration;
        }

        // True when this record still refers to the given (non-null) callback.
        boolean isSnackbar(SnackbarManager.Callback callback) {
            return (callback != null) && (this.callback.get() == callback);
        }
    }

    // Promotes the queued record to current and tells its UI to show itself.
    // If the callback was garbage collected, the slot is simply cleared.
    private void showNextSnackbarLocked() {
        if (mNextSnackbar != null) {
            mCurrentSnackbar = mNextSnackbar;
            mNextSnackbar = null;
            Callback callback = mCurrentSnackbar.callback.get();
            if (callback != null) {
                callback.show();
            } else {
                mCurrentSnackbar = null;
            }
        }
    }

    // Asks the record's UI to dismiss; returns false when the callback was
    // already collected (nothing to dismiss).
    private boolean cancelSnackbarLocked(SnackbarRecord record, int event) {
        Callback callback = record.callback.get();
        if (callback != null) {
            callback.dismiss(event);
            return true;
        }
        return false;
    }

    private boolean isCurrentSnackbar(Callback callback) {
        return (mCurrentSnackbar != null) &&
                (mCurrentSnackbar.isSnackbar(callback));
    }

    private boolean isNextSnackbar(Callback callback) {
        return (mNextSnackbar != null) &&
                (mNextSnackbar.isSnackbar(callback));
    }

    // Schedules MSG_TIMEOUT for the record. LENGTH_INDEFINITE never times
    // out; values above LENGTH_LONG are treated as explicit milliseconds;
    // LENGTH_SHORT and everything else map to the fixed short/long durations.
    private void scheduleTimeoutLocked(SnackbarRecord r) {
        if (r.duration == Snackbar.LENGTH_INDEFINITE) {
            return;
        }

        int durationMs;
        if (r.duration > Snackbar.LENGTH_LONG) {
            durationMs = r.duration;
        } else if (r.duration == Snackbar.LENGTH_SHORT) {
            durationMs = SHORT_DURATION_MS;
        } else {
            durationMs = LONG_DURATION_MS;
        }

        mHandler.removeCallbacksAndMessages(r);
        mHandler.sendMessageDelayed(Message.obtain(mHandler, MSG_TIMEOUT, r), durationMs);
    }

    // Called on the main thread when a record's timeout fires.
    private void handleTimeout(SnackbarRecord record) {
        synchronized (mLock) {
            if ((mCurrentSnackbar == record) || (mNextSnackbar == record)) {
                cancelSnackbarLocked(record, Snackbar.Callback.DISMISS_EVENT_TIMEOUT);
            }
        }
    }

    /** Implemented by the snackbar UI so the manager can drive show/dismiss. */
    interface Callback {
        void show();
        void dismiss(int paramInt);
    }
}
xdujiang/Nimingban
app/src/main/java/com/hippo/widget/SnackbarManager.java
Java
apache-2.0
6,317
/*
 * Copyright (c) 2011-2013, Peter Abeles. All Rights Reserved.
 *
 * This file is part of BoofCV (http://boofcv.org).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package boofcv.struct.feature;

/**
 * <p>
 * Description for normalized cross correlation (NCC). The descriptor's value
 * in a NCC feature is the pixel intensity value minus the mean pixel intensity value:
 * </p>
 * value[i] = I(x,y) - mean
 *
 * @author Peter Abeles
 */
public class NccFeature extends TupleDesc_F64 {

	/** Mean pixel intensity. Can be used to reconstruct the original values of the template. */
	public double mean;
	/** Standard deviation of pixel intensity. */
	public double sigma;

	/**
	 * Creates a descriptor with the specified number of elements.
	 *
	 * @param numFeatures length of the descriptor
	 */
	public NccFeature(int numFeatures) {
		super(numFeatures);
	}

	/** No-argument constructor for serialization/subclassing. */
	protected NccFeature() {
	}

	/** Returns a deep copy of this descriptor, including mean and sigma. */
	@Override
	public NccFeature copy() {
		NccFeature ret = new NccFeature( value.length );
		ret.setTo(this);
		return ret;
	}

	/**
	 * Copies all values from {@code source}, which must be an NccFeature
	 * (the cast fails otherwise), including its mean and sigma.
	 */
	@Override
	public void setTo(TupleDesc_F64 source) {
		super.setTo(source);
		NccFeature ncc = (NccFeature)source;
		this.mean = ncc.mean;
		this.sigma = ncc.sigma;
	}
}
pacozaa/BoofCV
main/feature/src/boofcv/struct/feature/NccFeature.java
Java
apache-2.0
1,575
// Copyright 2015 CoreOS, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package storage import ( "bytes" "errors" "fmt" "log" "github.com/coreos/etcd/Godeps/_workspace/src/github.com/google/btree" ) var ( ErrRevisionNotFound = errors.New("stroage: revision not found") ) // keyIndex stores the revision of an key in the backend. // Each keyIndex has at least one key generation. // Each generation might have several key versions. // Tombstone on a key appends an tombstone version at the end // of the current generation and creates a new empty generation. // Each version of a key has an index pointing to the backend. // // For example: put(1.0);put(2.0);tombstone(3.0);put(4.0);tombstone(5.0) on key "foo" // generate a keyIndex: // key: "foo" // rev: 5 // generations: // {empty} // {4.0, 5.0(t)} // {1.0, 2.0, 3.0(t)} // // Compact a keyIndex removes the versions with smaller or equal to // rev except the largest one. If the generation becomes empty // during compaction, it will be removed. if all the generations get // removed, the keyIndex should be removed. // For example: // compact(2) on the previous example // generations: // {empty} // {4.0, 5.0(t)} // {2.0, 3.0(t)} // // compact(4) // generations: // {empty} // {4.0, 5.0(t)} // // compact(5): // generations: // {empty} -> key SHOULD be removed. // // compact(6): // generations: // {empty} -> key SHOULD be removed. 
type keyIndex struct {
	key         []byte
	modified    revision // the main rev of the last modification
	generations []generation
}

// put puts a revision to the keyIndex.
// It panics if the revision does not increase ki.modified, since revisions
// must be applied in order.
func (ki *keyIndex) put(main int64, sub int64) {
	rev := revision{main: main, sub: sub}

	if !rev.GreaterThan(ki.modified) {
		log.Panicf("store.keyindex: put with unexpected smaller revision [%v / %v]", rev, ki.modified)
	}
	if len(ki.generations) == 0 {
		ki.generations = append(ki.generations, generation{})
	}
	g := &ki.generations[len(ki.generations)-1]
	if len(g.revs) == 0 { // create a new key
		keysGauge.Inc()
		g.created = rev
	}
	g.revs = append(g.revs, rev)
	g.ver++
	ki.modified = rev
}

// restore rebuilds a keyIndex from persisted state. It may only be called on
// an empty keyIndex; it creates a single generation holding the last
// modification.
func (ki *keyIndex) restore(created, modified revision, ver int64) {
	if len(ki.generations) != 0 {
		log.Panicf("store.keyindex: cannot restore non-empty keyIndex")
	}

	ki.modified = modified
	g := generation{created: created, ver: ver, revs: []revision{modified}}
	ki.generations = append(ki.generations, g)
	keysGauge.Inc()
}

// tombstone puts a revision, pointing to a tombstone, to the keyIndex.
// It also creates a new empty generation in the keyIndex.
// It returns ErrRevisionNotFound when tombstone on an empty generation.
func (ki *keyIndex) tombstone(main int64, sub int64) error {
	if ki.isEmpty() {
		log.Panicf("store.keyindex: unexpected tombstone on empty keyIndex %s", string(ki.key))
	}
	if ki.generations[len(ki.generations)-1].isEmpty() {
		return ErrRevisionNotFound
	}
	ki.put(main, sub)
	ki.generations = append(ki.generations, generation{})
	keysGauge.Dec()
	return nil
}

// get gets the modified, created revision and version of the key that satisfies the given atRev.
// Rev must be higher than or equal to the given atRev.
func (ki *keyIndex) get(atRev int64) (modified, created revision, ver int64, err error) {
	if ki.isEmpty() {
		log.Panicf("store.keyindex: unexpected get on empty keyIndex %s", string(ki.key))
	}
	g := ki.findGeneration(atRev)
	if g.isEmpty() {
		return revision{}, revision{}, 0, ErrRevisionNotFound
	}

	// Walk backwards until the first revision at or below atRev.
	f := func(rev revision) bool {
		if rev.main <= atRev {
			return false
		}
		return true
	}

	n := g.walk(f)
	if n != -1 {
		// Version is the generation's version minus the revisions newer
		// than the one found.
		return g.revs[n], g.created, g.ver - int64(len(g.revs)-n-1), nil
	}
	return revision{}, revision{}, 0, ErrRevisionNotFound
}

// since returns revisions since the give rev. Only the revision with the
// largest sub revision will be returned if multiple revisions have the same
// main revision.
func (ki *keyIndex) since(rev int64) []revision {
	if ki.isEmpty() {
		log.Panicf("store.keyindex: unexpected get on empty keyIndex %s", string(ki.key))
	}
	since := revision{rev, 0}
	var gi int
	// find the generations to start checking
	for gi = len(ki.generations) - 1; gi > 0; gi-- {
		g := ki.generations[gi]
		if g.isEmpty() {
			continue
		}
		if since.GreaterThan(g.created) {
			break
		}
	}

	var revs []revision
	var last int64
	for ; gi < len(ki.generations); gi++ {
		for _, r := range ki.generations[gi].revs {
			if since.GreaterThan(r) {
				continue
			}
			if r.main == last {
				// replace the revision with a new one that has higher sub value,
				// because the original one should not be seen by external
				revs[len(revs)-1] = r
				continue
			}
			revs = append(revs, r)
			last = r.main
		}
	}
	return revs
}

// compact compacts a keyIndex by removing the versions with smaller or equal
// revision than the given atRev except the largest one (If the largest one is
// a tombstone, it will not be kept).
// If a generation becomes empty during compaction, it will be removed.
func (ki *keyIndex) compact(atRev int64, available map[revision]struct{}) {
	if ki.isEmpty() {
		log.Panicf("store.keyindex: unexpected compact on empty keyIndex %s", string(ki.key))
	}

	// walk until reaching the first revision that has an revision smaller or equal to
	// the atRevision.
	// add it to the available map
	f := func(rev revision) bool {
		if rev.main <= atRev {
			available[rev] = struct{}{}
			return false
		}
		return true
	}

	i, g := 0, &ki.generations[0]
	// find first generation includes atRev or created after atRev
	for i < len(ki.generations)-1 {
		if tomb := g.revs[len(g.revs)-1].main; tomb > atRev {
			break
		}
		i++
		g = &ki.generations[i]
	}
	if !g.isEmpty() {
		n := g.walk(f)
		// remove the previous contents.
		if n != -1 {
			g.revs = g.revs[n:]
		}
		// remove any tombstone: a lone surviving revision in a non-last
		// generation must be a tombstone, so drop the whole generation.
		if len(g.revs) == 1 && i != len(ki.generations)-1 {
			delete(available, g.revs[0])
			i++
		}
	}
	// remove the previous generations.
	ki.generations = ki.generations[i:]
	return
}

// isEmpty reports whether the keyIndex holds no revisions at all
// (a single, empty generation).
func (ki *keyIndex) isEmpty() bool {
	return len(ki.generations) == 1 && ki.generations[0].isEmpty()
}

// findGeneration finds out the generation of the keyIndex that the
// given rev belongs to. If the given rev is at the gap of two generations,
// which means that the key does not exist at the given rev, it returns nil.
func (ki *keyIndex) findGeneration(rev int64) *generation {
	lastg := len(ki.generations) - 1
	cg := lastg

	// Scan generations from newest to oldest.
	for cg >= 0 {
		if len(ki.generations[cg].revs) == 0 {
			cg--
			continue
		}
		g := ki.generations[cg]
		if cg != lastg {
			// A non-last generation ends in a tombstone; if the tombstone is
			// at or below rev, the key did not exist at rev.
			if tomb := g.revs[len(g.revs)-1].main; tomb <= rev {
				return nil
			}
		}
		if g.revs[0].main <= rev {
			return &ki.generations[cg]
		}
		cg--
	}
	return nil
}

// Less implements btree.Item by ordering keyIndexes on their key bytes.
func (a *keyIndex) Less(b btree.Item) bool {
	return bytes.Compare(a.key, b.(*keyIndex).key) == -1
}

// equal reports deep equality of two keyIndexes (key, modified revision and
// every generation).
func (a *keyIndex) equal(b *keyIndex) bool {
	if !bytes.Equal(a.key, b.key) {
		return false
	}
	if a.modified != b.modified {
		return false
	}
	if len(a.generations) != len(b.generations) {
		return false
	}
	for i := range a.generations {
		ag, bg := a.generations[i], b.generations[i]
		if !ag.equal(bg) {
			return false
		}
	}
	return true
}

func (ki *keyIndex) String() string {
	var s string
	for _, g := range ki.generations {
		s += g.String()
	}
	return s
}

// generation contains multiple revisions of a key.
type generation struct {
	ver     int64
	created revision // when the generation is created (put in first revision).
	revs    []revision
}

func (g *generation) isEmpty() bool { return g == nil || len(g.revs) == 0 }

// walk walks through the revisions in the generation in descending order.
// It passes the revision to the given function.
// walk returns until: 1. it finishes walking all pairs 2. the function returns false.
// walk returns the position at where it stopped. If it stopped after
// finishing walking, -1 will be returned.
func (g *generation) walk(f func(rev revision) bool) int {
	l := len(g.revs)
	for i := range g.revs {
		ok := f(g.revs[l-i-1])
		if !ok {
			return l - i - 1
		}
	}
	return -1
}

func (g *generation) String() string {
	return fmt.Sprintf("g: created[%d] ver[%d], revs %#v\n", g.created, g.ver, g.revs)
}

// equal reports deep equality of two generations (version and every revision).
func (a generation) equal(b generation) bool {
	if a.ver != b.ver {
		return false
	}
	if len(a.revs) != len(b.revs) {
		return false
	}

	for i := range a.revs {
		ar, br := a.revs[i], b.revs[i]
		if ar != br {
			return false
		}
	}
	return true
}
glerchundi/etcd2-bootstrapper
vendor/github.com/coreos/etcd/storage/key_index.go
GO
apache-2.0
8,969
// SPDX-License-Identifier: Apache-2.0 // Copyright 2019-2021 Authors of Cilium package nat import ( "fmt" "unsafe" "github.com/cilium/cilium/pkg/byteorder" "github.com/cilium/cilium/pkg/tuple" "github.com/cilium/cilium/pkg/types" ) // NatEntry4 represents an IPv4 entry in the NAT table. // +k8s:deepcopy-gen=true // +k8s:deepcopy-gen:interfaces=github.com/cilium/cilium/pkg/bpf.MapValue type NatEntry4 struct { Created uint64 `align:"created"` HostLocal uint64 `align:"host_local"` Pad1 uint64 `align:"pad1"` Pad2 uint64 `align:"pad2"` Addr types.IPv4 `align:"to_saddr"` Port uint16 `align:"to_sport"` } // SizeofNatEntry4 is the size of the NatEntry4 type in bytes. const SizeofNatEntry4 = int(unsafe.Sizeof(NatEntry4{})) // GetValuePtr returns the unsafe.Pointer for n. func (n *NatEntry4) GetValuePtr() unsafe.Pointer { return unsafe.Pointer(n) } // String returns the readable format. func (n *NatEntry4) String() string { return fmt.Sprintf("Addr=%s Port=%d Created=%d HostLocal=%d\n", n.Addr, n.Port, n.Created, n.HostLocal) } // Dump dumps NAT entry to string. func (n *NatEntry4) Dump(key NatKey, start uint64) string { var which string if key.GetFlags()&tuple.TUPLE_F_IN != 0 { which = "DST" } else { which = "SRC" } return fmt.Sprintf("XLATE_%s %s:%d Created=%s HostLocal=%d\n", which, n.Addr, n.Port, NatDumpCreated(start, n.Created), n.HostLocal) } // ToHost converts NatEntry4 ports to host byte order. func (n *NatEntry4) ToHost() NatEntry { x := *n x.Port = byteorder.NetworkToHost16(n.Port) return &x }
tklauser/cilium
pkg/maps/nat/ipv4.go
GO
apache-2.0
1,615
import re import sys class URI(): def __init__( self, root_path ): super().__init__() if root_path[-1] != '/' or root_path[0] != '/': raise ValueError( 'root_path must start and end with "/"' ) self.root_path = root_path self.uri_regex = re.compile( r'^({0}|/)(([a-zA-Z0-9\-_.!~*<>]+/)*)([a-zA-Z0-9\-_.!~*<>]+)?(:([a-zA-Z0-9\-_.!~*\'<>]*:)*)?(\([a-zA-Z0-9\-_.!~*<>]+\))?$'.format( self.root_path ) ) def split( self, uri, root_optional=False ): uri_match = self.uri_regex.match( uri ) if not uri_match: raise ValueError( 'Unable to parse URI "{0}"'.format( uri ) ) ( root, namespace, _, model, rec_id, _, action ) = uri_match.groups() if root != self.root_path and not root_optional: raise ValueError( 'URI does not start in the root_path' ) if namespace != '': namespace_list = namespace.rstrip( '/' ).split( '/' ) else: namespace_list = [] if rec_id is not None: id_list = rec_id.strip( ':' ).split( ':' ) multi = len( id_list ) > 1 else: id_list = None # id_list = [] is an empty list of ids, where None means the list is not even present multi = False if action is not None: action = action[ 1:-1 ] return ( namespace_list, model, action, id_list, multi ) def build( self, namespace=None, model=None, action=None, id_list=None, in_root=True ): """ build a uri, NOTE: if model is None, id_list and action are skiped """ if in_root: result = self.root_path else: result = '/' if namespace is not None: if not isinstance( namespace, list ): namespace = [ namespace ] if len( namespace ) > 0: result = '{0}{1}/'.format( result, '/'.join( namespace ) ) if model is None: return result result = '{0}{1}'.format( result, model ) if id_list is not None and id_list != []: if not isinstance( id_list, list ): id_list = [ id_list ] result = '{0}:{1}:'.format( result, ':'.join( id_list ) ) if action is not None: result = '{0}({1})'.format( result, action ) return result def extractIds( self, uri_list ): # TODO: should we make sure the namespace/model do not change in the list? 
""" extract the record IDs from the URI's in uri_list, can handle some/all/none of the URIs having multiple IDs in them allready, does not force uniqunes order should remain intact """ if isinstance( uri_list, str ): uri_list = [ uri_list ] if not isinstance( uri_list, list ): raise ValueError( 'uri_list must be string or list of strings' ) result = [] for uri in uri_list: uri_match = self.uri_regex.match( uri ) if not uri_match: raise ValueError( 'Unable to parse URI "{0}"'.format( uri ) ) ( _, _, _, _, rec_id, _, _ ) = uri_match.groups() if rec_id is None: continue result += rec_id.strip( ':' ).split( ':' ) return result def uriListToMultiURI( self, uri_list ): """ runs extract Ids on the list, then takes the first uri and applies all the ids to it """ if not uri_list: return [] id_list = self.extractIds( uri_list ) if not id_list: return [] ( namespace_list, model, action, _, _ ) = self.split( uri_list[0] ) return self.build( namespace_list, model, action, id_list, True ) # barrowed from https://www.python.org/dev/peps/pep-0257/ def doccstring_prep( docstring ): if not docstring: return '' # Convert tabs to spaces (following the normal Python rules) # and split into a list of lines: lines = docstring.expandtabs().splitlines() # Determine minimum indentation (first line doesn't count): indent = sys.maxsize for line in lines[ 1: ]: stripped = line.lstrip() if stripped: indent = min( indent, len( line ) - len( stripped ) ) # Remove indentation (first line is special): trimmed = [ lines[0].strip() ] if indent < sys.maxsize: for line in lines[1:]: trimmed.append( line[ indent: ].rstrip() ) # Strip off trailing and leading blank lines: while trimmed and not trimmed[-1]: trimmed.pop() while trimmed and not trimmed[0]: trimmed.pop( 0 ) # Return a single string: return '\n'.join( trimmed )
cinp/python
cinp/common.py
Python
apache-2.0
4,279
package alien4cloud.tosca.parser.impl.advanced; import java.util.Map; import javax.annotation.Resource; import org.alien4cloud.tosca.model.definitions.AbstractPropertyValue; import org.alien4cloud.tosca.model.definitions.Interface; import org.alien4cloud.tosca.model.templates.RelationshipTemplate; import org.springframework.stereotype.Component; import org.yaml.snakeyaml.nodes.MappingNode; import org.yaml.snakeyaml.nodes.Node; import org.yaml.snakeyaml.nodes.NodeTuple; import org.yaml.snakeyaml.nodes.ScalarNode; import alien4cloud.tosca.parser.INodeParser; import alien4cloud.tosca.parser.ParserUtils; import alien4cloud.tosca.parser.ParsingContextExecution; import alien4cloud.tosca.parser.ParsingError; import alien4cloud.tosca.parser.ParsingErrorLevel; import alien4cloud.tosca.parser.impl.ErrorCode; import alien4cloud.tosca.parser.impl.base.BaseParserFactory; import alien4cloud.tosca.parser.impl.base.MapParser; import alien4cloud.tosca.parser.impl.base.ScalarParser; /** * Parse a relationship */ @Deprecated @Component public class RelationshipTemplateParser implements INodeParser<RelationshipTemplate> { @Resource private ScalarParser scalarParser; @Resource private BaseParserFactory baseParserFactory; @Override public RelationshipTemplate parse(Node node, ParsingContextExecution context) { // To parse a relationship template we actually get the parent node to retrieve the requirement name; if (!(node instanceof MappingNode) || ((MappingNode) node).getValue().size() != 1) { ParserUtils.addTypeError(node, context.getParsingErrors(), "Requirement assignment"); } MappingNode assignmentNode = (MappingNode) node; RelationshipTemplate relationshipTemplate = new RelationshipTemplate(); relationshipTemplate.setRequirementName(scalarParser.parse(assignmentNode.getValue().get(0).getKeyNode(), context)); // Now parse the content of the relationship assignment. 
node = assignmentNode.getValue().get(0).getValueNode(); if (node instanceof ScalarNode) { // Short notation (host: compute) relationshipTemplate.setTarget(scalarParser.parse(node, context)); } else if (node instanceof MappingNode) { MappingNode mappingNode = (MappingNode) node; for (NodeTuple nodeTuple : mappingNode.getValue()) { String key = scalarParser.parse(nodeTuple.getKeyNode(), context); switch (key) { case "node": relationshipTemplate.setTarget(scalarParser.parse(nodeTuple.getValueNode(), context)); break; case "capability": relationshipTemplate.setTargetedCapabilityName(scalarParser.parse(nodeTuple.getValueNode(), context)); break; case "relationship": relationshipTemplate.setType(scalarParser.parse(nodeTuple.getValueNode(), context)); break; case "properties": INodeParser<AbstractPropertyValue> propertyValueParser = context.getRegistry().get("node_template_property"); MapParser<AbstractPropertyValue> mapParser = baseParserFactory.getMapParser(propertyValueParser, "node_template_property"); relationshipTemplate.setProperties(mapParser.parse(nodeTuple.getValueNode(), context)); break; case "interfaces": INodeParser<Map<String, Interface>> interfacesParser = context.getRegistry().get("interfaces"); relationshipTemplate.setInterfaces(interfacesParser.parse(nodeTuple.getValueNode(), context)); break; default: context.getParsingErrors().add(new ParsingError(ParsingErrorLevel.WARNING, ErrorCode.UNKNOWN_ARTIFACT_KEY, null, node.getStartMark(), "Unrecognized key while parsing implementation artifact", node.getEndMark(), key)); } } } else { ParserUtils.addTypeError(node, context.getParsingErrors(), "Requirement assignment"); } return relationshipTemplate; } }
alien4cloud/alien4cloud
alien4cloud-tosca/src/main/java/alien4cloud/tosca/parser/impl/advanced/RelationshipTemplateParser.java
Java
apache-2.0
4,234
//////////////////////////////////////////////////////////////////////////////// /// DISCLAIMER /// /// Copyright 2014-2022 ArangoDB GmbH, Cologne, Germany /// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany /// /// Licensed under the Apache License, Version 2.0 (the "License"); /// you may not use this file except in compliance with the License. /// You may obtain a copy of the License at /// /// http://www.apache.org/licenses/LICENSE-2.0 /// /// Unless required by applicable law or agreed to in writing, software /// distributed under the License is distributed on an "AS IS" BASIS, /// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. /// See the License for the specific language governing permissions and /// limitations under the License. /// /// Copyright holder is ArangoDB GmbH, Cologne, Germany /// /// @author Andrey Abramov /// @author Vasiliy Nabatchikov //////////////////////////////////////////////////////////////////////////////// #include "SortRegister.h" #include "Aql/AqlValue.h" #include "Aql/ClusterNodes.h" #include "Aql/ExecutionPlan.h" #include "Aql/RegisterPlan.h" #include "Aql/SortNode.h" namespace arangodb { namespace aql { // ----------------------------------------------------------------------------- // -- SECTION -- SortRegister // ----------------------------------------------------------------------------- SortRegister::SortRegister(RegisterId reg, SortElement const& element) noexcept : attributePath(element.attributePath), reg(reg), asc(element.ascending) {} void SortRegister::fill(ExecutionPlan const& /*execPlan*/, RegisterPlan const& regPlan, std::vector<SortElement> const& elements, std::vector<SortRegister>& sortRegisters) { sortRegisters.reserve(elements.size()); auto const& vars = regPlan.varInfo; for (auto const& p : elements) { auto const varId = p.var->id; auto const it = vars.find(varId); TRI_ASSERT(it != vars.end()); TRI_ASSERT(it->second.registerId.isValid()); 
sortRegisters.emplace_back(it->second.registerId, p); } } } // namespace aql } // namespace arangodb
wiltonlazary/arangodb
arangod/Aql/SortRegister.cpp
C++
apache-2.0
2,228
require File.join(File.dirname(__FILE__), '..', '..', 'puppet_x/puppetlabs/splunk/type') Puppet::Type.newtype(:splunkforwarder_web) do @doc = 'Manage splunkforwarder web settings in web.conf' PuppetX::Puppetlabs::Splunk::Type.clone_type(self) end
ralfbosz/puppet-splunk
lib/puppet/type/splunkforwarder_web.rb
Ruby
apache-2.0
252
/* * MinIO Java SDK for Amazon S3 Compatible Cloud Storage, (C) 2020 MinIO, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.minio; /** Argument class of {@link MinioClient#bucketExists}. */ public class BucketExistsArgs extends BucketArgs { public static Builder builder() { return new Builder(); } /** Argument builder of {@link BucketExistsArgs}. */ public static final class Builder extends BucketArgs.Builder<Builder, BucketExistsArgs> {} }
minio/minio-java
api/src/main/java/io/minio/BucketExistsArgs.java
Java
apache-2.0
991
// Code generated by protoc-gen-gogo. // source: cistern.proto // DO NOT EDIT! /* Package binlog is a generated protocol buffer package. It is generated from these files: cistern.proto It has these top-level messages: DumpBinlogReq DumpBinlogResp DumpDDLJobsReq DumpDDLJobsResp GetLatestCommitTSReq GetLatestCommitTSResp */ package binlog import ( "fmt" proto "github.com/golang/protobuf/proto" math "math" io "io" ) import ( context "golang.org/x/net/context" grpc "google.golang.org/grpc" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type DumpBinlogReq struct { // beginCommitTS speicifies the position from which begin to dump binlogs. // note that actually the result of dump starts from the one next to beginCommitTS // it should be zero in case of the first request. 
BeginCommitTS int64 `protobuf:"varint,1,opt,name=beginCommitTS,proto3" json:"beginCommitTS,omitempty"` } func (m *DumpBinlogReq) Reset() { *m = DumpBinlogReq{} } func (m *DumpBinlogReq) String() string { return proto.CompactTextString(m) } func (*DumpBinlogReq) ProtoMessage() {} func (*DumpBinlogReq) Descriptor() ([]byte, []int) { return fileDescriptorCistern, []int{0} } type DumpBinlogResp struct { // CommitTS specifies the commitTS of binlog CommitTS int64 `protobuf:"varint,1,opt,name=commitTS,proto3" json:"commitTS,omitempty"` // payloads is bytecodes encoded from binlog item Payload []byte `protobuf:"bytes,2,opt,name=payload,proto3" json:"payload,omitempty"` // ddljob is json bytes marshaled from corresponding ddljob struct if payload is a DDL type of binlog Ddljob []byte `protobuf:"bytes,3,opt,name=ddljob,proto3" json:"ddljob,omitempty"` } func (m *DumpBinlogResp) Reset() { *m = DumpBinlogResp{} } func (m *DumpBinlogResp) String() string { return proto.CompactTextString(m) } func (*DumpBinlogResp) ProtoMessage() {} func (*DumpBinlogResp) Descriptor() ([]byte, []int) { return fileDescriptorCistern, []int{1} } type DumpDDLJobsReq struct { // beginCommitTS is the start point of drainer processing binlog, DumpDDLJobs() returns // all history DDL jobs before this position, then drainer will apply these DDL jobs // in order of job ID to restore the whole schema info at that moment. 
BeginCommitTS int64 `protobuf:"varint,1,opt,name=beginCommitTS,proto3" json:"beginCommitTS,omitempty"` } func (m *DumpDDLJobsReq) Reset() { *m = DumpDDLJobsReq{} } func (m *DumpDDLJobsReq) String() string { return proto.CompactTextString(m) } func (*DumpDDLJobsReq) ProtoMessage() {} func (*DumpDDLJobsReq) Descriptor() ([]byte, []int) { return fileDescriptorCistern, []int{2} } type DumpDDLJobsResp struct { // ddljobs is an array of JSON encoded history DDL jobs Ddljobs [][]byte `protobuf:"bytes,1,rep,name=ddljobs" json:"ddljobs,omitempty"` } func (m *DumpDDLJobsResp) Reset() { *m = DumpDDLJobsResp{} } func (m *DumpDDLJobsResp) String() string { return proto.CompactTextString(m) } func (*DumpDDLJobsResp) ProtoMessage() {} func (*DumpDDLJobsResp) Descriptor() ([]byte, []int) { return fileDescriptorCistern, []int{3} } type GetLatestCommitTSReq struct { } func (m *GetLatestCommitTSReq) Reset() { *m = GetLatestCommitTSReq{} } func (m *GetLatestCommitTSReq) String() string { return proto.CompactTextString(m) } func (*GetLatestCommitTSReq) ProtoMessage() {} func (*GetLatestCommitTSReq) Descriptor() ([]byte, []int) { return fileDescriptorCistern, []int{4} } type GetLatestCommitTSResp struct { // commitTS specifies the Last binlog commitTS of the TiDB CommitTS int64 `protobuf:"varint,1,opt,name=commitTS,proto3" json:"commitTS,omitempty"` // isSynced specifies whether the all binlogs are consumed from pump IsSynced bool `protobuf:"varint,2,opt,name=isSynced,proto3" json:"isSynced,omitempty"` } func (m *GetLatestCommitTSResp) Reset() { *m = GetLatestCommitTSResp{} } func (m *GetLatestCommitTSResp) String() string { return proto.CompactTextString(m) } func (*GetLatestCommitTSResp) ProtoMessage() {} func (*GetLatestCommitTSResp) Descriptor() ([]byte, []int) { return fileDescriptorCistern, []int{5} } func init() { proto.RegisterType((*DumpBinlogReq)(nil), "binlog.DumpBinlogReq") proto.RegisterType((*DumpBinlogResp)(nil), "binlog.DumpBinlogResp") 
proto.RegisterType((*DumpDDLJobsReq)(nil), "binlog.DumpDDLJobsReq") proto.RegisterType((*DumpDDLJobsResp)(nil), "binlog.DumpDDLJobsResp") proto.RegisterType((*GetLatestCommitTSReq)(nil), "binlog.GetLatestCommitTSReq") proto.RegisterType((*GetLatestCommitTSResp)(nil), "binlog.GetLatestCommitTSResp") } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. const _ = grpc.SupportPackageIsVersion3 // Client API for Cistern service type CisternClient interface { // DumpBinlog dumps continuous binlog items in a stream from a given position DumpBinlog(ctx context.Context, in *DumpBinlogReq, opts ...grpc.CallOption) (Cistern_DumpBinlogClient, error) // DumpDDLJobs dumps all history DDL jobs before a specified commitTS DumpDDLJobs(ctx context.Context, in *DumpDDLJobsReq, opts ...grpc.CallOption) (*DumpDDLJobsResp, error) // GetLatestCommitTS returns the Last binlog commitTS of the TiDB GetLatestCommitTS(ctx context.Context, in *GetLatestCommitTSReq, opts ...grpc.CallOption) (*GetLatestCommitTSResp, error) } type cisternClient struct { cc *grpc.ClientConn } func NewCisternClient(cc *grpc.ClientConn) CisternClient { return &cisternClient{cc} } func (c *cisternClient) DumpBinlog(ctx context.Context, in *DumpBinlogReq, opts ...grpc.CallOption) (Cistern_DumpBinlogClient, error) { stream, err := grpc.NewClientStream(ctx, &_Cistern_serviceDesc.Streams[0], c.cc, "/binlog.Cistern/DumpBinlog", opts...) 
if err != nil { return nil, err } x := &cisternDumpBinlogClient{stream} if err := x.ClientStream.SendMsg(in); err != nil { return nil, err } if err := x.ClientStream.CloseSend(); err != nil { return nil, err } return x, nil } type Cistern_DumpBinlogClient interface { Recv() (*DumpBinlogResp, error) grpc.ClientStream } type cisternDumpBinlogClient struct { grpc.ClientStream } func (x *cisternDumpBinlogClient) Recv() (*DumpBinlogResp, error) { m := new(DumpBinlogResp) if err := x.ClientStream.RecvMsg(m); err != nil { return nil, err } return m, nil } func (c *cisternClient) DumpDDLJobs(ctx context.Context, in *DumpDDLJobsReq, opts ...grpc.CallOption) (*DumpDDLJobsResp, error) { out := new(DumpDDLJobsResp) err := grpc.Invoke(ctx, "/binlog.Cistern/DumpDDLJobs", in, out, c.cc, opts...) if err != nil { return nil, err } return out, nil } func (c *cisternClient) GetLatestCommitTS(ctx context.Context, in *GetLatestCommitTSReq, opts ...grpc.CallOption) (*GetLatestCommitTSResp, error) { out := new(GetLatestCommitTSResp) err := grpc.Invoke(ctx, "/binlog.Cistern/GetLatestCommitTS", in, out, c.cc, opts...) 
if err != nil { return nil, err } return out, nil } // Server API for Cistern service type CisternServer interface { // DumpBinlog dumps continuous binlog items in a stream from a given position DumpBinlog(*DumpBinlogReq, Cistern_DumpBinlogServer) error // DumpDDLJobs dumps all history DDL jobs before a specified commitTS DumpDDLJobs(context.Context, *DumpDDLJobsReq) (*DumpDDLJobsResp, error) // GetLatestCommitTS returns the Last binlog commitTS of the TiDB GetLatestCommitTS(context.Context, *GetLatestCommitTSReq) (*GetLatestCommitTSResp, error) } func RegisterCisternServer(s *grpc.Server, srv CisternServer) { s.RegisterService(&_Cistern_serviceDesc, srv) } func _Cistern_DumpBinlog_Handler(srv interface{}, stream grpc.ServerStream) error { m := new(DumpBinlogReq) if err := stream.RecvMsg(m); err != nil { return err } return srv.(CisternServer).DumpBinlog(m, &cisternDumpBinlogServer{stream}) } type Cistern_DumpBinlogServer interface { Send(*DumpBinlogResp) error grpc.ServerStream } type cisternDumpBinlogServer struct { grpc.ServerStream } func (x *cisternDumpBinlogServer) Send(m *DumpBinlogResp) error { return x.ServerStream.SendMsg(m) } func _Cistern_DumpDDLJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(DumpDDLJobsReq) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(CisternServer).DumpDDLJobs(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/binlog.Cistern/DumpDDLJobs", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(CisternServer).DumpDDLJobs(ctx, req.(*DumpDDLJobsReq)) } return interceptor(ctx, in, info, handler) } func _Cistern_GetLatestCommitTS_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(GetLatestCommitTSReq) if err := dec(in); err != nil { return nil, 
err } if interceptor == nil { return srv.(CisternServer).GetLatestCommitTS(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/binlog.Cistern/GetLatestCommitTS", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(CisternServer).GetLatestCommitTS(ctx, req.(*GetLatestCommitTSReq)) } return interceptor(ctx, in, info, handler) } var _Cistern_serviceDesc = grpc.ServiceDesc{ ServiceName: "binlog.Cistern", HandlerType: (*CisternServer)(nil), Methods: []grpc.MethodDesc{ { MethodName: "DumpDDLJobs", Handler: _Cistern_DumpDDLJobs_Handler, }, { MethodName: "GetLatestCommitTS", Handler: _Cistern_GetLatestCommitTS_Handler, }, }, Streams: []grpc.StreamDesc{ { StreamName: "DumpBinlog", Handler: _Cistern_DumpBinlog_Handler, ServerStreams: true, }, }, Metadata: fileDescriptorCistern, } func (m *DumpBinlogReq) Marshal() (data []byte, err error) { size := m.Size() data = make([]byte, size) n, err := m.MarshalTo(data) if err != nil { return nil, err } return data[:n], nil } func (m *DumpBinlogReq) MarshalTo(data []byte) (int, error) { var i int _ = i var l int _ = l if m.BeginCommitTS != 0 { data[i] = 0x8 i++ i = encodeVarintCistern(data, i, uint64(m.BeginCommitTS)) } return i, nil } func (m *DumpBinlogResp) Marshal() (data []byte, err error) { size := m.Size() data = make([]byte, size) n, err := m.MarshalTo(data) if err != nil { return nil, err } return data[:n], nil } func (m *DumpBinlogResp) MarshalTo(data []byte) (int, error) { var i int _ = i var l int _ = l if m.CommitTS != 0 { data[i] = 0x8 i++ i = encodeVarintCistern(data, i, uint64(m.CommitTS)) } if len(m.Payload) > 0 { data[i] = 0x12 i++ i = encodeVarintCistern(data, i, uint64(len(m.Payload))) i += copy(data[i:], m.Payload) } if len(m.Ddljob) > 0 { data[i] = 0x1a i++ i = encodeVarintCistern(data, i, uint64(len(m.Ddljob))) i += copy(data[i:], m.Ddljob) } return i, nil } func (m *DumpDDLJobsReq) Marshal() (data []byte, err error) { size := m.Size() data = make([]byte, 
size) n, err := m.MarshalTo(data) if err != nil { return nil, err } return data[:n], nil } func (m *DumpDDLJobsReq) MarshalTo(data []byte) (int, error) { var i int _ = i var l int _ = l if m.BeginCommitTS != 0 { data[i] = 0x8 i++ i = encodeVarintCistern(data, i, uint64(m.BeginCommitTS)) } return i, nil } func (m *DumpDDLJobsResp) Marshal() (data []byte, err error) { size := m.Size() data = make([]byte, size) n, err := m.MarshalTo(data) if err != nil { return nil, err } return data[:n], nil } func (m *DumpDDLJobsResp) MarshalTo(data []byte) (int, error) { var i int _ = i var l int _ = l if len(m.Ddljobs) > 0 { for _, b := range m.Ddljobs { data[i] = 0xa i++ i = encodeVarintCistern(data, i, uint64(len(b))) i += copy(data[i:], b) } } return i, nil } func (m *GetLatestCommitTSReq) Marshal() (data []byte, err error) { size := m.Size() data = make([]byte, size) n, err := m.MarshalTo(data) if err != nil { return nil, err } return data[:n], nil } func (m *GetLatestCommitTSReq) MarshalTo(data []byte) (int, error) { var i int _ = i var l int _ = l return i, nil } func (m *GetLatestCommitTSResp) Marshal() (data []byte, err error) { size := m.Size() data = make([]byte, size) n, err := m.MarshalTo(data) if err != nil { return nil, err } return data[:n], nil } func (m *GetLatestCommitTSResp) MarshalTo(data []byte) (int, error) { var i int _ = i var l int _ = l if m.CommitTS != 0 { data[i] = 0x8 i++ i = encodeVarintCistern(data, i, uint64(m.CommitTS)) } if m.IsSynced { data[i] = 0x10 i++ if m.IsSynced { data[i] = 1 } else { data[i] = 0 } i++ } return i, nil } func encodeFixed64Cistern(data []byte, offset int, v uint64) int { data[offset] = uint8(v) data[offset+1] = uint8(v >> 8) data[offset+2] = uint8(v >> 16) data[offset+3] = uint8(v >> 24) data[offset+4] = uint8(v >> 32) data[offset+5] = uint8(v >> 40) data[offset+6] = uint8(v >> 48) data[offset+7] = uint8(v >> 56) return offset + 8 } func encodeFixed32Cistern(data []byte, offset int, v uint32) int { data[offset] = uint8(v) 
data[offset+1] = uint8(v >> 8) data[offset+2] = uint8(v >> 16) data[offset+3] = uint8(v >> 24) return offset + 4 } func encodeVarintCistern(data []byte, offset int, v uint64) int { for v >= 1<<7 { data[offset] = uint8(v&0x7f | 0x80) v >>= 7 offset++ } data[offset] = uint8(v) return offset + 1 } func (m *DumpBinlogReq) Size() (n int) { var l int _ = l if m.BeginCommitTS != 0 { n += 1 + sovCistern(uint64(m.BeginCommitTS)) } return n } func (m *DumpBinlogResp) Size() (n int) { var l int _ = l if m.CommitTS != 0 { n += 1 + sovCistern(uint64(m.CommitTS)) } l = len(m.Payload) if l > 0 { n += 1 + l + sovCistern(uint64(l)) } l = len(m.Ddljob) if l > 0 { n += 1 + l + sovCistern(uint64(l)) } return n } func (m *DumpDDLJobsReq) Size() (n int) { var l int _ = l if m.BeginCommitTS != 0 { n += 1 + sovCistern(uint64(m.BeginCommitTS)) } return n } func (m *DumpDDLJobsResp) Size() (n int) { var l int _ = l if len(m.Ddljobs) > 0 { for _, b := range m.Ddljobs { l = len(b) n += 1 + l + sovCistern(uint64(l)) } } return n } func (m *GetLatestCommitTSReq) Size() (n int) { var l int _ = l return n } func (m *GetLatestCommitTSResp) Size() (n int) { var l int _ = l if m.CommitTS != 0 { n += 1 + sovCistern(uint64(m.CommitTS)) } if m.IsSynced { n += 2 } return n } func sovCistern(x uint64) (n int) { for { n++ x >>= 7 if x == 0 { break } } return n } func sozCistern(x uint64) (n int) { return sovCistern(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } func (m *DumpBinlogReq) Unmarshal(data []byte) error { l := len(data) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCistern } if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: DumpBinlogReq: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: 
DumpBinlogReq: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field BeginCommitTS", wireType) } m.BeginCommitTS = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCistern } if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ m.BeginCommitTS |= (int64(b) & 0x7F) << shift if b < 0x80 { break } } default: iNdEx = preIndex skippy, err := skipCistern(data[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCistern } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *DumpBinlogResp) Unmarshal(data []byte) error { l := len(data) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCistern } if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: DumpBinlogResp: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: DumpBinlogResp: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field CommitTS", wireType) } m.CommitTS = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCistern } if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ m.CommitTS |= (int64(b) & 0x7F) << shift if b < 0x80 { break } } case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Payload", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCistern } if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ byteLen |= (int(b) & 0x7F) << shift 
if b < 0x80 { break } } if byteLen < 0 { return ErrInvalidLengthCistern } postIndex := iNdEx + byteLen if postIndex > l { return io.ErrUnexpectedEOF } m.Payload = append(m.Payload[:0], data[iNdEx:postIndex]...) if m.Payload == nil { m.Payload = []byte{} } iNdEx = postIndex case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Ddljob", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCistern } if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ byteLen |= (int(b) & 0x7F) << shift if b < 0x80 { break } } if byteLen < 0 { return ErrInvalidLengthCistern } postIndex := iNdEx + byteLen if postIndex > l { return io.ErrUnexpectedEOF } m.Ddljob = append(m.Ddljob[:0], data[iNdEx:postIndex]...) if m.Ddljob == nil { m.Ddljob = []byte{} } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipCistern(data[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCistern } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *DumpDDLJobsReq) Unmarshal(data []byte) error { l := len(data) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCistern } if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: DumpDDLJobsReq: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: DumpDDLJobsReq: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field BeginCommitTS", wireType) } m.BeginCommitTS = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCistern } if iNdEx >= l { 
return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ m.BeginCommitTS |= (int64(b) & 0x7F) << shift if b < 0x80 { break } } default: iNdEx = preIndex skippy, err := skipCistern(data[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCistern } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *DumpDDLJobsResp) Unmarshal(data []byte) error { l := len(data) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCistern } if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: DumpDDLJobsResp: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: DumpDDLJobsResp: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Ddljobs", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCistern } if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ byteLen |= (int(b) & 0x7F) << shift if b < 0x80 { break } } if byteLen < 0 { return ErrInvalidLengthCistern } postIndex := iNdEx + byteLen if postIndex > l { return io.ErrUnexpectedEOF } m.Ddljobs = append(m.Ddljobs, make([]byte, postIndex-iNdEx)) copy(m.Ddljobs[len(m.Ddljobs)-1], data[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipCistern(data[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCistern } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *GetLatestCommitTSReq) Unmarshal(data []byte) error { l := len(data) iNdEx := 0 
for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCistern } if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: GetLatestCommitTSReq: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: GetLatestCommitTSReq: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { default: iNdEx = preIndex skippy, err := skipCistern(data[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCistern } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *GetLatestCommitTSResp) Unmarshal(data []byte) error { l := len(data) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCistern } if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: GetLatestCommitTSResp: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: GetLatestCommitTSResp: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field CommitTS", wireType) } m.CommitTS = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCistern } if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ m.CommitTS |= (int64(b) & 0x7F) << shift if b < 0x80 { break } } case 2: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field IsSynced", wireType) } var v int for shift := uint(0); ; shift += 7 { if shift >= 64 
{ return ErrIntOverflowCistern } if iNdEx >= l { return io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ v |= (int(b) & 0x7F) << shift if b < 0x80 { break } } m.IsSynced = bool(v != 0) default: iNdEx = preIndex skippy, err := skipCistern(data[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCistern } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func skipCistern(data []byte) (n int, err error) { l := len(data) iNdEx := 0 for iNdEx < l { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowCistern } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } wireType := int(wire & 0x7) switch wireType { case 0: for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowCistern } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } iNdEx++ if data[iNdEx-1] < 0x80 { break } } return iNdEx, nil case 1: iNdEx += 8 return iNdEx, nil case 2: var length int for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowCistern } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ length |= (int(b) & 0x7F) << shift if b < 0x80 { break } } iNdEx += length if length < 0 { return 0, ErrInvalidLengthCistern } return iNdEx, nil case 3: for { var innerWire uint64 var start int = iNdEx for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowCistern } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := data[iNdEx] iNdEx++ innerWire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } innerWireType := int(innerWire & 0x7) if innerWireType == 4 { break } next, err := skipCistern(data[start:]) if err != nil { return 0, err } iNdEx = start + next } return iNdEx, nil case 4: return iNdEx, nil case 5: iNdEx += 4 return iNdEx, nil default: return 0, fmt.Errorf("proto: illegal wireType %d", 
wireType) } } panic("unreachable") } var ( ErrInvalidLengthCistern = fmt.Errorf("proto: negative length found during unmarshaling") ErrIntOverflowCistern = fmt.Errorf("proto: integer overflow") ) func init() { proto.RegisterFile("cistern.proto", fileDescriptorCistern) } var fileDescriptorCistern = []byte{ // 324 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xe2, 0xe2, 0x4d, 0xce, 0x2c, 0x2e, 0x49, 0x2d, 0xca, 0xd3, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x4b, 0xca, 0xcc, 0xcb, 0xc9, 0x4f, 0x97, 0x12, 0x49, 0xcf, 0x4f, 0xcf, 0x07, 0x0b, 0xe9, 0x83, 0x58, 0x10, 0x59, 0x25, 0x53, 0x2e, 0x5e, 0x97, 0xd2, 0xdc, 0x02, 0x27, 0xb0, 0x9a, 0xa0, 0xd4, 0x42, 0x21, 0x15, 0x2e, 0xde, 0xa4, 0xd4, 0xf4, 0xcc, 0x3c, 0xe7, 0xfc, 0xdc, 0xdc, 0xcc, 0x92, 0x90, 0x60, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xe6, 0x20, 0x54, 0x41, 0xa5, 0x38, 0x2e, 0x3e, 0x64, 0x6d, 0xc5, 0x05, 0x42, 0x52, 0x5c, 0x1c, 0xc9, 0xa8, 0x5a, 0xe0, 0x7c, 0x21, 0x09, 0x2e, 0xf6, 0x82, 0xc4, 0xca, 0x9c, 0xfc, 0xc4, 0x14, 0x09, 0x26, 0x05, 0x46, 0x0d, 0x9e, 0x20, 0x18, 0x57, 0x48, 0x8c, 0x8b, 0x2d, 0x25, 0x25, 0x27, 0x2b, 0x3f, 0x49, 0x82, 0x19, 0x2c, 0x01, 0xe5, 0x29, 0x99, 0x41, 0xcc, 0x77, 0x71, 0xf1, 0xf1, 0xca, 0x4f, 0x2a, 0x26, 0xde, 0x5d, 0xda, 0x5c, 0xfc, 0x28, 0xfa, 0x8a, 0x0b, 0x40, 0x96, 0x43, 0x0c, 0x2d, 0x96, 0x60, 0x54, 0x60, 0x06, 0x59, 0x0e, 0xe5, 0x2a, 0x89, 0x71, 0x89, 0xb8, 0xa7, 0x96, 0xf8, 0x24, 0x96, 0xa4, 0x16, 0x97, 0xc0, 0x4c, 0x08, 0x4a, 0x2d, 0x54, 0xf2, 0xe7, 0x12, 0xc5, 0x22, 0x4e, 0xc0, 0x8f, 0x52, 0x5c, 0x1c, 0x99, 0xc5, 0xc1, 0x95, 0x79, 0xc9, 0xa9, 0x10, 0x4f, 0x72, 0x04, 0xc1, 0xf9, 0x46, 0x0f, 0x18, 0xb9, 0xd8, 0x9d, 0x21, 0x91, 0x22, 0x64, 0xcf, 0xc5, 0x85, 0x08, 0x39, 0x21, 0x51, 0x3d, 0x48, 0xec, 0xe8, 0xa1, 0x44, 0x82, 0x94, 0x18, 0x36, 0xe1, 0xe2, 0x02, 0x25, 0x06, 0x03, 0x46, 0x21, 0x07, 0x2e, 0x6e, 0x24, 0x2f, 0x0a, 0xa1, 0x28, 0x45, 0x84, 0x97, 0x94, 0x38, 0x56, 0x71, 0x90, 0x19, 0x42, 0x41, 0x5c, 0x82, 0x18, 0xfe, 0x13, 
0x92, 0x81, 0xa9, 0xc7, 0x16, 0x24, 0x52, 0xb2, 0x78, 0x64, 0x41, 0x66, 0x3a, 0x09, 0x9c, 0x78, 0x24, 0xc7, 0x78, 0xe1, 0x91, 0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x33, 0x1e, 0xcb, 0x31, 0x24, 0xb1, 0x81, 0x13, 0x98, 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0x28, 0xda, 0xbc, 0xd4, 0x8f, 0x02, 0x00, 0x00, }
cwen0/cdb-syncer
vendor/github.com/pingcap/tipb/go-binlog/cistern.pb.go
GO
apache-2.0
30,126
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.geronimo.echo; import java.awt.Image; import javax.jws.WebMethod; import javax.jws.WebParam; import javax.jws.WebResult; import javax.jws.WebService; import javax.xml.bind.annotation.XmlSeeAlso; import javax.xml.ws.RequestWrapper; import javax.xml.ws.ResponseWrapper; @WebService(name = "Echo", targetNamespace = "http://geronimo.apache.org/echo") @XmlSeeAlso({ ObjectFactory.class }) public interface Echo { /** * * @param arg0 * @return * returns java.lang.String */ @WebMethod @WebResult(targetNamespace = "") @RequestWrapper(localName = "hello", targetNamespace = "http://geronimo.apache.org/echo", className = "org.apache.geronimo.echo.Hello") @ResponseWrapper(localName = "helloResponse", targetNamespace = "http://geronimo.apache.org/echo", className = "org.apache.geronimo.echo.HelloResponse") public String hello( @WebParam(name = "arg0", targetNamespace = "") String arg0); /** * * @param bytes * @param useMTOM * @return * returns byte[] */ @WebMethod @WebResult(targetNamespace = "") @RequestWrapper(localName = "echoBytes", targetNamespace = "http://geronimo.apache.org/echo", className = "org.apache.geronimo.echo.EchoBytes") @ResponseWrapper(localName = "echoBytesResponse", 
targetNamespace = "http://geronimo.apache.org/echo", className = "org.apache.geronimo.echo.EchoBytesResponse") public byte[] echoBytes( @WebParam(name = "useMTOM", targetNamespace = "") boolean useMTOM, @WebParam(name = "bytes", targetNamespace = "") byte[] bytes); /** * * @param imageBytes * @param useMTOM * @return * returns java.awt.Image */ @WebMethod @WebResult(targetNamespace = "") @RequestWrapper(localName = "echoImage", targetNamespace = "http://geronimo.apache.org/echo", className = "org.apache.geronimo.echo.EchoImage") @ResponseWrapper(localName = "echoImageResponse", targetNamespace = "http://geronimo.apache.org/echo", className = "org.apache.geronimo.echo.EchoImageResponse") public Image echoImage( @WebParam(name = "useMTOM", targetNamespace = "") boolean useMTOM, @WebParam(name = "imageBytes", targetNamespace = "") Image imageBytes); }
apache/geronimo
testsuite/webservices-testsuite/jaxws-mtom-tests/mtom-test-war/src/main/java/org/apache/geronimo/echo/Echo.java
Java
apache-2.0
3,138
/* Copyright 2006 - 2010 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ using System; using System.IO; using System.Net; using System.Text; using System.Collections; using System.Net.Sockets; using OpenSource.UPnP; namespace UPnPStackBuilder { /// <summary> /// Summary description for EmbeddedCGenerator. /// </summary> public class EmbeddedCGenerator : CodeGenerator { // private Hashtable SequenceTable = new Hashtable(); // private Hashtable ChoiceTable = new Hashtable(); // private int SequenceCounter = 0; // private int ChoiceCounter = 0; private Hashtable FriendlyNameTable = new Hashtable(); private Hashtable MasterFriendlyNameTable = new Hashtable(); public enum PLATFORMS { WINDOWS, POSIX } public enum SUBTARGETS { NONE, PPC2003, NUCLEUS, PSOS } public enum LANGUAGES { C, CPP } private string UseSystem = ""; private UPnPDevice RootDevice = null; public PLATFORMS Platform = PLATFORMS.POSIX; public SUBTARGETS SubTarget = SUBTARGETS.NONE; public LANGUAGES Language = LANGUAGES.C; private int WinSock = 0; public ArrayList AllServices = new ArrayList(); private static string cl = "\r\n"; public string CodeNewLine { get { return cl; } set { cl = value; } } private string pc_methodPrefix = "UPnP"; private string pc_methodLibPrefix = "ILib"; private string pc_methodPrefixDef = "UPnP"; // private string pc_inline = ""; // private string pc_inlineextern = ""; private string pc_classPrefix = ""; private static CodeProcessor PrivateClassDeclarations; private static CodeProcessor 
PublicClassDeclarations; public EmbeddedCGenerator(ServiceGenerator.StackConfiguration Config) : base(Config) { switch (Config.newline) { case ServiceGenerator.NEWLINETYPE.CRLF: cl = "\r\n"; break; case ServiceGenerator.NEWLINETYPE.LF: cl = "\n"; break; } switch (Config.TargetPlatform) { case ServiceGenerator.PLATFORMS.MICROSTACK_POSIX: this.Platform = PLATFORMS.POSIX; this.SubTarget = SUBTARGETS.NONE; break; case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK1: this.Platform = PLATFORMS.WINDOWS; this.SubTarget = SUBTARGETS.NONE; this.WinSock = 1; break; case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK2: this.Platform = PLATFORMS.WINDOWS; this.SubTarget = SUBTARGETS.NONE; this.WinSock = 2; break; case ServiceGenerator.PLATFORMS.MICROSTACK_POCKETPC: this.Platform = PLATFORMS.WINDOWS; this.SubTarget = SUBTARGETS.PPC2003; this.WinSock = 1; break; } } private void AddLicense(CodeProcessor cs, string filename) { string l = License; l = l.Replace("<FILE>", filename); cs.Append(l); } private void AddAllServices(UPnPDevice device) { foreach (UPnPService s in device.Services) AllServices.Add(s); foreach (UPnPDevice d in device.EmbeddedDevices) AddAllServices(d); } private string SettingsAsComments { get { StringBuilder cs = new StringBuilder(); cs.Append("/*" + cl); cs.Append(" *" + cl); cs.Append(" * Target Platform = " + this.Platform.ToString()); if (this.SubTarget != SUBTARGETS.NONE) { cs.Append(" / " + this.SubTarget.ToString()); } cs.Append(cl); if (this.Platform == PLATFORMS.WINDOWS) { cs.Append(" * WinSockVersion = " + this.WinSock.ToString() + cl); } cs.Append(" *" + cl); cs.Append(" * HTTP Mode = " + (Configuration.HTTP_1dot1 == false ? "1.0" : "1.1") + cl); cs.Append(" * IPAddressMonitoring = " + (Configuration.DefaultIPAddressMonitor == true ? 
"YES" : "NO") + cl); cs.Append(" *" + cl); cs.Append(" */" + cl); return (cs.ToString()); } } private void CreateMicroStackDef_Device(CodeProcessor cs, UPnPDevice d, ref int counter) { ++counter; cs.Append("const char *FriendlyName" + counter.ToString() + ", "); foreach (UPnPDevice dx in d.EmbeddedDevices) { CreateMicroStackDef_Device(cs, dx, ref counter); } } private void CreateMicroStack_Device_Values(CodeProcessor cs, UPnPDevice d) { cs.Append("\"" + (string)FriendlyNameTable[d] + "\", "); foreach (UPnPDevice dx in d.EmbeddedDevices) { CreateMicroStack_Device_Values(cs, dx); } } public static void BuildComplexTypeParser_Header(CodeProcessor cs, SortedList SortedServiceList, string pc_methodPrefix, string pc_methodLibPrefix) { cs.Append(cl); cs.Comment("Complex Type Parsers"); IDictionaryEnumerator en = SortedServiceList.GetEnumerator(); while (en.MoveNext()) { UPnPService service = (UPnPService)en.Value; foreach (UPnPComplexType CT in service.GetComplexTypeList()) { cs.Append("struct " + CT.Name_LOCAL + "* " + pc_methodPrefix + "Parse_" + CT.Name_LOCAL + "(struct " + pc_methodLibPrefix + "XMLNode *node);" + cl); } } cs.Append(cl); } public static void BuildComplexTypeParser_Collection(string cx, Hashtable SequenceTable, Hashtable ChoiceTable, ref int SeqX, ref int ChoX, CodeProcessor cs, UPnPComplexType.ItemCollection ic, string pc_methodPrefix) { int x = 0; string prefix = ""; int SeqX2 = 0; int ChoX2 = 0; if (ic.GetType() == typeof(UPnPComplexType.Sequence)) { ++SeqX; if (cx == "") { cx += "_sequence_" + SeqX.ToString(); } else { cx += "->_sequence_" + SeqX.ToString(); } prefix = cx + "->"; } else if (ic.GetType() == typeof(UPnPComplexType.Choice)) { ++ChoX; if (cx == "") { cx += "_choice_" + ChoX.ToString(); } else { cx += "->_choice_" + ChoX.ToString(); } prefix = cx + "->"; } foreach (UPnPComplexType.ContentData cd in ic.Items) { ++x; cs.Append(" if (node->NameLength==" + cd.Name.Length.ToString() + " && memcmp(node->Name,\"" + cd.Name + "\"," + 
cd.Name.Length.ToString() + ")==0)" + cl); cs.Append(" {" + cl); if (x == 1) { Stack st = new Stack(); UPnPComplexType.ItemCollection tc = ic; DText pp = new DText(); pp.ATTRMARK = "->"; pp[0] = cx; int ppx = pp.DCOUNT(); while (tc != null) { string ps; ps = "RetVal"; for (int i = 1; i <= ppx; ++i) { ps += ("->" + pp[i]); } st.Push(new object[2] { ps, tc }); --ppx; tc = tc.ParentCollection; } while (st.Count > 0) { object[] foo = (object[])st.Pop(); cs.Append(" if (" + (string)foo[0] + " == NULL)" + cl); cs.Append(" {" + cl); if (foo[1].GetType() == typeof(UPnPComplexType.Sequence)) { cs.Append(" if ((" + (string)foo[0] + " = (struct SEQUENCE_" + SequenceTable[foo[1]].ToString() + "*)malloc(sizeof(struct SEQUENCE_" + SequenceTable[foo[1]].ToString() + "))) == NULL) ILIBCRITICALEXIT(254);" + cl); cs.Append(" memset(" + (string)foo[0] + ",0,sizeof(struct SEQUENCE_" + SequenceTable[foo[1]].ToString() + "));" + cl); } else if (foo[1].GetType() == typeof(UPnPComplexType.Choice)) { cs.Append(" if ((" + (string)foo[0] + " = (struct CHOICE_" + ChoiceTable[foo[1]].ToString() + "*)malloc(sizeof(struct CHOICE_" + ChoiceTable[foo[1]].ToString() + "))) == NULL) ILIBCRITICALEXIT(254);" + cl); cs.Append(" memset(" + (string)foo[0] + ",0,sizeof(struct CHOICE_" + ChoiceTable[foo[1]].ToString() + "));" + cl); } cs.Append(" }" + cl); } // if (ic.GetType()==typeof(UPnPComplexType.Sequence)) // { // cs.Append(" RetVal->"+cx+" = (struct SEQUENCE_"+SequenceTable[ic].ToString()+"*)malloc(sizeof(struct SEQUENCE_"+SequenceTable[ic].ToString()+"));"+cl); // } // else if (ic.GetType()==typeof(UPnPComplexType.Choice)) // { // cs.Append(" RetVal->"+cx+" = (struct CHOICE_"+ChoiceTable[ic].ToString()+"*)malloc(sizeof(struct CHOICE_"+ChoiceTable[ic].ToString()+"));"+cl); // } } if (cd.TypeNS == "http://www.w3.org/2001/XMLSchema") { // XSD Simple Type switch (cd.Type) { case "boolean": case "int": case "integer": case "positiveInteger": case "negativeInteger": case "nonNegativeInteger": case 
"nonPositiveInteger": case "long": case "short": cs.Append(" RetVal->" + prefix + cd.Name + " = atoi(text);" + cl); break; } } else { // XSD User Defined Type cs.Append(" RetVal->" + prefix + cd.Name + " = " + pc_methodPrefix + "Parse_" + cd.Type + "(node->Next);" + cl); } cs.Append(" }" + cl); } foreach (UPnPComplexType.ItemCollection ec in ic.NestedCollections) { BuildComplexTypeParser_Collection(cx, SequenceTable, ChoiceTable, ref SeqX2, ref ChoX2, cs, ec, pc_methodPrefix); } } public static void BuildComplexTypeParser(Hashtable SequenceTable, Hashtable ChoiceTable, CodeProcessor cs, SortedList SortedServiceList, string pc_methodPrefix, string pc_methodLibPrefix) { IDictionaryEnumerator en = SortedServiceList.GetEnumerator(); while (en.MoveNext()) { UPnPService service = (UPnPService)en.Value; foreach (UPnPComplexType CT in service.GetComplexTypeList()) { int SeqX = 0; int ChoX = 0; cs.Append("struct " + CT.Name_LOCAL + "* " + pc_methodPrefix + "Parse_" + CT.Name_LOCAL + "(struct " + pc_methodLibPrefix + "XMLNode *node)" + cl); cs.Append("{" + cl); cs.Append(" struct " + pc_methodLibPrefix + "XMLNode *current = node;" + cl); cs.Append(" struct " + CT.Name_LOCAL + " *RetVal;" + cl); cs.Append(" " + cl); cs.Append(" int OK;" + cl); cs.Append(" char *text;" + cl); cs.Append(" int textLength;" + cl); cs.Append(" if ((RetVal = (struct " + CT.Name_LOCAL + "*)malloc(sizeof(struct " + CT.Name_LOCAL + "))) == NULL) ILIBCRITICALEXIT(254);" + cl); cs.Append(cl); cs.Append(" memset(RetVal, 0, sizeof(struct " + CT.Name_LOCAL + "));" + cl); cs.Append(cl); cs.Append(" while(node != NULL)" + cl); cs.Append(" {" + cl); cs.Append(" textLength = " + pc_methodLibPrefix + "ReadInnerXML(node, &text);" + cl); foreach (UPnPComplexType.GenericContainer gc in CT.Containers) { foreach (UPnPComplexType.ItemCollection ic in gc.Collections) { BuildComplexTypeParser_Collection("", SequenceTable, ChoiceTable, ref SeqX, ref ChoX, cs, ic, pc_methodPrefix); } } cs.Append(" node = node->Peer;" + 
cl); cs.Append(" }" + cl); cs.Append(" return(RetVal);" + cl); cs.Append("}" + cl); } } } private void BuildCreateMicroStackDefinition_sprintf(CodeProcessor cs, UPnPDevice d, int i) { cs.Append(", FriendlyName"); if (i != 1) { cs.Append(i.ToString()); } cs.Append(", RetVal->Serial, RetVal->UDN"); foreach (UPnPDevice ed in d.EmbeddedDevices) { BuildCreateMicroStackDefinition_sprintf(cs, ed, ++i); } } private void BuildCreateMicroStackDefinition(CodeProcessor cs, UPnPDevice d, int i) { UPnPDevice parent = d; while (parent.ParentDevice != null) { parent = parent.ParentDevice; } if (parent.User.Equals(d.User)) { cs.Append("const char* FriendlyName"); if (i != 1) cs.Append(i.ToString()); cs.Append(", "); foreach (UPnPDevice ed in d.EmbeddedDevices) BuildCreateMicroStackDefinition(cs, ed, ++i); } } private void BuildObjectMetaData(CodeProcessor cs, UPnPDevice d, int i) { string deviceIdent = DeviceObjectGenerator.GetDeviceIdentifier(d); if (d.ParentDevice == null) { deviceIdent += "."; } else { deviceIdent += "->"; } UPnPDevice parentDevice = d; while (parentDevice.ParentDevice != null) { parentDevice = parentDevice.ParentDevice; } if (parentDevice.User.Equals(d.User)) { cs.Append(" " + deviceIdent + "FriendlyName = FriendlyName"); if (i != 1) { cs.Append(i.ToString()); } cs.Append(";" + cl); } if (i == 1) { cs.Append(" " + deviceIdent + "UDN = UDN;" + cl); cs.Append(" " + deviceIdent + "Serial = SerialNumber;" + cl); } cs.Append(" if (" + deviceIdent + "Manufacturer == NULL) {" + deviceIdent + "Manufacturer = \"" + (string)((object[])d.User3)[2] + "\";}" + cs.NewLine); cs.Append(" if (" + deviceIdent + "ManufacturerURL == NULL) {" + deviceIdent + "ManufacturerURL = \"" + (string)((object[])d.User3)[3] + "\";}" + cs.NewLine); cs.Append(" if (" + deviceIdent + "ModelDescription == NULL) {" + deviceIdent + "ModelDescription = \"" + (string)((object[])d.User3)[4] + "\";}" + cs.NewLine); cs.Append(" if (" + deviceIdent + "ModelName == NULL) {" + deviceIdent + "ModelName = 
\"" + (string)((object[])d.User3)[5] + "\";}" + cs.NewLine); cs.Append(" if (" + deviceIdent + "ModelNumber == NULL) {" + deviceIdent + "ModelNumber = \"" + (string)((object[])d.User3)[6] + "\";}" + cs.NewLine); if (((object[])d.User3)[7] != null) { cs.Append(" if (" + deviceIdent + "ModelURL == NULL) {" + deviceIdent + "ModelURL = \"" + ((Uri)((object[])d.User3)[7]).AbsoluteUri + "\";}" + cs.NewLine); } else { cs.Append(" if (" + deviceIdent + "ModelURL == NULL) {" + deviceIdent + "ModelURL = \"" + (string)((object[])d.User3)[3] + "\";}" + cs.NewLine); } cs.Append(" if (" + deviceIdent + "ProductCode == NULL) {" + deviceIdent + "ProductCode = \"" + (string)((object[])d.User3)[8] + "\";}" + cs.NewLine); foreach (UPnPDevice ed in d.EmbeddedDevices) { BuildObjectMetaData(cs, ed, ++i); } } private int BuildCreateMicroStackDefinition_Malloc(CodeProcessor cs, UPnPDevice d, int i) { cs.Append("+ (int)strlen(FriendlyName"); if (i != 1) { cs.Append(i.ToString()); } cs.Append(") "); foreach (UPnPDevice ed in d.EmbeddedDevices) { BuildCreateMicroStackDefinition_Malloc(cs, ed, ++i); } return (i); } protected void FriendlyName(UPnPDevice d) { if (d.FriendlyName == "%s") { d.FriendlyName = (string)MasterFriendlyNameTable[d]; } else { MasterFriendlyNameTable[d] = d.FriendlyName; } foreach (UPnPDevice ed in d.EmbeddedDevices) { FriendlyName(ed); } } public override bool Generate(UPnPDevice[] devices, DirectoryInfo outputDirectory) { StreamWriter W; bool RetVal = false; string SampleApp = null; bool deviceOK = false; bool cpOK = false; string WS = null; string WS2 = null; if (!Configuration.SupressSampleProject) { switch (Configuration.TargetPlatform) { case ServiceGenerator.PLATFORMS.MICROSTACK_POSIX: case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK1: case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK2: SampleApp = SourceCodeRepository.GetMain_C_Template(); break; case ServiceGenerator.PLATFORMS.MICROSTACK_SYMBIANv9_1: SampleApp = SourceCodeRepository.Get_Generic("SAMPLE_CPP"); 
break; case ServiceGenerator.PLATFORMS.MICROSTACK_POCKETPC: SampleApp = SourceCodeRepository.Get_SampleProjectDlg_cpp(); break; } } MasterFriendlyNameTable.Clear(); foreach (UPnPDevice d in devices) { FriendlyName(d); } if (Configuration.CPlusPlusWrapper || Configuration.DynamicObjectModel) { DeviceObjectGenerator.PrepDevice(devices); } #region ILib File Generation SourceCodeRepository.Generate_Parsers(Configuration.prefixlib, outputDirectory); SourceCodeRepository.Generate_AsyncSocket(Configuration.prefixlib, outputDirectory); SourceCodeRepository.Generate_AsyncUDPSocket(Configuration.prefixlib, outputDirectory); SourceCodeRepository.Generate_AsyncServerSocket(Configuration.prefixlib, outputDirectory); SourceCodeRepository.Generate_WebClient(Configuration, outputDirectory); SourceCodeRepository.Generate_WebServer(Configuration, outputDirectory); if (Configuration.GenerateThreadPoolLibrary) { SourceCodeRepository.Generate_ThreadPool(Configuration.prefixlib, outputDirectory); } if (Configuration.TargetPlatform == ServiceGenerator.PLATFORMS.MICROSTACK_SYMBIANv9_1) { SourceCodeRepository.Generate_Generic(Configuration.prefixlib, outputDirectory, "ChainAdaptor.h", "CHAINADAPTOR_H"); SourceCodeRepository.Generate_Generic(Configuration.prefixlib, outputDirectory, "ChainAdaptor.cpp", "CHAINADAPTOR_CPP"); SourceCodeRepository.Generate_Generic(Configuration.prefixlib, outputDirectory, "ChainEngine.h", "CHAINENGINE_H"); SourceCodeRepository.Generate_Generic(Configuration.prefixlib, outputDirectory, "ChainEngine.cpp", "CHAINENGINE_CPP"); SourceCodeRepository.Generate_Generic(Configuration.prefixlib, outputDirectory, "SocketWrapper.h", "SOCKETWRAPPER_H"); SourceCodeRepository.Generate_Generic(Configuration.prefixlib, outputDirectory, "SocketWrapper.cpp", "SOCKETWRAPPER_CPP"); SourceCodeRepository.Generate_Generic(Configuration.prefixlib, outputDirectory, "SymbianSemaphore.h", "SYMBIANSEMAPHORE_H"); SourceCodeRepository.Generate_Generic(Configuration.prefixlib, outputDirectory, 
"SymbianSemaphore.cpp", "SYMBIANSEMAPHORE_CPP"); SourceCodeRepository.Generate_Generic(Configuration.prefixlib, outputDirectory, "ChainDefs.h", "CHAINDEFS_H"); } #endregion #region Generate MicroStack files for each device foreach (UPnPDevice device in devices) { if (((ServiceGenerator.Configuration)device.User).ConfigType == ServiceGenerator.ConfigurationType.DEVICE) { if (Configuration.UPNP_1dot1) { device.ArchitectureVersion = "1.1"; device.BootID = "%d"; } else { device.ArchitectureVersion = "1.0"; device.BootID = ""; } device.ClearCustomFieldsInDescription(); ((ServiceGenerator.Configuration)device.User).AddAllCustomFieldsToDevice(device); device.HasPresentation = ((ServiceGenerator.Configuration)device.User).AdvertisesPresentationPage; if (device.HasPresentation) device.PresentationURL = "/web"; RetVal = GenerateEx(device, outputDirectory, GetServiceNameTable(device), ref SampleApp); if (!RetVal) { break; } } } #endregion #region C++ Wrapper Generation if (Configuration.CPlusPlusWrapper) { string CPlusPlus_H = DeviceObjectGenerator.GetCPlusPlusAbstraction_H(devices); #region Prefixes CPlusPlus_H = CPlusPlus_H.Replace("ILib", Configuration.prefixlib); #endregion #region Write to disk W = File.CreateText(outputDirectory.FullName + "\\UPnPAbstraction.h"); W.Write(CPlusPlus_H); W.Close(); #endregion foreach (UPnPDevice d in devices) { FriendlyName(d); } string CPlusPlus_CPP = DeviceObjectGenerator.GetCPlusPlusAbstraction_CPP(devices); #region Prefixes CPlusPlus_CPP = CPlusPlus_CPP.Replace("ILib", Configuration.prefixlib); #endregion #region Write to disk W = File.CreateText(outputDirectory.FullName + "\\UPnPAbstraction.cpp"); W.Write(CPlusPlus_CPP); W.Close(); #endregion } #endregion CPEmbeddedCGenerator gen2 = new CPEmbeddedCGenerator(Configuration, SampleApp); gen2.Generate(devices, outputDirectory); SampleApp = gen2.SampleApplication; if (SampleApp != null) { #region Main.c / SampleProjectDlg.cpp #region Platform if (Configuration.TargetPlatform != 
ServiceGenerator.PLATFORMS.MICROSTACK_POSIX) { SampleApp = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_POSIX}}}", "//{{{END_POSIX}}}", SampleApp); } else { SampleApp = SourceCodeRepository.RemoveTag("//{{{BEGIN_POSIX}}}", "//{{{END_POSIX}}}", SampleApp); } if (Configuration.TargetPlatform == ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK1 || Configuration.TargetPlatform == ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK2 || Configuration.TargetPlatform == ServiceGenerator.PLATFORMS.MICROSTACK_POCKETPC) { SampleApp = SourceCodeRepository.RemoveTag("//{{{BEGIN_WIN32}}}", "//{{{END_WIN32}}}", SampleApp); } else { SampleApp = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_WIN32}}}", "//{{{END_WIN32}}}", SampleApp); } #endregion if (Configuration.CPlusPlusWrapper) { SampleApp = SampleApp.Replace("//{{{CLASS_DEFINITIONS_DEVICE}}}", DeviceObjectGenerator.GetCPlusPlus_DerivedSampleClasses(devices)); SampleApp = SampleApp.Replace("//{{{CLASS_IMPLEMENTATIONS_DEVICE}}}", DeviceObjectGenerator.GetCPlusPlus_DerivedSampleClasses_Implementation(devices)); SampleApp = SampleApp.Replace("//{{{DERIVED_CLASS_INSERTION}}}", DeviceObjectGenerator.GetCPlusPlus_DerivedSampleClasses_Insertion(devices)); } SampleApp = SampleApp.Replace("{{{INITSTRING}}}", ""); SampleApp = SampleApp.Replace("//{{{DEVICE_INVOCATION_DISPATCH}}}", ""); SampleApp = SampleApp.Replace("//{{{INVOCATION_FP}}}", ""); SampleApp = SampleApp.Replace("//{{{MICROSTACK_VARIABLE}}}", ""); SampleApp = SampleApp.Replace("//{{{MicroStack_Include}}}", ""); SampleApp = SampleApp.Replace("//{{{CREATE_MICROSTACK}}}", ""); SampleApp = SampleApp.Replace("//{{{STATEVARIABLES_INITIAL_STATE}}}", ""); SampleApp = SampleApp.Replace("//{{{IPAddress_Changed}}}", ""); SampleApp = SampleApp.Replace("//{{{PresentationRequest}}}", ""); if (this.Configuration.InitThreadPoolInSampleApp) { SampleApp = SourceCodeRepository.RemoveTag("//{{{BEGIN_THREADPOOL}}}", "//{{{END_THREADPOOL}}}", SampleApp); SampleApp = 
SampleApp.Replace("!NUMTHREADPOOLTHREADS!", Configuration.ThreadPoolThreads_InSampleApp.ToString()); } else { SampleApp = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_THREADPOOL}}}", "//{{{END_THREADPOOL}}}", SampleApp); } if (Configuration.BareBonesSample) { SampleApp = SourceCodeRepository.RemoveTag("//{{{BEGIN_BAREBONES}}}", "//{{{END_BAREBONES}}}", SampleApp); } else { SampleApp = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_BAREBONES}}}", "//{{{END_BAREBONES}}}", SampleApp); } #region C or C++ if (Configuration.CPlusPlusWrapper) { SampleApp = SourceCodeRepository.RemoveAndClearTag("//{{{STANDARD_C_APP_BEGIN}}}", "//{{{STANDARD_C_APP_END}}}", SampleApp); SampleApp = SourceCodeRepository.RemoveTag("//{{{STANDARD_C++_APP_BEGIN}}}", "//{{{STANDARD_C++_APP_END}}}", SampleApp); } else { SampleApp = SourceCodeRepository.RemoveTag("//{{{STANDARD_C_APP_BEGIN}}}", "//{{{STANDARD_C_APP_END}}}", SampleApp); SampleApp = SourceCodeRepository.RemoveAndClearTag("//{{{STANDARD_C++_APP_BEGIN}}}", "//{{{STANDARD_C++_APP_END}}}", SampleApp); } #endregion #region Write to disk switch (Configuration.TargetPlatform) { case ServiceGenerator.PLATFORMS.MICROSTACK_POSIX: case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK1: case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK2: if (Configuration.CPlusPlusWrapper) { W = File.CreateText(outputDirectory.FullName + "\\Main.cpp"); SampleApp = SampleApp.Replace("Main.c", "Main.cpp"); } else { W = File.CreateText(outputDirectory.FullName + "\\Main.c"); } break; case ServiceGenerator.PLATFORMS.MICROSTACK_POCKETPC: W = File.CreateText(outputDirectory.FullName + "\\SampleProjectDlg.cpp"); break; case ServiceGenerator.PLATFORMS.MICROSTACK_SYMBIANv9_1: W = File.CreateText(outputDirectory.FullName + "\\Sample.cpp"); break; default: W = null; break; } if (W != null) { W.Write(SampleApp); W.Close(); } #endregion #endregion #region Visual Studio Files and Posix Makefile #region Initialize Project and Makefile switch 
(Configuration.TargetPlatform) { case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK1: WS = SourceCodeRepository.Get_UPnPSample_vcproj().Replace("{{{WINSOCK}}}", "WINSOCK1"); WS = WS.Replace("{{{WINSOCK_LIB}}}", "WSock32.lib"); break; case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK2: WS = SourceCodeRepository.Get_UPnPSample_vcproj().Replace("{{{WINSOCK}}}", "WINSOCK2"); WS = WS.Replace("{{{WINSOCK_LIB}}}", "Psapi.lib ws2_32.lib Iphlpapi.lib"); break; case ServiceGenerator.PLATFORMS.MICROSTACK_POSIX: WS = SourceCodeRepository.Get_Makefile().Replace("{{{BUILD_NUMBER}}}", UseVersion); break; case ServiceGenerator.PLATFORMS.MICROSTACK_POCKETPC: WS = SourceCodeRepository.Get_SampleProject_vcp(); break; case ServiceGenerator.PLATFORMS.MICROSTACK_SYMBIANv9_1: WS = SourceCodeRepository.Get_Generic("_MMP"); break; } #endregion if (WS != null) { #region Building Project and Makefile if (Configuration.GenerateThreadPoolLibrary) { WS = SourceCodeRepository.RemoveTag("//{{{BEGIN_THREADPOOL}}}", "//{{{END_THREADPOOL}}}", WS); } else { WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_THREADPOOL}}}", "//{{{END_THREADPOOL}}}", WS); } if (Configuration.CPlusPlusWrapper) { WS = SourceCodeRepository.RemoveTag("//{{{BEGIN_C++}}}", "//{{{END_C++}}}", WS); } else { WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_C++}}}", "//{{{END_C++}}}", WS); } WS = WS.Replace("ILib", Configuration.prefixlib); deviceOK = false; cpOK = false; foreach (UPnPDevice device in devices) { ServiceGenerator.Configuration DeviceConf = (ServiceGenerator.Configuration)device.User; switch (DeviceConf.ConfigType) { case ServiceGenerator.ConfigurationType.DEVICE: deviceOK = true; switch (Configuration.TargetPlatform) { case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK1: case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK2: WS = WS.Replace("{{{INCLUDE_C}}}", "{{{INCLUDE_C}}}" + "\r\n<File RelativePath=\"" + DeviceConf.Prefix + "MicroStack.c\"/>"); WS = WS.Replace("{{{INCLUDE_H}}}", "{{{INCLUDE_H}}}" + 
"\r\n<File RelativePath=\"" + DeviceConf.Prefix + "MicroStack.h\"/>"); break; case ServiceGenerator.PLATFORMS.MICROSTACK_POSIX: WS = WS.Replace("{{{O_FILES}}}", DeviceConf.Prefix + "MicroStack.o\\" + "\n{{{O_FILES}}}"); WS = WS.Replace("{{{H_FILES}}}", DeviceConf.Prefix + "MicroStack.h\\" + "\n{{{H_FILES}}}"); break; case ServiceGenerator.PLATFORMS.MICROSTACK_POCKETPC: WS2 = SourceCodeRepository.GetTextBetweenTags(WS, "{{{BEGIN_MICROSTACK_DEFINTION}}}", "{{{END_MICROSTACK_DEFINTION}}}"); WS2 = WS2.Replace("{{CODEPREFIX}}", DeviceConf.Prefix); WS2 = WS2.Replace("{{LIBPREFIX}}", Configuration.prefixlib); WS2 = WS2.Replace("{{CODEPREFIX_CAPS}}", DeviceConf.Prefix.ToUpper()); WS2 = WS2.Replace("{{STACK}}", "MicroStack"); WS = SourceCodeRepository.InsertTextBeforeTag(WS, "{{{BEGIN_MICROSTACK_DEFINTION}}}", WS2); WS2 = SourceCodeRepository.GetTextBetweenTags(WS, "{{{BEGIN_MICROSTACK_H}}}", "{{{END_MICROSTACK_H}}}"); WS2 = WS2.Replace("{{CODEPREFIX}}", DeviceConf.Prefix); WS2 = WS2.Replace("{{STACK}}", "MicroStack"); WS = SourceCodeRepository.InsertTextBeforeTag(WS, "{{{BEGIN_MICROSTACK_H}}}", WS2); WS = WS.Replace("{{{MICROSTACK_H}}}", "{{{MICROSTACK_H}}}\r\n\t\".\\" + DeviceConf.Prefix + "MicroStack.h\"\\"); break; case ServiceGenerator.PLATFORMS.MICROSTACK_SYMBIANv9_1: WS2 = SourceCodeRepository.GetTextBetweenTags(WS, "//{{{BeginSource}}}", "//{{{EndSource}}}"); WS2 = WS2.Replace("{{{SOURCE}}}", DeviceConf.Prefix + "MicroStack.c"); WS = SourceCodeRepository.InsertTextBeforeTag(WS, "//{{{BeginSource}}}", WS2); break; } break; case ServiceGenerator.ConfigurationType.CONTROLPOINT: cpOK = true; switch (Configuration.TargetPlatform) { case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK1: case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK2: WS = WS.Replace("{{{INCLUDE_C}}}", "{{{INCLUDE_C}}}" + "\r\n<File RelativePath=\"" + DeviceConf.Prefix + "ControlPoint.c\"/>"); WS = WS.Replace("{{{INCLUDE_H}}}", "{{{INCLUDE_H}}}" + "\r\n<File RelativePath=\"" + DeviceConf.Prefix + 
"ControlPoint.h\"/>"); break; case ServiceGenerator.PLATFORMS.MICROSTACK_POSIX: WS = WS.Replace("{{{O_FILES}}}", DeviceConf.Prefix + "ControlPoint.o\\" + "\n{{{O_FILES}}}"); WS = WS.Replace("{{{H_FILES}}}", DeviceConf.Prefix + "ControlPoint.h\\" + "\n{{{H_FILES}}}"); break; case ServiceGenerator.PLATFORMS.MICROSTACK_POCKETPC: WS2 = SourceCodeRepository.GetTextBetweenTags(WS, "{{{BEGIN_MICROSTACK_DEFINTION}}}", "{{{END_MICROSTACK_DEFINTION}}}"); WS2 = WS2.Replace("{{CODEPREFIX}}", DeviceConf.Prefix); WS2 = WS2.Replace("{{LIBPREFIX}}", Configuration.prefixlib); WS2 = WS2.Replace("{{CODEPREFIX_CAPS}}", DeviceConf.Prefix.ToUpper()); WS2 = WS2.Replace("{{STACK}}", "ControlPoint"); WS = SourceCodeRepository.InsertTextBeforeTag(WS, "{{{BEGIN_MICROSTACK_DEFINTION}}}", WS2); WS2 = SourceCodeRepository.GetTextBetweenTags(WS, "{{{BEGIN_MICROSTACK_H}}}", "{{{END_MICROSTACK_H}}}"); WS2 = WS2.Replace("{{CODEPREFIX}}", DeviceConf.Prefix); WS2 = WS2.Replace("{{STACK}}", "ControlPoint"); WS = SourceCodeRepository.InsertTextBeforeTag(WS, "{{{BEGIN_MICROSTACK_H}}}", WS2); WS = WS.Replace("{{{MICROSTACK_H}}}", "{{{MICROSTACK_H}}}\r\n\t\".\\" + DeviceConf.Prefix + "ControlPoint.h\"\\"); break; case ServiceGenerator.PLATFORMS.MICROSTACK_SYMBIANv9_1: WS2 = SourceCodeRepository.GetTextBetweenTags(WS, "//{{{BeginSource}}}", "//{{{EndSource}}}"); WS2 = WS2.Replace("{{{SOURCE}}}", DeviceConf.Prefix + "ControlPoint.c"); WS = SourceCodeRepository.InsertTextBeforeTag(WS, "//{{{BeginSource}}}", WS2); break; } break; } } if (cpOK) { // Insert CP related files into the project file switch (Configuration.TargetPlatform) { case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK1: case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK2: WS = WS.Replace("{{{INCLUDE_C}}}", "{{{INCLUDE_C}}}" + "\r\n<File RelativePath=\"" + Configuration.prefixlib + "SSDPClient.c\"/>"); WS = WS.Replace("{{{INCLUDE_H}}}", "{{{INCLUDE_H}}}" + "\r\n<File RelativePath=\"" + Configuration.prefixlib + "SSDPClient.h\"/>"); WS = 
WS.Replace("{{{INCLUDE_H}}}", "{{{INCLUDE_H}}}" + "\r\n<File RelativePath=\"UPnPControlPointStructs.h\"/>"); break; case ServiceGenerator.PLATFORMS.MICROSTACK_POSIX: WS = WS.Replace("{{{O_FILES}}}", Configuration.prefixlib + "SSDPClient.o\\" + "\n{{{O_FILES}}}"); WS = WS.Replace("{{{H_FILES}}}", Configuration.prefixlib + "SSDPClient.h\\" + "\n{{{H_FILES}}}"); break; case ServiceGenerator.PLATFORMS.MICROSTACK_POCKETPC: WS = WS.Replace("{{{MICROSTACK_H}}}", "{{{MICROSTACK_H}}}\r\n\t\".\\" + Configuration.prefixlib + "SSDPClient.h\"\\"); WS = WS.Replace("{{{MICROSTACK_H}}}", "{{{MICROSTACK_H}}}\r\n\t\".\\UPnPControlPointStructs.h\"\\"); WS2 = SourceCodeRepository.GetTextBetweenTags(WS, "{{{BEGIN_MICROSTACK_H}}}", "{{{END_MICROSTACK_H}}}"); WS2 = WS2.Replace("{{CODEPREFIX}}", ""); WS2 = WS2.Replace("{{STACK}}", "UPnPControlPointStructs"); WS = SourceCodeRepository.InsertTextBeforeTag(WS, "{{{BEGIN_MICROSTACK_H}}}", WS2); WS2 = SourceCodeRepository.GetTextBetweenTags(WS, "{{{BEGIN_MICROSTACK_H}}}", "{{{END_MICROSTACK_H}}}"); WS2 = WS2.Replace("{{CODEPREFIX}}", this.pc_methodLibPrefix); WS2 = WS2.Replace("{{STACK}}", "SSDPClient"); WS = SourceCodeRepository.InsertTextBeforeTag(WS, "{{{BEGIN_MICROSTACK_H}}}", WS2); WS = WS.Replace("{{LIBPREFIX}}", Configuration.prefixlib); WS = WS.Replace("{{LIBPREFIX_CAPS}}", Configuration.prefixlib.ToUpper()); WS = SourceCodeRepository.RemoveTag("{{{BEGIN_SSDPCLIENT}}}", "{{{END_SSDPCLIENT}}}", WS); break; case ServiceGenerator.PLATFORMS.MICROSTACK_SYMBIANv9_1: WS2 = SourceCodeRepository.GetTextBetweenTags(WS, "//{{{BeginSource}}}", "//{{{EndSource}}}"); WS2 = WS2.Replace("{{{SOURCE}}}", Configuration.prefixlib + "SSDPClient.c"); WS = SourceCodeRepository.InsertTextBeforeTag(WS, "//{{{BeginSource}}}", WS2); break; } } else { WS = SourceCodeRepository.RemoveAndClearTag("{{{BEGIN_SSDPCLIENT}}}", "{{{END_SSDPCLIENT}}}", WS); } WS = WS.Replace("{{{INCLUDE_C}}}", ""); WS = WS.Replace("{{{INCLUDE_H}}}", ""); WS = WS.Replace("{{{O_FILES}}}", 
""); WS = WS.Replace("{{{H_FILES}}}", ""); WS = WS.Replace("{{{MICROSTACK_H}}}", ""); WS = SourceCodeRepository.RemoveAndClearTag("{{{BEGIN_MICROSTACK_DEFINTION}}}", "{{{END_MICROSTACK_DEFINTION}}}", WS); WS = SourceCodeRepository.RemoveAndClearTag("{{{BEGIN_MICROSTACK_H}}}", "{{{END_MICROSTACK_H}}}", WS); if (!Configuration.BareBonesSample) { WS = WS.Replace("<-- Additional Filters -->", ""); } #endregion #region Write various files to Disc switch (Configuration.TargetPlatform) { #region Visual Studio 2003 Solution/Project files case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK1: case ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK2: #region UPnPSample.vcproj if (Configuration.CPlusPlusWrapper) { WS = WS.Replace("Main.c", "Main.cpp"); } W = File.CreateText(outputDirectory.FullName + "\\UPnPSample.vcproj"); W.Write(WS); W.Close(); #endregion #region UPnPSample.sln W = File.CreateText(outputDirectory.FullName + "\\UPnPSample.sln"); W.Write(SourceCodeRepository.Get_UPnPSample_sln()); W.Close(); #endregion #region stdafx.h W = File.CreateText(outputDirectory.FullName + "\\stdafx.h"); W.Write(SourceCodeRepository.Get_Win32_stdafx_h()); W.Close(); #endregion #region stdafx.cpp W = File.CreateText(outputDirectory.FullName + "\\stdafx.cpp"); W.Write(SourceCodeRepository.Get_Win32_stdafx_cpp()); W.Close(); #endregion break; #endregion #region PocketPC 2003 Specific case ServiceGenerator.PLATFORMS.MICROSTACK_POCKETPC: #region SampleProject.vcp WS = WS.Replace("{{CODEPREFIX}}", this.pc_methodPrefix); WS = WS.Replace("{{CODEPREFIX_CAPS}}", this.pc_methodPrefix.ToUpper()); WS = WS.Replace("{{LIBPREFIX}}", this.pc_methodLibPrefix); WS = WS.Replace("{{LIBPREFIX_CAPS}}", this.pc_methodLibPrefix.ToUpper()); #region Write to disk W = File.CreateText(outputDirectory.FullName + "\\SampleProject.vcp"); W.Write(WS); W.Close(); #endregion #endregion #region SampleProjectDlg.h WS = SourceCodeRepository.Get_SampleProjectDlg_h(); #region Write to disk W = 
File.CreateText(outputDirectory.FullName + "\\SampleProjectDlg.h"); W.Write(WS); W.Close(); #endregion #endregion #region SampleProject.cpp WS = SourceCodeRepository.Get_SampleProject_cpp(); #region Write to disk W = File.CreateText(outputDirectory.FullName + "\\SampleProject.cpp"); W.Write(WS); W.Close(); #endregion #endregion #region SampleProject.h WS = SourceCodeRepository.Get_SampleProject_h(); #region Write to disk W = File.CreateText(outputDirectory.FullName + "\\SampleProject.h"); W.Write(WS); W.Close(); #endregion #endregion #region newres.h WS = SourceCodeRepository.Get_newres_h(); #region Write to disk W = File.CreateText(outputDirectory.FullName + "\\newres.h"); W.Write(WS); W.Close(); #endregion #endregion #region resource.h WS = SourceCodeRepository.Get_resource_h(); #region Write to disk W = File.CreateText(outputDirectory.FullName + "\\resource.h"); W.Write(WS); W.Close(); #endregion #endregion #region SampleProject.rc WS = SourceCodeRepository.Get_SampleProject_rc(); #region Write to disk W = File.CreateText(outputDirectory.FullName + "\\SampleProject.rc"); W.Write(WS); W.Close(); #endregion #endregion #region SampleProject.vcw WS = SourceCodeRepository.Get_SampleProject_vcw(); #region Write to disk W = File.CreateText(outputDirectory.FullName + "\\SampleProject.vcw"); W.Write(WS); W.Close(); #endregion #endregion #region StdAfx.h WS = SourceCodeRepository.Get_StdAfx_h(); #region Write to disk W = File.CreateText(outputDirectory.FullName + "\\StdAfx.h"); W.Write(WS); W.Close(); #endregion #endregion #region StdAfx.cpp WS = SourceCodeRepository.Get_StdAfx_cpp(); #region Write to disk W = File.CreateText(outputDirectory.FullName + "\\StdAfx.cpp"); W.Write(WS); W.Close(); #endregion #endregion #region SampleProject.ico #region Write to disk byte[] b = SourceCodeRepository.Get_SampleProject_ico(); FileStream F = File.Create(outputDirectory.FullName + "\\SampleProject.ico", b.Length); F.Write(b, 0, b.Length); F.Close(); #endregion #endregion break; 
#endregion

#region Makefile
case ServiceGenerator.PLATFORMS.MICROSTACK_POSIX:
    // POSIX target: the project template is a makefile; C++ wrapper mode swaps the main translation unit.
    if (Configuration.CPlusPlusWrapper) { WS = WS.Replace("Main.c", "Main.cpp"); }
    W = File.CreateText(outputDirectory.FullName + "\\makefile");
    W.Write(WS);
    W.Close();
    break;
#endregion

#region Symbian MMP File
case ServiceGenerator.PLATFORMS.MICROSTACK_SYMBIANv9_1:
    // Symbian target: strip the source-insertion template tags before writing the .mmp project file.
    W = File.CreateText(outputDirectory.FullName + "\\Sample.mmp");
    WS = SourceCodeRepository.RemoveAndClearTag("//{{{BeginSource}}}", "//{{{EndSource}}}", WS);
    W.Write(WS);
    W.Close();
    break;
#endregion
}
#endregion
}
#endregion
}
return (RetVal);
}

/// <summary>
/// Recursively determines whether the given device, any of its embedded
/// devices, or any of its services declares a UPnP major version greater
/// than one.
/// </summary>
/// <param name="device">Root of the device tree to inspect.</param>
/// <returns>true if any device or service in the tree has Major &gt; 1; otherwise false.</returns>
protected bool DoesDeviceHaveAnyNonVersionOneComponents(UPnPDevice device)
{
    if (device.Major > 1)
    {
        return (true);
    }
    else
    {
        // Recurse into embedded devices first, then check this device's own services.
        foreach (UPnPDevice ed in device.EmbeddedDevices)
        {
            if (DoesDeviceHaveAnyNonVersionOneComponents(ed))
            {
                return (true);
            }
        }
        foreach (UPnPService s in device.Services)
        {
            if (s.Major > 1)
            {
                return (true);
            }
        }
    }
    return (false);
}

/// <summary>
/// Recursively determines whether any service hosted by this device, or by
/// any of its embedded devices, exposes at least one evented state variable
/// (one whose SendEvent flag is set).
/// </summary>
/// <param name="device">Root of the device tree to inspect.</param>
/// <returns>true if any state variable in the tree has SendEvent set; otherwise false.</returns>
protected bool DeviceHasEvents(OpenSource.UPnP.UPnPDevice device)
{
    foreach (UPnPDevice ed in device.EmbeddedDevices)
    {
        if (DeviceHasEvents(ed))
        {
            return (true);
        }
    }
    foreach (UPnPService s in device.Services)
    {
        foreach (UPnPStateVariable sv in s.GetStateVariables())
        {
            if (sv.SendEvent)
            {
                return (true);
            }
        }
    }
    return (false);
}

// NOTE(review): GenerateEx continues well beyond this chunk; only its opening is
// visible here. It appears to emit the MicroStack .h/.c sources for the device —
// confirm against the remainder of the method before relying on this description.
protected bool GenerateEx(UPnPDevice device, DirectoryInfo outputDirectory, Hashtable serviceNames, ref string SampleApp)
{
    // A null SampleApp ref means the caller did not request a sample application.
    bool BuildSampleApp = SampleApp == null ?
false : true; ServiceGenerator.Configuration DeviceConf = (ServiceGenerator.Configuration)device.User; #region Initialize string WS; StreamWriter W; Hashtable ChoTable = new Hashtable(); Hashtable SeqTable = new Hashtable(); int SequenceCounter = 0; int ChoiceCounter = 0; string first = ""; RootDevice = device; SortedList SL = new SortedList(); IDictionaryEnumerator en = serviceNames.GetEnumerator(); while (en.MoveNext()) { SL[en.Value] = en.Key; } en = SL.GetEnumerator(); if (this.SubTarget == SUBTARGETS.NONE) { UseSystem = this.Platform.ToString(); } else { UseSystem = this.SubTarget.ToString(); } pc_methodPrefix = ((ServiceGenerator.Configuration)device.User).Prefix; pc_methodLibPrefix = Configuration.prefixlib; if (this.Language == LANGUAGES.C) { pc_methodPrefixDef = CallingConvention + pc_methodPrefix; pc_classPrefix = ""; } if (this.Language == LANGUAGES.CPP) { pc_methodPrefixDef = CallingConvention + ClassName + "::" + pc_methodPrefix; pc_classPrefix = ClassName + "::"; } AllServices.Clear(); AddAllServices(device); FriendlyNameTable.Clear(); Fix(device, 0, serviceNames); PrivateClassDeclarations = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); PublicClassDeclarations = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); CodeProcessor cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.NewLine = this.CodeNewLine; cs.ClassDefinitions = PrivateClassDeclarations; cs.PublicClassDefinitions = PublicClassDeclarations; PrivateClassDeclarations.CodeTab = Indent; PublicClassDeclarations.CodeTab = Indent; cs.CodeTab = Indent; #endregion #region New Style UPnPMicroStack.h WS = SourceCodeRepository.GetMicroStack_H_Template(pc_methodPrefix); #region UPnP/1.1 Complex Types cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); BuildComplexTypeDefinitionsAndHeaders(SL, cs, SeqTable, ChoTable, ref SequenceCounter, ref ChoiceCounter, this.pc_methodPrefix, this.pc_methodLibPrefix); 
WS = WS.Replace("//{{{ComplexTypeCode}}}", cs.ToString()); #endregion #region Function Callbacks cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); en.Reset(); while (en.MoveNext()) { UPnPService service = (UPnPService)en.Value; foreach (UPnPAction action in service.Actions) { cs.Append(" typedef void(*UPnP_ActionHandler_" + serviceNames[service] + "_" + action.Name + ") (void* upnptoken"); foreach (UPnPArgument args in action.Arguments) { if (args.Direction == "in") { if (args.RelatedStateVar.ComplexType == null) { cs.Append("," + ToCType(args.RelatedStateVar.GetNetType().FullName) + " " + args.Name); if (args.RelatedStateVar.GetNetType().FullName == "System.Byte[]") { cs.Append(",int _" + args.Name + "Length"); } } else { // Complex Type cs.Append(", struct " + args.RelatedStateVar.ComplexType.Name_LOCAL + " *" + args.Name); } } } cs.Append(");" + cl); } } en.Reset(); while (en.MoveNext()) { UPnPService service = (UPnPService)en.Value; if (Configuration.EXTERN_Callbacks == true || serviceNames[service].ToString() == "DeviceSecurity") { foreach (UPnPAction action in service.Actions) { if (serviceNames[service].ToString() == "DeviceSecurity") { cs.Append("extern void " + pc_methodLibPrefix + serviceNames[service] + "_" + action.Name + "(void* upnptoken"); } else { cs.Append("extern void " + pc_methodPrefix + serviceNames[service] + "_" + action.Name + "(void* upnptoken"); } foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "in") { cs.Append("," + ToCType(arg.RelatedStateVar.GetNetType().ToString()) + " " + arg.Name); if (arg.RelatedStateVar.GetNetType().FullName == "System.Byte[]") { cs.Append(",int _" + arg.Name + "Length"); } } } cs.Append(");" + cl); } } } if (Configuration.EXTERN_Callbacks == false) { cs.Comment("UPnP Set Function Pointers Methods"); cs.Append("extern void (*" + pc_methodPrefixDef + "FP_PresentationPage) (void* upnptoken,struct packetheader *packet);" + cl); BuildFunctionPointerHeaders(cs, device, 
serviceNames); cs.Append(cl); } else { cs.Append("extern void " + pc_methodPrefix + "PresentationRequest(void* upnptoken, struct packetheader *packet);" + cl); } WS = WS.Replace("//{{{UPnP_Set_Function_Pointer_Methods}}}", cs.ToString()); #endregion #region Invocation Response Methods cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.Comment("Invocation Response Methods"); cs.Append("void " + pc_methodPrefixDef + "Response_Error(const UPnPSessionToken UPnPToken, const int ErrorCode, const char* ErrorMsg);" + cl); cs.Append("void " + pc_methodPrefixDef + "ResponseGeneric(const UPnPSessionToken UPnPToken,const char* ServiceURI,const char* MethodName,const char* Params);" + cl); if (ServiceGenerator.ServiceConfiguration.HasFragmentedActions(device)) { cs.Append("int " + pc_methodPrefixDef + "AsyncResponse_START(const UPnPSessionToken UPnPToken, const char* actionName, const char* serviceUrnWithVersion);" + cl); cs.Append("int " + pc_methodPrefixDef + "AsyncResponse_DONE(const UPnPSessionToken UPnPToken, const char* actionName);" + cl); cs.Append("int " + pc_methodPrefixDef + "AsyncResponse_OUT(const UPnPSessionToken UPnPToken, const char* outArgName, const char* bytes, const int byteLength, enum ILibAsyncSocket_MemoryOwnership bytesMemoryOwnership,const int startArg, const int endArg);" + cl); } BuildUPnPResponseHeaders(cs, device, serviceNames); WS = WS.Replace("//{{{Invocation_Response_Methods}}}", cs.ToString()); #endregion #region Eventing Methods cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.Comment("State Variable Eventing Methods"); BuildStateVariableHeaders(cs, device, serviceNames); WS = WS.Replace("//{{{Eventing_Methods}}}", cs.ToString()); #endregion #region Multicast Eventing Methods if (device.ArchitectureVersion != "1.0") { cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.Comment("State Variable Multicast-Eventing Methods"); 
BuildMulticastStateVariableHeaders(cs, device, serviceNames); WS = WS.Replace("//{{{MulticastEventing_Methods}}}", cs.ToString()); WS = SourceCodeRepository.RemoveTag("//{{{BEGIN_MulticastEventing}}}", "//{{{END_MulticastEventing}}}", WS); WS = BuildMulticastStateVariableHeaders2(WS, device, serviceNames); WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_MulticastEventing_Specific}}}", "//{{{END_MulticastEventing_Specific}}}", WS); } else { WS = WS.Replace("//{{{MulticastEventing_Methods}}}", ""); WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_MulticastEventing}}}", "//{{{END_MulticastEventing}}}", WS); } #endregion #region CreateMicroStack Definition cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.Append("UPnPMicroStackToken UPnPCreateMicroStack(void *Chain, "); BuildCreateMicroStackDefinition(cs, device, 1); cs.Append("const char* UDN, const char* SerialNumber, const int NotifyCycleSeconds, const unsigned short PortNum);" + cl); WS = WS.Replace("//{{{CreateMicroStackHeader}}}", cs.ToString()); #endregion #region Device Object Model if (this.Configuration.DynamicObjectModel) { WS = WS.Replace("//{{{ObjectDefintions}}}", DeviceObjectGenerator.GetDeviceObjectsString(device)); WS = WS.Replace("//{{{GetConfiguration}}}", "struct UPnP_Device_" + device.User2.ToString() + "* UPnPGetConfiguration();" + cl); } else { WS = WS.Replace("//{{{ObjectDefintions}}}", ""); WS = WS.Replace("//{{{GetConfiguration}}}", ""); } #endregion #region Prefixes WS = WS.Replace("UPnP", this.pc_methodPrefix); WS = WS.Replace("ILib", this.pc_methodLibPrefix); #endregion #region Write to disk W = File.CreateText(outputDirectory.FullName + "\\" + pc_methodPrefix + "MicroStack.h"); W.Write(WS); W.Close(); #endregion #endregion #region New Style UPnPMicroStack.c WS = SourceCodeRepository.GetMicroStack_C_Template(pc_methodPrefix); #region Set Function Pointers if (Configuration.EXTERN_Callbacks == false) { cs.Comment("UPnP Set Function Pointers 
Methods"); string staticdef = ""; if (this.Language == LANGUAGES.CPP) staticdef = "static "; cs.Append("void (*" + pc_methodPrefixDef + "FP_PresentationPage) (void* upnptoken,struct packetheader *packet);" + cl); cs.PublicClassDefinitions.Append(staticdef + "void (*" + pc_methodPrefix + "FP_PresentationPage) (void* upnptoken,struct packetheader *packet);" + cl); BuildFunctionPointers(cs, device, serviceNames); cs.Append(cl); WS = WS.Replace("//{{{FunctionPointers}}}", cs.ToString()); } else { WS = WS.Replace("//{{{FunctionPointers}}}", ""); } #endregion #region Build and Compress Device Description //Compress Device Description cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); BuildDeviceDescription(cs, device); BuildServiceDescriptions(cs, device, serviceNames); WS = WS.Replace("//{{{CompressedDescriptionDocs}}}", cs.ToString()); #endregion #region Object Model if (this.Configuration.DynamicObjectModel) { WS = WS.Replace("//{{{ObjectDefintions}}}", DeviceObjectGenerator.GetPopulatedDeviceObjectsString(device)); WS = DeviceObjectGenerator.BuildDeviceDescriptionStreamer(device, WS); WS = SourceCodeRepository.RemoveTag("//{{{Device_Object_Model_BEGIN}}}", "//{{{Device_Object_Model_END}}}", WS); WS = SourceCodeRepository.RemoveAndClearTag("//{{{Device_Default_Model_BEGIN}}}", "//{{{Device_Default_Model_END}}}", WS); cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.Append("struct UPnP_Device_" + device.User2.ToString() + "* UPnPGetConfiguration()" + cl); cs.Append("{" + cl); cs.Append(" return(&(" + DeviceObjectGenerator.GetDeviceIdentifier(device) + "));" + cl); cs.Append("}" + cl); WS = WS.Replace("//{{{GetConfiguration}}}", cs.ToString()); } else { WS = SourceCodeRepository.RemoveAndClearTag("//{{{Device_Object_Model_BEGIN}}}", "//{{{Device_Object_Model_END}}}", WS); WS = SourceCodeRepository.RemoveTag("//{{{Device_Default_Model_BEGIN}}}", "//{{{Device_Default_Model_END}}}", WS); WS = 
WS.Replace("//{{{GetConfiguration}}}", ""); } #endregion #region FragmentedResponseSystem if (ServiceGenerator.ServiceConfiguration.HasFragmentedActions(device)) { WS = SourceCodeRepository.RemoveTag("//{{{BEGIN_FragmentedResponseSystem}}}", "//{{{END_FragmentedResponseSystem}}}", WS); } else { WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_FragmentedResponseSystem}}}", "//{{{END_FragmentedResponseSystem}}}", WS); } #endregion #region CreateMicroStackDefinition cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.Append("UPnPMicroStackToken UPnPCreateMicroStack(void *Chain, "); BuildCreateMicroStackDefinition(cs, device, 1); cs.Append("const char* UDN, const char* SerialNumber, const int NotifyCycleSeconds, const unsigned short PortNum)" + cl); WS = WS.Replace("//{{{CreateMicroStackDefinition}}}", cs.ToString()); cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.Append(" RetVal->DeviceDescriptionLength = snprintf(RetVal->DeviceDescription, len, DDT"); if (device.ArchitectureVersion != "1.0") { cs.Append(", RetVal->ConfigID"); } BuildCreateMicroStackDefinition_sprintf(cs, device, 1); cs.Append(");" + cl); WS = WS.Replace("//{{{CreateMicroStack_sprintf}}}", cs.ToString()); cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.Append(" len = 10 + UPnPDeviceDescriptionTemplateLengthUX"); int nd = BuildCreateMicroStackDefinition_Malloc(cs, device, 1); cs.Append(" + (((int)strlen(RetVal->Serial) + (int)strlen(RetVal->UUID)) * " + nd.ToString() + ");" + cl); cs.Append(" if ((RetVal->DeviceDescription = (char*)malloc(len)) == NULL) ILIBCRITICALEXIT(254);" + cl); WS = WS.Replace("//{{{DeviceDescriptionMalloc}}}", cs.ToString()); if (nd == 1) { WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_EmbeddedDevice>0}}}", "//{{{END_EmbeddedDevices>0}}}", WS); WS = SourceCodeRepository.RemoveTag("//{{{BEGIN_EmbeddedDevices=0}}}", "//{{{END_EmbeddedDevices=0}}}", WS); } else { WS = 
SourceCodeRepository.RemoveTag("//{{{BEGIN_EmbeddedDevice>0}}}", "//{{{END_EmbeddedDevices>0}}}", WS); WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_EmbeddedDevices=0}}}", "//{{{END_EmbeddedDevices=0}}}", WS); } #endregion #region CreateMicroStack --> Object Meta Data if (Configuration.DynamicObjectModel) { cs = new CodeProcessor(new StringBuilder(), false); BuildObjectMetaData(cs, device, 1); WS = WS.Replace("//{{{ObjectModel_MetaData}}}", cs.ToString()); } else { WS = WS.Replace("//{{{ObjectModel_MetaData}}}", ""); } #endregion #region Presentation Page Support cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); if (DeviceConf.AdvertisesPresentationPage) { cs.Append("/* Presentation Page Support */" + cl); cs.Append("if (header->DirectiveObjLength>=4 && memcmp(header->DirectiveObj,\"/web\",4)==0)" + cl); cs.Append("{" + cl); if (Configuration.EXTERN_Callbacks) { cs.Append(" UPnPPresentationRequest((void*)session,header);" + cl); } else { cs.Append(" UPnPFP_PresentationPage((void*)session,header);" + cl); } cs.Append("}" + cl); cs.Append("else "); } WS = WS.Replace("//{{{PRESENTATIONPAGE}}}", cs.ToString()); #endregion #region #define vs method definition, on SSDP related stuff (Embedded Devices) if (this.GetNumberOfTotalEmbeddedDevices(device) == 0) { // No Embedded Devices WS = SourceCodeRepository.RemoveTag("//{{{BEGIN_EmbeddedDevices=0}}}", "//{{{END_EmbeddedDevices=0}}}", WS); WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_EmbeddedDevices>0}}}", "//{{{END_EmbeddedDevices>0}}}", WS); } else { // There are Embedded Devices WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_EmbeddedDevices=0}}}", "//{{{END_EmbeddedDevices=0}}}", WS); WS = SourceCodeRepository.RemoveTag("//{{{BEGIN_EmbeddedDevices>0}}}", "//{{{END_EmbeddedDevices>0}}}", WS); } #endregion #region ssdp:all cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); BuildSSDPALL_Response(device, cs, 0); WS = 
WS.Replace("//{{{SSDP:ALL}}}", cs.ToString()); #endregion #region ssdp:other if (DoesDeviceHaveAnyNonVersionOneComponents(device)) { WS = SourceCodeRepository.RemoveTag("//{{{BEGIN_VERSION>1}}}", "//{{{END_VERSION>1}}}", WS); } else { WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_VERSION>1}}}", "//{{{END_VERSION>1}}}", WS); } cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); BuildMSEARCHHandler_device(device, cs, 0); WS = WS.Replace("//{{{SSDP:OTHER}}}", cs.ToString()); #endregion #region FragmentedSendNotify Case statements cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.Append(" case 1:" + cl); cs.Append(" " + this.pc_methodPrefix + "BuildSendSsdpNotifyPacket(FNS->upnp->NOTIFY_SEND_socks[i], FNS->upnp, (struct sockaddr*)&(FNS->upnp->AddressListV4[i]), 0, \"::upnp:rootdevice\", \"upnp:rootdevice\", \"\");" + cl); cs.Append(" break;" + cl); BuildFragmentedNotify_CaseStatement(cs, device, 2, 0, false); WS = WS.Replace("//{{{FragmentedSendNotifyCaseStatements}}}", cs.ToString()); cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.Append(" case 1:" + cl); cs.Append(" " + this.pc_methodPrefix + "BuildSendSsdpNotifyPacket(FNS->upnp->NOTIFY_SEND_socks6[i], FNS->upnp, (struct sockaddr*)&(FNS->upnp->AddressListV6[i]), 0, \"::upnp:rootdevice\", \"upnp:rootdevice\", \"\");" + cl); cs.Append(" break;" + cl); BuildFragmentedNotify_CaseStatement(cs, device, 2, 0, true); WS = WS.Replace("//{{{FragmentedSendNotifyV6CaseStatements}}}", cs.ToString()); #endregion #region SendNotify "For statement" cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.Append(" " + pc_methodPrefix + "BuildSendSsdpNotifyPacket(upnp->NOTIFY_SEND_socks[i], upnp, (struct sockaddr*)&(upnp->AddressListV4[i]), 0, \"::upnp:rootdevice\", \"upnp:rootdevice\", \"\");" + cl); BuildNotifyPackets_Device(cs, device, 0, false); WS = WS.Replace("//{{{SendNotifyForStatement}}}", cs.ToString()); 
cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.Append(" " + pc_methodPrefix + "BuildSendSsdpNotifyPacket(upnp->NOTIFY_SEND_socks6[i], upnp, (struct sockaddr*)&(upnp->AddressListV6[i]), 0, \"::upnp:rootdevice\", \"upnp:rootdevice\", \"\");" + cl); BuildNotifyPackets_Device(cs, device, 0, true); WS = WS.Replace("//{{{SendNotifyV6ForStatement}}}", cs.ToString()); WS = WS.Replace("!NUMPACKETS!", this.CountPackets(device).ToString()); #endregion #region SendByeBye "For statement" cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.Append(" " + pc_methodPrefix + "BuildSendSsdpByeByePacket(upnp->NOTIFY_SEND_socks[i], upnp, (struct sockaddr*)&(upnp->MulticastAddrV4), UPNP_MCASTv4_GROUP, \"::upnp:rootdevice\", \"upnp:rootdevice\", \"\", 0);" + cl); BuildByeByePackets_Device(cs, device, 0, false); WS = WS.Replace("//{{{SendByeByeForStatement}}}", cs.ToString()); cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.Append(" " + pc_methodPrefix + "BuildSendSsdpByeByePacket(upnp->NOTIFY_SEND_socks6[i], upnp, t1, t2, \"::upnp:rootdevice\", \"upnp:rootdevice\", \"\", 0);" + cl); BuildByeByePackets_Device(cs, device, 0, true); WS = WS.Replace("//{{{SendByeByeV6ForStatement}}}", cs.ToString()); #endregion #region Device Icon if (device.Icon != null) { WS = SourceCodeRepository.RemoveTag("//{{{DeviceIcon_Begin}}}", "//{{{DeviceIcon_End}}}", WS); string iconString; MemoryStream ms = new MemoryStream(); device.Icon.Save(ms, System.Drawing.Imaging.ImageFormat.Png); HTTPMessage r; if (Configuration.HTTP_1dot1) { r = new HTTPMessage("1.1"); } else { r = new HTTPMessage("1.0"); } r.StatusCode = 200; r.StatusData = "OK"; r.ContentType = "image/png"; r.BodyBuffer = ms.ToArray(); // Small PNG DeviceObjectGenerator.InjectBytes(out iconString, r.RawPacket, this.CodeNewLine, false); WS = WS.Replace("{{{IconLength_SMPNG}}}", r.RawPacket.Length.ToString()); WS = WS.Replace("{{{IconLength_HEAD_SMPNG}}}", 
(r.RawPacket.Length - r.BodyBuffer.Length).ToString()); WS = WS.Replace("{{{ICON_SMPNG}}}", iconString); // Small JPG ms = new MemoryStream(); device.Icon.Save(ms, System.Drawing.Imaging.ImageFormat.Jpeg); r.ContentType = "image/jpg"; r.BodyBuffer = ms.ToArray(); DeviceObjectGenerator.InjectBytes(out iconString, r.RawPacket, this.CodeNewLine, false); WS = WS.Replace("{{{IconLength_SMJPG}}}", r.RawPacket.Length.ToString()); WS = WS.Replace("{{{IconLength_HEAD_SMJPG}}}", (r.RawPacket.Length - r.BodyBuffer.Length).ToString()); WS = WS.Replace("{{{ICON_SMJPG}}}", iconString); if (device.Icon2 != null) { // Large PNG ms = new MemoryStream(); device.Icon2.Save(ms, System.Drawing.Imaging.ImageFormat.Png); r.ContentType = "image/png"; r.BodyBuffer = ms.ToArray(); DeviceObjectGenerator.InjectBytes(out iconString, r.RawPacket, this.CodeNewLine, false); WS = WS.Replace("{{{IconLength_LGPNG}}}", r.RawPacket.Length.ToString()); WS = WS.Replace("{{{IconLength_HEAD_LGPNG}}}", (r.RawPacket.Length - r.BodyBuffer.Length).ToString()); WS = WS.Replace("{{{ICON_LGPNG}}}", iconString); // Large JPG ms = new MemoryStream(); device.Icon2.Save(ms, System.Drawing.Imaging.ImageFormat.Jpeg); r.ContentType = "image/jpg"; r.BodyBuffer = ms.ToArray(); DeviceObjectGenerator.InjectBytes(out iconString, r.RawPacket, this.CodeNewLine, false); WS = WS.Replace("{{{IconLength_LGJPG}}}", r.RawPacket.Length.ToString()); WS = WS.Replace("{{{IconLength_HEAD_LGJPG}}}", (r.RawPacket.Length - r.BodyBuffer.Length).ToString()); WS = WS.Replace("{{{ICON_LGJPG}}}", iconString); } } else { WS = SourceCodeRepository.RemoveAndClearTag("//{{{DeviceIcon_Begin}}}", "//{{{DeviceIcon_End}}}", WS); } #endregion #region Dispatch Methods cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); Build_DispatchMethods(cs, serviceNames); WS = WS.Replace("//{{{DispatchMethods}}}", cs.ToString()); #endregion #region Dispatch Controller cs = new CodeProcessor(new StringBuilder(), this.Language == 
LANGUAGES.CPP); if (BuildHTTPSink_CONTROL(cs, device, serviceNames, "") != "") { cs.Append(" else" + cl); cs.Append(" {" + cl); cs.Append(" RetVal=1;" + cl); cs.Append(" }" + cl); } else { cs.Append(" RetVal=1;" + cl); } WS = WS.Replace("//{{{DispatchControl}}}", cs.ToString()); #endregion #region Invocation Response Methods cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); BuildUPnPResponse(cs, device, serviceNames); WS = WS.Replace("//{{{InvokeResponseMethods}}}", cs.ToString()); #endregion #region GetInitialEventBody cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); BuildEventHelpers_InitialEvent(cs, serviceNames); WS = WS.Replace("//{{{InitialEventBody}}}", cs.ToString()); #endregion #region Multicast Events if (device.ArchitectureVersion != "1.0") { cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); BuildMulticastSoapEvents(cs, device, serviceNames); WS = WS.Replace("//{{{SetStateMethods}}}", "//{{{SetStateMethods}}}\r\n" + cs.ToString()); WS = BuildMulticastSoapEventsProcessor(WS, device, serviceNames); WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_CHECK_MULTICASTVARIABLE}}}", "//{{{END_CHECK_MULTICASTVARIABLE}}}", WS); WS = SourceCodeRepository.RemoveTag("//{{{BEGIN_MulticastEventing}}}", "//{{{END_MulticastEventing}}}", WS); WS = WS.Replace("{{{VARDEFS}}}", ""); } else { WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_MulticastEventing}}}", "//{{{END_MulticastEventing}}}", WS); } #endregion #region SetState Methods cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); BuildSoapEvents(cs, device, serviceNames); WS = WS.Replace("//{{{SetStateMethods}}}", cs.ToString()); #endregion #region UnSubscribeDispatcher string packet = "HTTP/!HTTPVERSION! 
%d %s\\r\\nContent-Length: 0\\r\\n\\r\\n"; cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); first = ""; en.Reset(); while (en.MoveNext()) { UPnPService service = (UPnPService)en.Value; UPnPDebugObject obj = new UPnPDebugObject(service); string name = (string)obj.GetField("__eventurl"); cs.Append(" " + first + "if (header->DirectiveObjLength==" + (name.Length + 1).ToString() + " && memcmp(header->DirectiveObj + 1,\"" + name + "\"," + name.Length.ToString() + ")==0)" + cl); cs.Append(" {" + cl); cs.Append(" Info = " + pc_methodPrefix + "RemoveSubscriberInfo(&(((struct " + pc_methodPrefix + "DataObject*)session->User)->HeadSubscriberPtr_" + (string)en.Key + "),&(((struct " + pc_methodPrefix + "DataObject*)session->User)->NumberOfSubscribers_" + (string)en.Key + "),SID,SIDLength);" + cl); cs.Append(" if (Info != NULL)" + cl); cs.Append(" {" + cl); cs.Append(" --Info->RefCount;" + cl); cs.Append(" if (Info->RefCount == 0)" + cl); cs.Append(" {" + cl); cs.Append(" " + pc_methodPrefix + "DestructSubscriberInfo(Info);" + cl); cs.Append(" }" + cl); cs.Append(" packetlength = snprintf(packet, 50, \"" + packet + "\", 200, \"OK\");" + cl); cs.Append(" " + this.pc_methodLibPrefix + "WebServer_Send_Raw(session, packet, packetlength, 0, 1);" + cl); cs.Append(" }" + cl); cs.Append(" else" + cl); cs.Append(" {" + cl); cs.Append(" packetlength = snprintf(packet, 50, \"" + packet + "\", 412, \"Invalid SID\");" + cl); cs.Append(" " + this.pc_methodLibPrefix + "WebServer_Send_Raw(session, packet, packetlength, 0, 1);" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); first = "else "; } WS = WS.Replace("//{{{UnSubscribeDispatcher}}}", cs.ToString()); #endregion #region SubscribeDispatcher cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); first = Build_SubscribeEvents_Device("", cs, device, serviceNames); if (first != "") { cs.Append(" else" + cl); cs.Append(" {" + cl); cs.Append(" " + this.pc_methodLibPrefix + 
"WebServer_Send_Raw(session,\"HTTP/1.1 412 Invalid Service Name\\r\\nContent-Length: 0\\r\\n\\r\\n\",56,1,1);" + cl); cs.Append(" }" + cl); } else { cs.Append(" " + this.pc_methodLibPrefix + "WebServer_Send_Raw(session,\"HTTP/1.1 412 Invalid Service Name\\r\\nContent-Length: 0\\r\\n\\r\\n\",56,1,1);" + cl); } WS = WS.Replace("//{{{SubscribeEventsDispatcher}}}", cs.ToString()); #endregion #region Maximum Subscription Timeout WS = WS.Replace("{{{UPnP_MAX_SUBSCRIPTION_TIMEOUT}}}", ((ServiceGenerator.Configuration)device.User).MaxSubscriptionTimeout.ToString()); #endregion #region State Variables cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); en = SL.GetEnumerator(); while (en.MoveNext()) { string name = (string)en.Key; UPnPService s = (UPnPService)en.Value; foreach (UPnPStateVariable v in s.GetStateVariables()) { if (v.SendEvent) { cs.Append(" char* " + name + "_" + v.Name + ";" + cl); } if (v.MulticastEvent) { cs.Append(" int " + name + "_" + v.Name + "_SEQ;" + cl); } } } WS = WS.Replace("//{{{StateVariables}}}", cs.ToString()); #endregion #region Subscriber Head Pointer cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); en = SL.GetEnumerator(); while (en.MoveNext()) { string name = (string)en.Key; cs.Append(" struct SubscriberInfo *HeadSubscriberPtr_" + name + ";" + cl); cs.Append(" int NumberOfSubscribers_" + name + ";" + cl); } WS = WS.Replace("//{{{HeadSubscriberPointers}}}", cs.ToString()); #endregion #region UPnPExpireSubscriberInfo cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); en = SL.GetEnumerator(); first = ""; while (en.MoveNext()) { string name = (string)en.Key; cs.Append(" " + first + "if (d->HeadSubscriberPtr_" + name + "==t)" + cl); cs.Append(" {" + cl); cs.Append(" --(d->NumberOfSubscribers_" + name + ");" + cl); cs.Append(" }" + cl); first = "else "; } WS = WS.Replace("//{{{UPnPExpireSubscriberInfo1}}}", cs.ToString()); cs = new CodeProcessor(new StringBuilder(), 
this.Language == LANGUAGES.CPP); en = SL.GetEnumerator(); first = ""; while (en.MoveNext()) { string name = (string)en.Key; cs.Append(" " + first + "if (d->HeadSubscriberPtr_" + name + "==info)" + cl); cs.Append(" {" + cl); cs.Append(" d->HeadSubscriberPtr_" + name + " = info->Next;" + cl); cs.Append(" if (info->Next!=NULL)" + cl); cs.Append(" {" + cl); cs.Append(" info->Next->Previous = NULL;" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); first = "else "; } cs.Append(" " + first + cl); if (first != "") { cs.Append(" {" + cl); } cs.Append(" // Error" + cl); cs.Append(" return;" + cl); if (first != "") { cs.Append(" }" + cl); } WS = WS.Replace("//{{{UPnPExpireSubscriberInfo2}}}", cs.ToString()); #endregion #region TryToSubscribe HeadPointer Initializer cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); en = SL.GetEnumerator(); while (en.MoveNext()) { string name = (string)en.Key; cs.Append(" if (strncmp(ServiceName,\"" + name + "\"," + name.Length.ToString() + ")==0)" + cl); cs.Append(" {" + cl); cs.Append(" TotalSubscribers = &(dataObject->NumberOfSubscribers_" + name + ");" + cl); cs.Append(" HeadPtr = &(dataObject->HeadSubscriberPtr_" + name + ");" + cl); cs.Append(" }" + cl); } WS = WS.Replace("//{{{SubscribeHeadPointerInitializer}}}", cs.ToString()); #endregion #region TryToSubscribe Initial Event cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); en = SL.GetEnumerator(); first = ""; while (en.MoveNext()) { string name = (string)en.Key; UPnPService s = (UPnPService)en.Value; bool HasEvents = false; foreach (UPnPStateVariable v in s.GetStateVariables()) { if (v.SendEvent) { HasEvents = true; break; } } if (HasEvents) { cs.Append(" " + first + "if (strcmp(ServiceName,\"" + name + "\")==0)" + cl); cs.Append(" {" + cl); cs.Append(" UPnPGetInitialEventBody_" + name + "(dataObject,&packetbody,&packetbodyLength);" + cl); cs.Append(" }" + cl); first = "else "; } } WS = 
WS.Replace("//{{{TryToSubscribe_InitialEvent}}}", cs.ToString()); #endregion #region Subscription Renewal HeadPointer Initializer cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); first = ""; en.Reset(); while (en.MoveNext()) { UPnPService service = (UPnPService)en.Value; UPnPDebugObject obj = new UPnPDebugObject(service); string name = (string)obj.GetField("__eventurl"); string sname = (string)en.Key; cs.Append(first + " if (pathlength==" + (name.Length + 1).ToString() + " && memcmp(path+1,\"" + name + "\"," + name.Length.ToString() + ")==0)" + cl); cs.Append(" {" + cl); cs.Append(" info = ((struct " + this.pc_methodPrefix + "DataObject*)ReaderObject->User)->HeadSubscriberPtr_" + sname + ";" + cl); cs.Append(" }" + cl); first = "else"; } WS = WS.Replace("//{{{RenewHeadInitializer}}}", cs.ToString()); #endregion #region SendEvent HeadPointer Initializer cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); en.Reset(); while (en.MoveNext()) { string name = (string)en.Key; cs.Append(" if (strncmp(eventname,\"" + name + "\"," + name.Length.ToString() + ")==0)" + cl); cs.Append(" {" + cl); cs.Append(" info = UPnPObject->HeadSubscriberPtr_" + name + ";" + cl); cs.Append(" }" + cl); } WS = WS.Replace("//{{{SendEventHeadPointerInitializer}}}", cs.ToString()); #endregion #region HeadDispatcher cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); BuildHTTPSink_SCPD_HEAD(cs, device, serviceNames); WS = WS.Replace("//{{{HeadDispatcher}}}", cs.ToString()); #endregion #region GetDispatcher cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); BuildHTTPSink_SCPD(cs, device, serviceNames); WS = WS.Replace("//{{{GetDispatcher}}}", cs.ToString()); if (DeviceObjectGenerator.CalculateMaxAllowedValues(device, 0) != 0) { WS = SourceCodeRepository.RemoveTag("//{{{HASALLOWEDVALUES_BEGIN}}}", "//{{{HASALLOWEDVALUES_END}}}", WS); } else { WS = 
SourceCodeRepository.RemoveAndClearTag("//{{{HASALLOWEDVALUES_BEGIN}}}", "//{{{HASALLOWEDVALUES_END}}}", WS); } #endregion #region DestroyMicroStack cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); en = SL.GetEnumerator(); while (en.MoveNext()) { string name = (string)en.Key; UPnPService s = (UPnPService)en.Value; foreach (UPnPStateVariable v in s.GetStateVariables()) { if (v.SendEvent && v.GetNetType() != typeof(bool)) { cs.Append(" free(upnp->" + name + "_" + v.Name + ");" + cl); } } } WS = WS.Replace("//{{{UPnPDestroyMicroStack_FreeEventResources}}}", cs.ToString()); cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); en = SL.GetEnumerator(); while (en.MoveNext()) { string name = (string)en.Key; cs.Append(" sinfo = upnp->HeadSubscriberPtr_" + name + ";" + cl); cs.Append(" while(sinfo!=NULL)" + cl); cs.Append(" {" + cl); cs.Append(" sinfo2 = sinfo->Next;" + cl); cs.Append(" UPnPDestructSubscriberInfo(sinfo);" + cl); cs.Append(" sinfo = sinfo2;" + cl); cs.Append(" }" + cl); } WS = WS.Replace("//{{{UPnPDestroyMicroStack_DestructSubscriber}}}", cs.ToString()); #endregion #region UPnP/1.1 Complex Types cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); BuildComplexTypeParser(SeqTable, ChoTable, cs, SL, this.pc_methodPrefix, this.pc_methodLibPrefix); CPEmbeddedCGenerator.BuildComplexTypeSerializer(SeqTable, ChoTable, cs, SL, this.pc_methodPrefix, this.pc_methodLibPrefix); WS = WS.Replace("//{{{ComplexTypeCode}}}", cs.ToString()); #endregion #region HTTP Version if (!Configuration.HTTP_1dot1) { WS = WS.Replace("!HTTPVERSION!", "1.0"); WS = SourceCodeRepository.RemoveAndClearTag("//{{{ REMOVE_THIS_FOR_HTTP/1.0_ONLY_SUPPORT--> }}}", "//{{{ <--REMOVE_THIS_FOR_HTTP/1.0_ONLY_SUPPORT }}}", WS); } else { WS = WS.Replace("!HTTPVERSION!", "1.1"); WS = SourceCodeRepository.RemoveTag("//{{{ REMOVE_THIS_FOR_HTTP/1.0_ONLY_SUPPORT--> }}}", "//{{{ <--REMOVE_THIS_FOR_HTTP/1.0_ONLY_SUPPORT }}}", WS); } WS = 
WS.Replace("!MICROSTACKVERSION!", this.UseVersion); #endregion #region UPnP Specific Version if (device.ArchitectureVersion == "1.0") { // UPnP/1.0 WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_UPnP/1.1_Specific}}}", "//{{{END_UPnP/1.1_Specific}}}", WS); WS = SourceCodeRepository.RemoveTag("//{{{BEGIN_UPnP/1.0_Specific}}}", "//{{{END_UPnP/1.0_Specific}}}", WS); WS = WS.Replace("!UPNPVERSION!", "1.0"); } else { // UPnP/1.1 WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_UPnP/1.0_Specific}}}", "//{{{END_UPnP/1.0_Specific}}}", WS); WS = SourceCodeRepository.RemoveTag("//{{{BEGIN_UPnP/1.1_Specific}}}", "//{{{END_UPnP/1.1_Specific}}}", WS); WS = WS.Replace("!UPNPVERSION!", "1.1"); } #endregion #region Remove Event Processing if no evented State Variables if (DeviceHasEvents(device)) { WS = SourceCodeRepository.RemoveTag("//{{{BEGIN_EVENTPROCESSING}}}", "//{{{END_EVENTPROCESSING}}}", WS); } else { WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_EVENTPROCESSING}}}", "//{{{END_EVENTPROCESSING}}}", WS); } #endregion #region Prefixes WS = FixPrefix_DeviceService(device, WS); WS = WS.Replace("UPnP/", "upnp/"); WS = WS.Replace("UPnPError", "_upnperror_"); WS = WS.Replace(" UPnP ", " _upnp_ "); WS = WS.Replace("UPnP", this.pc_methodPrefix); WS = WS.Replace("ILib", this.pc_methodLibPrefix); WS = WS.Replace("_upnperror_", "UPnPError"); WS = WS.Replace("upnp/", "UPnP/"); WS = WS.Replace(" _upnp_ ", " UPnP "); WS = FixPrefix2_DeviceService(device, WS); #endregion #region Reformat String WS = CodeProcessor.ProcessCode(WS, Indent); #endregion #region Write to disk if (this.Language == LANGUAGES.C) { W = File.CreateText(outputDirectory.FullName + "\\" + pc_methodPrefix + "MicroStack.c"); } else { W = File.CreateText(outputDirectory.FullName + "\\" + pc_methodPrefix + "MicroStack.cpp"); } W.Write(WS); W.Close(); #endregion #endregion #region Sample Application if (BuildSampleApp) { WS = SampleApp; #region Display Message WS = WS.Replace("{{{INITSTRING}}}", 
(string)FriendlyNameTable[device] + " {{{INITSTRING}}}"); #endregion #region ImplementationMethods if (!Configuration.BareBonesSample) { cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); BuildMainUserCode(cs, device, serviceNames); WS = WS.Replace("//{{{DEVICE_INVOCATION_DISPATCH}}}", "//{{{DEVICE_INVOCATION_DISPATCH}}}" + cl + cs.ToString()); } #endregion #region ImplementationMethods: Function Pointer Initialization if (!Configuration.BareBonesSample) { if (Configuration.EXTERN_Callbacks == false) { cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.ident = 1; if (DeviceConf.AdvertisesPresentationPage) { cs.Append(pc_methodPrefix + "FP_PresentationPage=&" + pc_methodPrefix + "PresentationRequest;" + cl); } BuildMain_SetFunctionPointers(cs, device, serviceNames); WS = WS.Replace("//{{{INVOCATION_FP}}}", "//{{{INVOCATION_FP}}}" + cl + cs.ToString()); } } #endregion #region PresentationRequest if (DeviceConf.AdvertisesPresentationPage) { cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.Append("void UPnPPresentationRequest(void* upnptoken, struct packetheader *packet)" + cl); cs.Append("{" + cl); cs.Append(" char str[40];" + cl); cs.Append(" ILibInet_ntop2((struct sockaddr*)packet->Source, str, 40);" + cl); cs.Append(" printf(\"UPnP Presentation Request: %s %s %s\\r\\n\", packet->Directive, packet->DirectiveObj, str);" + cl); cs.Append(cl); cs.Append(" // TODO: Add Web Response Code Here..." 
+ cl); cs.Append(cl); cs.Append(" ILibWebServer_Send_Raw((struct ILibWebServer_Session *)upnptoken, \"HTTP/1.1 200 OK\\r\\nContent-Length: 0\\r\\n\\r\\n\", 38, 1, 1);" + cl); cs.Append("}" + cl); WS = WS.Replace("//{{{PresentationRequest}}}", "//{{{PresentationRequest}}}" + cl + cs.ToString()); } #endregion #region MicroStack.h include WS = WS.Replace("//{{{MicroStack_Include}}}", "//{{{MicroStack_Include}}}" + cl + "#include \"UPnPMicroStack.h\""); #endregion #region MicroStack Veriable Declaration WS = WS.Replace("//{{{MICROSTACK_VARIABLE}}}", "//{{{MICROSTACK_VARIABLE}}}" + cl + "void *UPnPmicroStack;"); #endregion #region CreateMicroStack if (!Configuration.BareBonesSample) { cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); cs.ident = 1; cs.Append(" // TODO: Each device must have a unique device identifier (UDN)" + cl); cs.Append(" " + this.pc_methodPrefix + "microStack = " + pc_methodPrefix + "CreateMicroStack(MicroStackChain, "); CreateMicroStack_Device_Values(cs, device); cs.Append("\"" + Guid.NewGuid().ToString() + "\", \"0000001\", " + DeviceConf.SSDPCycleTime.ToString() + ", " + DeviceConf.WebPort.ToString() + ");" + cl); WS = WS.Replace("//{{{CREATE_MICROSTACK}}}", "//{{{CREATE_MICROSTACK}}}" + cl + cs.ToString()); } #endregion #region InitialEvent Initialization if (!Configuration.BareBonesSample) { cs = new CodeProcessor(new StringBuilder(), this.Language == LANGUAGES.CPP); BuildStateVariableEventingSample(cs, device, serviceNames); WS = WS.Replace("//{{{STATEVARIABLES_INITIAL_STATE}}}", "//{{{STATEVARIABLES_INITIAL_STATE}}}" + cl + cs.ToString()); } #endregion #region IPAddress Monitor if (Configuration.DefaultIPAddressMonitor) { WS = WS.Replace("//{{{IPAddress_Changed}}}", "//{{{IPAddress_Changed}}}" + cl + "UPnPIPAddressListChanged(UPnPmicroStack);"); if (Configuration.TargetPlatform == ServiceGenerator.PLATFORMS.MICROSTACK_WINSOCK2) { WS = SourceCodeRepository.RemoveTag("//{{{BEGIN_WINSOCK2_IPADDRESS_MONITOR}}}", 
"//{{{END_WINSOCK2_IPADDRESS_MONITOR}}}", WS); WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_POSIX/WINSOCK1_IPADDRESS_MONITOR}}}", "//{{{END_POSIX/WINSOCK1_IPADDRESS_MONITOR}}}", WS); } else { WS = SourceCodeRepository.RemoveTag("//{{{BEGIN_POSIX/WINSOCK1_IPADDRESS_MONITOR}}}", "//{{{END_POSIX/WINSOCK1_IPADDRESS_MONITOR}}}", WS); WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_WINSOCK2_IPADDRESS_MONITOR}}}", "//{{{END_WINSOCK2_IPADDRESS_MONITOR}}}", WS); } } else { WS = WS.Replace("//{{{IPAddress_Changed}}}", ""); WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_WINSOCK2_IPADDRESS_MONITOR}}}", "//{{{END_WINSOCK2_IPADDRESS_MONITOR}}}", WS); WS = SourceCodeRepository.RemoveAndClearTag("//{{{BEGIN_POSIX/WINSOCK1_IPADDRESS_MONITOR}}}", "//{{{END_POSIX/WINSOCK1_IPADDRESS_MONITOR}}}", WS); } #endregion #region Prefixes WS = WS.Replace("UPnPAbstraction.h", "_upnpabstraction.h_"); WS = WS.Replace("pUPnP", "_pupnp_"); WS = WS.Replace("CUPnP_", "_cupnp_"); WS = WS.Replace("UPnP/", "upnp/"); WS = WS.Replace("UPnPControlPointStructs.h", "upnpcontrolpointstructs.h"); WS = WS.Replace("UPnPDevice", "upnpdevice"); WS = WS.Replace("UPnPService", "upnpservice"); WS = WS.Replace("SubscribeForUPnPEvents", "subscribeforupnpevents"); WS = WS.Replace("UnSubscribeUPnPEvents", "unsubscribeupnpevents"); WS = WS.Replace("UPnPError", "_upnperror_"); WS = WS.Replace(" UPnP ", " _upnp_ "); WS = WS.Replace("UPnP", this.pc_methodPrefix); WS = WS.Replace("ILib", this.pc_methodLibPrefix); WS = WS.Replace("_upnperror_", "UPnPError"); WS = WS.Replace("upnp/", "UPnP/"); WS = WS.Replace(" _upnp_ ", " UPnP "); WS = WS.Replace("_pupnp_", "pUPnP"); WS = WS.Replace("_cupnp_", "CUPnP_"); WS = WS.Replace("upnpdevice", "UPnPDevice"); WS = WS.Replace("upnpservice", "UPnPService"); WS = WS.Replace("upnpcontrolpointstructs.h", "UPnPControlPointStructs.h"); WS = WS.Replace("subscribeforupnpevents", "SubscribeForUPnPEvents"); WS = WS.Replace("unsubscribeupnpevents", 
"UnSubscribeUPnPEvents");
                WS = WS.Replace("_upnpabstraction.h_", "UPnPAbstraction.h");
                #endregion
                SampleApp = WS;
            }
            #endregion
            Log("UPnP Stack Generation Complete.");
            return true;
        }

        // Emits the "case N:" arms of the fragmented SSDP NOTIFY state machine for one
        // device: one uuid announcement, one device-URN announcement, one arm per service,
        // then recurses into embedded devices. Returns the next unused case number so the
        // recursion can continue numbering. 'number' selects the AddressListV4/V6 slot
        // argument written into the generated call; 'ipv6' picks the v6 socket/address set.
        private int BuildFragmentedNotify_CaseStatement(CodeProcessor cs, UPnPDevice device, int StartNum, int number, bool ipv6)
        {
            // uuid:UDN announcement
            cs.Append("		case " + StartNum + ":" + cl);
            if (!ipv6)
                cs.Append("			" + pc_methodPrefix + "BuildSendSsdpNotifyPacket(FNS->upnp->NOTIFY_SEND_socks[i], FNS->upnp, (struct sockaddr*)&(FNS->upnp->AddressListV4[i]), " + number.ToString() + ", \"\", \"uuid:\", FNS->upnp->UDN);" + cl);
            else
                cs.Append("			" + pc_methodPrefix + "BuildSendSsdpNotifyPacket(FNS->upnp->NOTIFY_SEND_socks6[i], FNS->upnp, (struct sockaddr*)&(FNS->upnp->AddressListV6[i]), " + number.ToString() + ", \"\", \"uuid:\", FNS->upnp->UDN);" + cl);
            cs.Append("			break;" + cl);
            ++StartNum;

            // device-URN announcement
            cs.Append("		case " + StartNum + ":" + cl);
            if (!ipv6)
                cs.Append("			" + pc_methodPrefix + "BuildSendSsdpNotifyPacket(FNS->upnp->NOTIFY_SEND_socks[i], FNS->upnp, (struct sockaddr*)&(FNS->upnp->AddressListV4[i]), " + number.ToString() + ", \"::" + device.DeviceURN + "\", \"" + device.DeviceURN + "\", \"\");" + cl);
            else
                cs.Append("			" + pc_methodPrefix + "BuildSendSsdpNotifyPacket(FNS->upnp->NOTIFY_SEND_socks6[i], FNS->upnp, (struct sockaddr*)&(FNS->upnp->AddressListV6[i]), " + number.ToString() + ", \"::" + device.DeviceURN + "\", \"" + device.DeviceURN + "\", \"\");" + cl);
            cs.Append("			break;" + cl);
            ++StartNum;

            // one announcement per service
            foreach (UPnPService service in device.Services)
            {
                cs.Append("		case " + StartNum + ":" + cl);
                if (!ipv6)
                    cs.Append("			" + pc_methodPrefix + "BuildSendSsdpNotifyPacket(FNS->upnp->NOTIFY_SEND_socks[i], FNS->upnp, (struct sockaddr*)&(FNS->upnp->AddressListV4[i]), " + number.ToString() + ", \"::" + service.ServiceURN + "\", \"" + service.ServiceURN + "\", \"\");" + cl);
                else
                    cs.Append("			" + pc_methodPrefix + "BuildSendSsdpNotifyPacket(FNS->upnp->NOTIFY_SEND_socks6[i], FNS->upnp, (struct sockaddr*)&(FNS->upnp->AddressListV6[i]), " + number.ToString() + ", \"::" + service.ServiceURN + "\", \"" + service.ServiceURN + "\", \"\");" + cl);
                cs.Append("			break;" + cl);
                ++StartNum;
            }

            // recurse into embedded devices; ++number advances the address-slot index
            foreach (UPnPDevice d in device.EmbeddedDevices)
            {
                StartNum = BuildFragmentedNotify_CaseStatement(cs, d, StartNum, ++number, ipv6);
            }
            return (StartNum);
        }

        // Builds one GENA event-body fragment for a state variable; %s is a snprintf
        // placeholder filled with the variable's current value at runtime.
        private string BuildEventHelpers_GetLine(UPnPStateVariable v)
        {
            string datablock = "<e:property><" + v.Name + ">%s</" + v.Name + "></e:property>";
            return (datablock);
        }

        // Emits one C function per evented service, UPnPGetInitialEventBody_<name>(), which
        // mallocs and snprintf-formats the initial GENA notification body from the stack
        // object's cached state-variable strings. Services with no evented variables are
        // skipped. serviceNames maps UPnPService -> generated name; it is inverted into a
        // SortedList so output order is deterministic.
        private void BuildEventHelpers_InitialEvent(CodeProcessor cs, Hashtable serviceNames)
        {
            //string start_block = "<?xml version=\\\"1.0\\\" encoding=\\\"utf-8\\\"?><e:propertyset xmlns:e=\\\"urn:schemas-upnp-org:event-1-0\\\">";
            //string end_block = "</e:propertyset>";
            SortedList SL = new SortedList();
            IDictionaryEnumerator en = serviceNames.GetEnumerator();
            while (en.MoveNext())
            {
                SL[en.Value] = en.Key;
            }
            en = SL.GetEnumerator();
            while (en.MoveNext())
            {
                string name = (string)en.Key;
                UPnPService service = (UPnPService)en.Value;

                // Figure out if this service as any evented state variables
                bool eventedvars = false;
                foreach (UPnPStateVariable v in service.GetStateVariables())
                {
                    if (v.SendEvent == true)
                    {
                        eventedvars = true;
                        break;
                    }
                }
                if (eventedvars == false) continue;

                // Define the initial event body method for this service
                cs.Define("void " + pc_methodPrefixDef + "GetInitialEventBody_" + name + "(struct " + pc_methodPrefix + "DataObject *UPnPObject,char ** body, int *bodylength)");
                cs.Append("{" + cl);
                cs.Append("	int TempLength;" + cl);

                StringBuilder ev = new StringBuilder();
                //ev.Append(start_block);
                foreach (UPnPStateVariable V in service.GetStateVariables())
                {
                    if (V.SendEvent)
                    {
                        ev.Append(this.BuildEventHelpers_GetLine(V));
                    }
                }
                //ev.Append(end_block);

                string eventbody = ev.ToString();
                if (eventbody.Length != 0)
                {
                    // Strip the leading "<e:property><" (13 chars) and trailing
                    // "></e:property>" (14 chars) of the concatenated fragments.
                    eventbody = eventbody.Substring(13, eventbody.Length - (13 + 14));
                }

                // Buffer size = static body length + strlen of each evented value.
                cs.Append("	TempLength = (int)(" + eventbody.Length.ToString());
                foreach (UPnPStateVariable V in service.GetStateVariables())
                {
                    if (V.SendEvent)
                    {
                        cs.Append("+(int)strlen(UPnPObject->" + name + "_" + V.Name + ")");
                    }
                }
                cs.Append(");" + cl);
                cs.Append("	if ((*body = (char*)malloc(sizeof(char) * TempLength)) == NULL) ILIBCRITICALEXIT(254);" + cl);
                cs.Append("	*bodylength = snprintf(*body, sizeof(char) * TempLength, \"" + eventbody + "\"");
                foreach (UPnPStateVariable V in service.GetStateVariables())
                {
                    if (V.SendEvent)
                    {
                        cs.Append(",UPnPObject->" + name + "_" + V.Name);
                    }
                }
                cs.Append(");" + cl);
                cs.Append("}" + cl);
            }
        }

        // Counts devices in the tree, excluding the root itself (root contributes 0,
        // every embedded device contributes 1).
        private int GetNumberOfTotalEmbeddedDevices(UPnPDevice device)
        {
            int RetVal = 0;
            if (device.Root == false) RetVal = 1;
            foreach (UPnPDevice d in device.EmbeddedDevices)
            {
                RetVal += GetNumberOfTotalEmbeddedDevices(d);
            }
            return (RetVal);
        }

        // Emits inline C that validates a URI-typed SOAP argument: the raw value must
        // contain exactly one "://" separator, otherwise a 402 SOAP fault is returned.
        // On success the raw pointer/length are aliased into the _<arg> locals.
        // NOTE(review): assumes TempParser is declared in the surrounding generated
        // function — emitted elsewhere in this generator; confirm at the call site.
        private void TypeCheckURI(CodeProcessor cs, UPnPArgument args)
        {
            cs.Append("	TempParser = " + pc_methodLibPrefix + "ParseString(p_" + args.Name + ", 0, p_" + args.Name + "Length, \"://\",3);" + cl);
            cs.Append("	if (TempParser->NumResults!=2)" + cl);
            cs.Append("	{" + cl);
            if (Configuration.ExplicitErrorEncoding == true)
            {
                cs.Append("		" + pc_methodPrefix + "Response_Error(ReaderObject,402,\"Argument[" + args.Name + "] illegal format\");" + cl);
            }
            else
            {
                cs.Append("		" + pc_methodPrefix + "Response_Error(ReaderObject,402,\"Illegal value\");" + cl);
            }
            cs.Append("		" + this.pc_methodLibPrefix + "DestructParserResults(TempParser);" + cl);
            cs.Append("		return;" + cl);
            cs.Append("	}" + cl);
            cs.Append("	else" + cl);
            cs.Append("	{" + cl);
            cs.Append("		_" + args.Name + " = p_" + args.Name + ";" + cl);
            cs.Append("		_" + args.Name + "Length = p_" + args.Name + "Length;" + cl);
            cs.Append("		" + this.pc_methodLibPrefix + "DestructParserResults(TempParser);" + cl);
            cs.Append("	}" + cl);
        }

        // Emits the shared C helper UPnPTypeCheckIntegral(): parses a signed long from the
        // raw argument text, range-checks it against [MinVal, MaxVal], and sends a 402 SOAP
        // fault on failure. Returns 0 on success, -1 on failure.
        private void Build_TypeCheckIntegral(CodeProcessor cs)
        {
            cs.Define("int " + pc_methodPrefixDef + "TypeCheckIntegral(char* inVar, int inVarLength, long MinVal, long MaxVal, void *outVar, char *varName,struct HTTPReaderObject *ReaderObject)");
            cs.Append("{" + cl);
            cs.Append("	long TempLong;" + cl);
            // FIX: this declaration was commented out, but the ExplicitErrorEncoding
            // branches below emit code that uses 'msg' — the generated C did not compile.
            // Declared unconditionally, matching Build_TypeCheckUnsignedIntegral.
            cs.Append("	char* msg;" + cl);
            cs.Append("	int OK = 0;" + cl);
            cs.Append("	if (" + pc_methodLibPrefix + "GetLong(inVar, inVarLength, &TempLong)!=0)" + cl);
            cs.Append("	{" + cl);
            cs.Append("		OK=-1;" + cl);
            cs.Append("	}" + cl);
            cs.Append("	if (!(TempLong >= MinVal && TempLong <= MaxVal))" + cl);
            cs.Append("	{" + cl);
            cs.Append("		OK = -2;" + cl);
            cs.Append("	}" + cl);
            // OK == -1: unparseable value
            cs.Append("	if (OK == -1)" + cl);
            cs.Append("	{" + cl);
            if (Configuration.ExplicitErrorEncoding == true)
            {
                cs.Append("		if ((msg = (char*)malloc(25 + (int)strlen(varName))) == NULL) ILIBCRITICALEXIT(254);" + cl);
                cs.Append("		snprintf(msg, 25 + (int)strlen(varName), \"Argument[%s] illegal value\", varName);" + cl);
                cs.Append("		" + pc_methodPrefix + "Response_Error(ReaderObject, 402, msg);" + cl);
                cs.Append("		free(msg);" + cl);
            }
            else
            {
                cs.Append("		" + pc_methodPrefix + "Response_Error(ReaderObject, 402, \"Illegal value\");" + cl);
            }
            cs.Append("		return -1;" + cl);
            cs.Append("	}" + cl);
            // OK == -2: parsed but out of range
            cs.Append("	if (OK == -2)" + cl);
            cs.Append("	{" + cl);
            if (Configuration.ExplicitErrorEncoding == true)
            {
                cs.Append("		if ((msg = (char*)malloc(25 + (int)strlen(varName))) == NULL) ILIBCRITICALEXIT(254);" + cl);
                cs.Append("		snprintf(msg, 25 + (int)strlen(varName), \"Argument[%s] out of range\", varName);" + cl);
                cs.Append("		" + pc_methodPrefix + "Response_Error(ReaderObject, 402, msg);" + cl);
                cs.Append("		free(msg);" + cl);
            }
            else
            {
                cs.Append("		" + pc_methodPrefix + "Response_Error(ReaderObject, 402, \"Illegal value\");" + cl);
            }
            cs.Append("		return -1;" + cl);
            cs.Append("	}" + cl);
            cs.Append("	*((long*)outVar) = TempLong;" + cl);
            cs.Append("	return 0;" + cl);
            cs.Append("}" + cl);
        }

        // Emits the shared C helper UPnPTypeCheckUnsignedIntegral(): unsigned counterpart
        // of Build_TypeCheckIntegral. Returns 0 on success, -1 (with a 402 fault) on
        // parse failure or range violation.
        private void Build_TypeCheckUnsignedIntegral(CodeProcessor cs)
        {
            cs.Define("int " + pc_methodPrefixDef + "TypeCheckUnsignedIntegral(char* inVar, int inVarLength, unsigned long MinVal, unsigned long MaxVal, void *outVar, char *varName,struct HTTPReaderObject *ReaderObject)");
            cs.Append("{" + cl);
            cs.Append("	unsigned long TempULong;" + cl);
            cs.Append("	int OK = 0;" + cl);
            cs.Append("	char *msg;" + cl);
            cs.Append("	if (" + pc_methodLibPrefix + "GetULong(inVar, inVarLength, &TempULong)!=0)" + cl);
            cs.Append("	{" + cl);
            cs.Append("		OK=-1;" + cl);
            cs.Append("	}" + cl);
            cs.Append("	if (!(TempULong >= MinVal && TempULong <= MaxVal))" + cl);
            cs.Append("	{" + cl);
            cs.Append("		OK=-2;" + cl);
            cs.Append("	}" + cl);
            // OK == -1: unparseable value
            cs.Append("	if (OK==-1)" + cl);
            cs.Append("	{" + cl);
            if (Configuration.ExplicitErrorEncoding == true)
            {
                cs.Append("		if ((msg = (char*)malloc(25 + (int)strlen(varName))) == NULL) ILIBCRITICALEXIT(254);" + cl);
                cs.Append("		snprintf(msg, 25 + (int)strlen(varName), \"Argument[%s] illegal value\", varName);" + cl);
                cs.Append("		" + pc_methodPrefix + "Response_Error(ReaderObject, 402, msg);" + cl);
                cs.Append("		free(msg);" + cl);
            }
            else
            {
                cs.Append("		" + pc_methodPrefix + "Response_Error(ReaderObject,402,\"Illegal value\");" + cl);
            }
            cs.Append("		return(-1);" + cl);
            cs.Append("	}" + cl);
            // OK == -2: parsed but out of range
            cs.Append("	if (OK==-2)" + cl);
            cs.Append("	{" + cl);
            if (Configuration.ExplicitErrorEncoding == true)
            {
                cs.Append("		if ((msg = (char*)malloc(25 + (int)strlen(varName))) == NULL) ILIBCRITICALEXIT(254);" + cl);
                cs.Append("		snprintf(msg, 25 + (int)strlen(varName), \"Argument[%s] out of range\", varName);" + cl);
                cs.Append("		" + pc_methodPrefix + "Response_Error(ReaderObject, 402, msg);" + cl);
                cs.Append("		free(msg);" + cl);
            }
            else
            {
                cs.Append("		" + pc_methodPrefix + "Response_Error(ReaderObject, 402, \"Illegal value\");" + cl);
            }
            cs.Append("		return(-1);" + cl);
            cs.Append("	}" + cl);
            cs.Append("	*((unsigned long*)outVar) = TempULong;" + cl);
            cs.Append("	return(0);" + cl);
            cs.Append("}" + cl);
        }

        // Emits the shared C helper UPnPTypeCheckString(): for every state variable that
        // declares an AllowedValueList, validates the raw argument against that list and
        // sends a 402 SOAP fault if no value matches. Returns 0 on success, -1 on failure.
        private void Build_TypeCheckString(CodeProcessor cs, Hashtable serviceNames)
        {
            // Invert serviceNames into a SortedList for deterministic output order.
            SortedList SL = new SortedList();
            IDictionaryEnumerator en = serviceNames.GetEnumerator();
            while (en.MoveNext())
            {
                SL[en.Value] = en.Key;
            }
            en = SL.GetEnumerator();
            cs.Define("int " + pc_methodPrefixDef + "TypeCheckString(char* inVar, int inVarLength, char* ServiceName, char* StateVariable, char** outVar, int* outVarLength, char* varName, struct HTTPReaderObject *ReaderObject)");
            cs.Append("{" + cl);
            cs.Append("	int OK = 0;" + cl);
            cs.Append("	char* msg;" + cl);
            while (en.MoveNext())
            {
                UPnPService S = (UPnPService)en.Value;
                string key = (string)en.Key;
                // Only emit a branch for services that have at least one constrained variable.
                bool Needed = false;
                foreach (UPnPStateVariable V in S.GetStateVariables())
                {
                    if (V.AllowedStringValues != null)
                    {
                        Needed = true;
                        break;
                    }
                }
                if (Needed)
                {
                    cs.Append("	if (strncmp(ServiceName,\"" + key + "\"," + key.Length.ToString() + ") == 0)" + cl);
                    cs.Append("	{" + cl);
                    foreach (UPnPStateVariable V in S.GetStateVariables())
                    {
                        if (V.AllowedStringValues != null)
                        {
                            cs.Append("		if (strncmp(StateVariable,\"" + V.Name + "\"," + V.Name.Length.ToString() + ") == 0)" + cl);
                            cs.Append("		{" + cl);
                            cs.Append("			OK = -1;" + cl);
                            bool first = true;
                            foreach (string AllowedString in V.AllowedStringValues)
                            {
                                if (first == false) cs.Append("else ");
                                first = false;
                                // FIX: was "inVarLengt h== " — emitted a C syntax error
                                // into every generated TypeCheckString.
                                cs.Append("			if (inVarLength == " + AllowedString.Length.ToString() + ")" + cl);
                                cs.Append("			{" + cl);
                                cs.Append("				if (memcmp(inVar,\"" + AllowedString + "\"," + AllowedString.Length.ToString() + ") == 0) {OK = 0;}" + cl);
                                cs.Append("			}" + cl);
                            }
                            cs.Append("			if (OK != 0)" + cl);
                            cs.Append("			{" + cl);
                            if (Configuration.ExplicitErrorEncoding == true)
                            {
                                cs.Append("				if ((msg = (char*)malloc(65)) == NULL) ILIBCRITICALEXIT(254);" + cl);
                                cs.Append("				snprintf(msg, 65, \"Argument[%s] contains a value that is not in AllowedValueList\", varName);" + cl);
                                cs.Append("				" + pc_methodPrefix + "Response_Error(ReaderObject, 402, msg);" + cl);
                                cs.Append("				free(msg);" + cl);
                            }
                            else
                            {
                                cs.Append("				" + pc_methodPrefix + "Response_Error(ReaderObject, 402, \"Illegal value\");" + cl);
                            }
                            cs.Append("				return -1;" + cl);
                            cs.Append("			}" + cl);
                            cs.Append("			*outVar = inVar;" + cl);
                            cs.Append("			*outVarLength = inVarLength;" + cl);
                            cs.Append("			return(0);" + cl);
                            cs.Append("		}" + cl);
                        }
                    }
                    cs.Append("	}" + cl);
                }
            }
            // FIX: the generated function is non-void but previously could fall off the
            // end (undefined behavior in C) when no service/variable matched. Unmatched
            // values have no AllowedValueList constraint, so pass them through as valid.
            cs.Append("	*outVar = inVar;" + cl);
            cs.Append("	*outVarLength = inVarLength;" + cl);
            cs.Append("	return 0;" + cl);
            cs.Append("}" + cl);
        }

        // Emits inline C that parses a boolean SOAP argument. Accepts "true"/"false"
        // (case-insensitive) and "1"/"0"; anything else produces a 402 SOAP fault.
        private void TypeCheckBoolean(CodeProcessor cs, UPnPArgument args)
        {
            cs.Append("	OK=0;" + cl);
            cs.Append("	if (p_" + args.Name + "Length == 4)" + cl);
            cs.Append("	{" + cl);
            cs.Append("		if (strncasecmp(p_" + args.Name + ",\"true\",4) == 0)" + cl);
            cs.Append("		{" + cl);
            cs.Append("			OK = 1;" + cl);
            cs.Append("			_" + args.Name + " = 1;" + cl);
            cs.Append("		}" + cl);
            cs.Append("	}" + cl);
            cs.Append("	if (p_" + args.Name + "Length == 5)" + cl);
            cs.Append("	{" + cl);
            cs.Append("		if (strncasecmp(p_" + args.Name + ",\"false\",5) == 0)" + cl);
            cs.Append("		{" + cl);
            cs.Append("			OK = 1;" + cl);
            cs.Append("			_" + args.Name + " = 0;" + cl);
            cs.Append("		}" + cl);
            cs.Append("	}" + cl);
            cs.Append("	if (p_" + args.Name + "Length == 1)" + cl);
            cs.Append("	{" + cl);
            cs.Append("		if (memcmp(p_" + args.Name + ",\"0\",1) == 0)" + cl);
            cs.Append("		{" + cl);
            cs.Append("			OK = 1;" + cl);
            cs.Append("			_" + args.Name + " = 0;" + cl);
            cs.Append("		}" + cl);
            cs.Append("		if (memcmp(p_" + args.Name + ",\"1\",1) == 0)" + cl);
            cs.Append("		{" + cl);
            cs.Append("			OK = 1;" + cl);
            cs.Append("			_" + args.Name + " = 1;" + cl);
            cs.Append("		}" + cl);
            cs.Append("	}" + cl);
            cs.Append("	if (OK == 0)" + cl);
            cs.Append("	{" + cl);
            if (Configuration.ExplicitErrorEncoding == true)
            {
                cs.Append("		" + pc_methodPrefix + "Response_Error(ReaderObject, 402, \"Argument[" + args.Name + "] illegal value\");" + cl);
            }
            else
            {
                cs.Append("		" + pc_methodPrefix + "Response_Error(ReaderObject, 402, \"Illegal value\");" + cl);
            }
            cs.Append("		return;" + cl);
            cs.Append("	}" + cl);
        }

        // Emits inline C that parses and range-checks an integral SOAP argument.
        // Signed types use GetLong/TempLong; unsigned use GetULong/TempULong. Bounds come
        // from the state variable's Minimum/Maximum when declared, otherwise the .NET
        // type's MinValue/MaxValue (via reflection on UPnPDebugObject). With
        // DynamicObjectModel the bounds are read at runtime from MinMaxStep[] instead.
        // NOTE(review): assumes OK/TempLong/TempULong (and TempLong2/TempULong2 for the
        // dynamic model) are declared in the surrounding generated function.
        private void TypeCheckIntegral(CodeProcessor cs, UPnPArgument args)
        {
            UPnPDebugObject obj = new UPnPDebugObject(args.RelatedStateVar.GetNetType());
            switch (args.RelatedStateVar.GetNetType().FullName)
            {
                case "System.SByte":
                case "System.Int16":
                case "System.Int32":
                    cs.Append("	OK = " + pc_methodLibPrefix + "GetLong(p_" + args.Name + ",p_" + args.Name + "Length, &TempLong);" + cl);
                    break;
                case "System.Byte":
                case "System.UInt16":
                case "System.UInt32":
                    cs.Append("	OK = " + pc_methodLibPrefix + "GetULong(p_" + args.Name + ",p_" + args.Name + "Length, &TempULong);" + cl);
                    break;
            }
            cs.Append("	if (OK!=0)" + cl);
            cs.Append("	{" + cl);
            if (Configuration.ExplicitErrorEncoding == true)
            {
                cs.Append("		" + pc_methodPrefix + "Response_Error(ReaderObject,402,\"Argument[" + args.Name + "] illegal value\");" + cl);
            }
            else
            {
                cs.Append("		" + pc_methodPrefix + "Response_Error(ReaderObject,402,\"Illegal value\");" + cl);
            }
            cs.Append("		return;" + cl);
            cs.Append("	}" + cl);

            bool endtag = false;
            switch (args.RelatedStateVar.GetNetType().FullName)
            {
                case "System.SByte":
                case "System.Int16":
                case "System.Int32":
                    if (args.RelatedStateVar.Minimum == null && args.RelatedStateVar.Maximum == null)
                    {
                        // No need to check anything since this is without bounds.
                    }
                    else
                    {
                        // Check lower and upper bounds.
                        endtag = true;
                        cs.Append("	else" + cl);
                        cs.Append("	{" + cl);
                        if (!Configuration.DynamicObjectModel)
                        {
                            cs.Append("		if (!(TempLong>=");
                            if (args.RelatedStateVar.Minimum != null)
                            {
                                cs.Append("(long)0x" + ToHex(args.RelatedStateVar.Minimum));
                            }
                            else
                            {
                                cs.Append("(long)0x" + ToHex(obj.GetStaticField("MinValue")));
                            }
                            cs.Append(" && TempLong<=");
                            if (args.RelatedStateVar.Maximum != null)
                            {
                                cs.Append("(long)0x" + ToHex(args.RelatedStateVar.Maximum));
                            }
                            else
                            {
                                cs.Append("(long)0x" + ToHex(obj.GetStaticField("MaxValue")));
                            }
                            cs.Append("))" + cl);
                        }
                        else
                        {
                            // Runtime bounds check against the dynamic model's MinMaxStep.
                            string vIdent = DeviceObjectGenerator.GetStateVariableIdentifier(args.RelatedStateVar);
                            cs.Append("		OK = 0;" + cs.NewLine);
                            cs.Append("		if (" + vIdent + "->MinMaxStep[0]!=NULL)" + cs.NewLine);
                            cs.Append("		{" + cs.NewLine);
                            cs.Append("			" + pc_methodLibPrefix + "GetLong(" + vIdent + "->MinMaxStep[0],(int)strlen(" + vIdent + "->MinMaxStep[0]), &TempLong2);" + cl);
                            cs.Append("			if (TempLong<TempLong2){OK=1;}" + cs.NewLine);
                            cs.Append("		}" + cs.NewLine);
                            cs.Append("		if (" + vIdent + "->MinMaxStep[1]!=NULL)" + cs.NewLine);
                            cs.Append("		{" + cs.NewLine);
                            cs.Append("			" + pc_methodLibPrefix + "GetLong(" + vIdent + "->MinMaxStep[1],(int)strlen(" + vIdent + "->MinMaxStep[1]), &TempLong2);" + cl);
                            cs.Append("			if (TempLong>TempLong2){OK=1;}" + cs.NewLine);
                            cs.Append("		}" + cs.NewLine);
                            cs.Append("		if (OK!=0)" + cs.NewLine);
                        }
                        cs.Append("		{" + cl);
                        if (Configuration.ExplicitErrorEncoding == true)
                        {
                            cs.Append("			" + pc_methodPrefix + "Response_Error(ReaderObject,402,\"Argument[" + args.Name + "] out of Range\");" + cl);
                        }
                        else
                        {
                            cs.Append("			" + pc_methodPrefix + "Response_Error(ReaderObject,402,\"Illegal value\");" + cl);
                        }
                        cs.Append("			return;" + cl);
                        cs.Append("		}" + cl);
                    }
                    break;
                case "System.Byte":
                case "System.UInt16":
                case "System.UInt32":
                    if (args.RelatedStateVar.Minimum == null && args.RelatedStateVar.Maximum == null)
                    {
                        // No need to check anything since this is an int without bounds.
                    }
                    else
                    {
                        endtag = true;
                        cs.Append("	else" + cl);
                        cs.Append("	{" + cl);
                        if (!this.Configuration.DynamicObjectModel)
                        {
                            cs.Append("		if (!(TempULong>=");
                            if (args.RelatedStateVar.Minimum != null)
                            {
                                cs.Append("(unsigned long)0x" + ToHex(args.RelatedStateVar.Minimum));
                            }
                            else
                            {
                                cs.Append("(unsigned long)0x" + ToHex(obj.GetStaticField("MinValue")));
                            }
                            cs.Append(" && TempULong<=");
                            if (args.RelatedStateVar.Maximum != null)
                            {
                                cs.Append("(unsigned long)0x" + ToHex(args.RelatedStateVar.Maximum));
                            }
                            else
                            {
                                cs.Append("(unsigned long)0x" + ToHex(obj.GetStaticField("MaxValue")));
                            }
                            cs.Append("))" + cl);
                        }
                        else
                        {
                            // Runtime bounds check against the dynamic model's MinMaxStep.
                            string vIdent = DeviceObjectGenerator.GetStateVariableIdentifier(args.RelatedStateVar);
                            cs.Append("		OK = 0;" + cs.NewLine);
                            cs.Append("		if (" + vIdent + "->MinMaxStep[0]!=NULL)" + cs.NewLine);
                            cs.Append("		{" + cs.NewLine);
                            cs.Append("			" + pc_methodLibPrefix + "GetULong(" + vIdent + "->MinMaxStep[0],(int)strlen(" + vIdent + "->MinMaxStep[0]), &TempULong2);" + cl);
                            cs.Append("			if (TempULong<TempULong2){OK=1;}" + cs.NewLine);
                            cs.Append("		}" + cs.NewLine);
                            cs.Append("		if (" + vIdent + "->MinMaxStep[1]!=NULL)" + cs.NewLine);
                            cs.Append("		{" + cs.NewLine);
                            cs.Append("			" + pc_methodLibPrefix + "GetULong(" + vIdent + "->MinMaxStep[1],(int)strlen(" + vIdent + "->MinMaxStep[1]), &TempULong2);" + cl);
                            cs.Append("			if (TempULong>TempULong2){OK=1;}" + cs.NewLine);
                            cs.Append("		}" + cs.NewLine);
                            cs.Append("		if (OK!=0)" + cs.NewLine);
                        }
                        cs.Append("		{" + cl);
                        if (Configuration.ExplicitErrorEncoding == true)
                        {
                            cs.Append("			" + pc_methodPrefix + "Response_Error(ReaderObject,402,\"Argument[" + args.Name + "] out of Range\");" + cl);
                        }
                        else
                        {
                            cs.Append("			" + pc_methodPrefix + "Response_Error(ReaderObject,402,\"Illegal value\");" + cl);
                        }
                        cs.Append("			return;" + cl);
                        cs.Append("		}" + cl);
                    }
                    break;
            }
            // Cast the parsed value into the argument's C type.
            switch (args.RelatedStateVar.GetNetType().FullName)
            {
                case "System.SByte":
                case "System.Int16":
                case "System.Int32":
                    cs.Append("	_" + args.Name + " = (" + ToCType(args.RelatedStateVar.GetNetType().FullName) + ")TempLong;" + cl);
                    break;
                case "System.Byte":
                case "System.UInt16":
                case "System.UInt32":
                    cs.Append("	_" + args.Name + " = (" + ToCType(args.RelatedStateVar.GetNetType().FullName) + ")TempULong;" + cl);
                    break;
            }
            if (endtag == true) cs.Append("	}" + cl);
        }

        // Emits inline C that null-terminates the raw dateTime argument in place and
        // parses it with ILibTime_Parse.
        private void TypeCheckDateTime(CodeProcessor cs, UPnPArgument args)
        {
            cs.Append("	p_" + args.Name + "[p_" + args.Name + "Length]=0;" + cl);
            cs.Append("	_" + args.Name + " = " + this.pc_methodLibPrefix + "Time_Parse(p_" + args.Name + ");" + cl);
        }

        private void TypeCheckString(CodeProcessor cs, UPnPArgument args)
        {
            cs.Append("	_" + args.Name + "Length = " + this.pc_methodLibPrefix + "InPlaceXmlUnEscape(p_" + args.Name + ");" + cl);
            cs.Append("	_" + args.Name + " = p_" + args.Name + ";" + cl);
            if (args.RelatedStateVar.AllowedStringValues != null)
            {
                if (!Configuration.DynamicObjectModel)
                {
                    cs.Append("	if (");
                    bool first = true;
                    foreach (string val in args.RelatedStateVar.AllowedStringValues)
                    {
                        if (first == false) cs.Append("&& ");
                        first = false;
                        cs.Append("memcmp(_" + args.Name + ", \"" + val + "\\0\"," + (val.Length + 1).ToString() + ") != 0" + cl);
                    }
                    cs.Append("	)" + cl);
                }
                else
                {
                    string vIdent =
DeviceObjectGenerator.GetStateVariableIdentifier(args.RelatedStateVar); cs.Append(" for(OK=0;OK<UPnP_StateVariable_AllowedValues_MAX;++OK)" + cs.NewLine); cs.Append(" {" + cs.NewLine); cs.Append(" if (" + vIdent + "->AllowedValues[OK]!=NULL)" + cs.NewLine); cs.Append(" {" + cs.NewLine); cs.Append(" if (strcmp(_" + args.Name + "," + vIdent + "->AllowedValues[OK])==0)" + cs.NewLine); cs.Append(" {" + cs.NewLine); cs.Append(" OK=0;" + cs.NewLine); cs.Append(" break;" + cs.NewLine); cs.Append(" }" + cs.NewLine); cs.Append(" }" + cs.NewLine); cs.Append(" else" + cs.NewLine); cs.Append(" {" + cs.NewLine); cs.Append(" break;" + cs.NewLine); cs.Append(" }" + cs.NewLine); cs.Append(" }" + cs.NewLine); cs.Append(" if (OK!=0)" + cs.NewLine); } cs.Append(" {" + cl); if (Configuration.ExplicitErrorEncoding == true) { cs.Append(" " + pc_methodPrefix + "Response_Error(ReaderObject,402,\"Argument[" + args.Name + "] contains a value that is not in AllowedValueList\");" + cl); } else { cs.Append(" " + pc_methodPrefix + "Response_Error(ReaderObject,402,\"Illegal value\");" + cl); } cs.Append(" return;" + cl); cs.Append(" }" + cl); } } private void Build_TypeCheckBoolean(CodeProcessor cs) { cs.Define("int " + pc_methodPrefixDef + "TypeCheckBoolean(char *inVar, int inVarLength, int* BoolValue, char* varName, struct HTTPReaderObject *ReaderObject)"); cs.Append("{" + cl); cs.Append(" int OK = 0;" + cl); cs.Append(" char* msg;" + cl); cs.Append(" if (inVarLength == 4)" + cl); cs.Append(" {" + cl); cs.Append(" if (strncasecmp(inVar, \"true\", 4) == 0)" + cl); cs.Append(" {" + cl); cs.Append(" OK = 1;" + cl); cs.Append(" *BoolValue = 1;" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); cs.Append(" if (inVarLength == 5)" + cl); cs.Append(" {" + cl); cs.Append(" if (strncasecmp(inVar, \"false\", 5) == 0)" + cl); cs.Append(" {" + cl); cs.Append(" OK = 1;" + cl); cs.Append(" *BoolValue = 0;" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); cs.Append(" if (inVarLength==1)" + cl); cs.Append(" 
{" + cl); cs.Append(" if (memcmp(inVar, \"0\", 1) == 0)" + cl); cs.Append(" {" + cl); cs.Append(" OK = 1;" + cl); cs.Append(" *BoolValue = 0;" + cl); cs.Append(" }" + cl); cs.Append(" if (memcmp(inVar, \"1\", 1) == 0)" + cl); cs.Append(" {" + cl); cs.Append(" OK = 1;" + cl); cs.Append(" *BoolValue = 1;" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); cs.Append(" if (OK == 0)" + cl); cs.Append(" {" + cl); if (Configuration.ExplicitErrorEncoding == true) { cs.Append(" if ((msg = (char*)malloc(25 + (int)strlen(varName))) == NULL) ILIBCRITICALEXIT(254);" + cl); cs.Append(" snprintf(msg, 25 + (int)strlen(varName), \"Argument[%s] illegal value\", varName);" + cl); cs.Append(" " + pc_methodPrefix + "Response_Error(ReaderObject, 402, msg);" + cl); cs.Append(" free(msg);" + cl); } else { cs.Append(" " + pc_methodPrefix + "Response_Error(ReaderObject, 402, \"Illegal value\");" + cl); } cs.Append(" return -1;" + cl); cs.Append(" }" + cl); cs.Append(" else" + cl); cs.Append(" {" + cl); cs.Append(" return 0;" + cl); cs.Append(" }" + cl); cs.Append("}" + cl); } private void Build_DispatchMethods(CodeProcessor cs, Hashtable serviceNames) { SortedList SL = new SortedList(); IDictionaryEnumerator en = serviceNames.GetEnumerator(); UPnPService service; string name; while (en.MoveNext()) { SL[en.Value] = en.Key; } en = SL.GetEnumerator(); while (en.MoveNext()) { int numArgs = 0; service = (UPnPService)en.Value; name = (string)en.Key; foreach (UPnPAction action in service.Actions) { numArgs = 0; foreach (UPnPArgument args in action.ArgumentList) { if (args.Direction == "in") { ++numArgs; } } // Define a macro version if (numArgs == 0) { cs.Define("#define " + pc_methodPrefixDef + "Dispatch_" + name + "_" + action.Name + "(buffer,offset,bufferLength, session)\\"); cs.Append("{\\" + cl); if (name != "DeviceSecurity") { if (Configuration.EXTERN_Callbacks == false) { cs.Append(" if (" + pc_methodPrefix + "FP_" + name + "_" + action.Name + " == NULL)\\" + cl); cs.Append(" " + 
pc_methodPrefix + "Response_Error(session,501,\"No Function Handler\");\\" + cl); cs.Append(" else\\" + cl); cs.Append(" " + pc_methodPrefix + "FP_" + name + "_" + action.Name + "((void*)session);\\" + cl); } else { cs.Append(" " + pc_methodPrefix + name + "_" + action.Name + "((void*)session);\\" + cl); } } else { cs.Append(" " + pc_methodLibPrefix + name + "_" + action.Name + "((void*)session);\\" + cl); } cs.Append("}" + cl); cs.Append(cl); } if (numArgs > 0) { cs.Define("void " + pc_methodPrefixDef + "Dispatch_" + name + "_" + action.Name + "(char *buffer, int offset, int bufferLength, struct " + this.pc_methodLibPrefix + "WebServer_Session *ReaderObject)"); cs.Append("{" + cl); bool varlong = false; bool varlongtemp = false; bool varulong = false; bool varulongtemp = false; bool varuuri = false; bool varok = false; foreach (UPnPArgument args in action.ArgumentList) { if (args.Direction == "in") { varok = true; switch (args.RelatedStateVar.GetNetType().ToString()) { case "System.Uri": varuuri = true; break; case "System.Byte": case "System.UInt16": case "System.UInt32": varulong = true; if (args.RelatedStateVar.Maximum != null || args.RelatedStateVar.Minimum != null) { varulongtemp = true; } break; case "System.SByte": case "System.Int16": case "System.Int32": varlong = true; if (args.RelatedStateVar.Maximum != null || args.RelatedStateVar.Minimum != null) { varlongtemp = true; } break; case "System.Boolean": case "System.Char": case "System.Single": case "System.Double": case "System.Byte[]": case "System.String": break; } } } //cs.Append(" char *TempString;"+cl); if (varlong == true) { cs.Append(" long TempLong;" + cl); if (varlongtemp && Configuration.DynamicObjectModel) { cs.Append(" long TempLong2;" + cl); } } if (varulong == true) { cs.Append(" unsigned long TempULong;" + cl); if (varulongtemp && Configuration.DynamicObjectModel) { cs.Append(" unsigned long TempULong2;" + cl); } } if (varuuri == true) cs.Append(" struct parser_result *TempParser;" + cl); 
if (varok == true) cs.Append(" int OK = 0;" + cl); //cs.Comment("Service Variables"); foreach (UPnPArgument args in action.ArgumentList) { if (args.Direction == "in") { cs.Append(" char *p_" + args.Name + " = NULL;" + cl); cs.Append(" int p_" + args.Name + "Length = 0;" + cl); if (args.RelatedStateVar.ComplexType == null) { cs.Append(" " + ToCType(args.RelatedStateVar.GetNetType().FullName) + " _" + args.Name + " = " + ToEmptyValue(args.RelatedStateVar.GetNetType().FullName) + ";" + cl); if (ToCType(args.RelatedStateVar.GetNetType().FullName) == "char*" || ToCType(args.RelatedStateVar.GetNetType().FullName) == "unsigned char*") { cs.Append(" int _" + args.Name + "Length;" + cl); } } else { cs.Append(" struct " + args.RelatedStateVar.ComplexType.Name_LOCAL + " *_" + args.Name + "=NULL;" + cl); } } } // // Setup the XML Parsing // cs.Append(" struct " + this.pc_methodLibPrefix + "XMLNode *xnode = " + this.pc_methodLibPrefix + "ParseXML(buffer, offset, bufferLength);" + cl); cs.Append(" struct " + this.pc_methodLibPrefix + "XMLNode *root = xnode;" + cl); foreach (UPnPArgument args in action.ArgumentList) { if (args.Direction == "in" && args.RelatedStateVar.ComplexType != null) { cs.Append(" struct " + this.pc_methodLibPrefix + "XMLNode *tnode, *tnode_root;" + cl); cs.Append(" char* tempText;" + cl); cs.Append(" int tempTextLength;" + cl); break; } } cs.Append(" if (" + this.pc_methodLibPrefix + "ProcessXMLNodeList(root)!=0)" + cl); cs.Append(" {" + cl); cs.Comment("The XML is not well formed!"); cs.Append(" " + this.pc_methodLibPrefix + "DestructXMLNodeList(root);" + cl); cs.Append(" " + this.pc_methodPrefix + "Response_Error(ReaderObject, 501, \"Invalid XML\");" + cl); cs.Append(" return;" + cl); cs.Append(" }" + cl); cs.Append(" while(xnode != NULL)" + cl); cs.Append(" {" + cl); cs.Append(" if (xnode->StartTag != 0 && xnode->NameLength == 8 && memcmp(xnode->Name, \"Envelope\", 8)==0)" + cl); cs.Append(" {" + cl); cs.Append(" // Envelope" + cl); cs.Append(" xnode = 
xnode->Next;" + cl); cs.Append(" while(xnode != NULL)" + cl); cs.Append(" {" + cl); cs.Append(" if (xnode->StartTag!=0 && xnode->NameLength == 4 && memcmp(xnode->Name, \"Body\", 4) == 0)" + cl); cs.Append(" {" + cl); cs.Append(" // Body" + cl); cs.Append(" xnode = xnode->Next;" + cl); cs.Append(" while(xnode != NULL)" + cl); cs.Append(" {" + cl); cs.Append(" if (xnode->StartTag != 0 && xnode->NameLength == " + action.Name.Length.ToString() + " && memcmp(xnode->Name, \"" + action.Name + "\"," + action.Name.Length.ToString() + ") == 0)" + cl); cs.Append(" {" + cl); cs.Append(" // Inside the interesting part of the SOAP" + cl); cs.Append(" xnode = xnode->Next;" + cl); cs.Append(" while(xnode != NULL)" + cl); cs.Append(" {" + cl); int argflag = 1; string eLsE = ""; foreach (UPnPArgument arg in action.ArgumentList) { if (arg.Direction == "in") { cs.Append(" " + eLsE + "if (xnode->NameLength == " + arg.Name.Length.ToString() + " && memcmp(xnode->Name, \"" + arg.Name + "\"," + arg.Name.Length.ToString() + ")==0)" + cl); cs.Append(" {" + cl); if (arg.RelatedStateVar.ComplexType == null) { cs.Append(" p_" + arg.Name + "Length = " + this.pc_methodLibPrefix + "ReadInnerXML(xnode, &p_" + arg.Name + ");" + cl); if ((arg.RelatedStateVar.GetNetType().FullName == "System.String") || (arg.RelatedStateVar.GetNetType().FullName == "System.Uri")) { cs.Append(" p_" + arg.Name + "[p_" + arg.Name + "Length]=0;" + cl); } } else { // Complex Type cs.Append(" tempTextLength = " + this.pc_methodLibPrefix + "ReadInnerXML(xnode, &tempText);" + cl); cs.Append(" tempText[tempTextLength] = 0;" + cl); cs.Append(" if (ReaderObject->Reserved9 == 0)" + cl); cs.Append(" {" + cl); cs.Append(" // Legacy" + cl); cs.Append(" tempTextLength = " + this.pc_methodLibPrefix + "InPlaceXmlUnEscape(tempText);" + cl); cs.Append(" tnode_root = tnode = " + this.pc_methodLibPrefix + "ParseXML(tempText,0,tempTextLength);" + cl); cs.Append(" " + this.pc_methodLibPrefix + "ProcessXMLNodeList(tnode_root);" + cl); 
cs.Append(" _" + arg.Name + " = " + this.pc_methodPrefix + "Parse_" + arg.RelatedStateVar.ComplexType.Name_LOCAL + "(tnode);" + cl); cs.Append(" " + this.pc_methodLibPrefix + "DestructXMLNodeList(tnode_root);" + cl); cs.Append(" }" + cl); cs.Append(" else" + cl); cs.Append(" {" + cl); cs.Append(" // UPnP/1.1 Enabled" + cl); cs.Append(" _" + arg.Name + " = " + this.pc_methodPrefix + "Parse_" + arg.RelatedStateVar.ComplexType.Name_LOCAL + "(xnode->Next);" + cl); cs.Append(" }" + cl); } cs.Append(" OK |= " + argflag + ";" + cl); argflag = argflag << 1; cs.Append(" }" + cl); eLsE = "else "; } } cs.Append(" if (xnode->Peer == NULL)" + cl); cs.Append(" {" + cl); cs.Append(" xnode = xnode->Parent;" + cl); cs.Append(" break;" + cl); cs.Append(" }" + cl); cs.Append(" else" + cl); cs.Append(" {" + cl); cs.Append(" xnode = xnode->Peer;" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); cs.Append(" if (xnode != NULL)" + cl); cs.Append(" {" + cl); cs.Append(" if (xnode->Peer == NULL)" + cl); cs.Append(" {" + cl); cs.Append(" xnode = xnode->Parent;" + cl); cs.Append(" break;" + cl); cs.Append(" }" + cl); cs.Append(" else" + cl); cs.Append(" {" + cl); cs.Append(" xnode = xnode->Peer;" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); cs.Append(" if (xnode != NULL)" + cl); cs.Append(" {" + cl); cs.Append(" if (xnode->Peer == NULL)" + cl); cs.Append(" {" + cl); cs.Append(" xnode = xnode->Parent;" + cl); cs.Append(" break;" + cl); cs.Append(" }" + cl); cs.Append(" else" + cl); cs.Append(" {" + cl); cs.Append(" xnode = xnode->Peer;" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); cs.Append(" if (xnode != NULL){xnode = xnode->Peer;}" + cl); cs.Append(" }" + cl); cs.Append(" " + this.pc_methodLibPrefix + "DestructXMLNodeList(root);" + cl); cs.Append(" if (OK != " + (argflag - 1) + ")" + cl); cs.Append(" {" + cl); if (Configuration.ExplicitErrorEncoding == true) { 
cs.Append(" " + pc_methodPrefix + "Response_Error(ReaderObject, 402, \"Incorrect Arguments\");" + cl); } else { cs.Append(" " + pc_methodPrefix + "Response_Error(ReaderObject, 402, \"Illegal value\");" + cl); } cs.Append(" return;" + cl); cs.Append(" }" + cl); cs.Append(cl); cs.Comment("Type Checking"); foreach (UPnPArgument args in action.ArgumentList) { if (args.Direction == "in") { switch (args.RelatedStateVar.GetNetType().FullName) { case "System.Boolean": TypeCheckBoolean(cs, args); break; case "System.Int16": case "System.Int32": case "System.UInt16": case "System.UInt32": case "System.Byte": case "System.SByte": TypeCheckIntegral(cs, args); break; case "System.Uri": TypeCheckURI(cs, args); break; case "System.DateTime": TypeCheckDateTime(cs, args); break; case "System.Byte[]": cs.Append(" _" + args.Name + "Length = " + this.pc_methodLibPrefix + "Base64Decode(p_" + args.Name + ",p_" + args.Name + "Length,&_" + args.Name + ");" + cl); break; case "System.String": default: if (args.RelatedStateVar.ComplexType == null) { TypeCheckString(cs, args); } break; } } } string FPtrType = "(void (__cdecl *)(void *"; foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "in") { FPtrType += ("," + ToCType(arg.RelatedStateVar.GetNetType().ToString())); } } FPtrType += "))"; if (name != "DeviceSecurity") { if (Configuration.EXTERN_Callbacks == false) { cs.Append(" if (" + pc_methodPrefix + "FP_" + name + "_" + action.Name + " == NULL)" + cl); cs.Append(" " + pc_methodPrefix + "Response_Error(ReaderObject,501,\"No Function Handler\");" + cl); cs.Append(" else" + cl); cs.Append(" " + pc_methodPrefix + "FP_" + name + "_" + action.Name + "((void*)ReaderObject"); } else { cs.Append(" " + pc_methodPrefix + name + "_" + action.Name + "((void*)ReaderObject"); } } else { cs.Append(" " + pc_methodLibPrefix + name + "_" + action.Name + "((void*)ReaderObject"); } foreach (UPnPArgument args in action.ArgumentList) { if (args.Direction == "in") { cs.Append(",_" + 
args.Name); if (args.RelatedStateVar.GetNetType().FullName == "System.Byte[]") { cs.Append(",_" + args.Name + "Length"); } } } cs.Append(");" + cl); foreach (UPnPArgument args in action.ArgumentList) { if (args.Direction == "in") { if (args.RelatedStateVar.GetNetType().FullName == "System.Byte[]") { cs.Append("free(_" + args.Name + ");" + cl); } } } cs.Append("}" + cl); cs.Append(cl); } } } } private string BuildHTTPSink_CONTROL(CodeProcessor cs, UPnPDevice device, Hashtable serviceNames, string f1) { UPnPDebugObject obj; string first1 = f1; foreach (UPnPService service in device.Services) { obj = new UPnPDebugObject(service); string CONTROLURL = (string)obj.GetField("__controlurl"); cs.Append(" " + first1 + " if (header->DirectiveObjLength==" + (CONTROLURL.Length + 1).ToString() + " && memcmp((header->DirectiveObj)+1,\"" + CONTROLURL + "\"," + CONTROLURL.Length.ToString() + ")==0)" + cl); cs.Append(" {" + cl); string first2 = ""; foreach (UPnPAction action in service.Actions) { cs.Append(" " + first2 + " if (SOAPACTIONLength==" + action.Name.Length.ToString() + " && memcmp(SOAPACTION,\"" + action.Name + "\"," + action.Name.Length.ToString() + ")==0)" + cl); cs.Append(" {" + cl); cs.Append(" " + pc_methodPrefix + "Dispatch_" + (string)serviceNames[service] + "_" + action.Name + "(bodyBuffer, offset, bodyBufferLength, session);" + cl); cs.Append(" }" + cl); first2 = "else"; } if (service.Actions.Count > 0) { cs.Append(" else" + cl); cs.Append(" {" + cl); } cs.Append(" RetVal=1;" + cl); if (service.Actions.Count > 0) { cs.Append(" }" + cl); } cs.Append(" }" + cl); first1 = "else"; } // if (device.Services.Length>0) // { // cs.Append(" else"+cl); // cs.Append(" {"+cl); // cs.Append(" RetVal=1;"+cl); // cs.Append(" }"+cl); // } foreach (UPnPDevice d in device.EmbeddedDevices) { first1 = this.BuildHTTPSink_CONTROL(cs, d, serviceNames, first1); } return (first1); } private void BuildHTTPSink_SCPD_HEAD(CodeProcessor cs, UPnPDevice device, Hashtable serviceNames) { 
UPnPDebugObject obj; foreach (UPnPService service in device.Services) { obj = new UPnPDebugObject(service); string SCPDURL = (string)obj.GetField("SCPDURL"); cs.Append(" else if (header->DirectiveObjLength==" + (SCPDURL.Length + 1).ToString() + " && memcmp((header->DirectiveObj)+1,\"" + SCPDURL + "\"," + SCPDURL.Length.ToString() + ")==0)" + cl); cs.Append(" {" + cl); cs.Append(" " + this.pc_methodLibPrefix + "WebServer_StreamHeader_Raw(session,200,\"OK\",responseHeader,1);" + cl); cs.Append(" " + this.pc_methodLibPrefix + "WebServer_StreamBody(session,NULL,0," + this.pc_methodLibPrefix + "AsyncSocket_MemoryOwnership_STATIC,1);" + cl); cs.Append(" }" + cl); } foreach (UPnPDevice d in device.EmbeddedDevices) { this.BuildHTTPSink_SCPD_HEAD(cs, d, serviceNames); } } private void BuildHTTPSink_SCPD(CodeProcessor cs, UPnPDevice device, Hashtable serviceNames) { UPnPDebugObject obj; foreach (UPnPService service in device.Services) { obj = new UPnPDebugObject(service); string SCPDURL = (string)obj.GetField("SCPDURL"); cs.Append(" else if (header->DirectiveObjLength==" + (SCPDURL.Length + 1).ToString() + " && memcmp((header->DirectiveObj)+1,\"" + SCPDURL + "\"," + SCPDURL.Length.ToString() + ")==0)" + cl); cs.Append(" {" + cl); if (this.Configuration.DynamicObjectModel) { cs.Append(" ILibWebServer_StreamHeader_Raw(session,200,\"OK\",responseHeader,1);" + cl); cs.Append(" UPnPStreamDescriptionDocument_SCPD(session,1,NULL,0,0,0,0);" + cl); if (service.Actions.Count > 0) { cs.Append(" buffer = ILibDecompressString((unsigned char*)UPnP_ActionTable_" + serviceNames[service] + "_Impl.Reserved,UPnP_ActionTable_" + serviceNames[service] + "_Impl.ReservedXL,UPnP_ActionTable_" + serviceNames[service] + "_Impl.ReservedUXL);" + cl); foreach (UPnPAction A in service.Actions) { string serviceIdent = DeviceObjectGenerator.GetServiceIdentifier(service); serviceIdent += ("->" + A.Name); cs.Append(" if (" + serviceIdent + "!=NULL){UPnPStreamDescriptionDocument_SCPD(session,0,buffer," + 
serviceIdent + "->Reserved," + serviceIdent + "->Reserved2,0,0);}" + cl); } cs.Append(" free(buffer);" + cl); } cs.Append(" UPnPStreamDescriptionDocument_SCPD(session,0,NULL,0,0,1,0);" + cl); if (service.GetStateVariables().Length > 0) { cs.Append(" buffer = ILibDecompressString((unsigned char*)UPnP_StateVariableTable_" + serviceNames[service] + "_Impl.Reserved,UPnP_StateVariableTable_" + serviceNames[service] + "_Impl.ReservedXL,UPnP_StateVariableTable_" + serviceNames[service] + "_Impl.ReservedUXL);" + cl); foreach (UPnPStateVariable V in service.GetStateVariables()) { string vIdent = DeviceObjectGenerator.GetStateVariableIdentifier(V); cs.Append(" if (" + vIdent + "!=NULL)" + cl); cs.Append(" {" + cl); cs.Append(" ILibWebServer_StreamBody(session,buffer+" + vIdent + "->Reserved1," + vIdent + "->Reserved1L,ILibAsyncSocket_MemoryOwnership_USER,0);" + cl); if (V.Minimum != null || V.Maximum != null) { cs.Append(" ILibWebServer_StreamBody(session,buffer+" + vIdent + "->Reserved4," + vIdent + "->Reserved4L,ILibAsyncSocket_MemoryOwnership_USER,0);" + cl); cs.Append(" if (" + vIdent + "->MinMaxStep[0]!=NULL)" + cl); cs.Append(" {" + cl); cs.Append(" ILibWebServer_StreamBody(session,\"<minimum>\",9,ILibAsyncSocket_MemoryOwnership_STATIC,0);" + cl); cs.Append(" ILibWebServer_StreamBody(session," + vIdent + "->MinMaxStep[0],(int)strlen(" + vIdent + "->MinMaxStep[0]),ILibAsyncSocket_MemoryOwnership_USER,0);" + cl); cs.Append(" ILibWebServer_StreamBody(session,\"</minimum>\",10,ILibAsyncSocket_MemoryOwnership_STATIC,0);" + cl); cs.Append(" }" + cl); cs.Append(" if (" + vIdent + "->MinMaxStep[1]!=NULL)" + cl); cs.Append(" {" + cl); cs.Append(" ILibWebServer_StreamBody(session,\"<maximum>\",9,ILibAsyncSocket_MemoryOwnership_STATIC,0);" + cl); cs.Append(" ILibWebServer_StreamBody(session," + vIdent + "->MinMaxStep[1],(int)strlen(" + vIdent + "->MinMaxStep[1]),ILibAsyncSocket_MemoryOwnership_USER,0);" + cl); cs.Append(" 
ILibWebServer_StreamBody(session,\"</maximum>\",10,ILibAsyncSocket_MemoryOwnership_STATIC,0);" + cl); cs.Append(" }" + cl); cs.Append(" if (" + vIdent + "->MinMaxStep[2]!=NULL)" + cl); cs.Append(" {" + cl); cs.Append(" ILibWebServer_StreamBody(session,\"<step>\",6,ILibAsyncSocket_MemoryOwnership_STATIC,0);" + cl); cs.Append(" ILibWebServer_StreamBody(session," + vIdent + "->MinMaxStep[2],(int)strlen(" + vIdent + "->MinMaxStep[2]),ILibAsyncSocket_MemoryOwnership_USER,0);" + cl); cs.Append(" ILibWebServer_StreamBody(session,\"</step>\",7,ILibAsyncSocket_MemoryOwnership_STATIC,0);" + cl); cs.Append(" }" + cl); cs.Append(" ILibWebServer_StreamBody(session,buffer+" + vIdent + "->Reserved5," + vIdent + "->Reserved5L,ILibAsyncSocket_MemoryOwnership_USER,0);" + cl); } if (V.DefaultValue != null) { cs.Append(" if (" + vIdent + "->DefaultValue!=NULL)" + cl); cs.Append(" {" + cl); cs.Append(" ILibWebServer_StreamBody(session,buffer+" + vIdent + "->Reserved6," + vIdent + "->Reserved6L,ILibAsyncSocket_MemoryOwnership_USER,0);" + cl); cs.Append(" ILibWebServer_StreamBody(session," + vIdent + "->DefaultValue,(int)strlen(" + vIdent + "->DefaultValue),ILibAsyncSocket_MemoryOwnership_USER,0);" + cl); cs.Append(" ILibWebServer_StreamBody(session,buffer+" + vIdent + "->Reserved7," + vIdent + "->Reserved7L,ILibAsyncSocket_MemoryOwnership_USER,0);" + cl); cs.Append(" }" + cl); } if (V.AllowedStringValues != null) { cs.Append(" ILibWebServer_StreamBody(session,buffer+" + vIdent + "->Reserved2," + vIdent + "->Reserved2L,ILibAsyncSocket_MemoryOwnership_USER,0);" + cl); cs.Append(" for(i=0;i<UPnP_StateVariable_AllowedValues_MAX;++i)" + cl); cs.Append(" {" + cl); cs.Append(" if (" + vIdent + "->AllowedValues[i]!=NULL)" + cl); cs.Append(" {" + cl); cs.Append(" ILibWebServer_StreamBody(session,\"<allowedValue>\",14,ILibAsyncSocket_MemoryOwnership_STATIC,0);" + cl); cs.Append(" ILibWebServer_StreamBody(session," + vIdent + "->AllowedValues[i],(int)strlen(" + vIdent + 
"->AllowedValues[i]),ILibAsyncSocket_MemoryOwnership_USER,0);" + cl); cs.Append(" ILibWebServer_StreamBody(session,\"</allowedValue>\",15,ILibAsyncSocket_MemoryOwnership_STATIC,0);" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); cs.Append(" ILibWebServer_StreamBody(session,buffer+" + vIdent + "->Reserved3," + vIdent + "->Reserved3L,ILibAsyncSocket_MemoryOwnership_USER,0);" + cl); } cs.Append(" ILibWebServer_StreamBody(session,buffer+" + vIdent + "->Reserved8," + vIdent + "->Reserved8L,ILibAsyncSocket_MemoryOwnership_USER,0);" + cl); cs.Append(" }" + cl); } cs.Append(" free(buffer);" + cl); } cs.Append(" UPnPStreamDescriptionDocument_SCPD(session,0,NULL,0,0,0,1);" + cl); } else { cs.Append(" buffer = " + this.pc_methodLibPrefix + "DecompressString((unsigned char*)" + pc_methodPrefix + serviceNames[service] + "Description," + pc_methodPrefix + serviceNames[service] + "DescriptionLength," + pc_methodPrefix + serviceNames[service] + "DescriptionLengthUX);" + cl); cs.Append(" " + this.pc_methodLibPrefix + "WebServer_Send_Raw(session,buffer," + pc_methodPrefix + serviceNames[service] + "DescriptionLengthUX,0,1);" + cl); } cs.Append(" }" + cl); } foreach (UPnPDevice d in device.EmbeddedDevices) { this.BuildHTTPSink_SCPD(cs, d, serviceNames); } } private string Build_SubscribeEvents_Device(string first, CodeProcessor cs, UPnPDevice device, Hashtable serviceNames) { foreach (UPnPService service in device.Services) { bool HasEvent = false; foreach (UPnPStateVariable sv in service.GetStateVariables()) { if (sv.SendEvent) { HasEvent = true; break; } } if (HasEvent) { UPnPDebugObject obj = new UPnPDebugObject(service); string name = (string)obj.GetField("__eventurl"); cs.Append(first + " if (pathlength==" + (name.Length + 1).ToString() + " && memcmp(path+1,\"" + name + "\"," + name.Length.ToString() + ")==0)" + cl); cs.Append(" {" + cl); cs.Append(" " + pc_methodPrefix + "TryToSubscribe(\"" + (string)serviceNames[service] + "\",TimeoutVal,URL,URLLength,session);" + cl); 
cs.Append(" }" + cl); first = "else"; } } foreach (UPnPDevice d in device.EmbeddedDevices) { first = Build_SubscribeEvents_Device(first, cs, d, serviceNames); } return (first); } private void BuildSSDPALL_Response(UPnPDevice device, CodeProcessor cs, int number) { if (number == 0) { cs.Append(" rcode = " + pc_methodPrefix + "BuildSendSsdpResponsePacket(response_socket, upnp, (struct sockaddr*)&(mss->localIPAddress), (struct sockaddr*)&(mss->dest_addr), 0, \"::upnp:rootdevice\", \"upnp:rootdevice\", \"\");" + cl); } // Device UUID Response if (number == 0) { cs.Append(" rcode = " + pc_methodPrefix + "BuildSendSsdpResponsePacket(response_socket, upnp, (struct sockaddr*)&(mss->localIPAddress), (struct sockaddr*)&(mss->dest_addr), " + number.ToString() + ", \"\", upnp->UUID, \"\");" + cl); } else { cs.Append(" rcode = " + pc_methodPrefix + "BuildSendSsdpResponsePacket(response_socket, upnp, (struct sockaddr*)&(mss->localIPAddress), (struct sockaddr*)&(mss->dest_addr), " + number.ToString() + ", \"\", ST, \"\");" + cl); } // Device URN cs.Append(" rcode = " + pc_methodPrefix + "BuildSendSsdpResponsePacket(response_socket, upnp, (struct sockaddr*)&(mss->localIPAddress), (struct sockaddr*)&(mss->dest_addr), " + number.ToString() + ", \"::" + device.DeviceURN + "\", \"" + device.DeviceURN + "\", \"\");" + cl); foreach (UPnPService service in device.Services) { // Service URN cs.Append(" rcode = " + pc_methodPrefix + "BuildSendSsdpResponsePacket(response_socket, upnp, (struct sockaddr*)&(mss->localIPAddress), (struct sockaddr*)&(mss->dest_addr), " + number.ToString() + ", \"::" + service.ServiceURN + "\", \"" + service.ServiceURN + "\", \"\");" + cl); } foreach (UPnPDevice d in device.EmbeddedDevices) { BuildSSDPALL_Response(d, cs, ++number); } } private void BuildMSEARCHHandler_device(UPnPDevice device, CodeProcessor cs, int number) { if (number == 0) { cs.Append(" else if (STLength == (int)strlen(upnp->UUID) && memcmp(ST,upnp->UUID,(int)strlen(upnp->UUID))==0)" + cl); 
cs.Append(" {" + cl); cs.Append(" rcode = " + pc_methodPrefix + "BuildSendSsdpResponsePacket(response_socket, upnp, (struct sockaddr*)&(mss->localIPAddress), (struct sockaddr*)&(mss->dest_addr), 0,\"\",upnp->UUID,\"\");" + cl); cs.Append(" }" + cl); } else if (number > 0) { cs.Append(" else if (STLength == (int)strlen(upnp->UUID) + " + (number.ToString().Length + 1).ToString() + ")" + cl); cs.Append(" {" + cl); cs.Append(" if (memcmp(ST,upnp->UUID,(int)strlen(upnp->UUID))==0)" + cl); cs.Append(" {" + cl); cs.Append(" if (memcmp(ST+(int)strlen(upnp->UUID),\"_" + number.ToString() + "\"," + (number.ToString().Length + 1).ToString() + ")==0)" + cl); cs.Append(" {" + cl); cs.Append(" rcode = " + pc_methodPrefix + "BuildSendSsdpResponsePacket(response_socket, upnp, (struct sockaddr*)&(mss->localIPAddress), (struct sockaddr*)&(mss->dest_addr), " + number.ToString() + ", \"\", ST, \"\");" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); } cs.Append(" else if (STLength >= " + device.DeviceURN_Prefix.Length.ToString() + " && memcmp(ST,\"" + device.DeviceURN_Prefix + "\"," + device.DeviceURN_Prefix.Length.ToString() + ")==0 && atoi(ST+" + device.DeviceURN_Prefix.Length.ToString() + ")<=" + device.Version.ToString() + ")" + cl); cs.Append(" {" + cl); cs.Append(" rcode = " + pc_methodPrefix + "BuildSendSsdpResponsePacket(response_socket, upnp, (struct sockaddr*)&(mss->localIPAddress), (struct sockaddr*)&(mss->dest_addr), " + number.ToString() + ", \"::" + device.DeviceURN_Prefix + "1\", ST, \"\");" + cl); if (device.Major > 1) { DText p = new DText(); p.ATTRMARK = ":"; p[0] = device.DeviceURN; cs.Append(" // packetlength = " + pc_methodPrefix + "FixVersion(b, \"" + p[p.DCOUNT() - 1] + ":1\", atoi(ST + " + device.DeviceURN_Prefix.Length.ToString() + "));" + cl); } cs.Append(" }" + cl); foreach (UPnPService service in device.Services) { cs.Append(" else if (STLength >= " + service.ServiceURN_Prefix.Length.ToString() + " && memcmp(ST,\"" + 
service.ServiceURN_Prefix + "\"," + service.ServiceURN_Prefix.Length.ToString() + ")==0 && atoi(ST+" + service.ServiceURN_Prefix.Length.ToString() + ")<=" + service.Version.ToString() + ")" + cl); cs.Append(" {" + cl); cs.Append(" rcode = " + pc_methodPrefix + "BuildSendSsdpResponsePacket(response_socket, upnp, (struct sockaddr*)&(mss->localIPAddress), (struct sockaddr*)&(mss->dest_addr), " + number.ToString() + ", \"::" + service.ServiceURN_Prefix + "1\", ST, \"\");" + cl); if (service.Major > 1) { DText p = new DText(); p.ATTRMARK = ":"; p[0] = service.ServiceURN; cs.Append(" // packetlength = " + pc_methodPrefix + "FixVersion(b, \"" + p[p.DCOUNT() - 1] + ":1\", atoi(ST + " + service.ServiceURN_Prefix.Length.ToString() + "));" + cl); } cs.Append(" }" + cl); } foreach (UPnPDevice d in device.EmbeddedDevices) { BuildMSEARCHHandler_device(d, cs, ++number); } } private int CountPackets(UPnPDevice d) { int RetVal = d.Root == true ? 3 : 2; RetVal += d.Services.Length; foreach (UPnPDevice e in d.EmbeddedDevices) { RetVal += CountPackets(e); } return (RetVal); } private void BuildDeviceDescription(CodeProcessor cs, UPnPDevice device) { UTF8Encoding U = new UTF8Encoding(); string deviceDescription = (new UTF8Encoding().GetString(device.GetRootDeviceXML(new IPEndPoint(new IPAddress(0x0100007F), 80)))); for (int i = 0; i < 40; i++) deviceDescription = deviceDescription.Replace("\r\n ", "\r\n"); deviceDescription = deviceDescription.Replace("\r\n", ""); //deviceDescription = deviceDescription; // if (this.BasicHTTP) // { // deviceDescription = "HTTP/1.0 200 OK\r\nCONTENT-TYPE: text/xml\r\nServer: " + UseSystem + ", UPnP/1.0, MicroStack/" + UseVersion + "\r\n\r\n" + deviceDescription; // } byte[] deviceDescriptionX = OpenSource.Utilities.StringCompressor.CompressString(deviceDescription); cs.Append("const int " + this.pc_methodPrefix + "DeviceDescriptionTemplateLengthUX = " + U.GetByteCount(deviceDescription).ToString() + ";" + cl); cs.Append("const int " + 
this.pc_methodPrefix + "DeviceDescriptionTemplateLength = " + deviceDescriptionX.Length.ToString() + ";" + cl); cs.Append("const char " + this.pc_methodPrefix + "DeviceDescriptionTemplate[" + deviceDescriptionX.Length.ToString() + "]={" + cl); bool _first = true; int _ctr = 0; foreach (byte b in deviceDescriptionX) { if (_first == false) { cs.Append(","); } else { _first = false; } string hx = b.ToString("X"); cs.Append("0x"); if (hx.Length == 1) { cs.Append("0"); } cs.Append(hx); ++_ctr; if (_ctr % 20 == 0) { cs.Append("\r\n"); } } cs.Append("};\r\n"); } private void BuildServiceDescriptions(CodeProcessor cs, UPnPDevice device, Hashtable serviceNames) { UTF8Encoding U = new UTF8Encoding(); Log(" Service description blocks."); string http_200_header; UPnPDevice root = device; while (root.ParentDevice != null) { root = root.ParentDevice; } if (!Configuration.HTTP_1dot1) { http_200_header = "HTTP/1.0 200 OK\r\nCONTENT-TYPE: text/xml; charset=\"utf-8\"\r\nServer: " + UseSystem + ", UPnP/" + root.ArchitectureVersion + ", MicroStack/" + UseVersion + "\r\n"; } else { http_200_header = "HTTP/1.1 200 OK\r\nCONTENT-TYPE: text/xml; charset=\"utf-8\"\r\nServer: " + UseSystem + ", UPnP/" + root.ArchitectureVersion + ", MicroStack/" + UseVersion + "\r\n"; } foreach (UPnPService service in device.Services) { string servicexml = new UTF8Encoding().GetString(service.GetSCPDXml()); for (int i = 0; i < 40; i++) servicexml = servicexml.Replace("\r\n ", "\r\n"); servicexml = servicexml.Replace("\r\n", ""); string servicehttpresponse = http_200_header + "Content-Length: " + U.GetByteCount(servicexml).ToString() + "\r\n\r\n" + servicexml; cs.Comment(serviceNames[service].ToString()); byte[] _sr = OpenSource.Utilities.StringCompressor.CompressString(servicehttpresponse); cs.Append("const int " + pc_methodPrefix + serviceNames[service] + "DescriptionLengthUX = " + U.GetByteCount(servicehttpresponse).ToString() + ";" + cl); cs.Append("const int " + pc_methodPrefix + serviceNames[service] + 
"DescriptionLength = " + (_sr.Length).ToString() + ";" + cl); cs.Append("const char " + pc_methodPrefix + serviceNames[service] + "Description[" + (_sr.Length).ToString() + "] = {" + cl); bool _first = true; int _ctr = 0; foreach (byte b in _sr) { if (_first == false) { cs.Append(","); } else { _first = false; } string hx = b.ToString("X"); cs.Append("0x"); if (hx.Length == 1) { cs.Append("0"); } cs.Append(hx); ++_ctr; if (_ctr % 20 == 0) { cs.Append("\r\n"); } } cs.Append("};" + cl); } foreach (UPnPDevice d in device.EmbeddedDevices) { BuildServiceDescriptions(cs, d, serviceNames); } } private void BuildNotifyPackets_Device(CodeProcessor cs, UPnPDevice device, int number, bool ipv6) { if (!ipv6) { cs.Append(" " + pc_methodPrefix + "BuildSendSsdpNotifyPacket(upnp->NOTIFY_SEND_socks[i], upnp, (struct sockaddr*)&(upnp->AddressListV4[i]), " + number.ToString() + ", \"\", \"uuid:\", upnp->UDN);" + cl); cs.Append(" " + pc_methodPrefix + "BuildSendSsdpNotifyPacket(upnp->NOTIFY_SEND_socks[i], upnp, (struct sockaddr*)&(upnp->AddressListV4[i]), " + number.ToString() + ", \"::" + device.DeviceURN + "\", \"" + device.DeviceURN + "\", \"\");" + cl); } else { cs.Append(" " + pc_methodPrefix + "BuildSendSsdpNotifyPacket(upnp->NOTIFY_SEND_socks6[i], upnp, (struct sockaddr*)&(upnp->AddressListV6[i]), " + number.ToString() + ", \"\", \"uuid:\", upnp->UDN);" + cl); cs.Append(" " + pc_methodPrefix + "BuildSendSsdpNotifyPacket(upnp->NOTIFY_SEND_socks6[i], upnp, (struct sockaddr*)&(upnp->AddressListV6[i]), " + number.ToString() + ", \"::" + device.DeviceURN + "\", \"" + device.DeviceURN + "\", \"\");" + cl); } foreach (UPnPService service in device.Services) { if (!ipv6) { cs.Append(" " + pc_methodPrefix + "BuildSendSsdpNotifyPacket(upnp->NOTIFY_SEND_socks[i], upnp, (struct sockaddr*)&(upnp->AddressListV4[i]), " + number.ToString() + ", \"::" + service.ServiceURN + "\", \"" + service.ServiceURN + "\", \"\");" + cl); } else { cs.Append(" " + pc_methodPrefix + 
"BuildSendSsdpNotifyPacket(upnp->NOTIFY_SEND_socks6[i], upnp, (struct sockaddr*)&(upnp->AddressListV6[i]), " + number.ToString() + ", \"::" + service.ServiceURN + "\", \"" + service.ServiceURN + "\", \"\");" + cl); } } foreach (UPnPDevice d in device.EmbeddedDevices) { BuildNotifyPackets_Device(cs, d, ++number, ipv6); } } private void BuildByeByePackets_Device(CodeProcessor cs, UPnPDevice device, int dn, bool ipv6) { if (!ipv6) { cs.Append(" " + pc_methodPrefix + "BuildSendSsdpByeByePacket(upnp->NOTIFY_SEND_socks[i], upnp, (struct sockaddr*)&(upnp->MulticastAddrV4), UPNP_MCASTv4_GROUP, \"\", \"uuid:\", upnp->UDN, " + dn.ToString() + ");" + cl); cs.Append(" " + pc_methodPrefix + "BuildSendSsdpByeByePacket(upnp->NOTIFY_SEND_socks[i], upnp, (struct sockaddr*)&(upnp->MulticastAddrV4), UPNP_MCASTv4_GROUP, \"::" + device.DeviceURN_Prefix + "1\", \"" + device.DeviceURN + "\", \"\", " + dn.ToString() + ");" + cl); } else { //cs.Append(" " + pc_methodPrefix + "BuildSendSsdpByeByePacket(upnp->NOTIFY_SEND_socks6[i], upnp, (struct sockaddr*)&(upnp->MulticastAddrV6), UPNP_MCASTv6_GROUPB, \"\", \"uuid:\", upnp->UDN, " + dn.ToString() + ");" + cl); //cs.Append(" " + pc_methodPrefix + "BuildSendSsdpByeByePacket(upnp->NOTIFY_SEND_socks6[i], upnp, (struct sockaddr*)&(upnp->MulticastAddrV6), UPNP_MCASTv6_GROUPB, \"::" + device.DeviceURN_Prefix + "1\", \"" + device.DeviceURN + "\", \"\", " + dn.ToString() + ");" + cl); cs.Append(" " + pc_methodPrefix + "BuildSendSsdpByeByePacket(upnp->NOTIFY_SEND_socks6[i], upnp, t1, t2, \"\", \"uuid:\", upnp->UDN, " + dn.ToString() + ");" + cl); cs.Append(" " + pc_methodPrefix + "BuildSendSsdpByeByePacket(upnp->NOTIFY_SEND_socks6[i], upnp, t1, t2, \"::" + device.DeviceURN_Prefix + "1\", \"" + device.DeviceURN + "\", \"\", " + dn.ToString() + ");" + cl); } foreach (UPnPService service in device.Services) { if (!ipv6) { cs.Append(" " + pc_methodPrefix + "BuildSendSsdpByeByePacket(upnp->NOTIFY_SEND_socks[i], upnp, (struct 
sockaddr*)&(upnp->MulticastAddrV4), UPNP_MCASTv4_GROUP, \"::" + service.ServiceURN + "\", \"" + service.ServiceURN_Prefix + "1\", \"\", " + dn.ToString() + ");" + cl); } else { cs.Append(" " + pc_methodPrefix + "BuildSendSsdpByeByePacket(upnp->NOTIFY_SEND_socks6[i], upnp, t1, t2, \"::" + service.ServiceURN + "\", \"" + service.ServiceURN_Prefix + "1\", \"\", " + dn.ToString() + ");" + cl); } } foreach (UPnPDevice d in device.EmbeddedDevices) { BuildByeByePackets_Device(cs, d, ++dn, ipv6); } } private void BuildUPnPResponse(CodeProcessor cs, UPnPDevice device, Hashtable serviceNames) { foreach (UPnPService service in device.Services) { ServiceGenerator.ServiceConfiguration SConf = (ServiceGenerator.ServiceConfiguration)service.User; foreach (UPnPAction action in service.Actions) { StringBuilder SB = new StringBuilder(); SB.Append(pc_methodPrefixDef + "Response_" + serviceNames[service] + "_" + action.Name + "(const UPnPSessionToken UPnPToken"); int argcount = 0; foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "out") { if (arg.RelatedStateVar.ComplexType == null) { // Primitive SB.Append(", const " + ToCType(arg.RelatedStateVar.GetNetType().ToString()) + " "); if (arg.RelatedStateVar.GetNetType() == typeof(string) && !SConf.Actions_ManualEscape.Contains(action)) { SB.Append("unescaped_" + arg.Name); } else { SB.Append(arg.Name); } if (arg.RelatedStateVar.GetNetType().FullName == "System.Byte[]") { SB.Append(", const int _" + arg.Name + "Length"); } } else { // Complex SB.Append(", struct " + arg.RelatedStateVar.ComplexType.Name_LOCAL + " *_" + arg.Name); } argcount++; } } SB.Append(")"); cs.Append("/*! 
\\fn " + SB.ToString() + cl); cs.Append(" \\brief Response Method for " + serviceNames[service] + " >> " + service.ServiceURN + " >> " + action.Name + cl); cs.Append(" \\param UPnPToken MicroStack token" + cl); foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "out") { cs.Append(" \\param "); if (arg.RelatedStateVar.ComplexType == null) { // Primitive if (arg.RelatedStateVar.GetNetType() == typeof(string) && !SConf.Actions_ManualEscape.Contains(action)) { cs.Append("unescaped_" + arg.Name + " Value of argument " + arg.Name + " \\b Note: Automatically Escaped" + cl); } else { cs.Append(arg.Name + " Value of argument " + arg.Name); if (arg.RelatedStateVar.GetNetType() == typeof(string)) { cs.Append(" \\b Note: Must be escaped" + cl); } else { cs.Append(cl); } } if (arg.RelatedStateVar.GetNetType().FullName == "System.Byte[]") { cs.Append(" \\param " + arg.Name + "Length Length of \\a " + arg.Name + cl); } } else { // Complex cs.Append(" _" + arg.Name + " Value of argument " + arg.Name + cl); } } } cs.Append("*/" + cl); cs.DefinePublic("void " + SB.ToString()); cs.Append("{" + cl); if (argcount == 0) { cs.Append(pc_methodPrefixDef + "ResponseGeneric(UPnPToken,\"" + service.ServiceURN + "\",\"" + action.Name + "\",\"\");" + cl); cs.Append("}" + cl); cs.Append(cl); continue; } foreach (UPnPArgument arg in action.Arguments) { if (arg.RelatedStateVar.ComplexType != null) { cs.Append(" char *tempString;" + cl); break; } } cs.Append(" char* body;" + cl); foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "out") { if (arg.RelatedStateVar.ComplexType == null) { // Simple if (arg.RelatedStateVar.GetNetType() == typeof(string) && !SConf.Actions_ManualEscape.Contains(action)) { cs.Append(" char *" + arg.Name + " = (char*)malloc(1+" + this.pc_methodLibPrefix + "XmlEscapeLength(unescaped_" + arg.Name + "));" + cl); } } else { //Complex cs.Append(" char *" + arg.Name + ";" + cl); } } } cs.Append(cl); foreach (UPnPArgument arg in 
action.Arguments) { if (arg.RelatedStateVar.ComplexType == null) { // If this is a simple string, we need to escape it if (arg.Direction == "out" && arg.RelatedStateVar.GetNetType() == typeof(string) && !SConf.Actions_ManualEscape.Contains(action)) { cs.Append(" " + this.pc_methodLibPrefix + "XmlEscape(" + arg.Name + ", unescaped_" + arg.Name + ");" + cl); } } } bool needSpecial = false; foreach (UPnPArgument arg in action.Arguments) { if (arg.RelatedStateVar.ComplexType != null) { needSpecial = true; break; } } if (needSpecial) { cs.Comment("For Complex Types, we need to:"); cs.Comment("1.) Serialize the XML structure"); cs.Comment("2.) Escape the serialization for legacy CPs"); cs.Append(cl); foreach (UPnPArgument a in action.Arguments) { if (a.RelatedStateVar.ComplexType != null) { cs.Append(" " + a.Name + "= " + this.pc_methodPrefix + "Serialize_" + a.RelatedStateVar.ComplexType.Name_LOCAL + "(_" + a.Name + ");" + cl); } } cs.Append(" if (((struct ILibWebServer_Session*)UPnPToken)->Reserved9 == 0)" + cl); cs.Append(" {" + cl); cs.Comment("Serialization for Legacy CP"); foreach (UPnPArgument a in action.Arguments) { if (a.RelatedStateVar.ComplexType != null) { cs.Append(cl); cs.Append(" if ((tempString = (char*)malloc(1+" + this.pc_methodLibPrefix + "XmlEscapeLength(" + a.Name + "))) == NULL) ILIBCRITICALEXIT(254);" + cl); cs.Append(" tempString[" + this.pc_methodLibPrefix + "XmlEscape(tempString, " + a.Name + ")] = 0;" + cl); cs.Append(" free(" + a.Name + ");" + cl); cs.Append(" " + a.Name + " = tempString;" + cl); } } cs.Append(" }" + cl); } string soap_invokeResponse = ""; int soap_size = 1; string soap_size_str = ""; foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "out") { soap_invokeResponse += "<" + arg.Name + ">"; soap_invokeResponse += this.ToSPrintfType(arg.RelatedStateVar.GetNetType().ToString()); soap_invokeResponse += "</" + arg.Name + ">"; soap_size += (5 + (2 * arg.Name.Length)); switch 
(arg.RelatedStateVar.GetNetType().ToString()) { case "System.Boolean": soap_size += 1; break; case "System.Byte[]": soap_size_str += "+strlen(" + arg.Name + "_Base64)"; break; case "System.String": case "System.Uri": soap_size_str += "+strlen(" + arg.Name + ")"; break; case "System.DateTime": soap_size += 20; break; case "System.Byte": case "System.SByte": case "System.Char": soap_size += 4; break; case "System.UInt16": case "System.Int16": soap_size += 6; break; case "System.UInt32": case "System.Int32": soap_size += 11; break; case "System.Single": case "System.Double": soap_size += 16; break; } } } foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "out" && arg.RelatedStateVar.GetNetType() == typeof(byte[])) { cs.Append(" unsigned char* " + arg.Name + "_Base64;" + cl); } if (arg.Direction == "out" && arg.RelatedStateVar.GetNetType() == typeof(DateTime)) { cs.Append(" char* " + arg.Name + "_DateTime;" + cl); } } foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "out" && arg.RelatedStateVar.GetNetType() == typeof(byte[])) { cs.Append(" " + pc_methodLibPrefix + "Base64Encode((unsigned char*)" + arg.Name + ", _" + arg.Name + "Length, &" + arg.Name + "_Base64);" + cl); } if (arg.Direction == "out" && arg.RelatedStateVar.GetNetType() == typeof(DateTime)) { cs.Append(" " + arg.Name + "_DateTime = " + pc_methodLibPrefix + "Time_Serialize(" + arg.Name + ");" + cl); } } cs.Append(" if ((body = (char*)malloc(" + soap_size.ToString() + soap_size_str + ")) == NULL) ILIBCRITICALEXIT(254);" + cl); cs.Append(" snprintf(body, " + soap_size.ToString() + soap_size_str + ", \"" + PrintfTransform(soap_invokeResponse) + "\""); foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "out") { if (arg.RelatedStateVar.GetNetType() != typeof(bool)) { cs.Append(", " + arg.Name); if (arg.RelatedStateVar.GetNetType() == typeof(byte[])) { cs.Append("_Base64"); } if (arg.RelatedStateVar.GetNetType() == typeof(System.DateTime)) { 
cs.Append("_DateTime"); } } else { cs.Append(", (" + arg.Name + "!=0?1:0)"); } } } cs.Append(");" + cl); cs.Append(" " + this.pc_methodPrefix + "ResponseGeneric(UPnPToken, \"" + service.ServiceURN + "\", \"" + action.Name + "\", body);" + cl); cs.Append(" free(body);" + cl); foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "out") { if (arg.RelatedStateVar.GetNetType() == typeof(byte[])) { cs.Append(" free(" + arg.Name + "_Base64);" + cl); } if (arg.RelatedStateVar.GetNetType() == typeof(DateTime)) { cs.Append(" free(" + arg.Name + "_DateTime);" + cl); } if (arg.RelatedStateVar.GetNetType() == typeof(string) && !SConf.Actions_ManualEscape.Contains(action)) { cs.Append(" free(" + arg.Name + ");" + cl); } } } cs.Append("}" + cl); cs.Append(cl); } } foreach (UPnPDevice d in device.EmbeddedDevices) { this.BuildUPnPResponse(cs, d, serviceNames); } } private int GetAbsoluteTotalNumberOfEventedStateVariables(UPnPDevice device) { int RetVal = 0; foreach (UPnPService service in device.Services) { foreach (UPnPStateVariable statevar in service.GetStateVariables()) { if (statevar.SendEvent == true) ++RetVal; } } foreach (UPnPDevice d in device.EmbeddedDevices) { RetVal += GetAbsoluteTotalNumberOfEventedStateVariables(d); } return (RetVal); } string BuildMulticastSoapEventsProcessor(string WS, UPnPDevice device, Hashtable serviceNames) { string RetVal = WS; string WS2; foreach (UPnPService service in device.Services) { foreach (UPnPStateVariable statevar in service.GetStateVariables()) { if (statevar.MulticastEvent) { WS2 = SourceCodeRepository.GetTextBetweenTags(WS, "//{{{BEGIN_CHECK_MULTICASTVARIABLE}}}", "//{{{END_CHECK_MULTICASTVARIABLE}}}"); WS2 = WS2.Replace("{{{VARNAME}}}", statevar.Name); WS2 = WS2.Replace("{{{SERVICENAME}}}", (string)serviceNames[service]); WS2 = WS2.Replace("{{{SERVICETYPE}}}", service.ServiceURN_Prefix.Substring(0, service.ServiceURN_Prefix.Length - 1)); WS2 = WS2.Replace("{{{SERVICETYPELENGTH}}}", 
(service.ServiceURN_Prefix.Length - 1).ToString()); WS2 = WS2.Replace("{{{VARDISPATCH}}}", EmbeddedCGenerator.ToCTypeFromStateVar_Dispatch(statevar)); WS2 = WS2.Replace("{{{VARSERIALIZE}}}", EmbeddedCGenerator.ToCTypeFromStateVar_Serialize("VariableValue", "VariableValueLength", "OK", this.pc_methodLibPrefix, statevar)); WS = SourceCodeRepository.InsertTextBeforeTag(WS, "//{{{BEGIN_CHECK_MULTICASTVARIABLE}}}", WS2); WS = WS.Replace("{{{VARDEFS}}}", "{{{VARDEFS}}}" + cl + EmbeddedCGenerator.ToCTypeFromStateVar(statevar) + ";"); } } } foreach (UPnPDevice d in device.EmbeddedDevices) { WS = this.BuildMulticastSoapEventsProcessor(WS, d, serviceNames); } return (WS); } private void BuildMulticastSoapEvents(CodeProcessor cs, UPnPDevice device, Hashtable serviceNames) { foreach (UPnPService service in device.Services) { foreach (UPnPStateVariable statevar in service.GetStateVariables()) { if (statevar.MulticastEvent) { #region Initialize string eventname = "UPnPObject->" + serviceNames[service] + "_" + statevar.Name; string binaryextra = ""; if (statevar.GetNetType().ToString() == "System.Byte[]") binaryextra = ",int vallen"; #endregion cs.DefinePublic("void " + pc_methodPrefixDef + "SetMulticastState_" + serviceNames[service] + "_" + statevar.Name + "(UPnPMicroStackToken upnptoken, enum MULTICAST_EVENT_TYPE eventType, " + ToCType(statevar.GetNetType().ToString()) + " val" + binaryextra + ")"); cs.Append("{" + cl); cs.Append(" struct " + pc_methodPrefix + "DataObject *UPnPObject = (struct " + pc_methodPrefix + "DataObject*)upnptoken;" + cl); cs.Append(" char *b;" + cl); cs.Append(" int bLength;" + cl); cs.Append(" void *response_socket;" + cl); cs.Append(" void *subChain;" + cl); cs.Append(" int *addrList;" + cl); cs.Append(" int addrListLength;" + cl); cs.Append(" int i;" + cl); cs.Append(" char newVal[32];" + cl); //ToDo: Magic Value cs.Append(" if ((b = (char*)malloc(5000)) == NULL) ILIBCRITICALEXIT(254);" + cl); //ToDo: Magic Value cs.Append(cl); cs.Append(" subChain 
= ILibCreateChain();" + cl); cs.Append(" response_socket = ILibAsyncUDPSocket_Create(" + cl); cs.Append(" subChain," + cl); cs.Append(" UPNP_MAX_SSDP_HEADER_SIZE," + cl); cs.Append(" 0," + cl); cs.Append(" 0," + cl); cs.Append(" ILibAsyncUDPSocket_Reuse_SHARED," + cl); cs.Append(" NULL," + cl); cs.Append(" NULL," + cl); cs.Append(" subChain);" + cl); cs.Append(cl); cs.Append(" ++" + pc_methodPrefix + "Object->" + serviceNames[service] + "_" + statevar.Name + "_SEQ;" + cl); cs.Append(cl); cs.Append(" snprintf(newVal, 32, \"%d\", val);" + cl); cs.Append(" bLength = snprintf(b, 5000, UPnPMulticastPacketTemplate," + cl); //ToDo: Magic Value cs.Append(" UPNP_GROUP," + cl); cs.Append(" UPNP_MULTICASTEVENT_PORT," + cl); cs.Append(" UPnPObject->UDN," + cl); cs.Append(" \"" + service.ServiceURN + "\"," + cl); cs.Append(" UPnPObject->" + serviceNames[service] + "_" + statevar.Name + "_SEQ," + cl); cs.Append(" MULTICAST_EVENT_TYPE_DESCRIPTION[(int)eventType]," + cl); cs.Append(" UPnPObject->InitialNotify," + cl); cs.Append(" \"" + statevar.Name + "\"," + cl); cs.Append(" newVal," + cl); cs.Append(" \"" + statevar.Name + "\");" + cl); cs.Append(" addrListLength = ILibGetLocalIPAddressList(&addrList);" + cl); cs.Append(" ILibAsyncUDPSocket_JoinMulticastGroup(response_socket, 0, inet_addr(UPNP_GROUP));" + cl); cs.Append(" for(i = 0; i < addrListLength; ++i)" + cl); cs.Append(" {" + cl); cs.Append(" ILibAsyncUDPSocket_SetMulticastInterface(response_socket, addrList[i]);" + cl); cs.Append(" ILibAsyncUDPSocket_SendTo(response_socket, inet_addr(UPNP_GROUP), UPNP_MULTICASTEVENT_PORT, b, bLength, ILibAsyncSocket_MemoryOwnership_USER);" + cl); cs.Append(" }" + cl); cs.Append(" free(addrList);" + cl); cs.Append(" ILibChain_DestroyEx(subChain);" + cl); cs.Append("}" + cl); } } } foreach (UPnPDevice d in device.EmbeddedDevices) { this.BuildMulticastSoapEvents(cs, d, serviceNames); } } private void BuildSoapEvents(CodeProcessor cs, UPnPDevice device, Hashtable serviceNames) { //string 
soap_eventblock = "<?xml version=\"1.0\" encoding=\"utf-8\"?><e:propertyset xmlns:e=\"urn:schemas-upnp-org:event-1-0\"><e:property><%s>%s</%s></e:property></e:propertyset>"; string soap_eventblock = "%s>%s</%s"; foreach (UPnPService service in device.Services) { #region Calculate number of evented variables int eventedStateVariables = 0; foreach (UPnPStateVariable statevar in service.GetStateVariables()) { if (statevar.SendEvent == true) eventedStateVariables++; } #endregion foreach (UPnPStateVariable statevar in service.GetStateVariables()) { if (statevar.SendEvent == true) { #region Initialize string eventname = "UPnPObject->" + serviceNames[service] + "_" + statevar.Name; string binaryextra = ""; if (statevar.GetNetType().ToString() == "System.Byte[]") binaryextra = ",int vallen"; #endregion #region SetState cs.Append("/*! \\fn " + pc_methodPrefixDef + "SetState_" + serviceNames[service] + "_" + statevar.Name + "(UPnPMicroStackToken upnptoken, " + ToCType(statevar.GetNetType().ToString()) + " val" + binaryextra + ")" + cl); cs.Append(" \\brief Sets the state of " + statevar.Name + " << " + statevar.OwningService.ServiceURN + " << " + serviceNames[service] + " \\par" + cl); cs.Append(" \\b Note: Must be called at least once prior to start" + cl); cs.Append(" \\param upnptoken The MicroStack token" + cl); cs.Append(" \\param val The new value of the state variable" + cl); if (binaryextra != "") { cs.Append(" \\param vallen Length of \\a val" + cl); } cs.Append("*/" + cl); cs.DefinePublic("void " + pc_methodPrefixDef + "SetState_" + serviceNames[service] + "_" + statevar.Name + "(UPnPMicroStackToken upnptoken, " + ToCType(statevar.GetNetType().ToString()) + " val" + binaryextra + ")"); cs.Append("{" + cl); cs.Append(" struct " + pc_methodPrefix + "DataObject *UPnPObject = (struct " + pc_methodPrefix + "DataObject*)upnptoken;" + cl); cs.Append(" char* body;" + cl); cs.Append(" int bodylength;" + cl); if (statevar.GetNetType().ToString() == "System.Byte[]") { 
cs.Append(" unsigned char* valstr;" + cl); } else { cs.Append(" char* valstr;" + cl); } #region Data Handling // Data Type Handling Code switch (statevar.GetNetType().ToString()) { case "System.Boolean": cs.Append(" if (val != 0) valstr = \"true\"; else valstr = \"false\";" + cl); break; case "System.Byte[]": cs.Append(" " + pc_methodLibPrefix + "Base64Encode(val, vallen, &valstr);" + cl); break; case "System.Uri": case "System.String": cs.Append(" if ((valstr = (char*)malloc(" + pc_methodLibPrefix + "XmlEscapeLength(val) + 1)) == NULL) ILIBCRITICALEXIT(254);" + cl); cs.Append(" " + this.pc_methodLibPrefix + "XmlEscape(valstr, val);" + cl); break; case "System.Byte": case "System.Int16": case "System.Int32": cs.Append(" if ((valstr = (char*)malloc(10)) == NULL) ILIBCRITICALEXIT(254);" + cl); cs.Append(" snprintf(valstr, 10, \"%d\", val);" + cl); break; case "System.Char": case "System.SByte": case "System.UInt16": case "System.UInt32": cs.Append(" if ((valstr = (char*)malloc(10)) == NULL) ILIBCRITICALEXIT(254);" + cl); cs.Append(" snprintf(valstr, 10, \"%u\", val);" + cl); break; case "System.Single": case "System.Double": cs.Append(" if ((valstr = (char*)malloc(30)) == NULL) ILIBCRITICALEXIT(254);" + cl); cs.Append(" snprintf(valstr, 30, \"%f\", val);" + cl); break; case "System.DateTime": cs.Append(" valstr = " + this.pc_methodLibPrefix + "Time_Serialize(val);" + cl); break; default: cs.Append(" char* valuestr = NULL;" + cl); break; } #endregion #region Memory Handling // Data Type Handling Code switch (statevar.GetNetType().ToString()) { case "System.Uri": case "System.String": case "System.Byte[]": case "System.Byte": case "System.Int16": case "System.Int32": case "System.Char": case "System.SByte": case "System.UInt16": case "System.UInt32": case "System.Single": case "System.Double": cs.Append(" if (" + eventname + " != NULL) free(" + eventname + ");" + cl); break; } #endregion cs.Append(" " + eventname + " = valstr;" + cl); cs.Append(" bodylength = " + 
(soap_eventblock.Length + (statevar.Name.Length * 2) + 1) + " + (int)strlen(valstr);" + cl); cs.Append(" if ((body = (char*)malloc(bodylength)) == NULL) ILIBCRITICALEXIT(254);" + cl); cs.Append(" bodylength = snprintf(body, bodylength, \"" + PrintfTransform(soap_eventblock) + "\", \"" + statevar.Name + "\", valstr, \"" + statevar.Name + "\");" + cl); cs.Append(" " + pc_methodPrefix + "SendEvent(upnptoken, body, bodylength, \"" + (string)serviceNames[service] + "\");" + cl); cs.Append(" free(body);" + cl); cs.Append("}" + cl); cs.Append(cl); #endregion } } } foreach (UPnPDevice d in device.EmbeddedDevices) { this.BuildSoapEvents(cs, d, serviceNames); } } private void BuildFunctionPointers(CodeProcessor cs, UPnPDevice device, Hashtable serviceNames) { string staticdef = ""; if (this.Language == LANGUAGES.CPP) staticdef = "static "; SortedList SL = new SortedList(); IDictionaryEnumerator en = serviceNames.GetEnumerator(); while (en.MoveNext()) { SL[en.Value] = en.Key; } en = SL.GetEnumerator(); while (en.MoveNext()) { UPnPService S = (UPnPService)en.Value; string name = (string)en.Key; if (name != "DeviceSecurity") { foreach (UPnPAction A in S.Actions) { string d = ""; d += "/*! 
\\var " + pc_methodPrefixDef + "FP_" + name + "_" + A.Name + cl; d += " \\brief Dispatch Pointer for " + name + " >> " + S.ServiceURN + " >> " + A.Name + cl; d += "*/" + cl; d += "UPnP_ActionHandler_" + serviceNames[S] + "_" + A.Name + " " + pc_methodPrefixDef + "FP_" + name + "_" + A.Name + ";" + cl; cs.Append(d); cs.PublicClassDefinitions.Append(staticdef + d); } } } } private void BuildFunctionPointerHeaders(CodeProcessor cs, UPnPDevice device, Hashtable serviceNames) { SortedList SL = new SortedList(); IDictionaryEnumerator en = serviceNames.GetEnumerator(); while (en.MoveNext()) { SL[en.Value] = en.Key; } en = SL.GetEnumerator(); while (en.MoveNext()) { UPnPService S = (UPnPService)en.Value; string name = (string)en.Key; if (name != "DeviceSecurity") { foreach (UPnPAction A in S.Actions) { cs.Append("extern UPnP_ActionHandler_" + serviceNames[S] + "_" + A.Name + " " + pc_methodPrefixDef + "FP_" + name + "_" + A.Name + ";" + cl); } } } } private void BuildUPnPResponseHeaders(CodeProcessor cs, UPnPDevice device, Hashtable serviceNames) { foreach (UPnPService service in device.Services) { ServiceGenerator.ServiceConfiguration SConf = (ServiceGenerator.ServiceConfiguration)service.User; foreach (UPnPAction action in service.Actions) { cs.Append("void " + pc_methodPrefixDef + "Response_" + serviceNames[service] + "_" + action.Name + "(const UPnPSessionToken UPnPToken"); /* if (action.HasReturnValue) { cs.Append(", const " + ToCType(action.GetRetArg().RelatedStateVar.GetNetType().FullName) + " __ReturnValue"); if (action.GetRetArg().RelatedStateVar.GetNetType().FullName=="System.Byte[]") { cs.Append(", const int __ReturnValueLength"); } } */ foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "out") { if (arg.RelatedStateVar.ComplexType == null) { // Simple Type cs.Append(", const " + ToCType(arg.RelatedStateVar.GetNetType().ToString()) + " " + arg.Name); if (arg.RelatedStateVar.GetNetType().FullName == "System.Byte[]") { cs.Append(", const int _" 
+ arg.Name + "Length"); } } else { // Complex Type cs.Append(", struct " + arg.RelatedStateVar.ComplexType.Name_LOCAL + " *" + arg.Name); } } } cs.Append(");" + cl); } bool NeedManualComment = false; foreach (UPnPAction action in service.Actions) { if (SConf.Actions_ManualEscape.Contains(action)) { // Manual Escape NeedManualComment = true; break; } } if (NeedManualComment) { cs.Append(cl); cs.Comment("The string parameters for the following response methods MUST be MANUALLY escaped"); foreach (UPnPAction action in service.Actions) { if (SConf.Actions_ManualEscape.Contains(action)) { cs.Comment("void " + pc_methodPrefixDef + "Response_" + serviceNames[service] + "_" + action.Name); } } cs.Append(cl); } } foreach (UPnPDevice d in device.EmbeddedDevices) { this.BuildUPnPResponseHeaders(cs, d, serviceNames); } } private string BuildMulticastStateVariableHeaders2(string WS, UPnPDevice device, Hashtable serviceNames) { string WS2; foreach (UPnPService service in device.Services) { foreach (UPnPStateVariable statevar in service.GetStateVariables()) { if (statevar.MulticastEvent) { WS2 = SourceCodeRepository.GetTextBetweenTags(WS, "//{{{BEGIN_MulticastEventing_Specific}}}", "//{{{END_MulticastEventing_Specific}}}"); WS2 = WS2.Replace("{{{SERVICENAME}}}", (string)serviceNames[service]); WS2 = WS2.Replace("{{{VARNAME}}}", statevar.Name); WS2 = WS2.Replace("{{{ARGLIST}}}", EmbeddedCGenerator.ToCTypeFromStateVar(statevar)); WS = SourceCodeRepository.InsertTextBeforeTag(WS, "//{{{BEGIN_MulticastEventing_Specific}}}", WS2); } } } foreach (UPnPDevice d in device.EmbeddedDevices) { WS = this.BuildMulticastStateVariableHeaders2(WS, d, serviceNames); } return (WS); } private void BuildMulticastStateVariableHeaders(CodeProcessor cs, UPnPDevice device, Hashtable serviceNames) { foreach (UPnPService service in device.Services) { foreach (UPnPStateVariable statevar in service.GetStateVariables()) { if (statevar.MulticastEvent) { cs.Append("void " + pc_methodPrefixDef + 
"SetMulticastState_" + serviceNames[service] + "_" + statevar.Name + "(UPnPMicroStackToken upnptoken, enum MULTICAST_EVENT_TYPE eventType," + EmbeddedCGenerator.ToCTypeFromStateVar(statevar) + ");" + cl); } } } foreach (UPnPDevice d in device.EmbeddedDevices) { this.BuildMulticastStateVariableHeaders(cs, d, serviceNames); } } private void BuildStateVariableHeaders(CodeProcessor cs, UPnPDevice device, Hashtable serviceNames) { foreach (UPnPService service in device.Services) { foreach (UPnPStateVariable statevar in service.GetStateVariables()) { if (statevar.SendEvent == true) { string binaryextra = ""; if (statevar.GetNetType().ToString() == "System.Byte[]") binaryextra = ", int _" + statevar.Name + "Length"; cs.Append("void " + pc_methodPrefixDef + "SetState_" + serviceNames[service] + "_" + statevar.Name + "(UPnPMicroStackToken microstack," + ToCType(statevar.GetNetType().ToString()) + " val" + binaryextra + ");" + cl); } } } foreach (UPnPDevice d in device.EmbeddedDevices) { this.BuildStateVariableHeaders(cs, d, serviceNames); } } private void BuildStateVariableEventingSample(CodeProcessor cs, UPnPDevice device, Hashtable serviceNames) { string cppobj = ""; if (this.Language == LANGUAGES.CPP) { cppobj = "microstack->"; } bool present = false; foreach (UPnPService service in device.Services) { if (serviceNames[service].ToString() != "DeviceSecurity") { foreach (UPnPStateVariable statevar in service.GetStateVariables()) if (statevar.SendEvent == true) present = true; } } if (present) cs.Append(" // All evented state variables MUST be initialized before UPnPStart is called." 
+ cl); foreach (UPnPService service in device.Services) { if (serviceNames[service].ToString() != "DeviceSecurity") { foreach (UPnPStateVariable statevar in service.GetStateVariables()) { if (statevar.SendEvent == true) { cs.Append(" " + cppobj + pc_methodPrefix + "SetState_" + serviceNames[service] + "_" + statevar.Name + "(" + this.pc_methodPrefix + "microStack, " + ToSampleValue(statevar.GetNetType().ToString()) + ");" + cl); } } } } foreach (UPnPDevice d in device.EmbeddedDevices) { this.BuildStateVariableEventingSample(cs, d, serviceNames); } } private void BuildMainUserCode(CodeProcessor cs, UPnPDevice device, Hashtable serviceNames) { string cppobj = ""; foreach (UPnPService service in device.Services) { ServiceGenerator.ServiceConfiguration SConf = (ServiceGenerator.ServiceConfiguration)service.User; if (serviceNames[service].ToString() != "DeviceSecurity") { foreach (UPnPAction action in service.Actions) { #region Invoke #region Header cs.Append("void " + pc_methodPrefix + serviceNames[service] + "_" + action.Name + "(" + this.pc_methodPrefix + "SessionToken upnptoken"); foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "in") { if (arg.RelatedStateVar.ComplexType == null) { cs.Append("," + ToCType(arg.RelatedStateVar.GetNetType().ToString()) + " " + arg.Name); if (arg.RelatedStateVar.GetNetType().FullName == "System.Byte[]") { cs.Append(",int _" + arg.Name + "Length"); } } else { cs.Append(", struct " + arg.RelatedStateVar.ComplexType.Name_LOCAL + " *" + arg.Name); } } } cs.Append(")" + cl); #endregion #region Body cs.Append("{" + cl); #region printf if (this.SubTarget == SUBTARGETS.PPC2003) { cs.Append(" CString display;" + cl); foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "in" && (arg.RelatedStateVar.GetNetType() == typeof(string) || arg.RelatedStateVar.GetNetType() == typeof(System.Uri))) { cs.Append(" wchar_t *wc_" + arg.Name + " = NULL;" + cl); cs.Append(" int wc_" + arg.Name + "Length = 0;" + cl); } } 
cs.Append(cl); foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "in" && (arg.RelatedStateVar.GetNetType() == typeof(string) || arg.RelatedStateVar.GetNetType() == typeof(System.Uri))) { cs.Append(" if (" + arg.Name + " != NULL)" + cl); cs.Append(" {" + cl); cs.Append(" wc_" + arg.Name + "Length = MultiByteToWideChar(CP_UTF8, 0, " + arg.Name + ", -1, wc_" + arg.Name + ", 0);" + cl); cs.Append(" if ((wc_" + arg.Name + " = (wchar_t*)malloc(sizeof(wchar_t)*wc_" + arg.Name + "Length)) == NULL) ILIBCRITICALEXIT(254);" + cl); cs.Append(" MultiByteToWideChar(CP_UTF8, 0 ," + arg.Name + ", -1, wc_" + arg.Name + ", wc_" + arg.Name + "Length);" + cl); cs.Append(" }" + cl); } } cs.Append(" display.Format(_T("); } else { cs.Append(" printf("); } cs.Append("\"Invoke: " + pc_methodPrefix + serviceNames[service] + "_" + action.Name + "("); bool firstArg = true; foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "in") { if (firstArg == false) cs.Append(","); if (arg.RelatedStateVar.GetNetType().FullName == "System.Byte[]") { cs.Append("BINARY(%d)"); } else { cs.Append(ToPrintfType(arg.RelatedStateVar.GetNetType().ToString())); } firstArg = false; } } cs.Append(");\\r\\n\""); if (this.SubTarget == SUBTARGETS.PPC2003) { cs.Append(")"); } foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "in") { if (arg.RelatedStateVar.GetNetType().FullName == "System.Byte[]") { cs.Append(", _" + arg.Name + "Length"); } else { if ((arg.RelatedStateVar.GetNetType() == typeof(Uri) || arg.RelatedStateVar.GetNetType() == typeof(string)) && this.SubTarget == SUBTARGETS.PPC2003) { cs.Append(", wc_" + arg.Name); } else { cs.Append(", " + arg.Name); } } } } cs.Append(");" + cl); if (this.SubTarget == SUBTARGETS.PPC2003) { cs.Append(" if (that->m_Text.GetLength() > 16384)" + cl); cs.Append(" {" + cl); cs.Append(" that->m_Text = display;" + cl); cs.Append(" }" + cl); cs.Append(" else" + cl); cs.Append(" {" + cl); cs.Append(" that->m_Text += display;" + 
cl); cs.Append(" }" + cl); cs.Append(" that->SendMessage(WM_USER_UPDATE);" + cl); foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "in" && (arg.RelatedStateVar.GetNetType() == typeof(string) || arg.RelatedStateVar.GetNetType() == typeof(System.Uri))) { cs.Append(" if (wc_" + arg.Name + " != NULL) {free(wc_" + arg.Name + ");}" + cl); } } } #endregion cs.Append(cl); cs.Comment("If you intend to make the response later, you MUST reference count upnptoken with calls to " + this.pc_methodLibPrefix + "WebServer_AddRef()"); cs.Comment("and " + this.pc_methodLibPrefix + "WebServer_Release()"); cs.Append(cl); cs.Comment("TODO: Place Action Code Here..."); cs.Append(cl); cs.Comment(cppobj + pc_methodPrefix + "Response_Error(upnptoken, 404, \"Method Not Implemented\");"); if (SConf.Actions_Fragmented.Contains(action) == false) { // Standard Response System Only #region Standard Response cs.Append(" " + cppobj + pc_methodPrefix + "Response_" + serviceNames[service] + "_" + action.Name + "(upnptoken"); foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "out") { cs.Append("," + ToSampleValue(arg.RelatedStateVar.GetNetType().ToString())); } } cs.Append(");" + cl); #endregion } else { // Fragmented Response System #region Standard Response, Commented out cs.Append(" /* " + cppobj + pc_methodPrefix + "Response_" + serviceNames[service] + "_" + action.Name + "(upnptoken"); foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "out") { cs.Append("," + ToSampleValue(arg.RelatedStateVar.GetNetType().ToString())); } } cs.Append("); */" + cl); #endregion #region Fragmented Response cs.Append(cl); cs.Comment("Fragmented response system, action result is constructed and sent on-the-fly."); cs.Append(" " + cppobj + pc_methodPrefix + "AsyncResponse_START(upnptoken, \"" + action.Name + "\", \"" + service.ServiceURN + "\");" + cl); foreach (UPnPArgument arg in action.Arguments) { if (arg.Direction == "out") { if 
(!Configuration.HTTP_1dot1) { cs.Append(" " + cppobj + pc_methodPrefix + "AsyncResponse_OUT(upnptoken, \"" + arg.Name + "\", \"\", 0, 1, 1);" + cl); } else { cs.Append(" " + cppobj + pc_methodPrefix + "AsyncResponse_OUT(upnptoken, \"" + arg.Name + "\", \"\", 0, " + pc_methodLibPrefix + "AsyncSocket_MemoryOwnership_STATIC, 1, 1);" + cl); } } } cs.Append(" " + cppobj + pc_methodPrefix + "AsyncResponse_DONE(upnptoken, \"" + action.Name + "\");" + cl); #endregion } cs.Append("}" + cl); #endregion #endregion cs.Append(cl); } } } foreach (UPnPDevice d in device.EmbeddedDevices) { this.BuildMainUserCode(cs, d, serviceNames); } } private void BuildMain_SetFunctionPointers(CodeProcessor cs, UPnPDevice device, Hashtable serviceNames) { string cppobj = ""; if (this.Language == LANGUAGES.CPP) { cppobj = "microstack->"; } SortedList SL = new SortedList(); IDictionaryEnumerator en = serviceNames.GetEnumerator(); while (en.MoveNext()) { SL[en.Value] = en.Key; } en = SL.GetEnumerator(); while (en.MoveNext()) { UPnPService S = (UPnPService)en.Value; string name = (string)en.Key; if (name != "DeviceSecurity") { foreach (UPnPAction A in S.Actions) { cs.Append(" " + cppobj + pc_methodPrefix + "FP_" + name + "_" + A.Name + " = (UPnP_ActionHandler_" + name + "_" + A.Name + ")&" + pc_methodPrefix + name + "_" + A.Name + ";" + cl); } } } cs.Append(cl); } private int GetTotalNumberOfDevices(UPnPDevice device) { int RetVal = 1; foreach (UPnPDevice d in device.EmbeddedDevices) { RetVal += GetTotalNumberOfDevices(d); } return (RetVal); } private void Fix(UPnPDevice device, int number, Hashtable serviceNameTable) { device.User3 = new object[9]{ device.SerialNumber, device.FriendlyName, device.Manufacturer, device.ManufacturerURL, device.ModelDescription, device.ModelName, device.ModelNumber, device.ModelURL, device.ProductCode}; if (device.FriendlyName != "%s") { FriendlyNameTable[device] = device.FriendlyName; } if (device.Root) { device.UniqueDeviceName = "%s"; } else { 
device.UniqueDeviceName = "%s_" + number.ToString(); } device.SerialNumber = "%s"; device.FriendlyName = "%s"; if (Configuration.DynamicObjectModel) { device.Manufacturer = "%s"; device.ManufacturerURL = "%s"; device.ModelDescription = "%s"; device.ModelName = "%s"; device.ModelNumber = "%s"; device.ModelURL = new Uri("http://255.255.255.255:255/"); device.ProductCode = "%s"; } foreach (UPnPService service in device.Services) { UPnPDebugObject obj = new UPnPDebugObject(service); obj.SetField("SCPDURL", (string)serviceNameTable[service] + "/scpd.xml"); obj.SetField("__controlurl", (string)serviceNameTable[service] + "/control"); bool eventOK = false; foreach (UPnPStateVariable sv in service.GetStateVariables()) { if (sv.SendEvent) { eventOK = true; break; } } if (eventOK) { obj.SetField("__eventurl", (string)serviceNameTable[service] + "/event"); } else { obj.SetField("__eventurl", ""); } } foreach (UPnPDevice d in device.EmbeddedDevices) { Fix(d, ++number, serviceNameTable); } } public string PrintfTransform(string data) { data = data.Replace("\\", "\\\\"); data = data.Replace("\r", "\\r"); data = data.Replace("\n", "\\n"); data = data.Replace("\"", "\\\""); return data; } public string ToCType(string t) { return (Static_ToCType(t)); } public static string Static_ToCType(string t) { switch (t) { case "System.Char": return "char"; case "System.String": return "char*"; case "System.Boolean": return "int"; case "System.Uri": return "char*"; case "System.Byte": return "unsigned char"; case "System.UInt16": return "unsigned short"; case "System.UInt32": return "unsigned int"; case "System.Int32": return "int"; case "System.Int16": return "short"; case "System.SByte": return "char"; case "System.Single": return "float"; case "System.Double": return "double"; case "System.Byte[]": return "unsigned char*"; case "System.DateTime": return "time_t"; default: return "char*"; } } public static string ToCTypeFromStateVar(UPnPStateVariable V) { if (V.ComplexType == null) { string 
RetVal = Static_ToCType(V.GetNetType().ToString()) + " " + V.Name; if (Static_ToCType(V.GetNetType().ToString()) == "unsigned char*") { RetVal += ", int " + V.Name + "Length"; } return (RetVal); } else { return ("struct " + V.ComplexType.Name_LOCAL + "* " + V.Name); } } public static string ToCTypeFromStateVar_Dispatch(UPnPStateVariable V) { if (V.ComplexType == null) { string RetVal = V.Name; if (Static_ToCType(V.GetNetType().ToString()) == "unsigned char*") { RetVal += ", " + V.Name + "Length"; } return (RetVal); } else { return (V.Name); } } public static string ToCTypeFromStateVar_Serialize(string InVar, string InVarLength, string OK, string LibPrefix, UPnPStateVariable A) { StringBuilder cs = new StringBuilder(); if (OK != "") { OK = OK + " = "; } switch (A.GetNetType().FullName) { case "System.DateTime": cs.Append(" " + A.Name + " = " + LibPrefix + "Time_Parse(" + InVar + ");" + cl); break; case "System.SByte": case "System.Int16": case "System.Int32": cs.Append(" " + OK + LibPrefix + "GetLong(" + InVar + ", " + InVarLength + ", (long*)&" + A.Name + ");" + cl); break; case "System.Byte": case "System.UInt16": case "System.UInt32": cs.Append(" " + OK + LibPrefix + "GetULong(" + InVar + ", " + InVarLength + ", (unsigned long*)&" + A.Name + ");" + cl); break; case "System.Boolean": if (OK != "") { cs.Append(OK + "0;" + cl); } cs.Append(" if (" + InVarLength + "==4)" + cl); cs.Append(" {" + cl); cs.Append(" if (strncasecmp(" + InVar + ", \"true\", 4)==0)" + cl); cs.Append(" {" + cl); if (OK != "") { cs.Append(OK + "1;" + cl); } cs.Append(" " + A.Name + " = 1;" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); cs.Append(" if (" + InVarLength + " == 5)" + cl); cs.Append(" {" + cl); cs.Append(" if (strncasecmp(" + InVar + ", \"false\", 5)==0)" + cl); cs.Append(" {" + cl); if (OK != "") { cs.Append(OK + "1;" + cl); } cs.Append(" " + A.Name + " = 0;" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); cs.Append(" if (" + InVarLength + " == 1)" + cl); cs.Append(" {" + 
cl); cs.Append(" if (memcmp(" + InVar + ", \"0\", 1) == 0)" + cl); cs.Append(" {" + cl); if (OK != "") { cs.Append(OK + "1;" + cl); } cs.Append(" " + A.Name + " = 0;" + cl); cs.Append(" }" + cl); cs.Append(" if (memcmp(" + InVar + ", \"1\", 1) == 0)" + cl); cs.Append(" {" + cl); if (OK != "") { cs.Append(OK + "1;" + cl); } cs.Append(" " + A.Name + " = 1;" + cl); cs.Append(" }" + cl); cs.Append(" }" + cl); break; case "System.Byte[]": cs.Append(A.Name + "Length = " + LibPrefix + "Base64Decode(" + InVar + ", " + InVarLength + ", &" + A.Name + ");" + cl); break; case "System.Uri": case "System.String": default: if (A.ComplexType == null) { cs.Append(" " + A.Name + "Length = " + LibPrefix + "InPlaceXmlUnEscape(" + InVar + ");" + cl); cs.Append(" " + A.Name + " = " + InVar + ";" + cl); } else { cs.Append(cl + "//ToDo: DeviceBuilder needs to be modified to include a ComplexTypeParsre here!"); } break; } return (cs.ToString()); } public static string ToCTypeFromArg(UPnPArgument A) { if (A.RelatedStateVar.ComplexType == null) { string RetVal = Static_ToCType(A.RelatedStateVar.GetNetType().ToString()) + " " + A.Name; if (Static_ToCType(A.RelatedStateVar.GetNetType().ToString()) == "unsigned char*") { RetVal += ", int " + A.Name + "Length"; } return (RetVal); } else { return ("struct " + A.RelatedStateVar.ComplexType.Name_LOCAL + "* " + A.Name); } } public static string ToCTypeFromArg_Dispatch(UPnPArgument A) { if (A.RelatedStateVar.ComplexType == null) { string RetVal = A.Name; if (Static_ToCType(A.RelatedStateVar.GetNetType().ToString()) == "unsigned char*") { RetVal += ", " + A.Name + "Length"; } return (RetVal); } else { return (A.Name); } } public static string Static_ToPrintfType(string t) { switch (t) { case "System.Byte[]": case "System.String": case "System.Uri": return "%s"; case "System.Byte": case "System.UInt16": case "System.UInt32": return "%u"; case "System.Boolean": case "System.DateTime": case "System.Char": case "System.SByte": case "System.Int16": case 
"System.Int32": return "%d"; case "System.Single": case "System.Double": return "%f"; default: return "void"; } } public string ToPrintfType(string t) { return (Static_ToPrintfType(t)); } public static string Static_ToSPrintfType(string t) { switch (t) { case "System.DateTime": case "System.Byte[]": case "System.String": case "System.Uri": return "%s"; case "System.Byte": case "System.UInt16": case "System.UInt32": return "%u"; case "System.Boolean": case "System.Char": case "System.SByte": case "System.Int16": case "System.Int32": return "%d"; case "System.Single": case "System.Double": return "%f"; default: return "void"; } } public string ToSPrintfType(string t) { return (Static_ToSPrintfType(t)); } public static string Static_ToSampleValue(string t) { switch (t) { case "System.Boolean": return "1"; case "System.Byte[]": return "\"Sample Binary\",13"; case "System.String": return "\"Sample String\""; case "System.Uri": return "\"http://opentools.homeip.net\""; case "System.Byte": return "250"; case "System.UInt16": return "250"; case "System.UInt32": return "250"; case "System.Char": case "System.SByte": return "250"; case "System.Int16": return "25000"; case "System.Int32": return "25000"; case "System.Single": case "System.Double": return "0.01"; case "System.DateTime": return "0"; default: return "NULL"; } } public string ToSampleValue(string t) { return (Static_ToSampleValue(t)); } public string ToEmptyValue(string t) { switch (t) { case "System.Byte[]": return "NULL"; case "System.String": case "System.Uri": return "\"\""; case "System.DateTime": case "System.Boolean": case "System.Byte": case "System.UInt16": case "System.UInt32": case "System.Char": case "System.SByte": case "System.Int16": case "System.Int32": case "System.Single": case "System.Double": return "0"; default: return "NULL"; } } public int FromHex(string hn) { return (int.Parse(hn.ToUpper(), System.Globalization.NumberStyles.HexNumber)); } public string ToHex(object obj) { if 
(obj.GetType().FullName == "System.UInt32") { UInt32 unumber = UInt32.Parse(obj.ToString()); return (unumber.ToString("X")); } else { Int32 number = Int32.Parse(obj.ToString()); return (number.ToString("X")); } } private int CalculateLength(string s) { int ln = s.Length; int c = 0; while (s.IndexOf("\\r", c) != -1) { c = s.IndexOf("\\r", c) + 1; ln -= 1; } c = 0; while (s.IndexOf("\\n", c) != -1) { c = s.IndexOf("\\n", c) + 1; ln -= 1; } c = 0; while (s.IndexOf("\\0", c) != -1) { c = s.IndexOf("\\0", c) + 1; ln -= 1; } c = 0; while (s.IndexOf("\\\"", c) != -1) { c = s.IndexOf("\\\"", c) + 1; ln -= 1; } return (ln); } #region Complex Type Methods public static void BuildComplexTypeDefinitionsAndHeaders_InnerCollections_Nested(CodeProcessor cs, UPnPComplexType.ItemCollection[] icList, Hashtable SequenceTable, Hashtable ChoiceTable) { int x = 0; foreach (UPnPComplexType.ItemCollection ic in icList) { if (ic.GetType() == typeof(UPnPComplexType.Sequence)) { cs.Append(" struct SEQUENCE_" + SequenceTable[ic].ToString() + " *_sequence_" + (++x).ToString() + ";" + cl); } else if (ic.GetType() == typeof(UPnPComplexType.Choice)) { cs.Append(" struct CHOICE_" + ChoiceTable[ic].ToString() + " *_choice_" + (++x).ToString() + ";" + cl); } //ToDo: Insert MaxOccurs Logic } } public static void BuildComplexTypeDefinitionsAndHeaders_InnerCollections(CodeProcessor cs, UPnPService service, UPnPComplexType.ItemCollection ic, Hashtable SequenceTable, Hashtable ChoiceTable) { if (ic.Items.Length > 0 || ic.NestedCollections.Length > 0) { if (ic.GetType() == typeof(UPnPComplexType.Sequence)) { cs.Append("struct SEQUENCE_" + SequenceTable[ic].ToString() + cl); cs.Append("{" + cl); BuildComplexTypeDefinitionsAndHeaders_FillInner(cs, service, ic.Items); BuildComplexTypeDefinitionsAndHeaders_InnerCollections_Nested(cs, ic.NestedCollections, SequenceTable, ChoiceTable); cs.Append("};" + cl); } else if (ic.GetType() == typeof(UPnPComplexType.Choice)) { cs.Append("struct CHOICE_" + 
ChoiceTable[ic].ToString() + cl); cs.Append("{" + cl); BuildComplexTypeDefinitionsAndHeaders_FillInner(cs, service, ic.Items); BuildComplexTypeDefinitionsAndHeaders_InnerCollections_Nested(cs, ic.NestedCollections, SequenceTable, ChoiceTable); cs.Append("};" + cl); } } foreach (UPnPComplexType.ItemCollection nc in ic.NestedCollections) { BuildComplexTypeDefinitionsAndHeaders_InnerCollections(cs, service, nc, SequenceTable, ChoiceTable); } } public static void BuildComplexTypeDefinitionsAndHeaders_InnerCollections_Number(UPnPComplexType CT, CodeProcessor cs, UPnPComplexType.ItemCollection ic, Hashtable SequenceTable, Hashtable ChoiceTable, ref int SequenceCounter, ref int ChoiceCounter) { if (ic.GetType() == typeof(UPnPComplexType.Sequence)) { SequenceTable[ic] = ++SequenceCounter; ((Hashtable)SequenceTable[CT])[ic] = SequenceCounter; } else if (ic.GetType() == typeof(UPnPComplexType.Choice)) { ChoiceTable[ic] = ++ChoiceCounter; ((Hashtable)ChoiceTable[CT])[ic] = ChoiceCounter; } foreach (UPnPComplexType.ItemCollection nc in ic.NestedCollections) { BuildComplexTypeDefinitionsAndHeaders_InnerCollections_Number(CT, cs, nc, SequenceTable, ChoiceTable, ref SequenceCounter, ref ChoiceCounter); } } public static void BuildComplexTypeDefinitionsAndHeaders(SortedList SL, CodeProcessor cs, Hashtable SequenceTable, Hashtable ChoiceTable, ref int SequenceCounter, ref int ChoiceCounter, string pc_methodPrefix, string pc_methodLibPrefix) { IDictionaryEnumerator en = SL.GetEnumerator(); // Build all the Inner Structs and Headers that are used by Sequences/Choices en.Reset(); while (en.MoveNext()) { UPnPService service = (UPnPService)en.Value; foreach (UPnPComplexType CT in service.GetComplexTypeList()) { SequenceTable[CT] = new Hashtable(); ChoiceTable[CT] = new Hashtable(); foreach (UPnPComplexType.GenericContainer gc in CT.Containers) { foreach (UPnPComplexType.ItemCollection ic in gc.Collections) { BuildComplexTypeDefinitionsAndHeaders_InnerCollections_Number(CT, cs, ic, 
SequenceTable, ChoiceTable, ref SequenceCounter, ref ChoiceCounter); } } } } en.Reset(); while (en.MoveNext()) { UPnPService service = (UPnPService)en.Value; foreach (UPnPComplexType CT in service.GetComplexTypeList()) { foreach (UPnPComplexType.GenericContainer gc in CT.Containers) { foreach (UPnPComplexType.ItemCollection ic in gc.Collections) { BuildComplexTypeDefinitionsAndHeaders_InnerCollections(cs, service, ic, SequenceTable, ChoiceTable); } } } } // Build the Main Structs and Headers en.Reset(); while (en.MoveNext()) { UPnPService service = (UPnPService)en.Value; foreach (UPnPComplexType CT in service.GetComplexTypeList()) { int idx = 0; cs.Append("struct " + CT.Name_LOCAL + cl); cs.Append("{" + cl); foreach (UPnPComplexType.GenericContainer gc in CT.Containers) { BuildComplexTypeDefinitionsAndHeaders_Containers(ref idx, cs, gc, service, SequenceTable, ChoiceTable); } cs.Append("};" + cl); } } BuildComplexTypeParser_Header(cs, SL, pc_methodPrefix, pc_methodLibPrefix); CPEmbeddedCGenerator.BuildComplexTypeSerializer_Header(cs, SL, pc_methodPrefix); } public static void BuildComplexTypeDefinitionsAndHeaders_Containers(ref int idx, CodeProcessor cs, UPnPComplexType.GenericContainer gc, UPnPService s, Hashtable SequenceTable, Hashtable ChoiceTable) { foreach (UPnPComplexType.ItemCollection ic in gc.Collections) { BuildComplexTypeDefinitionsAndHeaders_Collections(ref idx, cs, ic, s, SequenceTable, ChoiceTable); } } public static void BuildComplexTypeDefinitionsAndHeaders_Collections(ref int idx, CodeProcessor cs, UPnPComplexType.ItemCollection ic, UPnPService service, Hashtable SequenceTable, Hashtable ChoiceTable) { if (ic.GetType() == typeof(UPnPComplexType.Choice)) { ++idx; UPnPComplexType.Choice ch = (UPnPComplexType.Choice)ic; cs.Append(" struct CHOICE_" + ChoiceTable[ic].ToString() + " *_choice_" + idx.ToString() + ";" + cl); if (ch.MaxOccurs != "" && (ch.MaxOccurs.ToLower() == "unbounded" || int.Parse(ch.MaxOccurs) > 1)) { cs.Append(" int _choice_" + 
idx.ToString() + "_Length;" + cl); } } else if (ic.GetType() == typeof(UPnPComplexType.Sequence)) { ++idx; UPnPComplexType.Sequence sequ = (UPnPComplexType.Sequence)ic; cs.Append(" struct SEQUENCE_" + SequenceTable[ic].ToString() + " *_sequence_" + idx.ToString() + ";" + cl); if (sequ.MaxOccurs != "" && (sequ.MaxOccurs.ToLower() == "unbounded" || int.Parse(sequ.MaxOccurs) > 1)) { cs.Append(" int _sequence_" + idx.ToString() + "_Length;" + cl); } } else { BuildComplexTypeDefinitionsAndHeaders_FillInner(cs, service, ic.Items); } } public static void BuildComplexTypeDefinitionsAndHeaders_FillInner(CodeProcessor cs, UPnPService service, UPnPComplexType.ContentData[] Items) { string varType = ""; foreach (UPnPComplexType.ContentData cd in Items) { switch (cd.TypeNS) { case "http://www.w3.org/2001/XMLSchema": // XSD Simple Types switch (cd.Type) { case "unsignedByte": varType = "unsigned byte"; break; case "byte": varType = "byte"; break; case "unsignedInt": varType = "unsigned int"; break; case "unsignedShort": varType = "unsigned short"; break; case "unsignedLong": varType = "unsigned long"; break; case "boolean": case "int": case "integer": case "positiveInteger": case "negativeInteger": case "nonNegativeInteger": case "nonPositiveInteger": varType = "int"; break; case "long": varType = "long"; break; case "short": varType = "short"; break; case "decimal": case "float": varType = "single"; break; case "double": varType = "double"; break; default: varType = "char*"; break; } if (varType != "char*" && cd.MinOccurs == "0") { varType = varType + "*"; } break; default: // User Defined Types UPnPComplexType temp = service.GetComplexType(cd.TypeNS, cd.Type); if (temp != null) { varType = "struct " + cd.Type + "*"; } else { // Unknown type varType = "char*"; } break; } if (varType != "") { cs.Append(" " + varType + " " + cd.Name + ";" + cl); } } } #endregion } }
dlna/DeveloperToolsForUPnP
Tools/DeviceBuilder/EmbeddedCGenerator.cs
C#
apache-2.0
274,605
/*
 *
 *  *  Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
 *  *
 *  *  Licensed under the Apache License, Version 2.0 (the "License");
 *  *  you may not use this file except in compliance with the License.
 *  *  You may obtain a copy of the License at
 *  *
 *  *       http://www.apache.org/licenses/LICENSE-2.0
 *  *
 *  *  Unless required by applicable law or agreed to in writing, software
 *  *  distributed under the License is distributed on an "AS IS" BASIS,
 *  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  *  See the License for the specific language governing permissions and
 *  *  limitations under the License.
 *  *
 *  * For more information: http://www.orientechnologies.com
 *
 */
package com.orientechnologies.orient.core.sql.operator;

import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.ODatabaseDocumentInternal;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.OCompositeIndexDefinition;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexCursor;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexDefinitionMultiValue;
import com.orientechnologies.orient.core.index.OIndexInternal;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocumentHelper;
import com.orientechnologies.orient.core.serialization.serializer.record.binary.OBinaryField;
import com.orientechnologies.orient.core.serialization.serializer.record.binary.ORecordSerializerBinary;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterCondition;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterItemField;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterItemParameter;

import java.util.List;

/**
 * MINOR ("less than", {@code <}) SQL comparison operator.
 *
 * <p>Extends {@code OQueryOperatorEqualityNotNulls}; by that base class's naming, null
 * operands are presumably filtered out before {@link #evaluateExpression} is invoked —
 * note this method dereferences {@code iLeft} without a null check (TODO confirm against
 * the base class contract).
 *
 * @author Luca Garulli
 */
public class OQueryOperatorMinor extends OQueryOperatorEqualityNotNulls {

  // Cached capability flag: whether the serializer of the database bound to the creating
  // thread supports direct binary-field evaluation (see isSupportingBinaryEvaluate()).
  private boolean binaryEvaluate = false;

  /**
   * Registers the operator with keyword {@code "<"} and priority {@code 5}, and probes the
   * thread-local database (if any is defined at construction time) for binary-evaluate
   * support.
   */
  public OQueryOperatorMinor() {
    super("<", 5, false);
    ODatabaseDocumentInternal db = ODatabaseRecordThreadLocal.INSTANCE.getIfDefined();
    if (db != null)
      // No database in the thread-local leaves binaryEvaluate at its default (false).
      binaryEvaluate = db.getSerializer().getSupportBinaryEvaluate();
  }

  /**
   * Evaluates {@code iLeft < iRight}.
   *
   * <p>The right operand is first converted to the left operand's class via
   * {@link OType#convert}; if the conversion yields {@code null} (incompatible types) the
   * comparison is treated as {@code false} rather than raising an error.
   */
  @Override
  @SuppressWarnings("unchecked")
  protected boolean evaluateExpression(final OIdentifiable iRecord, final OSQLFilterCondition iCondition, final Object iLeft,
      final Object iRight, OCommandContext iContext) {
    final Object right = OType.convert(iRight, iLeft.getClass());
    if (right == null)
      return false;
    return ((Comparable<Object>) iLeft).compareTo(right) < 0;
  }

  /**
   * An index can back this operator only when both operands are present; otherwise no
   * index is usable.
   */
  @Override
  public OIndexReuseType getIndexReuseType(final Object iLeft, final Object iRight) {
    if (iRight == null || iLeft == null)
      return OIndexReuseType.NO_INDEX;
    return OIndexReuseType.INDEX_METHOD;
  }

  /**
   * Builds an index cursor answering a {@code <} predicate.
   *
   * @return a cursor over the matching entries, or {@code null} when the index cannot
   *         serve this query (no equality-operator support, no range support, or a key
   *         that could not be built from {@code keyParams})
   */
  @Override
  public OIndexCursor executeIndexQuery(OCommandContext iContext, OIndex<?> index, List<Object> keyParams, boolean ascSortOrder) {
    final OIndexDefinition indexDefinition = index.getDefinition();

    final OIndexInternal<?> internalIndex = index.getInternal();
    // Both capabilities are required: equality handling for the composite-prefix case and
    // range traversal for the "minor" scan itself.
    if (!internalIndex.canBeUsedInEqualityOperators() || !internalIndex.hasRangeQuerySupport())
      return null;

    final OIndexCursor cursor;
    if (indexDefinition.getParamCount() == 1) {
      // Single-field index: scan everything strictly below the key.
      final Object key;
      if (indexDefinition instanceof OIndexDefinitionMultiValue)
        key = ((OIndexDefinitionMultiValue) indexDefinition).createSingleValue(keyParams.get(0));
      else
        key = indexDefinition.createValue(keyParams);

      if (key == null)
        return null;

      // Second argument false: the boundary key itself is excluded (strict "<") —
      // presumed from the operator semantics; confirm against iterateEntriesMinor docs.
      cursor = index.iterateEntriesMinor(key, false, ascSortOrder);
    } else {
      // if we have situation like "field1 = 1 AND field2 < 2"
      // then we fetch collection which left included boundary is the smallest composite key in the
      // index that contains key with value field1=1 and which right not included boundary
      // is the biggest composite key in the index that contains key with values field1=1 and field2=2.
      final OCompositeIndexDefinition compositeIndexDefinition = (OCompositeIndexDefinition) indexDefinition;

      // keyOne: composite key from all equality-matched leading fields (all params but the last).
      final Object keyOne = compositeIndexDefinition.createSingleValue(keyParams.subList(0, keyParams.size() - 1));

      if (keyOne == null)
        return null;

      // keyTwo: composite key including the "<" bound as its last component.
      final Object keyTwo = compositeIndexDefinition.createSingleValue(keyParams);

      if (keyTwo == null)
        return null;

      // Left boundary inclusive (equality prefix), right boundary exclusive (strict "<").
      cursor = index.iterateEntriesBetween(keyOne, true, keyTwo, false, ascSortOrder);
    }

    updateProfiler(iContext, index, keyParams, indexDefinition);
    return cursor;
  }

  /** A {@code <} predicate imposes no lower RID bound, so this always returns null. */
  @Override
  public ORID getBeginRidRange(Object iLeft, Object iRight) {
    return null;
  }

  /**
   * When the condition is {@code @rid < <value>}, the right-hand RID bounds the scan from
   * above. The RID may be given directly or wrapped in a query parameter.
   *
   * @return the bounding {@link ORID}, or {@code null} when the condition is not a RID
   *         comparison
   */
  @Override
  public ORID getEndRidRange(final Object iLeft, final Object iRight) {
    if (iLeft instanceof OSQLFilterItemField && ODocumentHelper.ATTRIBUTE_RID.equals(((OSQLFilterItemField) iLeft).getRoot()))
      if (iRight instanceof ORID)
        return (ORID) iRight;
      else {
        if (iRight instanceof OSQLFilterItemParameter && ((OSQLFilterItemParameter) iRight).getValue(null, null, null) instanceof ORID)
          return (ORID) ((OSQLFilterItemParameter) iRight).getValue(null, null, null);
      }

    return null;
  }

  /**
   * Binary fast-path: compares the two serialized fields with the current serializer's
   * comparator without deserializing them, applying strict "less than".
   */
  @Override
  public boolean evaluate(final OBinaryField iFirstField, final OBinaryField iSecondField, OCommandContext iContext) {
    return ORecordSerializerBinary.INSTANCE.getCurrentSerializer().getComparator().compare(iFirstField, iSecondField) < 0;
  }

  /** @return the capability flag cached from the thread-local database at construction */
  @Override
  public boolean isSupportingBinaryEvaluate() {
    return binaryEvaluate;
  }
}
wouterv/orientdb
core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMinor.java
Java
apache-2.0
6,113
/* * Copyright (c) 2005-2010 Grameen Foundation USA * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * See also http://www.apache.org/licenses/LICENSE-2.0.html for an * explanation of the license and how it is applied. */ package org.mifos.accounts.loan.business; import org.mifos.accounts.business.AccountActionDateEntity; import org.mifos.accounts.business.AccountBO; import org.mifos.accounts.business.AccountFeesActionDetailEntity; import org.mifos.accounts.business.AccountPaymentEntity; import org.mifos.accounts.loan.persistance.LoanPersistence; import org.mifos.accounts.loan.schedule.domain.Installment; import org.mifos.accounts.loan.util.helpers.LoanConstants; import org.mifos.accounts.loan.util.helpers.RepaymentScheduleInstallment; import org.mifos.accounts.util.helpers.*; import org.mifos.application.master.business.MifosCurrency; import org.mifos.customers.business.CustomerBO; import org.mifos.customers.personnel.business.PersonnelBO; import org.mifos.framework.util.DateTimeService; import org.mifos.framework.util.helpers.Money; import org.mifos.platform.util.CollectionUtils; import java.util.*; import static org.mifos.framework.util.helpers.NumberUtils.min; public class LoanScheduleEntity extends AccountActionDateEntity { private Money principal; private Money interest; // TODO: Instance variable "penalty" appears to be unused. Verify and // remove. 
private Money penalty; private Money extraInterest; private Money miscFee; private Money miscPenalty; private Money principalPaid; private Money interestPaid; private Money penaltyPaid; private Money extraInterestPaid; private Money miscFeePaid; private Money miscPenaltyPaid; private Set<AccountFeesActionDetailEntity> accountFeesActionDetails = new HashSet<AccountFeesActionDetailEntity>(); private int versionNo; private PaymentAllocation paymentAllocation; protected LoanScheduleEntity() { super(null, null, null, null, null); } public LoanScheduleEntity(AccountBO account, CustomerBO customer, Short installmentId, java.sql.Date actionDate, PaymentStatus paymentStatus, Money principal, Money interest) { super(account, customer, installmentId, actionDate, paymentStatus); this.principal = principal; this.interest = interest; reset(account.getCurrency()); } private void reset(MifosCurrency currency) { this.penalty = new Money(currency); this.extraInterest = new Money(currency); this.miscFee = new Money(currency); this.miscPenalty = new Money(currency); this.principalPaid = new Money(currency); this.interestPaid = new Money(currency); this.penaltyPaid = new Money(currency); this.extraInterestPaid = new Money(currency); this.miscFeePaid = new Money(currency); this.miscPenaltyPaid = new Money(currency); } public Money getInterest() { return interest; } public void setInterest(Money interest) { this.interest = interest; } public Money getInterestPaid() { return interestPaid; } void setInterestPaid(Money interestPaid) { this.interestPaid = interestPaid; } void setPenalty(Money penalty) { this.penalty = penalty; } public Money getPenaltyPaid() { return penaltyPaid; } void setPenaltyPaid(Money penaltyPaid) { this.penaltyPaid = penaltyPaid; } public Money getPrincipal() { return principal; } public void setPrincipal(Money principal) { this.principal = principal; } public Money getPrincipalPaid() { return principalPaid; } void setPrincipalPaid(Money principalPaid) { 
this.principalPaid = principalPaid; } public Money getPrincipalDue() { return principal.subtract(principalPaid); } public Money getInterestDue() { return interest.subtract(interestPaid); } public Money getPenalty() { return penalty; } public Set<AccountFeesActionDetailEntity> getAccountFeesActionDetails() { return accountFeesActionDetails; } public void addAccountFeesAction(AccountFeesActionDetailEntity accountFeesAction) { accountFeesActionDetails.add(accountFeesAction); } public Money getMiscFee() { return miscFee; } void setMiscFee(Money miscFee) { this.miscFee = miscFee; } public Money getMiscFeePaid() { return miscFeePaid; } void setMiscFeePaid(Money miscFeePaid) { this.miscFeePaid = miscFeePaid; } public Money getMiscPenalty() { return miscPenalty; } void setMiscPenalty(Money miscPenalty) { this.miscPenalty = miscPenalty; } public Money getMiscPenaltyPaid() { return miscPenaltyPaid; } public Money getMiscPenaltyDue() { return miscPenalty.subtract(miscPenaltyPaid); } void setMiscPenaltyPaid(Money miscPenaltyPaid) { this.miscPenaltyPaid = miscPenaltyPaid; } public Money getPenaltyDue() { return (penalty.add(miscPenalty)).subtract(penaltyPaid.add(miscPenaltyPaid)); } public Money getTotalDue() { return principal.subtract(principalPaid).add(getEffectiveInterestDue()).add(getPenaltyDue()).add(getMiscFeeDue()); } public Money getTotalDueWithoutPrincipal() { return getInterestDue().add(getPenaltyDue()).add(getMiscFeeDue()); } public Money getTotalPenalty() { return penalty.add(miscPenalty); } public Money getTotalDueWithFees() { return getTotalDue().add(getTotalFeesDue()); } public Money getTotalScheduleAmountWithFees() { return principal.add( interest.add(penalty).add(getTotalScheduledFeeAmountWithMiscFee()).add(miscPenalty)); } public OverDueAmounts getDueAmnts() { OverDueAmounts overDueAmounts = new OverDueAmounts(); overDueAmounts.setFeesOverdue(getTotalFeesDue().add(getMiscFeeDue())); overDueAmounts.setInterestOverdue(getInterestDue()); 
overDueAmounts.setPenaltyOverdue(getPenaltyDue()); overDueAmounts.setPrincipalOverDue(getPrincipalDue()); overDueAmounts.setTotalPrincipalPaid(getPrincipalPaid()); return overDueAmounts; } void makeEarlyRepaymentEnteries(String payFullOrPartial) { if (payFullOrPartial.equals(LoanConstants.PAY_FEES_PENALTY_INTEREST)) { setPrincipalPaid(getPrincipalPaid().add(getPrincipalDue())); setInterestPaid(getInterestPaid().add(getInterestDue())); setPenaltyPaid(getPenaltyPaid().add(getPenaltyDue())); setMiscFeePaid(getMiscFeePaid().add(getMiscFee())); setMiscPenaltyPaid(getMiscPenaltyPaid().add(getMiscPenalty())); makeRepaymentEntries(payFullOrPartial); } else if (payFullOrPartial.equals(LoanConstants.PAY_FEES_PENALTY)) { setPrincipalPaid(getPrincipalPaid().add(getPrincipalDue())); setPenaltyPaid(getPenaltyPaid().add(getPenaltyDue())); setMiscFeePaid(getMiscFeePaid().add(getMiscFee())); setMiscPenaltyPaid(getMiscPenaltyPaid().add(getMiscPenalty())); makeRepaymentEntries(payFullOrPartial); } else { setPrincipalPaid(getPrincipalPaid().add(getPrincipalDue())); makeRepaymentEntries(payFullOrPartial); } } private void makeRepaymentEntries(String payFullOrPartial) { setPaymentStatus(PaymentStatus.PAID); setPaymentDate(new DateTimeService().getCurrentJavaSqlDate()); Set<AccountFeesActionDetailEntity> accountFeesActionDetailSet = this.getAccountFeesActionDetails(); for (AccountFeesActionDetailEntity accountFeesActionDetailEntity : accountFeesActionDetailSet) { ((LoanFeeScheduleEntity) accountFeesActionDetailEntity).makeRepaymentEnteries(payFullOrPartial); } } public void updatePaymentDetailsForAdjustment(LoanTrxnDetailEntity loanReverseTrxn) { CalculatedInterestOnPayment interestOnPayment = loanReverseTrxn.getCalculatedInterestOnPayment(); Money overdueInterestPaid = calculateExtraInterestPaid(interestOnPayment); principalPaid = principalPaid.add(loanReverseTrxn.getPrincipalAmount()); interest = calculateAdjustedInterest(interestOnPayment, overdueInterestPaid, loanReverseTrxn); 
interestPaid = interestPaid.add(loanReverseTrxn.getInterestAmount()).add(overdueInterestPaid); penaltyPaid = penaltyPaid.add(loanReverseTrxn.getPenaltyAmount()); miscPenaltyPaid = miscPenaltyPaid.add(loanReverseTrxn.getMiscPenaltyAmount()); miscFeePaid = miscFeePaid.add(loanReverseTrxn.getMiscFeeAmount()); extraInterestPaid = extraInterestPaid.subtract(overdueInterestPaid); } private Money calculateExtraInterestPaid(CalculatedInterestOnPayment interestOnPayment) { return interestOnPayment == null ? Money.zero(getCurrency()) : interestOnPayment.getExtraInterestPaid(); } private Money calculateAdjustedInterest(CalculatedInterestOnPayment interestOnPayment, Money overdueInterestPaid, LoanTrxnDetailEntity loanReverseTrxn) { if (((LoanBO)account).isDecliningBalanceInterestRecalculation()) { return interestOnPayment.getOriginalInterest().subtract(loanReverseTrxn.getInterestAmount()).subtract(overdueInterestPaid. add(interestOnPayment.getInterestDueTillPaid())); } return interest; } Money waiveFeeCharges() { Money chargeWaived = new Money(getCurrency()); chargeWaived = chargeWaived.add(getMiscFeeDue()); setMiscFee(getMiscFeePaid()); for (AccountFeesActionDetailEntity accountFeesActionDetailEntity : getAccountFeesActionDetails()) { chargeWaived = chargeWaived.add(((LoanFeeScheduleEntity) accountFeesActionDetailEntity).waiveCharges()); } return chargeWaived; } void removeAccountFeesActionDetailEntity(AccountFeesActionDetailEntity accountFeesActionDetailEntity) { accountFeesActionDetails.remove(accountFeesActionDetailEntity); } public Money getMiscFeeDue() { return getMiscFee().subtract(getMiscFeePaid()); } public Money getTotalFeesDue() { Money totalFees = new Money(getCurrency()); for (AccountFeesActionDetailEntity obj : accountFeesActionDetails) { totalFees = totalFees.add(obj.getFeeDue()); } return totalFees; } public Money getTotalFeeAmountPaidWithMiscFee() { Money totalFees = new Money(getCurrency()); for (AccountFeesActionDetailEntity obj : accountFeesActionDetails) { 
totalFees = totalFees.add(obj.getFeeAmountPaid()); } totalFees = totalFees.add(getMiscFeePaid()); return totalFees; } public Money getTotalScheduledFeeAmountWithMiscFee() { Money totalFees = new Money(getCurrency()); for (AccountFeesActionDetailEntity obj : accountFeesActionDetails) { totalFees = totalFees.add(obj.getFeeAmount()); } totalFees = totalFees.add(getMiscFee()); return totalFees; } public Money getTotalFeesDueWithMiscFee() { return miscFee.add(getTotalFeesDue()); } public Money getTotalFees() { Money totalFees = new Money(getCurrency()); for (AccountFeesActionDetailEntity obj : accountFeesActionDetails) { totalFees = totalFees.add(obj.getFeeAmount()); } return totalFees; } public Money getTotalFeesPaid() { Money totalFees = new Money(getCurrency()); for (AccountFeesActionDetailEntity obj : accountFeesActionDetails) { totalFees = totalFees.add(obj.getFeeAmountPaid()); } return totalFees; } public Money getTotalFeeDueWithMiscFeeDue() { return getMiscFeeDue().add(getTotalFeesDue()); } public Money getTotalPaymentDue() { return getTotalDue().add(getTotalFeesDue()); } Money removeFees(Short feeId) { Money feeAmount = null; AccountFeesActionDetailEntity objectToRemove = null; Set<AccountFeesActionDetailEntity> accountFeesActionDetailSet = this.getAccountFeesActionDetails(); for (AccountFeesActionDetailEntity accountFeesActionDetailEntity : accountFeesActionDetailSet) { if (accountFeesActionDetailEntity.getFee().getFeeId().equals(feeId) && (accountFeesActionDetailEntity.getFeeAmountPaid() == null || accountFeesActionDetailEntity .getFeeAmountPaid().isZero())) { objectToRemove = accountFeesActionDetailEntity; feeAmount = objectToRemove.getFeeAmount(); break; } else if (accountFeesActionDetailEntity.getFee().getFeeId().equals(feeId) && accountFeesActionDetailEntity.getFeeAmountPaid() != null && accountFeesActionDetailEntity.getFeeAmountPaid().isGreaterThanZero()) { feeAmount = accountFeesActionDetailEntity.getFeeAmount().subtract( 
accountFeesActionDetailEntity.getFeeAmountPaid()); ((LoanFeeScheduleEntity) accountFeesActionDetailEntity).setFeeAmount(accountFeesActionDetailEntity .getFeeAmountPaid()); break; } } if (objectToRemove != null) { this.removeAccountFeesActionDetailEntity(objectToRemove); } return feeAmount; } public AccountFeesActionDetailEntity getAccountFeesAction(Short feeId) { for (AccountFeesActionDetailEntity accountFeesAction : getAccountFeesActionDetails()) { if (accountFeesAction.getFee().getFeeId().equals(feeId)) { return accountFeesAction; } } return null; } Money waivePenaltyCharges() { Money chargeWaived = new Money(getCurrency()); chargeWaived = chargeWaived.add(getMiscPenaltyDue()); setMiscPenalty(getMiscPenaltyPaid()); return chargeWaived; } void applyMiscCharge(Short chargeType, Money charge) { if (chargeType.equals(Short.valueOf(AccountConstants.MISC_FEES))) { setMiscFee(getMiscFee().add(charge)); } else if (chargeType.equals(Short.valueOf(AccountConstants.MISC_PENALTY))) { setMiscPenalty(getMiscPenalty().add(charge)); } } public boolean isPrincipalZero() { return principal.isZero(); } public boolean isFeeAlreadyAttatched(Short feeId) { for (AccountFeesActionDetailEntity accountFeesActionDetailEntity : this.getAccountFeesActionDetails()) { if (accountFeesActionDetailEntity.getFee().getFeeId().equals(feeId)) { return true; } } return false; } public boolean isPaymentAppliedToAccountFees() { Money feesPaid = new Money(getCurrency(),"0.0"); for (AccountFeesActionDetailEntity accountFeesActionDetail : getAccountFeesActionDetails()) { feesPaid = feesPaid.add(accountFeesActionDetail.getFeeAmountPaid()); } return feesPaid.isNonZero(); } public boolean isPaymentApplied() { return getPrincipalPaid().isNonZero() || getEffectiveInterestPaid().isNonZero() || getMiscFeePaid().isNonZero() || getMiscPenaltyPaid().isNonZero() || isPaymentAppliedToAccountFees(); } public void setVersionNo(int versionNo) { this.versionNo = versionNo; } public int getVersionNo() { return versionNo; } 
public List<AccountFeesActionDetailEntity> getAccountFeesActionDetailsSortedByFeeId() { List<AccountFeesActionDetailEntity> sortedList = new ArrayList<AccountFeesActionDetailEntity>(); sortedList.addAll(this.getAccountFeesActionDetails()); Collections.sort(sortedList); return sortedList; } public RepaymentScheduleInstallment toDto(Locale userLocale) { return new RepaymentScheduleInstallment(this.installmentId, this.actionDate, this.principal, this.interest, this.getTotalFeesDue(), this.miscFee, this.miscPenalty, userLocale); } public boolean isSameAs(AccountActionDateEntity accountActionDateEntity) { return getInstallmentId().equals(accountActionDateEntity.getInstallmentId()); } public Money getExtraInterest() { return extraInterest == null ? Money.zero(getCurrency()) : new Money(getCurrency(), extraInterest.getAmount()); } public void setExtraInterest(Money extraInterest) { this.extraInterest = extraInterest; } public Money getExtraInterestPaid() { return extraInterestPaid == null ? Money.zero(getCurrency()) : new Money(getCurrency(), extraInterestPaid.getAmount()); } public void setExtraInterestPaid(Money extraInterestPaid) { this.extraInterestPaid = extraInterestPaid; } public Money getExtraInterestDue() { return getExtraInterest().subtract(getExtraInterestPaid()); } public Money getEffectiveInterestPaid() { return interestPaid.add(getExtraInterestPaid()); } public Money getEffectiveInterestDue() { return getInterestDue().add(getExtraInterestDue()); } private Money payMiscPenalty(final Money amount) { Money payable = min(amount, getMiscPenaltyDue()); allocateMiscPenalty(payable); return amount.subtract(payable); } private void allocateMiscPenalty(Money payable) { paymentAllocation.allocateForMiscPenalty(payable); miscPenaltyPaid = miscPenaltyPaid.add(payable); } private Money payPenalty(final Money amount) { Money payable = min(amount, (getPenalty().subtract(getPenaltyPaid()))); allocatePenalty(payable); return amount.subtract(payable); } private void 
allocatePenalty(Money payable) { paymentAllocation.allocateForPenalty(payable); penaltyPaid = penaltyPaid.add(payable); } private Money payMiscFees(final Money amount) { Money payable = min(amount, getMiscFeeDue()); allocateMiscFees(payable); return amount.subtract(payable); } private void allocateMiscFees(Money payable) { paymentAllocation.allocateForMiscFees(payable); miscFeePaid = miscFeePaid.add(payable); } private void allocateExtraInterest(Money payable) { paymentAllocation.allocateForExtraInterest(payable); extraInterestPaid = extraInterestPaid.add(payable); } private Money payFees(final Money amount) { Money balance = amount; for (AccountFeesActionDetailEntity accountFeesActionDetailEntity : getAccountFeesActionDetails()) { balance = accountFeesActionDetailEntity.payFee(balance); Integer feeId = accountFeesActionDetailEntity.getAccountFeesActionDetailId(); Money feeAllocated = accountFeesActionDetailEntity.getFeeAllocated(); paymentAllocation.allocateForFee(feeId, feeAllocated); } return balance; } private Money payInterest(final Money amount) { Money payable = min(amount, getInterestDue()); allocateInterest(payable); return amount.subtract(payable); } private void allocateInterest(Money payable) { paymentAllocation.allocateForInterest(payable); interestPaid = interestPaid.add(payable); } private Money payPrincipal(final Money amount) { Money payable = min(amount, getPrincipalDue()); allocatePrincipal(payable); return amount.subtract(payable); } private void allocatePrincipal(Money payable) { paymentAllocation.allocateForPrincipal(payable); principalPaid = principalPaid.add(payable); } public Money payComponents(Money paymentAmount, Date paymentDate) { initPaymentAllocation(paymentAmount.getCurrency()); Money balanceAmount = paymentAmount; balanceAmount = payMiscPenalty(balanceAmount); balanceAmount = payPenalty(balanceAmount); balanceAmount = payMiscFees(balanceAmount); balanceAmount = payFees(balanceAmount); balanceAmount = payInterest(balanceAmount); 
balanceAmount = payPrincipal(balanceAmount); recordPayment(paymentDate); return balanceAmount; } public void payComponents(Installment installment, MifosCurrency currency, Date paymentDate) { initPaymentAllocation(currency); allocatePrincipal(new Money(currency, installment.getCurrentPrincipalPaid())); allocateInterest(new Money(currency, installment.getCurrentInterestPaid())); allocateExtraInterest(new Money(currency, installment.getCurrentExtraInterestPaid())); payFees(new Money(currency, installment.getCurrentFeesPaid())); allocateMiscFees(new Money(currency, installment.getCurrentMiscFeesPaid())); allocatePenalty(new Money(currency, installment.getCurrentPenaltyPaid())); allocateMiscPenalty(new Money(currency, installment.getCurrentMiscPenaltyPaid())); updateInterest(installment, currency); setExtraInterest(new Money(currency, installment.getExtraInterest())); recordPayment(paymentDate); } private void updateInterest(Installment installment, MifosCurrency currency) { if (installment.hasEffectiveInterest()) { setInterest(new Money(currency, installment.getEffectiveInterest().add(interestPaid.getAmount()))); } else { setInterest(new Money(currency, installment.getInterest())); } } private void initPaymentAllocation(MifosCurrency currency) { paymentAllocation = new PaymentAllocation(currency); } public PaymentAllocation getPaymentAllocation() { return paymentAllocation; } void recordForAdjustment() { setPaymentStatus(PaymentStatus.UNPAID); setPaymentDate(null); } void recordPayment(Date paymentDate) { setPaymentDate(new java.sql.Date(paymentDate.getTime())); setPaymentStatus(getTotalDueWithFees().isTinyAmount() ? 
PaymentStatus.PAID : PaymentStatus.UNPAID); } public double getPrincipalAsDouble() { return principal.getAmount().doubleValue(); } public double getInterestAsDouble() { return interest.getAmount().doubleValue(); } public double getPenaltyAsDouble() { return penalty.getAmount().doubleValue(); } public double getMiscFeeAsDouble() { return miscFee.getAmount().doubleValue(); } public double getMiscPenaltyAsDouble() { return miscPenalty.getAmount().doubleValue(); } public double getTotalFeesAsDouble() { return getTotalFees().getAmount().doubleValue(); } public double getPrincipalPaidAsDouble() { return principalPaid.getAmount().doubleValue(); } public double getInterestPaidAsDouble() { return interestPaid.getAmount().doubleValue(); } public double getPenaltyPaidAsDouble() { return penaltyPaid.getAmount().doubleValue(); } public double getMiscFeePaidAsDouble() { return miscFeePaid.getAmount().doubleValue(); } public double getMiscPenaltyPaidAsDouble() { return miscPenaltyPaid.getAmount().doubleValue(); } public double getTotalFeesPaidAsDouble() { return getTotalFeesPaid().getAmount().doubleValue(); } public double getPrincipalDueAsDouble() { return getPrincipalDue().getAmount().doubleValue(); } public double getInterestDueAsDouble() { return getInterestDue().getAmount().doubleValue(); } public double getPenaltyDueAsDouble() { return getPenaltyDue().getAmount().doubleValue(); } public double getMiscFeesDueAsDouble() { return getMiscFeeDue().getAmount().doubleValue(); } public double getMiscPenaltyDueAsDouble() { return getMiscPenaltyDue().getAmount().doubleValue(); } public double getTotalFeesDueAsDouble() { return getTotalFeesDue().getAmount().doubleValue(); } public LoanTrxnDetailEntity updateSummaryAndPerformanceHistory(AccountPaymentEntity accountPayment, PersonnelBO personnel, Date transactionDate) { LoanBO loanBO = (LoanBO) account; LoanPersistence loanPersistence = loanBO.getLoanPersistence(); LoanTrxnDetailEntity loanTrxnDetailEntity = 
recordTransaction(accountPayment, personnel, transactionDate, loanPersistence); loanBO.recordSummaryAndPerfHistory(isPaid(), paymentAllocation); return loanTrxnDetailEntity; } private LoanTrxnDetailEntity recordTransaction(AccountPaymentEntity accountPayment, PersonnelBO personnel, Date transactionDate, LoanPersistence loanPersistence) { // TODO: Avoid passing the persistence instance in the constructor for reference data lookup LoanTrxnDetailEntity loanTrxnDetailEntity = new LoanTrxnDetailEntity(accountPayment, this, personnel, transactionDate, AccountActionTypes.LOAN_REPAYMENT, AccountConstants.PAYMENT_RCVD, loanPersistence); accountPayment.addAccountTrxn(loanTrxnDetailEntity); return loanTrxnDetailEntity; } public Money applyPayment(AccountPaymentEntity accountPaymentEntity, Money balance, PersonnelBO personnel, Date transactionDate) { if (isNotPaid() && balance.isGreaterThanZero()) { balance = payComponents(balance, transactionDate); updateSummaryAndPerformanceHistory(accountPaymentEntity, personnel, transactionDate); } return balance; } boolean hasFees() { return CollectionUtils.isNotEmpty(accountFeesActionDetails); } public void setPaymentAllocation(PaymentAllocation paymentAllocation) { this.paymentAllocation = paymentAllocation; } double getExtraInterestPaidAsDouble() { return getExtraInterestPaid().getAmount().doubleValue(); } }
vorburger/mifos-head
application/src/main/java/org/mifos/accounts/loan/business/LoanScheduleEntity.java
Java
apache-2.0
26,882
<?php
/**
 * Japanese (ja) message translations for the Notes module's Etherpad
 * configuration form (forms/NotesConfigureForm).
 *
 * Keys are the English source strings used by the form; values are their
 * Japanese translations. Untranslated strings fall back to the source text.
 */
return array (
    'Etherpad API Key' => 'EtherpadのAPIキー',
    'URL to Etherpad' => 'EtherpadへのURL',
);
vongalpha/humhub-modules-notes
messages/ja/forms_NotesConfigureForm.php
PHP
apache-2.0
116
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.mail;

import java.util.List;

import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
import org.jvnet.mock_javamail.Mailbox;

/**
 * Verifies that a message header holding a collection (here a String array)
 * survives the round trip through the mail component: it is attached to the
 * outgoing mail on the SMTP producer side and read back as a
 * {@link java.util.List} by the POP3 consumer.
 */
public class MailCollectionHeaderTest extends CamelTestSupport {

    @Test
    public void testMailHeaderWithCollection() throws Exception {
        // start from an empty in-memory mailbox so earlier tests cannot interfere
        Mailbox.clearAll();

        String[] beerBrands = new String[] {"Carlsberg", "Heineken"};
        template.sendBodyAndHeader("direct:a", "Hello World", "beers", beerBrands);

        MockEndpoint resultEndpoint = getMockEndpoint("mock:result");
        resultEndpoint.expectedMessageCount(1);
        resultEndpoint.expectedBodiesReceived("Hello World");
        resultEndpoint.message(0).header("beers").isNotNull();
        resultEndpoint.assertIsSatisfied();

        // the array header is expected to arrive as a List carrying the same elements
        Object header = resultEndpoint.getReceivedExchanges().get(0).getIn().getHeader("beers");
        assertNotNull(header);

        List<?> beers = assertIsInstanceOf(List.class, header);
        assertEquals("Carlsberg", beers.get(0));
        assertEquals("Heineken", beers.get(1));
    }

    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            public void configure() throws Exception {
                // producer side: deliver messages to the local SMTP mailbox of user james
                from("direct:a").to("smtp://localhost?username=james@localhost");

                // consumer side: poll the same mailbox via POP3 and forward to the mock
                from("pop3://localhost?username=james&password=secret&consumer.initialDelay=100&consumer.delay=100").to("mock:result");
            }
        };
    }
}
kevinearls/camel
components/camel-mail/src/test/java/org/apache/camel/component/mail/MailCollectionHeaderTest.java
Java
apache-2.0
2,329
/*******************************************************************************
 * Copyright (c) 2015-2018 Skymind, Inc.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/

package org.deeplearning4j.text.documentiterator;

import lombok.NonNull;
import org.deeplearning4j.text.documentiterator.interoperability.DocumentIteratorConverter;
import org.deeplearning4j.text.sentenceiterator.SentenceIterator;
import org.deeplearning4j.text.sentenceiterator.interoperability.SentenceIteratorConverter;
import org.deeplearning4j.text.sentenceiterator.labelaware.LabelAwareSentenceIterator;

import java.util.concurrent.atomic.AtomicLong;

/**
 * A simple {@link LabelAwareIterator} for building sentence/label pairs for
 * ParagraphVectors/Doc2Vec. You provide a SentenceIterator or DocumentIterator
 * via the {@link Builder}, and this class wraps it (through the interop
 * converters) into a label-aware structure suitable for model training and
 * future model reuse.
 *
 * <p>All iteration calls are delegated to the wrapped backend iterator; labels
 * are produced/tracked by the configured {@link LabelsSource}.</p>
 *
 * @author raver119@gmail.com
 */
public class BasicLabelAwareIterator implements LabelAwareIterator {
    // Counter intended for generated ("dumb") label numbering.
    // NOTE(review): it is never read or incremented anywhere in this class —
    // label numbering appears to live in LabelsSource. Confirm before removing.
    protected AtomicLong documentPosition = new AtomicLong(0);

    // Source of document labels; either created here or taken from the wrapped iterator.
    protected LabelsSource generator;

    // The actual iterator all calls are delegated to. Transient: not serialized.
    protected transient LabelAwareIterator backendIterator;

    // Instances must be created through the Builder, which wires up
    // backendIterator and generator before returning the object.
    private BasicLabelAwareIterator() {

    }

    /**
     * Returns true if the backend iterator has more LabelledDocuments.
     *
     * @return true if another document is available
     */
    public boolean hasNextDocument() {
        return backendIterator.hasNextDocument();
    }

    /**
     * Returns the next LabelledDocument from the backend iterator.
     *
     * @return the next document with its label(s)
     */
    public LabelledDocument nextDocument() {
        return backendIterator.nextDocument();
    }

    /**
     * Resets the backend iterator to the beginning of its document stream.
     */
    public void reset() {
        backendIterator.reset();
    }

    /**
     * Returns the LabelsSource instance containing all labels derived from this iterator.
     *
     * @return the labels source backing this iterator
     */
    @Override
    public LabelsSource getLabelsSource() {
        return generator;
    }

    // Iterator-style alias for hasNextDocument().
    @Override
    public boolean hasNext() {
        return hasNextDocument();
    }

    // Iterator-style alias for nextDocument().
    @Override
    public LabelledDocument next() {
        return nextDocument();
    }

    @Override
    public void shutdown() {
        // no-op: nothing to release; the backend iterator owns any resources
    }

    @Override
    public void remove() {
        // no-op: removal is not supported by this iterator
    }

    /**
     * Builder for {@link BasicLabelAwareIterator}. Accepts the various
     * sentence/document iterator flavors and adapts each one to a
     * {@link LabelAwareIterator} via the interop converter classes.
     */
    public static class Builder {
        // Prefix used when labels are auto-generated (e.g. "DOC_0", "DOC_1", ...).
        private String labelTemplate = "DOC_";
        private LabelAwareIterator labelAwareIterator;
        // Default label source; replaced if a LabelAwareIterator or explicit
        // LabelsSource is supplied.
        private LabelsSource generator = new LabelsSource(labelTemplate);

        /**
         * This method should stay protected, since it's only viable for testing purposes
         */
        protected Builder() {

        }

        /**
         * We assume that each sentence in this iterator is a separate document/paragraph.
         * Labels are auto-generated from the label template.
         *
         * @param iterator source of sentences
         */
        public Builder(@NonNull SentenceIterator iterator) {
            this.labelAwareIterator = new SentenceIteratorConverter(iterator, generator);
        }

        /**
         * We assume that each inputStream in this iterator is a separate document/paragraph.
         * Labels are auto-generated from the label template.
         *
         * @param iterator source of documents
         */
        public Builder(@NonNull DocumentIterator iterator) {
            this.labelAwareIterator = new DocumentIteratorConverter(iterator, generator);
        }

        /**
         * We assume that each sentence in this iterator is a separate document/paragraph.
         * The iterator's own labels will be converted into LabelledDocument format.
         *
         * @param iterator label-aware source of sentences
         */
        public Builder(@NonNull LabelAwareSentenceIterator iterator) {
            this.labelAwareIterator = new SentenceIteratorConverter(iterator, generator);
        }

        /**
         * We assume that each inputStream in this iterator is a separate document/paragraph.
         * The iterator's own labels will be converted into LabelledDocument format.
         *
         * @param iterator label-aware source of documents
         */
        public Builder(@NonNull LabelAwareDocumentIterator iterator) {
            this.labelAwareIterator = new DocumentIteratorConverter(iterator, generator);
        }

        /**
         * Wraps an existing LabelAwareIterator directly; no conversion needed.
         * Note that this replaces the builder's LabelsSource with the one
         * exposed by the supplied iterator.
         *
         * @param iterator an already label-aware iterator
         */
        public Builder(@NonNull LabelAwareIterator iterator) {
            this.labelAwareIterator = iterator;
            this.generator = iterator.getLabelsSource();
        }

        /**
         * Label template will be used for sentence labels generation. I.e. if provided template is "DOCUMENT_", all documents/paragraphs will have their labels starting from "DOCUMENT_0" to "DOCUMENT_X", where X is the total number of documents - 1
         *
         * @param template prefix for generated labels
         * @return this builder, for chaining
         */
        public Builder setLabelTemplate(@NonNull String template) {
            this.labelTemplate = template;
            this.generator.setTemplate(template);
            return this;
        }

        /**
         * Replaces the LabelsSource used for label generation/tracking.
         * (Earlier marked "to be implemented" — it is implemented.)
         *
         * @param source the labels source to use
         * @return this builder, for chaining
         */
        public Builder setLabelsSource(@NonNull LabelsSource source) {
            this.generator = source;
            return this;
        }

        /**
         * Assembles the iterator from the configured backend and labels source.
         *
         * @return a fully wired BasicLabelAwareIterator
         */
        public BasicLabelAwareIterator build() {
            BasicLabelAwareIterator iterator = new BasicLabelAwareIterator();
            iterator.generator = this.generator;
            iterator.backendIterator = this.labelAwareIterator;

            return iterator;
        }
    }
}
deeplearning4j/deeplearning4j
deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/text/documentiterator/BasicLabelAwareIterator.java
Java
apache-2.0
5,848
# frozen_string_literal: true # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Auto-generated by gapic-generator-ruby. DO NOT EDIT! # Require this file early so that the version constant gets defined before # requiring "google/cloud". This is because google-cloud-core will load the # entrypoint (gem name) file, which in turn re-requires this file (hence # causing a require cycle) unless the version constant is already defined. require "google/cloud/document_ai/version" require "googleauth" gem "google-cloud-core" require "google/cloud" unless defined? ::Google::Cloud.new require "google/cloud/config" # Set the default configuration ::Google::Cloud.configure.add_config! :document_ai do |config| config.add_field! :endpoint, "documentai.googleapis.com", match: ::String config.add_field! :credentials, nil, match: [::String, ::Hash, ::Google::Auth::Credentials] config.add_field! :scope, nil, match: [::Array, ::String] config.add_field! :lib_name, nil, match: ::String config.add_field! :lib_version, nil, match: ::String config.add_field! :interceptors, nil, match: ::Array config.add_field! :timeout, nil, match: ::Numeric config.add_field! :metadata, nil, match: ::Hash config.add_field! :retry_policy, nil, match: [::Hash, ::Proc] config.add_field! :quota_project, nil, match: ::String end module Google module Cloud module DocumentAI ## # Create a new client object for DocumentProcessorService. 
# # By default, this returns an instance of # [Google::Cloud::DocumentAI::V1::DocumentProcessorService::Client](https://googleapis.dev/ruby/google-cloud-document_ai-v1/latest/Google/Cloud/DocumentAI/V1/DocumentProcessorService/Client.html) # for version V1 of the API. # However, you can specify specify a different API version by passing it in the # `version` parameter. If the DocumentProcessorService service is # supported by that API version, and the corresponding gem is available, the # appropriate versioned client will be returned. # # ## About DocumentProcessorService # # Service to call Cloud DocumentAI to process documents according to the # processor's definition. Processors are built using state-of-the-art Google # AI such as natural language, computer vision, and translation to extract # structured information from unstructured or semi-structured documents. # # @param version [::String, ::Symbol] The API version to connect to. Optional. # Defaults to `:v1`. # @return [DocumentProcessorService::Client] A client object for the specified version. # def self.document_processor_service version: :v1, &block require "google/cloud/document_ai/#{version.to_s.downcase}" package_name = Google::Cloud::DocumentAI .constants .select { |sym| sym.to_s.downcase == version.to_s.downcase.tr("_", "") } .first package_module = Google::Cloud::DocumentAI.const_get package_name package_module.const_get(:DocumentProcessorService).const_get(:Client).new(&block) end ## # Configure the google-cloud-document_ai library. # # The following configuration parameters are supported: # # * `credentials` (*type:* `String, Hash, Google::Auth::Credentials`) - # The path to the keyfile as a String, the contents of the keyfile as a # Hash, or a Google::Auth::Credentials object. # * `lib_name` (*type:* `String`) - # The library name as recorded in instrumentation and logging. # * `lib_version` (*type:* `String`) - # The library version as recorded in instrumentation and logging. 
# * `interceptors` (*type:* `Array<GRPC::ClientInterceptor>`) - # An array of interceptors that are run before calls are executed. # * `timeout` (*type:* `Numeric`) - # Default timeout in seconds. # * `metadata` (*type:* `Hash{Symbol=>String}`) - # Additional gRPC headers to be sent with the call. # * `retry_policy` (*type:* `Hash`) - # The retry policy. The value is a hash with the following keys: # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds. # * `:max_delay` (*type:* `Numeric`) - The max delay in seconds. # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier. # * `:retry_codes` (*type:* `Array<String>`) - # The error codes that should trigger a retry. # # @return [::Google::Cloud::Config] The default configuration used by this library # def self.configure yield ::Google::Cloud.configure.document_ai if block_given? ::Google::Cloud.configure.document_ai end end end end helper_path = ::File.join __dir__, "document_ai", "helpers.rb" require "google/cloud/document_ai/helpers" if ::File.file? helper_path
googleapis/google-cloud-ruby
google-cloud-document_ai/lib/google/cloud/document_ai.rb
Ruby
apache-2.0
5,540
// Copyright (c) 2017 Uber Technologies, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package adjuster import ( "errors" "github.com/uber/jaeger/model" ) // SpanIDDeduper returns an adjuster that changes span ids for server // spans (i.e. spans with tag: span.kind == server) if there is another // client span that shares the same span ID. This is needed to deal with // Zipkin-style clients that reuse the same span ID for both client and server // side of an RPC call. Jaeger UI expects all spans to have unique IDs. // // This adjuster never returns any errors. Instead it records any issues // it encounters in Span.Warnings. 
func SpanIDDeduper() Adjuster { return Func(func(trace *model.Trace) (*model.Trace, error) { deduper := &spanIDDeduper{trace: trace} deduper.groupSpansByID() deduper.dedupeSpanIDs() return deduper.trace, nil }) } const ( warningTooManySpans = "cannot assign unique span ID, too many spans in the trace" maxSpanID = model.SpanID(0xffffffffffffffff) ) type spanIDDeduper struct { trace *model.Trace spansByID map[model.SpanID][]*model.Span maxUsedID model.SpanID } // groupSpansByID groups spans with the same ID returning a map id -> []Span func (d *spanIDDeduper) groupSpansByID() { spansByID := make(map[model.SpanID][]*model.Span) for _, span := range d.trace.Spans { if spans, ok := spansByID[span.SpanID]; ok { // TODO maybe return an error if more than 2 spans found spansByID[span.SpanID] = append(spans, span) } else { spansByID[span.SpanID] = []*model.Span{span} } } d.spansByID = spansByID } func (d *spanIDDeduper) isSharedWithClientSpan(spanID model.SpanID) bool { for _, span := range d.spansByID[spanID] { if span.IsRPCClient() { return true } } return false } func (d *spanIDDeduper) dedupeSpanIDs() { oldToNewSpanIDs := make(map[model.SpanID]model.SpanID) for _, span := range d.trace.Spans { // only replace span IDs for server-side spans that share the ID with something else if span.IsRPCServer() && d.isSharedWithClientSpan(span.SpanID) { newID, err := d.makeUniqueSpanID() if err != nil { span.Warnings = append(span.Warnings, err.Error()) continue } oldToNewSpanIDs[span.SpanID] = newID span.ParentSpanID = span.SpanID // previously shared ID is the new parent span.SpanID = newID } } d.swapParentIDs(oldToNewSpanIDs) } // swapParentIDs corrects ParentSpanID of all spans that are children of the server // spans whose IDs we deduped. 
func (d *spanIDDeduper) swapParentIDs(oldToNewSpanIDs map[model.SpanID]model.SpanID) { for _, span := range d.trace.Spans { if parentID, ok := oldToNewSpanIDs[span.ParentSpanID]; ok { if span.SpanID != parentID { span.ParentSpanID = parentID } } } } // makeUniqueSpanID returns a new ID that is not used in the trace, // or an error if such ID cannot be generated, which is unlikely, // given that the whole space of span IDs is 2^64. func (d *spanIDDeduper) makeUniqueSpanID() (model.SpanID, error) { for id := d.maxUsedID + 1; id < maxSpanID; id++ { if _, ok := d.spansByID[id]; !ok { d.maxUsedID = id return id, nil } } return 0, errors.New(warningTooManySpans) }
tangfeixiong/go-for-docker
vendor/github.com/uber/jaeger/model/adjuster/span_id_deduper.go
Go
apache-2.0
3,704
/** * Copyright (C) 2014 Stratio (http://stratio.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.stratio.ingestion.sink.druid; import java.io.IOException; import java.util.Date; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.Random; import java.util.concurrent.TimeUnit; import org.apache.flume.Channel; import org.apache.flume.Context; import org.apache.flume.Event; import org.apache.flume.EventDeliveryException; import org.apache.flume.Transaction; import org.apache.flume.channel.MemoryChannel; import org.apache.flume.conf.Configurables; import org.apache.flume.event.EventBuilder; import org.fest.assertions.Assertions; import org.junit.Before; import org.junit.Test; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.base.Charsets; import com.google.common.collect.Maps; /** * Created by eambrosio on 30/03/15. 
*/ public class DruidSinkIT { private Channel channel; private DruidSink druidSink; @Before public void setup() { // Context channelContext = new Context(); // channelContext.put("checkpointDir","data/check"); // channelContext.put("dataDirs","data/data"); // channelContext.put("capacity","1000"); // channelContext.put("transactionCapacity","100"); // channelContext.put("checkpointInterval","300"); // channel = new FileChannel(); Context channelContext = new Context(); channelContext.put("capacity", "10000"); channelContext.put("transactionCapacity", "5000"); channel = new MemoryChannel(); channel.setName("junitChannel"); Configurables.configure(channel, channelContext); channel.start(); druidSink = new DruidSink(); druidSink.setChannel(channel); druidSink.configure(getMockContext()); druidSink.start(); } @Test public void processValidEvents() throws EventDeliveryException { Transaction tx = channel.getTransaction(); tx.begin(); getNTrackerEvents(1000); tx.commit(); tx.close(); for (int i = 0; i < 1; i++) { druidSink.process(); } tx = channel.getTransaction(); tx.begin(); Assertions.assertThat(channel.take()).isNull(); } @Test public void process500KValidEvents() throws EventDeliveryException { for (int i = 0; i < 10; i++) { processValidEvents(); } } private void getNEvents(int numEvents, TimeUnit timeUnit) { for (int i = 0; i < numEvents; i++) { channel.put(getEvent(getOffset(timeUnit))); } } private void getNTrackerEvents(int numEvents) { for (int i = 0; i < numEvents; i++) { channel.put(getTrackerEvent()); } } private long getOffset(TimeUnit timeUnit) { long offset = 0; switch (timeUnit) { case MILLISECONDS: offset = 1; break; case SECONDS: offset = 1000; break; case MINUTES: offset = 1000 * 60; break; case HOURS: offset = 1000 * 60 * 60; break; case DAYS: offset = 1000 * 60 * 60 * 24; break; default: offset = 0; break; } return offset; } private Event getTrackerEvent() { Random random = new Random(); String[] users = new String[] { "user1@santander.com", 
"user2@santander.com", "user3@santander.com", "user4@santander.com" }; String[] isoCode = new String[] { "DE", "ES", "US", "FR" }; TimeUnit[] offset = new TimeUnit[] { TimeUnit.DAYS, TimeUnit.HOURS, TimeUnit.SECONDS }; ObjectNode jsonBody = new ObjectNode(JsonNodeFactory.instance); Map<String, String> headers; ObjectMapper mapper = new ObjectMapper(); JsonNode jsonNode = null; final String fileName = "/trackerSample" + random.nextInt(4) + ".json"; try { jsonNode = mapper.readTree(getClass().getResourceAsStream(fileName)); } catch (IOException e) { e.printStackTrace(); } headers = mapper.convertValue(jsonNode, Map.class); headers.put("timestamp", String.valueOf(new Date().getTime() + getOffset(offset[random.nextInt(3)]) * random .nextInt(100))); headers.put("santanderID", users[random.nextInt(4)]); headers.put("isoCode", isoCode[random.nextInt(4)]); return EventBuilder.withBody(jsonBody.toString().getBytes(Charsets.UTF_8), headers); } private Event getEvent(long offset) { ObjectNode jsonBody = new ObjectNode(JsonNodeFactory.instance); jsonBody.put("field1", "foo"); jsonBody.put("field2", 32); jsonBody.put("timestamp", String.valueOf(new Date().getTime())); Map<String, String> headers = new HashMap<String, String>(); headers.put("field3", "bar"); // Overwrites the value defined in JSON body headers.put("field4", "64"); headers.put("field5", "true"); headers.put("field6", "1.0"); headers.put("field7", "11"); final long l = new Date().getTime(); headers.put("timestamp", String.valueOf(l + offset)); headers.put("myString2", "baz"); return EventBuilder.withBody(jsonBody.toString().getBytes(Charsets.UTF_8), headers); } private Context getMockContext() { Map<String, String> mapProperties = loadProperties("/context.properties"); Context context = new Context(mapProperties); return context; } private Map<String, String> loadProperties(String file) { Properties properties = new Properties(); try { properties.load(getClass().getResourceAsStream(file)); } catch (IOException e) 
{ e.printStackTrace(); } return Maps.fromProperties(properties); } }
anavidad3/flume-ingestion
stratio-sinks/stratio-druid-sink/src/test/java/com/stratio/ingestion/sink/druid/DruidSinkIT.java
Java
apache-2.0
6,762
# Integration (DELETE-phase) spec: runs the shared 'UserDeleteExample'
# examples for the Synergy variant of the User resource.
#
# NOTE(review): this spec pins the API500 User class and the
# $client_500_synergy client, yet the file path places it under api600/ —
# confirm whether it was meant to target API600 instead.
require 'spec_helper'

klass = OneviewSDK::API500::Synergy::User
RSpec.describe klass, integration: true, type: DELETE, sequence: rseq(klass) do
  let(:current_client) { $client_500_synergy }
  include_examples 'UserDeleteExample', 'integration api500 context'
end
HewlettPackard/oneview-sdk-ruby
spec/integration/resource/api600/synergy/user/delete_spec.rb
Ruby
apache-2.0
265
import {NgModule} from '@angular/core';
import {MatButtonModule} from '@angular/material/button';
import {MatCardModule} from '@angular/material/card';
import {MatFormFieldModule} from '@angular/material/form-field';
import {MatGridListModule} from '@angular/material/grid-list';
import {MatIconModule} from '@angular/material/icon';
import {MatInputModule} from '@angular/material/input';
import {MatListModule} from '@angular/material/list';
import {MatMenuModule} from '@angular/material/menu';
import {MatPaginatorModule} from '@angular/material/paginator';
import {MatRadioModule} from '@angular/material/radio';
import {MatSelectModule} from '@angular/material/select';
import {MatSidenavModule} from '@angular/material/sidenav';
import {MatTableModule} from '@angular/material/table';
import {MatToolbarModule} from '@angular/material/toolbar';
import {MatTooltipModule} from '@angular/material/tooltip';

/**
 * Every Angular Material module this app uses, gathered in one place so that
 * feature modules can import/re-export a single aggregate module.
 */
const MATERIAL_MODULES = [
  MatButtonModule,
  MatCardModule,
  MatFormFieldModule,
  MatIconModule,
  MatInputModule,
  MatListModule,
  MatToolbarModule,
  MatSidenavModule,
  MatRadioModule,
  MatSelectModule,
  MatGridListModule,
  MatMenuModule,
  MatTableModule,
  MatPaginatorModule,
  MatTooltipModule,
];

/** Aggregate module: imports and re-exports all Material modules above. */
@NgModule({
  imports: MATERIAL_MODULES,
  exports: MATERIAL_MODULES,
})
export class MaterialModule {
}
bazelbuild/rules_nodejs
examples/angular/src/shared/material/material.module.ts
TypeScript
apache-2.0
1,290
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide.actions;

import com.intellij.icons.AllIcons;
import com.intellij.ide.highlighter.ProjectFileType;
import com.intellij.openapi.application.ex.ApplicationInfoEx;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileElement;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.projectImport.ProjectOpenProcessor;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;

/**
 * File-chooser descriptor for picking something openable as a project:
 * an .ipr project file, a directory containing a {@code .idea} store
 * ({@link Project#DIRECTORY_STORE_FOLDER}), or any file/directory a
 * registered {@link ProjectOpenProcessor} can import.
 */
public class OpenProjectFileChooserDescriptor extends FileChooserDescriptor {
  private static final Icon ourProjectIcon = IconLoader.getIcon(ApplicationInfoEx.getInstanceEx().getSmallIconUrl());

  public OpenProjectFileChooserDescriptor(final boolean chooseFiles) {
    super(chooseFiles, true, chooseFiles, chooseFiles, false, false);
  }

  @Override
  public boolean isFileSelectable(final VirtualFile file) {
    if (file == null) return false;
    return isProjectDirectory(file) || isProjectFile(file);
  }

  @Override
  public Icon getIcon(final VirtualFile file) {
    if (isProjectDirectory(file)) {
      return dressIcon(file, ourProjectIcon);
    }
    final Icon icon = getImporterIcon(file);
    if (icon != null) {
      return dressIcon(file, icon);
    }
    return super.getIcon(file);
  }

  /** Icon supplied by an import provider for the file, or null when no provider matches. */
  @Nullable
  private static Icon getImporterIcon(final VirtualFile virtualFile) {
    final ProjectOpenProcessor provider = ProjectOpenProcessor.getImportProvider(virtualFile);
    if (provider != null) {
      // Directories that a provider merely scans for nested projects get the generic module icon.
      return virtualFile.isDirectory() && provider.lookForProjectsInDirectory()
             ? AllIcons.Nodes.IdeaModule
             : provider.getIcon(virtualFile);
    }
    return null;
  }

  @Override
  public boolean isFileVisible(final VirtualFile file, final boolean showHiddenFiles) {
    if (!showHiddenFiles && FileElement.isFileHidden(file)) return false;
    return isProjectFile(file) || super.isFileVisible(file, showHiddenFiles) && file.isDirectory();
  }

  /** True for .ipr files and anything an import provider recognizes. */
  public static boolean isProjectFile(final VirtualFile file) {
    return isIprFile(file) || ProjectOpenProcessor.getImportProvider(file) != null;
  }

  private static boolean isIprFile(VirtualFile file) {
    // NOTE(review): default-locale lowercasing; an extension check would normally use a
    // locale-independent comparison — confirm nothing relies on the current behavior.
    return !file.isDirectory() && file.getName().toLowerCase().endsWith(ProjectFileType.DOT_DEFAULT_EXTENSION);
  }

  private static boolean isProjectDirectory(final VirtualFile virtualFile) {
    // the root directory of any drive is never an IDEA project
    if (virtualFile.getParent() == null) return false;
    // NOTE: For performance reasons, it's very important not to iterate through all of the children here.
    return virtualFile.isDirectory() && virtualFile.isValid()
           && virtualFile.findChild(Project.DIRECTORY_STORE_FOLDER) != null;
  }
}
IllusionRom-deprecated/android_platform_tools_idea
platform/platform-impl/src/com/intellij/ide/actions/OpenProjectFileChooserDescriptor.java
Java
apache-2.0
3,508
# Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """'functions call' command.""" from googlecloudsdk.api_lib.functions import util from googlecloudsdk.calliope import base from googlecloudsdk.core import properties class Call(base.Command): """Call function synchronously for testing.""" @staticmethod def Args(parser): """Register flags for this command.""" parser.add_argument( 'name', help='Name of the function to be called.', type=util.ValidateFunctionNameOrRaise) parser.add_argument( '--data', default='', help='Data passed to the function (JSON string)') @util.CatchHTTPErrorRaiseHTTPException def Run(self, args): """This is what gets called when the user runs this command. Args: args: an argparse namespace. All the arguments that were provided to this command invocation. Returns: Function call results (error or result with execution id) """ project = properties.VALUES.core.project.Get(required=True) registry = self.context['registry'] client = self.context['functions_client'] messages = self.context['functions_messages'] function_ref = registry.Parse( args.name, params={'projectsId': project, 'locationsId': args.region}, collection='cloudfunctions.projects.locations.functions') return client.projects_locations_functions.Call( messages.CloudfunctionsProjectsLocationsFunctionsCallRequest( name=function_ref.RelativeName(), callFunctionRequest=messages.CallFunctionRequest(data=args.data)))
KaranToor/MA450
google-cloud-sdk/lib/surface/functions/call.py
Python
apache-2.0
2,127
/*-
 * Automatically built by dist/s_java_stat.
 * Only the javadoc comments can be edited.
 *
 * See the file LICENSE for redistribution information.
 *
 * Copyright (c) 2002, 2010 Oracle and/or its affiliates. All rights reserved.
 */
package com.sleepycat.db;

/**
The BtreeStats object is used to return Btree or Recno database statistics.
All values are snapshots taken by {@link com.sleepycat.db.Database#getStats Database.getStats}.
*/
public class BtreeStats extends DatabaseStats {
    // no public constructor: instances are populated natively by getStats
    /* package */ BtreeStats() {}

    private int bt_magic;
    /**
    The magic number that identifies the file as a Btree database.
    */
    public int getMagic() {
        return bt_magic;
    }

    private int bt_version;
    /**
    The version of the Btree database.
    */
    public int getVersion() {
        return bt_version;
    }

    private int bt_metaflags;
    /**
    The metadata flags.
    */
    public int getMetaFlags() {
        return bt_metaflags;
    }

    private int bt_nkeys;
    /**
    The number of keys or records in the database.
    <p>
    For the Btree Access Method, the number of keys in the database.
    If the {@link com.sleepycat.db.Database#getStats Database.getStats} call
    was not configured by the {@link com.sleepycat.db.StatsConfig#setFast
    StatsConfig.setFast} method or the database was configured to support
    retrieval by record number, the count will be exact.  Otherwise, the
    count will be the last saved value unless it has never been calculated,
    in which case it will be 0.
    <p>
    For the Recno Access Method, the number of records in the database.
    If the database was configured with mutable record numbers the count
    will be exact.  Otherwise, if the {@link
    com.sleepycat.db.Database#getStats Database.getStats} call was configured
    by the {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast}
    method, the count will be exact but will include deleted records; if the
    {@link com.sleepycat.db.Database#getStats Database.getStats} call was not
    configured by the {@link com.sleepycat.db.StatsConfig#setFast
    StatsConfig.setFast} method, the count will be exact and will not include
    deleted records.
    */
    public int getNumKeys() {
        return bt_nkeys;
    }

    private int bt_ndata;
    /**
    The number of key/data pairs or records in the database.
    <p>
    For the Btree Access Method, the number of key/data pairs in the database.
    If the {@link com.sleepycat.db.Database#getStats Database.getStats} call
    was not configured by the {@link com.sleepycat.db.StatsConfig#setFast
    StatsConfig.setFast} method, the count will be exact.  Otherwise, the
    count will be the last saved value unless it has never been calculated,
    in which case it will be 0.
    <p>
    For the Recno Access Method, the number of records in the database.
    If the database was configured with mutable record numbers, the count
    will be exact.  Otherwise, if the {@link
    com.sleepycat.db.Database#getStats Database.getStats} call was configured
    by the {@link com.sleepycat.db.StatsConfig#setFast StatsConfig.setFast}
    method, the count will be exact but will include deleted records; if the
    {@link com.sleepycat.db.Database#getStats Database.getStats} call was not
    configured by the {@link com.sleepycat.db.StatsConfig#setFast
    StatsConfig.setFast} method, the count will be exact and will not include
    deleted records.
    */
    public int getNumData() {
        return bt_ndata;
    }

    private int bt_pagecnt;
    /**
    The number of pages in the database.
    <p>
    Returned if {@link StatsConfig#setFast} was configured.
    */
    public int getPageCount() {
        return bt_pagecnt;
    }

    private int bt_pagesize;
    /**
    The underlying database page size, in bytes.
    */
    public int getPageSize() {
        return bt_pagesize;
    }

    private int bt_minkey;
    /**
    The minimum keys per page.
    */
    public int getMinKey() {
        return bt_minkey;
    }

    private int bt_re_len;
    /**
    The length of fixed-length records.
    */
    public int getReLen() {
        return bt_re_len;
    }

    private int bt_re_pad;
    /**
    The padding byte value for fixed-length records.
    */
    public int getRePad() {
        return bt_re_pad;
    }

    private int bt_levels;
    /**
    The number of levels in the database.
    <p>
    The information is only included if the {@link
    com.sleepycat.db.Database#getStats Database.getStats} call was not
    configured by the {@link com.sleepycat.db.StatsConfig#setFast
    StatsConfig.setFast} method.
    */
    public int getLevels() {
        return bt_levels;
    }

    private int bt_int_pg;
    /**
    The number of database internal pages.
    <p>
    The information is only included if the {@link
    com.sleepycat.db.Database#getStats Database.getStats} call was not
    configured by the {@link com.sleepycat.db.StatsConfig#setFast
    StatsConfig.setFast} method.
    */
    public int getIntPages() {
        return bt_int_pg;
    }

    private int bt_leaf_pg;
    /**
    The number of database leaf pages.
    <p>
    The information is only included if the {@link
    com.sleepycat.db.Database#getStats Database.getStats} call was not
    configured by the {@link com.sleepycat.db.StatsConfig#setFast
    StatsConfig.setFast} method.
    */
    public int getLeafPages() {
        return bt_leaf_pg;
    }

    private int bt_dup_pg;
    /**
    The number of database duplicate pages.
    <p>
    The information is only included if the {@link
    com.sleepycat.db.Database#getStats Database.getStats} call was not
    configured by the {@link com.sleepycat.db.StatsConfig#setFast
    StatsConfig.setFast} method.
    */
    public int getDupPages() {
        return bt_dup_pg;
    }

    private int bt_over_pg;
    /**
    The number of database overflow pages.
    <p>
    The information is only included if the {@link
    com.sleepycat.db.Database#getStats Database.getStats} call was not
    configured by the {@link com.sleepycat.db.StatsConfig#setFast
    StatsConfig.setFast} method.
    */
    public int getOverPages() {
        return bt_over_pg;
    }

    private int bt_empty_pg;
    /**
    The number of empty database pages.
    <p>
    The information is only included if the {@link
    com.sleepycat.db.Database#getStats Database.getStats} call was not
    configured by the {@link com.sleepycat.db.StatsConfig#setFast
    StatsConfig.setFast} method.
    */
    public int getEmptyPages() {
        return bt_empty_pg;
    }

    private int bt_free;
    /**
    The number of pages on the free list.
    <p>
    The information is only included if the {@link
    com.sleepycat.db.Database#getStats Database.getStats} call was not
    configured by the {@link com.sleepycat.db.StatsConfig#setFast
    StatsConfig.setFast} method.
    */
    public int getFree() {
        return bt_free;
    }

    private long bt_int_pgfree;
    /**
    The number of bytes free in database internal pages.
    <p>
    The information is only included if the {@link
    com.sleepycat.db.Database#getStats Database.getStats} call was not
    configured by the {@link com.sleepycat.db.StatsConfig#setFast
    StatsConfig.setFast} method.
    */
    public long getIntPagesFree() {
        return bt_int_pgfree;
    }

    private long bt_leaf_pgfree;
    /**
    The number of bytes free in database leaf pages.
    <p>
    The information is only included if the {@link
    com.sleepycat.db.Database#getStats Database.getStats} call was not
    configured by the {@link com.sleepycat.db.StatsConfig#setFast
    StatsConfig.setFast} method.
    */
    public long getLeafPagesFree() {
        return bt_leaf_pgfree;
    }

    private long bt_dup_pgfree;
    /**
    The number of bytes free in database duplicate pages.
    <p>
    The information is only included if the {@link
    com.sleepycat.db.Database#getStats Database.getStats} call was not
    configured by the {@link com.sleepycat.db.StatsConfig#setFast
    StatsConfig.setFast} method.
    */
    public long getDupPagesFree() {
        return bt_dup_pgfree;
    }

    private long bt_over_pgfree;
    /**
    The number of bytes free in database overflow pages.
    <p>
    The information is only included if the {@link
    com.sleepycat.db.Database#getStats Database.getStats} call was not
    configured by the {@link com.sleepycat.db.StatsConfig#setFast
    StatsConfig.setFast} method.
    */
    public long getOverPagesFree() {
        return bt_over_pgfree;
    }

    /**
    For convenience, the BtreeStats class has a toString method
    that lists all the data fields.
    */
    public String toString() {
        return "BtreeStats:"
            + "\n  bt_magic=" + bt_magic
            + "\n  bt_version=" + bt_version
            + "\n  bt_metaflags=" + bt_metaflags
            + "\n  bt_nkeys=" + bt_nkeys
            + "\n  bt_ndata=" + bt_ndata
            + "\n  bt_pagecnt=" + bt_pagecnt
            + "\n  bt_pagesize=" + bt_pagesize
            + "\n  bt_minkey=" + bt_minkey
            + "\n  bt_re_len=" + bt_re_len
            + "\n  bt_re_pad=" + bt_re_pad
            + "\n  bt_levels=" + bt_levels
            + "\n  bt_int_pg=" + bt_int_pg
            + "\n  bt_leaf_pg=" + bt_leaf_pg
            + "\n  bt_dup_pg=" + bt_dup_pg
            + "\n  bt_over_pg=" + bt_over_pg
            + "\n  bt_empty_pg=" + bt_empty_pg
            + "\n  bt_free=" + bt_free
            + "\n  bt_int_pgfree=" + bt_int_pgfree
            + "\n  bt_leaf_pgfree=" + bt_leaf_pgfree
            + "\n  bt_dup_pgfree=" + bt_dup_pgfree
            + "\n  bt_over_pgfree=" + bt_over_pgfree
            ;
    }
}
racker/omnibus
source/db-5.0.26.NC/java/src/com/sleepycat/db/BtreeStats.java
Java
apache-2.0
9,461
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

namespace SpellChecker.Net.Search.Spell
{

    /// <summary>
    /// Levenshtein edit-distance calculator bound to a fixed target string.
    /// </summary>
    public class TRStringDistance
    {
        internal char[] sa;
        internal int n;
        // Memoised matrices keyed by the length of the compared string
        // (only lengths below 30 are cached and reused across calls).
        internal int[][][] cache = new int[30][][];

        /// <summary> Optimized to run a bit faster than the static getDistance().
        /// In one benchmark times were 5.3sec using ctr vs 8.5sec w/ static method, thus 37% faster.
        /// </summary>
        public TRStringDistance(System.String target)
        {
            sa = target.ToCharArray();
            n = sa.Length;
        }

        //***************************
        // Compute Levenshtein distance
        //***************************
        public int GetDistance(System.String other)
        {
            char[] otherChars = other.ToCharArray();
            int m = otherChars.Length;

            // One of the strings is empty: distance is the other's length.
            if (n == 0)
            {
                return m;
            }
            if (m == 0)
            {
                return n;
            }

            // Fetch or build the DP matrix; small lengths reuse a cached one.
            int[][] matrix;
            if (m >= cache.Length)
            {
                matrix = Form(n, m);
            }
            else if (cache[m] != null)
            {
                matrix = cache[m];
            }
            else
            {
                matrix = cache[m] = Form(n, m);
            }

            // Classic dynamic-programming fill.
            for (int i = 1; i <= n; i++)
            {
                char sourceChar = sa[i - 1];
                for (int j = 1; j <= m; j++)
                {
                    char targetChar = otherChars[j - 1];
                    int substitutionCost = sourceChar == targetChar ? 0 : 1;
                    matrix[i][j] = Min3(
                        matrix[i - 1][j] + 1,                       // deletion
                        matrix[i][j - 1] + 1,                       // insertion
                        matrix[i - 1][j - 1] + substitutionCost);   // substitution
                }
            }
            return matrix[n][m];
        }

        /// <summary>
        /// Allocates an (n+1) x (m+1) matrix with the first row and column
        /// preset to the edit distances from the empty string.
        /// </summary>
        private static int[][] Form(int n, int m)
        {
            int[][] d = new int[n + 1][];
            for (int i = 0; i <= n; i++)
            {
                d[i] = new int[m + 1];
                d[i][0] = i;
            }
            for (int j = 0; j <= m; j++)
            {
                d[0][j] = j;
            }
            return d;
        }

        // Minimum of three values.
        private static int Min3(int a, int b, int c)
        {
            return System.Math.Min(a, System.Math.Min(b, c));
        }
    }
}
Anomalous-Software/Lucene.NET
src/contrib/SpellChecker/Spell/TRStringDistance.cs
C#
apache-2.0
3,734
package org.smslib.callback;

import org.smslib.callback.events.InboundCallCallbackEvent;

/**
 * Callback contract invoked when an inbound voice-call event is received.
 */
public interface IInboundCallCallback
{
	/**
	 * Handles an inbound-call event.
	 *
	 * @param event the inbound-call event to process
	 * @return a boolean status flag — NOTE(review): the meaning of the return
	 *         value (e.g. "event was handled") is not visible from this file;
	 *         confirm against the dispatcher that invokes this callback.
	 */
	public boolean process(InboundCallCallbackEvent event);
}
smslib/smslib
smslib/src/main/java/org/smslib/callback/IInboundCallCallback.java
Java
apache-2.0
191
// Copyright 2004 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package org.apache.tapestry.vlib.pages;

import java.rmi.RemoteException;
import java.util.HashMap;
import java.util.Map;

import javax.ejb.CreateException;
import javax.ejb.FinderException;

import org.apache.tapestry.ApplicationRuntimeException;
import org.apache.tapestry.IRequestCycle;
import org.apache.tapestry.Tapestry;
import org.apache.tapestry.event.PageEvent;
import org.apache.tapestry.event.PageRenderListener;
import org.apache.tapestry.vlib.Protected;
import org.apache.tapestry.vlib.VirtualLibraryEngine;
import org.apache.tapestry.vlib.ejb.IOperations;

/**
 *  Edits the properties of a book.
 *
 *  @author Howard Lewis Ship
 *  @version $Id$
 **/
public abstract class EditBook extends Protected implements PageRenderListener
{
    // Abstract accessors: concrete implementations are generated by Tapestry
    // and backed by page properties.
    public abstract Map getAttributes();

    public abstract void setAttributes(Map attributes);

    public abstract String getPublisherName();

    public abstract Integer getBookId();

    public abstract void setBookId(Integer bookId);

    /**
     *  Invoked (from {@link MyLibrary}) to begin editting a book.
     *  Gets the attributes from the {@link org.apache.tapestry.vlib.ejb.IBook}
     *  and updates the request cycle to render this page.
     *
     **/
    public void beginEdit(IRequestCycle cycle, Integer bookId)
    {
        setBookId(bookId);

        VirtualLibraryEngine vengine = (VirtualLibraryEngine) getEngine();

        // Retry loop: rmiFailure is called with an increasing attempt counter
        // on each RemoteException. NOTE(review): presumably rmiFailure logs
        // and/or refreshes the remote reference (and eventually aborts) —
        // confirm in VirtualLibraryEngine, otherwise this loops indefinitely.
        int i = 0;
        while (true)
        {
            try
            {
                // Get the attributes as a source for our input fields.

                IOperations operations = vengine.getOperations();

                setAttributes(operations.getBookAttributes(bookId));

                break;
            }
            catch (FinderException ex)
            {
                // Unknown book id: not retryable.
                throw new ApplicationRuntimeException(ex);
            }
            catch (RemoteException ex)
            {
                vengine.rmiFailure(
                    "Remote exception setting up page for book #" + bookId + ".",
                    ex,
                    i++);
            }
        }

        cycle.activate(this);
    }

    /**
     *  Used to update the book when the form is submitted.
     *  Validates that exactly one of (existing publisher, new publisher name)
     *  was supplied, then persists the edited attributes.
     *
     **/
    public void formSubmit(IRequestCycle cycle)
    {
        Map attributes = getAttributes();

        Integer publisherId = (Integer) attributes.get("publisherId");
        String publisherName = getPublisherName();

        // Exactly one of publisherId / publisherName must be provided.
        if (publisherId == null && Tapestry.isBlank(publisherName))
        {
            setErrorField("inputPublisherName", getMessage("need-publisher-name"));
            return;
        }

        if (publisherId != null && Tapestry.isNonBlank(publisherName))
        {
            setErrorField("inputPublisherName", getMessage("leave-publisher-name-empty"));
            return;
        }

        // Check for an error from a validation field

        if (isInError())
            return;

        // OK, do the update.

        VirtualLibraryEngine vengine = (VirtualLibraryEngine)cycle.getEngine();
        Integer bookId = getBookId();

        // Same RemoteException retry pattern as beginEdit (see note there).
        int i = 0;
        while (true)
        {
            IOperations bean = vengine.getOperations();

            try
            {
                if (publisherId != null)
                    bean.updateBook(bookId, attributes);
                else
                {
                    // Creating a new publisher invalidates cached publisher
                    // lists, hence the cache clear on this branch only.
                    bean.updateBook(bookId, attributes, publisherName);
                    vengine.clearCache();
                }

                break;
            }
            catch (FinderException ex)
            {
                throw new ApplicationRuntimeException(ex);
            }
            catch (CreateException ex)
            {
                throw new ApplicationRuntimeException(ex);
            }
            catch (RemoteException ex)
            {
                vengine.rmiFailure("Remote exception updating book #" + bookId + ".", ex, i++);

                continue;
            }
        }

        MyLibrary page = (MyLibrary) cycle.getPage("MyLibrary");
        page.setMessage(format("updated-book", attributes.get("title")));

        page.activate(cycle);
    }

    // Ensures the attributes map exists before the page renders.
    public void pageBeginRender(PageEvent event)
    {
        if (getAttributes() == null)
            setAttributes(new HashMap());
    }
}
apache/tapestry3
tapestry-examples/Vlib/src/org/apache/tapestry/vlib/pages/EditBook.java
Java
apache-2.0
4,895
package org.pinae.rafiki.trigger; import org.pinae.rafiki.trigger.Trigger; import org.pinae.rafiki.trigger.TriggerException; import org.pinae.rafiki.trigger.impl.CronTrigger; import static org.pinae.rafiki.trigger.helper.DateHelper.today; import org.apache.log4j.Logger; public class CronTriggerExample { private static Logger logger = Logger.getLogger(CronTriggerExample.class); public static Trigger getTrigger0(){ CronTrigger trigger = null; try { trigger = new CronTrigger("0-30/5 * * * * * *"); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } public static Trigger getTrigger1(){ CronTrigger trigger = null; try { trigger = new CronTrigger("0 * * * * * *"); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } public static Trigger getTrigger2(){ CronTrigger trigger = null; try { trigger = new CronTrigger("30 1-5 * * * * *"); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } public static Trigger getTrigger3(){ CronTrigger trigger = null; try { trigger = new CronTrigger("0 * 12-17 * * * *"); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } public static Trigger getTrigger4(){ CronTrigger trigger = null; try { trigger = new CronTrigger("0 * * 25-30 * * *"); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } public static Trigger getTrigger5(){ CronTrigger trigger = null; try { trigger = new CronTrigger("0 * * * DEC SUN *"); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } public static Trigger getTrigger6(){ CronTrigger trigger = null; try { trigger = new CronTrigger("0 * * * 
DEC * 2013"); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } public static Trigger getTrigger7(){ CronTrigger trigger = null; try { trigger = new CronTrigger("0-30/10 * * 5-12 MAY * 2014"); trigger.setStartTime(today(15, 10, 0)); trigger.setEndTime(today(21, 30, 0)); } catch (TriggerException e) { logger.error(String.format("getTrigger Exception: exception=%s", e.getMessage())); } return trigger; } }
PinaeOS/rafiki
src/example/java/org/pinae/rafiki/trigger/CronTriggerExample.java
Java
apache-2.0
2,660
// Demo payload for the XSS example: if this externally-hosted script is
// injected into a page, the alert proves attacker-controlled code executed
// in the victim page's context. Intentionally trivial.
alert("XSS from JS file");
datla/JavaSecurity
xss/src/main/webapp/alert.js
JavaScript
apache-2.0
26
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!

namespace Google.Cloud.Container.V1.Snippets
{
    // [START container_v1_generated_ClusterManager_CompleteIPRotation_sync]
    using Google.Cloud.Container.V1;

    public sealed partial class GeneratedClusterManagerClientSnippets
    {
        /// <summary>Snippet for CompleteIPRotation</summary>
        /// <remarks>
        /// This snippet has been automatically generated for illustrative purposes only.
        /// It may require modifications to work in your environment.
        /// </remarks>
        public void CompleteIPRotationRequestObject()
        {
            // Create client
            ClusterManagerClient clusterManagerClient = ClusterManagerClient.Create();
            // Initialize request argument(s)
            // NOTE(review): Name is an empty placeholder; callers must set it to a
            // real cluster resource name before running — confirm expected format
            // against the CompleteIPRotationRequest API reference.
            CompleteIPRotationRequest request = new CompleteIPRotationRequest
            {
                Name = "",
            };
            // Make the request
            Operation response = clusterManagerClient.CompleteIPRotation(request);
        }
    }
    // [END container_v1_generated_ClusterManager_CompleteIPRotation_sync]
}
googleapis/google-cloud-dotnet
apis/Google.Cloud.Container.V1/Google.Cloud.Container.V1.GeneratedSnippets/ClusterManagerClient.CompleteIPRotationRequestObjectSnippet.g.cs
C#
apache-2.0
1,655
/**
 * Get more info at : www.jrebirth.org .
 * Copyright JRebirth.org © 2011-2013
 * Contact : sebastien.bordes@jrebirth.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jrebirth.af.core.ui.adapter;

import javafx.scene.input.KeyEvent;

import org.jrebirth.af.core.ui.AbstractBaseController;

/**
 * The class <strong>DefaultKeyAdapter</strong>.
 *
 * Default {@link KeyAdapter} implementation: every callback is an intentional
 * no-op, so concrete adapters can extend this class and override only the key
 * events they care about.
 *
 * @author Sébastien Bordes
 *
 * @param <C> The controller class which manage this event adapter
 */
public class DefaultKeyAdapter<C extends AbstractBaseController<?, ?>> extends AbstractDefaultAdapter<C> implements KeyAdapter {

    /**
     * {@inheritDoc}
     */
    @Override
    public void key(final KeyEvent keyEvent) {
        // Nothing to do yet
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void keyPressed(final KeyEvent keyEvent) {
        // Nothing to do yet
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void keyReleased(final KeyEvent keyEvent) {
        // Nothing to do yet
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void keyTyped(final KeyEvent keyEvent) {
        // Nothing to do yet
    }
}
JRebirth/JRebirth
org.jrebirth.af/core/src/main/java/org/jrebirth/af/core/ui/adapter/DefaultKeyAdapter.java
Java
apache-2.0
1,671
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.shardingsphere.proxy.frontend.mysql.command.query.text.query;

import org.apache.shardingsphere.db.protocol.mysql.constant.MySQLCharacterSet;
import org.apache.shardingsphere.db.protocol.mysql.constant.MySQLConstants;
import org.apache.shardingsphere.db.protocol.mysql.packet.command.query.text.query.MySQLComQueryPacket;
import org.apache.shardingsphere.proxy.backend.response.header.query.QueryResponseHeader;
import org.apache.shardingsphere.proxy.backend.response.header.query.impl.QueryHeader;
import org.apache.shardingsphere.proxy.backend.response.header.update.UpdateResponseHeader;
import org.apache.shardingsphere.proxy.backend.session.ConnectionSession;
import org.apache.shardingsphere.proxy.backend.text.TextProtocolBackendHandler;
import org.apache.shardingsphere.proxy.frontend.command.executor.ResponseType;
import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.internal.util.reflection.FieldSetter;
import org.mockito.junit.MockitoJUnitRunner;

import java.sql.SQLException;
import java.util.Collections;

import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Verifies that MySQLComQueryPacketExecutor reports the correct ResponseType
 * (QUERY vs UPDATE) depending on the header returned by its backend handler.
 * The backend handler is injected via reflection to bypass real SQL parsing.
 */
@RunWith(MockitoJUnitRunner.class)
public final class MySQLComQueryPacketExecutorTest {

    @Mock
    private TextProtocolBackendHandler textProtocolBackendHandler;

    @Mock
    private MySQLComQueryPacket packet;

    // Deep stubs so the chained getAttributeMap().attr(...).get() call can be stubbed.
    @Mock(answer = Answers.RETURNS_DEEP_STUBS)
    private ConnectionSession connectionSession;

    @Before
    public void setUp() {
        // The executor's constructor reads the SQL text and the session charset;
        // both must be stubbed before the executor is built in each test.
        when(packet.getSql()).thenReturn("");
        when(connectionSession.getAttributeMap().attr(MySQLConstants.MYSQL_CHARACTER_SET_ATTRIBUTE_KEY).get()).thenReturn(MySQLCharacterSet.UTF8MB4_GENERAL_CI);
    }

    @Test
    public void assertIsQueryResponse() throws SQLException, NoSuchFieldException {
        MySQLComQueryPacketExecutor mysqlComQueryPacketExecutor = new MySQLComQueryPacketExecutor(packet, connectionSession);
        // Replace the real backend handler with the mock (private field, no setter).
        FieldSetter.setField(mysqlComQueryPacketExecutor, MySQLComQueryPacketExecutor.class.getDeclaredField("textProtocolBackendHandler"), textProtocolBackendHandler);
        // A QueryResponseHeader must signal a QUERY response.
        when(textProtocolBackendHandler.execute()).thenReturn(new QueryResponseHeader(Collections.singletonList(mock(QueryHeader.class))));
        mysqlComQueryPacketExecutor.execute();
        assertThat(mysqlComQueryPacketExecutor.getResponseType(), is(ResponseType.QUERY));
    }

    @Test
    public void assertIsUpdateResponse() throws SQLException, NoSuchFieldException {
        MySQLComQueryPacketExecutor mysqlComQueryPacketExecutor = new MySQLComQueryPacketExecutor(packet, connectionSession);
        FieldSetter.setField(mysqlComQueryPacketExecutor, MySQLComQueryPacketExecutor.class.getDeclaredField("textProtocolBackendHandler"), textProtocolBackendHandler);
        // An UpdateResponseHeader must signal an UPDATE response.
        when(textProtocolBackendHandler.execute()).thenReturn(new UpdateResponseHeader(mock(SQLStatement.class)));
        mysqlComQueryPacketExecutor.execute();
        assertThat(mysqlComQueryPacketExecutor.getResponseType(), is(ResponseType.UPDATE));
    }
}
apache/incubator-shardingsphere
shardingsphere-proxy/shardingsphere-proxy-frontend/shardingsphere-proxy-frontend-mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/text/query/MySQLComQueryPacketExecutorTest.java
Java
apache-2.0
4,108
<?php
/*
 * Copyright 2014 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

namespace Google\Service\NetworkManagement;

/**
 * Generated IAM Binding model: associates a role with a list of members,
 * optionally gated by a condition expression.
 */
class Binding extends \Google\Collection
{
  protected $collection_key = 'members';
  // Type metadata consumed by the \Google\Collection/\Google\Model base
  // classes when (de)serializing the 'condition' field.
  protected $conditionType = Expr::class;
  protected $conditionDataType = '';
  /**
   * @var string[]
   */
  public $members;
  /**
   * @var string
   */
  public $role;

  /**
   * @param Expr
   */
  public function setCondition(Expr $condition)
  {
    // NOTE(review): $condition is not declared as a property on this class;
    // it appears to rely on the base \Google\Model's magic property handling
    // (standard for this generated code) — confirm against \Google\Model.
    $this->condition = $condition;
  }
  /**
   * @return Expr
   */
  public function getCondition()
  {
    return $this->condition;
  }
  /**
   * @param string[]
   */
  public function setMembers($members)
  {
    $this->members = $members;
  }
  /**
   * @return string[]
   */
  public function getMembers()
  {
    return $this->members;
  }
  /**
   * @param string
   */
  public function setRole($role)
  {
    $this->role = $role;
  }
  /**
   * @return string
   */
  public function getRole()
  {
    return $this->role;
  }
}

// Adding a class alias for backwards compatibility with the previous class name.
class_alias(Binding::class, 'Google_Service_NetworkManagement_Binding');
googleapis/google-api-php-client-services
src/NetworkManagement/Binding.php
PHP
apache-2.0
1,680
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.servicecomb.demo.registry;

import org.apache.servicecomb.demo.CategorizedTestCase;
import org.apache.servicecomb.demo.TestMgr;
import org.apache.servicecomb.provider.pojo.RpcReference;
import org.springframework.stereotype.Component;

/**
 * Demo test case: invokes a third-party service registered without a schema
 * ("thirdParty-no-schema-server") through an RPC reference, exercising schema
 * discovery over the REST transport. Highway/all-transport hooks are no-ops.
 */
@Component
public class SchemaDiscoveryTestCase implements CategorizedTestCase {
  // Proxy for the remote ServerEndpoint schema; injected by the framework.
  @RpcReference(microserviceName = "thirdParty-no-schema-server", schemaId = "ServerEndpoint")
  IServerEndpoint serverEndpoint;

  @Override
  public void testRestTransport() throws Exception {
    // invoke thirdParty-no-schema-server(mocked by demo-multi-registries-server)
    TestMgr.check("hello", serverEndpoint.getName("hello"));
  }

  @Override
  public void testHighwayTransport() throws Exception {
    // Intentionally empty: this case only covers the REST transport.
  }

  @Override
  public void testAllTransport() throws Exception {
    // Intentionally empty: this case only covers the REST transport.
  }

  @Override
  public String getMicroserviceName() {
    return "thirdParty-service-center";
  }
}
ServiceComb/java-chassis
demo/demo-multi-registries/demo-multi-registries-client/src/main/java/org/apache/servicecomb/demo/registry/SchemaDiscoveryTestCase.java
Java
apache-2.0
1,724
/*
 * Copyright 2014-2016 CyberVision, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kaaproject.kaa.client.channel.failover.strategies;

import org.kaaproject.kaa.client.channel.TransportConnectionInfo;
import org.kaaproject.kaa.client.channel.failover.FailoverDecision;
import org.kaaproject.kaa.client.channel.failover.FailoverStatus;

import java.util.concurrent.TimeUnit;

/**
 * Failover strategy is responsible for producing failover decisions based on failover statuses.
 */
public interface FailoverStrategy {

    /**
     * Needs to be invoked to determine a decision that resolves the failover.
     *
     * @param failoverStatus current status of the failover.
     *
     * @return decision which is meant to resolve the failover.
     *
     * @see FailoverDecision
     * @see FailoverStatus
     */
    FailoverDecision onFailover(FailoverStatus failoverStatus);

    /**
     * Needs to be invoked once the client has recovered after a failover.
     *
     * @param connectionInfo server information
     *
     * @see org.kaaproject.kaa.client.channel.TransportConnectionInfo
     */
    void onRecover(TransportConnectionInfo connectionInfo);

    /**
     * Use the {@link #getTimeUnit()} method to get the time unit of the returned value.
     *
     * @return period of time after which the next attempt to reach a bootstrap server is made.
     */
    long getBootstrapServersRetryPeriod();

    /**
     * Use the {@link #getTimeUnit()} method to get the time unit of the returned value.
     *
     * @return period of time after which the next attempt to reach an operations server is made.
     */
    long getOperationServersRetryPeriod();

    /**
     * @return time unit used within a scope of current failover strategy.
     */
    TimeUnit getTimeUnit();
}
forGGe/kaa
client/client-multi/client-java-core/src/main/java/org/kaaproject/kaa/client/channel/failover/strategies/FailoverStrategy.java
Java
apache-2.0
2,267
/*
 * Copyright 2010 Martin Grotzke
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an &quot;AS IS&quot; BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package de.javakaffee.web.msm.serializer;

import java.util.Calendar;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.catalina.core.StandardContext;
import org.apache.catalina.loader.WebappLoader;

import de.javakaffee.web.msm.JavaSerializationTranscoder;
import de.javakaffee.web.msm.MemcachedBackupSession;
import de.javakaffee.web.msm.MemcachedBackupSessionManager;
import de.javakaffee.web.msm.SessionAttributesTranscoder;
import de.javakaffee.web.msm.TranscoderService;
import de.javakaffee.web.msm.serializer.TestClasses.Address;
import de.javakaffee.web.msm.serializer.TestClasses.Component;
import de.javakaffee.web.msm.serializer.TestClasses.Person;
import de.javakaffee.web.msm.serializer.TestClasses.Person.Gender;
import de.javakaffee.web.msm.serializer.javolution.JavolutionTranscoder;
import de.javakaffee.web.msm.serializer.kryo.KryoTranscoder;

/**
 * A simple benchmark for existing serialization strategies.
 *
 * @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a>
 */
public class Benchmark {

    /*
     * Historical results (50000 iterations):
     * -- JavaSerializationTranscoder --
     * Serializing 1000 sessions took 156863 msec. serialized size is 59016 bytes.
     * -- JavolutionTranscoder --
     * Serializing 1000 sessions took 251870 msec. serialized size is 138374 bytes.
     * -- KryoTranscoder --
     * Serializing 1000 sessions took 154816 msec. serialized size is 70122 bytes.
     */

    /**
     * Entry point: warms up each transcoder, then benchmarks them at three
     * session sizes and prints CSV results to stdout.
     */
    public static void main( final String[] args ) throws InterruptedException {

        //Thread.sleep( 1000 );

        final MemcachedBackupSessionManager manager = createManager();

        // some warmup
        final int warmupCycles = 100000;
        warmup( manager, new JavaSerializationTranscoder(), warmupCycles, 100, 3 );
        warmup( manager, new JavolutionTranscoder( Thread.currentThread().getContextClassLoader(), false ), warmupCycles, 100, 3 );
        warmup( manager, new KryoTranscoder(), warmupCycles, 100, 3 );
        recover();

        benchmark( manager, 10, 500, 4 /* 4^4 = 256 */ );
        benchmark( manager, 10, 100, 3 /* 3^3 = 27 */ );
        benchmark( manager, 10, 10, 2 /* 2^2 = 4 */ );

        // Thread.sleep( Integer.MAX_VALUE );
    }

    /**
     * Benchmarks all three transcoders with the given session shape and
     * prints one CSV line per transcoder.
     */
    private static void benchmark( final MemcachedBackupSessionManager manager, final int rounds, final int countPersons,
            final int nodesPerEdge ) throws InterruptedException {

        final Stats javaSerStats = new Stats();
        final Stats javaDeSerStats = new Stats();
        benchmark( manager, new JavaSerializationTranscoder(), javaSerStats, javaDeSerStats, rounds, countPersons, nodesPerEdge );

        recover();

        final Stats javolutionSerStats = new Stats();
        final Stats javolutionDeSerStats = new Stats();
        benchmark( manager, new JavolutionTranscoder( Thread.currentThread().getContextClassLoader(), false ),
                javolutionSerStats, javolutionDeSerStats, rounds, countPersons, nodesPerEdge );

        recover();

        final Stats kryoSerStats = new Stats();
        final Stats kryoDeSerStats = new Stats();
        benchmark( manager, new KryoTranscoder(), kryoSerStats, kryoDeSerStats, rounds, countPersons, nodesPerEdge );

        System.out.println( "Serialization,Size,Ser-Min,Ser-Avg,Ser-Max,Deser-Min,Deser-Avg,Deser-Max");
        System.out.println( toCSV( "Java", javaSerStats, javaDeSerStats ) );
        System.out.println( toCSV( "Javolution", javolutionSerStats, javolutionDeSerStats ) );
        System.out.println( toCSV( "Kryo", kryoSerStats, kryoDeSerStats ) );
    }

    /** Formats one CSV row: name, size, then min/avg/max for ser and deser. */
    private static String toCSV( final String name, final Stats serStats, final Stats deSerStats ) {
        return name + "," + serStats.size +","+ minAvgMax( serStats ) + "," + minAvgMax( deSerStats );
    }

    private static String minAvgMax( final Stats stats ) {
        return stats.min +","+ stats.avg +","+ stats.max;
    }

    /** Pause + GC between measurements so runs don't interfere with each other. */
    private static void recover() throws InterruptedException {
        Thread.sleep( 200 );
        System.gc();
        Thread.sleep( 200 );
    }

    /**
     * Measures serialization and deserialization of one representative session
     * for the given transcoder. Each round times 500 iterations; results are
     * accumulated into the provided Stats objects.
     */
    private static void benchmark( final MemcachedBackupSessionManager manager, final SessionAttributesTranscoder transcoder,
            final Stats serializationStats,
            final Stats deserializationStats,
            final int rounds, final int countPersons, final int nodesPerEdge ) throws InterruptedException {

        System.out.println( "Running benchmark for " + transcoder.getClass().getSimpleName() + "..." +
                " (rounds: "+ rounds +", persons: "+ countPersons +", nodes: "+ ((int)Math.pow( nodesPerEdge, nodesPerEdge ) + nodesPerEdge + 1 ) +")" );

        final TranscoderService transcoderService = new TranscoderService( transcoder );

        final MemcachedBackupSession session = createSession( manager, "123456789abcdefghijk987654321", countPersons, nodesPerEdge );
        final byte[] data = transcoderService.serialize( session );
        final int size = data.length;

        for( int r = 0; r < rounds; r++ ) {
            final long start = System.currentTimeMillis();
            for( int i = 0; i < 500; i++ ) {
                transcoderService.serialize( session );
            }
            serializationStats.registerSince( start );
            serializationStats.setSize( size );
        }

        System.gc();
        Thread.sleep( 100 );

        // deserialization
        for( int r = 0; r < rounds; r++ ) {
            final long start = System.currentTimeMillis();
            for( int i = 0; i < 500; i++ ) {
                transcoderService.deserialize( data, manager );
            }
            deserializationStats.registerSince( start );
            deserializationStats.setSize( size );
        }

    }

    /**
     * JIT warmup: runs serialization and deserialization in tight loops so the
     * timed benchmark measures steady-state performance.
     */
    private static void warmup( final MemcachedBackupSessionManager manager, final SessionAttributesTranscoder transcoder,
            final int loops, final int countPersons, final int nodesPerEdge )
        throws InterruptedException {

        final TranscoderService transcoderService = new TranscoderService( transcoder );
        final MemcachedBackupSession session = createSession( manager, "123456789abcdefghijk987654321", countPersons, nodesPerEdge );

        System.out.print("Performing warmup for serialization using "+ transcoder.getClass().getSimpleName() +"...");
        final long serWarmupStart = System.currentTimeMillis();
        for( int i = 0; i < loops; i++ ) transcoderService.serialize( session );
        System.out.println(" (" + (System.currentTimeMillis() - serWarmupStart) + " ms)");

        System.out.print("Performing warmup for deserialization...");
        final byte[] data = transcoderService.serialize( session );
        final long deserWarmupStart = System.currentTimeMillis();
        for( int i = 0; i < loops; i++ ) transcoderService.deserialize( data, manager );
        System.out.println(" (" + (System.currentTimeMillis() - deserWarmupStart) + " ms)");

    }

    /**
     * Builds a session populated with a mix of attribute types (string buffers,
     * a person graph, a component tree) to exercise the transcoders.
     */
    private static MemcachedBackupSession createSession( final MemcachedBackupSessionManager manager, final String id,
            final int countPersons, final int countNodesPerEdge ) {
        final MemcachedBackupSession session = manager.createEmptySession();
        session.setId( id );
        session.setValid( true );

        session.setAttribute( "stringbuffer", new StringBuffer( "<string\n&buffer/>" ) );
        session.setAttribute( "stringbuilder", new StringBuilder( "<string\n&buffer/>" ) );

        session.setAttribute( "persons", createPersons( countPersons ) );
        session.setAttribute( "mycontainer", new TestClasses.MyContainer() );

        session.setAttribute( "component", createComponents( countNodesPerEdge ) );

        return session;
    }

    /** Builds a component tree with countNodesPerEdge children per node. */
    private static Component createComponents( final int countNodesPerEdge ) {
        final Component root = new Component( "root" );
        for ( int i = 0; i < countNodesPerEdge; i++ ) {
            final Component node = new Component( "child" + i );
            addChildren( node, countNodesPerEdge );
            root.addChild( node );
        }
        return root;
    }

    private static void addChildren( final Component node, final int count ) {
        for ( int i = 0; i < count; i++ ) {
            node.addChild( new Component( node.getName() + "-" + i ) );
        }
    }

    /**
     * Builds persons with addresses and a friendship chain (each person is a
     * friend of the previous one), producing a linked object graph.
     */
    private static Person[] createPersons( final int countPersons ) {
        final Person[] persons = new Person[countPersons];
        for( int i = 0; i < countPersons; i++ ) {
            final Calendar dateOfBirth = Calendar.getInstance();
            dateOfBirth.set( Calendar.YEAR, dateOfBirth.get( Calendar.YEAR ) - 42 );
            final Person person = TestClasses.createPerson( "Firstname" + i + " Lastname" + i,
                    i % 2 == 0 ? Gender.FEMALE : Gender.MALE,
                    dateOfBirth,
                    "email" + i + "-1@example.org", "email" + i + "-2@example.org", "email" + i + "-3@example.org" );
            person.addAddress( new Address( "route66", "123456", "sincity", "sincountry" ) );

            if ( i > 0 ) {
                person.addFriend( persons[i - 1] );
            }

            persons[i] = person;
        }
        return persons;
    }

    /** Creates a minimally-wired manager whose loader uses the current TCCL. */
    private static MemcachedBackupSessionManager createManager() {
        final MemcachedBackupSessionManager manager = new MemcachedBackupSessionManager();

        final StandardContext container = new StandardContext();
        manager.setContainer( container );

        final WebappLoader webappLoader = new WebappLoader() {
            /**
             * {@inheritDoc}
             */
            @Override
            public ClassLoader getClassLoader() {
                return Thread.currentThread().getContextClassLoader();
            }
        };
        manager.getContainer().setLoader( webappLoader );

        return manager;
    }

    /**
     * Accumulates min/avg/max (and payload size) over registered timing samples.
     * Not thread-safe despite the AtomicInteger counter; intended for
     * single-threaded benchmark use.
     */
    static class Stats {

        long min;
        long max;
        double avg;
        int size;

        private boolean _first = true;
        private final AtomicInteger _count = new AtomicInteger();

        /**
         * A utility method that calculates the difference of the time
         * between the given <code>startInMillis</code> and {@link System#currentTimeMillis()}
         * and registers the difference via {@link #register(long)}.
         * @param startInMillis the time in millis that shall be subtracted from {@link System#currentTimeMillis()}.
         */
        public void registerSince( final long startInMillis ) {
            register( System.currentTimeMillis() - startInMillis );
        }

        public void setSize( final int size ) {
            this.size = size;
        }

        /**
         * Register the given value.
         * @param value the value to register.
         */
        public void register( final long value ) {
            if ( value < min || _first ) {
                min = value;
            }
            if ( value > max || _first ) {
                max = value;
            }
            // Running mean: scale previous mean by old count, add value, divide by new count.
            avg = ( avg * _count.get() + value ) / _count.incrementAndGet();
            _first = false;
        }

        /**
         * Returns a string array with labels and values of count, min, avg and max.
         * @return a String array.
         */
        public String[] getInfo() {
            return new String[] {
                    "Count = " + _count.get(),
                    "Min = "+ min,
                    "Avg = "+ avg,
                    "Max = "+ max
            };
        }

    }

}
qq254963746/memcached-session-manager
serializer-benchmark/src/main/java/de/javakaffee/web/msm/serializer/Benchmark.java
Java
apache-2.0
12,323
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.examples.signin2;

import org.apache.wicket.PageParameters;

/**
 * Simple home page. All content comes from the associated markup; the class
 * exists only to participate in the authenticated page hierarchy.
 *
 * @author Jonathan Locke
 */
public class Home extends AuthenticatedWebPage
{
	/**
	 * Constructor
	 *
	 * @param parameters
	 *            Page parameters (ignored since this is the home page)
	 */
	public Home(final PageParameters parameters)
	{
		// Intentionally empty: no components to add.
	}
}
astubbs/wicket.get-portals2
wicket-examples/src/main/java/org/apache/wicket/examples/signin2/Home.java
Java
apache-2.0
1,174
<?php
/**
 * Misc helper functions for the Advanced Responsive Video Embedder plugin.
 */

/**
 * Returns 'bool' when every key of $array is one of '', 'yes', 'no';
 * otherwise returns $array unchanged.
 *
 * @param array $array Option values keyed by choice.
 * @return string|array
 */
function arve_is_bool_option( $array ) {

	$yes_no = array( '' => 1, 'yes' => 1, 'no' => 1 );
	$check  = array_diff_key( $array, $yes_no );

	if ( empty( $check ) ) {
		return 'bool';
	} else {
		return $array;
	}
}

/**
 * Inline style for <pre> output (currently none).
 *
 * @return string
 */
function arve_get_pre_style() {
	return '';
}

/**
 * Loads the plugin's translation files.
 */
function arve_load_plugin_textdomain() {
	load_plugin_textdomain( ARVE_SLUG, false, dirname( dirname( plugin_basename( __FILE__ ) ) ) . '/languages/' );
}

/**
 * Returns the first value of an array without moving the caller's pointer copy.
 *
 * @param array $array Non-empty array.
 * @return mixed
 */
function arve_get_first_array_value( $array ) {

	reset( $array );
	$key = key( $array );

	return $array[ $key ];
}

/**
 * Returns a copy of $array with every key prefixed by $keyprefix.
 *
 * Fixed: the previous in-place set-then-unset loop silently lost entries when
 * a prefixed key collided with another original key (e.g. prefix 'p' on keys
 * 'a' and 'pa'), and emptied the array entirely for an empty prefix. Building
 * a new array is collision-safe and behaves identically otherwise.
 *
 * @param string $keyprefix Prefix to prepend to each key.
 * @param array  $array     Input array.
 * @return array
 */
function arve_prefix_array_keys( $keyprefix, $array ) {

	$prefixed = array();

	foreach ( $array as $key => $value ) {
		$prefixed[ $keyprefix . $key ] = $value;
	}

	return $prefixed;
}

/**
 * Returns the MIME type of $url when its file extension matches $ext.
 *
 * @param string $url File URL.
 * @param string $ext Expected lowercase extension.
 * @return string|false MIME type, or false on mismatch.
 */
function arve_check_filetype( $url, $ext ) {

	$check = wp_check_filetype( $url, wp_get_mime_types() );

	if ( strtolower( $check['ext'] ) === $ext ) {
		return $check['type'];
	} else {
		return false;
	}
}

/**
 * Calculates seconds based on youtube times
 *
 * Note: a value like '1h25m' (hours + minutes, no seconds) only matches the
 * 'h' pattern and loses the minutes; full 'XhYmZs' values work as expected.
 *
 * @param string $yttime The '1h25m13s' part of youtube URLs
 *
 * @return int|false Starttime in seconds, or false when nothing matches.
 */
function arve_youtube_time_to_seconds( $yttime ) {

	$format = false;
	$hours  = $minutes = $seconds = 0;

	// Ordered most-specific first so 'hms' wins over its sub-patterns.
	$pattern['hms'] = '/([0-9]+)h([0-9]+)m([0-9]+)s/'; // hours, minutes, seconds
	$pattern['ms']  = '/([0-9]+)m([0-9]+)s/';          // minutes, seconds
	$pattern['h']   = '/([0-9]+)h/';
	$pattern['m']   = '/([0-9]+)m/';
	$pattern['s']   = '/([0-9]+)s/';

	foreach ( $pattern as $key => $value ) {

		preg_match( $value, $yttime, $result );

		if ( ! empty( $result ) ) {
			$format = $key;
			break;
		}
	}

	switch ( $format ) {
		case 'hms':
			$hours   = $result[1];
			$minutes = $result[2];
			$seconds = $result[3];
			break;
		case 'ms':
			$minutes = $result[1];
			$seconds = $result[2];
			break;
		case 'h':
			$hours = $result[1];
			break;
		case 'm':
			$minutes = $result[1];
			break;
		case 's':
			$seconds = $result[1];
			break;
		default:
			return false;
	}

	return ( $hours * 60 * 60 ) + ( $minutes * 60 ) + $seconds;
}

/**
 * Calculates padding percentage value for a particular aspect ratio
 *
 * Fixed: the docblock always promised support for "percentage value with
 * percent sign" input, but the code split every input on ':' — a value such
 * as '56.25%' produced an undefined-index/division error. Percent input is
 * now returned directly, and malformed ratios (missing part, non-numeric,
 * zero width) fall back to the same default used for wp_error input.
 *
 * @since 4.2.0
 *
 * @param string $aspect_ratio '4:3' or percentage value with percent sign
 *
 * @return float
 */
function arve_aspect_ratio_to_percentage( $aspect_ratio ) {

	if ( is_wp_error( $aspect_ratio ) ) {
		// NOTE(review): 52.25 looks like a typo for 56.25 (16:9), but it is
		// kept as-is to preserve existing output.
		return 52.25;
	}

	// Direct percentage input, e.g. '56.25%'.
	if ( is_string( $aspect_ratio ) && substr( $aspect_ratio, -1 ) === '%' ) {
		return (float) rtrim( $aspect_ratio, '%' );
	}

	$a = explode( ':', (string) $aspect_ratio );

	// Guard malformed ratios and division by zero.
	if ( count( $a ) !== 2 || ! is_numeric( $a[0] ) || ! is_numeric( $a[1] ) || 0 == $a[0] ) {
		return 52.25;
	}

	return ( ( $a[1] / $a[0] ) * 100 );
}

/**
 * Calculates the pixel height for a given width and aspect ratio.
 *
 * @since 8.2.0
 *
 * @param int|string $width        Width in pixels (must be > 100).
 * @param string     $aspect_ratio Ratio such as '16:9'; empty defaults to '16:9'.
 * @return float|false Height, or false when width/ratio are unusable.
 */
function arve_calculate_height( $width, $aspect_ratio ) {

	$width        = (int) $width;
	$aspect_ratio = empty( $aspect_ratio ) ? '16:9' : $aspect_ratio;
	$percent      = arve_aspect_ratio_to_percentage( $aspect_ratio );

	if ( $width > 100 && $percent ) {
		return ( ( $width / 100 ) * $percent );
	}

	return false;
}
sifonsecac/capitalino-errante
wp-content/plugins/advanced-responsive-video-embedder/public/functions-misc.php
PHP
apache-2.0
2,833
/**********************************************************************
// @@@ START COPYRIGHT @@@
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
//
// @@@ END COPYRIGHT @@@
**********************************************************************/
/* -*-C++-*-
 *****************************************************************************
 *
 * File:         ComSchemaName.C
 * Description:  methods for class ComSchemaName
 *
 * Created:      9/12/95
 * Language:     C++
 *
 *
 *****************************************************************************
 */

#define SQLPARSERGLOBALS_NADEFAULTS   // first

#include <string.h>
#include "ComASSERT.h"
#include "ComMPLoc.h"
#include "ComSchemaName.h"
#include "ComSqlText.h"
#include "NAString.h"
#include "SqlParserGlobals.h"   // last

//
// constructors
//

//
// default constructor
//
ComSchemaName::ComSchemaName ()
{
}

//
// initializing constructor: parses an external-format schema name
//
ComSchemaName::ComSchemaName (const NAString &externalSchemaName)
{
  scan(externalSchemaName);
}

//
// initializing constructor: also reports how many bytes were consumed
//
ComSchemaName::ComSchemaName (const NAString &externalSchemaName,
                              size_t &bytesScanned)
{
  scan(externalSchemaName, bytesScanned);
}

//
// initializing constructor: schema part only, no catalog
//
ComSchemaName::ComSchemaName (const ComAnsiNamePart &schemaNamePart)
: schemaNamePart_ (schemaNamePart)
{
}

//
// initializing constructor: catalog + schema parts
//
ComSchemaName::ComSchemaName (const ComAnsiNamePart &catalogNamePart,
                              const ComAnsiNamePart &schemaNamePart)
: catalogNamePart_ (catalogNamePart)
, schemaNamePart_ (schemaNamePart)
{
  // "cat." is invalid
  if (NOT catalogNamePart_.isEmpty() AND schemaNamePart_.isEmpty())
    clear();
}

//
// virtual destructor
//
ComSchemaName::~ComSchemaName ()
{
}

//
// assignment operator: re-parses the right-hand external-format name
//
ComSchemaName &ComSchemaName::operator= (const NAString &rhsSchemaName)
{
  clear();
  scan(rhsSchemaName);
  return *this;
}

//
// accessors
//
const NAString &
ComSchemaName::getCatalogNamePartAsAnsiString(NABoolean) const
{
  return catalogNamePart_.getExternalName();
}

const NAString &
ComSchemaName::getSchemaNamePartAsAnsiString(NABoolean) const
{
  return schemaNamePart_.getExternalName();
}

// Builds "cat.sch" (or just "sch" when no catalog part is present).
// In debug builds a schema-only or empty name prints a warning, since a
// complete external name is expected to carry both parts.
NAString ComSchemaName::getExternalName(NABoolean) const
{
  NAString extSchemaName;
#ifndef NDEBUG
  Int32 ok = 0;
#endif

  if (NOT schemaNamePart_.isEmpty())
  {
    if (NOT catalogNamePart_.isEmpty())
    {
#ifndef NDEBUG
      ok = 1;
#endif
      extSchemaName = getCatalogNamePartAsAnsiString() + "." + getSchemaNamePartAsAnsiString();
    }
    else
    {
      extSchemaName = getSchemaNamePartAsAnsiString();
    }
  }

#ifndef NDEBUG
  if (!ok)
    cerr << "Warning: incomplete ComSchemaName " << extSchemaName << endl;
#endif

  return extSchemaName;
}

//
// mutators
//

//
// Resets data members
//
void ComSchemaName::clear()
{
  catalogNamePart_.clear();
  schemaNamePart_.clear();
}

//
// private methods
//

//
// Scans (parses) input external-format schema name.
//
NABoolean ComSchemaName::scan(const NAString &externalSchemaName)
{
  size_t bytesScanned;
  return scan(externalSchemaName, bytesScanned);
}

//
// Scans (parses) input external-format schema name.
//
// This method assumes that the parameter externalSchemaName only
// contains the external-format schema name.  The syntax of an
// schema name is
//
//   [ <catalog-name-part> . ] <schema-name-part>
//
// A schema name part must be specified; the catalog name part is optional.
//
// The method returns the number of bytes scanned via the parameter
// bytesScanned.  If the scanned schema name is illegal, bytesScanned
// contains the number of bytes examined when the name is determined
// to be invalid.
//
// If the specified external-format schema name is valid, this method
// returns TRUE and saves the parsed ANSI SQL name part into data
// members catalogNamePart_ and schemaNamePart_; otherwise, it returns
// FALSE and does not changes the contents of the data members.
//
NABoolean ComSchemaName::scan(const NAString &externalSchemaName,
                              size_t &bytesScanned)
{
  size_t count;
  size_t externalSchemaNameLen = externalSchemaName.length();
  bytesScanned = 0;

  // Wraps an already-validated internal-format string into a name part.
  #define COPY_VALIDATED_STRING(x)	\
		ComAnsiNamePart(x, ComAnsiNamePart::INTERNAL_FORMAT)

  // NSK (Tandem MP) location syntax, e.g. "\SYS.$VOL.SUBVOL": handled before
  // the ANSI "cat.sch" path when the NSK name type is active or the name
  // starts with a backslash.
  if (( SqlParser_Initialized() && SqlParser_NAMETYPE == DF_NSK) ||
      (!SqlParser_Initialized() && *externalSchemaName.data() == '\\'))
  {
    ComMPLoc loc(externalSchemaName);
    switch (loc.getFormat())
    {
      case ComMPLoc::SUBVOL:
        catalogNamePart_ = COPY_VALIDATED_STRING(loc.getSysDotVol());
        schemaNamePart_  = COPY_VALIDATED_STRING(loc.getSubvolName());
        bytesScanned = externalSchemaNameLen;
        return TRUE;

      case ComMPLoc::FILE:
        if (!loc.hasSubvolName())
        {
          catalogNamePart_ = "";
          schemaNamePart_  = COPY_VALIDATED_STRING(loc.getFileName());
          bytesScanned = externalSchemaNameLen;
          return TRUE;
        }
        // FILE with a subvol name falls out of the switch and is scanned
        // as an ANSI name below.
    }
  }

  // Each ComAnsiNamePart ctor below must be preceded by "count = 0;"
  // -- see ComAnsiNamePart.cpp, and for a better scan implementation,
  // see ComObjectName::scan() + ComObjectName(bytesScanned) ctor.

  // ---------------------------------------------------------------------
  // Scan the leftmost ANSI SQL name part.
  // ---------------------------------------------------------------------
  count = 0;
  ComAnsiNamePart part1(externalSchemaName, count);
  bytesScanned += count;
  if (NOT part1.isValid())
    return FALSE;

  if (bytesScanned >= externalSchemaNameLen)
  {
    ComASSERT(bytesScanned == externalSchemaNameLen);
    schemaNamePart_ = part1;
    return TRUE;		// "sch"
  }

  // Get past the period separator
  if (NOT ComSqlText.isPeriod(externalSchemaName[bytesScanned++]))
    return FALSE;

  // ---------------------------------------------------------------------
  // Scan the last ANSI SQL name part
  // ---------------------------------------------------------------------
#pragma nowarn(1506)   // warning elimination
  Int32 remainingLen = externalSchemaNameLen - bytesScanned;
#pragma warn(1506)  // warning elimination
  NAString remainingName = externalSchemaName(bytesScanned, remainingLen);
  count = 0;
  ComAnsiNamePart part2(remainingName, count);
  bytesScanned += count;
  if (NOT part2.isValid())
    return FALSE;

  if (bytesScanned == externalSchemaNameLen)
  {
    catalogNamePart_ = part1;
    schemaNamePart_  = part2;
    return TRUE;		// "cat.sch"
  }

  // The specified external-format object name contains some extra
  // trailing characters -- illegal.
  //
  return FALSE;

} // ComSchemaName::scan()

// Intentionally empty: no version-specific definition-schema handling here.
void ComSchemaName::setDefinitionSchemaName (const COM_VERSION version)
{
}
rlugojr/incubator-trafodion
core/sql/common/ComSchemaName.cpp
C++
apache-2.0
7,501
/*
 * Copyright 2018 ImpactDevelopment
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package clientapi.load.mixin.extension;

import net.minecraft.util.Session;
import net.minecraft.util.Timer;

/**
 * Extension interface exposing Minecraft client internals that are not
 * publicly accessible in the vanilla class.
 *
 * NOTE(review): judging by the package name, this is presumably implemented
 * by the {@code Minecraft} class through a mixin -- confirm against the
 * corresponding mixin class before relying on that.
 *
 * @author Brady
 * @since 2/20/2017
 */
public interface IMinecraft {

    /**
     * @return the game's {@link Timer} instance
     */
    Timer getTimer();

    /**
     * Sets the game's session (the logged-in account credentials holder).
     *
     * @param session The new Session
     */
    void setSession(Session session);

    /**
     * Sets the right click delay timer (the tick countdown before another
     * right click action is processed).
     *
     * @param delay The new right click delay
     */
    void setRightClickDelayTimer(int delay);

    /**
     * Clicks a mouse button.
     *
     * @param button The button to click (LEFT, MIDDLE, RIGHT)
     *               -- presumably the conventional 0/1/2 int codes; verify
     *               against the implementing mixin
     */
    void clickMouse(int button);
}
ZeroMemes/ClientAPI
src/main/java/clientapi/load/mixin/extension/IMinecraft.java
Java
apache-2.0
1,304
package com.akari.tickets.beans;

import java.util.List;

/**
 * Response bean for a train-query request -- presumably deserialized from
 * the 12306 ticket-query JSON endpoint (judging by the project); confirm
 * against the calling code.
 *
 * NOTE(review): field names ({@code data}, {@code secretStr},
 * {@code queryLeftNewDTO}) appear to mirror JSON keys for reflective
 * (de)serialization -- do not rename them without checking the mapper
 * configuration.
 *
 * Created by Akari on 2017/2/17.
 */
public class QueryTrainsResponse {

    // One entry per train returned by the query.
    private List<Data> data;

    public List<Data> getData() {
        return data;
    }

    public void setData(List<Data> data) {
        this.data = data;
    }

    /**
     * A single train record within the response.
     */
    public static class Data {

        // Opaque token echoed back to the server when ordering a ticket.
        private String secretStr;

        // Detailed seat/availability information for this train.
        private QueryLeftNewDTO queryLeftNewDTO;

        public String getSecretStr() {
            return secretStr;
        }

        public void setSecretStr(String secretStr) {
            this.secretStr = secretStr;
        }

        public QueryLeftNewDTO getQueryLeftNewDTO() {
            return queryLeftNewDTO;
        }

        public void setQueryLeftNewDTO(QueryLeftNewDTO queryLeftNewDTO) {
            this.queryLeftNewDTO = queryLeftNewDTO;
        }
    }
}
Akari10032/12306
app/src/main/java/com/akari/tickets/beans/QueryTrainsResponse.java
Java
apache-2.0
848
sap.ui.define([
	'jquery.sap.global',
	'sap/ui/core/Fragment',
	'sap/ui/core/mvc/Controller',
	'sap/ui/model/Filter',
	'sap/ui/model/json/JSONModel'
], function(jQuery, Fragment, Controller, Filter, JSONModel) {
	"use strict";

	var CController = Controller.extend("sap.m.sample.InputAssistedTwoValues.C", {

		// Id of the input control that most recently requested value help;
		// used to write the selected item back into the right input.
		inputId: '',

		onInit: function () {
			// set explored app's demo model on this sample
			var oModel = new JSONModel(jQuery.sap.getModulePath("sap.ui.demo.mock", "/products.json"));
			this.getView().setModel(oModel);
		},

		/**
		 * Opens the (lazily created) value-help dialog for the input that
		 * fired the valueHelpRequest event.
		 *
		 * Fix: the handler receives the event object, not the controller;
		 * use the public accessors oEvent.getSource().getId() instead of
		 * reaching into the private members oSource/sId (same value,
		 * supported API).
		 *
		 * @param {sap.ui.base.Event} oEvent the valueHelpRequest event
		 */
		handleValueHelp : function (oEvent) {
			this.inputId = oEvent.getSource().getId();

			// create value help dialog on first use only
			if (!this._valueHelpDialog) {
				this._valueHelpDialog = sap.ui.xmlfragment(
					"sap.m.sample.InputAssistedTwoValues.Dialog",
					this
				);
				this.getView().addDependent(this._valueHelpDialog);
			}

			// open value help dialog
			this._valueHelpDialog.open();
		},

		/**
		 * Filters the dialog's item list so that only products whose Name
		 * contains the typed search value remain.
		 *
		 * @param {sap.ui.base.Event} evt the dialog's search event
		 */
		_handleValueHelpSearch : function (evt) {
			var sValue = evt.getParameter("value");
			var oFilter = new Filter(
				"Name",
				sap.ui.model.FilterOperator.Contains,
				sValue
			);
			evt.getSource().getBinding("items").filter([oFilter]);
		},

		/**
		 * Writes the chosen item's title back into the requesting input
		 * (if a selection was made) and clears the dialog's filter so the
		 * full list shows on the next open.
		 *
		 * @param {sap.ui.base.Event} evt the dialog's confirm/cancel event
		 */
		_handleValueHelpClose : function (evt) {
			var oSelectedItem = evt.getParameter("selectedItem");
			if (oSelectedItem) {
				var productInput = this.byId(this.inputId);
				productInput.setValue(oSelectedItem.getTitle());
			}
			evt.getSource().getBinding("items").filter([]);
		}
	});

	return CController;
});
SQCLabs/openui5
src/sap.m/test/sap/m/demokit/sample/InputAssistedTwoValues/C.controller.js
JavaScript
apache-2.0
1,505
/** * @class Ext.ClassManager * * @author Jacky Nguyen <jacky@sencha.com> * @aside guide class_system * @aside video class-system * * Ext.ClassManager manages all classes and handles mapping from string class name to * actual class objects throughout the whole framework. It is not generally accessed directly, rather through * these convenient shorthands: * * - {@link Ext#define Ext.define} * - {@link Ext#create Ext.create} * - {@link Ext#widget Ext.widget} * - {@link Ext#getClass Ext.getClass} * - {@link Ext#getClassName Ext.getClassName} * * ## Basic syntax: * * Ext.define(className, properties); * * in which `properties` is an object represent a collection of properties that apply to the class. See * {@link Ext.ClassManager#create} for more detailed instructions. * * Ext.define('Person', { * name: 'Unknown', * * constructor: function(name) { * if (name) { * this.name = name; * } * * return this; * }, * * eat: function(foodType) { * alert("I'm eating: " + foodType); * * return this; * } * }); * * var aaron = new Person("Aaron"); * aaron.eat("Sandwich"); // alert("I'm eating: Sandwich"); * * Ext.Class has a powerful set of extensible {@link Ext.Class#registerPreprocessor pre-processors} which takes care of * everything related to class creation, including but not limited to inheritance, mixins, configuration, statics, etc. 
* * ## Inheritance: * * Ext.define('Developer', { * extend: 'Person', * * constructor: function(name, isGeek) { * this.isGeek = isGeek; * * // Apply a method from the parent class' prototype * this.callParent([name]); * * return this; * * }, * * code: function(language) { * alert("I'm coding in: " + language); * * this.eat("Bugs"); * * return this; * } * }); * * var jacky = new Developer("Jacky", true); * jacky.code("JavaScript"); // alert("I'm coding in: JavaScript"); * // alert("I'm eating: Bugs"); * * See {@link Ext.Base#callParent} for more details on calling superclass' methods * * ## Mixins: * * Ext.define('CanPlayGuitar', { * playGuitar: function() { * alert("F#...G...D...A"); * } * }); * * Ext.define('CanComposeSongs', { * composeSongs: function() { } * }); * * Ext.define('CanSing', { * sing: function() { * alert("I'm on the highway to hell...") * } * }); * * Ext.define('Musician', { * extend: 'Person', * * mixins: { * canPlayGuitar: 'CanPlayGuitar', * canComposeSongs: 'CanComposeSongs', * canSing: 'CanSing' * } * }) * * Ext.define('CoolPerson', { * extend: 'Person', * * mixins: { * canPlayGuitar: 'CanPlayGuitar', * canSing: 'CanSing' * }, * * sing: function() { * alert("Ahem...."); * * this.mixins.canSing.sing.call(this); * * alert("[Playing guitar at the same time...]"); * * this.playGuitar(); * } * }); * * var me = new CoolPerson("Jacky"); * * me.sing(); // alert("Ahem..."); * // alert("I'm on the highway to hell..."); * // alert("[Playing guitar at the same time...]"); * // alert("F#...G...D...A"); * * ## Config: * * Ext.define('SmartPhone', { * config: { * hasTouchScreen: false, * operatingSystem: 'Other', * price: 500 * }, * * isExpensive: false, * * constructor: function(config) { * this.initConfig(config); * * return this; * }, * * applyPrice: function(price) { * this.isExpensive = (price > 500); * * return price; * }, * * applyOperatingSystem: function(operatingSystem) { * if (!(/^(iOS|Android|BlackBerry)$/i).test(operatingSystem)) { * return 
'Other'; * } * * return operatingSystem; * } * }); * * var iPhone = new SmartPhone({ * hasTouchScreen: true, * operatingSystem: 'iOS' * }); * * iPhone.getPrice(); // 500; * iPhone.getOperatingSystem(); // 'iOS' * iPhone.getHasTouchScreen(); // true; * * iPhone.isExpensive; // false; * iPhone.setPrice(600); * iPhone.getPrice(); // 600 * iPhone.isExpensive; // true; * * iPhone.setOperatingSystem('AlienOS'); * iPhone.getOperatingSystem(); // 'Other' * * ## Statics: * * Ext.define('Computer', { * statics: { * factory: function(brand) { * // 'this' in static methods refer to the class itself * return new this(brand); * } * }, * * constructor: function() { } * }); * * var dellComputer = Computer.factory('Dell'); * * Also see {@link Ext.Base#statics} and {@link Ext.Base#self} for more details on accessing * static properties within class methods * * @singleton */ (function(Class, alias, arraySlice, arrayFrom, global) { //<if nonBrowser> var isNonBrowser = typeof window == 'undefined'; //</if> var Manager = Ext.ClassManager = { /** * @property classes * @type Object * All classes which were defined through the ClassManager. Keys are the * name of the classes and the values are references to the classes. * @private */ classes: {}, /** * @private */ existCache: {}, /** * @private */ namespaceRewrites: [{ from: 'Ext.', to: Ext }], /** * @private */ maps: { alternateToName: {}, aliasToName: {}, nameToAliases: {}, nameToAlternates: {} }, /** @private */ enableNamespaceParseCache: true, /** @private */ namespaceParseCache: {}, /** @private */ instantiators: [], /** * Checks if a class has already been created. 
* * @param {String} className * @return {Boolean} exist */ isCreated: function(className) { var existCache = this.existCache, i, ln, part, root, parts; //<debug error> if (typeof className != 'string' || className.length < 1) { throw new Error("[Ext.ClassManager] Invalid classname, must be a string and must not be empty"); } //</debug> if (this.classes[className] || existCache[className]) { return true; } root = global; parts = this.parseNamespace(className); for (i = 0, ln = parts.length; i < ln; i++) { part = parts[i]; if (typeof part != 'string') { root = part; } else { if (!root || !root[part]) { return false; } root = root[part]; } } existCache[className] = true; this.triggerCreated(className); return true; }, /** * @private */ createdListeners: [], /** * @private */ nameCreatedListeners: {}, /** * @private */ triggerCreated: function(className) { var listeners = this.createdListeners, nameListeners = this.nameCreatedListeners, alternateNames = this.maps.nameToAlternates[className], names = [className], i, ln, j, subLn, listener, name; for (i = 0,ln = listeners.length; i < ln; i++) { listener = listeners[i]; listener.fn.call(listener.scope, className); } if (alternateNames) { names.push.apply(names, alternateNames); } for (i = 0,ln = names.length; i < ln; i++) { name = names[i]; listeners = nameListeners[name]; if (listeners) { for (j = 0,subLn = listeners.length; j < subLn; j++) { listener = listeners[j]; listener.fn.call(listener.scope, name); } delete nameListeners[name]; } } }, /** * @private */ onCreated: function(fn, scope, className) { var listeners = this.createdListeners, nameListeners = this.nameCreatedListeners, listener = { fn: fn, scope: scope }; if (className) { if (this.isCreated(className)) { fn.call(scope, className); return; } if (!nameListeners[className]) { nameListeners[className] = []; } nameListeners[className].push(listener); } else { listeners.push(listener); } }, /** * Supports namespace rewriting * @private */ parseNamespace: 
function(namespace) { //<debug error> if (typeof namespace != 'string') { throw new Error("[Ext.ClassManager] Invalid namespace, must be a string"); } //</debug> var cache = this.namespaceParseCache; if (this.enableNamespaceParseCache) { if (cache.hasOwnProperty(namespace)) { return cache[namespace]; } } var parts = [], rewrites = this.namespaceRewrites, root = global, name = namespace, rewrite, from, to, i, ln; for (i = 0, ln = rewrites.length; i < ln; i++) { rewrite = rewrites[i]; from = rewrite.from; to = rewrite.to; if (name === from || name.substring(0, from.length) === from) { name = name.substring(from.length); if (typeof to != 'string') { root = to; } else { parts = parts.concat(to.split('.')); } break; } } parts.push(root); parts = parts.concat(name.split('.')); if (this.enableNamespaceParseCache) { cache[namespace] = parts; } return parts; }, /** * Creates a namespace and assign the `value` to the created object * * Ext.ClassManager.setNamespace('MyCompany.pkg.Example', someObject); * alert(MyCompany.pkg.Example === someObject); // alerts true * * @param {String} name * @param {Mixed} value */ setNamespace: function(name, value) { var root = global, parts = this.parseNamespace(name), ln = parts.length - 1, leaf = parts[ln], i, part; for (i = 0; i < ln; i++) { part = parts[i]; if (typeof part != 'string') { root = part; } else { if (!root[part]) { root[part] = {}; } root = root[part]; } } root[leaf] = value; return root[leaf]; }, /** * The new Ext.ns, supports namespace rewriting * @private */ createNamespaces: function() { var root = global, parts, part, i, j, ln, subLn; for (i = 0, ln = arguments.length; i < ln; i++) { parts = this.parseNamespace(arguments[i]); for (j = 0, subLn = parts.length; j < subLn; j++) { part = parts[j]; if (typeof part != 'string') { root = part; } else { if (!root[part]) { root[part] = {}; } root = root[part]; } } } return root; }, /** * Sets a name reference to a class. 
* * @param {String} name * @param {Object} value * @return {Ext.ClassManager} this */ set: function(name, value) { var me = this, maps = me.maps, nameToAlternates = maps.nameToAlternates, targetName = me.getName(value), alternates; me.classes[name] = me.setNamespace(name, value); if (targetName && targetName !== name) { maps.alternateToName[name] = targetName; alternates = nameToAlternates[targetName] || (nameToAlternates[targetName] = []); alternates.push(name); } return this; }, /** * Retrieve a class by its name. * * @param {String} name * @return {Ext.Class} class */ get: function(name) { var classes = this.classes; if (classes[name]) { return classes[name]; } var root = global, parts = this.parseNamespace(name), part, i, ln; for (i = 0, ln = parts.length; i < ln; i++) { part = parts[i]; if (typeof part != 'string') { root = part; } else { if (!root || !root[part]) { return null; } root = root[part]; } } return root; }, /** * Register the alias for a class. * * @param {Ext.Class/String} cls a reference to a class or a className * @param {String} alias Alias to use when referring to this class */ setAlias: function(cls, alias) { var aliasToNameMap = this.maps.aliasToName, nameToAliasesMap = this.maps.nameToAliases, className; if (typeof cls == 'string') { className = cls; } else { className = this.getName(cls); } if (alias && aliasToNameMap[alias] !== className) { //<debug info> if (aliasToNameMap[alias]) { Ext.Logger.info("[Ext.ClassManager] Overriding existing alias: '" + alias + "' " + "of: '" + aliasToNameMap[alias] + "' with: '" + className + "'. Be sure it's intentional."); } //</debug> aliasToNameMap[alias] = className; } if (!nameToAliasesMap[className]) { nameToAliasesMap[className] = []; } if (alias) { Ext.Array.include(nameToAliasesMap[className], alias); } return this; }, /** * Get a reference to the class by its alias. 
* * @param {String} alias * @return {Ext.Class} class */ getByAlias: function(alias) { return this.get(this.getNameByAlias(alias)); }, /** * Get the name of a class by its alias. * * @param {String} alias * @return {String} className */ getNameByAlias: function(alias) { return this.maps.aliasToName[alias] || ''; }, /** * Get the name of a class by its alternate name. * * @param {String} alternate * @return {String} className */ getNameByAlternate: function(alternate) { return this.maps.alternateToName[alternate] || ''; }, /** * Get the aliases of a class by the class name * * @param {String} name * @return {Array} aliases */ getAliasesByName: function(name) { return this.maps.nameToAliases[name] || []; }, /** * Get the name of the class by its reference or its instance; * usually invoked by the shorthand {@link Ext#getClassName Ext.getClassName} * Ext.ClassManager.getName(Ext.Action); // returns "Ext.Action" * @param {Ext.Class/Object} object * @return {String} className * @markdown */ getName: function(object) { return object && object.$className || ''; }, /** * Get the class of the provided object; returns null if it's not an instance * of any class created with Ext.define. 
This is usually invoked by the shorthand {@link Ext#getClass Ext.getClass} * * var component = new Ext.Component(); * * Ext.ClassManager.getClass(component); // returns Ext.Component * * @param {Object} object * @return {Ext.Class} class */ getClass: function(object) { return object && object.self || null; }, /** * @private */ create: function(className, data, createdFn) { //<debug error> if (typeof className != 'string') { throw new Error("[Ext.define] Invalid class name '" + className + "' specified, must be a non-empty string"); } //</debug> data.$className = className; return new Class(data, function() { var postprocessorStack = data.postprocessors || Manager.defaultPostprocessors, registeredPostprocessors = Manager.postprocessors, index = 0, postprocessors = [], postprocessor, process, i, ln, j, subLn, postprocessorProperties, postprocessorProperty; delete data.postprocessors; for (i = 0,ln = postprocessorStack.length; i < ln; i++) { postprocessor = postprocessorStack[i]; if (typeof postprocessor == 'string') { postprocessor = registeredPostprocessors[postprocessor]; postprocessorProperties = postprocessor.properties; if (postprocessorProperties === true) { postprocessors.push(postprocessor.fn); } else if (postprocessorProperties) { for (j = 0,subLn = postprocessorProperties.length; j < subLn; j++) { postprocessorProperty = postprocessorProperties[j]; if (data.hasOwnProperty(postprocessorProperty)) { postprocessors.push(postprocessor.fn); break; } } } } else { postprocessors.push(postprocessor); } } process = function(clsName, cls, clsData) { postprocessor = postprocessors[index++]; if (!postprocessor) { Manager.set(className, cls); if (createdFn) { createdFn.call(cls, cls); } Manager.triggerCreated(className); return; } if (postprocessor.call(this, clsName, cls, clsData, process) !== false) { process.apply(this, arguments); } }; process.call(Manager, className, this, data); }); }, createOverride: function(className, data) { var overriddenClassName = 
data.override; delete data.override; this.existCache[className] = true; // Override the target class right after it's created this.onCreated(function() { this.get(overriddenClassName).override(data); // This push the overridding file itself into Ext.Loader.history // Hence if the target class never exists, the overriding file will // never be included in the build this.triggerCreated(className); }, this, overriddenClassName); return this; }, /** * Instantiate a class by its alias; usually invoked by the convenient shorthand {@link Ext#createByAlias Ext.createByAlias} * If {@link Ext.Loader} is {@link Ext.Loader#setConfig enabled} and the class has not been defined yet, it will * attempt to load the class via synchronous loading. * * var window = Ext.ClassManager.instantiateByAlias('widget.window', { width: 600, height: 800, ... }); * * @param {String} alias * @param {Mixed...} args Additional arguments after the alias will be passed to the class constructor. * @return {Object} instance */ instantiateByAlias: function() { var alias = arguments[0], args = arraySlice.call(arguments), className = this.getNameByAlias(alias); if (!className) { className = this.maps.aliasToName[alias]; //<debug error> if (!className) { throw new Error("[Ext.createByAlias] Cannot create an instance of unrecognized alias: " + alias); } //</debug> //<debug warn> Ext.Logger.warn("[Ext.Loader] Synchronously loading '" + className + "'; consider adding " + "Ext.require('" + alias + "') above Ext.onReady"); //</debug> Ext.syncRequire(className); } args[0] = className; return this.instantiate.apply(this, args); }, /** * Instantiate a class by either full name, alias or alternate name; usually invoked by the convenient * shorthand {@link Ext#create Ext.create} * * If {@link Ext.Loader} is {@link Ext.Loader#setConfig enabled} and the class has not been defined yet, it will * attempt to load the class via synchronous loading. 
* * For example, all these three lines return the same result: * * // alias * var window = Ext.ClassManager.instantiate('widget.window', { width: 600, height: 800, ... }); * * // alternate name * var window = Ext.ClassManager.instantiate('Ext.Window', { width: 600, height: 800, ... }); * * // full class name * var window = Ext.ClassManager.instantiate('Ext.window.Window', { width: 600, height: 800, ... }); * * @param {String} name * @param {Mixed} args,... Additional arguments after the name will be passed to the class' constructor. * @return {Object} instance */ instantiate: function() { var name = arguments[0], args = arraySlice.call(arguments, 1), alias = name, possibleName, cls; if (typeof name != 'function') { //<debug error> if ((typeof name != 'string' || name.length < 1)) { throw new Error("[Ext.create] Invalid class name or alias '" + name + "' specified, must be a non-empty string"); } //</debug> cls = this.get(name); } else { cls = name; } // No record of this class name, it's possibly an alias, so look it up if (!cls) { possibleName = this.getNameByAlias(name); if (possibleName) { name = possibleName; cls = this.get(name); } } // Still no record of this class name, it's possibly an alternate name, so look it up if (!cls) { possibleName = this.getNameByAlternate(name); if (possibleName) { name = possibleName; cls = this.get(name); } } // Still not existing at this point, try to load it via synchronous mode as the last resort if (!cls) { //<debug warn> //<if nonBrowser> !isNonBrowser && //</if> Ext.Logger.warn("[Ext.Loader] Synchronously loading '" + name + "'; consider adding '" + ((possibleName) ? 
alias : name) + "' explicitly as a require of the corresponding class"); //</debug> Ext.syncRequire(name); cls = this.get(name); } //<debug error> if (!cls) { throw new Error("[Ext.create] Cannot create an instance of unrecognized class name / alias: " + alias); } if (typeof cls != 'function') { throw new Error("[Ext.create] '" + name + "' is a singleton and cannot be instantiated"); } //</debug> return this.getInstantiator(args.length)(cls, args); }, /** * @private * @param name * @param args */ dynInstantiate: function(name, args) { args = arrayFrom(args, true); args.unshift(name); return this.instantiate.apply(this, args); }, /** * @private * @param length */ getInstantiator: function(length) { var instantiators = this.instantiators, instantiator; instantiator = instantiators[length]; if (!instantiator) { var i = length, args = []; for (i = 0; i < length; i++) { args.push('a[' + i + ']'); } instantiator = instantiators[length] = new Function('c', 'a', 'return new c(' + args.join(',') + ')'); //<debug> instantiator.displayName = "Ext.ClassManager.instantiate" + length; //</debug> } return instantiator; }, /** * @private */ postprocessors: {}, /** * @private */ defaultPostprocessors: [], /** * Register a post-processor function. * * @private * @param {String} name * @param {Function} postprocessor */ registerPostprocessor: function(name, fn, properties, position, relativeTo) { if (!position) { position = 'last'; } if (!properties) { properties = [name]; } this.postprocessors[name] = { name: name, properties: properties || false, fn: fn }; this.setDefaultPostprocessorPosition(name, position, relativeTo); return this; }, /** * Set the default post processors array stack which are applied to every class. * * @private * @param {String/Array} The name of a registered post processor or an array of registered names. 
* @return {Ext.ClassManager} this */ setDefaultPostprocessors: function(postprocessors) { this.defaultPostprocessors = arrayFrom(postprocessors); return this; }, /** * Insert this post-processor at a specific position in the stack, optionally relative to * any existing post-processor * * @private * @param {String} name The post-processor name. Note that it needs to be registered with * {@link Ext.ClassManager#registerPostprocessor} before this * @param {String} offset The insertion position. Four possible values are: * 'first', 'last', or: 'before', 'after' (relative to the name provided in the third argument) * @param {String} relativeName * @return {Ext.ClassManager} this */ setDefaultPostprocessorPosition: function(name, offset, relativeName) { var defaultPostprocessors = this.defaultPostprocessors, index; if (typeof offset == 'string') { if (offset === 'first') { defaultPostprocessors.unshift(name); return this; } else if (offset === 'last') { defaultPostprocessors.push(name); return this; } offset = (offset === 'after') ? 1 : -1; } index = Ext.Array.indexOf(defaultPostprocessors, relativeName); if (index !== -1) { Ext.Array.splice(defaultPostprocessors, Math.max(0, index + offset), 0, name); } return this; }, /** * Converts a string expression to an array of matching class names. An expression can either refers to class aliases * or class names. Expressions support wildcards: * * // returns ['Ext.window.Window'] * var window = Ext.ClassManager.getNamesByExpression('widget.window'); * * // returns ['widget.panel', 'widget.window', ...] * var allWidgets = Ext.ClassManager.getNamesByExpression('widget.*'); * * // returns ['Ext.data.Store', 'Ext.data.ArrayProxy', ...] 
* var allData = Ext.ClassManager.getNamesByExpression('Ext.data.*'); * * @param {String} expression * @return {Array} classNames * @markdown */ getNamesByExpression: function(expression) { var nameToAliasesMap = this.maps.nameToAliases, names = [], name, alias, aliases, possibleName, regex, i, ln; //<debug error> if (typeof expression != 'string' || expression.length < 1) { throw new Error("[Ext.ClassManager.getNamesByExpression] Expression " + expression + " is invalid, must be a non-empty string"); } //</debug> if (expression.indexOf('*') !== -1) { expression = expression.replace(/\*/g, '(.*?)'); regex = new RegExp('^' + expression + '$'); for (name in nameToAliasesMap) { if (nameToAliasesMap.hasOwnProperty(name)) { aliases = nameToAliasesMap[name]; if (name.search(regex) !== -1) { names.push(name); } else { for (i = 0, ln = aliases.length; i < ln; i++) { alias = aliases[i]; if (alias.search(regex) !== -1) { names.push(name); break; } } } } } } else { possibleName = this.getNameByAlias(expression); if (possibleName) { names.push(possibleName); } else { possibleName = this.getNameByAlternate(expression); if (possibleName) { names.push(possibleName); } else { names.push(expression); } } } return names; } }; //<feature classSystem.alias> /** * @cfg {String[]} alias * @member Ext.Class * List of short aliases for class names. Most useful for defining xtypes for widgets: * * Ext.define('MyApp.CoolPanel', { * extend: 'Ext.panel.Panel', * alias: ['widget.coolpanel'], * title: 'Yeah!' 
* }); * * // Using Ext.create * Ext.widget('widget.coolpanel'); * // Using the shorthand for widgets and in xtypes * Ext.widget('panel', { * items: [ * {xtype: 'coolpanel', html: 'Foo'}, * {xtype: 'coolpanel', html: 'Bar'} * ] * }); */ Manager.registerPostprocessor('alias', function(name, cls, data) { var aliases = data.alias, i, ln; for (i = 0,ln = aliases.length; i < ln; i++) { alias = aliases[i]; this.setAlias(cls, alias); } }, ['xtype', 'alias']); //</feature> //<feature classSystem.singleton> /** * @cfg {Boolean} singleton * @member Ext.Class * When set to true, the class will be instantiated as singleton. For example: * * Ext.define('Logger', { * singleton: true, * log: function(msg) { * console.log(msg); * } * }); * * Logger.log('Hello'); */ Manager.registerPostprocessor('singleton', function(name, cls, data, fn) { fn.call(this, name, new cls(), data); return false; }); //</feature> //<feature classSystem.alternateClassName> /** * @cfg {String/String[]} alternateClassName * @member Ext.Class * Defines alternate names for this class. For example: * * Ext.define('Developer', { * alternateClassName: ['Coder', 'Hacker'], * code: function(msg) { * alert('Typing... 
' + msg); * } * }); * * var joe = Ext.create('Developer'); * joe.code('stackoverflow'); * * var rms = Ext.create('Hacker'); * rms.code('hack hack'); */ Manager.registerPostprocessor('alternateClassName', function(name, cls, data) { var alternates = data.alternateClassName, i, ln, alternate; if (!(alternates instanceof Array)) { alternates = [alternates]; } for (i = 0, ln = alternates.length; i < ln; i++) { alternate = alternates[i]; //<debug error> if (typeof alternate != 'string') { throw new Error("[Ext.define] Invalid alternate of: '" + alternate + "' for class: '" + name + "'; must be a valid string"); } //</debug> this.set(alternate, cls); } }); //</feature> Ext.apply(Ext, { /** * Convenient shorthand, see {@link Ext.ClassManager#instantiate} * @member Ext * @method create */ create: alias(Manager, 'instantiate'), /** * Convenient shorthand to create a widget by its xtype, also see {@link Ext.ClassManager#instantiateByAlias} * * var button = Ext.widget('button'); // Equivalent to Ext.create('widget.button') * var panel = Ext.widget('panel'); // Equivalent to Ext.create('widget.panel') * * @member Ext * @method widget */ widget: function(name) { var args = arraySlice.call(arguments); args[0] = 'widget.' + name; return Manager.instantiateByAlias.apply(Manager, args); }, /** * Convenient shorthand, see {@link Ext.ClassManager#instantiateByAlias} * @member Ext * @method createByAlias */ createByAlias: alias(Manager, 'instantiateByAlias'), /** * Defines a class or override. A basic class is defined like this: * * Ext.define('My.awesome.Class', { * someProperty: 'something', * * someMethod: function(s) { * console.log(s + this.someProperty); * } * }); * * var obj = new My.awesome.Class(); * * obj.someMethod('Say '); // logs 'Say something' to the console * * To defines an override, include the `override` property. The content of an * override is aggregated with the specified class in order to extend or modify * that class. 
This can be as simple as setting default property values or it can * extend and/or replace methods. This can also extend the statics of the class. * * One use for an override is to break a large class into manageable pieces. * * // File: /src/app/Panel.js * * Ext.define('My.app.Panel', { * extend: 'Ext.panel.Panel', * requires: [ * 'My.app.PanelPart2', * 'My.app.PanelPart3' * ] * * constructor: function (config) { * this.callParent(arguments); // calls Ext.panel.Panel's constructor * //... * }, * * statics: { * method: function () { * return 'abc'; * } * } * }); * * // File: /src/app/PanelPart2.js * Ext.define('My.app.PanelPart2', { * override: 'My.app.Panel', * * constructor: function (config) { * this.callParent(arguments); // calls My.app.Panel's constructor * //... * } * }); * * Another use for an override is to provide optional parts of classes that can be * independently required. In this case, the class may even be unaware of the * override altogether. * * Ext.define('My.ux.CoolTip', { * override: 'Ext.tip.ToolTip', * * constructor: function (config) { * this.callParent(arguments); // calls Ext.tip.ToolTip's constructor * //... * } * }); * * The above override can now be required as normal. * * Ext.define('My.app.App', { * requires: [ * 'My.ux.CoolTip' * ] * }); * * Overrides can also contain statics: * * Ext.define('My.app.BarMod', { * override: 'Ext.foo.Bar', * * statics: { * method: function (x) { * return this.callParent([x * 2]); // call Ext.foo.Bar.method * } * } * }); * * IMPORTANT: An override is only included in a build if the class it overrides is * required. Otherwise, the override, like the target class, is not included. 
* * @param {String} className The class name to create in string dot-namespaced format, for example: * 'My.very.awesome.Class', 'FeedViewer.plugin.CoolPager' * * It is highly recommended to follow this simple convention: * - The root and the class name are 'CamelCased' * - Everything else is lower-cased * * @param {Object} data The key - value pairs of properties to apply to this class. Property names can be of * any valid strings, except those in the reserved listed below: * * - `mixins` * - `statics` * - `config` * - `alias` * - `self` * - `singleton` * - `alternateClassName` * - `override` * * @param {Function} createdFn Optional callback to execute after the class (or override) * is created. The execution scope (`this`) will be the newly created class itself. * @return {Ext.Base} * * @member Ext * @method define */ define: function (className, data, createdFn) { if ('override' in data) { return Manager.createOverride.apply(Manager, arguments); } return Manager.create.apply(Manager, arguments); }, /** * Convenient shorthand for {@link Ext.ClassManager#getName}. * @member Ext * @method getClassName * @inheritdoc Ext.ClassManager#getName */ getClassName: alias(Manager, 'getName'), /** * Returns the display name for object. This name is looked for in order from the following places: * * - `displayName` field of the object. * - `$name` and `$class` fields of the object. * - '$className` field of the object. * * This method is used by {@link Ext.Logger#log} to display information about objects. * * @param {Mixed} [object] The object who's display name to determine. * @return {String} The determined display name, or "Anonymous" if none found. 
* @member Ext */ getDisplayName: function(object) { if (object) { if (object.displayName) { return object.displayName; } if (object.$name && object.$class) { return Ext.getClassName(object.$class) + '#' + object.$name; } if (object.$className) { return object.$className; } } return 'Anonymous'; }, /** * Convenient shorthand, see {@link Ext.ClassManager#getClass} * @member Ext * @method getClass */ getClass: alias(Manager, 'getClass'), /** * Creates namespaces to be used for scoping variables and classes so that they are not global. * Specifying the last node of a namespace implicitly creates all other nodes. Usage: * * Ext.namespace('Company', 'Company.data'); * * // equivalent and preferable to the above syntax * Ext.namespace('Company.data'); * * Company.Widget = function() { ... }; * * Company.data.CustomStore = function(config) { ... }; * * @param {String} namespace1 * @param {String} namespace2 * @param {String} etc * @return {Object} The namespace object. (If multiple arguments are passed, this will be the last namespace created) * @function * @member Ext * @method namespace */ namespace: alias(Manager, 'createNamespaces') }); /** * Old name for {@link Ext#widget}. * @deprecated 4.0.0 Please use {@link Ext#widget} instead. 
* @method createWidget * @member Ext */ Ext.createWidget = Ext.widget; /** * Convenient alias for {@link Ext#namespace Ext.namespace} * @member Ext * @method ns */ Ext.ns = Ext.namespace; Class.registerPreprocessor('className', function(cls, data) { if (data.$className) { cls.$className = data.$className; //<debug> cls.displayName = cls.$className; //</debug> } }, true, 'first'); Class.registerPreprocessor('alias', function(cls, data) { var prototype = cls.prototype, xtypes = arrayFrom(data.xtype), aliases = arrayFrom(data.alias), widgetPrefix = 'widget.', widgetPrefixLength = widgetPrefix.length, xtypesChain = Array.prototype.slice.call(prototype.xtypesChain || []), xtypesMap = Ext.merge({}, prototype.xtypesMap || {}), i, ln, alias, xtype; for (i = 0,ln = aliases.length; i < ln; i++) { alias = aliases[i]; //<debug error> if (typeof alias != 'string' || alias.length < 1) { throw new Error("[Ext.define] Invalid alias of: '" + alias + "' for class: '" + name + "'; must be a valid string"); } //</debug> if (alias.substring(0, widgetPrefixLength) === widgetPrefix) { xtype = alias.substring(widgetPrefixLength); Ext.Array.include(xtypes, xtype); } } cls.xtype = data.xtype = xtypes[0]; data.xtypes = xtypes; for (i = 0,ln = xtypes.length; i < ln; i++) { xtype = xtypes[i]; if (!xtypesMap[xtype]) { xtypesMap[xtype] = true; xtypesChain.push(xtype); } } data.xtypesChain = xtypesChain; data.xtypesMap = xtypesMap; Ext.Function.interceptAfter(data, 'onClassCreated', function() { var mixins = prototype.mixins, key, mixin; for (key in mixins) { if (mixins.hasOwnProperty(key)) { mixin = mixins[key]; xtypes = mixin.xtypes; if (xtypes) { for (i = 0,ln = xtypes.length; i < ln; i++) { xtype = xtypes[i]; if (!xtypesMap[xtype]) { xtypesMap[xtype] = true; xtypesChain.push(xtype); } } } } } }); for (i = 0,ln = xtypes.length; i < ln; i++) { xtype = xtypes[i]; //<debug error> if (typeof xtype != 'string' || xtype.length < 1) { throw new Error("[Ext.define] Invalid xtype of: '" + xtype + "' 
for class: '" + name + "'; must be a valid non-empty string"); } //</debug> Ext.Array.include(aliases, widgetPrefix + xtype); } data.alias = aliases; }, ['xtype', 'alias']); })(Ext.Class, Ext.Function.alias, Array.prototype.slice, Ext.Array.from, Ext.global);
pierotofy/glassomium
src/server/WebRoot/apps/Maps/src/core/class/ClassManager.js
JavaScript
apache-2.0
47,766
/* * * Copyright 2014 Tom Mahaffey * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.tkmtwo.sarapi.convert; import com.bmc.arsys.api.DateInfo; import com.bmc.arsys.api.Value; import org.springframework.core.convert.converter.Converter; /** * */ public final class DateInfoToValueConverter implements Converter<DateInfo, Value> { @Override public Value convert(DateInfo di) { return (di == null) ? new Value() : new Value(di); } }
TkmTwoProjects/tkmtwo-sarapi
core/src/main/java/com/tkmtwo/sarapi/convert/DateInfoToValueConverter.java
Java
apache-2.0
989
/*
 * Copyright © 2017-2019 Cask Data, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package io.cdap.wrangler.service.database;

import io.cdap.cdap.etl.api.Destroyable;

import java.sql.DriverManager;
import java.sql.SQLException;
import javax.annotation.Nullable;

/**
 * Cleans up JDBC drivers by deregistering the {@link JDBCDriverShim} that was
 * registered with {@link DriverManager}, if any.
 */
public class DriverCleanup implements Destroyable {
  private final JDBCDriverShim driverShim;

  /**
   * @param driverShim the shim to deregister on {@link #destroy()}; may be
   *                   {@code null}, in which case cleanup is a no-op
   */
  DriverCleanup(@Nullable JDBCDriverShim driverShim) {
    this.driverShim = driverShim;
  }

  /**
   * Deregisters the wrapped driver from {@link DriverManager}.
   *
   * @throws RuntimeException wrapping any {@link SQLException} thrown during
   *                          deregistration (same behavior as the deprecated
   *                          {@code Throwables.propagate} this replaces)
   */
  @Override
  public void destroy() {
    if (driverShim == null) {
      return;
    }
    try {
      DriverManager.deregisterDriver(driverShim);
    } catch (SQLException e) {
      // Throwables.propagate is deprecated; for a checked exception it always
      // wrapped in RuntimeException, which this does directly.
      throw new RuntimeException(e);
    }
  }
}
hydrator/wrangler
wrangler-service/src/main/java/io/cdap/wrangler/service/database/DriverCleanup.java
Java
apache-2.0
1,265
/* $Id: HopFilterManager.java 988245 2010-08-23 18:39:35Z kwright $ */

/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.manifoldcf.crawler.jobs;

import org.apache.manifoldcf.core.interfaces.*;
import org.apache.manifoldcf.agents.interfaces.*;
import org.apache.manifoldcf.crawler.interfaces.*;
import java.util.*;

/** This class manages the "hopfilters" table, which contains the hopcount filters for each job.
* It's separated from the main jobs table because we will need multiple hop filters per job.
*
* <br><br>
* <b>jobhopfilters</b>
* <table border="1" cellpadding="3" cellspacing="0">
* <tr class="TableHeadingColor">
* <th>Field</th><th>Type</th><th>Description&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</th>
* <tr><td>ownerid</td><td>BIGINT</td><td>Reference:jobs.id</td></tr>
* <tr><td>linktype</td><td>VARCHAR(255)</td><td></td></tr>
* <tr><td>maxhops</td><td>BIGINT</td><td></td></tr>
* </table>
* <br><br>
*
*/
public class HopFilterManager extends org.apache.manifoldcf.core.database.BaseTable
{
  public static final String _rcsid = "@(#)$Id: HopFilterManager.java 988245 2010-08-23 18:39:35Z kwright $";

  // Schema: one row per (job, link type) pair, giving the maximum hop count.
  public final static String ownerIDField = "ownerid";
  public final static String linkTypeField = "linktype";
  public final static String maxHopsField = "maxhops";

  /** Constructor.
  *@param threadContext is the thread context.
  *@param database is the database instance.
  */
  public HopFilterManager(IThreadContext threadContext, IDBInterface database)
    throws ManifoldCFException
  {
    super(database,"jobhopfilters");
  }

  /** Install or upgrade.
  * Creates the table if absent, runs any upgrade code, and reconciles the
  * table's indexes to exactly the expected set.
  *@param ownerTable is the name of the table that owns this one.
  *@param owningTablePrimaryKey is the primary key of the owning table.
  */
  public void install(String ownerTable, String owningTablePrimaryKey)
    throws ManifoldCFException
  {
    // Standard practice: outer loop
    while (true)
    {
      Map existing = getTableSchema(null,null);
      if (existing == null)
      {
        HashMap map = new HashMap();
        // ownerid references the owning (jobs) table's primary key.
        map.put(ownerIDField,new ColumnDescription("BIGINT",false,false,ownerTable,owningTablePrimaryKey,false));
        // Null link types are NOT allowed here.  The restrictions can only be made on a real link type.
        map.put(linkTypeField,new ColumnDescription("VARCHAR(255)",false,false,null,null,false));
        map.put(maxHopsField,new ColumnDescription("BIGINT",false,false,null,null,false));
        performCreate(map,null);
      }
      else
      {
        // Upgrade code goes here, as needed
      }

      // Index management: a unique index on (ownerid, linktype) is expected.
      IndexDescription ownerIndex = new IndexDescription(true,new String[]{ownerIDField,linkTypeField});

      // Get rid of indexes that shouldn't be there
      Map indexes = getTableIndexes(null,null);
      Iterator iter = indexes.keySet().iterator();
      while (iter.hasNext())
      {
        String indexName = (String)iter.next();
        IndexDescription id = (IndexDescription)indexes.get(indexName);

        // Null out each expected index as it is found; leftovers get created below.
        if (ownerIndex != null && id.equals(ownerIndex))
          ownerIndex = null;
        else if (indexName.indexOf("_pkey") == -1)
          // This index shouldn't be here; drop it
          performRemoveIndex(indexName);
      }

      // Add the ones we didn't find
      if (ownerIndex != null)
        performAddIndex(null,ownerIndex);

      break;
    }
  }

  /** Uninstall.  Drops the table. */
  public void deinstall()
    throws ManifoldCFException
  {
    performDrop(null);
  }

  /** Read rows for a given owner id.
  *@param id is the owner id.
  *@return a map of link type to max hop count (as a Long).
  */
  public Map readRows(Long id)
    throws ManifoldCFException
  {
    ArrayList list = new ArrayList();
    list.add(id);
    IResultSet set = performQuery("SELECT "+linkTypeField+","+maxHopsField+" FROM "+getTableName()+" WHERE "+ownerIDField+"=?",list,
      null,null);
    Map rval = new HashMap();
    if (set.getRowCount() == 0)
      return rval;
    // One result row per link type; fold them into the map.
    int i = 0;
    while (i < set.getRowCount())
    {
      IResultRow row = set.getRow(i);
      String linkType = (String)row.getValue(linkTypeField);
      Long max = (Long)row.getValue(maxHopsField);
      rval.put(linkType,max);
      i++;
    }
    return rval;
  }

  /** Fill in a set of filters corresponding to a set of owner id's.
  * Bulk variant of readRows: one query covers all requested owners, and each
  * filter row is attached to its owner's JobDescription.
  *@param returnValues is a map keyed by ownerID, with value of JobDescription.
  *@param ownerIDList is the list of owner id's.
  *@param ownerIDParams is the corresponding set of owner id parameters.
  */
  public void getRows(Map<Long,JobDescription> returnValues, String ownerIDList, ArrayList ownerIDParams)
    throws ManifoldCFException
  {
    IResultSet set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+ownerIDField+" IN ("+ownerIDList+")",ownerIDParams,
      null,null);
    int i = 0;
    while (i < set.getRowCount())
    {
      IResultRow row = set.getRow(i);
      Long ownerID = (Long)row.getValue(ownerIDField);
      String linkType = (String)row.getValue(linkTypeField);
      Long maxHops = (Long)row.getValue(maxHopsField);
      returnValues.get(ownerID).addHopCountFilter(linkType,maxHops);
      i++;
    }
  }

  /** Compare a filter list against what's in a job description.
  *@param ownerID is the owning identifier.
  *@param list is the job description to write hopcount filters for.
  *@return true if the stored filters exactly match the job description's filters.
  */
  public boolean compareRows(Long ownerID, IJobDescription list)
    throws ManifoldCFException
  {
    // Compare hopcount filter criteria.
    Map filterRows = readRows(ownerID);
    Map newFilterRows = list.getHopCountFilters();
    if (filterRows.size() != newFilterRows.size())
      return false;
    for (String linkType : (Collection<String>)filterRows.keySet())
    {
      Long oldCount = (Long)filterRows.get(linkType);
      Long newCount = (Long)newFilterRows.get(linkType);
      if (oldCount == null || newCount == null)
        return false;
      if (oldCount.longValue() != newCount.longValue())
        return false;
    }
    return true;
  }

  /** Write a filter list into the database.
  * Inserts one row per link type, all within a single transaction.
  *@param ownerID is the owning identifier.
  *@param list is the job description to write hopcount filters for.
  */
  public void writeRows(Long ownerID, IJobDescription list)
    throws ManifoldCFException
  {
    beginTransaction();
    try
    {
      int i = 0;
      // The same HashMap is reused (cleared) for each inserted row.
      HashMap map = new HashMap();
      Map filters = list.getHopCountFilters();
      Iterator iter = filters.keySet().iterator();
      while (iter.hasNext())
      {
        String linkType = (String)iter.next();
        Long maxHops = (Long)filters.get(linkType);
        map.clear();
        map.put(linkTypeField,linkType);
        map.put(maxHopsField,maxHops);
        map.put(ownerIDField,ownerID);
        performInsert(map,null);
      }
    }
    catch (ManifoldCFException e)
    {
      signalRollback();
      throw e;
    }
    catch (Error e)
    {
      signalRollback();
      throw e;
    }
    finally
    {
      endTransaction();
    }
  }

  /** Delete rows.
  *@param ownerID is the owner whose rows to delete.
  */
  public void deleteRows(Long ownerID)
    throws ManifoldCFException
  {
    ArrayList list = new ArrayList();
    list.add(ownerID);
    performDelete("WHERE "+ownerIDField+"=?",list,null);
  }

}
gladyscarrizales/manifoldcf
framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/HopFilterManager.java
Java
apache-2.0
8,021
/** * Passport configuration * * This is the configuration for your Passport.js setup and where you * define the authentication strategies you want your application to employ. * * I have tested the service with all of the providers listed below - if you * come across a provider that for some reason doesn't work, feel free to open * an issue on GitHub. * * Also, authentication scopes can be set through the `scope` property. * * For more information on the available providers, check out: * http://passportjs.org/guide/providers/ */ module.exports.passport = { local: { strategy: require('passport-local').Strategy }, bearer: { strategy: require('passport-http-bearer').Strategy } /*twitter: { name: 'Twitter', protocol: 'oauth', strategy: require('passport-twitter').Strategy, options: { consumerKey: 'your-consumer-key', consumerSecret: 'your-consumer-secret' } }, github: { name: 'GitHub', protocol: 'oauth2', strategy: require('passport-github').Strategy, options: { clientID: 'your-client-id', clientSecret: 'your-client-secret' } }, facebook: { name: 'Facebook', protocol: 'oauth2', strategy: require('passport-facebook').Strategy, options: { clientID: 'your-client-id', clientSecret: 'your-client-secret', scope: ['email'] } }, google: { name: 'Google', protocol: 'oauth2', strategy: require('passport-google-oauth').OAuth2Strategy, options: { clientID: 'your-client-id', clientSecret: 'your-client-secret' } }, cas: { name: 'CAS', protocol: 'cas', strategy: require('passport-cas').Strategy, options: { ssoBaseURL: 'http://your-cas-url', serverBaseURL: 'http://localhost:1337', serviceURL: 'http://localhost:1337/auth/cas/callback' } }*/ };
porybox/porybox
config/passport.js
JavaScript
apache-2.0
1,880
/* Proposed SG14 status_code (C) 2018-2019 Niall Douglas <http://www.nedproductions.biz/> (5 commits) File Created: Feb 2018 Boost Software License - Version 1.0 - August 17th, 2003 Permission is hereby granted, free of charge, to any person or organization obtaining a copy of the software and accompanying documentation covered by this license (the "Software") to use, reproduce, display, distribute, execute, and transmit the Software, and to prepare derivative works of the Software, and to permit third-parties to whom the Software is furnished to do so, all subject to the following: The copyright notices in the Software and this entire statement, including the above license grant, this restriction and the following disclaimer, must be included in all copies of the Software, in whole or in part, and all derivative works of the Software, unless such copies or derivative works are solely in the form of machine-executable object code generated by a source language processor. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/
#ifndef BOOST_OUTCOME_SYSTEM_ERROR2_CONFIG_HPP
#define BOOST_OUTCOME_SYSTEM_ERROR2_CONFIG_HPP

// < 0.1 each
#include <cassert>
#include <cstddef>  // for size_t
#include <cstdlib>  // for free
#include <cstring>  // for memmove (used by the non-constexpr bit_cast fallback)

// 0.22
#include <type_traits>

// 0.29
#include <atomic>

// 0.28 (0.15 of which is exception_ptr)
#include <exception>  // for std::exception

// <new> includes <exception>, <exception> includes <new>
#include <new>

// 0.01
#include <initializer_list>

#ifndef BOOST_OUTCOME_SYSTEM_ERROR2_CONSTEXPR14
#if defined(BOOST_OUTCOME_STANDARDESE_IS_IN_THE_HOUSE) || __cplusplus >= 201400 || _MSC_VER >= 1910 /* VS2017 */
//! Defined to be `constexpr` when on C++ 14 or better compilers. Usually automatic, can be overriden.
#define BOOST_OUTCOME_SYSTEM_ERROR2_CONSTEXPR14 constexpr
#else
#define BOOST_OUTCOME_SYSTEM_ERROR2_CONSTEXPR14
#endif
#endif

#ifndef BOOST_OUTCOME_SYSTEM_ERROR2_NORETURN
#if defined(BOOST_OUTCOME_STANDARDESE_IS_IN_THE_HOUSE) || (_HAS_CXX17 && _MSC_VER >= 1911 /* VS2017.3 */)
#define BOOST_OUTCOME_SYSTEM_ERROR2_NORETURN [[noreturn]]
#endif
#endif
#if !defined(BOOST_OUTCOME_SYSTEM_ERROR2_NORETURN)
#ifdef __has_cpp_attribute
#if __has_cpp_attribute(noreturn)
#define BOOST_OUTCOME_SYSTEM_ERROR2_NORETURN [[noreturn]]
#endif
#endif
#endif
#if !defined(BOOST_OUTCOME_SYSTEM_ERROR2_NORETURN)
#if defined(_MSC_VER)
#define BOOST_OUTCOME_SYSTEM_ERROR2_NORETURN __declspec(noreturn)
#elif defined(__GNUC__)
#define BOOST_OUTCOME_SYSTEM_ERROR2_NORETURN __attribute__((__noreturn__))
#else
#define BOOST_OUTCOME_SYSTEM_ERROR2_NORETURN
#endif
#endif
// GCCs before 7 don't grok [[noreturn]] virtual functions, and warn annoyingly
#if defined(__GNUC__) && !defined(__clang__) && __GNUC__ < 7
#undef BOOST_OUTCOME_SYSTEM_ERROR2_NORETURN
#define BOOST_OUTCOME_SYSTEM_ERROR2_NORETURN
#endif

#ifndef BOOST_OUTCOME_SYSTEM_ERROR2_NODISCARD
#if defined(BOOST_OUTCOME_STANDARDESE_IS_IN_THE_HOUSE) || (_HAS_CXX17 && _MSC_VER >= 1911 /* VS2017.3 */)
#define BOOST_OUTCOME_SYSTEM_ERROR2_NODISCARD [[nodiscard]]
#endif
#endif
#ifndef BOOST_OUTCOME_SYSTEM_ERROR2_NODISCARD
// Fallback nodiscard detection: attribute probe, then compiler-specific spellings.
#ifdef __has_cpp_attribute
#if __has_cpp_attribute(nodiscard)
#define BOOST_OUTCOME_SYSTEM_ERROR2_NODISCARD [[nodiscard]]
#endif
#elif defined(__clang__)
#define BOOST_OUTCOME_SYSTEM_ERROR2_NODISCARD __attribute__((warn_unused_result))
#elif defined(_MSC_VER)
// _Must_inspect_result_ expands into this
#define BOOST_OUTCOME_SYSTEM_ERROR2_NODISCARD \
  __declspec("SAL_name" \
             "(" \
             "\"_Must_inspect_result_\"" \
             "," \
             "\"\"" \
             "," \
             "\"2\"" \
             ")") __declspec("SAL_begin") __declspec("SAL_post") __declspec("SAL_mustInspect") __declspec("SAL_post") __declspec("SAL_checkReturn") __declspec("SAL_end")
#endif
#endif
#ifndef BOOST_OUTCOME_SYSTEM_ERROR2_NODISCARD
#define BOOST_OUTCOME_SYSTEM_ERROR2_NODISCARD
#endif

#ifndef BOOST_OUTCOME_SYSTEM_ERROR2_NAMESPACE
//! The system_error2 namespace name.
#define BOOST_OUTCOME_SYSTEM_ERROR2_NAMESPACE system_error2
//! Begins the system_error2 namespace.
#define BOOST_OUTCOME_SYSTEM_ERROR2_NAMESPACE_BEGIN \
  namespace system_error2 \
  {
//! Ends the system_error2 namespace.
#define BOOST_OUTCOME_SYSTEM_ERROR2_NAMESPACE_END }
#endif

//! Namespace for the library
BOOST_OUTCOME_SYSTEM_ERROR2_NAMESPACE_BEGIN

//! Namespace for user specialised traits
namespace traits
{
  /*! Specialise to true if you guarantee that a type is move relocating (i.e.
  its move constructor equals copying bits from old to new, old is left in a
  default constructed state, and calling the destructor on a default constructed
  instance is trivial). All trivially copyable types are move relocating by
  definition, and that is the unspecialised implementation.
  */
  template <class T> struct is_move_relocating
  {
    static constexpr bool value = std::is_trivially_copyable<T>::value;
  };
}  // namespace traits

namespace detail
{
  // constexpr-capable strlen replacement (avoids pulling in <cstring> at constexpr time).
  inline BOOST_OUTCOME_SYSTEM_ERROR2_CONSTEXPR14 size_t cstrlen(const char *str)
  {
    const char *end = nullptr;
    for(end = str; *end != 0; ++end)  // NOLINT
      ;
    return end - str;
  }

  /* A partially compliant implementation of C++20's std::bit_cast function contributed
  by Jesse Towner. TODO FIXME Replace with C++ 20 bit_cast when available.

  Our bit_cast is only guaranteed to be constexpr when both the input and output
  arguments are either integrals or enums. However, this covers most use cases
  since the vast majority of status_codes have an underlying type that is either
  an integral or enum. We still attempt a constexpr union-based type pun for non-array
  input types, which some compilers accept. For array inputs, we fall back to
  non-constexpr memmove.
  */
  template <class T> using is_integral_or_enum = std::integral_constant<bool, std::is_integral<T>::value || std::is_enum<T>::value>;

  template <class To, class From> using is_static_castable = std::integral_constant<bool, is_integral_or_enum<To>::value && is_integral_or_enum<From>::value>;

  template <class To, class From> using is_union_castable = std::integral_constant<bool, !is_static_castable<To, From>::value && !std::is_array<To>::value && !std::is_array<From>::value>;

  template <class To, class From> using is_bit_castable = std::integral_constant<bool, sizeof(To) == sizeof(From) && traits::is_move_relocating<To>::value && traits::is_move_relocating<From>::value>;

  template <class To, class From> union bit_cast_union {
    From source;
    To target;
  };

  // Overload 1: both sides integral/enum, so a plain static_cast is a valid bit copy.
  template <class To, class From,
            typename std::enable_if<  //
            is_bit_castable<To, From>::value  //
            && is_static_castable<To, From>::value  //
            && !is_union_castable<To, From>::value,  //
            bool>::type = true>  //
  constexpr To bit_cast(const From &from) noexcept { return static_cast<To>(from); }

  // Overload 2: attempt a constexpr union-based type pun (accepted by some compilers).
  template <class To, class From,
            typename std::enable_if<  //
            is_bit_castable<To, From>::value  //
            && !is_static_castable<To, From>::value  //
            && is_union_castable<To, From>::value,  //
            bool>::type = true>  //
  constexpr To bit_cast(const From &from) noexcept { return bit_cast_union<To, From>{from}.target; }

  // Overload 3: non-constexpr memmove fallback (array types etc.).
  template <class To, class From,
            typename std::enable_if<  //
            is_bit_castable<To, From>::value  //
            && !is_static_castable<To, From>::value  //
            && !is_union_castable<To, From>::value,  //
            bool>::type = true>  //
  To bit_cast(const From &from) noexcept
  {
    bit_cast_union<To, From> ret;
    memmove(&ret.source, &from, sizeof(ret.source));
    return ret.target;
  }

  /* erasure_cast performs a bit_cast with additional rules to handle types
  of differing sizes. For integral & enum types, it may perform a narrowing
  or widing conversion with static_cast if necessary, before doing the final
  conversion with bit_cast. When casting to or from non-integral, non-enum
  types it may insert the value into another object with extra padding bytes
  to satisfy bit_cast's preconditions that both types have the same size.
  */
  template <class To, class From> using is_erasure_castable = std::integral_constant<bool, traits::is_move_relocating<To>::value && traits::is_move_relocating<From>::value>;

  template <class T, bool = std::is_enum<T>::value> struct identity_or_underlying_type
  {
    using type = T;
  };
  template <class T> struct identity_or_underlying_type<T, true>
  {
    using type = typename std::underlying_type<T>::type;
  };

  // Integer type with OfSize's width and OfSign's signedness, used as the
  // intermediate for narrowing/widening erasure conversions.
  template <class OfSize, class OfSign>
  using erasure_integer_type = typename std::conditional<std::is_signed<typename identity_or_underlying_type<OfSign>::type>::value, typename std::make_signed<typename identity_or_underlying_type<OfSize>::type>::type, typename std::make_unsigned<typename identity_or_underlying_type<OfSize>::type>::type>::type;

  // Wraps an ErasedType with N padding bytes so its size matches bit_cast's requirement.
  template <class ErasedType, std::size_t N> struct padded_erasure_object
  {
    static_assert(traits::is_move_relocating<ErasedType>::value, "ErasedType must be TriviallyCopyable or MoveRelocating");
    static_assert(alignof(ErasedType) <= sizeof(ErasedType), "ErasedType must not be over-aligned");
    ErasedType value;
    char padding[N];
    constexpr explicit padded_erasure_object(const ErasedType &v) noexcept
        : value(v)
        , padding{}
    {
    }
  };

  // Same size: plain bit_cast.
  template <class To, class From, typename std::enable_if<is_erasure_castable<To, From>::value && (sizeof(To) == sizeof(From)), bool>::type = true> constexpr To erasure_cast(const From &from) noexcept { return bit_cast<To>(from); }

  // Integral/enum, narrowing: bit_cast to a same-size integer, then static_cast down.
  template <class To, class From, typename std::enable_if<is_erasure_castable<To, From>::value && is_static_castable<To, From>::value && (sizeof(To) < sizeof(From)), bool>::type = true> constexpr To erasure_cast(const From &from) noexcept { return static_cast<To>(bit_cast<erasure_integer_type<From, To>>(from)); }

  // Integral/enum, widening: static_cast up to a same-size integer, then bit_cast.
  template <class To, class From, typename std::enable_if<is_erasure_castable<To, From>::value && is_static_castable<To, From>::value && (sizeof(To) > sizeof(From)), bool>::type = true> constexpr To erasure_cast(const From &from) noexcept { return bit_cast<To>(static_cast<erasure_integer_type<To, From>>(from)); }

  // Non-integral, narrowing: bit_cast to a padded wrapper and take the value.
  template <class To, class From, typename std::enable_if<is_erasure_castable<To, From>::value && !is_static_castable<To, From>::value && (sizeof(To) < sizeof(From)), bool>::type = true> constexpr To erasure_cast(const From &from) noexcept { return bit_cast<padded_erasure_object<To, sizeof(From) - sizeof(To)>>(from).value; }

  // Non-integral, widening: pad the source up to the destination size, then bit_cast.
  template <class To, class From, typename std::enable_if<is_erasure_castable<To, From>::value && !is_static_castable<To, From>::value && (sizeof(To) > sizeof(From)), bool>::type = true> constexpr To erasure_cast(const From &from) noexcept { return bit_cast<To>(padded_erasure_object<From, sizeof(To) - sizeof(From)>{from}); }
}  // namespace detail

BOOST_OUTCOME_SYSTEM_ERROR2_NAMESPACE_END

#ifndef BOOST_OUTCOME_SYSTEM_ERROR2_FATAL
#include <cstdlib>  // for abort
#ifdef __APPLE__
#include <unistd.h>  // for write
#endif
BOOST_OUTCOME_SYSTEM_ERROR2_NAMESPACE_BEGIN
namespace detail
{
  // Declare write() ourselves so <stdio.h>/<unistd.h> need not be dragged in on non-Apple.
  namespace avoid_stdio_include
  {
#ifndef __APPLE__
    extern "C" ptrdiff_t write(int, const void *, size_t);
#endif
  }  // namespace avoid_stdio_include
  inline void do_fatal_exit(const char *msg)
  {
    using namespace avoid_stdio_include;
    write(2 /*stderr*/, msg, cstrlen(msg));
    write(2 /*stderr*/, "\n", 1);
    abort();
  }
}  // namespace detail
BOOST_OUTCOME_SYSTEM_ERROR2_NAMESPACE_END
//! Prints msg to stderr, and calls `std::terminate()`. Can be overriden via predefinition.
#define BOOST_OUTCOME_SYSTEM_ERROR2_FATAL(msg) ::BOOST_OUTCOME_SYSTEM_ERROR2_NAMESPACE::detail::do_fatal_exit(msg)
#endif

#endif
Simran-B/arangodb
3rdParty/boost/1.71.0/boost/outcome/experimental/status-code/config.hpp
C++
apache-2.0
15,298
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.log4j.util; import org.apache.oro.text.perl.Perl5Util; public class LineNumberFilter implements Filter { Perl5Util util = new Perl5Util(); @Override public String filter(final String in) { if (util.match("/\\(.*:\\d{1,4}\\)/", in)) { return util.substitute("s/:\\d{1,4}\\)/:XXX)/", in); } if (in.indexOf(", Compiled Code") >= 0) { return util.substitute("s/, Compiled Code/:XXX/", in); } return in; } }
apache/logging-log4j2
log4j-1.2-api/src/test/java/org/apache/log4j/util/LineNumberFilter.java
Java
apache-2.0
1,310
package ca.uhn.fhir.rest.annotation;

/*
 * #%L
 * HAPI FHIR - Core Library
 * %%
 * Copyright (C) 2014 - 2017 University Health Network
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;

/**
 * Parameter annotation for the _since parameter, which indicates to the
 * server that only results dated since the given instant will be returned.
 * <p>
 * Parameters with this annotation should be of type {@link DateParam} or {@link DateRangeParam}
 * </p>
 *
 * @see History
 */
@Target(value=ElementType.PARAMETER)
@Retention(RetentionPolicy.RUNTIME)
public @interface Since {
	// Marker annotation: has no attributes, its presence on a parameter is the signal.
}
eug48/hapi-fhir
hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Since.java
Java
apache-2.0
1,368
"""The ReCollect Waste integration."""
from __future__ import annotations

from datetime import date, timedelta

from aiorecollect.client import Client, PickupEvent
from aiorecollect.errors import RecollectError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import CONF_PLACE_ID, CONF_SERVICE_ID, DOMAIN, LOGGER

DEFAULT_NAME = "recollect_waste"
# Pickup schedules change rarely, so refreshing once per day is enough.
DEFAULT_UPDATE_INTERVAL = timedelta(days=1)

PLATFORMS = [Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up ReCollect Waste from a config entry."""
    session = aiohttp_client.async_get_clientsession(hass)
    client = Client(
        entry.data[CONF_PLACE_ID], entry.data[CONF_SERVICE_ID], session=session
    )

    async def async_get_pickup_events() -> list[PickupEvent]:
        """Fetch pickup events from today through the next four weeks."""
        try:
            return await client.async_get_pickup_events(
                start_date=date.today(), end_date=date.today() + timedelta(weeks=4)
            )
        except RecollectError as err:
            raise UpdateFailed(
                f"Error while requesting data from ReCollect: {err}"
            ) from err

    coordinator = DataUpdateCoordinator(
        hass,
        LOGGER,
        name=f"Place {entry.data[CONF_PLACE_ID]}, Service {entry.data[CONF_SERVICE_ID]}",
        update_interval=DEFAULT_UPDATE_INTERVAL,
        update_method=async_get_pickup_events,
    )
    # Fail config-entry setup (with retry) if the first refresh doesn't succeed.
    await coordinator.async_config_entry_first_refresh()

    hass.data.setdefault(DOMAIN, {})
    hass.data[DOMAIN][entry.entry_id] = coordinator

    hass.config_entries.async_setup_platforms(entry, PLATFORMS)

    # Reload the entry whenever its options change.
    entry.async_on_unload(entry.add_update_listener(async_reload_entry))

    return True


async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Handle an options update."""
    await hass.config_entries.async_reload(entry.entry_id)


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a ReCollect Waste config entry."""
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    if unload_ok:
        hass.data[DOMAIN].pop(entry.entry_id)

    return unload_ok
home-assistant/home-assistant
homeassistant/components/recollect_waste/__init__.py
Python
apache-2.0
2,425
/// <reference path='fourslash.ts'/> // @Filename: a.ts ////const enum TestEnum { //// Foo, Bar ////} ////var testFirstFile = TestEnum.Bar; // @Filename: b.ts /////// <reference path="a.ts" /> /////*1*/ ////var testInOtherFile = TestEnum.Bar; goTo.marker("1"); verify.verifyGetEmitOutputForCurrentFile( "/// <reference path=\"a.ts\" />\r\n\ var testInOtherFile = 1 /* TestEnum.Bar */;\r\n" )
Microsoft/TypeScript
tests/cases/fourslash/constEnumsEmitOutputInMultipleFiles.ts
TypeScript
apache-2.0
418
// Copyright 2015 The rkt Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package main import ( "fmt" "io/ioutil" "os" "strings" "testing" "github.com/coreos/rkt/tests/testutils" taas "github.com/coreos/rkt/tests/testutils/aci-server" ) const ( manifestDepsTemplate = ` { "acKind" : "ImageManifest", "acVersion" : "0.7.4", "dependencies" : [ DEPENDENCIES ], "labels" : [ { "name" : "version", "value" : "VERSION" }, { "name" : "arch", "value" : "amd64" }, { "value" : "linux", "name" : "os" } ], "app" : { "user" : "0", "exec" : [ "/inspect", "--print-msg=HelloDependencies" ], "workingDirectory" : "/", "group" : "0", "environment" : [ ] }, "name" : "IMG_NAME" } ` ) // TestImageDependencies generates ACIs with a complex dependency tree and // fetches them via the discovery mechanism. Some dependencies are already // cached in the CAS, and some dependencies are fetched via the discovery // mechanism. 
This is to reproduce the scenario in explained in: // https://github.com/coreos/rkt/issues/1752#issue-117121841 func TestImageDependencies(t *testing.T) { tmpDir := createTempDirOrPanic("rkt-TestImageDeps-") defer os.RemoveAll(tmpDir) ctx := testutils.NewRktRunCtx() defer ctx.Cleanup() server := runDiscoveryServer(t, taas.ServerOrdinary, taas.AuthNone) defer server.Close() baseImage := getInspectImagePath() _ = importImageAndFetchHash(t, ctx, "", baseImage) emptyImage := getEmptyImagePath() fileSet := make(map[string]string) // Scenario from https://github.com/coreos/rkt/issues/1752#issue-117121841 // // A->B // A->C // A->D // // B: prefetched // // C->B // C->E // // D->B // D->E topImage := "localhost/image-a" imageList := []struct { shortName string imageName string deps string version string prefetch bool manifest string fileName string }{ { shortName: "a", imageName: topImage, deps: `{"imageName":"localhost/image-b"}, {"imageName":"localhost/image-c"}, {"imageName":"localhost/image-d"}`, version: "1", }, { shortName: "b", imageName: "localhost/image-b", deps: ``, version: "1", prefetch: true, }, { shortName: "c", imageName: "localhost/image-c", deps: `{"imageName":"localhost/image-b"}, {"imageName":"localhost/image-e", "labels": [{"name": "version", "value": "1"}]}`, version: "1", }, { shortName: "d", imageName: "localhost/image-d", deps: `{"imageName":"localhost/image-b"}, {"imageName":"localhost/image-e", "labels": [{"name": "version", "value": "1"}]}`, version: "1", }, { shortName: "e", imageName: "localhost/image-e", deps: `{"imageName":"coreos.com/rkt-inspect"}`, version: "1", }, } for i, _ := range imageList { // We need a reference rather than a new copy from "range" // because we modify the content img := &imageList[i] img.manifest = manifestDepsTemplate img.manifest = strings.Replace(img.manifest, "IMG_NAME", img.imageName, -1) img.manifest = strings.Replace(img.manifest, "DEPENDENCIES", img.deps, -1) img.manifest = strings.Replace(img.manifest, 
"VERSION", img.version, -1) tmpManifest, err := ioutil.TempFile(tmpDir, "manifest-"+img.shortName+"-") if err != nil { panic(fmt.Sprintf("Cannot create temp manifest: %v", err)) } defer os.Remove(tmpManifest.Name()) if err := ioutil.WriteFile(tmpManifest.Name(), []byte(img.manifest), 0600); err != nil { panic(fmt.Sprintf("Cannot write to temp manifest: %v", err)) } baseName := "image-" + img.shortName + ".aci" img.fileName = patchACI(emptyImage, baseName, "--manifest", tmpManifest.Name()) defer os.Remove(img.fileName) fileSet[baseName] = img.fileName } server.UpdateFileSet(fileSet) for i := len(imageList) - 1; i >= 0; i-- { img := imageList[i] if img.prefetch { t.Logf("Importing image %q: %q", img.imageName, img.fileName) testImageShortHash := importImageAndFetchHash(t, ctx, "", img.fileName) t.Logf("Imported image %q: %s", img.imageName, testImageShortHash) } } runCmd := fmt.Sprintf("%s --debug --insecure-options=image,tls run %s", ctx.Cmd(), topImage) child := spawnOrFail(t, runCmd) expectedList := []string{ "image: fetching image from https://localhost/localhost/image-a.aci", "image: using image from local store for image name localhost/image-b", "image: fetching image from https://localhost/localhost/image-c.aci", "image: fetching image from https://localhost/localhost/image-d.aci", "image: using image from local store for image name coreos.com/rkt-inspect", "HelloDependencies", } for _, expected := range expectedList { if err := expectWithOutput(child, expected); err != nil { t.Fatalf("Expected %q but not found: %v", expected, err) } } waitOrFail(t, child, true) }
kbrwn/rkt
tests/rkt_image_dependencies_test.go
Go
apache-2.0
5,472