code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
/* _______ ________ \ \ / _____/ ____ ___ / | \/ \ ____/ __ \ / \ / | \ \_\ \ ___/| | \ \____|__ /\______ /\___ >___| / \/ \/ \/ \/ The MIT License (MIT) COPYRIGHT (C) 2016 FIXCOM, LLC Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sub-license, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #ifndef __NGEN_CALLBACK_HPP #define __NGEN_CALLBACK_HPP #include "Ngen.Delegate.hpp" namespace Ngen { /** @brief A function-call provider primarily used to bind object instances to member delegates before invocation. */ class ngen_api Callback { public: /** @brief Constructor. Default. */ Callback() : mThis(0), mFunc(0) {} /** @brief Constructor. (unknown, Delegate*). */ Callback(unknown _this, Delegate* function) : mThis(_this), mFunc(function) {} /** @brief Constructor. (Delegate*). */ Callback(Delegate* function) : mThis(0), mFunc(function) {} /** @brief Constructor. Copy. */ Callback(const Callback& copy) : mThis(copy.mThis), mFunc(copy.mFunc) {} /** @brief De-constructor. 
*/ ~Callback() { mThis = null; mFunc = null; } /** @brief Determines if the callback is properly configured for an invocation. */ bool IsValid() const { if(!isnull(mFunc)) { if(mFunc->IsMember()) { return !isnull(mThis); } return true; } return false; } /** @brief operator==(const Callback&) */ bool operator==(const Callback& rhs) const { if(!isnull(mThis) && mThis != rhs.mThis) { return false; } return rhs.mFunc->EqualTo(mFunc); } /** @brief operator!=(const Callback&) */ bool operator!=(const Callback& rhs) const { if(!isnull(mThis) && mThis != rhs.mThis) { return true; } return !rhs.mFunc->EqualTo(mFunc); } /** @brief Invokes the callback using the given unknown parameter set. */ unknown Call(unknown* params) { return mFunc->operator()(mThis, params); } void MakeValid(unknown pointer) { mThis = pointer; } Type* ReturnType() const { return mFunc->ReturnType(); } Delegate* Function() const { return mFunc; } protected: unknown mThis; Delegate* mFunc; }; } #endif // __NGEN_CALLBACK_HPP
archendian/ngensdk
develop/v0.4/Ngen/include/Ngen.Callback.hpp
C++
mit
3,144
package logbook.server.proxy; /** * 動作に必要なデータのみ取得するためのフィルターです。 * */ public class Filter { /** フィルターするContent-Type */ public static final String CONTENT_TYPE_FILTER = "text/plain"; /** キャプチャーするリクエストのバイトサイズ上限 */ public static final int MAX_POST_FIELD_SIZE = 1024 * 1024; /** setAttribute用のキー(Response) */ public static final String RESPONSE_BODY = "res-body"; /** setAttribute用のキー(Request) */ public static final String REQUEST_BODY = "req-body"; private static String serverName; /** * 鎮守府サーバー名を設定する * @param name 鎮守府サーバー名 */ public static void setServerName(String name) { serverName = name; } /** * 鎮守府サーバー名を取得する * @param name 鎮守府サーバー名 */ public static String getServerName() { return serverName; } /** * 鎮守府サーバー名を検出した場合true * * @return 鎮守府サーバー名を検出した場合true */ public static boolean isServerDetected() { return serverName != null; } /** * <p> * 取得が必要なデータかを調べます<br> * 鎮守府サーバーが検出された場合はサーバー名から必要かどうかを判別します<br> * 鎮守府サーバーが検出できていない場合は常にtrue<br> * * @param name サーバー名 * @return 取得が必要なデータか */ public static boolean isNeed(String name) { if ((!isServerDetected() || (isServerDetected() && serverName.equals(name)))) { return true; } return false; } /** * <p> * 取得が必要なデータかを調べます<br> * 鎮守府サーバーが検出された場合はサーバー名とContent-Typeから必要かどうかを判別します<br> * 鎮守府サーバーが検出できていない場合はContent-Typeから必要かどうかを判別します<br> * * @param name サーバー名 * @param contentType Content-Type * @return 取得が必要なデータか */ public static boolean isNeed(String name, String contentType) { if ((!isServerDetected() || serverName.equals(name)) && CONTENT_TYPE_FILTER.equals(contentType)) { return true; } return false; } }
silfumus/logbook-EN
main/logbook/server/proxy/Filter.java
Java
mit
2,555
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: ForwardDocumentEvent.proto package Diadoc.Api.Proto; public final class ForwardDocumentEventProtos { private ForwardDocumentEventProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface ForwardDocumentEventOrBuilder extends // @@protoc_insertion_point(interface_extends:Diadoc.Api.Proto.ForwardDocumentEvent) com.google.protobuf.MessageOrBuilder { /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ boolean hasTimestamp(); /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ Diadoc.Api.Proto.TimestampProtos.Timestamp getTimestamp(); /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder getTimestampOrBuilder(); /** * <code>optional string ToBoxId = 2;</code> */ boolean hasToBoxId(); /** * <code>optional string ToBoxId = 2;</code> */ java.lang.String getToBoxId(); /** * <code>optional string ToBoxId = 2;</code> */ com.google.protobuf.ByteString getToBoxIdBytes(); } /** * Protobuf type {@code Diadoc.Api.Proto.ForwardDocumentEvent} */ public static final class ForwardDocumentEvent extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Diadoc.Api.Proto.ForwardDocumentEvent) ForwardDocumentEventOrBuilder { // Use ForwardDocumentEvent.newBuilder() to construct. 
private ForwardDocumentEvent(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ForwardDocumentEvent(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ForwardDocumentEvent defaultInstance; public static ForwardDocumentEvent getDefaultInstance() { return defaultInstance; } public ForwardDocumentEvent getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ForwardDocumentEvent( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = timestamp_.toBuilder(); } timestamp_ = input.readMessage(Diadoc.Api.Proto.TimestampProtos.Timestamp.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(timestamp_); timestamp_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; toBoxId_ = bs; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( 
e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return Diadoc.Api.Proto.ForwardDocumentEventProtos.internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return Diadoc.Api.Proto.ForwardDocumentEventProtos.internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_fieldAccessorTable .ensureFieldAccessorsInitialized( Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent.class, Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent.Builder.class); } public static com.google.protobuf.Parser<ForwardDocumentEvent> PARSER = new com.google.protobuf.AbstractParser<ForwardDocumentEvent>() { public ForwardDocumentEvent parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ForwardDocumentEvent(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ForwardDocumentEvent> getParserForType() { return PARSER; } private int bitField0_; public static final int TIMESTAMP_FIELD_NUMBER = 1; private Diadoc.Api.Proto.TimestampProtos.Timestamp timestamp_; /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Diadoc.Api.Proto.TimestampProtos.Timestamp getTimestamp() { return timestamp_; } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder getTimestampOrBuilder() { return timestamp_; } public static final int TOBOXID_FIELD_NUMBER = 2; private java.lang.Object toBoxId_; /** * 
<code>optional string ToBoxId = 2;</code> */ public boolean hasToBoxId() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string ToBoxId = 2;</code> */ public java.lang.String getToBoxId() { java.lang.Object ref = toBoxId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { toBoxId_ = s; } return s; } } /** * <code>optional string ToBoxId = 2;</code> */ public com.google.protobuf.ByteString getToBoxIdBytes() { java.lang.Object ref = toBoxId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); toBoxId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { timestamp_ = Diadoc.Api.Proto.TimestampProtos.Timestamp.getDefaultInstance(); toBoxId_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasTimestamp()) { if (!getTimestamp().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, timestamp_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getToBoxIdBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, timestamp_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { 
size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getToBoxIdBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public 
static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code Diadoc.Api.Proto.ForwardDocumentEvent} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:Diadoc.Api.Proto.ForwardDocumentEvent) Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEventOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return Diadoc.Api.Proto.ForwardDocumentEventProtos.internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
Diadoc.Api.Proto.ForwardDocumentEventProtos.internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_fieldAccessorTable .ensureFieldAccessorsInitialized( Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent.class, Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent.Builder.class); } // Construct using Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTimestampFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (timestampBuilder_ == null) { timestamp_ = Diadoc.Api.Proto.TimestampProtos.Timestamp.getDefaultInstance(); } else { timestampBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); toBoxId_ = ""; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return Diadoc.Api.Proto.ForwardDocumentEventProtos.internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_descriptor; } public Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent getDefaultInstanceForType() { return Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent.getDefaultInstance(); } public Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent build() { Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent buildPartial() { Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent result = new 
Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (timestampBuilder_ == null) { result.timestamp_ = timestamp_; } else { result.timestamp_ = timestampBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.toBoxId_ = toBoxId_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent) { return mergeFrom((Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent other) { if (other == Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent.getDefaultInstance()) return this; if (other.hasTimestamp()) { mergeTimestamp(other.getTimestamp()); } if (other.hasToBoxId()) { bitField0_ |= 0x00000002; toBoxId_ = other.toBoxId_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (hasTimestamp()) { if (!getTimestamp().isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (Diadoc.Api.Proto.ForwardDocumentEventProtos.ForwardDocumentEvent) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private 
Diadoc.Api.Proto.TimestampProtos.Timestamp timestamp_ = Diadoc.Api.Proto.TimestampProtos.Timestamp.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< Diadoc.Api.Proto.TimestampProtos.Timestamp, Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder, Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder> timestampBuilder_; /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Diadoc.Api.Proto.TimestampProtos.Timestamp getTimestamp() { if (timestampBuilder_ == null) { return timestamp_; } else { return timestampBuilder_.getMessage(); } } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Builder setTimestamp(Diadoc.Api.Proto.TimestampProtos.Timestamp value) { if (timestampBuilder_ == null) { if (value == null) { throw new NullPointerException(); } timestamp_ = value; onChanged(); } else { timestampBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Builder setTimestamp( Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder builderForValue) { if (timestampBuilder_ == null) { timestamp_ = builderForValue.build(); onChanged(); } else { timestampBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Builder mergeTimestamp(Diadoc.Api.Proto.TimestampProtos.Timestamp value) { if (timestampBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && timestamp_ != Diadoc.Api.Proto.TimestampProtos.Timestamp.getDefaultInstance()) { timestamp_ = Diadoc.Api.Proto.TimestampProtos.Timestamp.newBuilder(timestamp_).mergeFrom(value).buildPartial(); } else { timestamp_ = value; } onChanged(); } else { timestampBuilder_.mergeFrom(value); } 
bitField0_ |= 0x00000001; return this; } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Builder clearTimestamp() { if (timestampBuilder_ == null) { timestamp_ = Diadoc.Api.Proto.TimestampProtos.Timestamp.getDefaultInstance(); onChanged(); } else { timestampBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder getTimestampBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTimestampFieldBuilder().getBuilder(); } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ public Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder getTimestampOrBuilder() { if (timestampBuilder_ != null) { return timestampBuilder_.getMessageOrBuilder(); } else { return timestamp_; } } /** * <code>optional .Diadoc.Api.Proto.Timestamp Timestamp = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< Diadoc.Api.Proto.TimestampProtos.Timestamp, Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder, Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder> getTimestampFieldBuilder() { if (timestampBuilder_ == null) { timestampBuilder_ = new com.google.protobuf.SingleFieldBuilder< Diadoc.Api.Proto.TimestampProtos.Timestamp, Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder, Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder>( getTimestamp(), getParentForChildren(), isClean()); timestamp_ = null; } return timestampBuilder_; } private java.lang.Object toBoxId_ = ""; /** * <code>optional string ToBoxId = 2;</code> */ public boolean hasToBoxId() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string ToBoxId = 2;</code> */ public java.lang.String getToBoxId() { java.lang.Object ref = toBoxId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if 
(bs.isValidUtf8()) { toBoxId_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string ToBoxId = 2;</code> */ public com.google.protobuf.ByteString getToBoxIdBytes() { java.lang.Object ref = toBoxId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); toBoxId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string ToBoxId = 2;</code> */ public Builder setToBoxId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; toBoxId_ = value; onChanged(); return this; } /** * <code>optional string ToBoxId = 2;</code> */ public Builder clearToBoxId() { bitField0_ = (bitField0_ & ~0x00000002); toBoxId_ = getDefaultInstance().getToBoxId(); onChanged(); return this; } /** * <code>optional string ToBoxId = 2;</code> */ public Builder setToBoxIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; toBoxId_ = value; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:Diadoc.Api.Proto.ForwardDocumentEvent) } static { defaultInstance = new ForwardDocumentEvent(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:Diadoc.Api.Proto.ForwardDocumentEvent) } private static final com.google.protobuf.Descriptors.Descriptor internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\032ForwardDocumentEvent.proto\022\020Diadoc.Api" + ".Proto\032\017Timestamp.proto\"W\n\024ForwardDocume" + 
"ntEvent\022.\n\tTimestamp\030\001 \001(\0132\033.Diadoc.Api." + "Proto.Timestamp\022\017\n\007ToBoxId\030\002 \001(\tB\034B\032Forw" + "ardDocumentEventProtos" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { Diadoc.Api.Proto.TimestampProtos.getDescriptor(), }, assigner); internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Diadoc_Api_Proto_ForwardDocumentEvent_descriptor, new java.lang.String[] { "Timestamp", "ToBoxId", }); Diadoc.Api.Proto.TimestampProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
halex2005/diadocsdk-java
src/main/java/Diadoc/Api/Proto/ForwardDocumentEventProtos.java
Java
mit
27,479
package saberapplications.pawpads.databinding; import android.databinding.BaseObservable; import android.databinding.BindingAdapter; import android.databinding.BindingConversion; import android.text.Editable; import android.text.TextWatcher; import android.widget.EditText; import saberapplications.pawpads.R; /** * Created by Stanislav Volnjanskij on 25.08.16. */ public class BindableDouble extends BaseObservable { private Double value; private String format="%f"; public Double get() { return value; } public void set(double value) { if (this.value == null || !this.value.equals(value)) { this.value = value; notifyChange(); } } public void setSilent(double value) { if (this.value == null || !this.value.equals(value)) { this.value = value; } } public BindableDouble(double value) { super(); this.value = value; } public BindableDouble() { super(); } @BindingConversion public static String convertIntegerToString(BindableDouble value) { if (value != null && value.get()!=null) return String.format(value.getFormat(), value.get()); else { return null; } } @BindingAdapter({"binding2way"}) public static void bindEditText(EditText view, final BindableDouble bindableDouble) { if (view.getTag(R.id.BIND_ID) == null) { view.setTag(R.id.BIND_ID, true); view.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } @Override public void onTextChanged(CharSequence s, int start, int before, int count) { try { bindableDouble.setSilent(Double.parseDouble(s.toString())); } catch (Exception e) { } } @Override public void afterTextChanged(Editable s) { } }); } //initial value if (bindableDouble == null) return; Double newValue = bindableDouble.get(); if (newValue == null) return; String strValue= String.format(bindableDouble.getFormat(),newValue); if (!view.getText().toString().equals(strValue) ) { view.setText(strValue); } } /** * Number format to display in text field * @return */ public String getFormat() { 
return format; } /** *Set number format to display in text field * @param format */ public void setFormat(String format) { this.format = format; } }
castaway2000/Pawpads
app/src/main/java/saberapplications/pawpads/databinding/BindableDouble.java
Java
mit
2,887
using UnityEngine; using System.Collections; public class EnemyDeadInfo { public int score = 0; public Transform transform; public bool headShot = false; }
yantian001/3DSniper
Assets/Script/Struct/EnemyDeadInfo.cs
C#
mit
177
<?php /** * Elgg cron library. * * @package Elgg * @subpackage Core * @author Curverider Ltd * @link http://elgg.org/ */ /** The cron exception. */ class CronException extends Exception {} /** * Initialisation * */ function cron_init() { // Register a pagehandler for cron register_page_handler('cron','cron_page_handler'); } /** * Cron handler for redirecting pages. * * @param unknown_type $page */ function cron_page_handler($page) { global $CONFIG; if ($page[0]) { switch (strtolower($page[0])) { case 'minute' : case 'fiveminute' : case 'fifteenmin' : case 'halfhour' : case 'hourly' : case 'daily' : case 'weekly' : case 'monthly': case 'yearly' : case 'reboot' : set_input('period', $page[0]); break; default : throw new CronException(sprintf(elgg_echo('CronException:unknownperiod'), $page[0])); } // Include cron handler include($CONFIG->path . "engine/handlers/cron_handler.php"); } else forward(); } // Register a startup event register_elgg_event_handler('init','system','cron_init'); ?>
namaggarwal/elgg
engine/lib/cron.php
PHP
mit
1,140
package types import "gopkg.in/pg.v4/internal/parser" func AppendJSONB(b, jsonb []byte, quote int) []byte { if quote == 1 { b = append(b, '\'') } p := parser.New(jsonb) for p.Valid() { c := p.Read() switch c { case '\'': if quote == 1 { b = append(b, '\'', '\'') } else { b = append(b, '\'') } case '\000': continue case '\\': if p.Got("u0000") { b = append(b, "\\\\u0000"...) } else { b = append(b, '\\') if p.Valid() { b = append(b, p.Read()) } } default: b = append(b, c) } } if quote == 1 { b = append(b, '\'') } return b }
yawhide/Lol-personal-counters
vendor/gopkg.in/pg.v4/types/append_jsonb.go
GO
mit
612
#include <stdio.h> #include <ruby.h> #include <ruby/thread.h> #include <v8.h> #include <libplatform/libplatform.h> #include <ruby/encoding.h> #include <pthread.h> #include <unistd.h> #include <mutex> #include <math.h> using namespace v8; class ArrayBufferAllocator : public v8::ArrayBuffer::Allocator { public: virtual void* Allocate(size_t length) { void* data = AllocateUninitialized(length); return data == NULL ? data : memset(data, 0, length); } virtual void* AllocateUninitialized(size_t length) { return malloc(length); } virtual void Free(void* data, size_t) { free(data); } }; typedef struct { const char* data; int raw_size; } SnapshotInfo; typedef struct { Isolate* isolate; ArrayBufferAllocator* allocator; StartupData* startup_data; bool interrupted; pid_t pid; // how many references to this isolate exist // we can't rely on Ruby's GC for this, because when destroying // objects, Ruby will destroy ruby objects first, then call the // extenstion's deallocators. In this case, that means it would // call `deallocate_isolate` _before_ `deallocate`, causing a segfault int refs_count; } IsolateInfo; typedef struct { IsolateInfo* isolate_info; Persistent<Context>* context; } ContextInfo; typedef struct { bool parsed; bool executed; bool terminated; bool json; Persistent<Value>* value; Persistent<Value>* message; Persistent<Value>* backtrace; } EvalResult; typedef struct { ContextInfo* context_info; Local<String>* eval; useconds_t timeout; EvalResult* result; } EvalParams; static VALUE rb_eScriptTerminatedError; static VALUE rb_eParseError; static VALUE rb_eScriptRuntimeError; static VALUE rb_cJavaScriptFunction; static VALUE rb_eSnapshotError; static VALUE rb_ePlatformAlreadyInitializedError; static VALUE rb_mJSON; static VALUE rb_cFailedV8Conversion; static VALUE rb_cDateTime = Qnil; static Platform* current_platform = NULL; static std::mutex platform_lock; static VALUE rb_platform_set_flag_as_str(VALUE _klass, VALUE flag_as_str) { bool platform_already_initialized = 
false; platform_lock.lock(); if (current_platform == NULL) { V8::SetFlagsFromString(RSTRING_PTR(flag_as_str), (int)RSTRING_LEN(flag_as_str)); } else { platform_already_initialized = true; } platform_lock.unlock(); // important to raise outside of the lock if (platform_already_initialized) { rb_raise(rb_ePlatformAlreadyInitializedError, "The V8 platform is already initialized"); } return Qnil; } static void init_v8() { // no need to wait for the lock if already initialized if (current_platform != NULL) return; platform_lock.lock(); if (current_platform == NULL) { V8::InitializeICU(); current_platform = platform::CreateDefaultPlatform(); V8::InitializePlatform(current_platform); V8::Initialize(); } platform_lock.unlock(); } void* nogvl_context_eval(void* arg) { EvalParams* eval_params = (EvalParams*)arg; EvalResult* result = eval_params->result; Isolate* isolate = eval_params->context_info->isolate_info->isolate; Isolate::Scope isolate_scope(isolate); HandleScope handle_scope(isolate); TryCatch trycatch(isolate); Local<Context> context = eval_params->context_info->context->Get(isolate); Context::Scope context_scope(context); // in gvl flag isolate->SetData(0, (void*)false); // terminate ASAP isolate->SetData(1, (void*)false); MaybeLocal<Script> parsed_script = Script::Compile(context, *eval_params->eval); result->parsed = !parsed_script.IsEmpty(); result->executed = false; result->terminated = false; result->json = false; result->value = NULL; if (!result->parsed) { result->message = new Persistent<Value>(); result->message->Reset(isolate, trycatch.Exception()); } else { MaybeLocal<Value> maybe_value = parsed_script.ToLocalChecked()->Run(context); result->executed = !maybe_value.IsEmpty(); if (result->executed) { // arrays and objects get converted to json Local<Value> local_value = maybe_value.ToLocalChecked(); if ((local_value->IsObject() || local_value->IsArray()) && !local_value->IsDate() && !local_value->IsFunction()) { Local<Object> JSON = 
context->Global()->Get( String::NewFromUtf8(isolate, "JSON"))->ToObject(); Local<Function> stringify = JSON->Get(v8::String::NewFromUtf8(isolate, "stringify")) .As<Function>(); Local<Object> object = local_value->ToObject(); const unsigned argc = 1; Local<Value> argv[argc] = { object }; MaybeLocal<Value> json = stringify->Call(JSON, argc, argv); if (json.IsEmpty()) { result->executed = false; } else { result->json = true; Persistent<Value>* persistent = new Persistent<Value>(); persistent->Reset(isolate, json.ToLocalChecked()); result->value = persistent; } } else { Persistent<Value>* persistent = new Persistent<Value>(); persistent->Reset(isolate, local_value); result->value = persistent; } } } if (!result->executed || !result->parsed) { if (trycatch.HasCaught()) { if (!trycatch.Exception()->IsNull()) { result->message = new Persistent<Value>(); Local<Message> message = trycatch.Message(); char buf[1000]; int len; len = snprintf(buf, sizeof(buf), "%s at %s:%i:%i", *String::Utf8Value(message->Get()), *String::Utf8Value(message->GetScriptResourceName()->ToString()), message->GetLineNumber(), message->GetStartColumn()); Local<String> v8_message = String::NewFromUtf8(isolate, buf, NewStringType::kNormal, (int)len).ToLocalChecked(); result->message->Reset(isolate, v8_message); } else if(trycatch.HasTerminated()) { result->terminated = true; result->message = new Persistent<Value>(); Local<String> tmp = String::NewFromUtf8(isolate, "JavaScript was terminated (either by timeout or explicitly)"); result->message->Reset(isolate, tmp); } if (!trycatch.StackTrace().IsEmpty()) { result->backtrace = new Persistent<Value>(); result->backtrace->Reset(isolate, trycatch.StackTrace()->ToString()); } } } isolate->SetData(0, (void*)true); return NULL; } static VALUE convert_v8_to_ruby(Isolate* isolate, Handle<Value> &value) { Isolate::Scope isolate_scope(isolate); HandleScope scope(isolate); if (value->IsNull() || value->IsUndefined()){ return Qnil; } if (value->IsInt32()) { return 
INT2FIX(value->Int32Value()); } if (value->IsNumber()) { return rb_float_new(value->NumberValue()); } if (value->IsTrue()) { return Qtrue; } if (value->IsFalse()) { return Qfalse; } if (value->IsArray()) { VALUE rb_array = rb_ary_new(); Local<Array> arr = Local<Array>::Cast(value); for(uint32_t i=0; i < arr->Length(); i++) { Local<Value> element = arr->Get(i); VALUE rb_elem = convert_v8_to_ruby(isolate, element); if (rb_funcall(rb_elem, rb_intern("class"), 0) == rb_cFailedV8Conversion) { return rb_elem; } rb_ary_push(rb_array, rb_elem); } return rb_array; } if (value->IsFunction()){ return rb_funcall(rb_cJavaScriptFunction, rb_intern("new"), 0); } if (value->IsDate()){ double ts = Local<Date>::Cast(value)->ValueOf(); double secs = ts/1000; long nanos = round((secs - floor(secs)) * 1000000); return rb_time_new(secs, nanos); } if (value->IsObject()) { VALUE rb_hash = rb_hash_new(); TryCatch trycatch(isolate); Local<Context> context = Context::New(isolate); Local<Object> object = value->ToObject(); MaybeLocal<Array> maybe_props = object->GetOwnPropertyNames(context); if (!maybe_props.IsEmpty()) { Local<Array> props = maybe_props.ToLocalChecked(); for(uint32_t i=0; i < props->Length(); i++) { Local<Value> key = props->Get(i); VALUE rb_key = convert_v8_to_ruby(isolate, key); Local<Value> value = object->Get(key); // this may have failed due to Get raising if (trycatch.HasCaught()) { // TODO isolate code that translates execption to ruby // exception so we can properly return it return rb_funcall(rb_cFailedV8Conversion, rb_intern("new"), 1, rb_str_new2("")); } VALUE rb_value = convert_v8_to_ruby(isolate, value); rb_hash_aset(rb_hash, rb_key, rb_value); } } return rb_hash; } Local<String> rstr = value->ToString(); return rb_enc_str_new(*String::Utf8Value(rstr), rstr->Utf8Length(), rb_enc_find("utf-8")); } static Handle<Value> convert_ruby_to_v8(Isolate* isolate, VALUE value) { EscapableHandleScope scope(isolate); Local<Array> array; Local<Object> object; VALUE 
hash_as_array; VALUE pair; int i; long length; long fixnum; VALUE klass; switch (TYPE(value)) { case T_FIXNUM: fixnum = NUM2LONG(value); if (fixnum > INT_MAX) { return scope.Escape(Number::New(isolate, (double)fixnum)); } return scope.Escape(Integer::New(isolate, (int)fixnum)); case T_FLOAT: return scope.Escape(Number::New(isolate, NUM2DBL(value))); case T_STRING: return scope.Escape(String::NewFromUtf8(isolate, RSTRING_PTR(value), NewStringType::kNormal, (int)RSTRING_LEN(value)).ToLocalChecked()); case T_NIL: return scope.Escape(Null(isolate)); case T_TRUE: return scope.Escape(True(isolate)); case T_FALSE: return scope.Escape(False(isolate)); case T_ARRAY: length = RARRAY_LEN(value); array = Array::New(isolate, (int)length); for(i=0; i<length; i++) { array->Set(i, convert_ruby_to_v8(isolate, rb_ary_entry(value, i))); } return scope.Escape(array); case T_HASH: object = Object::New(isolate); hash_as_array = rb_funcall(value, rb_intern("to_a"), 0); length = RARRAY_LEN(hash_as_array); for(i=0; i<length; i++) { pair = rb_ary_entry(hash_as_array, i); object->Set(convert_ruby_to_v8(isolate, rb_ary_entry(pair, 0)), convert_ruby_to_v8(isolate, rb_ary_entry(pair, 1))); } return scope.Escape(object); case T_SYMBOL: value = rb_funcall(value, rb_intern("to_s"), 0); return scope.Escape(String::NewFromUtf8(isolate, RSTRING_PTR(value), NewStringType::kNormal, (int)RSTRING_LEN(value)).ToLocalChecked()); case T_DATA: klass = rb_funcall(value, rb_intern("class"), 0); if (klass == rb_cTime || klass == rb_cDateTime) { if (klass == rb_cDateTime) { value = rb_funcall(value, rb_intern("to_time"), 0); } value = rb_funcall(value, rb_intern("to_f"), 0); return scope.Escape(Date::New(isolate, NUM2DBL(value) * 1000)); } case T_OBJECT: case T_CLASS: case T_ICLASS: case T_MODULE: case T_REGEXP: case T_MATCH: case T_STRUCT: case T_BIGNUM: case T_FILE: case T_UNDEF: case T_NODE: default: return scope.Escape(String::NewFromUtf8(isolate, "Undefined Conversion")); } } static void unblock_eval(void 
*ptr) { EvalParams* eval = (EvalParams*)ptr; eval->context_info->isolate_info->interrupted = true; } static VALUE rb_snapshot_size(VALUE self, VALUE str) { SnapshotInfo* snapshot_info; Data_Get_Struct(self, SnapshotInfo, snapshot_info); return INT2NUM(snapshot_info->raw_size); } static VALUE rb_snapshot_load(VALUE self, VALUE str) { SnapshotInfo* snapshot_info; Data_Get_Struct(self, SnapshotInfo, snapshot_info); init_v8(); StartupData startup_data = V8::CreateSnapshotDataBlob(RSTRING_PTR(str)); if (startup_data.data == NULL && startup_data.raw_size == 0) { rb_raise(rb_eSnapshotError, "Could not create snapshot, most likely the source is incorrect"); } snapshot_info->data = startup_data.data; snapshot_info->raw_size = startup_data.raw_size; return Qnil; } static VALUE rb_snapshot_warmup(VALUE self, VALUE str) { SnapshotInfo* snapshot_info; Data_Get_Struct(self, SnapshotInfo, snapshot_info); init_v8(); StartupData cold_startup_data = {snapshot_info->data, snapshot_info->raw_size}; StartupData warm_startup_data = V8::WarmUpSnapshotDataBlob(cold_startup_data, RSTRING_PTR(str)); if (warm_startup_data.data == NULL && warm_startup_data.raw_size == 0) { rb_raise(rb_eSnapshotError, "Could not warm up snapshot, most likely the source is incorrect"); } else { delete[] snapshot_info->data; snapshot_info->data = warm_startup_data.data; snapshot_info->raw_size = warm_startup_data.raw_size; } return self; } static VALUE rb_isolate_init_with_snapshot(VALUE self, VALUE snapshot) { IsolateInfo* isolate_info; Data_Get_Struct(self, IsolateInfo, isolate_info); init_v8(); isolate_info->allocator = new ArrayBufferAllocator(); isolate_info->interrupted = false; isolate_info->refs_count = 1; Isolate::CreateParams create_params; create_params.array_buffer_allocator = isolate_info->allocator; StartupData* startup_data = NULL; if (!NIL_P(snapshot)) { SnapshotInfo* snapshot_info; Data_Get_Struct(snapshot, SnapshotInfo, snapshot_info); int raw_size = snapshot_info->raw_size; char* data = new 
char[raw_size]; memcpy(data, snapshot_info->data, sizeof(char) * raw_size); startup_data = new StartupData; startup_data->data = data; startup_data->raw_size = raw_size; create_params.snapshot_blob = startup_data; } isolate_info->startup_data = startup_data; isolate_info->isolate = Isolate::New(create_params); return Qnil; } static VALUE rb_isolate_idle_notification(VALUE self, VALUE idle_time_in_ms) { IsolateInfo* isolate_info; Data_Get_Struct(self, IsolateInfo, isolate_info); return isolate_info->isolate->IdleNotification(NUM2INT(idle_time_in_ms)) ? Qtrue : Qfalse; } static VALUE rb_context_init_with_isolate(VALUE self, VALUE isolate) { ContextInfo* context_info; Data_Get_Struct(self, ContextInfo, context_info); init_v8(); IsolateInfo* isolate_info; Data_Get_Struct(isolate, IsolateInfo, isolate_info); context_info->isolate_info = isolate_info; isolate_info->refs_count++; { Locker lock(isolate_info->isolate); Isolate::Scope isolate_scope(isolate_info->isolate); HandleScope handle_scope(isolate_info->isolate); Local<Context> context = Context::New(isolate_info->isolate); context_info->context = new Persistent<Context>(); context_info->context->Reset(isolate_info->isolate, context); } if (Qnil == rb_cDateTime && rb_funcall(rb_cObject, rb_intern("const_defined?"), 1, rb_str_new2("DateTime")) == Qtrue) { rb_cDateTime = rb_const_get(rb_cObject, rb_intern("DateTime")); } return Qnil; } static VALUE rb_context_eval_unsafe(VALUE self, VALUE str) { EvalParams eval_params; EvalResult eval_result; ContextInfo* context_info; VALUE result; VALUE message = Qnil; VALUE backtrace = Qnil; Data_Get_Struct(self, ContextInfo, context_info); Isolate* isolate = context_info->isolate_info->isolate; { Locker lock(isolate); Isolate::Scope isolate_scope(isolate); HandleScope handle_scope(isolate); Local<String> eval = String::NewFromUtf8(isolate, RSTRING_PTR(str), NewStringType::kNormal, (int)RSTRING_LEN(str)).ToLocalChecked(); eval_params.context_info = context_info; eval_params.eval = 
&eval; eval_params.result = &eval_result; eval_params.timeout = 0; VALUE timeout = rb_iv_get(self, "@timeout"); if (timeout != Qnil) { eval_params.timeout = (useconds_t)NUM2LONG(timeout); } eval_result.message = NULL; eval_result.backtrace = NULL; rb_thread_call_without_gvl(nogvl_context_eval, &eval_params, unblock_eval, &eval_params); if (eval_result.message != NULL) { Local<Value> tmp = Local<Value>::New(isolate, *eval_result.message); message = convert_v8_to_ruby(isolate, tmp); eval_result.message->Reset(); delete eval_result.message; } if (eval_result.backtrace != NULL) { Local<Value> tmp = Local<Value>::New(isolate, *eval_result.backtrace); backtrace = convert_v8_to_ruby(isolate, tmp); eval_result.backtrace->Reset(); delete eval_result.backtrace; } } // NOTE: this is very important, we can not do an rb_raise from within // a v8 scope, if we do the scope is never cleaned up properly and we leak if (!eval_result.parsed) { if(TYPE(message) == T_STRING) { rb_raise(rb_eParseError, "%s", RSTRING_PTR(message)); } else { rb_raise(rb_eParseError, "Unknown JavaScript Error during parse"); } } if (!eval_result.executed) { VALUE ruby_exception = rb_iv_get(self, "@current_exception"); if (ruby_exception == Qnil) { ruby_exception = eval_result.terminated ? 
rb_eScriptTerminatedError : rb_eScriptRuntimeError; // exception report about what happened if(TYPE(backtrace) == T_STRING) { rb_raise(ruby_exception, "%s", RSTRING_PTR(backtrace)); } else if(TYPE(message) == T_STRING) { rb_raise(ruby_exception, "%s", RSTRING_PTR(message)); } else { rb_raise(ruby_exception, "Unknown JavaScript Error during execution"); } } else { VALUE rb_str = rb_funcall(ruby_exception, rb_intern("to_s"), 0); rb_raise(CLASS_OF(ruby_exception), "%s", RSTRING_PTR(rb_str)); } } // New scope for return value { Locker lock(isolate); Isolate::Scope isolate_scope(isolate); HandleScope handle_scope(isolate); Local<Value> tmp = Local<Value>::New(isolate, *eval_result.value); if (eval_result.json) { Local<String> rstr = tmp->ToString(); VALUE json_string = rb_enc_str_new(*String::Utf8Value(rstr), rstr->Utf8Length(), rb_enc_find("utf-8")); result = rb_funcall(rb_mJSON, rb_intern("parse"), 1, json_string); } else { result = convert_v8_to_ruby(isolate, tmp); } eval_result.value->Reset(); delete eval_result.value; } if (rb_funcall(result, rb_intern("class"), 0) == rb_cFailedV8Conversion) { // TODO try to recover stack trace from the conversion error rb_raise(rb_eScriptRuntimeError, "Error converting JS object to Ruby object"); } return result; } typedef struct { VALUE callback; int length; VALUE* args; bool failed; } protected_callback_data; static VALUE protected_callback(VALUE rdata) { protected_callback_data* data = (protected_callback_data*)rdata; VALUE result; if (data->length > 0) { result = rb_funcall2(data->callback, rb_intern("call"), data->length, data->args); } else { result = rb_funcall(data->callback, rb_intern("call"), 0); } return result; } static VALUE rescue_callback(VALUE rdata, VALUE exception) { protected_callback_data* data = (protected_callback_data*)rdata; data->failed = true; return exception; } void* gvl_ruby_callback(void* data) { FunctionCallbackInfo<Value>* args = (FunctionCallbackInfo<Value>*)data; VALUE* ruby_args = NULL; int 
length = args->Length(); VALUE callback; VALUE result; VALUE self; { HandleScope scope(args->GetIsolate()); Handle<External> external = Handle<External>::Cast(args->Data()); VALUE* self_pointer = (VALUE*)(external->Value()); self = *self_pointer; callback = rb_iv_get(self, "@callback"); if (length > 0) { ruby_args = ALLOC_N(VALUE, length); } for (int i = 0; i < length; i++) { Local<Value> value = ((*args)[i]).As<Value>(); ruby_args[i] = convert_v8_to_ruby(args->GetIsolate(), value); } } // may raise exception stay clear of handle scope protected_callback_data callback_data; callback_data.length = length; callback_data.callback = callback; callback_data.args = ruby_args; callback_data.failed = false; if ((bool)args->GetIsolate()->GetData(1) == true) { args->GetIsolate()->ThrowException(String::NewFromUtf8(args->GetIsolate(), "Terminated execution during tansition from Ruby to JS")); V8::TerminateExecution(args->GetIsolate()); return NULL; } result = rb_rescue2((VALUE(*)(...))&protected_callback, (VALUE)(&callback_data), (VALUE(*)(...))&rescue_callback, (VALUE)(&callback_data), rb_eException, (VALUE)0); if(callback_data.failed) { VALUE parent = rb_iv_get(self, "@parent"); rb_iv_set(parent, "@current_exception", result); args->GetIsolate()->ThrowException(String::NewFromUtf8(args->GetIsolate(), "Ruby exception")); } else { HandleScope scope(args->GetIsolate()); Handle<Value> v8_result = convert_ruby_to_v8(args->GetIsolate(), result); args->GetReturnValue().Set(v8_result); } if (length > 0) { xfree(ruby_args); } if ((bool)args->GetIsolate()->GetData(1) == true) { Isolate* isolate = args->GetIsolate(); V8::TerminateExecution(isolate); } return NULL; } static void ruby_callback(const FunctionCallbackInfo<Value>& args) { bool has_gvl = (bool)args.GetIsolate()->GetData(0); if(has_gvl) { gvl_ruby_callback((void*)&args); } else { rb_thread_call_with_gvl(gvl_ruby_callback, (void*)(&args)); } } static VALUE rb_external_function_notify_v8(VALUE self) { ContextInfo* 
context_info; VALUE parent = rb_iv_get(self, "@parent"); VALUE name = rb_iv_get(self, "@name"); VALUE parent_object = rb_iv_get(self, "@parent_object"); VALUE parent_object_eval = rb_iv_get(self, "@parent_object_eval"); bool parse_error = false; bool attach_error = false; Data_Get_Struct(parent, ContextInfo, context_info); Isolate* isolate = context_info->isolate_info->isolate; { Locker lock(isolate); Isolate::Scope isolate_scope(isolate); HandleScope handle_scope(isolate); Local<Context> context = context_info->context->Get(isolate); Context::Scope context_scope(context); Local<String> v8_str = String::NewFromUtf8(isolate, RSTRING_PTR(name), NewStringType::kNormal, (int)RSTRING_LEN(name)).ToLocalChecked(); // copy self so we can access from v8 external VALUE* self_copy; Data_Get_Struct(self, VALUE, self_copy); *self_copy = self; Local<Value> external = External::New(isolate, self_copy); if (parent_object == Qnil) { context->Global()->Set(v8_str, FunctionTemplate::New(isolate, ruby_callback, external)->GetFunction()); } else { Local<String> eval = String::NewFromUtf8(isolate, RSTRING_PTR(parent_object_eval), NewStringType::kNormal, (int)RSTRING_LEN(parent_object_eval)).ToLocalChecked(); MaybeLocal<Script> parsed_script = Script::Compile(context, eval); if (parsed_script.IsEmpty()) { parse_error = true; } else { MaybeLocal<Value> maybe_value = parsed_script.ToLocalChecked()->Run(context); attach_error = true; if (!maybe_value.IsEmpty()) { Local<Value> value = maybe_value.ToLocalChecked(); if (value->IsObject()){ value.As<Object>()->Set(v8_str, FunctionTemplate::New(isolate, ruby_callback, external)->GetFunction()); attach_error = false; } } } } } // always raise out of V8 context if (parse_error) { rb_raise(rb_eParseError, "Invalid object %s", RSTRING_PTR(parent_object)); } if (attach_error) { rb_raise(rb_eParseError, "Was expecting %s to be an object", RSTRING_PTR(parent_object)); } return Qnil; } void maybe_free_isolate_info(IsolateInfo* isolate_info) { // an 
isolate can only be freed if no Isolate or Context (ruby) object // still need it if (isolate_info == NULL || isolate_info->refs_count > 0) { return; } if (isolate_info->isolate) { Locker lock(isolate_info->isolate); } if (isolate_info->isolate) { if (isolate_info->interrupted) { fprintf(stderr, "WARNING: V8 isolate was interrupted by Ruby, it can not be disposed and memory will not be reclaimed till the Ruby process exits.\n"); } else { if (isolate_info->pid != getpid()) { fprintf(stderr, "WARNING: V8 isolate was forked, it can not be disposed and memory will not be reclaimed till the Ruby process exits.\n"); } else { isolate_info->isolate->Dispose(); } } isolate_info->isolate = NULL; } if (isolate_info->startup_data) { delete[] isolate_info->startup_data->data; delete isolate_info->startup_data; } delete isolate_info->allocator; xfree(isolate_info); } void deallocate_isolate(void* data) { IsolateInfo* isolate_info = (IsolateInfo*) data; isolate_info->refs_count--; maybe_free_isolate_info(isolate_info); } void deallocate(void* data) { ContextInfo* context_info = (ContextInfo*)data; IsolateInfo* isolate_info = context_info->isolate_info; if (context_info->context && isolate_info && isolate_info->isolate) { Locker lock(isolate_info->isolate); v8::Isolate::Scope isolate_scope(isolate_info->isolate); context_info->context->Reset(); delete context_info->context; } if (isolate_info) { isolate_info->refs_count--; maybe_free_isolate_info(isolate_info); } } void deallocate_external_function(void * data) { xfree(data); } void deallocate_snapshot(void * data) { SnapshotInfo* snapshot_info = (SnapshotInfo*)data; delete[] snapshot_info->data; xfree(snapshot_info); } VALUE allocate_external_function(VALUE klass) { VALUE* self = ALLOC(VALUE); return Data_Wrap_Struct(klass, NULL, deallocate_external_function, (void*)self); } VALUE allocate(VALUE klass) { ContextInfo* context_info = ALLOC(ContextInfo); context_info->isolate_info = NULL; context_info->context = NULL; return 
Data_Wrap_Struct(klass, NULL, deallocate, (void*)context_info); } VALUE allocate_snapshot(VALUE klass) { SnapshotInfo* snapshot_info = ALLOC(SnapshotInfo); snapshot_info->data = NULL; snapshot_info->raw_size = 0; return Data_Wrap_Struct(klass, NULL, deallocate_snapshot, (void*)snapshot_info); } VALUE allocate_isolate(VALUE klass) { IsolateInfo* isolate_info = ALLOC(IsolateInfo); isolate_info->isolate = NULL; isolate_info->allocator = NULL; isolate_info->startup_data = NULL; isolate_info->interrupted = false; isolate_info->refs_count = 0; isolate_info->pid = getpid(); return Data_Wrap_Struct(klass, NULL, deallocate_isolate, (void*)isolate_info); } static VALUE rb_context_stop(VALUE self) { ContextInfo* context_info; Data_Get_Struct(self, ContextInfo, context_info); Isolate* isolate = context_info->isolate_info->isolate; // flag for termination isolate->SetData(1, (void*)true); V8::TerminateExecution(isolate); rb_funcall(self, rb_intern("stop_attached"), 0); return Qnil; } extern "C" { void Init_mini_racer_extension ( void ) { VALUE rb_mMiniRacer = rb_define_module("MiniRacer"); VALUE rb_cContext = rb_define_class_under(rb_mMiniRacer, "Context", rb_cObject); VALUE rb_cSnapshot = rb_define_class_under(rb_mMiniRacer, "Snapshot", rb_cObject); VALUE rb_cIsolate = rb_define_class_under(rb_mMiniRacer, "Isolate", rb_cObject); VALUE rb_cPlatform = rb_define_class_under(rb_mMiniRacer, "Platform", rb_cObject); VALUE rb_eEvalError = rb_define_class_under(rb_mMiniRacer, "EvalError", rb_eStandardError); rb_eScriptTerminatedError = rb_define_class_under(rb_mMiniRacer, "ScriptTerminatedError", rb_eEvalError); rb_eParseError = rb_define_class_under(rb_mMiniRacer, "ParseError", rb_eEvalError); rb_eScriptRuntimeError = rb_define_class_under(rb_mMiniRacer, "RuntimeError", rb_eEvalError); rb_cJavaScriptFunction = rb_define_class_under(rb_mMiniRacer, "JavaScriptFunction", rb_cObject); rb_eSnapshotError = rb_define_class_under(rb_mMiniRacer, "SnapshotError", rb_eStandardError); 
rb_ePlatformAlreadyInitializedError = rb_define_class_under(rb_mMiniRacer, "PlatformAlreadyInitialized", rb_eStandardError); rb_cFailedV8Conversion = rb_define_class_under(rb_mMiniRacer, "FailedV8Conversion", rb_cObject); rb_mJSON = rb_define_module("JSON"); VALUE rb_cExternalFunction = rb_define_class_under(rb_cContext, "ExternalFunction", rb_cObject); rb_define_method(rb_cContext, "stop", (VALUE(*)(...))&rb_context_stop, 0); rb_define_alloc_func(rb_cContext, allocate); rb_define_alloc_func(rb_cSnapshot, allocate_snapshot); rb_define_alloc_func(rb_cIsolate, allocate_isolate); rb_define_private_method(rb_cContext, "eval_unsafe",(VALUE(*)(...))&rb_context_eval_unsafe, 1); rb_define_private_method(rb_cContext, "init_with_isolate",(VALUE(*)(...))&rb_context_init_with_isolate, 1); rb_define_private_method(rb_cExternalFunction, "notify_v8", (VALUE(*)(...))&rb_external_function_notify_v8, 0); rb_define_alloc_func(rb_cExternalFunction, allocate_external_function); rb_define_method(rb_cSnapshot, "size", (VALUE(*)(...))&rb_snapshot_size, 0); rb_define_method(rb_cSnapshot, "warmup!", (VALUE(*)(...))&rb_snapshot_warmup, 1); rb_define_private_method(rb_cSnapshot, "load", (VALUE(*)(...))&rb_snapshot_load, 1); rb_define_method(rb_cIsolate, "idle_notification", (VALUE(*)(...))&rb_isolate_idle_notification, 1); rb_define_private_method(rb_cIsolate, "init_with_snapshot",(VALUE(*)(...))&rb_isolate_init_with_snapshot, 1); rb_define_singleton_method(rb_cPlatform, "set_flag_as_str!", (VALUE(*)(...))&rb_platform_set_flag_as_str, 1); } }
lrosskamp/makealist-public
vendor/cache/ruby/2.3.0/gems/mini_racer-0.1.9/ext/mini_racer_extension/mini_racer_extension.cc
C++
mit
29,455
// This source code is dual-licensed under the Apache License, version // 2.0, and the Mozilla Public License, version 1.1. // // The APL v2.0: // //--------------------------------------------------------------------------- // Copyright (C) 2007-2014 GoPivotal, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //--------------------------------------------------------------------------- // // The MPL v1.1: // //--------------------------------------------------------------------------- // The contents of this file are subject to the Mozilla Public License // Version 1.1 (the "License"); you may not use this file except in // compliance with the License. You may obtain a copy of the License // at http://www.mozilla.org/MPL/ // // Software distributed under the License is distributed on an "AS IS" // basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See // the License for the specific language governing rights and // limitations under the License. // // The Original Code is RabbitMQ. // // The Initial Developer of the Original Code is GoPivotal, Inc. // Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. 
//--------------------------------------------------------------------------- using System; using System.Collections.Generic; namespace RabbitMQ.Client.Impl { public abstract class StreamProperties : ContentHeaderBase, IStreamProperties { public abstract string ContentType { get; set; } public abstract string ContentEncoding { get; set; } public abstract IDictionary<string, object> Headers { get; set; } public abstract byte Priority { get; set; } public abstract AmqpTimestamp Timestamp { get; set; } public abstract void ClearContentType(); public abstract void ClearContentEncoding(); public abstract void ClearHeaders(); public abstract void ClearPriority(); public abstract void ClearTimestamp(); public abstract bool IsContentTypePresent(); public abstract bool IsContentEncodingPresent(); public abstract bool IsHeadersPresent(); public abstract bool IsPriorityPresent(); public abstract bool IsTimestampPresent(); public override object Clone() { StreamProperties clone = MemberwiseClone() as StreamProperties; if (IsHeadersPresent()) { clone.Headers = new Dictionary<string, object>(); foreach (KeyValuePair<string, object> entry in Headers) clone.Headers[entry.Key] = entry.Value; } return clone; } } }
CymaticLabs/Unity3D.Amqp
lib/rabbitmq-dotnet-client-rabbitmq_v3_4_4/projects/client/RabbitMQ.Client/src/client/impl/StreamProperties.cs
C#
mit
3,179
/* * Copyright (C) 2015 Actor LLC. <https://actor.im> */ package im.actor.core.entity; import com.google.j2objc.annotations.Property; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.IOException; import java.util.ArrayList; import java.util.List; import im.actor.core.api.ApiAvatar; import im.actor.core.api.ApiBotCommand; import im.actor.core.api.ApiContactRecord; import im.actor.core.api.ApiContactType; import im.actor.core.api.ApiFullUser; import im.actor.core.api.ApiInt32Value; import im.actor.core.api.ApiMapValue; import im.actor.core.api.ApiMapValueItem; import im.actor.core.api.ApiUser; import im.actor.runtime.bser.BserCreator; import im.actor.runtime.bser.BserValues; import im.actor.runtime.bser.BserWriter; import im.actor.runtime.storage.KeyValueItem; // Disabling Bounds checks for speeding up calculations /*-[ #define J2OBJC_DISABLE_ARRAY_BOUND_CHECKS 1 ]-*/ public class User extends WrapperExtEntity<ApiFullUser, ApiUser> implements KeyValueItem { private static final int RECORD_ID = 10; private static final int RECORD_FULL_ID = 20; public static BserCreator<User> CREATOR = User::new; @Property("readonly, nonatomic") private int uid; @Property("readonly, nonatomic") private long accessHash; @NotNull @Property("readonly, nonatomic") @SuppressWarnings("NullableProblems") private String name; @Nullable @Property("readonly, nonatomic") private String localName; @Nullable @Property("readonly, nonatomic") private String username; @Nullable @Property("readonly, nonatomic") private String about; @Nullable @Property("readonly, nonatomic") private Avatar avatar; @NotNull @Property("readonly, nonatomic") @SuppressWarnings("NullableProblems") private Sex sex; @Property("readonly, nonatomic") private boolean isBot; @NotNull @Property("readonly, nonatomic") @SuppressWarnings("NullableProblems") private List<ContactRecord> records; @Property("readonly, nonatomic") private boolean isBlocked; @Nullable @Property("readonly, 
nonatomic") private String timeZone; @Property("readonly, nonatomic") private boolean isVerified; @Property("readonly, nonatomic") private List<BotCommand> commands; @NotNull @Property("readonly, nonatomic") private boolean haveExtension; public User(@NotNull ApiUser wrappedUser, @Nullable ApiFullUser ext) { super(RECORD_ID, RECORD_FULL_ID, wrappedUser, ext); } public User(@NotNull byte[] data) throws IOException { super(RECORD_ID, RECORD_FULL_ID, data); } private User() { super(RECORD_ID, RECORD_FULL_ID); } @NotNull public Peer peer() { return new Peer(PeerType.PRIVATE, uid); } public int getUid() { return uid; } public long getAccessHash() { return accessHash; } @NotNull public String getServerName() { return name; } @Nullable public String getLocalName() { return localName; } @NotNull public String getName() { if (localName == null) { return name; } else { return localName; } } @Nullable public String getNick() { return username; } @Nullable public String getAbout() { return about; } @Nullable public Avatar getAvatar() { return avatar; } @NotNull public Sex getSex() { return sex; } public boolean isHaveExtension() { return haveExtension; } @NotNull public List<ContactRecord> getRecords() { return records; } public boolean isBot() { return isBot; } public List<BotCommand> getCommands() { return commands; } public boolean isBlocked() { return isBlocked; } @Nullable public String getTimeZone() { return timeZone; } public boolean isVerified() { return isVerified; } public User editName(@NotNull String name) { ApiUser w = getWrapped(); ApiUser res = new ApiUser( w.getId(), w.getAccessHash(), name, w.getLocalName(), w.getNick(), w.getSex(), w.getAvatar(), w.isBot(), w.getExt()); res.setUnmappedObjects(w.getUnmappedObjects()); return new User(res, getWrappedExt()); } public User editLocalName(@NotNull String localName) { ApiUser w = getWrapped(); ApiUser res = new ApiUser( w.getId(), w.getAccessHash(), w.getName(), localName, w.getNick(), w.getSex(), w.getAvatar(), 
w.isBot(), w.getExt()); res.setUnmappedObjects(w.getUnmappedObjects()); return new User(res, getWrappedExt()); } public User editNick(@Nullable String nick) { ApiUser w = getWrapped(); ApiUser res = new ApiUser( w.getId(), w.getAccessHash(), w.getName(), w.getLocalName(), nick, w.getSex(), w.getAvatar(), w.isBot(), w.getExt()); res.setUnmappedObjects(w.getUnmappedObjects()); return new User(res, getWrappedExt()); } public User editExt(@Nullable ApiMapValue ext) { ApiUser w = getWrapped(); ApiUser res = new ApiUser( w.getId(), w.getAccessHash(), w.getName(), w.getLocalName(), w.getNick(), w.getSex(), w.getAvatar(), w.isBot(), ext); res.setUnmappedObjects(w.getUnmappedObjects()); return new User(res, getWrappedExt()); } public User editAvatar(@Nullable ApiAvatar avatar) { ApiUser w = getWrapped(); ApiUser res = new ApiUser( w.getId(), w.getAccessHash(), w.getName(), w.getLocalName(), w.getNick(), w.getSex(), avatar, w.isBot(), w.getExt()); res.setUnmappedObjects(w.getUnmappedObjects()); return new User(res, getWrappedExt()); } public User updateExt(@Nullable ApiFullUser ext) { return new User(getWrapped(), ext); } public User editAbout(@Nullable String about) { ApiFullUser ext = getWrappedExt(); if (ext != null) { ApiFullUser upd = new ApiFullUser( ext.getId(), ext.getContactInfo(), about, ext.getPreferredLanguages(), ext.getTimeZone(), ext.getBotCommands(), ext.getExt(), ext.isBlocked() ); return new User(getWrapped(), upd); } else { return this; } } public User editPreferredLanguages(List<String> preferredLanguages) { ApiFullUser ext = getWrappedExt(); if (ext != null) { ApiFullUser upd = new ApiFullUser( ext.getId(), ext.getContactInfo(), ext.getAbout(), preferredLanguages, ext.getTimeZone(), ext.getBotCommands(), ext.getExt(), ext.isBlocked() ); return new User(getWrapped(), upd); } else { return this; } } public User editTimeZone(String timeZone) { ApiFullUser ext = getWrappedExt(); if (ext != null) { ApiFullUser upd = new ApiFullUser( ext.getId(), 
ext.getContactInfo(), ext.getAbout(), ext.getPreferredLanguages(), timeZone, ext.getBotCommands(), ext.getExt(), ext.isBlocked() ); return new User(getWrapped(), upd); } else { return this; } } public User editContacts(List<ApiContactRecord> contacts) { ApiFullUser ext = getWrappedExt(); if (ext != null) { ApiFullUser upd = new ApiFullUser( ext.getId(), contacts, ext.getAbout(), ext.getPreferredLanguages(), ext.getTimeZone(), ext.getBotCommands(), ext.getExt(), ext.isBlocked() ); return new User(getWrapped(), upd); } else { return this; } } public User editBotCommands(List<ApiBotCommand> commands) { ApiFullUser ext = getWrappedExt(); if (ext != null) { ApiFullUser upd = new ApiFullUser( ext.getId(), ext.getContactInfo(), ext.getAbout(), ext.getPreferredLanguages(), ext.getTimeZone(), commands, ext.getExt(), ext.isBlocked() ); return new User(getWrapped(), upd); } else { return this; } } public User editFullExt(ApiMapValue extv) { ApiFullUser ext = getWrappedExt(); if (ext != null) { ApiFullUser upd = new ApiFullUser( ext.getId(), ext.getContactInfo(), ext.getAbout(), ext.getPreferredLanguages(), ext.getTimeZone(), ext.getBotCommands(), extv, ext.isBlocked() ); return new User(getWrapped(), upd); } else { return this; } } public User editBlocked(boolean isBlocked) { ApiFullUser ext = getWrappedExt(); if (ext != null) { ApiFullUser upd = new ApiFullUser( ext.getId(), ext.getContactInfo(), ext.getAbout(), ext.getPreferredLanguages(), ext.getTimeZone(), ext.getBotCommands(), ext.getExt(), isBlocked ); return new User(getWrapped(), upd); } else { return this; } } @Override protected void applyWrapped(@NotNull ApiUser wrapped, @Nullable ApiFullUser ext) { this.uid = wrapped.getId(); this.accessHash = wrapped.getAccessHash(); this.name = wrapped.getName(); this.localName = wrapped.getLocalName(); if (wrapped.getNick() != null && wrapped.getNick().length() > 0) { this.username = wrapped.getNick(); } else { this.username = null; } if (wrapped.getAvatar() != null) { 
this.avatar = new Avatar(wrapped.getAvatar()); } this.isBot = false; if (wrapped.isBot() != null) { this.isBot = wrapped.isBot(); } this.sex = Sex.UNKNOWN; if (wrapped.getSex() != null) { switch (wrapped.getSex()) { case FEMALE: this.sex = Sex.FEMALE; break; case MALE: this.sex = Sex.MALE; break; } } if (wrapped.getExt() != null) { this.isVerified = true; for (ApiMapValueItem i : wrapped.getExt().getItems()) { if ("is_verified".equals(i.getKey())) { if (i.getValue() instanceof ApiInt32Value) { this.isVerified = ((ApiInt32Value) i.getValue()).getValue() > 0; } } } } // Extension if (ext != null) { this.haveExtension = true; this.records = new ArrayList<>(); this.commands = new ArrayList<BotCommand>(); if (ext.isBlocked() != null) { this.isBlocked = ext.isBlocked(); } else { this.isBlocked = false; } this.timeZone = ext.getTimeZone(); for (ApiContactRecord record : ext.getContactInfo()) { if (record.getType() == ApiContactType.PHONE) { this.records.add(new ContactRecord(ContactRecordType.PHONE, record.getTypeSpec(), "" + record.getLongValue(), record.getTitle(), record.getSubtitle())); } else if (record.getType() == ApiContactType.EMAIL) { this.records.add(new ContactRecord(ContactRecordType.EMAIL, record.getTypeSpec(), record.getStringValue(), record.getTitle(), record.getSubtitle())); } else if (record.getType() == ApiContactType.WEB) { this.records.add(new ContactRecord(ContactRecordType.WEB, record.getTypeSpec(), record.getStringValue(), record.getTitle(), record.getSubtitle())); } else if (record.getType() == ApiContactType.SOCIAL) { this.records.add(new ContactRecord(ContactRecordType.SOCIAL, record.getTypeSpec(), record.getStringValue(), record.getTitle(), record.getSubtitle())); } } //Bot commands for (ApiBotCommand command : ext.getBotCommands()) { commands.add(new BotCommand(command.getSlashCommand(), command.getDescription(), command.getLocKey())); } this.about = ext.getAbout(); } else { this.isBlocked = false; this.haveExtension = false; this.records = 
new ArrayList<>(); this.commands = new ArrayList<BotCommand>(); this.about = null; this.timeZone = null; } } @Override public void parse(BserValues values) throws IOException { // Is Wrapper Layout if (values.getBool(8, false)) { // Parse wrapper layout super.parse(values); } else { // Convert old layout throw new IOException("Unsupported obsolete format"); } } @Override public void serialize(BserWriter writer) throws IOException { // Mark as wrapper layout writer.writeBool(8, true); // Serialize wrapper layout super.serialize(writer); } @Override public long getEngineId() { return getUid(); } @Override @NotNull protected ApiUser createInstance() { return new ApiUser(); } @Override protected ApiFullUser createExtInstance() { return new ApiFullUser(); } }
ljshj/actor-platform
actor-sdk/sdk-core/core/core-shared/src/main/java/im/actor/core/entity/User.java
Java
mit
15,291
<?xml version="1.0" encoding="UTF-8"?> <tileset name="Canalave Gym Set" firstgid="1" tilewidth="16" tileheight="16"> <image source="Canalave Gym Set.png" trans="000000"/> </tileset>
vVv-AA/Pokemon-AndEngine
app/assets/tmx/workspace/Canalave Gym Set.tsx
TypeScript
mit
182
package api import ( "errors" "io/ioutil" "log" "net/http" "github.com/bitly/go-simplejson" ) func Request(req *http.Request) (*simplejson.Json, error) { resp, err := http.DefaultClient.Do(req) if err != nil { return nil, err } body, err := ioutil.ReadAll(resp.Body) resp.Body.Close() if err != nil { return nil, err } if resp.StatusCode != 200 { log.Printf("got response code %d - %s", resp.StatusCode, body) return nil, errors.New("api request returned non 200 status code") } data, err := simplejson.NewJson(body) if err != nil { return nil, err } return data, nil } func RequestUnparsedResponse(url string, header http.Header) ( response *http.Response, err error) { req, err := http.NewRequest("GET", url, nil) if err != nil { return nil, errors.New("failed building request for " + url + ": " + err.Error()) } req.Header = header httpclient := &http.Client{} if response, err = httpclient.Do(req); err != nil { return nil, errors.New("request failed for " + url + ": " + err.Error()) } return }
t-yuki/oauth2_proxy
api/api.go
GO
mit
1,053
require 'net/http' ## monkey-patch Net::HTTP # # Certain apple endpoints return 415 responses if a Content-Type is supplied. # Net::HTTP will default a content-type if none is provided by faraday # This monkey-patch allows us to leave out the content-type if we do not specify one. class Net::HTTPGenericRequest def supply_default_content_type return if content_type() end end
NghiaTranUIT/spaceship
lib/spaceship/helper/net_http_generic_request.rb
Ruby
mit
385
<?php namespace spec\SensioLabs\Behat\PageObjectExtension\PageObject\Factory; require_once __DIR__.'/Fixtures/ArticleList.php'; use PhpSpec\ObjectBehavior; use Prophecy\Argument; use ProxyManager\Factory\LazyLoadingValueHolderFactory; use ProxyManager\Proxy\LazyLoadingInterface; use SensioLabs\Behat\PageObjectExtension\PageObject\Element; use SensioLabs\Behat\PageObjectExtension\PageObject\Factory; use SensioLabs\Behat\PageObjectExtension\PageObject\InlineElement; use SensioLabs\Behat\PageObjectExtension\PageObject\Page; use SensioLabs\Behat\PageObjectExtension\PageObject\PageObject; class LazyFactorySpec extends ObjectBehavior { function let(Factory $decoratedFactory) { $this->beConstructedWith($decoratedFactory, new LazyLoadingValueHolderFactory()); } function it_is_a_page_object_factory() { $this->shouldHaveType('SensioLabs\Behat\PageObjectExtension\PageObject\Factory'); } function it_delegates_create_page_calls_to_the_decorated_factory(Factory $decoratedFactory, Page $page) { $decoratedFactory->createPage('ArticleList')->willReturn($page); $this->createPage('ArticleList')->shouldReturn($page); } function it_delegates_create_element_calls_to_the_decorated_factory(Factory $decoratedFactory, Element $element) { $decoratedFactory->createElement('Foo')->willReturn($element); $this->createElement('Foo')->shouldReturn($element); } function it_delegates_create_inline_element_calls_to_the_decorated_factory(Factory $decoratedFactory, InlineElement $element) { $decoratedFactory->createInlineElement('.foo')->willReturn($element); $this->createInlineElement('.foo')->shouldReturn($element); } function it_creates_a_proxy_instead_of_instantiating_a_page_object_right_away() { $this->create('ArticleList')->shouldReturnAnInstanceOf('ProxyManager\Proxy\LazyLoadingInterface'); } function it_delegates_instantiation_to_the_decorated_factory(PageObject $pageObject, Factory $decoratedFactory) { $decoratedFactory->create('ArticleList')->willReturn($pageObject); 
$this->create('ArticleList')->shouldReturnAnInstanceOf('ProxyManager\Proxy\ProxyInterface'); } }
sensiolabs/BehatPageObjectExtension
spec/PageObject/Factory/LazyFactorySpec.php
PHP
mit
2,257
#include "overlapping.hpp" /* TODO: * grow seeds from edges, not nodes. * Why are the results changing? * Then, speed up delV * ZEntropy shouldn't consider the number of groups, that should be taken out to another function. * That factorial expression would be better * Write down some of the stats, and be sure they're correct. * unordered_map for efficiency more often? * a big vector for the comm_count_per_edge? * keep track of frontier properly in growingSeed * type tags for the multi_index_container, instead of get<1> * random tie-breaking in frontier * update _p_in also. * * PLAN: * Fully abstract interface to grouping * Track count of types of edges. * Varying p_in and p_out * Each edge to know how many communities it's in. * Calculate global objective function every now and then. * More efficient finding of seeds * Random tie breaking * Stop seed growing at first positive? */ /* * Sources of randomness: * 246: Choice of initial edge seed * 249: (arbitrary, not random) Randomized p_in * 459: (arbitrary, not random) Tie breaker in seed expansion */ #include <list> #include <map> #include <algorithm> #include <functional> #include <math.h> #include <string.h> #include <fstream> #include <sstream> #include <float.h> #include <boost/multi_index_container.hpp> #include <boost/multi_index/ordered_index.hpp> #include <boost/multi_index/hashed_index.hpp> //#include <boost/multi_index/identity.hpp> #include <boost/multi_index/indexed_by.hpp> #include <boost/multi_index/member.hpp> #include <boost/unordered_map.hpp> #include <boost/unordered_set.hpp> #include "iterative.hpp" #include "options.hpp" #include "Range.hpp" #include "grouping.hpp" #include <set> using namespace std; using namespace std::tr1 ; using namespace grouping; char option_saveMOSESscores[1000] = ""; long option_seed = 0; int flag_justCalcObjective = 0; static void runMutual(void) { //system("echo;echo -n \" >> ;${x};${gt};${me}; \" ; bash -c '~/Code/Mutual3/mutual3/mutual \"${x}\"{\"${me}\",\"${gt}\"}' 
1>&2 "); } namespace overlapping { bool flag_save_group_id_in_output = true; template <class N> static void overlappingT(bloomGraph<N> &g); void overlapping(SimpleIntGraph &g) { overlappingT(g); } void overlapping(SimpleStringGraph &g) { overlappingT(g); } void addSeed(Grouping &ging, const set<V> &nodes, bool randomized_p_in); static pair<long double, set<V> > growThisEdge(Grouping &ging, V edgeNumber, const long double &boost, bool randomized_p_in); static void update_p_out(Grouping &ging); static void estimate_p_in_and_p_out(Grouping &ging); long double MOSES_objective(const Grouping &ging); void addSeed(Grouping &ging, const set<V> &nodes, bool randomized_p_in) { assert(nodes.size()>0); Group *g = ging.newG(randomized_p_in); ForeachContainer(V v, nodes) { ging.addV(g, v); } } void seedGroupingWithPartition(Grouping &ging, const iterative::Partition &p); template <class N> static void useOneNodeSeeds(Grouping &ging, bloomGraph<N> &g, bool randomize_p_in); template <class N> static void groupStats(const Grouping &ging, bloomGraph<N> &g); template <class N> static void save(Grouping &ging, bloomGraph<N> &g); template <class N> static void louvainStyle(Grouping &ging, bloomGraph<N> &g); static void tryMerges(Grouping &ging); static void tryDeletions(Grouping &ging, bool SaveScores = false); static bool tryAndApplyThisOne(Grouping &ging, V e, bool randomized_p_in); template <class N> static void overlappingT(bloomGraph<N> &g) { printf("env: ALLOW_BOOST000 %s\n", getenv("ALLOW_BOOST000")); printf("env: Lookahead %s\n", getenv("Lookahead")); printf("env: MaxCommSize %s\n", getenv("MaxCommSize")); printf("env: OUTER_ITERS %s\n", getenv("OUTER_ITERS")); printf("env: LOUVAIN_ITERS %s\n", getenv("LOUVAIN_ITERS")); printf("NoBroken\n"); srand48(option_seed); // default seed. Will be changeable by command line arg. 
/* iterative::Partition p(g); iterative::findPartition(p, g); assert(option_planted[0]==0); strcpy(option_planted, option_overlapping); strcat(option_planted, "_partition"); savePartition(g, p, -1); long double p_in = (long double) p.m_i / p.a; long double p_out; { int64 b = p.N * (p.N-1L) / 2L; int64 m_e = p.m-p.m_i; p_out = (long double) (m_e) / (b - p.a); } PP(p_in); PP(p_out); //Grouping ging(g, p_in, p_out); //seedGroupingWithPartition(ging, p); */ Grouping ging(g, option_p_in, option_p_out); estimate_p_in_and_p_out(ging); ging.value_of_objective_with_no_communities = MOSES_objective(ging); //PP(ging._p_in); //PP(ging._p_out); if(option_loadOverlapping[0]) { // load preexisting grouping Pn("Preloading grouping '%s'", option_loadOverlapping); ifstream inFile(option_loadOverlapping); //for (int i=0; i<5; ++i) int lineNo=0; while(inFile.peek()!=EOF) { lineNo ++; string line; getline(inFile, line); if(line.length()==0) break; Group *grp = ging.newG(); istringstream linestream(line); //PP(line);// if(linestream.peek()=='"') { char ch; linestream >> ch; while(linestream.peek() != EOF) { linestream >> ch; if(ch=='"') break; } if(linestream.peek()!='\t') Die("grouping file should have a tab after any \" \". line %d {%d '%c'}", lineNo, (int) ch, ch); } while(linestream >> ws, linestream.peek() != EOF) { N n; linestream >> n; V v = g.key_for_vertexName(n); assert(v>=0); assert(v<g.vcount()); ging.addV(grp, v); } } save(ging, g); } estimate_p_in_and_p_out(ging); groupStats(ging, g); MOSES_objective(ging); if(flag_justCalcObjective) exit(0); const int max_OuterIters = atoi(getenv("OUTER_ITERS") ? 
: "20"); PP(max_OuterIters); for (int k=0; k<max_OuterIters; k++) { MOSES_objective(ging); const size_t num_groups_before = ging.groups.size(); ostringstream s; s << "Whole outer iter " << k << "/" << max_OuterIters; Timer timer(s.str()); Pn("\ngrow seeds %d/%d", k, max_OuterIters); bool randomize_p_in; if(k < max_OuterIters / 2) { randomize_p_in = true; Pn("Random p_i for each edge that we try"); estimate_p_in_and_p_out(ging); // this is just to estimate p_out useOneNodeSeeds(ging, g, true); estimate_p_in_and_p_out(ging); tryDeletions(ging); } else { randomize_p_in = false; useOneNodeSeeds(ging, g, false); tryDeletions(ging); } groupStats(ging, g); save(ging, g); estimate_p_in_and_p_out(ging); //tryDeletions(ging); //save(ging, g); const size_t num_groups_after = ging.groups.size(); if(!randomize_p_in && /*num_groups_after > 1000 && */ 0.99L * num_groups_after <= num_groups_before) { Pn("breaking after %d growing passes, as VERY FEW more have been found among the most recent subset", k+1); break; } } int louvainStyle_iter=0; const int max_louvainStyleIters = atoi(getenv("LOUVAIN_ITERS") ? 
: "10"); PP(max_louvainStyleIters); while(louvainStyle_iter != max_louvainStyleIters) { MOSES_objective(ging); Timer timer("Whole Louvain iter"); Pn("\nLouvain-style iteration %d/%d", louvainStyle_iter++, max_louvainStyleIters); louvainStyle(ging, g); tryDeletions(ging, louvainStyle_iter==max_louvainStyleIters); estimate_p_in_and_p_out(ging); groupStats(ging, g); save(ging, g); } if(0) { tryMerges(ging); save(ging, g); groupStats(ging, g); } Pn("\n\nFINAL Grouping"); groupStats(ging, g); estimate_p_in_and_p_out(ging); MOSES_objective(ging); } static bool tryAndApplyThisOne(Grouping &ging, V e, bool randomized_p_in) { static long double boost = 0.0L; if(!getenv("ALLOW_BOOST000")) boost=0.0L; // reset it back to zero unless ALLOW_BOOST000 is defined in the environment pair<long double, set<V> > bestSeed = growThisEdge(ging, e, boost, randomized_p_in); if(bestSeed.first + boost> 0.0L && bestSeed.second.size()>0 ) { addSeed(ging, bestSeed.second, randomized_p_in); boost /= 2.0; // if(boost<0.0L) boost=0.0L; return true; //Pn("Applied best 1-seed. Now returning. %zd nodes (+%Lg). Now there are %zd communities", bestSeed.second.size(), bestSeed.first, ging.groups.size()); // if(bestSeed.second.size() < 20) ForeachContainer (V v, bestSeed.second) { cout << "|" << g.name(v); } P("\n"); } boost += 0.1; return false; } template <class N> static void useOneNodeSeeds(Grouping &ging, bloomGraph<N> &g, bool randomize_p_in) { Timer timer(__FUNCTION__); const int numTries = g.ecount()/5; P(" \n Now just use one-EDGE seeds at a time. Try %d edges\n", numTries); // groupStats(ging, g); for(int x=0; x<numTries; ++x) { if(x && numTries>5 && x%(numTries/5)==0) { PP(x); PP(ging.groups.size()); } // choose an edge at random, but prefer to use it iff it's sharedCommunities score is low. 
V e = V(drand48() * (2*ging._g.ecount())); assert(e >= 0); assert(e < 2*ging._g.ecount()); if(randomize_p_in) { ging._p_in = 0.01L + 0.98L*drand48(); assert(ging._p_in < 1.0L); } tryAndApplyThisOne(ging, e, randomize_p_in); #if 0 int sharedCommunities = ging.comm_count_per_edge(ging._g.neighbours(0).first[e], &(ging._g.neighbours(0).first[e])); // a little time in here //PP(sharedCommunities); if( (double(rand())/RAND_MAX) < powl(0.5, sharedCommunities)) { //Pn(" X %d", sharedCommunities); tryAndApplyThisOne(ging, e); } else { //Pn(" %d", sharedCommunities); } #endif } } template <class N> static void groupStats(const Grouping &ging, bloomGraph<N> &g) { map<size_t, int> group_sizes_of_the_randomized; map<size_t, int> group_sizes; int64 totalAssignments = 0; // to help calculate average communities per node. ForeachContainer(Group *group, ging.groups) { DYINGWORDS(group->vs.size()>0) { PP(group->vs.size()); } group_sizes[group->vs.size()]++; totalAssignments += group->vs.size(); if(group->_randomized_p_in) group_sizes_of_the_randomized[group->vs.size()]++; } //Perror(" %zd\n", ging.groups.size()); Pn("#groups=%zd. %zd nodes, out of %d, are in at least one community. avgs grps/node=%g", ging.groups.size(), ging.vgroups_size(), g.vcount(), (double) totalAssignments / g.vcount()); pair<size_t, int> group_size; size_t max_group_size = group_sizes.size()==0 ? 0 : group_sizes.rbegin()->first; int entries_per_row = 15; int number_of_rows = (max_group_size / entries_per_row) + 1; for(int r = 0; r<number_of_rows ; r++) { for(size_t c = r; c <= max_group_size; c+=number_of_rows) { if(group_sizes[c]>0) P("%6d{%3zd}", group_sizes[c], c); else P(" "); } P("\n"); } { // now, just the randomized ones size_t max_group_size = group_sizes_of_the_randomized.size()==0 ? 
0 : group_sizes_of_the_randomized.rbegin()->first; int entries_per_row = 15; int number_of_rows = (max_group_size / entries_per_row) + 1; for(int r = 0; r<number_of_rows ; r++) { for(size_t c = r; c <= max_group_size; c+=number_of_rows) { if(group_sizes_of_the_randomized[c]>0) P("%6d{%3zd}", group_sizes_of_the_randomized[c], c); else P(" "); } P("\n"); } } #if 0 set<V> lonelyNodesInLargestGroup; Group *largestGroup = NULL; ForeachContainer(Group *group, ging.groups) { if(group->vs.size() == max_group_size) { ForeachContainer(V v, group->vs) { P("(%zd)", ging.vgroups(v).size()); if(1 == ging.vgroups(v).size() && 0 == rand()%2) lonelyNodesInLargestGroup.insert(v); } P("\n"); largestGroup = group; break; } } #endif { int print_count = 0; for(map<int, V>::const_iterator it = ging.global_edge_counts.begin(); it!=ging.global_edge_counts.end(); ++it) { P("%6d(%3d)", (int) it->second ,(int) it->first); print_count++; if(print_count%15==0) P("\n"); } P("\n"); //if(0) update_p_out(ging); //PP(ging._p_in); //PP(ging._p_out); } } void seedGroupingWithPartition(Grouping &ging, const iterative::Partition &p) { for(V c=0; c<p.g.vcount(); ++c) { if(p.p[c].c == c) { // we're at the head node //Pn(" order %d", p.p[c].order); Group *grp = ging.newG(); V v = c; do { assert(c == p.p[v].c); ging.addV(grp, v); //Pn ("%d is in %d", v ,c); v = p.p[v].next; } while (v != c); } } } template <class It> static size_t count_intersection(It it1b, It it1e, It it2b, It it2e) { vector< typename It::value_type > is; set_intersection(it1b, it1e, it2b, it2e, back_inserter(is)); return is.size(); } template <class Container> static size_t count_intersection(const Container &container1, const Container &container2) { return count_intersection( container1.begin() , container1.end() , container2.begin() , container2.end() ); } struct DeltaSeed { const V _v; // for this node const int _group_size_smaller; // ...and this group (which v is NOT currently in) const Grouping &_ging; // what'd be the change in 
entropy (edge+Z+Q) from joining it? long double _deltadeltaEdgeEntropy; int count_edges_back_into_this_group; long double deltadeltaPairEntropy() const { return _deltadeltaEdgeEntropy + log2l(1.0L - _ging._p_in) * (_group_size_smaller - count_edges_back_into_this_group); } explicit DeltaSeed(V v, int group_size_smaller, Grouping &ging) : _v(v), _group_size_smaller(group_size_smaller), _ging(ging), _deltadeltaEdgeEntropy(0.0L), count_edges_back_into_this_group(0) { //assert(_grp.vs.count(v)==0); } void addEdge2(V n, const V* edgeVN_ptr) { // n is connected to _v assert(*edgeVN_ptr == n); this->addEdge(n, _ging.comm_count_per_edge(n, edgeVN_ptr)); } void addEdge(V , int sharedCommunities) { // n is connected to _v // TODO: might be quicker to pass in the count of sharedCommunities too //assert(_ging._g.are_connected(_v, n)); // TODO: remove these assertions //assert(_grp.vs.count(n) == 1); //assert(_grp.vs.count(_v) == 0); count_edges_back_into_this_group ++; _deltadeltaEdgeEntropy += log2l(1.0L - (1.0L-_ging._p_out)*powl(1.0L - _ging._p_in, 1+sharedCommunities)) - log2l(1.0L - (1.0L-_ging._p_out)*powl(1.0L - _ging._p_in, sharedCommunities)); } void redoEdge(V , int previous_sharedCommunities) { // n is connected to _v // TODO: might be quicker to pass in the count of sharedCommunities too _deltadeltaEdgeEntropy -=(log2l(1.0L - (1.0L-_ging._p_out)*powl(1.0L - _ging._p_in, 1+previous_sharedCommunities)) - log2l(1.0L - (1.0L-_ging._p_out)*powl(1.0L - _ging._p_in, previous_sharedCommunities))); _deltadeltaEdgeEntropy += log2l(1.0L - (1.0L-_ging._p_out)*powl(1.0L - _ging._p_in, 2+previous_sharedCommunities)) - log2l(1.0L - (1.0L-_ging._p_out)*powl(1.0L - _ging._p_in, 1+previous_sharedCommunities)); } long double _deltaZentropy() const { const size_t N = _ging._g.vcount(); const size_t x = _group_size_smaller; const size_t x2 = 1+x; return( x2 * log2l(x2) + (N-x2) * log2l(N-x2) -x * log2l(x) - (N-x ) * log2l(N-x ) ); /* (x2 * (log2l(x2)-log2l(N)) + (N-x2) * 
(log2l(N-x2)-log2l(N)) + log2l(1+ging.groups.size()) - log2l(N)) -(x * (log2l(x) -log2l(N)) + (N-x ) * (log2l(N-x )-log2l(N)) + log2l(1+ging.groups.size()) - log2l(N)) = (x2 * (log2l(x2)-log2l(N)) + (N-x2) * (log2l(N-x2)-log2l(N)) ) -(x * (log2l(x) -log2l(N)) + (N-x ) * (log2l(N-x )-log2l(N)) ) = (x2 * (log2l(x2) ) + (N-x2) * (log2l(N-x2) ) ) -(x * (log2l(x) ) + (N-x ) * (log2l(N-x ) ) ) = x2 * log2l(x2) + (N-x2) * log2l(N-x2) -x * log2l(x) - (N-x ) * log2l(N-x ) */ } long double _deltaTotalentropy() const { return this->deltadeltaPairEntropy() + this->_deltaZentropy(); } }; struct FrontierNode { FrontierNode(long double &score, V v) : _score(score), _v(v) {} long double _score; V _v; struct Incrementer { long double _x; Incrementer(long double &x) : _x(x) {} void operator() (FrontierNode &fn) const { fn._score += _x; } }; }; using namespace boost::multi_index; struct VertexTag {}; struct Frontier : private multi_index_container < // TODO: Some sort of binary tree sometime? FrontierNode, indexed_by< ordered_non_unique< member<FrontierNode,long double,&FrontierNode::_score>, greater<long double> >, hashed_unique< tag<VertexTag>, member<FrontierNode,V,&FrontierNode::_v> > > > { // vertices, and their scores. // easy removal of the highest-score vertices. // easy increase of score of arbitrary members, adding them if they don't exist already. public: static long double __attribute__ ((noinline)) calcddEE(const Grouping &ging, int sharedCommunities) { return log2l(1.0L - (1.0L-ging._p_out) * powl(1.0L - ging._p_in, 1+sharedCommunities)) - log2l(1.0L - (1.0L-ging._p_out) * powl(1.0L - ging._p_in, sharedCommunities)) - log2l(1.0L - ging._p_in) + 1e-20L * drand48() // random tie breaking ; } void addNode(const Grouping &ging, V to, const V *edgeFT_ptr) { // to is being added to the frontier, BUT it may already be in the frontier. // from is in the seed. 
//assert(*edgeFT_ptr == to); int sharedCommunities = ging.comm_count_per_edge(to, edgeFT_ptr); // a little time in here long double deltadeltaEdgeEntropy = calcddEE(ging, sharedCommunities /*, to*/); // a little time in here Frontier::nth_index<1>::type::iterator addOrModifyThis = this->get<1>().find(to); if(addOrModifyThis==this->get<1>().end()) { // TODO: faster if search for to is done just once? this->insert(FrontierNode(deltadeltaEdgeEntropy, to)); } else { this->get<1>().modify(addOrModifyThis, FrontierNode::Incrementer(deltadeltaEdgeEntropy)); } } void erase_best_node() { Frontier::iterator best_node = this->get<0>().begin(); this->erase(best_node); } int erase_this_node(V to) { return this->get<1>().erase(to); } long double best_node_score() const { Frontier::iterator best_node = this->get<0>().begin(); return best_node -> _score; } V best_node_v() const { Frontier::iterator best_node = this->get<0>().begin(); return best_node -> _v; } bool Empty() const { return this->empty(); } }; static long double logNchoose(int64 N, int64 n_c) { if(n_c==N) return 0; static vector<long double> logNchoose_vector; // static int64 usedN; assert(n_c>0); assert(n_c<=N); if (logNchoose_vector.size()==0) { Timer t("logNchoose Initialization"); logNchoose_vector.resize(N+1); // usedN = N; long double lN = 0.0L; for(int64 x1=1; x1<=N; x1++) { if(x1>1) { int64 i = x1-1; lN += log2l(i) - log2l(N-i); } logNchoose_vector.at(x1) = lN; } } assert(logNchoose_vector.at(0) == 0); // DYINGWORDS(logNchoose_vector.at(N) == 0) { // will never be exactly zero, but it should be, in theory assert(logNchoose_vector.size()>0); // assert(usedN == N); assert( size_t(N+1) == logNchoose_vector.size()); assert( size_t(n_c) < logNchoose_vector.size()); return logNchoose_vector.at(n_c); } pair<long double, set<V> > growingSeed(Grouping &ging, int lookahead , set<V> &seed , pair<long double, set<V> > bestSoFar , long double seedEdgeEntropy , int seed_size , Frontier &frontier , int edges_in_seed , const 
long double &boost // to allow some negative communities to persist. The deletion phase will fix them later. This is to ensure that we have the best chance of filling the graph up quickly. , bool randomized_p_in ) // Find the expansion among the frontier that best improves the score. Then recurse to it. // Stop growing if dead end is reached (empty frontier) or the seed isn't increasing and we already have at least 5 nodes. // Return the best set of nodes // TODO: Profile, then make this damn efficient { assert((int) seed.size() == seed_size); if(frontier.Empty()) return bestSoFar; const V best_v = frontier.best_node_v(); #if 0 { IteratorRange<const V *> ns(ging._g.neighbours(best_v)); Foreach(V n, ns) { if(1==seed.count(n)) { // This neighbour is already in the seed. That means the count of edges within the seed is about to be increased. Should update randomized p_in in light of this. ++edges_in_seed; } } int64 pairsInSeed = seed_size * (1+seed_size) / 2; long double new_p_in = 1.0L * (2*edges_in_seed+1) / (2 * pairsInSeed + 2) ; if(randomized_p_in) ging._p_in = new_p_in; /* cout << edges_in_seed << "/" << (1+seed_size) << "\t" << 100.0 * edges_in_seed / (seed_size * (1+seed_size) / 2) // << "\t" << ging._p_in << "\t" << randomized_p_in << "\t" << new_p_in << endl; */ assert(edges_in_seed <= (seed_size * (1+seed_size) / 2)); } #endif const long double newseedEdgeEntropy = seedEdgeEntropy + frontier.best_node_score() + log2l(1.0L-ging._p_in) * seed_size; const int N = ging._g.vcount(); //const int x = seed_size; const int x1= 1+seed_size; //long double seed_totalDeltaEntropy = seedEdgeEntropy + x * (log2l(x)-log2l(N)) + (N-x) * (log2l(N-x)-log2l(N)) + log2l(1+ging.groups.size())/*equivalent groupings*/ - log2l(N)/*encoding of size of the group*/; UNUSED int64 q = ging.groups.size(); if (q==0) q=1; UNUSED const int64 q_= 1 + q; const long double logNchoosen = logNchoose(N,x1); const long double newseed_totalDeltaEntropy = newseedEdgeEntropy //+ x1 * 
(log2l(x1)-log2l(N)) + (N-x1) * (log2l(N-x1)-log2l(N)) + logNchoosen - log2l(N+1)/*encoding of size of the group*/ + log2l(1+ging.groups.size())/*equivalent groupings*/ //+ (getenv("UseBroken") ? ( ( q_ * (log2l(q_) - log2l(exp(1))) + log2l(N+1) - log2l(N+1-q_) ) - ( q * (log2l(q ) - log2l(exp(1))) + log2l(N+1) - log2l(N+1-q ) ) ) : 0) ; if(bestSoFar.first < newseed_totalDeltaEntropy) { bestSoFar.first = newseed_totalDeltaEntropy; bestSoFar.second = seed; bestSoFar.second.insert(best_v); } if( (size_t) seed_size >= lookahead +bestSoFar.second.size() ) // lookahead return bestSoFar; const size_t max_commsize = atoi(getenv("MaxCommSize") ? : "10000000"); if( (size_t) seed_size >= max_commsize ) { // max comm size bestSoFar.first = -10000000; return bestSoFar; } //if( bestSoFar.first > 0.0L) // once positive, return immediately!!!!!!!!!!!!! //return bestSoFar; if( (size_t) seed_size > bestSoFar.second.size() && bestSoFar.first + boost > 0.0L) // once positive, return immediately if it drops. return bestSoFar; //if(bestSoFar.first > 0.0L) //return bestSoFar; // This isn't working so well. 
Too many communities (I think), and no faster (for Oklahoma at least) frontier.erase_best_node(); IteratorRange<const V *> ns(ging._g.neighbours(best_v)); const V *edgeVN_offset = ging._g.neighbours(best_v).first; Foreach(V n, ns) { assert( *edgeVN_offset == n); //assert(ging._g.neighbours(0).first[ging._comm_count_per_edge2[edgeVN_offset].other_index] == best_v); if(0==seed.count(n)) frontier.addNode(ging, n , edgeVN_offset ); ++edgeVN_offset; } seed.insert(best_v); return growingSeed(ging , lookahead , seed , bestSoFar , newseedEdgeEntropy , 1 + seed_size , frontier , edges_in_seed , boost , randomized_p_in ); } struct ThrowingIterator { struct Dereferenced {}; V & operator *() { throw Dereferenced(); } void operator ++() { throw Dereferenced(); } }; bool emptyIntersection(const pair<const V*, const V*> &l, const pair<const V*, const V*> &r) { try { set_intersection( l.first ,l.second ,r.first ,r.second ,ThrowingIterator() ); } catch (ThrowingIterator::Dereferenced &) { return false; } return true; //inter.empty(); } static pair<long double, set<V> > growThisEdge(Grouping &ging, const V edgeNumber, const long double &boost, bool randomized_p_in) { assert(edgeNumber < 2*ging._g.ecount()); V r = ging._g.neighbours(0).first[edgeNumber]; V l = ging._g.neighbours(0).first[ging.comm_count_per_edge2[edgeNumber].other_index]; // there must be a triangle available if(emptyIntersection(ging._g.neighbours(l),ging._g.neighbours(r))) return make_pair(-1.0L, set<V>()); Frontier frontier; { IteratorRange<const V *> ns(ging._g.neighbours(l)); const V *edgeIN_ptr = ging._g.neighbours(l).first; Foreach(V n, ns) { frontier.addNode(ging, n, edgeIN_ptr); ++edgeIN_ptr; } } //PP(frontier.size()); const int erased = frontier.erase_this_node(r); DYINGWORDS(erased==1) { PP(erased); PP(l); PP(r); //PP(frontier.size()); PP(ging._g.degree(l)); PP(ging._g.degree(r)); IteratorRange<const V *> ns(ging._g.neighbours(l)); Foreach(V n, ns) { PP(n); } } assert(erased==1); { IteratorRange<const V *> 
ns(ging._g.neighbours(r)); const V *edgeIN_ptr = ging._g.neighbours(r).first; Foreach(V n, ns) { if(n!=l) frontier.addNode(ging, n, edgeIN_ptr); ++edgeIN_ptr; } } set<V> seed; seed.insert(l); seed.insert(r); int sharedCommunities = ging.comm_count_per_edge(r, &(ging._g.neighbours(0).first[edgeNumber])); // a little time in here pair<long double, set<V> > grownSeed = growingSeed(ging , atoi(getenv("Lookahead") ? : "2") , seed , make_pair(-10000000.0L, seed) // This score is too low, but then we don't expect a singleton community to come out best anyway! Only positive scores are used. , log2l(1.0L - (1.0L-ging._p_out) * powl(1.0L - ging._p_in, 1+sharedCommunities)) - log2l(1.0L - (1.0L-ging._p_out) * powl(1.0L - ging._p_in, sharedCommunities)) , 2 , frontier , 1 , boost , randomized_p_in ); return grownSeed; } template <class N> static void save(Grouping &ging, bloomGraph<N> &g) { ofstream saveFile(option_overlapping); ForeachContainer(Group *grp, ging.groups) { bool firstLine = true; if(flag_save_group_id_in_output) saveFile << '\"' << grp->_id << "\"\t"; ForeachContainer(V v, grp->vs) { saveFile << (firstLine?"":" ") << g.name(v); firstLine = false; } saveFile << endl; } runMutual(); } struct hashGroup { int operator() (Group *grp) const { return grp->_id; } }; typedef boost::unordered_map<Group *, DeltaSeed, hashGroup> SeedDeltasT; template <class N> static void louvainStyle(Grouping &ging, bloomGraph<N> &g) { Timer t(__FUNCTION__); // find a node, isolate it from its communities, Add back one at a time if appropriate. 
const bool DEBUG_louvainStyle = 0; for(V v=0; v<g.vcount(); v++) { if(0) update_p_out(ging); // if(v%(g.vcount()/20)==0) PP(v); if(DEBUG_louvainStyle) groupStats(ging, g); if(DEBUG_louvainStyle) cout << "removing node in these many groups: " << ging.vgroups(v).size() << endl; ging.isolate(v); SeedDeltasT _seedDeltas; IteratorRange<const V *> ns(ging._g.neighbours(v)); { const V * edgeVN_ptr = ging._g.neighbours(v).first; Foreach(V n, ns) { int sharedCommunities = ging.comm_count_per_edge(n,edgeVN_ptr); // TODO: Could prepare the results for addEdge of v<>n ForeachContainer(Group *grp, ging.vgroups(n)) { _seedDeltas.insert(make_pair(grp, DeltaSeed(v, grp->vs.size(), ging))).first->second.addEdge(n, sharedCommunities); //_seedDeltas.find(grp)->second.addEdge(n, sharedCommunities); } ++edgeVN_ptr; } } for(int addedBack = 0; _seedDeltas.size()>0 ; addedBack++) { // for each neighbouring group, calculate the delta-entropy of expanding back in here. pair<long double, Group *> bestGroup(-LDBL_MAX, (Group*) NULL); int num_positive = 0; for(SeedDeltasT::iterator i = _seedDeltas.begin(); i!=_seedDeltas.end(); ) { if(i->second._deltaTotalentropy()<=0.0L) { i = _seedDeltas.erase(i); continue; } else { long double delta2 = i->second._deltaTotalentropy(); // TODO: Count the positive scores. No point proceeding if there aren't any more positive scores, as they can only decrease if(bestGroup.first < delta2) bestGroup = make_pair(delta2, i->first); if(delta2>0.0L) ++num_positive; } ++i; } if(bestGroup.first > 0.0L) { assert(num_positive>=1); ging.addV(bestGroup.second, v); if(num_positive==1) { // if just one was positive, then there's no point continuing, as the rest will only lose more score. 
break; } _seedDeltas.erase(bestGroup.second); // the other potential groups on the end of this edge need to have their addEdge undone const V * edgeVN_ptr = ging._g.neighbours(v).first; IteratorRange<const V*> ns(ging._g.neighbours(v)); Foreach(V n, ns) { assert(*edgeVN_ptr == n); if(bestGroup.second->vs.count(n)) { int previous_sharedCommunities = ging.comm_count_per_edge(n, edgeVN_ptr) - 1; ForeachContainer(Group *grp, ging.vgroups(n)) { SeedDeltasT::iterator grpInSeed =_seedDeltas.find(grp); if(grpInSeed != _seedDeltas.end()) { const long double before = grpInSeed->second._deltaTotalentropy(); grpInSeed->second.redoEdge(n, previous_sharedCommunities); const long double after = grpInSeed->second._deltaTotalentropy(); if(after > before) { Perror("%s:%d _deltaTotalentropy %Lg -> %Lg\n", __FILE__, __LINE__, before, after); } } } } ++edgeVN_ptr; } } else break; } } } static void update_p_out(Grouping &ging) { long double new_p_out = 2.0L * ging.global_edge_counts[0] / ging._g.vcount() / (ging._g.vcount()-1) ; if(new_p_out > 0.1L) ging._p_out = 0.1L; else ging._p_out = new_p_out; } struct PairGroupHash { int operator() (const pair<Group*,Group*> &pg) const { return pg.first->_id + pg.second->_id; } }; static void tryMerges(Grouping &ging) { Timer timer(__FUNCTION__); typedef boost::unordered_map< pair<Group*,Group*> , long double, PairGroupHash> MergesT; MergesT proposed_merges; size_t counter=0; for(V e = 0; e < /*100000 */ 2*ging._g.ecount() ; e++) { //if(e % (2*ging._g.ecount()/10) ==0) { PP(e); } V e2 = ging.comm_count_per_edge2[e].other_index; V l = ging._g.neighbours(0).first[e2]; V r = ging._g.neighbours(0).first[e]; if(r<l) continue; // no point considering each edge twice V sharedCommunities = ging.comm_count_per_edge2[e].shared_comms; assert(sharedCommunities == ging.comm_count_per_edge2[e2].shared_comms); //Pn("%d\t%d", l, r); counter++; const set<Group *> &lgrps = ging.vgroups(l); const set<Group *> &rgrps = ging.vgroups(r); //PP(lgrps.size()); 
//PP(rgrps.size()); vector<Group *> lonly, ronly; set_difference(lgrps.begin(),lgrps.end(),rgrps.begin(),rgrps.end(),back_inserter(lonly)); set_difference(rgrps.begin(),rgrps.end(),lgrps.begin(),lgrps.end(),back_inserter(ronly)); //Pn("# unmatched %zd,%zd", lonly.size(), ronly.size()); ForeachContainer(Group *lg, lonly) { ForeachContainer(Group *rg, ronly) { long double qi = 1.0L - ging._p_in; long double qo = 1.0L - ging._p_out; //const int64 oldQz = ging.groups.size(); Group * lg1 = lg; Group * rg1 = rg; if(lg1 > rg1) swap(lg1, rg1); MergesT::key_type key = make_pair(lg1, rg1); MergesT::iterator pm = proposed_merges.find(key); if(pm == proposed_merges.end()) { const int64 s1 = lg1->vs.size(); const int64 s2 = rg1->vs.size(); vector<V> Union; set_union(lg1->vs.begin(),lg1->vs.end(),rg1->vs.begin(),rg1->vs.end(),back_inserter(Union)); const int64 N = ging._g.vcount(); pm = proposed_merges.insert(make_pair(key, log2l(qi)*0.5L* (long double)( Union.size() * (Union.size()-1) - s1*(s1-1) - s2*(s2-1) ) // + ( (oldQz) *log2l(oldQz-1) + log2l(exp(1)) - log2l(oldQz-2-N) ) // - ( (oldQz+1)*log2l(oldQz ) - log2l(oldQz-1-N) ) + (s1+s2) * (log2l(s1+s2) /*- log2l(N)*/) - (s1 ) * (log2l(s1 ) /*- log2l(N)*/) - (s2 ) * (log2l(s2 ) /*- log2l(N)*/) + log2l(N) // one fewer community whose pi has to be encoded )).first; } pm -> second+= log2l(1.0L - qo * powl(qi, 1+sharedCommunities)) - log2l(1.0L - qo * powl(qi, sharedCommunities)) - log2l(qi) // for a given member of the map, each edge will be found exactly once. 
So here we cancel the affect of assuming it was disconnected ; } } } for(V e = 0; e < /*100000 */ 2*ging._g.ecount() ; e++) { if(e % (2*ging._g.ecount()/10) ==0) { PP(e); } V e2 = ging.comm_count_per_edge2[e].other_index; V l = ging._g.neighbours(0).first[e2]; V r = ging._g.neighbours(0).first[e]; if(r<l) continue; // no point considering each edge twice V sharedCommunities = ging.comm_count_per_edge2[e].shared_comms; assert(sharedCommunities == ging.comm_count_per_edge2[e2].shared_comms); //Pn("%d\t%d", l, r); counter++; const set<Group *> &lgrps = ging.vgroups(l); const set<Group *> &rgrps = ging.vgroups(r); //PP(lgrps.size()); //PP(rgrps.size()); vector<Group *> inter; set_intersection(lgrps.begin(),lgrps.end(),rgrps.begin(),rgrps.end(),back_inserter(inter)); //Pn("# unmatched %zd,%zd", lonly.size(), ronly.size()); ForeachContainer(Group *lg, inter) { ForeachContainer(Group *rg, inter) { if(lg < rg) { // no point proposing a merge between a group and itself long double qi = 1.0L - ging._p_in; long double qo = 1.0L - ging._p_out; Group * lg1 = lg; Group * rg1 = rg; if(lg1 > rg1) swap(lg1, rg1); MergesT::key_type key = make_pair(lg1, rg1); MergesT::iterator pm = proposed_merges.find(key); if(pm != proposed_merges.end()) pm -> second+= log2l(1.0L - qo * powl(qi, sharedCommunities-1)) - log2l(1.0L - qo * powl(qi, sharedCommunities)) + log2l(qi) // for a given member of the map, each edge will be found exactly once. 
So here we cancel the affect of assuming it was disconnected ; } } } } int64 merges_accepted = 0; int64 merges_applied = 0; for(MergesT::const_iterator pm = proposed_merges.begin(); pm != proposed_merges.end(); ++pm) { const long double score = pm->second; //const int64 N = ging._g.vcount(); boost::unordered_set<Group *> already_merged; if(score >0.0) { //PP(scoreEdges); //PP(scoreZ); merges_accepted++; MergesT::key_type merge_these = pm->first; if(already_merged.count(merge_these.first)==0 && already_merged.count(merge_these.second)==0) { Group * l = merge_these.first; Group * r = merge_these.second; const set<V> these_nodes(l->vs); // copy them, so as to iterate properly over them. //P(" "); PP(ging.groups.size()); ForeachContainer(V v, these_nodes) { if(r->vs.count(v)==0) { ging.addV(r,v); } assert(r->vs.count(v)==1); //ging.delV(l,v); } //PP(ging.groups.size()); already_merged.insert(merge_these.first); already_merged.insert(merge_these.second); merges_applied++; } } //if(score > -25.0) { printf("merge: %-11.2Lf\n", score); } } PP(proposed_merges.size()); PP(merges_accepted); PP(merges_applied); } static void tryDeletions(Grouping &ging, bool SaveScores /*= true*/) { // delete groups which aren't making a positive contribution any more. Timer timer(__FUNCTION__); typedef boost::unordered_map< Group* , long double, hashGroup> DeletionsT; DeletionsT proposed_deletions; const int64 N = ging._g.vcount(); ForeachContainer(Group *grp, ging.groups) { // preseed with all groups, because we want the groups even that don't have an edge in them! const int64 sz = grp->vs.size(); //int64 q = ging.groups.size() -1; if (q<=0) q=1; //const int64 q_= 1 + q; const long double logNchoosen = logNchoose(N,sz); proposed_deletions.insert(make_pair(grp, log2l(1.0L - ging._p_in)*(sz*(sz-1)/2) + logNchoosen - log2l(N+1)/*encoding of size of the group*/ + log2l(1+ging.groups.size())/*equivalent groupings*/ //+ (getenv("UseBroken") ? 
( ( q_ * (log2l(q_) - log2l(exp(1))) + log2l(N+1) - log2l(N+1-q_) ) - ( q * (log2l(q ) - log2l(exp(1))) + log2l(N+1) - log2l(N+1-q ) ) ) : 0) )); } for(V e = 0; e < /*100000 */ 2*ging._g.ecount() ; e++) { /* if(e % (2*ging._g.ecount()/10) ==0) { PP(e); } */ V e2 = ging.comm_count_per_edge2[e].other_index; V sharedCommunities = ging.comm_count_per_edge2[e].shared_comms; assert(sharedCommunities == ging.comm_count_per_edge2[e2].shared_comms); V l = ging._g.neighbours(0).first[e2]; V r = ging._g.neighbours(0).first[e]; if(r<l) continue; // no point considering each edge twice const set<Group *> &lgrps = ging.vgroups(l); const set<Group *> &rgrps = ging.vgroups(r); vector<Group *> sharedComms; set_intersection(lgrps.begin(),lgrps.end(),rgrps.begin(),rgrps.end(),back_inserter(sharedComms)); assert((size_t)sharedCommunities == sharedComms.size()); ForeachContainer(Group *grp, sharedComms) { DeletionsT::iterator pm = proposed_deletions.find(grp); assert(pm != proposed_deletions.end()); pm -> second+= log2l(1.0L - (1.0L-ging._p_out) * powl(1.0L - ging._p_in, sharedCommunities)) - log2l(1.0L - (1.0L-ging._p_out) * powl(1.0L - ging._p_in, sharedCommunities-1)) - log2l(1.0L - ging._p_in) ; } } assert(proposed_deletions.size() <= ging.groups.size()); // maybe some communities didn't have an edge in them V deletions_accepted = 0; map<V, int> deletions_sizes; PP(ging.groups.size()); for(DeletionsT::const_iterator pm = proposed_deletions.begin(); pm != proposed_deletions.end(); ++pm) { const long double score = pm->second; //const int64 N = ging._g.vcount(); if(score < 0.0) { //PP(scoreEdges); //PP(scoreZ); deletions_accepted++; deletions_sizes[pm->first->vs.size()]++; { // delete the group set<V> vs = pm->first->vs; // COPY the vertices in ForeachContainer(V v, vs) { ging.delV(pm->first, v); } // By now, pm->first will be an invalid pointer, as it will be been delete'd } } } P("deletions_accepted: %d\t", deletions_accepted); pair<V, int> delete_size; 
ForeachContainer(delete_size, deletions_sizes) { P("%d{%d} ", delete_size.second, delete_size.first); } P("\n"); PP(ging.groups.size()); //if(SaveScores && option_saveMOSESscores[0]) Pn("NOT Saving the delta-scores for each comm"); if(SaveScores && option_saveMOSESscores[0]) { Pn("Saving the MOSES delta-score for each community as per the --saveMOSESscores option"); ofstream saveFile(option_saveMOSESscores); ForeachContainer(Group *grp, ging.groups) { // preseed with all groups, because we want the groups even that don't have an edge in them! saveFile << proposed_deletions[grp] << '\t' << grp->vs.size() << endl; } } } long double P_x_given_z(const Grouping &ging, long double p_o, long double p_i, int sigma_shared_Xis1) { const int64 N = ging._g.vcount(); const int64 m = ging._g.ecount() / 2L; long double logP_XgivenZ = 0.0; logP_XgivenZ += log2l(1.0L - p_o) * (N * (N-1) / 2 - m); logP_XgivenZ += log2l(1.0L - p_i) * (ging._sigma_shared - sigma_shared_Xis1); typedef pair <int,V> edge_countT; ForeachContainer(const edge_countT &edge_count, ging.global_edge_counts) { const int64 s = edge_count.first; const int64 m_s = edge_count.second; logP_XgivenZ += log2l(1.0L - (1.0L - p_o)*powl(1.0L - p_i, s)) * m_s; } return logP_XgivenZ; } long double MOSES_objective(const Grouping &ging) { Timer t(__FUNCTION__); // Three components // P(x|z) // Qz! 
// product of binomial/N+1 int64 sigma_shared_Xis1 = 0; pair <int,V> edge_count; ForeachContainer(edge_count, ging.global_edge_counts) { sigma_shared_Xis1 += edge_count.first * edge_count.second; } long double Pxz = P_x_given_z(ging, ging._p_out, ging._p_in, sigma_shared_Xis1); long double Pz = 0.0; for (size_t i = 1; i<=ging.groups.size(); i++) { Pz += log2l(i); //+ log2l(1+ging.groups.size())/*equivalent groupings*/ //P(Pz); } // PP(Pz); int64 N = ging._g.vcount(); ForeachContainer(const Group *grp, ging.groups) { long double logNchoosen = logNchoose(N,grp->vs.size()); DYINGWORDS(logNchoosen <= 0.0) { PP(logNchoosen); } assert(logNchoosen <= 0.0); Pz += logNchoosen - log2l(N+1) ; } // PP(Pxz); // PP(Pz); // PP(Pxz + Pz); // PP(ging.value_of_objective_with_no_communities); if(ging.value_of_objective_with_no_communities==1.0) Pn("Compression:\t%Lf\t%Lg\t%Lg", 1.0L ,Pz ,Pxz ); else Pn("Compression:\t%Lf\t%Lg\t%Lg", (Pxz + Pz) / ging.value_of_objective_with_no_communities ,Pz ,Pxz ); return Pxz + Pz; } static void estimate_p_in_and_p_out(Grouping &ging) { Timer t(__FUNCTION__); //PP(ging._sigma_shared); /* int64 _sigma_shared2 = 0; ForeachContainer(const Group *grp, ging.groups) { _sigma_shared2 += int64(grp->vs.size()) * int64(grp->vs.size()-1); } PP(_sigma_shared2/2); assert(_sigma_shared2 = 2 * ging._sigma_shared); */ const int64 N = ging._g.vcount(); const int64 m = ging._g.ecount() / 2L; int64 sigma_shared_Xis1 = 0; pair <int,V> edge_count; ForeachContainer(edge_count, ging.global_edge_counts) { sigma_shared_Xis1 += edge_count.first * edge_count.second; } //PP(sigma_shared_Xis1); map<long double, pair<long double, long double>, greater<long double> > ALLlogP_XgivenZ; for(long double p_i = 0.0L; (p_i+=0.001L) < 1.0L; ) { for(long double p_o = 1e-11L; (p_o*=1.1L) < 1.0L; ) { long double logP_XgivenZ = 0.0; logP_XgivenZ += log2l(1.0L - p_o) * (N * (N-1) / 2 - m); logP_XgivenZ += log2l(1.0L - p_i) * (ging._sigma_shared - sigma_shared_Xis1); 
ForeachContainer(edge_count, ging.global_edge_counts) { const int64 s = edge_count.first; const int64 m_s = edge_count.second; logP_XgivenZ += log2l(1.0L - (1.0L - p_o)*powl(1.0L - p_i, s)) * m_s; } assert(logP_XgivenZ == P_x_given_z(ging ,p_o ,p_i ,sigma_shared_Xis1)); ALLlogP_XgivenZ[logP_XgivenZ] = make_pair(p_i, p_o); } } pair<long double, pair<long double, long double> > best; ForeachContainer(best, ALLlogP_XgivenZ) { Pn("BEST: %Lg,%Lg -> %9.0Lf ", best.second.first, best.second.second, best.first); ging._p_in = best.second.first; ging._p_out= best.second.second; break; } } } // namespace overlapping
rabbanyk/CommunityEvaluation
execs/CM-Overlapping-MOSES-McDaid/moses-2011-01-26/overlapping.cpp
C++
mit
42,402
'use strict';var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) { if (typeof Reflect === "object" && typeof Reflect.decorate === "function") return Reflect.decorate(decorators, target, key, desc); switch (arguments.length) { case 2: return decorators.reduceRight(function(o, d) { return (d && d(o)) || o; }, target); case 3: return decorators.reduceRight(function(o, d) { return (d && d(target, key)), void 0; }, void 0); case 4: return decorators.reduceRight(function(o, d) { return (d && d(target, key, o)) || o; }, desc); } }; var __metadata = (this && this.__metadata) || function (k, v) { if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v); }; var collection_1 = require('angular2/src/facade/collection'); var lang_1 = require('angular2/src/facade/lang'); var exceptions_1 = require('angular2/src/facade/exceptions'); var DefaultKeyValueDifferFactory = (function () { function DefaultKeyValueDifferFactory() { } DefaultKeyValueDifferFactory.prototype.supports = function (obj) { return obj instanceof Map || lang_1.isJsObject(obj); }; DefaultKeyValueDifferFactory.prototype.create = function (cdRef) { return new DefaultKeyValueDiffer(); }; DefaultKeyValueDifferFactory = __decorate([ lang_1.CONST(), __metadata('design:paramtypes', []) ], DefaultKeyValueDifferFactory); return DefaultKeyValueDifferFactory; })(); exports.DefaultKeyValueDifferFactory = DefaultKeyValueDifferFactory; var DefaultKeyValueDiffer = (function () { function DefaultKeyValueDiffer() { this._records = new Map(); this._mapHead = null; this._previousMapHead = null; this._changesHead = null; this._changesTail = null; this._additionsHead = null; this._additionsTail = null; this._removalsHead = null; this._removalsTail = null; } Object.defineProperty(DefaultKeyValueDiffer.prototype, "isDirty", { get: function () { return this._additionsHead !== null || this._changesHead !== null || this._removalsHead !== null; }, enumerable: 
true, configurable: true }); DefaultKeyValueDiffer.prototype.forEachItem = function (fn) { var record; for (record = this._mapHead; record !== null; record = record._next) { fn(record); } }; DefaultKeyValueDiffer.prototype.forEachPreviousItem = function (fn) { var record; for (record = this._previousMapHead; record !== null; record = record._nextPrevious) { fn(record); } }; DefaultKeyValueDiffer.prototype.forEachChangedItem = function (fn) { var record; for (record = this._changesHead; record !== null; record = record._nextChanged) { fn(record); } }; DefaultKeyValueDiffer.prototype.forEachAddedItem = function (fn) { var record; for (record = this._additionsHead; record !== null; record = record._nextAdded) { fn(record); } }; DefaultKeyValueDiffer.prototype.forEachRemovedItem = function (fn) { var record; for (record = this._removalsHead; record !== null; record = record._nextRemoved) { fn(record); } }; DefaultKeyValueDiffer.prototype.diff = function (map) { if (lang_1.isBlank(map)) map = collection_1.MapWrapper.createFromPairs([]); if (!(map instanceof Map || lang_1.isJsObject(map))) { throw new exceptions_1.BaseException("Error trying to diff '" + map + "'"); } if (this.check(map)) { return this; } else { return null; } }; DefaultKeyValueDiffer.prototype.onDestroy = function () { }; DefaultKeyValueDiffer.prototype.check = function (map) { var _this = this; this._reset(); var records = this._records; var oldSeqRecord = this._mapHead; var lastOldSeqRecord = null; var lastNewSeqRecord = null; var seqChanged = false; this._forEach(map, function (value, key) { var newSeqRecord; if (oldSeqRecord !== null && key === oldSeqRecord.key) { newSeqRecord = oldSeqRecord; if (!lang_1.looseIdentical(value, oldSeqRecord.currentValue)) { oldSeqRecord.previousValue = oldSeqRecord.currentValue; oldSeqRecord.currentValue = value; _this._addToChanges(oldSeqRecord); } } else { seqChanged = true; if (oldSeqRecord !== null) { oldSeqRecord._next = null; 
_this._removeFromSeq(lastOldSeqRecord, oldSeqRecord); _this._addToRemovals(oldSeqRecord); } if (records.has(key)) { newSeqRecord = records.get(key); } else { newSeqRecord = new KVChangeRecord(key); records.set(key, newSeqRecord); newSeqRecord.currentValue = value; _this._addToAdditions(newSeqRecord); } } if (seqChanged) { if (_this._isInRemovals(newSeqRecord)) { _this._removeFromRemovals(newSeqRecord); } if (lastNewSeqRecord == null) { _this._mapHead = newSeqRecord; } else { lastNewSeqRecord._next = newSeqRecord; } } lastOldSeqRecord = oldSeqRecord; lastNewSeqRecord = newSeqRecord; oldSeqRecord = oldSeqRecord === null ? null : oldSeqRecord._next; }); this._truncate(lastOldSeqRecord, oldSeqRecord); return this.isDirty; }; /** @internal */ DefaultKeyValueDiffer.prototype._reset = function () { if (this.isDirty) { var record; // Record the state of the mapping for (record = this._previousMapHead = this._mapHead; record !== null; record = record._next) { record._nextPrevious = record._next; } for (record = this._changesHead; record !== null; record = record._nextChanged) { record.previousValue = record.currentValue; } for (record = this._additionsHead; record != null; record = record._nextAdded) { record.previousValue = record.currentValue; } // todo(vicb) once assert is supported // assert(() { // var r = _changesHead; // while (r != null) { // var nextRecord = r._nextChanged; // r._nextChanged = null; // r = nextRecord; // } // // r = _additionsHead; // while (r != null) { // var nextRecord = r._nextAdded; // r._nextAdded = null; // r = nextRecord; // } // // r = _removalsHead; // while (r != null) { // var nextRecord = r._nextRemoved; // r._nextRemoved = null; // r = nextRecord; // } // // return true; //}); this._changesHead = this._changesTail = null; this._additionsHead = this._additionsTail = null; this._removalsHead = this._removalsTail = null; } }; /** @internal */ DefaultKeyValueDiffer.prototype._truncate = function (lastRecord, record) { while (record !== 
null) { if (lastRecord === null) { this._mapHead = null; } else { lastRecord._next = null; } var nextRecord = record._next; // todo(vicb) assert // assert((() { // record._next = null; // return true; //})); this._addToRemovals(record); lastRecord = record; record = nextRecord; } for (var rec = this._removalsHead; rec !== null; rec = rec._nextRemoved) { rec.previousValue = rec.currentValue; rec.currentValue = null; this._records.delete(rec.key); } }; /** @internal */ DefaultKeyValueDiffer.prototype._isInRemovals = function (record) { return record === this._removalsHead || record._nextRemoved !== null || record._prevRemoved !== null; }; /** @internal */ DefaultKeyValueDiffer.prototype._addToRemovals = function (record) { // todo(vicb) assert // assert(record._next == null); // assert(record._nextAdded == null); // assert(record._nextChanged == null); // assert(record._nextRemoved == null); // assert(record._prevRemoved == null); if (this._removalsHead === null) { this._removalsHead = this._removalsTail = record; } else { this._removalsTail._nextRemoved = record; record._prevRemoved = this._removalsTail; this._removalsTail = record; } }; /** @internal */ DefaultKeyValueDiffer.prototype._removeFromSeq = function (prev, record) { var next = record._next; if (prev === null) { this._mapHead = next; } else { prev._next = next; } // todo(vicb) assert // assert((() { // record._next = null; // return true; //})()); }; /** @internal */ DefaultKeyValueDiffer.prototype._removeFromRemovals = function (record) { // todo(vicb) assert // assert(record._next == null); // assert(record._nextAdded == null); // assert(record._nextChanged == null); var prev = record._prevRemoved; var next = record._nextRemoved; if (prev === null) { this._removalsHead = next; } else { prev._nextRemoved = next; } if (next === null) { this._removalsTail = prev; } else { next._prevRemoved = prev; } record._prevRemoved = record._nextRemoved = null; }; /** @internal */ 
DefaultKeyValueDiffer.prototype._addToAdditions = function (record) { // todo(vicb): assert // assert(record._next == null); // assert(record._nextAdded == null); // assert(record._nextChanged == null); // assert(record._nextRemoved == null); // assert(record._prevRemoved == null); if (this._additionsHead === null) { this._additionsHead = this._additionsTail = record; } else { this._additionsTail._nextAdded = record; this._additionsTail = record; } }; /** @internal */ DefaultKeyValueDiffer.prototype._addToChanges = function (record) { // todo(vicb) assert // assert(record._nextAdded == null); // assert(record._nextChanged == null); // assert(record._nextRemoved == null); // assert(record._prevRemoved == null); if (this._changesHead === null) { this._changesHead = this._changesTail = record; } else { this._changesTail._nextChanged = record; this._changesTail = record; } }; DefaultKeyValueDiffer.prototype.toString = function () { var items = []; var previous = []; var changes = []; var additions = []; var removals = []; var record; for (record = this._mapHead; record !== null; record = record._next) { items.push(lang_1.stringify(record)); } for (record = this._previousMapHead; record !== null; record = record._nextPrevious) { previous.push(lang_1.stringify(record)); } for (record = this._changesHead; record !== null; record = record._nextChanged) { changes.push(lang_1.stringify(record)); } for (record = this._additionsHead; record !== null; record = record._nextAdded) { additions.push(lang_1.stringify(record)); } for (record = this._removalsHead; record !== null; record = record._nextRemoved) { removals.push(lang_1.stringify(record)); } return "map: " + items.join(', ') + "\n" + "previous: " + previous.join(', ') + "\n" + "additions: " + additions.join(', ') + "\n" + "changes: " + changes.join(', ') + "\n" + "removals: " + removals.join(', ') + "\n"; }; /** @internal */ DefaultKeyValueDiffer.prototype._forEach = function (obj, fn) { if (obj instanceof Map) { 
obj.forEach(fn); } else { collection_1.StringMapWrapper.forEach(obj, fn); } }; return DefaultKeyValueDiffer; })(); exports.DefaultKeyValueDiffer = DefaultKeyValueDiffer; var KVChangeRecord = (function () { function KVChangeRecord(key) { this.key = key; this.previousValue = null; this.currentValue = null; /** @internal */ this._nextPrevious = null; /** @internal */ this._next = null; /** @internal */ this._nextAdded = null; /** @internal */ this._nextRemoved = null; /** @internal */ this._prevRemoved = null; /** @internal */ this._nextChanged = null; } KVChangeRecord.prototype.toString = function () { return lang_1.looseIdentical(this.previousValue, this.currentValue) ? lang_1.stringify(this.key) : (lang_1.stringify(this.key) + '[' + lang_1.stringify(this.previousValue) + '->' + lang_1.stringify(this.currentValue) + ']'); }; return KVChangeRecord; })(); exports.KVChangeRecord = KVChangeRecord; //# sourceMappingURL=default_keyvalue_differ.js.map
binariedMe/blogging
node_modules/angular2/src/core/change_detection/differs/default_keyvalue_differ.js
JavaScript
mit
14,210
window.hideAlert = function () { $('#alertMessage').addClass("hidden"); $('#alertMessage').text(""); }; window.showAlert = function (msg) { $('#alertMessage').text(msg); $('#alertMessage').addClass("alert-danger"); $('#alertMessage').removeClass("hidden"); $('#alertMessage').fadeOut(100).fadeIn(100).fadeOut(100).fadeIn(100); }; window.showInfo = function (msg) { $('#alertMessage').text(msg); $('#alertMessage').removeClass("alert-danger"); $('#alertMessage').removeClass("hidden"); $('#alertMessage').fadeOut(100).fadeIn(100).fadeOut(100).fadeIn(100); }; window.dataErrorAlert = function (data) { switch (data.idError) { case "InvalidFile": showAlert(Resources["InvalidFile"]); break; case "InvalidReg": showAlert(Resources["WrongRegExpMessage"]); break; case "NotFound": showAlert(Resources["NoSearchResultsMessage"]); break; case "InvalidPassword": showAlert(Resources["UnlockInvalidPassword"]); break; default: showAlert(data.idError); break; } }; window.handleError = function (xhr, exception) { hideLoader(); $('#workButton').removeClass("hidden"); var msg = ''; if (xhr.status === 0) { msg = 'Not connect.\n Verify Network.'; } else if (xhr.status == 404) { msg = 'Requested page not found. [404]'; } else if (xhr.status == 500) { msg = 'Internal Server Error [500].'; } else if (exception === 'parsererror') { msg = 'Requested JSON parse failed.'; } else if (exception === 'timeout') { msg = 'Time out error.'; } else if (exception === 'abort') { msg = 'Ajax request aborted.'; } else { msg = 'Uncaught Error.\n' + xhr.responseText; } showAlert(msg); };
asposebarcode/Aspose_BarCode_NET
Demos/src/Aspose.BarCode.Live.Demos.UI/Scripts/Shared/Alert.js
JavaScript
mit
1,886
using System; using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore.Infrastructure; using Microsoft.EntityFrameworkCore.Metadata; using Microsoft.EntityFrameworkCore.Migrations; using Satrabel.Starter.EntityFramework; using Abp.Authorization; using Abp.BackgroundJobs; using Abp.Notifications; namespace Satrabel.OpenApp.Migrations { [DbContext(typeof(AppDbContext))] [Migration("20170621153937_Added_Description_And_IsActive_To_Role")] partial class Added_Description_And_IsActive_To_Role { protected override void BuildTargetModel(ModelBuilder modelBuilder) { modelBuilder .HasAnnotation("ProductVersion", "1.1.2") .HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn); modelBuilder.Entity("Abp.Application.Editions.Edition", b => { b.Property<int>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long?>("DeleterUserId"); b.Property<DateTime?>("DeletionTime"); b.Property<string>("DisplayName") .IsRequired() .HasMaxLength(64); b.Property<bool>("IsDeleted"); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<string>("Name") .IsRequired() .HasMaxLength(32); b.HasKey("Id"); b.ToTable("AbpEditions"); }); modelBuilder.Entity("Abp.Application.Features.FeatureSetting", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<string>("Discriminator") .IsRequired(); b.Property<string>("Name") .IsRequired() .HasMaxLength(128); b.Property<string>("Value") .IsRequired() .HasMaxLength(2000); b.HasKey("Id"); b.ToTable("AbpFeatures"); b.HasDiscriminator<string>("Discriminator").HasValue("FeatureSetting"); }); modelBuilder.Entity("Abp.Auditing.AuditLog", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<string>("BrowserInfo") .HasMaxLength(256); b.Property<string>("ClientIpAddress") .HasMaxLength(64); 
b.Property<string>("ClientName") .HasMaxLength(128); b.Property<string>("CustomData") .HasMaxLength(2000); b.Property<string>("Exception") .HasMaxLength(2000); b.Property<int>("ExecutionDuration"); b.Property<DateTime>("ExecutionTime"); b.Property<int?>("ImpersonatorTenantId"); b.Property<long?>("ImpersonatorUserId"); b.Property<string>("MethodName") .HasMaxLength(256); b.Property<string>("Parameters") .HasMaxLength(1024); b.Property<string>("ServiceName") .HasMaxLength(256); b.Property<int?>("TenantId"); b.Property<long?>("UserId"); b.HasKey("Id"); b.HasIndex("TenantId", "ExecutionDuration"); b.HasIndex("TenantId", "ExecutionTime"); b.HasIndex("TenantId", "UserId"); b.ToTable("AbpAuditLogs"); }); modelBuilder.Entity("Abp.Authorization.PermissionSetting", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<string>("Discriminator") .IsRequired(); b.Property<bool>("IsGranted"); b.Property<string>("Name") .IsRequired() .HasMaxLength(128); b.Property<int?>("TenantId"); b.HasKey("Id"); b.HasIndex("TenantId", "Name"); b.ToTable("AbpPermissions"); b.HasDiscriminator<string>("Discriminator").HasValue("PermissionSetting"); }); modelBuilder.Entity("Abp.Authorization.Roles.RoleClaim", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<string>("ClaimType"); b.Property<string>("ClaimValue"); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<int>("RoleId"); b.Property<int?>("TenantId"); b.HasKey("Id"); b.HasIndex("RoleId"); b.HasIndex("TenantId", "ClaimType"); b.ToTable("AbpRoleClaims"); }); modelBuilder.Entity("Abp.Authorization.Users.UserAccount", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long?>("DeleterUserId"); b.Property<DateTime?>("DeletionTime"); b.Property<string>("EmailAddress"); b.Property<bool>("IsDeleted"); 
b.Property<DateTime?>("LastLoginTime"); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<int?>("TenantId"); b.Property<long>("UserId"); b.Property<long?>("UserLinkId"); b.Property<string>("UserName"); b.HasKey("Id"); b.HasIndex("EmailAddress"); b.HasIndex("UserName"); b.HasIndex("TenantId", "EmailAddress"); b.HasIndex("TenantId", "UserId"); b.HasIndex("TenantId", "UserName"); b.ToTable("AbpUserAccounts"); }); modelBuilder.Entity("Abp.Authorization.Users.UserClaim", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<string>("ClaimType"); b.Property<string>("ClaimValue"); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<int?>("TenantId"); b.Property<long>("UserId"); b.HasKey("Id"); b.HasIndex("UserId"); b.HasIndex("TenantId", "ClaimType"); b.ToTable("AbpUserClaims"); }); modelBuilder.Entity("Abp.Authorization.Users.UserLogin", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<string>("LoginProvider") .IsRequired() .HasMaxLength(128); b.Property<string>("ProviderKey") .IsRequired() .HasMaxLength(256); b.Property<int?>("TenantId"); b.Property<long>("UserId"); b.HasKey("Id"); b.HasIndex("UserId"); b.HasIndex("TenantId", "UserId"); b.HasIndex("TenantId", "LoginProvider", "ProviderKey"); b.ToTable("AbpUserLogins"); }); modelBuilder.Entity("Abp.Authorization.Users.UserLoginAttempt", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<string>("BrowserInfo") .HasMaxLength(256); b.Property<string>("ClientIpAddress") .HasMaxLength(64); b.Property<string>("ClientName") .HasMaxLength(128); b.Property<DateTime>("CreationTime"); b.Property<byte>("Result"); b.Property<string>("TenancyName") .HasMaxLength(64); b.Property<int?>("TenantId"); b.Property<long?>("UserId"); b.Property<string>("UserNameOrEmailAddress") .HasMaxLength(255); b.HasKey("Id"); b.HasIndex("UserId", "TenantId"); b.HasIndex("TenancyName", "UserNameOrEmailAddress", "Result"); 
b.ToTable("AbpUserLoginAttempts"); }); modelBuilder.Entity("Abp.Authorization.Users.UserOrganizationUnit", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long>("OrganizationUnitId"); b.Property<int?>("TenantId"); b.Property<long>("UserId"); b.HasKey("Id"); b.HasIndex("TenantId", "OrganizationUnitId"); b.HasIndex("TenantId", "UserId"); b.ToTable("AbpUserOrganizationUnits"); }); modelBuilder.Entity("Abp.Authorization.Users.UserRole", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<int>("RoleId"); b.Property<int?>("TenantId"); b.Property<long>("UserId"); b.HasKey("Id"); b.HasIndex("UserId"); b.HasIndex("TenantId", "RoleId"); b.HasIndex("TenantId", "UserId"); b.ToTable("AbpUserRoles"); }); modelBuilder.Entity("Abp.Authorization.Users.UserToken", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<string>("LoginProvider"); b.Property<string>("Name"); b.Property<int?>("TenantId"); b.Property<long>("UserId"); b.Property<string>("Value"); b.HasKey("Id"); b.HasIndex("UserId"); b.HasIndex("TenantId", "UserId"); b.ToTable("AbpUserTokens"); }); modelBuilder.Entity("Abp.BackgroundJobs.BackgroundJobInfo", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<bool>("IsAbandoned"); b.Property<string>("JobArgs") .IsRequired() .HasMaxLength(1048576); b.Property<string>("JobType") .IsRequired() .HasMaxLength(512); b.Property<DateTime?>("LastTryTime"); b.Property<DateTime>("NextTryTime"); b.Property<byte>("Priority"); b.Property<short>("TryCount"); b.HasKey("Id"); b.HasIndex("IsAbandoned", "NextTryTime"); b.ToTable("AbpBackgroundJobs"); }); modelBuilder.Entity("Abp.Configuration.Setting", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); 
b.Property<long?>("CreatorUserId"); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<string>("Name") .IsRequired() .HasMaxLength(256); b.Property<int?>("TenantId"); b.Property<long?>("UserId"); b.Property<string>("Value") .HasMaxLength(2000); b.HasKey("Id"); b.HasIndex("UserId"); b.HasIndex("TenantId", "Name"); b.ToTable("AbpSettings"); }); modelBuilder.Entity("Abp.Localization.ApplicationLanguage", b => { b.Property<int>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long?>("DeleterUserId"); b.Property<DateTime?>("DeletionTime"); b.Property<string>("DisplayName") .IsRequired() .HasMaxLength(64); b.Property<string>("Icon") .HasMaxLength(128); b.Property<bool>("IsDeleted"); b.Property<bool>("IsDisabled"); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<string>("Name") .IsRequired() .HasMaxLength(10); b.Property<int?>("TenantId"); b.HasKey("Id"); b.HasIndex("TenantId", "Name"); b.ToTable("AbpLanguages"); }); modelBuilder.Entity("Abp.Localization.ApplicationLanguageText", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<string>("Key") .IsRequired() .HasMaxLength(256); b.Property<string>("LanguageName") .IsRequired() .HasMaxLength(10); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<string>("Source") .IsRequired() .HasMaxLength(128); b.Property<int?>("TenantId"); b.Property<string>("Value") .IsRequired() .HasMaxLength(67108864); b.HasKey("Id"); b.HasIndex("TenantId", "Source", "LanguageName", "Key"); b.ToTable("AbpLanguageTexts"); }); modelBuilder.Entity("Abp.Notifications.NotificationInfo", b => { b.Property<Guid>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<string>("Data") 
.HasMaxLength(1048576); b.Property<string>("DataTypeName") .HasMaxLength(512); b.Property<string>("EntityId") .HasMaxLength(96); b.Property<string>("EntityTypeAssemblyQualifiedName") .HasMaxLength(512); b.Property<string>("EntityTypeName") .HasMaxLength(250); b.Property<string>("ExcludedUserIds") .HasMaxLength(131072); b.Property<string>("NotificationName") .IsRequired() .HasMaxLength(96); b.Property<byte>("Severity"); b.Property<string>("TenantIds") .HasMaxLength(131072); b.Property<string>("UserIds") .HasMaxLength(131072); b.HasKey("Id"); b.ToTable("AbpNotifications"); }); modelBuilder.Entity("Abp.Notifications.NotificationSubscriptionInfo", b => { b.Property<Guid>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<string>("EntityId") .HasMaxLength(96); b.Property<string>("EntityTypeAssemblyQualifiedName") .HasMaxLength(512); b.Property<string>("EntityTypeName") .HasMaxLength(250); b.Property<string>("NotificationName") .HasMaxLength(96); b.Property<int?>("TenantId"); b.Property<long>("UserId"); b.HasKey("Id"); b.HasIndex("NotificationName", "EntityTypeName", "EntityId", "UserId"); b.HasIndex("TenantId", "NotificationName", "EntityTypeName", "EntityId", "UserId"); b.ToTable("AbpNotificationSubscriptions"); }); modelBuilder.Entity("Abp.Notifications.TenantNotificationInfo", b => { b.Property<Guid>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<string>("Data") .HasMaxLength(1048576); b.Property<string>("DataTypeName") .HasMaxLength(512); b.Property<string>("EntityId") .HasMaxLength(96); b.Property<string>("EntityTypeAssemblyQualifiedName") .HasMaxLength(512); b.Property<string>("EntityTypeName") .HasMaxLength(250); b.Property<string>("NotificationName") .IsRequired() .HasMaxLength(96); b.Property<byte>("Severity"); b.Property<int?>("TenantId"); b.HasKey("Id"); b.HasIndex("TenantId"); b.ToTable("AbpTenantNotifications"); }); 
modelBuilder.Entity("Abp.Notifications.UserNotificationInfo", b => { b.Property<Guid>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<int>("State"); b.Property<int?>("TenantId"); b.Property<Guid>("TenantNotificationId"); b.Property<long>("UserId"); b.HasKey("Id"); b.HasIndex("UserId", "State", "CreationTime"); b.ToTable("AbpUserNotifications"); }); modelBuilder.Entity("Abp.Organizations.OrganizationUnit", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<string>("Code") .IsRequired() .HasMaxLength(95); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long?>("DeleterUserId"); b.Property<DateTime?>("DeletionTime"); b.Property<string>("DisplayName") .IsRequired() .HasMaxLength(128); b.Property<bool>("IsDeleted"); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<long?>("ParentId"); b.Property<int?>("TenantId"); b.HasKey("Id"); b.HasIndex("ParentId"); b.HasIndex("TenantId", "Code"); b.ToTable("AbpOrganizationUnits"); }); modelBuilder.Entity("Satrabel.JobManager.Authorization.Roles.Role", b => { b.Property<int>("Id") .ValueGeneratedOnAdd(); b.Property<string>("ConcurrencyStamp") .IsConcurrencyToken(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long?>("DeleterUserId"); b.Property<DateTime?>("DeletionTime"); b.Property<string>("Description") .HasMaxLength(5000); b.Property<string>("DisplayName") .IsRequired() .HasMaxLength(64); b.Property<bool>("IsActive"); b.Property<bool>("IsDefault"); b.Property<bool>("IsDeleted"); b.Property<bool>("IsStatic"); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<string>("Name") .IsRequired() .HasMaxLength(32); b.Property<string>("NormalizedName") .IsRequired() .HasMaxLength(32); b.Property<int?>("TenantId"); b.HasKey("Id"); b.HasIndex("CreatorUserId"); b.HasIndex("DeleterUserId"); b.HasIndex("LastModifierUserId"); 
b.HasIndex("TenantId", "NormalizedName"); b.ToTable("AbpRoles"); }); modelBuilder.Entity("Satrabel.JobManager.Authorization.Users.User", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<int>("AccessFailedCount"); b.Property<string>("AuthenticationSource") .HasMaxLength(64); b.Property<string>("ConcurrencyStamp") .IsConcurrencyToken(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long?>("DeleterUserId"); b.Property<DateTime?>("DeletionTime"); b.Property<string>("EmailAddress") .IsRequired() .HasMaxLength(256); b.Property<string>("EmailConfirmationCode") .HasMaxLength(328); b.Property<bool>("IsActive"); b.Property<bool>("IsDeleted"); b.Property<bool>("IsEmailConfirmed"); b.Property<bool>("IsLockoutEnabled"); b.Property<bool>("IsPhoneNumberConfirmed"); b.Property<bool>("IsTwoFactorEnabled"); b.Property<DateTime?>("LastLoginTime"); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<DateTime?>("LockoutEndDateUtc"); b.Property<string>("Name") .IsRequired() .HasMaxLength(32); b.Property<string>("NormalizedEmailAddress") .IsRequired() .HasMaxLength(256); b.Property<string>("NormalizedUserName") .IsRequired() .HasMaxLength(32); b.Property<string>("Password") .IsRequired() .HasMaxLength(128); b.Property<string>("PasswordResetCode") .HasMaxLength(328); b.Property<string>("PhoneNumber"); b.Property<string>("SecurityStamp"); b.Property<string>("Surname") .IsRequired() .HasMaxLength(32); b.Property<int?>("TenantId"); b.Property<string>("UserName") .IsRequired() .HasMaxLength(32); b.HasKey("Id"); b.HasIndex("CreatorUserId"); b.HasIndex("DeleterUserId"); b.HasIndex("LastModifierUserId"); b.HasIndex("TenantId", "NormalizedEmailAddress"); b.HasIndex("TenantId", "NormalizedUserName"); b.ToTable("AbpUsers"); }); modelBuilder.Entity("Satrabel.JobManager.MultiTenancy.Tenant", b => { b.Property<int>("Id") .ValueGeneratedOnAdd(); b.Property<string>("ConnectionString") 
.HasMaxLength(1024); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long?>("DeleterUserId"); b.Property<DateTime?>("DeletionTime"); b.Property<int?>("EditionId"); b.Property<bool>("IsActive"); b.Property<bool>("IsDeleted"); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<string>("Name") .IsRequired() .HasMaxLength(128); b.Property<string>("TenancyName") .IsRequired() .HasMaxLength(64); b.HasKey("Id"); b.HasIndex("CreatorUserId"); b.HasIndex("DeleterUserId"); b.HasIndex("EditionId"); b.HasIndex("LastModifierUserId"); b.HasIndex("TenancyName"); b.ToTable("AbpTenants"); }); modelBuilder.Entity("Abp.Application.Features.EditionFeatureSetting", b => { b.HasBaseType("Abp.Application.Features.FeatureSetting"); b.Property<int>("EditionId"); b.HasIndex("EditionId", "Name"); b.ToTable("AbpFeatures"); b.HasDiscriminator().HasValue("EditionFeatureSetting"); }); modelBuilder.Entity("Abp.MultiTenancy.TenantFeatureSetting", b => { b.HasBaseType("Abp.Application.Features.FeatureSetting"); b.Property<int>("TenantId"); b.HasIndex("TenantId", "Name"); b.ToTable("AbpFeatures"); b.HasDiscriminator().HasValue("TenantFeatureSetting"); }); modelBuilder.Entity("Abp.Authorization.Roles.RolePermissionSetting", b => { b.HasBaseType("Abp.Authorization.PermissionSetting"); b.Property<int>("RoleId"); b.HasIndex("RoleId"); b.ToTable("AbpPermissions"); b.HasDiscriminator().HasValue("RolePermissionSetting"); }); modelBuilder.Entity("Abp.Authorization.Users.UserPermissionSetting", b => { b.HasBaseType("Abp.Authorization.PermissionSetting"); b.Property<long>("UserId"); b.HasIndex("UserId"); b.ToTable("AbpPermissions"); b.HasDiscriminator().HasValue("UserPermissionSetting"); }); modelBuilder.Entity("Abp.Authorization.Roles.RoleClaim", b => { b.HasOne("Satrabel.JobManager.Authorization.Roles.Role") .WithMany("Claims") .HasForeignKey("RoleId") .OnDelete(DeleteBehavior.Cascade); }); 
modelBuilder.Entity("Abp.Authorization.Users.UserClaim", b => { b.HasOne("Satrabel.JobManager.Authorization.Users.User") .WithMany("Claims") .HasForeignKey("UserId") .OnDelete(DeleteBehavior.Cascade); }); modelBuilder.Entity("Abp.Authorization.Users.UserLogin", b => { b.HasOne("Satrabel.JobManager.Authorization.Users.User") .WithMany("Logins") .HasForeignKey("UserId") .OnDelete(DeleteBehavior.Cascade); }); modelBuilder.Entity("Abp.Authorization.Users.UserRole", b => { b.HasOne("Satrabel.JobManager.Authorization.Users.User") .WithMany("Roles") .HasForeignKey("UserId") .OnDelete(DeleteBehavior.Cascade); }); modelBuilder.Entity("Abp.Authorization.Users.UserToken", b => { b.HasOne("Satrabel.JobManager.Authorization.Users.User") .WithMany("Tokens") .HasForeignKey("UserId") .OnDelete(DeleteBehavior.Cascade); }); modelBuilder.Entity("Abp.Configuration.Setting", b => { b.HasOne("Satrabel.JobManager.Authorization.Users.User") .WithMany("Settings") .HasForeignKey("UserId"); }); modelBuilder.Entity("Abp.Organizations.OrganizationUnit", b => { b.HasOne("Abp.Organizations.OrganizationUnit", "Parent") .WithMany("Children") .HasForeignKey("ParentId"); }); modelBuilder.Entity("Satrabel.JobManager.Authorization.Roles.Role", b => { b.HasOne("Satrabel.JobManager.Authorization.Users.User", "CreatorUser") .WithMany() .HasForeignKey("CreatorUserId"); b.HasOne("Satrabel.JobManager.Authorization.Users.User", "DeleterUser") .WithMany() .HasForeignKey("DeleterUserId"); b.HasOne("Satrabel.JobManager.Authorization.Users.User", "LastModifierUser") .WithMany() .HasForeignKey("LastModifierUserId"); }); modelBuilder.Entity("Satrabel.JobManager.Authorization.Users.User", b => { b.HasOne("Satrabel.JobManager.Authorization.Users.User", "CreatorUser") .WithMany() .HasForeignKey("CreatorUserId"); b.HasOne("Satrabel.JobManager.Authorization.Users.User", "DeleterUser") .WithMany() .HasForeignKey("DeleterUserId"); b.HasOne("Satrabel.JobManager.Authorization.Users.User", "LastModifierUser") .WithMany() 
.HasForeignKey("LastModifierUserId"); }); modelBuilder.Entity("Satrabel.JobManager.MultiTenancy.Tenant", b => { b.HasOne("Satrabel.JobManager.Authorization.Users.User", "CreatorUser") .WithMany() .HasForeignKey("CreatorUserId"); b.HasOne("Satrabel.JobManager.Authorization.Users.User", "DeleterUser") .WithMany() .HasForeignKey("DeleterUserId"); b.HasOne("Abp.Application.Editions.Edition", "Edition") .WithMany() .HasForeignKey("EditionId"); b.HasOne("Satrabel.JobManager.Authorization.Users.User", "LastModifierUser") .WithMany() .HasForeignKey("LastModifierUserId"); }); modelBuilder.Entity("Abp.Application.Features.EditionFeatureSetting", b => { b.HasOne("Abp.Application.Editions.Edition", "Edition") .WithMany() .HasForeignKey("EditionId") .OnDelete(DeleteBehavior.Cascade); }); modelBuilder.Entity("Abp.Authorization.Roles.RolePermissionSetting", b => { b.HasOne("Satrabel.JobManager.Authorization.Roles.Role") .WithMany("Permissions") .HasForeignKey("RoleId") .OnDelete(DeleteBehavior.Cascade); }); modelBuilder.Entity("Abp.Authorization.Users.UserPermissionSetting", b => { b.HasOne("Satrabel.JobManager.Authorization.Users.User") .WithMany("Permissions") .HasForeignKey("UserId") .OnDelete(DeleteBehavior.Cascade); }); } } }
sachatrauwaen/OpenApp
src/Satrabel.Starter.Web.Spa/Migrations/20170621153937_Added_Description_And_IsActive_To_Role.Designer.cs
C#
mit
35,419
<?php /* * Copyright 2007-2013 Charles du Jeu - Abstrium SAS <team (at) pyd.io> * This file is part of Pydio. * * Pydio is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Pydio is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with Pydio. If not, see <http://www.gnu.org/licenses/>. * * The latest code can be found at <http://pyd.io/>. */ $mess=array( "Source Viewer" => "Afficheur de sources", "Syntax Highlighter for all major code source files" => "Coloration syntaxique pour la plupart des fichiers de code source", );
reverserob/pydio-docker
pydio-core/plugins/editor.codemirror/i18n/conf/fr.php
PHP
mit
987
define('controllers/panel',['require','jquery','backbone','utils/metrics','utils/browser','utils/video-player','utils/pubsub','controllers/panel-display'],function(require) { var $ = require('jquery'), Backbone = require('backbone'), Metrics = require('utils/metrics'), Browser = require('utils/browser'), VideoPlayer = require('utils/video-player'), PubSub = require('utils/pubsub'), PanelDisplay = require('controllers/panel-display') ; var PanelView = Backbone.View.extend({ events: { }, panelOn: false, minWidth: 320, minHeight: 180, leftLimit: 100, topLimit: 100, offset: 10, border: 5, initialize: function() { Browser.checkMobileTabletDevice(); this.logger = new eventsCore.util.Logger('PanelView'); this.panel1 = new PanelDisplay({el: '#panel1'}).render(); this.panel2 = new PanelDisplay({el: '#panel2'}).render(); //this.logger.info('initialize - panel1:%o panel2:%o', this.panel1, this.panel2); this.on('route:change', this.checkPage); this.listenTo(PubSub, 'video:playPanel', this.onPlayPanel); this.listenTo(PubSub, 'video:exitPanel', this.onExitPanel); this.listenTo(PubSub, 'video:resetHero', this.onResetHero); this.listenTo(PubSub, 'video:resetVod', this.onResetVod); this.listenTo(VideoPlayer, 'player:play', this.onPlayEvent); this.listenTo(VideoPlayer, 'player:panelOpen', this.onPanelOpenEvent); this.listenTo(VideoPlayer, 'player:panelClosed', this.onPanelClosedEvent); }, render: function() { return this; }, /** * checks state/position of each panel and updates initial mute status * executes on play and route change */ checkVolume: function() { this.logger.info('checkVolume - panel1:%o panel2:%o', this.panel1.state(), this.panel2.state()); if (this.panel1.state() != '' && this.panel2.state() == '') { this.panel1.mute(false); this.panel2.mute(true); } else if (this.panel2.state() != '' && this.panel1.state() == '') { this.panel2.mute(false); this.panel1.mute(true); } else if (this.panel1.state() == 'floatVideo' && this.panel2.state() == 'heroVideo') { 
this.panel2.mute(false); this.panel1.mute(true); } else if (this.panel1.state() == 'heroVideo' && this.panel2.state() == 'floatVideo') { this.panel1.mute(false); this.panel2.mute(true); } }, /** * close any open hero panel * - used for mobile internal ref to a different watch live channel */ onResetHero: function() { if(this.panel1.state() === 'heroVideo') { this.panel1.onPanelExit(); } else if(this.panel2.state() === 'heroVideo') { this.panel2.onPanelExit(); } }, /** * close any open vod floated panel */ onResetVod: function() { if(this.panel1.state() === 'floatVideo') { this.panel1.onPanelExit(); } else if(this.panel2.state() === 'floatVideo') { this.panel2.onPanelExit(); } }, /** * play video in a panel, check for channel existing in panel first and use existing if playing * @param data - panel video data * @param channel - this live video data * @param options - options to be passed to video player, consisting of: * floated - optional boolean indicating if should start in float mode * vod - indicates if playing a vod */ onPlayPanel: function(data, channel, options) { options = _.extend({ floated: false, vod: false }, options); this.logger.info('onPlayPanel - panel1:%o chan1:%o panel2:%o chan2:%o data:%o', this.panel1.state(), this.panel1.channelId(), this.panel2.state(), this.panel2.channelId(), data); //if panel1 floating and opening the same channel, close to hero (to force back to hero when return to live channel page) // do not close if float is true, call is trying to open same channel in already open float panel if (this.panel1.channelId() == data[0].id) { // if panel has no state, reset it to play channel if(this.panel1.state() === '') { this.panel1.playPanel(data, channel, options); } else if (this.panel1.state() == 'floatVideo' && !options.floated) this.panel1.panelClose(data, false); else this.logger.warn('onPlayPanel - ignoring call, attempted to open same channel already active'); } //if panel2 floating and opening the same channel, close to hero 
(to force back to hero when return to live channel page) // do not close if float is true, call i trying to open same channel in already open float panel else if (this.panel2.channelId() == data[0].id){ // if panel has no state, reset it to play channel if(this.panel2.state() === '') { this.panel2.playPanel(data, channel, options); } else if (this.panel2.state() == 'floatVideo' && !options.floated) this.panel2.panelClose(data, false); else this.logger.warn('onPlayPanel - ignoring call, attempted to open same channel to floating panel'); } //if panel1 in hero use it, (if not playing this channel) else if ((this.panel1.state() == 'heroVideo' || this.panel1.state() == '') && this.panel1.channelId() != data[0].id) { this.panel1.playPanel(data, channel, options); } //else use panel2 (if not playing this channel) else if (this.panel2.channelId() != data[0].id){ this.panel2.playPanel(data, channel, options); } }, /** * exit video playing in panel, whichever panel is open */ onExitPanel: function() { this.logger.info('onExitPanel - panel1:%o chan1:%o panel2:%o chan2:%o', this.panel1.state(), this.panel1.channelId(), this.panel2.state(), this.panel2.channelId()); // close whichever one is floated if(this.panel1.state() === 'floatVideo') { this.panel1.onPanelExit(); } else if(this.panel2.state() === 'floatVideo') { this.panel2.onPanelExit(); } }, /** * on play, initiates check for setting initial mute * @param data - event data with panel id */ onPlayEvent: function(data) { //this.logger.info('onPlayEvent - data:%o', data.id); if (data.id == 'panel1' || data.id == 'panel2') this.checkVolume(); }, /** * handle panel open event from video player * triggers panel to transition to float state * @param data - event data with panel id */ onPanelOpenEvent: function(data) { this.logger.info('onPanelOpenEvent - panel1:%o panel2:%o id:%o', this.panel1.state(), this.panel2.state(), data.id); if(data.id == 'panel1') { if (this.panel2.state() == 'floatVideo') { 
this.panel2.panelClose(null, false); } this.panel1.panelOpen(); } else if(data.id == 'panel2') { if (this.panel1.state() == 'floatVideo') { this.panel1.panelClose(null, false); } this.panel2.panelOpen(); } }, /** * handle panel close event from video player * triggers panel to return to hero state * @param data - event data with panel id */ onPanelClosedEvent: function(data) { this.logger.info('onPanelClosedEvent - panel1:%o panel2:%o id:%o', this.panel1.state(), this.panel2.state(), data.id); if(data.id == 'panel1') { this.panel1.panelClose(data, true); if (this.panel2.state() == 'heroVideo') { this.panel2.panelClose(data, false); this.checkVolume(); } } else if(data.id == 'panel2') { this.panel2.panelClose(data, true); if (this.panel1.state() == 'heroVideo') { this.panel1.panelClose(data, false); this.checkVolume(); } } }, /** * initiate page check for closing hero on route change * also check volume on route change */ checkPage: function(){ var route = Backbone.history.getFragment(); this.panel1.checkPage(route); this.panel2.checkPage(route); this.checkVolume(); } }); return PanelView; }) ;
rlaj/tmc
source/dist/controllers/panel.js
JavaScript
mit
9,409
// Design Basic Game Solo Challenge

// This is a solo challenge

// Your mission description: To complete a line of the same figure, horizontal, diagonal or vertical
// Overall mission: To win all the time :)
// Goals: make a line of the same kind before the computer does
// Characters: You and the computer
// Objects: tic tac toe
// Functions: clear_board, refresh_board, turn

// Pseudocode
// Make a Tictactoe class
// Initialize the instance
// Paint the board
// Take a turn UNTIL someone wins
// Check if someone won
// Clear the board

// Initial Code

// Number of turns taken so far; game ends after 9 (board full).
var turns = 0;

// 3x3 board; rows are 1-3 (indices 0-2), columns are A-C (indices 0-2).
var board_state = [[" ", " ", " "],
                   [" ", " ", " "],
                   [" ", " ", " "]];

var Tictactoe = {

  // Prompt the current player for a square (column letter + row digit,
  // e.g. "A1") and place their mark on the board.
  // @param user - the mark to place ("o" or "x")
  take_turn: function(user) {
    var mark = prompt("It is your turn, where do you want to mark?");
    // Row digit is the second character; parse it explicitly instead of
    // relying on JS string arithmetic ("1" - 1) for the index below.
    var horizontal = parseInt(mark[1], 10);
    var vertical = mark[0].toUpperCase();
    if (vertical == "A") {
      vertical = 0;
    } else if (vertical == "B") {
      vertical = 1;
    } else {
      // NOTE(review): any letter other than A/B falls through to column C,
      // matching the original behavior — TODO validate input.
      vertical = 2;
    }
    // TODO: reject squares that are already marked (see TODO list below).
    board_state[horizontal - 1][vertical] = user;
    console.log(board_state);
  },

  // Render the board to the console with column headers (A B C),
  // row numbers (1-3) and box-drawing separators between cells/rows.
  print_board: function() {
    var line = "";
    console.log("    A   B   C");
    for (var i in board_state) {
      // Separator printed after each row except the last.
      var new_line = "\n   ═══╬═══╬═══\n";
      for (var x in board_state[i]) {
        var ln = parseInt(i);
        if (x == 0) { line = (ln + 1) + "  "; }
        if (x == 2) {
          if (i == 2) { new_line = "\n"; }
          line += " " + board_state[i][x] + new_line;
        } else {
          line += " " + board_state[i][x] + " ║";
        }
      }
      console.log(line);
    }
  }
};

alert("Welcome to @cyberpolin's Tic Tac Toe\n So it is the turn of User 1, please select where you want to mark...");

Tictactoe.print_board();

// Alternate marks each turn ("o" on even turns, "x" on odd) until the
// board is full. TODO: stop early once a player has won.
while (turns < 9) {
  if (turns % 2 == 0) {
    Tictactoe.take_turn("o");
  } else {
    Tictactoe.take_turn("x");
  }
  Tictactoe.print_board();
  turns++;
}

// REFLECTION
// What was the most difficult part of this challenge?
// Order my thoughts to make the code work as I wanted; also, as JavaScript is not a language designed for the terminal, it was difficult to figure out how it was going to work.

// What did you learn about creating objects and functions that interact with one another?
// It is like in Ruby — I think of them as just methods.

// Did you learn about any new built-in methods you could use in your refactored solution? If so, what were they and how do they work?
// The only one I used was toUpperCase, and they are like Ruby methods even though JavaScript has a different syntax.

// How can you access and manipulate properties of objects?
// Like in Ruby: object[property] = new_value, or the JS way: object.property = new_value

// TODO'S
// Check if a place is marked already
// Check if you have won
// Make it playable against the computer
// MAKE IT GRAPHICAL!!!!
cyberpolin/Phase-0
week-7/tictactoe-game/game.js
JavaScript
mit
2,853
from django.conf import settings from django.conf.urls.defaults import patterns, url from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned from django.core.urlresolvers import NoReverseMatch, reverse, resolve, Resolver404 from django.db.models.sql.constants import QUERY_TERMS, LOOKUP_SEP from django.http import HttpResponse from django.utils.cache import patch_cache_control from tastypie.authentication import Authentication from tastypie.authorization import ReadOnlyAuthorization from tastypie.bundle import Bundle from tastypie.cache import NoCache from tastypie.constants import ALL, ALL_WITH_RELATIONS from tastypie.exceptions import NotFound, BadRequest, InvalidFilterError, HydrationError, InvalidSortError, ImmediateHttpResponse from tastypie.fields import * from tastypie.http import * from tastypie.paginator import Paginator from tastypie.serializers import Serializer from tastypie.throttle import BaseThrottle from tastypie.utils import is_valid_jsonp_callback_value, dict_strip_unicode_keys, trailing_slash from tastypie.utils.mime import determine_format, build_content_type from tastypie.validation import Validation try: set except NameError: from sets import Set as set # The ``copy`` module was added in Python 2.5 and ``copycompat`` was added in # post 1.1.1 Django (r11901) try: from django.utils.copycompat import deepcopy from django.views.decorators.csrf import csrf_exempt except ImportError: from copy import deepcopy def csrf_exempt(func): return func class ResourceOptions(object): """ A configuration class for ``Resource``. Provides sane defaults and the logic needed to augment these settings with the internal ``class Meta`` used on ``Resource`` subclasses. 
""" serializer = Serializer() authentication = Authentication() authorization = ReadOnlyAuthorization() cache = NoCache() throttle = BaseThrottle() validation = Validation() allowed_methods = ['get', 'post', 'put', 'delete'] list_allowed_methods = None detail_allowed_methods = None limit = getattr(settings, 'API_LIMIT_PER_PAGE', 20) api_name = None resource_name = None urlconf_namespace = None default_format = 'application/json' filtering = {} ordering = [] object_class = None queryset = None fields = [] excludes = [] include_resource_uri = True include_absolute_url = False def __new__(cls, meta=None): overrides = {} # Handle overrides. if meta: for override_name in dir(meta): # No internals please. if not override_name.startswith('_'): overrides[override_name] = getattr(meta, override_name) allowed_methods = overrides.get('allowed_methods', ['get', 'post', 'put', 'delete']) if overrides.get('list_allowed_methods', None) is None: overrides['list_allowed_methods'] = allowed_methods if overrides.get('detail_allowed_methods', None) is None: overrides['detail_allowed_methods'] = allowed_methods if not overrides.get('queryset', None) is None: overrides['object_class'] = overrides['queryset'].model return object.__new__(type('ResourceOptions', (cls,), overrides)) class DeclarativeMetaclass(type): def __new__(cls, name, bases, attrs): attrs['base_fields'] = {} declared_fields = {} # Inherit any fields from parent(s). 
try: parents = [b for b in bases if issubclass(b, Resource)] for p in parents: fields = getattr(p, 'base_fields', {}) for field_name, field_object in fields.items(): attrs['base_fields'][field_name] = deepcopy(field_object) except NameError: pass for field_name, obj in attrs.items(): if isinstance(obj, ApiField): field = attrs.pop(field_name) declared_fields[field_name] = field attrs['base_fields'].update(declared_fields) attrs['declared_fields'] = declared_fields new_class = super(DeclarativeMetaclass, cls).__new__(cls, name, bases, attrs) opts = getattr(new_class, 'Meta', None) new_class._meta = ResourceOptions(opts) if not getattr(new_class._meta, 'resource_name', None): # No ``resource_name`` provided. Attempt to auto-name the resource. class_name = new_class.__name__ name_bits = [bit for bit in class_name.split('Resource') if bit] resource_name = ''.join(name_bits).lower() new_class._meta.resource_name = resource_name if getattr(new_class._meta, 'include_resource_uri', True): if not 'resource_uri' in new_class.base_fields: new_class.base_fields['resource_uri'] = CharField(readonly=True) elif 'resource_uri' in new_class.base_fields and not 'resource_uri' in attrs: del(new_class.base_fields['resource_uri']) for field_name, field_object in new_class.base_fields.items(): if hasattr(field_object, 'contribute_to_class'): field_object.contribute_to_class(new_class, field_name) return new_class class Resource(object): """ Handles the data, request dispatch and responding to requests. Serialization/deserialization is handled "at the edges" (i.e. at the beginning/end of the request/response cycle) so that everything internally is Python data structures. This class tries to be non-model specific, so it can be hooked up to other data sources, such as search results, files, other data, etc. 
""" __metaclass__ = DeclarativeMetaclass def __init__(self, api_name=None): self.fields = deepcopy(self.base_fields) if not api_name is None: self._meta.api_name = api_name def __getattr__(self, name): if name in self.fields: return self.fields[name] def wrap_view(self, view): """ Wraps methods so they can be called in a more functional way as well as handling exceptions better. Note that if ``BadRequest`` or an exception with a ``response`` attr are seen, there is special handling to either present a message back to the user or return the response traveling with the exception. """ @csrf_exempt def wrapper(request, *args, **kwargs): try: callback = getattr(self, view) response = callback(request, *args, **kwargs) if request.is_ajax(): # IE excessively caches XMLHttpRequests, so we're disabling # the browser cache here. # See http://www.enhanceie.com/ie/bugs.asp for details. patch_cache_control(response, no_cache=True) return response except (BadRequest, ApiFieldError), e: return HttpBadRequest(e.args[0]) except Exception, e: if hasattr(e, 'response'): return e.response # A real, non-expected exception. # Handle the case where the full traceback is more helpful # than the serialized error. if settings.DEBUG and getattr(settings, 'TASTYPIE_FULL_DEBUG', False): raise # Rather than re-raising, we're going to things similar to # what Django does. The difference is returning a serialized # error message. return self._handle_500(request, e) return wrapper def _handle_500(self, request, exception): import traceback import sys the_trace = '\n'.join(traceback.format_exception(*(sys.exc_info()))) if settings.DEBUG: data = { "error_message": exception.message, "traceback": the_trace, } desired_format = self.determine_format(request) serialized = self.serialize(request, data, desired_format) return HttpApplicationError(content=serialized, content_type=build_content_type(desired_format)) # When DEBUG is False, send an error message to the admins. 
from django.core.mail import mail_admins subject = 'Error (%s IP): %s' % ((request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS and 'internal' or 'EXTERNAL'), request.path) try: request_repr = repr(request) except: request_repr = "Request repr() unavailable" message = "%s\n\n%s" % (the_trace, request_repr) mail_admins(subject, message, fail_silently=True) # Prep the data going out. data = { "error_message": getattr(settings, 'TASTYPIE_CANNED_ERROR', "Sorry, this request could not be processed. Please try again later."), } desired_format = self.determine_format(request) serialized = self.serialize(request, data, desired_format) return HttpApplicationError(content=serialized, content_type=build_content_type(desired_format)) def _build_reverse_url(self, name, args=None, kwargs=None): """ A convenience hook for overriding how URLs are built. See ``NamespacedModelResource._build_reverse_url`` for an example. """ return reverse(name, args=args, kwargs=kwargs) def base_urls(self): """ The standard URLs this ``Resource`` should respond to. """ # Due to the way Django parses URLs, ``get_multiple`` won't work without # a trailing slash. return [ url(r"^(?P<resource_name>%s)%s$" % (self._meta.resource_name, trailing_slash()), self.wrap_view('dispatch_list'), name="api_dispatch_list"), url(r"^(?P<resource_name>%s)/schema%s$" % (self._meta.resource_name, trailing_slash()), self.wrap_view('get_schema'), name="api_get_schema"), url(r"^(?P<resource_name>%s)/set/(?P<pk_list>\w[\w/;-]*)/$" % self._meta.resource_name, self.wrap_view('get_multiple'), name="api_get_multiple"), url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)%s$" % (self._meta.resource_name, trailing_slash()), self.wrap_view('dispatch_detail'), name="api_dispatch_detail"), ] def override_urls(self): """ A hook for adding your own URLs or overriding the default URLs. """ return [] @property def urls(self): """ The endpoints this ``Resource`` responds to. 
Mostly a standard URLconf, this is suitable for either automatic use when registered with an ``Api`` class or for including directly in a URLconf should you choose to. """ urls = self.override_urls() + self.base_urls() urlpatterns = patterns('', *urls ) return urlpatterns def determine_format(self, request): """ Used to determine the desired format. Largely relies on ``tastypie.utils.mime.determine_format`` but here as a point of extension. """ return determine_format(request, self._meta.serializer, default_format=self._meta.default_format) def serialize(self, request, data, format, options=None): """ Given a request, data and a desired format, produces a serialized version suitable for transfer over the wire. Mostly a hook, this uses the ``Serializer`` from ``Resource._meta``. """ options = options or {} if 'text/javascript' in format: # get JSONP callback name. default to "callback" callback = request.GET.get('callback', 'callback') if not is_valid_jsonp_callback_value(callback): raise BadRequest('JSONP callback name is invalid.') options['callback'] = callback return self._meta.serializer.serialize(data, format, options) def deserialize(self, request, data, format='application/json'): """ Given a request, data and a format, deserializes the given data. It relies on the request properly sending a ``CONTENT_TYPE`` header, falling back to ``application/json`` if not provided. Mostly a hook, this uses the ``Serializer`` from ``Resource._meta``. """ return self._meta.serializer.deserialize(data, format=request.META.get('CONTENT_TYPE', 'application/json')) def dispatch_list(self, request, **kwargs): """ A view for handling the various HTTP methods (GET/POST/PUT/DELETE) over the entire list of resources. Relies on ``Resource.dispatch`` for the heavy-lifting. """ return self.dispatch('list', request, **kwargs) def dispatch_detail(self, request, **kwargs): """ A view for handling the various HTTP methods (GET/POST/PUT/DELETE) on a single resource. 
Relies on ``Resource.dispatch`` for the heavy-lifting. """ return self.dispatch('detail', request, **kwargs) def dispatch(self, request_type, request, **kwargs): """ Handles the common operations (allowed HTTP method, authentication, throttling, method lookup) surrounding most CRUD interactions. """ allowed_methods = getattr(self._meta, "%s_allowed_methods" % request_type, None) request_method = self.method_check(request, allowed=allowed_methods) method = getattr(self, "%s_%s" % (request_method, request_type), None) if method is None: raise ImmediateHttpResponse(response=HttpNotImplemented()) self.is_authenticated(request) self.is_authorized(request) self.throttle_check(request) # All clear. Process the request. request = convert_post_to_put(request) response = method(request, **kwargs) # Add the throttled request. self.log_throttled_access(request) # If what comes back isn't a ``HttpResponse``, assume that the # request was accepted and that some action occurred. This also # prevents Django from freaking out. if not isinstance(response, HttpResponse): return HttpAccepted() return response def remove_api_resource_names(self, url_dict): """ Given a dictionary of regex matches from a URLconf, removes ``api_name`` and/or ``resource_name`` if found. This is useful for converting URLconf matches into something suitable for data lookup. For example:: Model.objects.filter(**self.remove_api_resource_names(matches)) """ kwargs_subset = url_dict.copy() for key in ['api_name', 'resource_name']: try: del(kwargs_subset[key]) except KeyError: pass return kwargs_subset def method_check(self, request, allowed=None): """ Ensures that the HTTP method used on the request is allowed to be handled by the resource. Takes an ``allowed`` parameter, which should be a list of lowercase HTTP methods to check against. Usually, this looks like:: # The most generic lookup. self.method_check(request, self._meta.allowed_methods) # A lookup against what's allowed for list-type methods. 
self.method_check(request, self._meta.list_allowed_methods) # A useful check when creating a new endpoint that only handles # GET. self.method_check(request, ['get']) """ if allowed is None: allowed = [] request_method = request.method.lower() if not request_method in allowed: raise ImmediateHttpResponse(response=HttpMethodNotAllowed()) return request_method def is_authorized(self, request, object=None): """ Handles checking of permissions to see if the user has authorization to GET, POST, PUT, or DELETE this resource. If ``object`` is provided, the authorization backend can apply additional row-level permissions checking. """ auth_result = self._meta.authorization.is_authorized(request, object) if isinstance(auth_result, HttpResponse): raise ImmediateHttpResponse(response=auth_result) if not auth_result is True: raise ImmediateHttpResponse(response=HttpUnauthorized()) def is_authenticated(self, request): """ Handles checking if the user is authenticated and dealing with unauthenticated users. Mostly a hook, this uses class assigned to ``authentication`` from ``Resource._meta``. """ # Authenticate the request as needed. auth_result = self._meta.authentication.is_authenticated(request) if isinstance(auth_result, HttpResponse): raise ImmediateHttpResponse(response=auth_result) if not auth_result is True: raise ImmediateHttpResponse(response=HttpUnauthorized()) def throttle_check(self, request): """ Handles checking if the user should be throttled. Mostly a hook, this uses class assigned to ``throttle`` from ``Resource._meta``. """ identifier = self._meta.authentication.get_identifier(request) # Check to see if they should be throttled. if self._meta.throttle.should_be_throttled(identifier): # Throttle limit exceeded. raise ImmediateHttpResponse(response=HttpForbidden()) def log_throttled_access(self, request): """ Handles the recording of the user's access for throttling purposes. Mostly a hook, this uses class assigned to ``throttle`` from ``Resource._meta``. 
""" request_method = request.method.lower() self._meta.throttle.accessed(self._meta.authentication.get_identifier(request), url=request.get_full_path(), request_method=request_method) def build_bundle(self, obj=None, data=None): """ Given either an object, a data dictionary or both, builds a ``Bundle`` for use throughout the ``dehydrate/hydrate`` cycle. If no object is provided, an empty object from ``Resource._meta.object_class`` is created so that attempts to access ``bundle.obj`` do not fail. """ if obj is None: obj = self._meta.object_class() return Bundle(obj, data) def build_filters(self, filters=None): """ Allows for the filtering of applicable objects. This needs to be implemented at the user level.' ``ModelResource`` includes a full working version specific to Django's ``Models``. """ return filters def apply_sorting(self, obj_list, options=None): """ Allows for the sorting of objects being returned. This needs to be implemented at the user level. ``ModelResource`` includes a full working version specific to Django's ``Models``. """ return obj_list # URL-related methods. def get_resource_uri(self, bundle_or_obj): """ This needs to be implemented at the user level. A ``return reverse("api_dispatch_detail", kwargs={'resource_name': self.resource_name, 'pk': object.id})`` should be all that would be needed. ``ModelResource`` includes a full working version specific to Django's ``Models``. """ raise NotImplementedError() def get_resource_list_uri(self): """ Returns a URL specific to this resource's list endpoint. """ kwargs = { 'resource_name': self._meta.resource_name, } if self._meta.api_name is not None: kwargs['api_name'] = self._meta.api_name try: return self._build_reverse_url("api_dispatch_list", kwargs=kwargs) except NoReverseMatch: return None def get_via_uri(self, uri): """ This pulls apart the salient bits of the URI and populates the resource via a ``obj_get``. 
If you need custom behavior based on other portions of the URI, simply override this method. """ try: view, args, kwargs = resolve(uri) except Resolver404: raise NotFound("The URL provided '%s' was not a link to a valid resource." % uri) return self.obj_get(**self.remove_api_resource_names(kwargs)) # Data preparation. def full_dehydrate(self, obj): """ Given an object instance, extract the information from it to populate the resource. """ bundle = Bundle(obj=obj) # Dehydrate each field. for field_name, field_object in self.fields.items(): # A touch leaky but it makes URI resolution work. if isinstance(field_object, RelatedField): field_object.api_name = self._meta.api_name field_object.resource_name = self._meta.resource_name bundle.data[field_name] = field_object.dehydrate(bundle) # Check for an optional method to do further dehydration. method = getattr(self, "dehydrate_%s" % field_name, None) if method: bundle.data[field_name] = method(bundle) bundle = self.dehydrate(bundle) return bundle def dehydrate(self, bundle): """ A hook to allow a final manipulation of data once all fields/methods have built out the dehydrated data. Useful if you need to access more than one dehydrated field or want to annotate on additional data. Must return the modified bundle. """ return bundle def full_hydrate(self, bundle): """ Given a populated bundle, distill it and turn it back into a full-fledged object instance. """ if bundle.obj is None: bundle.obj = self._meta.object_class() for field_name, field_object in self.fields.items(): if field_object.attribute: value = field_object.hydrate(bundle) if value is not None: # We need to avoid populating M2M data here as that will # cause things to blow up. if not getattr(field_object, 'is_related', False): setattr(bundle.obj, field_object.attribute, value) elif not getattr(field_object, 'is_m2m', False): setattr(bundle.obj, field_object.attribute, value.obj) # Check for an optional method to do further hydration. 
method = getattr(self, "hydrate_%s" % field_name, None) if method: bundle = method(bundle) bundle = self.hydrate(bundle) return bundle def hydrate(self, bundle): """ A hook to allow a final manipulation of data once all fields/methods have built out the hydrated data. Useful if you need to access more than one hydrated field or want to annotate on additional data. Must return the modified bundle. """ return bundle def hydrate_m2m(self, bundle): """ Populate the ManyToMany data on the instance. """ if bundle.obj is None: raise HydrationError("You must call 'full_hydrate' before attempting to run 'hydrate_m2m' on %r." % self) for field_name, field_object in self.fields.items(): if not getattr(field_object, 'is_m2m', False): continue if field_object.attribute: # Note that we only hydrate the data, leaving the instance # unmodified. It's up to the user's code to handle this. # The ``ModelResource`` provides a working baseline # in this regard. bundle.data[field_name] = field_object.hydrate_m2m(bundle) for field_name, field_object in self.fields.items(): if not getattr(field_object, 'is_m2m', False): continue method = getattr(self, "hydrate_%s" % field_name, None) if method: method(bundle) return bundle def build_schema(self): """ Returns a dictionary of all the fields on the resource and some properties about those fields. Used by the ``schema/`` endpoint to describe what will be available. """ data = { 'fields': {}, 'default_format': self._meta.default_format, } if self._meta.ordering: data['ordering'] = self._meta.ordering if self._meta.filtering: data['filtering'] = self._meta.filtering for field_name, field_object in self.fields.items(): data['fields'][field_name] = { 'type': field_object.dehydrated_type, 'nullable': field_object.null, 'readonly': field_object.readonly, 'help_text': field_object.help_text, } return data def dehydrate_resource_uri(self, bundle): """ For the automatically included ``resource_uri`` field, dehydrate the URI for the given bundle. 
Returns empty string if no URI can be generated. """ try: return self.get_resource_uri(bundle) except NotImplementedError: return '' except NoReverseMatch: return '' def generate_cache_key(self, *args, **kwargs): """ Creates a unique-enough cache key. This is based off the current api_name/resource_name/args/kwargs. """ smooshed = [] for key, value in kwargs.items(): smooshed.append("%s=%s" % (key, value)) # Use a list plus a ``.join()`` because it's faster than concatenation. return "%s:%s:%s:%s" % (self._meta.api_name, self._meta.resource_name, ':'.join(args), ':'.join(smooshed)) # Data access methods. def get_object_list(self, request): """ A hook to allow making returning the list of available objects. This needs to be implemented at the user level. ``ModelResource`` includes a full working version specific to Django's ``Models``. """ raise NotImplementedError() def apply_authorization_limits(self, request, object_list): """ Allows the ``Authorization`` class to further limit the object list. Also a hook to customize per ``Resource``. """ if hasattr(self._meta.authorization, 'apply_limits'): object_list = self._meta.authorization.apply_limits(request, object_list) return object_list def obj_get_list(self, request=None, **kwargs): """ Fetches the list of objects available on the resource. This needs to be implemented at the user level. ``ModelResource`` includes a full working version specific to Django's ``Models``. """ raise NotImplementedError() def cached_obj_get_list(self, request=None, **kwargs): """ A version of ``obj_get_list`` that uses the cache as a means to get commonly-accessed data faster. """ cache_key = self.generate_cache_key('list', **kwargs) obj_list = self._meta.cache.get(cache_key) if obj_list is None: obj_list = self.obj_get_list(request=request, **kwargs) self._meta.cache.set(cache_key, obj_list) return obj_list def obj_get(self, request=None, **kwargs): """ Fetches an individual object on the resource. 
This needs to be implemented at the user level. If the object can not be found, this should raise a ``NotFound`` exception. ``ModelResource`` includes a full working version specific to Django's ``Models``. """ raise NotImplementedError() def cached_obj_get(self, request=None, **kwargs): """ A version of ``obj_get`` that uses the cache as a means to get commonly-accessed data faster. """ cache_key = self.generate_cache_key('detail', **kwargs) bundle = self._meta.cache.get(cache_key) if bundle is None: bundle = self.obj_get(request=request, **kwargs) self._meta.cache.set(cache_key, bundle) return bundle def obj_create(self, bundle, request=None, **kwargs): """ Creates a new object based on the provided data. This needs to be implemented at the user level. ``ModelResource`` includes a full working version specific to Django's ``Models``. """ raise NotImplementedError() def obj_update(self, bundle, request=None, **kwargs): """ Updates an existing object (or creates a new object) based on the provided data. This needs to be implemented at the user level. ``ModelResource`` includes a full working version specific to Django's ``Models``. """ raise NotImplementedError() def obj_delete_list(self, request=None, **kwargs): """ Deletes an entire list of objects. This needs to be implemented at the user level. ``ModelResource`` includes a full working version specific to Django's ``Models``. """ raise NotImplementedError() def obj_delete(self, request=None, **kwargs): """ Deletes a single object. This needs to be implemented at the user level. ``ModelResource`` includes a full working version specific to Django's ``Models``. """ raise NotImplementedError() def create_response(self, request, data): """ Extracts the common "which-format/serialize/return-response" cycle. Mostly a useful shortcut/hook. 
""" desired_format = self.determine_format(request) serialized = self.serialize(request, data, desired_format) return HttpResponse(content=serialized, content_type=build_content_type(desired_format)) def is_valid(self, bundle, request=None): """ Handles checking if the data provided by the user is valid. Mostly a hook, this uses class assigned to ``validation`` from ``Resource._meta``. If validation fails, an error is raised with the error messages serialized inside it. """ errors = self._meta.validation.is_valid(bundle, request) if len(errors): if request: desired_format = self.determine_format(request) else: desired_format = self._meta.default_format serialized = self.serialize(request, errors, desired_format) response = HttpBadRequest(content=serialized, content_type=build_content_type(desired_format)) raise ImmediateHttpResponse(response=response) def rollback(self, bundles): """ Given the list of bundles, delete all objects pertaining to those bundles. This needs to be implemented at the user level. No exceptions should be raised if possible. ``ModelResource`` includes a full working version specific to Django's ``Models``. """ raise NotImplementedError() # Views. def get_list(self, request, **kwargs): """ Returns a serialized list of resources. Calls ``obj_get_list`` to provide the data, then handles that result set and serializes it. Should return a HttpResponse (200 OK). """ # TODO: Uncached for now. Invalidation that works for everyone may be # impossible. objects = self.obj_get_list(request=request, **self.remove_api_resource_names(kwargs)) sorted_objects = self.apply_sorting(objects, options=request.GET) paginator = Paginator(request.GET, sorted_objects, resource_uri=self.get_resource_list_uri(), limit=self._meta.limit) to_be_serialized = paginator.page() # Dehydrate the bundles in preparation for serialization. 
to_be_serialized['objects'] = [self.full_dehydrate(obj=obj) for obj in to_be_serialized['objects']] return self.create_response(request, to_be_serialized) def get_detail(self, request, **kwargs): """ Returns a single serialized resource. Calls ``cached_obj_get/obj_get`` to provide the data, then handles that result set and serializes it. Should return a HttpResponse (200 OK). """ try: obj = self.cached_obj_get(request=request, **self.remove_api_resource_names(kwargs)) except ObjectDoesNotExist: return HttpGone() except MultipleObjectsReturned: return HttpMultipleChoices("More than one resource is found at this URI.") bundle = self.full_dehydrate(obj) return self.create_response(request, bundle) def put_list(self, request, **kwargs): """ Replaces a collection of resources with another collection. Calls ``delete_list`` to clear out the collection then ``obj_create`` with the provided the data to create the new collection. Return ``HttpAccepted`` (204 No Content). """ deserialized = self.deserialize(request, request.raw_post_data, format=request.META.get('CONTENT_TYPE', 'application/json')) if not 'objects' in deserialized: raise BadRequest("Invalid data sent.") self.obj_delete_list(request=request, **self.remove_api_resource_names(kwargs)) bundles_seen = [] for object_data in deserialized['objects']: bundle = self.build_bundle(data=dict_strip_unicode_keys(object_data)) # Attempt to be transactional, deleting any previously created # objects if validation fails. try: self.is_valid(bundle, request) except ImmediateHttpResponse: self.rollback(bundles_seen) raise self.obj_create(bundle, request=request) bundles_seen.append(bundle) return HttpAccepted() def put_detail(self, request, **kwargs): """ Either updates an existing resource or creates a new one with the provided data. Calls ``obj_update`` with the provided data first, but falls back to ``obj_create`` if the object does not already exist. If a new resource is created, return ``HttpCreated`` (201 Created). 
If an existing resource is modified, return ``HttpAccepted`` (204 No Content). """ deserialized = self.deserialize(request, request.raw_post_data, format=request.META.get('CONTENT_TYPE', 'application/json')) bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized)) self.is_valid(bundle, request) try: updated_bundle = self.obj_update(bundle, request=request, pk=kwargs.get('pk')) return HttpAccepted() except: updated_bundle = self.obj_create(bundle, request=request, pk=kwargs.get('pk')) return HttpCreated(location=self.get_resource_uri(updated_bundle)) def post_list(self, request, **kwargs): """ Creates a new resource/object with the provided data. Calls ``obj_create`` with the provided data and returns a response with the new resource's location. If a new resource is created, return ``HttpCreated`` (201 Created). """ deserialized = self.deserialize(request, request.raw_post_data, format=request.META.get('CONTENT_TYPE', 'application/json')) bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized)) self.is_valid(bundle, request) updated_bundle = self.obj_create(bundle, request=request) return HttpCreated(location=self.get_resource_uri(updated_bundle)) def post_detail(self, request, **kwargs): """ Creates a new subcollection of the resource under a resource. This is not implemented by default because most people's data models aren't self-referential. If a new resource is created, return ``HttpCreated`` (201 Created). """ return HttpNotImplemented() def delete_list(self, request, **kwargs): """ Destroys a collection of resources/objects. Calls ``obj_delete_list``. If the resources are deleted, return ``HttpAccepted`` (204 No Content). """ self.obj_delete_list(request=request, **self.remove_api_resource_names(kwargs)) return HttpAccepted() def delete_detail(self, request, **kwargs): """ Destroys a single resource/object. Calls ``obj_delete``. If the resource is deleted, return ``HttpAccepted`` (204 No Content). 
If the resource did not exist, return ``HttpGone`` (410 Gone). """ try: self.obj_delete(request=request, **self.remove_api_resource_names(kwargs)) return HttpAccepted() except NotFound: return HttpGone() def get_schema(self, request, **kwargs): """ Returns a serialized form of the schema of the resource. Calls ``build_schema`` to generate the data. This method only responds to HTTP GET. Should return a HttpResponse (200 OK). """ self.method_check(request, allowed=['get']) self.is_authenticated(request) self.throttle_check(request) self.log_throttled_access(request) return self.create_response(request, self.build_schema()) def get_multiple(self, request, **kwargs): """ Returns a serialized list of resources based on the identifiers from the URL. Calls ``obj_get`` to fetch only the objects requested. This method only responds to HTTP GET. Should return a HttpResponse (200 OK). """ self.method_check(request, allowed=['get']) self.is_authenticated(request) self.throttle_check(request) # Rip apart the list then iterate. 
obj_pks = kwargs.get('pk_list', '').split(';') objects = [] not_found = [] for pk in obj_pks: try: obj = self.obj_get(request, pk=pk) bundle = self.full_dehydrate(obj) objects.append(bundle) except ObjectDoesNotExist: not_found.append(pk) object_list = { 'objects': objects, } if len(not_found): object_list['not_found'] = not_found self.log_throttled_access(request) return self.create_response(request, object_list) class ModelDeclarativeMetaclass(DeclarativeMetaclass): def __new__(cls, name, bases, attrs): new_class = super(ModelDeclarativeMetaclass, cls).__new__(cls, name, bases, attrs) fields = getattr(new_class._meta, 'fields', []) excludes = getattr(new_class._meta, 'excludes', []) field_names = new_class.base_fields.keys() for field_name in field_names: if field_name == 'resource_uri': continue if field_name in new_class.declared_fields: continue if len(fields) and not field_name in fields: del(new_class.base_fields[field_name]) if len(excludes) and field_name in excludes: del(new_class.base_fields[field_name]) # Add in the new fields. new_class.base_fields.update(new_class.get_fields(fields, excludes)) if getattr(new_class._meta, 'include_absolute_url', True): if not 'absolute_url' in new_class.base_fields: new_class.base_fields['absolute_url'] = CharField(attribute='get_absolute_url', readonly=True) elif 'absolute_url' in new_class.base_fields and not 'absolute_url' in attrs: del(new_class.base_fields['absolute_url']) return new_class class ModelResource(Resource): """ A subclass of ``Resource`` designed to work with Django's ``Models``. This class will introspect a given ``Model`` and build a field list based on the fields found on the model (excluding relational fields). Given that it is aware of Django's ORM, it also handles the CRUD data operations of the resource. """ __metaclass__ = ModelDeclarativeMetaclass @classmethod def should_skip_field(cls, field): """ Given a Django model field, return if it should be included in the contributed ApiFields. 
""" # Ignore certain fields (related fields). if getattr(field, 'rel'): return True return False @classmethod def api_field_from_django_field(cls, f, default=CharField): """ Returns the field type that would likely be associated with each Django type. """ result = default if f.get_internal_type() in ('DateField', 'DateTimeField'): result = DateTimeField elif f.get_internal_type() in ('BooleanField', 'NullBooleanField'): result = BooleanField elif f.get_internal_type() in ('DecimalField', 'FloatField'): result = FloatField elif f.get_internal_type() in ('IntegerField', 'PositiveIntegerField', 'PositiveSmallIntegerField', 'SmallIntegerField'): result = IntegerField elif f.get_internal_type() in ('FileField', 'ImageField'): result = FileField # TODO: Perhaps enable these via introspection. The reason they're not enabled # by default is the very different ``__init__`` they have over # the other fields. # elif f.get_internal_type() == 'ForeignKey': # result = ForeignKey # elif f.get_internal_type() == 'ManyToManyField': # result = ManyToManyField return result @classmethod def get_fields(cls, fields=None, excludes=None): """ Given any explicit fields to include and fields to exclude, add additional fields based on the associated model. 
""" final_fields = {} fields = fields or [] excludes = excludes or [] if not cls._meta.object_class: return final_fields for f in cls._meta.object_class._meta.fields: # If the field name is already present, skip if f.name in cls.base_fields: continue # If field is not present in explicit field listing, skip if fields and f.name not in fields: continue # If field is in exclude list, skip if excludes and f.name in excludes: continue if cls.should_skip_field(f): continue api_field_class = cls.api_field_from_django_field(f) kwargs = { 'attribute': f.name, } if f.null is True: kwargs['null'] = True kwargs['unique'] = f.unique if not f.null and f.blank is True: kwargs['default'] = '' if f.get_internal_type() == 'TextField': kwargs['default'] = '' if f.has_default(): kwargs['default'] = f.default final_fields[f.name] = api_field_class(**kwargs) final_fields[f.name].instance_name = f.name return final_fields def build_filters(self, filters=None): """ Given a dictionary of filters, create the necessary ORM-level filters. Keys should be resource fields, **NOT** model fields. Valid values are either a list of Django filter types (i.e. ``['startswith', 'exact', 'lte']``), the ``ALL`` constant or the ``ALL_WITH_RELATIONS`` constant. """ # At the declarative level: # filtering = { # 'resource_field_name': ['exact', 'startswith', 'endswith', 'contains'], # 'resource_field_name_2': ['exact', 'gt', 'gte', 'lt', 'lte', 'range'], # 'resource_field_name_3': ALL, # 'resource_field_name_4': ALL_WITH_RELATIONS, # ... # } # Accepts the filters as a dict. None by default, meaning no filters. if filters is None: filters = {} qs_filters = {} for filter_expr, value in filters.items(): filter_bits = filter_expr.split(LOOKUP_SEP) if not filter_bits[0] in self.fields: # It's not a field we know about. Move along citizen. continue if not filter_bits[0] in self._meta.filtering: raise InvalidFilterError("The '%s' field does not allow filtering." 
% filter_bits[0]) if filter_bits[-1] in QUERY_TERMS.keys(): filter_type = filter_bits.pop() else: filter_type = 'exact' # Check to see if it's allowed lookup type. if not self._meta.filtering[filter_bits[0]] in (ALL, ALL_WITH_RELATIONS): # Must be an explicit whitelist. if not filter_type in self._meta.filtering[filter_bits[0]]: raise InvalidFilterError("'%s' is not an allowed filter on the '%s' field." % (filter_expr, filter_bits[0])) # Check to see if it's a relational lookup and if that's allowed. if len(filter_bits) > 1: if not self._meta.filtering[filter_bits[0]] == ALL_WITH_RELATIONS: raise InvalidFilterError("Lookups are not allowed more than one level deep on the '%s' field." % filter_bits[0]) if self.fields[filter_bits[0]].attribute is None: raise InvalidFilterError("The '%s' field has no 'attribute' for searching with." % filter_bits[0]) if value in ['true', 'True', True]: value = True elif value in ['false', 'False', False]: value = False elif value in ('nil', 'none', 'None', None): value = None db_field_name = LOOKUP_SEP.join([self.fields[filter_bits[0]].attribute] + filter_bits[1:]) qs_filter = "%s%s%s" % (db_field_name, LOOKUP_SEP, filter_type) qs_filters[qs_filter] = value return dict_strip_unicode_keys(qs_filters) def apply_sorting(self, obj_list, options=None): """ Given a dictionary of options, apply some ORM-level sorting to the provided ``QuerySet``. Looks for the ``sort_by`` key and handles either ascending (just the field name) or descending (the field name with a ``-`` in front). The field name should be the resource field, **NOT** model field. """ if options is None: options = {} if not 'sort_by' in options: # Nothing to alter the sort order. Return what we've got. 
return obj_list order_by_args = [] if hasattr(options, 'getlist'): sort_bits = options.getlist('sort_by') else: sort_bits = options.get('sort_by') if not isinstance(sort_bits, (list, tuple)): sort_bits = [sort_bits] for sort_by in sort_bits: sort_by_bits = sort_by.split(LOOKUP_SEP) field_name = sort_by_bits[0] order = '' if sort_by_bits[0].startswith('-'): field_name = sort_by_bits[0][1:] order = '-' if not field_name in self.fields: # It's not a field we know about. Move along citizen. raise InvalidSortError("No matching '%s' field for ordering on." % field_name) if not field_name in self._meta.ordering: raise InvalidSortError("The '%s' field does not allow ordering." % field_name) if self.fields[field_name].attribute is None: raise InvalidSortError("The '%s' field has no 'attribute' for ordering with." % field_name) order_by_args.append("%s%s" % (order, LOOKUP_SEP.join([self.fields[field_name].attribute] + sort_by_bits[1:]))) return obj_list.order_by(*order_by_args) def get_object_list(self, request): """ An ORM-specific implementation of ``get_object_list``. Returns a queryset that may have been limited by authorization or other overrides. """ base_object_list = self._meta.queryset # Limit it as needed. authed_object_list = self.apply_authorization_limits(request, base_object_list) return authed_object_list def obj_get_list(self, request=None, **kwargs): """ A ORM-specific implementation of ``obj_get_list``. Takes an optional ``request`` object, whose ``GET`` dictionary can be used to narrow the query. """ filters = None if hasattr(request, 'GET'): filters = request.GET applicable_filters = self.build_filters(filters=filters) try: return self.get_object_list(request).filter(**applicable_filters) except ValueError, e: raise NotFound("Invalid resource lookup data provided (mismatched type).") def obj_get(self, request=None, **kwargs): """ A ORM-specific implementation of ``obj_get``. 
Takes optional ``kwargs``, which are used to narrow the query to find the instance. """ try: return self.get_object_list(request).get(**kwargs) except ValueError, e: raise NotFound("Invalid resource lookup data provided (mismatched type).") def obj_create(self, bundle, request=None, **kwargs): """ A ORM-specific implementation of ``obj_create``. """ bundle.obj = self._meta.object_class() for key, value in kwargs.items(): setattr(bundle.obj, key, value) bundle = self.full_hydrate(bundle) bundle.obj.save() # Now pick up the M2M bits. m2m_bundle = self.hydrate_m2m(bundle) self.save_m2m(m2m_bundle) return bundle def obj_update(self, bundle, request=None, **kwargs): """ A ORM-specific implementation of ``obj_update``. """ if not bundle.obj or not bundle.obj.pk: # Attempt to hydrate data from kwargs before doing a lookup for the object. # This step is needed so certain values (like datetime) will pass model validation. try: bundle.obj = self.get_object_list(request).model() bundle.data.update(kwargs) bundle = self.full_hydrate(bundle) lookup_kwargs = kwargs.copy() lookup_kwargs.update(dict( (k, getattr(bundle.obj, k)) for k in kwargs.keys() if getattr(bundle.obj, k) is not None)) except: # if there is trouble hydrating the data, fall back to just # using kwargs by itself (usually it only contains a "pk" key # and this will work fine. lookup_kwargs = kwargs try: bundle.obj = self.get_object_list(request).get(**lookup_kwargs) except ObjectDoesNotExist: raise NotFound("A model instance matching the provided arguments could not be found.") bundle = self.full_hydrate(bundle) bundle.obj.save() # Now pick up the M2M bits. m2m_bundle = self.hydrate_m2m(bundle) self.save_m2m(m2m_bundle) return bundle def obj_delete_list(self, request=None, **kwargs): """ A ORM-specific implementation of ``obj_delete_list``. Takes optional ``kwargs``, which can be used to narrow the query. 
""" self.get_object_list(request).filter(**kwargs).delete() def obj_delete(self, request=None, **kwargs): """ A ORM-specific implementation of ``obj_delete``. Takes optional ``kwargs``, which are used to narrow the query to find the instance. """ try: obj = self.get_object_list(request).get(**kwargs) except ObjectDoesNotExist: raise NotFound("A model instance matching the provided arguments could not be found.") obj.delete() def rollback(self, bundles): """ A ORM-specific implementation of ``rollback``. Given the list of bundles, delete all models pertaining to those bundles. """ for bundle in bundles: if bundle.obj and getattr(bundle.obj, 'pk', None): bundle.obj.delete() def save_m2m(self, bundle): """ Handles the saving of related M2M data. Due to the way Django works, the M2M data must be handled after the main instance, which is why this isn't a part of the main ``save`` bits. Currently slightly inefficient in that it will clear out the whole relation and recreate the related data as needed. """ for field_name, field_object in self.fields.items(): if not getattr(field_object, 'is_m2m', False): continue if not field_object.attribute: continue # Get the manager. related_mngr = getattr(bundle.obj, field_object.attribute) if hasattr(related_mngr, 'clear'): # Clear it out, just to be safe. related_mngr.clear() related_objs = [] for related_bundle in bundle.data[field_name]: related_bundle.obj.save() related_objs.append(related_bundle.obj) related_mngr.add(*related_objs) def get_resource_uri(self, bundle_or_obj): """ Handles generating a resource URI for a single resource. Uses the model's ``pk`` in order to create the URI. 
""" kwargs = { 'resource_name': self._meta.resource_name, } if isinstance(bundle_or_obj, Bundle): kwargs['pk'] = bundle_or_obj.obj.pk else: kwargs['pk'] = bundle_or_obj.id if self._meta.api_name is not None: kwargs['api_name'] = self._meta.api_name return self._build_reverse_url("api_dispatch_detail", kwargs=kwargs) class NamespacedModelResource(ModelResource): """ A ModelResource subclass that respects Django namespaces. """ def _build_reverse_url(self, name, args=None, kwargs=None): namespaced = "%s:%s" % (self._meta.urlconf_namespace, name) return reverse(namespaced, args=args, kwargs=kwargs) # Based off of ``piston.utils.coerce_put_post``. Similarly BSD-licensed. # And no, the irony is not lost on me. def convert_post_to_put(request): """ Force Django to process the PUT. """ if request.method == "PUT": if hasattr(request, '_post'): del request._post del request._files try: request.method = "POST" request._load_post_and_files() request.method = "PUT" except AttributeError: request.META['REQUEST_METHOD'] = 'POST' request._load_post_and_files() request.META['REQUEST_METHOD'] = 'PUT' request.PUT = request.POST return request
colinsullivan/bingo-board
bingo_board/tastypie/resources.py
Python
mit
58,556
using System; using System.Collections.Generic; using System.Linq; using System.Web; using System.Web.UI; using System.Web.UI.WebControls; namespace Company { public partial class Site : System.Web.UI.MasterPage { protected void Page_Load(object sender, EventArgs e) { } } }
dnmitev/ASPNET-WebForms
SiteMaps/Company/Site.Master.cs
C#
mit
315
<?php /** * Magento * * NOTICE OF LICENSE * * This source file is subject to the Open Software License (OSL 3.0) * that is bundled with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://opensource.org/licenses/osl-3.0.php * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to license@magentocommerce.com so we can send you a copy immediately. * * DISCLAIMER * * Do not edit or add to this file if you wish to upgrade Magento to newer * versions in the future. If you wish to customize Magento for your * needs please refer to http://www.magentocommerce.com for more information. * * @category Mage * @package Mage_Dataflow * @copyright Copyright (c) 2014 Magento Inc. (http://www.magentocommerce.com) * @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0) */ /** * Convert container abstract * * @category Mage * @package Mage_Dataflow * @author Magento Core Team <core@magentocommerce.com> */ abstract class Mage_Dataflow_Model_Convert_Container_Abstract implements Mage_Dataflow_Model_Convert_Container_Interface { protected $_batchParams = array(); protected $_vars; protected $_profile; protected $_action; protected $_data; protected $_position; public function getVar($key, $default=null) { if (!isset($this->_vars[$key]) || (!is_array($this->_vars[$key]) && strlen($this->_vars[$key]) == 0)) { return $default; } return $this->_vars[$key]; } public function getVars() { return $this->_vars; } public function setVar($key, $value=null) { if (is_array($key) && is_null($value)) { $this->_vars = $key; } else { $this->_vars[$key] = $value; } return $this; } public function getAction() { return $this->_action; } public function setAction(Mage_Dataflow_Model_Convert_Action_Interface $action) { $this->_action = $action; return $this; } public function getProfile() { return $this->_profile; } public function 
setProfile(Mage_Dataflow_Model_Convert_Profile_Interface $profile) { $this->_profile = $profile; return $this; } public function getData() { if (is_null($this->_data) && $this->getProfile()) { $this->_data = $this->getProfile()->getContainer()->getData(); } return $this->_data; } public function setData($data) { if ($this->getProfile()) { $this->getProfile()->getContainer()->setData($data); } $this->_data = $data; return $this; } public function validateDataString($data=null) { if (is_null($data)) { $data = $this->getData(); } if (!is_string($data)) { $this->addException("Invalid data type, expecting string.", Mage_Dataflow_Model_Convert_Exception::FATAL); } return true; } public function validateDataArray($data=null) { if (is_null($data)) { $data = $this->getData(); } if (!is_array($data)) { $this->addException("Invalid data type, expecting array.", Mage_Dataflow_Model_Convert_Exception::FATAL); } return true; } public function validateDataGrid($data=null) { if (is_null($data)) { $data = $this->getData(); } if (!is_array($data) || !is_array(current($data))) { if (count($data)==0) { return true; } $this->addException("Invalid data type, expecting 2D grid array.", Mage_Dataflow_Model_Convert_Exception::FATAL); } return true; } public function getGridFields($grid) { $fields = array(); foreach ($grid as $i=>$row) { foreach ($row as $fieldName=>$data) { if (!in_array($fieldName, $fields)) { $fields[] = $fieldName; } } } return $fields; } public function addException($error, $level=null) { $e = new Mage_Dataflow_Model_Convert_Exception($error); $e->setLevel(!is_null($level) ? 
$level : Mage_Dataflow_Model_Convert_Exception::NOTICE); $e->setContainer($this); $e->setPosition($this->getPosition()); if ($this->getProfile()) { $this->getProfile()->addException($e); } return $e; } public function getPosition() { return $this->_position; } public function setPosition($position) { $this->_position = $position; return $this; } public function setBatchParams($data) { if (is_array($data)) { $this->_batchParams = $data; } return $this; } public function getBatchParams($key = null) { if (!empty($key)) { return isset($this->_batchParams[$key]) ? $this->_batchParams[$key] : null; } return $this->_batchParams; } }
almadaocta/lordbike-production
errors/includes/src/Mage_Dataflow_Model_Convert_Container_Abstract.php
PHP
mit
5,471
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.monitor; import com.fasterxml.jackson.annotation.JsonProperty; /** * Part of MultiTenantDiagnosticSettings. Specifies the settings for a * particular log. */ public class LogSettings { /** * Name of a Diagnostic Log category for a resource type this setting is * applied to. To obtain the list of Diagnostic Log categories for a * resource, first perform a GET diagnostic settings operation. */ @JsonProperty(value = "category") private String category; /** * a value indicating whether this log is enabled. */ @JsonProperty(value = "enabled", required = true) private boolean enabled; /** * the retention policy for this log. */ @JsonProperty(value = "retentionPolicy") private RetentionPolicy retentionPolicy; /** * Get the category value. * * @return the category value */ public String category() { return this.category; } /** * Set the category value. * * @param category the category value to set * @return the LogSettings object itself. */ public LogSettings withCategory(String category) { this.category = category; return this; } /** * Get the enabled value. * * @return the enabled value */ public boolean enabled() { return this.enabled; } /** * Set the enabled value. * * @param enabled the enabled value to set * @return the LogSettings object itself. */ public LogSettings withEnabled(boolean enabled) { this.enabled = enabled; return this; } /** * Get the retentionPolicy value. * * @return the retentionPolicy value */ public RetentionPolicy retentionPolicy() { return this.retentionPolicy; } /** * Set the retentionPolicy value. * * @param retentionPolicy the retentionPolicy value to set * @return the LogSettings object itself. 
*/ public LogSettings withRetentionPolicy(RetentionPolicy retentionPolicy) { this.retentionPolicy = retentionPolicy; return this; } }
martinsawicki/azure-sdk-for-java
azure-mgmt-monitor/src/main/java/com/microsoft/azure/management/monitor/LogSettings.java
Java
mit
2,421
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("ConsoleApplication1")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("ConsoleApplication1")] [assembly: AssemblyCopyright("Copyright © 2014")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("f058ff68-8743-49cb-bea4-2d80be2d568d")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")]
fabiomargarito/ExemplosCursoFundamentosEmArquiteturaDeSoftware
Curso Fundamentos/src/Aula 2/Revisitando OO/Exercício de Fixação 2 - Resolvido/ConsoleApplication1/Properties/AssemblyInfo.cs
C#
mit
1,414
// GridFS // Copyright(c) 2013 Siddharth Mahendraker <siddharth_mahen@me.com> // MIT Licensed exports.GridFS = require('./lib/GridFS'); exports.GridStream = require('./lib/GridStream');
SergejKasper/smartkitchen
server/node_modules/gridfstore/node_modules/GridFS/index.js
JavaScript
mit
187
// @flow class A { x = [1, 2, 3]; y = 4; foo() { this.x = this.x.map(function (z) { this.y; // error, function has wrong this }); } } class B { x = [1, 2, 3]; y = 4; foo() { this.x = this.x.map(function (z) { this.y; // ok, function gets passed correct this }, this); } } class C { x = [1, 2, 3]; y = 4; foo() { this.x = this.x.map(z => { this.y; // ok, arrow binds surrounding context this }); } }
facebook/flow
tests/arraylib/callback_this.js
JavaScript
mit
534
import collectionClass from "./collections.class"; import collectionColor from "./collections.color"; function collectionBackgroundStyles(contentItem) { return ` .${collectionClass(contentItem)} { background-color: #${collectionColor(contentItem)}; } `; } export default collectionBackgroundStyles;
NewSpring/apollos-core
imports/util/collections/collections.backgroundStyles.js
JavaScript
mit
320
<?php /** * @Created By ECMall PhpCacheServer * @Time:2015-01-17 18:28:59 */ if(filemtime(__FILE__) + 600 < time())return false; return array ( 'inbox' => '0', 'outbox' => '0', 'total' => 0, ); ?>
guotao2000/ecmall
temp/caches/0220/9021b7cab81e674df1db5e94e51dced1.cache.php
PHP
mit
220
/* Open Asset Import Library (assimp) ---------------------------------------------------------------------- Copyright (c) 2006-2018, assimp team All rights reserved. Redistribution and use of this software in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the assimp team, nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission of the assimp team. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
---------------------------------------------------------------------- */ #include <assimp/Subdivision.h> #include <assimp/SceneCombiner.h> #include <assimp/SpatialSort.h> #include "ProcessHelper.h" #include <assimp/Vertex.h> #include <assimp/ai_assert.h> #include <stdio.h> using namespace Assimp; void mydummy() {} // ------------------------------------------------------------------------------------------------ /** Subdivider stub class to implement the Catmull-Clarke subdivision algorithm. The * implementation is basing on recursive refinement. Directly evaluating the result is also * possible and much quicker, but it depends on lengthy matrix lookup tables. */ // ------------------------------------------------------------------------------------------------ class CatmullClarkSubdivider : public Subdivider { public: void Subdivide (aiMesh* mesh, aiMesh*& out, unsigned int num, bool discard_input); void Subdivide (aiMesh** smesh, size_t nmesh, aiMesh** out, unsigned int num, bool discard_input); // --------------------------------------------------------------------------- /** Intermediate description of an edge between two corners of a polygon*/ // --------------------------------------------------------------------------- struct Edge { Edge() : ref(0) {} Vertex edge_point, midpoint; unsigned int ref; }; typedef std::vector<unsigned int> UIntVector; typedef std::map<uint64_t,Edge> EdgeMap; // --------------------------------------------------------------------------- // Hashing function to derive an index into an #EdgeMap from two given // 'unsigned int' vertex coordinates (!!distinct coordinates - same // vertex position == same index!!). // NOTE - this leads to rare hash collisions if a) sizeof(unsigned int)>4 // and (id[0]>2^32-1 or id[0]>2^32-1). // MAKE_EDGE_HASH() uses temporaries, so INIT_EDGE_HASH() needs to be put // at the head of every function which is about to use MAKE_EDGE_HASH(). 
// Reason is that the hash is that hash construction needs to hold the // invariant id0<id1 to identify an edge - else two hashes would refer // to the same edge. // --------------------------------------------------------------------------- #define MAKE_EDGE_HASH(id0,id1) (eh_tmp0__=id0,eh_tmp1__=id1,\ (eh_tmp0__<eh_tmp1__?std::swap(eh_tmp0__,eh_tmp1__):mydummy()),(uint64_t)eh_tmp0__^((uint64_t)eh_tmp1__<<32u)) #define INIT_EDGE_HASH_TEMPORARIES()\ unsigned int eh_tmp0__, eh_tmp1__; private: void InternSubdivide (const aiMesh* const * smesh, size_t nmesh,aiMesh** out, unsigned int num); }; // ------------------------------------------------------------------------------------------------ // Construct a subdivider of a specific type Subdivider* Subdivider::Create (Algorithm algo) { switch (algo) { case CATMULL_CLARKE: return new CatmullClarkSubdivider(); }; ai_assert(false); return NULL; // shouldn't happen } // ------------------------------------------------------------------------------------------------ // Call the Catmull Clark subdivision algorithm for one mesh void CatmullClarkSubdivider::Subdivide ( aiMesh* mesh, aiMesh*& out, unsigned int num, bool discard_input ) { ai_assert(mesh != out); Subdivide(&mesh,1,&out,num,discard_input); } // ------------------------------------------------------------------------------------------------ // Call the Catmull Clark subdivision algorithm for multiple meshes void CatmullClarkSubdivider::Subdivide ( aiMesh** smesh, size_t nmesh, aiMesh** out, unsigned int num, bool discard_input ) { ai_assert( NULL != smesh ); ai_assert( NULL != out ); // course, both regions may not overlap ai_assert(smesh<out || smesh+nmesh>out+nmesh); if (!num) { // No subdivision at all. Need to copy all the meshes .. argh. 
if (discard_input) { for (size_t s = 0; s < nmesh; ++s) { out[s] = smesh[s]; smesh[s] = NULL; } } else { for (size_t s = 0; s < nmesh; ++s) { SceneCombiner::Copy(out+s,smesh[s]); } } return; } std::vector<aiMesh*> inmeshes; std::vector<aiMesh*> outmeshes; std::vector<unsigned int> maptbl; inmeshes.reserve(nmesh); outmeshes.reserve(nmesh); maptbl.reserve(nmesh); // Remove pure line and point meshes from the working set to reduce the // number of edge cases the subdivider is forced to deal with. Line and // point meshes are simply passed through. for (size_t s = 0; s < nmesh; ++s) { aiMesh* i = smesh[s]; // FIX - mPrimitiveTypes might not yet be initialized if (i->mPrimitiveTypes && (i->mPrimitiveTypes & (aiPrimitiveType_LINE|aiPrimitiveType_POINT))==i->mPrimitiveTypes) { ASSIMP_LOG_DEBUG("Catmull-Clark Subdivider: Skipping pure line/point mesh"); if (discard_input) { out[s] = i; smesh[s] = NULL; } else { SceneCombiner::Copy(out+s,i); } continue; } outmeshes.push_back(NULL);inmeshes.push_back(i); maptbl.push_back(static_cast<unsigned int>(s)); } // Do the actual subdivision on the preallocated storage. InternSubdivide // *always* assumes that enough storage is available, it does not bother // checking any ranges. ai_assert(inmeshes.size()==outmeshes.size()&&inmeshes.size()==maptbl.size()); if (inmeshes.empty()) { ASSIMP_LOG_WARN("Catmull-Clark Subdivider: Pure point/line scene, I can't do anything"); return; } InternSubdivide(&inmeshes.front(),inmeshes.size(),&outmeshes.front(),num); for (unsigned int i = 0; i < maptbl.size(); ++i) { ai_assert(nullptr != outmeshes[i]); out[maptbl[i]] = outmeshes[i]; } if (discard_input) { for (size_t s = 0; s < nmesh; ++s) { delete smesh[s]; } } } // ------------------------------------------------------------------------------------------------ // Note - this is an implementation of the standard (recursive) Cm-Cl algorithm without further // optimizations (except we're using some nice LUTs). 
A description of the algorithm can be found // here: http://en.wikipedia.org/wiki/Catmull-Clark_subdivision_surface // // The code is mostly O(n), however parts are O(nlogn) which is therefore the algorithm's // expected total runtime complexity. The implementation is able to work in-place on the same // mesh arrays. Calling #InternSubdivide() directly is not encouraged. The code can operate // in-place unless 'smesh' and 'out' are equal (no strange overlaps or reorderings). // Previous data is replaced/deleted then. // ------------------------------------------------------------------------------------------------ void CatmullClarkSubdivider::InternSubdivide ( const aiMesh* const * smesh, size_t nmesh, aiMesh** out, unsigned int num ) { ai_assert(NULL != smesh && NULL != out); INIT_EDGE_HASH_TEMPORARIES(); // no subdivision requested or end of recursive refinement if (!num) { return; } UIntVector maptbl; SpatialSort spatial; // --------------------------------------------------------------------- // 0. Offset table to index all meshes continuously, generate a spatially // sorted representation of all vertices in all meshes. // --------------------------------------------------------------------- typedef std::pair<unsigned int,unsigned int> IntPair; std::vector<IntPair> moffsets(nmesh); unsigned int totfaces = 0, totvert = 0; for (size_t t = 0; t < nmesh; ++t) { const aiMesh* mesh = smesh[t]; spatial.Append(mesh->mVertices,mesh->mNumVertices,sizeof(aiVector3D),false); moffsets[t] = IntPair(totfaces,totvert); totfaces += mesh->mNumFaces; totvert += mesh->mNumVertices; } spatial.Finalize(); const unsigned int num_unique = spatial.GenerateMappingTable(maptbl,ComputePositionEpsilon(smesh,nmesh)); #define FLATTEN_VERTEX_IDX(mesh_idx, vert_idx) (moffsets[mesh_idx].second+vert_idx) #define FLATTEN_FACE_IDX(mesh_idx, face_idx) (moffsets[mesh_idx].first+face_idx) // --------------------------------------------------------------------- // 1. 
Compute the centroid point for all faces // --------------------------------------------------------------------- std::vector<Vertex> centroids(totfaces); unsigned int nfacesout = 0; for (size_t t = 0, n = 0; t < nmesh; ++t) { const aiMesh* mesh = smesh[t]; for (unsigned int i = 0; i < mesh->mNumFaces;++i,++n) { const aiFace& face = mesh->mFaces[i]; Vertex& c = centroids[n]; for (unsigned int a = 0; a < face.mNumIndices;++a) { c += Vertex(mesh,face.mIndices[a]); } c /= static_cast<float>(face.mNumIndices); nfacesout += face.mNumIndices; } } { // we want edges to go away before the recursive calls so begin a new scope EdgeMap edges; // --------------------------------------------------------------------- // 2. Set each edge point to be the average of all neighbouring // face points and original points. Every edge exists twice // if there is a neighboring face. // --------------------------------------------------------------------- for (size_t t = 0; t < nmesh; ++t) { const aiMesh* mesh = smesh[t]; for (unsigned int i = 0; i < mesh->mNumFaces;++i) { const aiFace& face = mesh->mFaces[i]; for (unsigned int p =0; p< face.mNumIndices; ++p) { const unsigned int id[] = { face.mIndices[p], face.mIndices[p==face.mNumIndices-1?0:p+1] }; const unsigned int mp[] = { maptbl[FLATTEN_VERTEX_IDX(t,id[0])], maptbl[FLATTEN_VERTEX_IDX(t,id[1])] }; Edge& e = edges[MAKE_EDGE_HASH(mp[0],mp[1])]; e.ref++; if (e.ref<=2) { if (e.ref==1) { // original points (end points) - add only once e.edge_point = e.midpoint = Vertex(mesh,id[0])+Vertex(mesh,id[1]); e.midpoint *= 0.5f; } e.edge_point += centroids[FLATTEN_FACE_IDX(t,i)]; } } } } // --------------------------------------------------------------------- // 3. 
Normalize edge points // --------------------------------------------------------------------- {unsigned int bad_cnt = 0; for (EdgeMap::iterator it = edges.begin(); it != edges.end(); ++it) { if ((*it).second.ref < 2) { ai_assert((*it).second.ref); ++bad_cnt; } (*it).second.edge_point *= 1.f/((*it).second.ref+2.f); } if (bad_cnt) { // Report the number of bad edges. bad edges are referenced by less than two // faces in the mesh. They occur at outer model boundaries in non-closed // shapes. ASSIMP_LOG_DEBUG_F("Catmull-Clark Subdivider: got ", bad_cnt, " bad edges touching only one face (totally ", static_cast<unsigned int>(edges.size()), " edges). "); }} // --------------------------------------------------------------------- // 4. Compute a vertex-face adjacency table. We can't reuse the code // from VertexTriangleAdjacency because we need the table for multiple // meshes and out vertex indices need to be mapped to distinct values // first. // --------------------------------------------------------------------- UIntVector faceadjac(nfacesout), cntadjfac(maptbl.size(),0), ofsadjvec(maptbl.size()+1,0); { for (size_t t = 0; t < nmesh; ++t) { const aiMesh* const minp = smesh[t]; for (unsigned int i = 0; i < minp->mNumFaces; ++i) { const aiFace& f = minp->mFaces[i]; for (unsigned int n = 0; n < f.mNumIndices; ++n) { ++cntadjfac[maptbl[FLATTEN_VERTEX_IDX(t,f.mIndices[n])]]; } } } unsigned int cur = 0; for (size_t i = 0; i < cntadjfac.size(); ++i) { ofsadjvec[i+1] = cur; cur += cntadjfac[i]; } for (size_t t = 0; t < nmesh; ++t) { const aiMesh* const minp = smesh[t]; for (unsigned int i = 0; i < minp->mNumFaces; ++i) { const aiFace& f = minp->mFaces[i]; for (unsigned int n = 0; n < f.mNumIndices; ++n) { faceadjac[ofsadjvec[1+maptbl[FLATTEN_VERTEX_IDX(t,f.mIndices[n])]]++] = FLATTEN_FACE_IDX(t,i); } } } // check the other way round for consistency #ifdef ASSIMP_BUILD_DEBUG for (size_t t = 0; t < ofsadjvec.size()-1; ++t) { for (unsigned int m = 0; m < cntadjfac[t]; ++m) { 
const unsigned int fidx = faceadjac[ofsadjvec[t]+m]; ai_assert(fidx < totfaces); for (size_t n = 1; n < nmesh; ++n) { if (moffsets[n].first > fidx) { const aiMesh* msh = smesh[--n]; const aiFace& f = msh->mFaces[fidx-moffsets[n].first]; bool haveit = false; for (unsigned int i = 0; i < f.mNumIndices; ++i) { if (maptbl[FLATTEN_VERTEX_IDX(n,f.mIndices[i])]==(unsigned int)t) { haveit = true; break; } } ai_assert(haveit); if (!haveit) { ASSIMP_LOG_DEBUG("Catmull-Clark Subdivider: Index not used"); } break; } } } } #endif } #define GET_ADJACENT_FACES_AND_CNT(vidx,fstartout,numout) \ fstartout = &faceadjac[ofsadjvec[vidx]], numout = cntadjfac[vidx] typedef std::pair<bool,Vertex> TouchedOVertex; std::vector<TouchedOVertex > new_points(num_unique,TouchedOVertex(false,Vertex())); // --------------------------------------------------------------------- // 5. Spawn a quad from each face point to the corresponding edge points // the original points being the fourth quad points. // --------------------------------------------------------------------- for (size_t t = 0; t < nmesh; ++t) { const aiMesh* const minp = smesh[t]; aiMesh* const mout = out[t] = new aiMesh(); for (unsigned int a = 0; a < minp->mNumFaces; ++a) { mout->mNumFaces += minp->mFaces[a].mNumIndices; } // We need random access to the old face buffer, so reuse is not possible. 
mout->mFaces = new aiFace[mout->mNumFaces]; mout->mNumVertices = mout->mNumFaces*4; mout->mVertices = new aiVector3D[mout->mNumVertices]; // quads only, keep material index mout->mPrimitiveTypes = aiPrimitiveType_POLYGON; mout->mMaterialIndex = minp->mMaterialIndex; if (minp->HasNormals()) { mout->mNormals = new aiVector3D[mout->mNumVertices]; } if (minp->HasTangentsAndBitangents()) { mout->mTangents = new aiVector3D[mout->mNumVertices]; mout->mBitangents = new aiVector3D[mout->mNumVertices]; } for(unsigned int i = 0; minp->HasTextureCoords(i); ++i) { mout->mTextureCoords[i] = new aiVector3D[mout->mNumVertices]; mout->mNumUVComponents[i] = minp->mNumUVComponents[i]; } for(unsigned int i = 0; minp->HasVertexColors(i); ++i) { mout->mColors[i] = new aiColor4D[mout->mNumVertices]; } mout->mNumVertices = mout->mNumFaces<<2u; for (unsigned int i = 0, v = 0, n = 0; i < minp->mNumFaces;++i) { const aiFace& face = minp->mFaces[i]; for (unsigned int a = 0; a < face.mNumIndices;++a) { // Get a clean new face. aiFace& faceOut = mout->mFaces[n++]; faceOut.mIndices = new unsigned int [faceOut.mNumIndices = 4]; // Spawn a new quadrilateral (ccw winding) for this original point between: // a) face centroid centroids[FLATTEN_FACE_IDX(t,i)].SortBack(mout,faceOut.mIndices[0]=v++); // b) adjacent edge on the left, seen from the centroid const Edge& e0 = edges[MAKE_EDGE_HASH(maptbl[FLATTEN_VERTEX_IDX(t,face.mIndices[a])], maptbl[FLATTEN_VERTEX_IDX(t,face.mIndices[a==face.mNumIndices-1?0:a+1]) ])]; // fixme: replace with mod face.mNumIndices? // c) adjacent edge on the right, seen from the centroid const Edge& e1 = edges[MAKE_EDGE_HASH(maptbl[FLATTEN_VERTEX_IDX(t,face.mIndices[a])], maptbl[FLATTEN_VERTEX_IDX(t,face.mIndices[!a?face.mNumIndices-1:a-1]) ])]; // fixme: replace with mod face.mNumIndices? 
e0.edge_point.SortBack(mout,faceOut.mIndices[3]=v++); e1.edge_point.SortBack(mout,faceOut.mIndices[1]=v++); // d= original point P with distinct index i // F := 0 // R := 0 // n := 0 // for each face f containing i // F := F+ centroid of f // R := R+ midpoint of edge of f from i to i+1 // n := n+1 // // (F+2R+(n-3)P)/n const unsigned int org = maptbl[FLATTEN_VERTEX_IDX(t,face.mIndices[a])]; TouchedOVertex& ov = new_points[org]; if (!ov.first) { ov.first = true; const unsigned int* adj; unsigned int cnt; GET_ADJACENT_FACES_AND_CNT(org,adj,cnt); if (cnt < 3) { ov.second = Vertex(minp,face.mIndices[a]); } else { Vertex F,R; for (unsigned int o = 0; o < cnt; ++o) { ai_assert(adj[o] < totfaces); F += centroids[adj[o]]; // adj[0] is a global face index - search the face in the mesh list const aiMesh* mp = NULL; size_t nidx; if (adj[o] < moffsets[0].first) { mp = smesh[nidx=0]; } else { for (nidx = 1; nidx<= nmesh; ++nidx) { if (nidx == nmesh ||moffsets[nidx].first > adj[o]) { mp = smesh[--nidx]; break; } } } ai_assert(adj[o]-moffsets[nidx].first < mp->mNumFaces); const aiFace& f = mp->mFaces[adj[o]-moffsets[nidx].first]; bool haveit = false; // find our original point in the face for (unsigned int m = 0; m < f.mNumIndices; ++m) { if (maptbl[FLATTEN_VERTEX_IDX(nidx,f.mIndices[m])] == org) { // add *both* edges. this way, we can be sure that we add // *all* adjacent edges to R. In a closed shape, every // edge is added twice - so we simply leave out the // factor 2.f in the amove formula and get the right // result. const Edge& c0 = edges[MAKE_EDGE_HASH(org,maptbl[FLATTEN_VERTEX_IDX( nidx,f.mIndices[!m?f.mNumIndices-1:m-1])])]; // fixme: replace with mod face.mNumIndices? const Edge& c1 = edges[MAKE_EDGE_HASH(org,maptbl[FLATTEN_VERTEX_IDX( nidx,f.mIndices[m==f.mNumIndices-1?0:m+1])])]; // fixme: replace with mod face.mNumIndices? 
R += c0.midpoint+c1.midpoint; haveit = true; break; } } // this invariant *must* hold if the vertex-to-face adjacency table is valid ai_assert(haveit); if ( !haveit ) { ASSIMP_LOG_WARN( "OBJ: no name for material library specified." ); } } const float div = static_cast<float>(cnt), divsq = 1.f/(div*div); ov.second = Vertex(minp,face.mIndices[a])*((div-3.f) / div) + R*divsq + F*divsq; } } ov.second.SortBack(mout,faceOut.mIndices[2]=v++); } } } } // end of scope for edges, freeing its memory // --------------------------------------------------------------------- // 7. Apply the next subdivision step. // --------------------------------------------------------------------- if (num != 1) { std::vector<aiMesh*> tmp(nmesh); InternSubdivide (out,nmesh,&tmp.front(),num-1); for (size_t i = 0; i < nmesh; ++i) { delete out[i]; out[i] = tmp[i]; } } }
MadManRises/Madgine
shared/assimp/code/Subdivision.cpp
C++
mit
23,758
namespace SIM.Tool.Windows.MainWindowComponents { using System.Windows; using SIM.Instances; using SIM.Tool.Base; using SIM.Tool.Base.Plugins; using SIM.Tool.Windows.Dialogs; using JetBrains.Annotations; [UsedImplicitly] public class DatabaseManagerButton : IMainWindowButton { #region Public methods public bool IsEnabled(Window mainWindow, Instance instance) { return true; } public void OnClick(Window mainWindow, Instance instance) { if (EnvironmentHelper.CheckSqlServer()) { WindowHelper.ShowDialog(new DatabasesDialog(), mainWindow); } } #endregion } }
sergeyshushlyapin/Sitecore-Instance-Manager
src/SIM.Tool.Windows/MainWindowComponents/DatabaseManagerButton.cs
C#
mit
649
package com.xeiam.xchange.lakebtc.marketdata; import static org.fest.assertions.api.Assertions.assertThat; import java.io.IOException; import java.io.InputStream; import java.math.BigDecimal; import org.junit.Test; import com.fasterxml.jackson.databind.ObjectMapper; import com.xeiam.xchange.lakebtc.dto.marketdata.LakeBTCOrderBook; import com.xeiam.xchange.lakebtc.dto.marketdata.LakeBTCTicker; import com.xeiam.xchange.lakebtc.dto.marketdata.LakeBTCTickers; public class LakeBTCMarketDataJsonTest { @Test public void testDeserializeTicker() throws IOException { // Read in the JSON from the example resources InputStream is = LakeBTCMarketDataJsonTest.class.getResourceAsStream("/marketdata/example-ticker-data.json"); // Use Jackson to parse it ObjectMapper mapper = new ObjectMapper(); LakeBTCTickers tickers = mapper.readValue(is, LakeBTCTickers.class); LakeBTCTicker cnyTicker = tickers.getCny(); assertThat(cnyTicker.getAsk()).isEqualTo("3524.07"); assertThat(cnyTicker.getBid()).isEqualTo("3517.13"); assertThat(cnyTicker.getLast()).isEqualTo("3524.07"); assertThat(cnyTicker.getHigh()).isEqualTo("3584.97"); assertThat(cnyTicker.getLow()).isEqualTo("3480.07"); assertThat(cnyTicker.getVolume()).isEqualTo("5964.7677"); LakeBTCTicker usdTicker = tickers.getUsd(); assertThat(usdTicker.getAsk()).isEqualTo("564.63"); assertThat(usdTicker.getBid()).isEqualTo("564.63"); assertThat(usdTicker.getLast()).isEqualTo("564.4"); assertThat(usdTicker.getHigh()).isEqualTo("573.83"); assertThat(usdTicker.getLow()).isEqualTo("557.7"); assertThat(usdTicker.getVolume()).isEqualTo("3521.2782"); } @Test public void testDeserializeOrderBook() throws IOException { // Read in the JSON from the example resources InputStream is = LakeBTCMarketDataJsonTest.class.getResourceAsStream("/marketdata/example-orderbook-data.json"); // Use Jackson to parse it ObjectMapper mapper = new ObjectMapper(); LakeBTCOrderBook orderBook = mapper.readValue(is, LakeBTCOrderBook.class); BigDecimal[][] asks = 
orderBook.getAsks(); assertThat(asks).hasSize(3); assertThat(asks[0][0]).isEqualTo("564.87"); assertThat(asks[0][1]).isEqualTo("22.371"); BigDecimal[][] bids = orderBook.getBids(); assertThat(bids).hasSize(3); assertThat(bids[2][0]).isEqualTo("558.08"); assertThat(bids[2][1]).isEqualTo("0.9878"); } }
coingecko/XChange
xchange-lakebtc/src/test/java/com/xeiam/xchange/lakebtc/marketdata/LakeBTCMarketDataJsonTest.java
Java
mit
2,443
using System; using BEPUutilities.DataStructures; namespace BEPUutilities.ResourceManagement { /// <summary> /// Uses a spinlock to safely access resources. /// </summary> /// <typeparam name="T">Type of object to store in the pool.</typeparam> public class LockingResourcePool<T> : ResourcePool<T> where T : class, new() { private readonly ConcurrentDeque<T> stack; /// <summary> /// Constructs a new thread-unsafe resource pool. /// </summary> /// <param name="initialResourceCount">Number of resources to include in the pool by default.</param> /// <param name="initializer">Function to initialize new instances in the resource pool with.</param> public LockingResourcePool(int initialResourceCount, Action<T> initializer) { InstanceInitializer = initializer; stack = new ConcurrentDeque<T>(initialResourceCount); Initialize(initialResourceCount); } /// <summary> /// Constructs a new thread-unsafe resource pool. /// </summary> /// <param name="initialResourceCount">Number of resources to include in the pool by default.</param> public LockingResourcePool(int initialResourceCount) : this(initialResourceCount, null) { } /// <summary> /// Constructs a new thread-unsafe resource pool. /// </summary> public LockingResourcePool() : this(10) { } /// <summary> /// Gets the number of resources in the pool. /// Even if the resource count hits 0, resources /// can still be requested; they will be allocated /// dynamically. /// </summary> public override int Count { get { return stack.Count; } } /// <summary> /// Gives an item back to the resource pool. /// </summary> /// <param name="item">Item to return.</param> public override void GiveBack(T item) { stack.Enqueue(item); } /// <summary> /// Initializes the pool with some resources. /// Throws away excess resources. 
/// </summary> /// <param name="initialResourceCount">Number of resources to include.</param> public override void Initialize(int initialResourceCount) { while (stack.Count > initialResourceCount) { T toRemove; stack.TryUnsafeDequeueFirst(out toRemove); } int length = stack.lastIndex - stack.firstIndex + 1; //lastIndex is inclusive, so add 1. if (InstanceInitializer != null) for (int i = 0; i < length; i++) { InstanceInitializer(stack.array[(stack.firstIndex + i) % stack.array.Length]); } while (stack.Count < initialResourceCount) { stack.UnsafeEnqueue(CreateNewResource()); } } /// <summary> /// Takes an item from the resource pool. /// </summary> /// <returns>Item to take.</returns> public override T Take() { T toTake; if (stack.TryDequeueFirst(out toTake)) { return toTake; } return CreateNewResource(); } /// <summary> /// Clears out the resource pool. /// </summary> public override void Clear() { while (stack.Count > 0) { T item; stack.TryDequeueFirst(out item); } } } }
mayermatt/coms-437-trashdroids
Trashdroids/BEPUutilities/ResourceManagement/LockingResourcePool.cs
C#
mit
3,717
version https://git-lfs.github.com/spec/v1 oid sha256:71736be070607c3c30f4c139b063edf1b1ffa587cf725a0acc1e06c6d3af0e48 size 53235
yogeshsaroya/new-cdnjs
ajax/libs/ace/1.1.5/mode-objectivec.js
JavaScript
mit
130
'use strict'; /** * Module dependencies. */ var mongoose = require('mongoose'), CurrentModel = mongoose.model('Attendance'), Schedule = mongoose.model('Schedule'), Group = mongoose.model('Group'), _ = require('lodash'); exports.attendance = function(req, res, next, id) { CurrentModel.load(id, function(err, item) { if (err) return next(err); if (!item) return next(new Error('Failed to load item ' + id)); req.attendance = item; next(); }); }; exports.schedule = function(req, res, next, id) { Schedule.load(id, function(err, item) { if (err) return next(err); if (!item) return next(new Error('Failed to load item ' + id)); req.schedule = item; next(); }); }; exports.group = function(req, res, next, id) { Group.load(id, function(err, item) { if (err) return next(err); if (!item) return next(new Error('Failed to load item ' + id)); req.group = item; next(); }); }; exports.create = function(req, res) { var value = new CurrentModel(req.body); value.group = req.group; value.schedule = req.schedule; value.save(function(err) { if (err) { return res.send('users/signup', { errors: err.errors, object: value }); } else { res.jsonp(value); } }); }; exports.update = function(req, res) { var item = req.attendance; item = _.extend(item, req.body); item.save(function(err) { if (err) { return res.send('users/signup', { errors: err.errors, object: item }); } else { res.jsonp(item); } }); }; exports.destroy = function(req, res) { var item = req.attendance; item.remove(function(err) { if (err) { return res.send('users/signup', { errors: err.errors, object: item }); } else { res.jsonp(item); } }); }; exports.show = function(req, res) { res.jsonp(req.attendance); }; exports.all = function(req, res) { CurrentModel.find({ group: req.group, schedule: req.schedule }).populate('participant', 'name email').exec(function(err, items) { if (err) { res.render('error', { status: 500 }); } else { res.jsonp(items); } }); };
wolf-mtwo/attendance
packages/custom/groups/server/controllers/attendances.js
JavaScript
mit
2,206
#ifndef OSCTOOLS_HPP_INCLUDED #define OSCTOOLS_HPP_INCLUDED class OscOptionalUnpacker { ofxOscMessage & msg; int n; public: OscOptionalUnpacker(ofxOscMessage & m):msg(m),n(0){} OscOptionalUnpacker & operator >> (int & i) { if(n < msg.getNumArgs()) { i = msg.getArgAsInt32( n++ ); } return *this; } OscOptionalUnpacker & operator >> (float & i) { if(n < msg.getNumArgs()) { i = msg.getArgAsFloat( n++ ); } return *this; } OscOptionalUnpacker & operator >> (double & i) { if(n < msg.getNumArgs()) { i = msg.getArgAsFloat( n++ ); } return *this; } OscOptionalUnpacker & operator >> (std::string & i) { if(n < msg.getNumArgs()) { i = msg.getArgAsString( n++ ); } return *this; } bool Eos() { return n >= msg.getNumArgs(); } }; class OscPacker { ofxOscMessage & msg; public: OscPacker(ofxOscMessage & m):msg(m){} OscPacker & operator << (int i) { msg.addIntArg(i); return *this; } OscPacker & operator << (unsigned int i) { msg.addIntArg(i); return *this; } OscPacker & operator << (float i) { msg.addFloatArg(i); return *this; } OscPacker & operator << (const std::string & i) { msg.addStringArg(i); return *this; } }; #endif // OSCTOOLS_HPP_INCLUDED
toddberreth/ofxTableGestures
src/Utils/OscTools.hpp
C++
mit
1,534
package org.magcruise.gaming.executor.api.message; import org.magcruise.gaming.lang.Message; import org.magcruise.gaming.lang.SConstructor; public interface RequestToGameExecutor extends Message { @Override public SConstructor<? extends RequestToGameExecutor> toConstructor(ToExpressionStyle style); @Override public default SConstructor<? extends RequestToGameExecutor> toConstructor() { return toConstructor(ToExpressionStyle.DEFAULT); } }
MAGCruise/magcruise-core
src/main/java/org/magcruise/gaming/executor/api/message/RequestToGameExecutor.java
Java
mit
454
import os import re from opsbro.collector import Collector # DMI have lot of useful information that detectors can use to know lot about the platform/hardware class Dmidecode(Collector): def launch(self): logger = self.logger logger.debug('getDmidecode: start') res = {} # Maybe we are in linux and we can directly read the linux_dmi_path = '/sys/class/dmi/id/' if os.path.exists(linux_dmi_path): file_names = os.listdir(linux_dmi_path) for fname in file_names: p = os.path.join(linux_dmi_path, fname) # There can be a link there, skip them if os.path.isfile(p): f = open(p, 'r') buf = f.read() f.close() res[fname] = buf.strip() logger.debug('getdmidecode: completed, returning') return res elif os.name == 'nt': self.set_not_eligible('Windows is currently not managed for DMI informations') return False # Ok not direct access, try to launch with else: # try dmidecode way, if exists res = self.execute_shell('LANG=C dmidecode -s') if res is False: self.set_not_eligible('Cannot read dmi information') return False for p in res.split('\n'): if re.search('^ ', p): buf = self.execute_shell('LANG=C dmidecode -s %s' % p).strip() if 'No such file or directory' in buf: logger.warning('Cannot access to dmi information with dmidecode command, exiting this collector.') self.set_not_eligible('Cannot get DMI informations because the dmidecode command is missing.') return res res[p.replace('-', '_').strip()] = buf logger.debug('getdmidecode: completed, returning') return res
naparuba/kunai
data/global-configuration/packs/system/collectors/collector_dmidecode.py
Python
mit
2,003
"use strict"; var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); Object.defineProperty(exports, "__esModule", { value: true }); exports.default = void 0; var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon")); var _jsxRuntime = require("react/jsx-runtime"); var _default = (0, _createSvgIcon.default)( /*#__PURE__*/(0, _jsxRuntime.jsx)("path", { d: "m22 6.92-1.41-1.41-2.85 3.21C15.68 6.4 12.83 5 9.61 5 6.72 5 4.07 6.16 2 8l1.42 1.42C5.12 7.93 7.27 7 9.61 7c2.74 0 5.09 1.26 6.77 3.24l-2.88 3.24-4-4L2 16.99l1.5 1.5 6-6.01 4 4 4.05-4.55c.75 1.35 1.25 2.9 1.44 4.55H21c-.22-2.3-.95-4.39-2.04-6.14L22 6.92z" }), 'MultilineChartOutlined'); exports.default = _default;
oliviertassinari/material-ui
packages/mui-icons-material/lib/MultilineChartOutlined.js
JavaScript
mit
733
# Be sure to restart your server when you modify this file. Rails.application.config.session_store :cookie_store, key: '_ctgov_session'
tibbs001/ctgov
config/initializers/session_store.rb
Ruby
mit
137
import dateformat from 'dateformat'; import { map } from "underscore"; import { getAccountById } from 'routes/root/routes/Banking/routes/Accounts/modules/accounts'; import { getCreditCardById, getPrepaidCardById } from 'routes/root/routes/Banking/routes/Cards/modules/cards'; import { getLoanById } from 'routes/root/routes/Banking/routes/Loans/modules/loans'; export const getDebitAccount = (debitAccountType, debitAccountId) => { switch (debitAccountType) { case "isAccount": getAccountById(debitAccountId) break; case "isLoan": getLoanById(debitAccountId) break; case "isCreditCard": getCreditCardById(debitAccountId) break; case "isPrepaidCard": getPrepaidCardById(debitAccountId) break; } } const findDebitAccount = (debitAccountType, debitAccountId, state) => { let debitAccount = {}; switch (debitAccountType) { case "isAccount": debitAccount = state.accounts.accounts.filter((account) => account.id == debitAccountId)[0] break; case "isLoan": debitAccount = state.loans.loans.filter((loan) => loan.id == debitAccountId)[0] break; case "isCreditCard": debitAccount = state.cards.creditCards.filter((creditCard) => creditCard.id == debitAccountId)[0] break; case "isPrepaidCard": debitAccount = state.cards.prepaidCards.filter((prepaidCard) => prepaidCard.id == debitAccountId)[0] break; } return debitAccount; } export const getDebitAccountAvailableBalance = (debitAccountType, debitAccountId, state) => { const debitAccount = findDebitAccount(debitAccountType, debitAccountId, state); return getProductAvailableBalance(debitAccount, debitAccountType); } export const getProductAvailableBalance = (debitAccount, debitAccountType) => { let availableBalance = 0; switch (debitAccountType) { case "isAccount": availableBalance = debitAccount.ledgerBalance; break; case "isLoan": case "isCreditCard": case "isPrepaidCard": availableBalance = debitAccount.availableBalance; break; } return availableBalance; } export const getDebitAccountCurrency = (debitAccountType, debitAccountId, state) => { 
return findDebitAccount(debitAccountType, debitAccountId, state).currency; } export const isValidDate = (date) => { return new Date(date).setHours(0,0,0,0) >= new Date(dateformat()).setHours(0,0,0,0) } export const isValidInstallmentPaymentAmount = (product, amount, availableBalance) => { return amount > 0 && (parseFloat(amount) <= product.nextInstallmentAmount || parseFloat(amount) <= product.debt) && parseFloat(amount) <= availableBalance } export const isValidInstallmentPaymentForm = (transactionForm) => { return transactionForm.debitAccount.correct && transactionForm.amount.correct && transactionForm.date.correct } export const getPaymentType = (paymentMethod) => { let paymentType = ''; switch (paymentMethod) { case 'ΚΑΡΤΑ AGILE BANK': paymentType = 'isCreditCardAgile'; break; case 'ΚΑΡΤΑ ΑΛΛΗΣ ΤΡΑΠΕΖΗΣ': paymentType = 'isCreditCardThirdParty'; break; case 'ΔΑΝΕΙΟ AGILE BANK': paymentType = 'isLoan'; break; default: paymentType = 'thirdPartyPayment'; } return paymentType; } export const getCustomerName = (fullCustomerName) => { return (fullCustomerName.firstName + ' ' + fullCustomerName.lastName) .replace('ά', 'α') .replace('έ', 'ε') .replace('ί', 'ι') .replace('ή', 'η') .replace('ό', 'ο') .replace('ύ', 'υ') .replace('ώ', 'ω'); } export const getActualFullName = (fullName, currentFullName) => { const correctPattern = new RegExp("^[A-Za-zΑ-Ωα-ω ]+$"); return fullName = (correctPattern.test(fullName) || fullName == '' ? 
fullName : currentFullName).toUpperCase(); } export const isValidFullName = (fullName) => fullName.split(' ').length == 2 export const isValidDebitAmount = (amount, availableBalance) => { return amount > 0 && (parseFloat(amount)) <= availableBalance } export const isValidChargesBeneficiary = (beneficiary) => { return beneficiary == 'both' || beneficiary == 'sender' || beneficiary == 'beneficiary' } export const findPaymentCharges = (paymentMethods, paymentName) => { return map(paymentMethods, (paymentMethod) => paymentMethod.map(method => method)) .flatMap(paymentMethod => paymentMethod) .filter(payment => payment.name == paymentName)[0].charges } export const findTransferCharges = (beneficiary) => { let charges = 0; switch (beneficiary) { case 'both': charges = 3; break; case 'sender': charges = 6; break; case 'beneficiary': charges = 0; break; } return charges; } export const getImmediateText = (language) => { let immediateText = ''; switch (language) { case 'greek': immediateText = 'ΑΜΕΣΑ'; break; case 'english': immediateText = 'IMMEDIATE'; break; } return immediateText; } export const formatCardNumber = (cardNumber) => { return [...cardNumber].map(((num, key) => key % 4 == 0 && key != 0 ? ' ' + num : num )) }
GKotsovos/WebBanking-Front-End
src/routes/root/routes/Banking/routes/utils/commonUtils.js
JavaScript
mit
5,183
#include <iostream> #include <map> #include <stdexcept> using namespace std; int main(int argc, char* argv[]){ map<string, int> m; m["bob"] = 56; m["alice"] = 89; m["billy"] = 3; // print it out map<string,int>::iterator i; for(i = m.begin(); i != m.end(); i++){ cout << i->first << ": " << i->second << endl; } cout << "size: " << m.size() << endl << endl; i = m.find("billy"); if(i == m.end()){ cout << "No billy!\n"; }else{ cout << i->first << ": " << i->second << endl; } return 0; }
mikehelmick/teaching
uc/computerScience2/spring2014/materials/demos/wk14/map.cpp
C++
mit
524
require File.join(File.dirname(__FILE__), "/../spec_helper") describe "Firefox" do before(:each) do @browser = Firefox.new @url = "http://localhost" end describe "Cross OS Firefox", :shared => true do it "should be supported" do @browser.should be_supported end end describe "Mac OS X" do it_should_behave_like "Cross OS Firefox" it "should have a path" do expected = File.expand_path("/Applications/#{@browser.escaped_name}.app") @browser.path.should == expected end it "return name" do @browser.name.should == "Firefox" end it "should visit a given url" do Kernel.expects(:system).with("open -a #{@browser.name} '#{@url}'") @browser.visit(@url) end end if macos? describe "Windows" do it_should_behave_like "Cross OS Firefox" it "should have a path" do @browser.path.should == File.join(ENV['ProgramFiles'] || 'c:\Program Files', '\Mozilla Firefox\firefox.exe') end it "return name" do @browser.name.should == "Firefox" end it "should visit a given url" do Kernel.expects(:system).with("#{@browser.path} #{@url}") @browser.visit(@url) end end if windows? describe "Linux" do it_should_behave_like "Cross OS Firefox" it "should have a path" do path = "/usr/bin/#{@browser.name}" Firefox.new(path).path.should == path end it "should visit a given url" do Kernel.expects(:system).with("#{@browser.name} #{@url}") @browser.visit(@url) end it "return name" do @browser.name.should == "firefox" end end if linux? end
zfben/zfben_hanoi
spec/browsers/firefox_spec.rb
Ruby
mit
1,641
using System.ComponentModel; namespace EncompassRest.Loans.Enums { /// <summary> /// PropertyValuationMethodType /// </summary> public enum PropertyValuationMethodType { /// <summary> /// Automated Valuation Model /// </summary> [Description("Automated Valuation Model")] AutomatedValuationModel = 0, /// <summary> /// Desktop Appraisal /// </summary> [Description("Desktop Appraisal")] DesktopAppraisal = 1, /// <summary> /// Drive By /// </summary> [Description("Drive By")] DriveBy = 2, /// <summary> /// Estimation /// </summary> Estimation = 3, /// <summary> /// Full Appraisal /// </summary> [Description("Full Appraisal")] FullAppraisal = 5, /// <summary> /// None /// </summary> None = 6, /// <summary> /// Other /// </summary> Other = 7, /// <summary> /// Prior Appraisal Used /// </summary> [Description("Prior Appraisal Used")] PriorAppraisalUsed = 8 } }
EncompassRest/EncompassRest
src/EncompassRest/Loans/Enums/PropertyValuationMethodType.cs
C#
mit
1,186
/*! * vue-resource v1.5.3 * https://github.com/pagekit/vue-resource * Released under the MIT License. */ 'use strict'; /** * Promises/A+ polyfill v1.1.4 (https://github.com/bramstein/promis) */ var RESOLVED = 0; var REJECTED = 1; var PENDING = 2; function Promise$1(executor) { this.state = PENDING; this.value = undefined; this.deferred = []; var promise = this; try { executor(function (x) { promise.resolve(x); }, function (r) { promise.reject(r); }); } catch (e) { promise.reject(e); } } Promise$1.reject = function (r) { return new Promise$1(function (resolve, reject) { reject(r); }); }; Promise$1.resolve = function (x) { return new Promise$1(function (resolve, reject) { resolve(x); }); }; Promise$1.all = function all(iterable) { return new Promise$1(function (resolve, reject) { var count = 0, result = []; if (iterable.length === 0) { resolve(result); } function resolver(i) { return function (x) { result[i] = x; count += 1; if (count === iterable.length) { resolve(result); } }; } for (var i = 0; i < iterable.length; i += 1) { Promise$1.resolve(iterable[i]).then(resolver(i), reject); } }); }; Promise$1.race = function race(iterable) { return new Promise$1(function (resolve, reject) { for (var i = 0; i < iterable.length; i += 1) { Promise$1.resolve(iterable[i]).then(resolve, reject); } }); }; var p = Promise$1.prototype; p.resolve = function resolve(x) { var promise = this; if (promise.state === PENDING) { if (x === promise) { throw new TypeError('Promise settled with itself.'); } var called = false; try { var then = x && x['then']; if (x !== null && typeof x === 'object' && typeof then === 'function') { then.call(x, function (x) { if (!called) { promise.resolve(x); } called = true; }, function (r) { if (!called) { promise.reject(r); } called = true; }); return; } } catch (e) { if (!called) { promise.reject(e); } return; } promise.state = RESOLVED; promise.value = x; promise.notify(); } }; p.reject = function reject(reason) { var promise = this; if (promise.state === 
PENDING) { if (reason === promise) { throw new TypeError('Promise settled with itself.'); } promise.state = REJECTED; promise.value = reason; promise.notify(); } }; p.notify = function notify() { var promise = this; nextTick(function () { if (promise.state !== PENDING) { while (promise.deferred.length) { var deferred = promise.deferred.shift(), onResolved = deferred[0], onRejected = deferred[1], resolve = deferred[2], reject = deferred[3]; try { if (promise.state === RESOLVED) { if (typeof onResolved === 'function') { resolve(onResolved.call(undefined, promise.value)); } else { resolve(promise.value); } } else if (promise.state === REJECTED) { if (typeof onRejected === 'function') { resolve(onRejected.call(undefined, promise.value)); } else { reject(promise.value); } } } catch (e) { reject(e); } } } }); }; p.then = function then(onResolved, onRejected) { var promise = this; return new Promise$1(function (resolve, reject) { promise.deferred.push([onResolved, onRejected, resolve, reject]); promise.notify(); }); }; p["catch"] = function (onRejected) { return this.then(undefined, onRejected); }; /** * Promise adapter. 
*/ if (typeof Promise === 'undefined') { window.Promise = Promise$1; } function PromiseObj(executor, context) { if (executor instanceof Promise) { this.promise = executor; } else { this.promise = new Promise(executor.bind(context)); } this.context = context; } PromiseObj.all = function (iterable, context) { return new PromiseObj(Promise.all(iterable), context); }; PromiseObj.resolve = function (value, context) { return new PromiseObj(Promise.resolve(value), context); }; PromiseObj.reject = function (reason, context) { return new PromiseObj(Promise.reject(reason), context); }; PromiseObj.race = function (iterable, context) { return new PromiseObj(Promise.race(iterable), context); }; var p$1 = PromiseObj.prototype; p$1.bind = function (context) { this.context = context; return this; }; p$1.then = function (fulfilled, rejected) { if (fulfilled && fulfilled.bind && this.context) { fulfilled = fulfilled.bind(this.context); } if (rejected && rejected.bind && this.context) { rejected = rejected.bind(this.context); } return new PromiseObj(this.promise.then(fulfilled, rejected), this.context); }; p$1["catch"] = function (rejected) { if (rejected && rejected.bind && this.context) { rejected = rejected.bind(this.context); } return new PromiseObj(this.promise["catch"](rejected), this.context); }; p$1["finally"] = function (callback) { return this.then(function (value) { callback.call(this); return value; }, function (reason) { callback.call(this); return Promise.reject(reason); }); }; /** * Utility functions. 
*/ var _ref = {}, hasOwnProperty = _ref.hasOwnProperty, slice = [].slice, debug = false, ntick; var inBrowser = typeof window !== 'undefined'; function Util (_ref2) { var config = _ref2.config, nextTick = _ref2.nextTick; ntick = nextTick; debug = config.debug || !config.silent; } function warn(msg) { if (typeof console !== 'undefined' && debug) { console.warn('[VueResource warn]: ' + msg); } } function error(msg) { if (typeof console !== 'undefined') { console.error(msg); } } function nextTick(cb, ctx) { return ntick(cb, ctx); } function trim(str) { return str ? str.replace(/^\s*|\s*$/g, '') : ''; } function trimEnd(str, chars) { if (str && chars === undefined) { return str.replace(/\s+$/, ''); } if (!str || !chars) { return str; } return str.replace(new RegExp("[" + chars + "]+$"), ''); } function toLower(str) { return str ? str.toLowerCase() : ''; } function toUpper(str) { return str ? str.toUpperCase() : ''; } var isArray = Array.isArray; function isString(val) { return typeof val === 'string'; } function isFunction(val) { return typeof val === 'function'; } function isObject(obj) { return obj !== null && typeof obj === 'object'; } function isPlainObject(obj) { return isObject(obj) && Object.getPrototypeOf(obj) == Object.prototype; } function isBlob(obj) { return typeof Blob !== 'undefined' && obj instanceof Blob; } function isFormData(obj) { return typeof FormData !== 'undefined' && obj instanceof FormData; } function when(value, fulfilled, rejected) { var promise = PromiseObj.resolve(value); if (arguments.length < 2) { return promise; } return promise.then(fulfilled, rejected); } function options(fn, obj, opts) { opts = opts || {}; if (isFunction(opts)) { opts = opts.call(obj); } return merge(fn.bind({ $vm: obj, $options: opts }), fn, { $options: opts }); } function each(obj, iterator) { var i, key; if (isArray(obj)) { for (i = 0; i < obj.length; i++) { iterator.call(obj[i], obj[i], i); } } else if (isObject(obj)) { for (key in obj) { if 
(hasOwnProperty.call(obj, key)) { iterator.call(obj[key], obj[key], key); } } } return obj; } var assign = Object.assign || _assign; function merge(target) { var args = slice.call(arguments, 1); args.forEach(function (source) { _merge(target, source, true); }); return target; } function defaults(target) { var args = slice.call(arguments, 1); args.forEach(function (source) { for (var key in source) { if (target[key] === undefined) { target[key] = source[key]; } } }); return target; } function _assign(target) { var args = slice.call(arguments, 1); args.forEach(function (source) { _merge(target, source); }); return target; } function _merge(target, source, deep) { for (var key in source) { if (deep && (isPlainObject(source[key]) || isArray(source[key]))) { if (isPlainObject(source[key]) && !isPlainObject(target[key])) { target[key] = {}; } if (isArray(source[key]) && !isArray(target[key])) { target[key] = []; } _merge(target[key], source[key], deep); } else if (source[key] !== undefined) { target[key] = source[key]; } } } /** * Root Prefix Transform. */ function root (options$$1, next) { var url = next(options$$1); if (isString(options$$1.root) && !/^(https?:)?\//.test(url)) { url = trimEnd(options$$1.root, '/') + '/' + url; } return url; } /** * Query Parameter Transform. */ function query (options$$1, next) { var urlParams = Object.keys(Url.options.params), query = {}, url = next(options$$1); each(options$$1.params, function (value, key) { if (urlParams.indexOf(key) === -1) { query[key] = value; } }); query = Url.params(query); if (query) { url += (url.indexOf('?') == -1 ? '?' 
: '&') + query; } return url; } /** * URL Template v2.0.6 (https://github.com/bramstein/url-template) */ function expand(url, params, variables) { var tmpl = parse(url), expanded = tmpl.expand(params); if (variables) { variables.push.apply(variables, tmpl.vars); } return expanded; } function parse(template) { var operators = ['+', '#', '.', '/', ';', '?', '&'], variables = []; return { vars: variables, expand: function expand(context) { return template.replace(/\{([^{}]+)\}|([^{}]+)/g, function (_, expression, literal) { if (expression) { var operator = null, values = []; if (operators.indexOf(expression.charAt(0)) !== -1) { operator = expression.charAt(0); expression = expression.substr(1); } expression.split(/,/g).forEach(function (variable) { var tmp = /([^:*]*)(?::(\d+)|(\*))?/.exec(variable); values.push.apply(values, getValues(context, operator, tmp[1], tmp[2] || tmp[3])); variables.push(tmp[1]); }); if (operator && operator !== '+') { var separator = ','; if (operator === '?') { separator = '&'; } else if (operator !== '#') { separator = operator; } return (values.length !== 0 ? operator : '') + values.join(separator); } else { return values.join(','); } } else { return encodeReserved(literal); } }); } }; } function getValues(context, operator, key, modifier) { var value = context[key], result = []; if (isDefined(value) && value !== '') { if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { value = value.toString(); if (modifier && modifier !== '*') { value = value.substring(0, parseInt(modifier, 10)); } result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : null)); } else { if (modifier === '*') { if (Array.isArray(value)) { value.filter(isDefined).forEach(function (value) { result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : null)); }); } else { Object.keys(value).forEach(function (k) { if (isDefined(value[k])) { result.push(encodeValue(operator, value[k], k)); } }); } } else { var tmp = []; if (Array.isArray(value)) { value.filter(isDefined).forEach(function (value) { tmp.push(encodeValue(operator, value)); }); } else { Object.keys(value).forEach(function (k) { if (isDefined(value[k])) { tmp.push(encodeURIComponent(k)); tmp.push(encodeValue(operator, value[k].toString())); } }); } if (isKeyOperator(operator)) { result.push(encodeURIComponent(key) + '=' + tmp.join(',')); } else if (tmp.length !== 0) { result.push(tmp.join(',')); } } } } else { if (operator === ';') { result.push(encodeURIComponent(key)); } else if (value === '' && (operator === '&' || operator === '?')) { result.push(encodeURIComponent(key) + '='); } else if (value === '') { result.push(''); } } return result; } function isDefined(value) { return value !== undefined && value !== null; } function isKeyOperator(operator) { return operator === ';' || operator === '&' || operator === '?'; } function encodeValue(operator, value, key) { value = operator === '+' || operator === '#' ? encodeReserved(value) : encodeURIComponent(value); if (key) { return encodeURIComponent(key) + '=' + value; } else { return value; } } function encodeReserved(str) { return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { if (!/%[0-9A-Fa-f]/.test(part)) { part = encodeURI(part); } return part; }).join(''); } /** * URL Template (RFC 6570) Transform. */ function template (options) { var variables = [], url = expand(options.url, options.params, variables); variables.forEach(function (key) { delete options.params[key]; }); return url; } /** * Service for URL templating. 
*/ function Url(url, params) { var self = this || {}, options$$1 = url, transform; if (isString(url)) { options$$1 = { url: url, params: params }; } options$$1 = merge({}, Url.options, self.$options, options$$1); Url.transforms.forEach(function (handler) { if (isString(handler)) { handler = Url.transform[handler]; } if (isFunction(handler)) { transform = factory(handler, transform, self.$vm); } }); return transform(options$$1); } /** * Url options. */ Url.options = { url: '', root: null, params: {} }; /** * Url transforms. */ Url.transform = { template: template, query: query, root: root }; Url.transforms = ['template', 'query', 'root']; /** * Encodes a Url parameter string. * * @param {Object} obj */ Url.params = function (obj) { var params = [], escape = encodeURIComponent; params.add = function (key, value) { if (isFunction(value)) { value = value(); } if (value === null) { value = ''; } this.push(escape(key) + '=' + escape(value)); }; serialize(params, obj); return params.join('&').replace(/%20/g, '+'); }; /** * Parse a URL and return its components. * * @param {String} url */ Url.parse = function (url) { var el = document.createElement('a'); if (document.documentMode) { el.href = url; url = el.href; } el.href = url; return { href: el.href, protocol: el.protocol ? el.protocol.replace(/:$/, '') : '', port: el.port, host: el.host, hostname: el.hostname, pathname: el.pathname.charAt(0) === '/' ? el.pathname : '/' + el.pathname, search: el.search ? el.search.replace(/^\?/, '') : '', hash: el.hash ? el.hash.replace(/^#/, '') : '' }; }; function factory(handler, next, vm) { return function (options$$1) { return handler.call(vm, options$$1, next); }; } function serialize(params, obj, scope) { var array = isArray(obj), plain = isPlainObject(obj), hash; each(obj, function (value, key) { hash = isObject(value) || isArray(value); if (scope) { key = scope + '[' + (plain || hash ? 
key : '') + ']'; } if (!scope && array) { params.add(value.name, value.value); } else if (hash) { serialize(params, value, key); } else { params.add(key, value); } }); } /** * XDomain client (Internet Explorer). */ function xdrClient (request) { return new PromiseObj(function (resolve) { var xdr = new XDomainRequest(), handler = function handler(_ref) { var type = _ref.type; var status = 0; if (type === 'load') { status = 200; } else if (type === 'error') { status = 500; } resolve(request.respondWith(xdr.responseText, { status: status })); }; request.abort = function () { return xdr.abort(); }; xdr.open(request.method, request.getUrl()); if (request.timeout) { xdr.timeout = request.timeout; } xdr.onload = handler; xdr.onabort = handler; xdr.onerror = handler; xdr.ontimeout = handler; xdr.onprogress = function () {}; xdr.send(request.getBody()); }); } /** * CORS Interceptor. */ var SUPPORTS_CORS = inBrowser && 'withCredentials' in new XMLHttpRequest(); function cors (request) { if (inBrowser) { var orgUrl = Url.parse(location.href); var reqUrl = Url.parse(request.getUrl()); if (reqUrl.protocol !== orgUrl.protocol || reqUrl.host !== orgUrl.host) { request.crossOrigin = true; request.emulateHTTP = false; if (!SUPPORTS_CORS) { request.client = xdrClient; } } } } /** * Form data Interceptor. */ function form (request) { if (isFormData(request.body)) { request.headers["delete"]('Content-Type'); } else if (isObject(request.body) && request.emulateJSON) { request.body = Url.params(request.body); request.headers.set('Content-Type', 'application/x-www-form-urlencoded'); } } /** * JSON Interceptor. */ function json (request) { var type = request.headers.get('Content-Type') || ''; if (isObject(request.body) && type.indexOf('application/json') === 0) { request.body = JSON.stringify(request.body); } return function (response) { return response.bodyText ? 
when(response.text(), function (text) { var type = response.headers.get('Content-Type') || ''; if (type.indexOf('application/json') === 0 || isJson(text)) { try { response.body = JSON.parse(text); } catch (e) { response.body = null; } } else { response.body = text; } return response; }) : response; }; } function isJson(str) { var start = str.match(/^\s*(\[|\{)/); var end = { '[': /]\s*$/, '{': /}\s*$/ }; return start && end[start[1]].test(str); } /** * JSONP client (Browser). */ function jsonpClient (request) { return new PromiseObj(function (resolve) { var name = request.jsonp || 'callback', callback = request.jsonpCallback || '_jsonp' + Math.random().toString(36).substr(2), body = null, handler, script; handler = function handler(_ref) { var type = _ref.type; var status = 0; if (type === 'load' && body !== null) { status = 200; } else if (type === 'error') { status = 500; } if (status && window[callback]) { delete window[callback]; document.body.removeChild(script); } resolve(request.respondWith(body, { status: status })); }; window[callback] = function (result) { body = JSON.stringify(result); }; request.abort = function () { handler({ type: 'abort' }); }; request.params[name] = callback; if (request.timeout) { setTimeout(request.abort, request.timeout); } script = document.createElement('script'); script.src = request.getUrl(); script.type = 'text/javascript'; script.async = true; script.onload = handler; script.onerror = handler; document.body.appendChild(script); }); } /** * JSONP Interceptor. */ function jsonp (request) { if (request.method == 'JSONP') { request.client = jsonpClient; } } /** * Before Interceptor. */ function before (request) { if (isFunction(request.before)) { request.before.call(this, request); } } /** * HTTP method override Interceptor. 
*/ function method (request) { if (request.emulateHTTP && /^(PUT|PATCH|DELETE)$/i.test(request.method)) { request.headers.set('X-HTTP-Method-Override', request.method); request.method = 'POST'; } } /** * Header Interceptor. */ function header (request) { var headers = assign({}, Http.headers.common, !request.crossOrigin ? Http.headers.custom : {}, Http.headers[toLower(request.method)]); each(headers, function (value, name) { if (!request.headers.has(name)) { request.headers.set(name, value); } }); } /** * XMLHttp client (Browser). */ function xhrClient (request) { return new PromiseObj(function (resolve) { var xhr = new XMLHttpRequest(), handler = function handler(event) { var response = request.respondWith('response' in xhr ? xhr.response : xhr.responseText, { status: xhr.status === 1223 ? 204 : xhr.status, // IE9 status bug statusText: xhr.status === 1223 ? 'No Content' : trim(xhr.statusText) }); each(trim(xhr.getAllResponseHeaders()).split('\n'), function (row) { response.headers.append(row.slice(0, row.indexOf(':')), row.slice(row.indexOf(':') + 1)); }); resolve(response); }; request.abort = function () { return xhr.abort(); }; xhr.open(request.method, request.getUrl(), true); if (request.timeout) { xhr.timeout = request.timeout; } if (request.responseType && 'responseType' in xhr) { xhr.responseType = request.responseType; } if (request.withCredentials || request.credentials) { xhr.withCredentials = true; } if (!request.crossOrigin) { request.headers.set('X-Requested-With', 'XMLHttpRequest'); } // deprecated use downloadProgress if (isFunction(request.progress) && request.method === 'GET') { xhr.addEventListener('progress', request.progress); } if (isFunction(request.downloadProgress)) { xhr.addEventListener('progress', request.downloadProgress); } // deprecated use uploadProgress if (isFunction(request.progress) && /^(POST|PUT)$/i.test(request.method)) { xhr.upload.addEventListener('progress', request.progress); } if (isFunction(request.uploadProgress) && 
xhr.upload) { xhr.upload.addEventListener('progress', request.uploadProgress); } request.headers.forEach(function (value, name) { xhr.setRequestHeader(name, value); }); xhr.onload = handler; xhr.onabort = handler; xhr.onerror = handler; xhr.ontimeout = handler; xhr.send(request.getBody()); }); } /** * Http client (Node). */ function nodeClient (request) { var client = require('got'); return new PromiseObj(function (resolve) { var url = request.getUrl(); var body = request.getBody(); var method = request.method; var headers = {}, handler; request.headers.forEach(function (value, name) { headers[name] = value; }); client(url, { body: body, method: method, headers: headers }).then(handler = function handler(resp) { var response = request.respondWith(resp.body, { status: resp.statusCode, statusText: trim(resp.statusMessage) }); each(resp.headers, function (value, name) { response.headers.set(name, value); }); resolve(response); }, function (error$$1) { return handler(error$$1.response); }); }); } /** * Base client. 
*/ function Client (context) { var reqHandlers = [sendRequest], resHandlers = []; if (!isObject(context)) { context = null; } function Client(request) { while (reqHandlers.length) { var handler = reqHandlers.pop(); if (isFunction(handler)) { var _ret = function () { var response = void 0, next = void 0; response = handler.call(context, request, function (val) { return next = val; }) || next; if (isObject(response)) { return { v: new PromiseObj(function (resolve, reject) { resHandlers.forEach(function (handler) { response = when(response, function (response) { return handler.call(context, response) || response; }, reject); }); when(response, resolve, reject); }, context) }; } if (isFunction(response)) { resHandlers.unshift(response); } }(); if (typeof _ret === "object") return _ret.v; } else { warn("Invalid interceptor of type " + typeof handler + ", must be a function"); } } } Client.use = function (handler) { reqHandlers.push(handler); }; return Client; } function sendRequest(request) { var client = request.client || (inBrowser ? xhrClient : nodeClient); return client(request); } /** * HTTP Headers. */ var Headers = /*#__PURE__*/function () { function Headers(headers) { var _this = this; this.map = {}; each(headers, function (value, name) { return _this.append(name, value); }); } var _proto = Headers.prototype; _proto.has = function has(name) { return getName(this.map, name) !== null; }; _proto.get = function get(name) { var list = this.map[getName(this.map, name)]; return list ? 
list.join() : null; }; _proto.getAll = function getAll(name) { return this.map[getName(this.map, name)] || []; }; _proto.set = function set(name, value) { this.map[normalizeName(getName(this.map, name) || name)] = [trim(value)]; }; _proto.append = function append(name, value) { var list = this.map[getName(this.map, name)]; if (list) { list.push(trim(value)); } else { this.set(name, value); } }; _proto["delete"] = function _delete(name) { delete this.map[getName(this.map, name)]; }; _proto.deleteAll = function deleteAll() { this.map = {}; }; _proto.forEach = function forEach(callback, thisArg) { var _this2 = this; each(this.map, function (list, name) { each(list, function (value) { return callback.call(thisArg, value, name, _this2); }); }); }; return Headers; }(); function getName(map, name) { return Object.keys(map).reduce(function (prev, curr) { return toLower(name) === toLower(curr) ? curr : prev; }, null); } function normalizeName(name) { if (/[^a-z0-9\-#$%&'*+.^_`|~]/i.test(name)) { throw new TypeError('Invalid character in header field name'); } return trim(name); } /** * HTTP Response. 
*/ var Response = /*#__PURE__*/function () { function Response(body, _ref) { var url = _ref.url, headers = _ref.headers, status = _ref.status, statusText = _ref.statusText; this.url = url; this.ok = status >= 200 && status < 300; this.status = status || 0; this.statusText = statusText || ''; this.headers = new Headers(headers); this.body = body; if (isString(body)) { this.bodyText = body; } else if (isBlob(body)) { this.bodyBlob = body; if (isBlobText(body)) { this.bodyText = blobText(body); } } } var _proto = Response.prototype; _proto.blob = function blob() { return when(this.bodyBlob); }; _proto.text = function text() { return when(this.bodyText); }; _proto.json = function json() { return when(this.text(), function (text) { return JSON.parse(text); }); }; return Response; }(); Object.defineProperty(Response.prototype, 'data', { get: function get() { return this.body; }, set: function set(body) { this.body = body; } }); function blobText(body) { return new PromiseObj(function (resolve) { var reader = new FileReader(); reader.readAsText(body); reader.onload = function () { resolve(reader.result); }; }); } function isBlobText(body) { return body.type.indexOf('text') === 0 || body.type.indexOf('json') !== -1; } /** * HTTP Request. */ var Request = /*#__PURE__*/function () { function Request(options$$1) { this.body = null; this.params = {}; assign(this, options$$1, { method: toUpper(options$$1.method || 'GET') }); if (!(this.headers instanceof Headers)) { this.headers = new Headers(this.headers); } } var _proto = Request.prototype; _proto.getUrl = function getUrl() { return Url(this); }; _proto.getBody = function getBody() { return this.body; }; _proto.respondWith = function respondWith(body, options$$1) { return new Response(body, assign(options$$1 || {}, { url: this.getUrl() })); }; return Request; }(); /** * Service for sending network requests. 
*/ var COMMON_HEADERS = { 'Accept': 'application/json, text/plain, */*' }; var JSON_CONTENT_TYPE = { 'Content-Type': 'application/json;charset=utf-8' }; function Http(options$$1) { var self = this || {}, client = Client(self.$vm); defaults(options$$1 || {}, self.$options, Http.options); Http.interceptors.forEach(function (handler) { if (isString(handler)) { handler = Http.interceptor[handler]; } if (isFunction(handler)) { client.use(handler); } }); return client(new Request(options$$1)).then(function (response) { return response.ok ? response : PromiseObj.reject(response); }, function (response) { if (response instanceof Error) { error(response); } return PromiseObj.reject(response); }); } Http.options = {}; Http.headers = { put: JSON_CONTENT_TYPE, post: JSON_CONTENT_TYPE, patch: JSON_CONTENT_TYPE, "delete": JSON_CONTENT_TYPE, common: COMMON_HEADERS, custom: {} }; Http.interceptor = { before: before, method: method, jsonp: jsonp, json: json, form: form, header: header, cors: cors }; Http.interceptors = ['before', 'method', 'jsonp', 'json', 'form', 'header', 'cors']; ['get', 'delete', 'head', 'jsonp'].forEach(function (method$$1) { Http[method$$1] = function (url, options$$1) { return this(assign(options$$1 || {}, { url: url, method: method$$1 })); }; }); ['post', 'put', 'patch'].forEach(function (method$$1) { Http[method$$1] = function (url, body, options$$1) { return this(assign(options$$1 || {}, { url: url, method: method$$1, body: body })); }; }); /** * Service for interacting with RESTful services. 
*/ function Resource(url, params, actions, options$$1) { var self = this || {}, resource = {}; actions = assign({}, Resource.actions, actions); each(actions, function (action, name) { action = merge({ url: url, params: assign({}, params) }, options$$1, action); resource[name] = function () { return (self.$http || Http)(opts(action, arguments)); }; }); return resource; } function opts(action, args) { var options$$1 = assign({}, action), params = {}, body; switch (args.length) { case 2: params = args[0]; body = args[1]; break; case 1: if (/^(POST|PUT|PATCH)$/i.test(options$$1.method)) { body = args[0]; } else { params = args[0]; } break; case 0: break; default: throw 'Expected up to 2 arguments [params, body], got ' + args.length + ' arguments'; } options$$1.body = body; options$$1.params = assign({}, options$$1.params, params); return options$$1; } Resource.actions = { get: { method: 'GET' }, save: { method: 'POST' }, query: { method: 'GET' }, update: { method: 'PUT' }, remove: { method: 'DELETE' }, "delete": { method: 'DELETE' } }; /** * Install plugin. */ function plugin(Vue) { if (plugin.installed) { return; } Util(Vue); Vue.url = Url; Vue.http = Http; Vue.resource = Resource; Vue.Promise = PromiseObj; Object.defineProperties(Vue.prototype, { $url: { get: function get() { return options(Vue.url, this, this.$options.url); } }, $http: { get: function get() { return options(Vue.http, this, this.$options.http); } }, $resource: { get: function get() { return Vue.resource.bind(this); } }, $promise: { get: function get() { var _this = this; return function (executor) { return new Vue.Promise(executor, _this); }; } } }); } if (typeof window !== 'undefined' && window.Vue && !window.Vue.resource) { window.Vue.use(plugin); } module.exports = plugin;
vuejs/vue-resource
dist/vue-resource.common.js
JavaScript
mit
33,000
console.log('argv[0]: '+process.argv[0]); console.log('argv[1]: '+process.argv[1]);
AdonRain/neowheel
1_hello/hello_2/hello_2_2.js
JavaScript
mit
86
# Copyright (C) 2009 Pascal Rettig. require 'rss/2.0' class Feed::RssRenderer < ParagraphRenderer paragraph :feed paragraph :view_rss paragraph :rss_auto_discovery, :cache => true def feed paragraph_data = (paragraph.data || {}).symbolize_keys @handler_info = get_handler_info(:feed,:rss,paragraph_data[:feed_type]) if ! @handler_info data_paragraph :text => 'Reconfigure RSS Feed'.t return end handler_options_class = nil begin handler_options_class = "#{@handler_info[:class_name]}::Options".constantize rescue end if handler_options_class.nil? data_paragraph :text => 'Reconfigure RSS Feed'.t return end @options = handler_options_class.new(paragraph_data) @handler = @handler_info[:class].new(@options) @cache_id = site_node.id.to_s if @handler.respond_to?(:set_path) @handler.set_path(params[:path]) @cache_id += DomainModel.hexdigest(params[:path].join("/")) end results = renderer_cache(nil,@cache_id, :skip => @options.timeout <= 0, :expires => @options.timeout*60) do |cache| data = @handler.get_feed data[:self_link] = Configuration.domain_link site_node.node_path if @handler_info[:custom] cache[:output] = render_to_string(:partial => @handler_info[:custom],:locals => { :data => data}) else cache[:output] = render_to_string(:partial => '/feed/rss/feed',:locals => { :data => data }) end end headers['Content-Type'] = 'text/xml' data_paragraph :text => results.output end feature :rss_feed_view, :default_feature => <<-FEATURE <div class='rss_feed'> <cms:feed> <h2><cms:link><cms:title/></cms:link></h2> <cms:description/> <cms:items> <cms:item> <div class='rss_feed_item'> <h3><cms:link><cms:title/></cms:link></h3> <cms:content/> </div> </cms:item> </cms:items> </cms:feed> <cms:no_feed> No Feed </cms:no_feed> </div> FEATURE include ActionView::Helpers::DateHelper def rss_feed_view_feature(data) webiva_feature(:rss_feed_view,data) do |c| c.define_tag('feed') { |tag| data[:feed].blank? ? nil : tag.expand } c.define_tag('no_feed') { |tag| data[:feed].blank? ? 
tag.expand : nil } c.define_link_tag('feed:') { |t| data[:feed].channel.link } c.define_value_tag('feed:title') { |tag| data[:feed].channel.title } c.define_value_tag('feed:description') { |tag| data[:feed].channel.description } c.define_tag('feed:no_items') { |tag| data[:feed].items.length == 0 ? tag.expand : nil } c.define_tag('feed:items') { |tag| data[:feed].items.length > 0 ? tag.expand : nil } c.define_tag('feed:items:item') do |tag| result = '' items = data[:feed].items unless data[:category].blank? items = items.find_all { |item| item.categories.detect { |cat| cat.content == data[:category] } } end items = items[0..(data[:items]-1)] if data[:items] > 0 items.each_with_index do |item,idx| tag.locals.item = item tag.locals.index = idx + 1 tag.locals.first = idx == 0 tag.locals.last = idx == data[:feed].items.length result << tag.expand end result end c.define_value_tag('feed:items:item:content') { |tag| if data[:read_more].blank? txt = tag.locals.item.description else txt = tag.locals.item.description.to_s.sub(data[:read_more],"[<a href='#{tag.locals.item.link}'>Read More..</a>]") end } c.define_link_tag('feed:items:item:') { |t| t.locals.item.link } c.define_value_tag('feed:items:item:title') { |tag| tag.locals.item.title } c.define_value_tag('feed:items:item:author') { |tag| tag.locals.item.author } c.define_value_tag('feed:items:item:categories') { |tag| tag.locals.item.categories.map { |cat| cat.content }.join(", ") } c.define_value_tag('feed:items:item:description') { |tag| tag.locals.item.description } c.date_tag('feed:items:item:date') { |t| t.locals.item.date } c.value_tag('feed:items:item:ago') { |t| distance_of_time_in_words_to_now(t.locals.item.date).gsub('about','').strip if t.locals.item.date } end end def view_rss options = Feed::RssController::ViewRssOptions.new(paragraph.data || {}) return render_paragraph :text => 'Configure Paragraph' if options.rss_url.blank? 
result = renderer_cache(nil,options.rss_url, :expires => options.cache_minutes.to_i.minutes) do |cache| rss_feed = delayed_cache_fetch(FeedParser,:delayed_feed_parser,{ :rss_url => options.rss_url },options.rss_url, :expires => options.cache_minutes.to_i.minutes) return render_paragraph :text => '' if !rss_feed data = { :feed => rss_feed[:feed], :items => options.items, :category => options.category, :read_more => options.read_more } cache[:output] = rss_feed_view_feature(data) logger.warn('In Renderer Cache') end render_paragraph :text => result.output end def rss_auto_discovery @options = paragraph.data || {} if !@options[:module_node_id].blank? && @options[:module_node_id].to_i > 0 @nodes = [ SiteNode.find_by_id(@options[:module_node_id]) ].compact else @nodes = SiteNode.find(:all,:conditions => ['node_type = "M" AND module_name = "/feed/rss"' ],:include => :page_modifier) end output = @nodes.collect do |nd| if nd.page_modifier nd.page_modifier.modifier_data ||= {} "<link rel='alternate' type='application/rss+xml' title='#{vh nd.page_modifier.modifier_data[:feed_title]}' href='#{vh nd.node_path}' />" else nil end end.compact.join("\n") include_in_head(output) render_paragraph :nothing => true end end
cykod/Webiva
vendor/modules/feed/app/controllers/feed/rss_renderer.rb
Ruby
mit
5,899
'use strict'; var Crawler = require('../lib/crawler'); var expect = require('chai').expect; var jsdom = require('jsdom'); var httpbinHost = 'localhost:8000'; describe('Errors', function() { describe('timeout', function() { var c = new Crawler({ timeout : 1500, retryTimeout : 1000, retries : 2, jquery : false }); it('should return a timeout error after ~5sec', function(done) { // override default mocha test timeout of 2000ms this.timeout(10000); c.queue({ uri : 'http://'+httpbinHost+'/delay/15', callback : function(error, response) //noinspection BadExpressionStatementJS,BadExpressionStatementJS { expect(error).not.to.be.null; expect(error.code).to.equal("ETIMEDOUT"); //expect(response).to.be.undefined; done(); } }); }); it('should retry after a first timeout', function(done) { // override default mocha test timeout of 2000ms this.timeout(15000); c.queue({ uri : 'http://'+httpbinHost+'/delay/1', callback : function(error, response) { expect(error).to.be.null; expect(response.body).to.be.ok; done(); } }); }); }); describe('error status code', function() { var c = new Crawler({ jQuery : false }); it('should not return an error on status code 400 (Bad Request)', function(done) { c.queue({ uri: 'http://' + httpbinHost + '/status/400', callback: function(error, response, $){ expect(error).to.be.null; expect(response.statusCode).to.equal(400); done(); } }); }); it('should not return an error on status code 401 (Unauthorized)', function(done) { c.queue({ uri: 'http://' + httpbinHost + '/status/401', callback: function(error, response, $){ expect(error).to.be.null; expect(response.statusCode).to.equal(401); done(); } }); }); it('should not return an error on status code 403 (Forbidden)', function(done) { c.queue({ uri: 'http://' + httpbinHost + '/status/403', callback: function(error, response, $){ expect(error).to.be.null; expect(response.statusCode).to.equal(403); done(); } }); }); it('should not return an error on a 404', function(done) { c.queue({ uri : 
'http://'+httpbinHost+'/status/404', callback : function(error, response) { expect(error).to.be.null; expect(response.statusCode).to.equal(404); done(); } }); }); it('should not return an error on a 500', function(done) { c.queue({ uri : 'http://'+httpbinHost+'/status/500', callback : function(error, response) { expect(error).to.be.null; expect(response.statusCode).to.equal(500); done(); } }); }); it('should not failed on empty response', function(done) { c.queue({ uri : 'http://'+httpbinHost+'/status/204', callback : function(error) { expect(error).to.be.null; done(); } }); }); it('should not failed on a malformed html if jquery is false', function(done) { c.queue({ html : '<html><p>hello <div>dude</p></html>', callback : function(error, response) { expect(error).to.be.null; expect(response).not.to.be.null; done(); } }); }); it('should not return an error on a malformed html if jQuery is jsdom', function(done) { c.queue({ html : '<html><p>hello <div>dude</p></html>', jQuery : jsdom, callback : function(error, response) { expect(error).to.be.null; expect(response).not.to.be.undefined; done(); } }); }); }); });
shedar/node-webcrawler
tests/errorHandling.test.js
JavaScript
mit
4,772
using System; using Xamarin.Forms; using System.Collections.ObjectModel; using System.Threading.Tasks; using System.Linq; namespace MyShop { public class StoresViewModel : BaseViewModel { readonly IDataStore dataStore; public ObservableCollection<Store> Stores { get; set;} public ObservableCollection<Grouping<string, Store>> StoresGrouped { get; set; } public bool ForceSync { get; set; } public StoresViewModel (Page page) : base (page) { Title = "Locations"; dataStore = DependencyService.Get<IDataStore> (); Stores = new ObservableCollection<Store> (); StoresGrouped = new ObservableCollection<Grouping<string, Store>> (); } public async Task DeleteStore(Store store) { if (IsBusy) return; IsBusy = true; try { await dataStore.RemoveStoreAsync(store); Stores.Remove(store); Sort(); } catch(Exception ex) { page.DisplayAlert ("Uh Oh :(", "Unable to remove store, please try again", "OK"); Xamarin.Insights.Report (ex); } finally { IsBusy = false; } } private Command getStoresCommand; public Command GetStoresCommand { get { return getStoresCommand ?? 
(getStoresCommand = new Command (async () => await ExecuteGetStoresCommand (), () => {return !IsBusy;})); } } private async Task ExecuteGetStoresCommand() { if (IsBusy) return; if (ForceSync) Settings.LastSync = DateTime.Now.AddDays (-30); IsBusy = true; GetStoresCommand.ChangeCanExecute (); try{ Stores.Clear(); var stores = await dataStore.GetStoresAsync (); foreach(var store in stores) { if(string.IsNullOrWhiteSpace(store.Image)) store.Image = "http://refractored.com/images/wc_small.jpg"; Stores.Add(store); } Sort(); } catch(Exception ex) { page.DisplayAlert ("Uh Oh :(", "Unable to gather stores.", "OK"); Xamarin.Insights.Report (ex); } finally { IsBusy = false; GetStoresCommand.ChangeCanExecute (); } } private void Sort() { StoresGrouped.Clear(); var sorted = from store in Stores orderby store.Country, store.City group store by store.Country into storeGroup select new Grouping<string, Store>(storeGroup.Key, storeGroup); foreach(var sort in sorted) StoresGrouped.Add(sort); } } }
usmanm77/MyShoppe
MyShop/ViewModels/StoresViewModel.cs
C#
mit
2,325
"use strict"; var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); Object.defineProperty(exports, "__esModule", { value: true }); exports.default = void 0; var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon")); var _jsxRuntime = require("react/jsx-runtime"); var _default = (0, _createSvgIcon.default)( /*#__PURE__*/(0, _jsxRuntime.jsx)("path", { d: "M21 3H3C2 3 1 4 1 5v14c0 1.1.9 2 2 2h18c1 0 2-1 2-2V5c0-1-1-2-2-2zM5 17l3.5-4.5 2.5 3.01L14.5 11l4.5 6H5z" }), 'PhotoSizeSelectActual'); exports.default = _default;
oliviertassinari/material-ui
packages/mui-icons-material/lib/PhotoSizeSelectActual.js
JavaScript
mit
582
#!/usr/bin/python # convert LLVM GenAsmWriter.inc for Capstone disassembler. # by Nguyen Anh Quynh, 2019 import sys if len(sys.argv) == 1: print("Syntax: %s <GenAsmWriter.inc> <Output-GenAsmWriter.inc> <Output-GenRegisterName.inc> <arch>" %sys.argv[0]) sys.exit(1) arch = sys.argv[4] f = open(sys.argv[1]) lines = f.readlines() f.close() f1 = open(sys.argv[2], 'w+') f2 = open(sys.argv[3], 'w+') f1.write("/* Capstone Disassembly Engine, http://www.capstone-engine.org */\n") f1.write("/* By Nguyen Anh Quynh <aquynh@gmail.com>, 2013-2019 */\n") f1.write("\n") f2.write("/* Capstone Disassembly Engine, http://www.capstone-engine.org */\n") f2.write("/* By Nguyen Anh Quynh <aquynh@gmail.com>, 2013-2019 */\n") f2.write("\n") need_endif = False in_getRegisterName = False in_printAliasInstr = False fragment_no = None skip_printing = False skip_line = 0 skip_count = 0 def replace_getOp(line): line2 = line if 'MI->getOperand(0)' in line: line2 = line.replace('MI->getOperand(0)', 'MCInst_getOperand(MI, 0)') elif 'MI->getOperand(1)' in line: line2 = line.replace('MI->getOperand(1)', 'MCInst_getOperand(MI, 1)') elif 'MI->getOperand(2)' in line: line2 = line.replace('MI->getOperand(2)', 'MCInst_getOperand(MI, 2)') elif 'MI->getOperand(3)' in line: line2 = line.replace('MI->getOperand(3)', 'MCInst_getOperand(MI, 3)') elif 'MI->getOperand(4)' in line: line2 = line.replace('MI->getOperand(4)', 'MCInst_getOperand(MI, 4)') elif 'MI->getOperand(5)' in line: line2 = line.replace('MI->getOperand(5)', 'MCInst_getOperand(MI, 5)') elif 'MI->getOperand(6)' in line: line2 = line.replace('MI->getOperand(6)', 'MCInst_getOperand(MI, 6)') elif 'MI->getOperand(7)' in line: line2 = line.replace('MI->getOperand(7)', 'MCInst_getOperand(MI, 7)') elif 'MI->getOperand(8)' in line: line2 = line.replace('MI->getOperand(8)', 'MCInst_getOperand(MI, 8)') return line2 def replace_getReg(line): line2 = line if 'MI->getOperand(0).getReg()' in line: line2 = line.replace('MI->getOperand(0).getReg()', 
'MCOperand_getReg(MCInst_getOperand(MI, 0))') elif 'MI->getOperand(1).getReg()' in line: line2 = line.replace('MI->getOperand(1).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 1))') elif 'MI->getOperand(2).getReg()' in line: line2 = line.replace('MI->getOperand(2).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 2))') elif 'MI->getOperand(3).getReg()' in line: line2 = line.replace('MI->getOperand(3).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 3))') elif 'MI->getOperand(4).getReg()' in line: line2 = line.replace('MI->getOperand(4).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 4))') elif 'MI->getOperand(5).getReg()' in line: line2 = line.replace('MI->getOperand(5).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 5))') elif 'MI->getOperand(6).getReg()' in line: line2 = line.replace('MI->getOperand(6).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 6))') elif 'MI->getOperand(7).getReg()' in line: line2 = line.replace('MI->getOperand(7).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 7))') elif 'MI->getOperand(8).getReg()' in line: line2 = line.replace('MI->getOperand(8).getReg()', 'MCOperand_getReg(MCInst_getOperand(MI, 8))') return line2 # extract param between text() # MRI.getRegClass(AArch64::GPR32spRegClassID).contains(MI->getOperand(1).getReg())) def extract_paren(line, text): i = line.index(text) return line[line.index('(', i)+1 : line.index(')', i)] # extract text between <> # printSVERegOp<'q'> def extract_brackets(line): if '<' in line: return line[line.index('<')+1 : line.index('>')] else: return '' # delete text between <>, including <> # printSVERegOp<'q'> def del_brackets(line): if '<' in line: return line[:line.index('<')] + line[line.index('>') + 1:] else: return line def print_line(line): line = line.replace('::', '_') line = line.replace('nullptr', 'NULL') if not skip_printing: if in_getRegisterName: f2.write(line + "\n") else: f1.write(line + "\n") for line in lines: line = line.rstrip() #print("@", line) # skip Alias if 
arch.upper() == 'X86': if 'PRINT_ALIAS_INSTR' in line: # done break if skip_line: skip_count += 1 if skip_count <= skip_line: # skip this line continue else: # skip enough number of lines, reset counters skip_line = 0 skip_count = 0 if "::printInstruction" in line: if arch.upper() in ('AARCH64', 'ARM64'): #print_line("static void printInstruction(MCInst *MI, SStream *O, MCRegisterInfo *MRI)\n{") print_line("static void printInstruction(MCInst *MI, SStream *O)\n{") else: print_line("static void printInstruction(MCInst *MI, SStream *O)\n{") elif 'const char *AArch64InstPrinter::' in line: continue elif 'getRegisterName(' in line: if 'unsigned AltIdx' in line: print_line("static const char *getRegisterName(unsigned RegNo, unsigned AltIdx)\n{") else: print_line("static const char *getRegisterName(unsigned RegNo)\n{") elif 'getRegisterName' in line: in_getRegisterName = True print_line(line) elif '::printAliasInstr' in line: if arch.upper() in ('AARCH64', 'PPC'): print_line("static char *printAliasInstr(MCInst *MI, SStream *OS, MCRegisterInfo *MRI)\n{") print_line(' #define GETREGCLASS_CONTAIN(_class, _reg) MCRegisterClass_contains(MCRegisterInfo_getRegClass(MRI, _class), MCOperand_getReg(MCInst_getOperand(MI, _reg)))') else: print_line("static bool printAliasInstr(MCInst *MI, SStream *OS)\n{") print_line(" unsigned int I = 0, OpIdx, PrintMethodIdx;") print_line(" char *tmpString;") in_printAliasInstr = True elif 'STI.getFeatureBits()[' in line: if arch.upper() == 'ARM': line2 = line.replace('STI.getFeatureBits()[', 'ARM_getFeatureBits(MI->csh->mode, ') elif arch.upper() == 'AARCH64': line2 = line.replace('STI.getFeatureBits()[', 'AArch64_getFeatureBits(') line2 = line2.replace(']', ')') print_line(line2) elif ', STI, ' in line: line2 = line.replace(', STI, ', ', ') if 'printSVELogicalImm<' in line: if 'int16' in line: line2 = line2.replace('printSVELogicalImm', 'printSVELogicalImm16') line2 = line2.replace('<int16_t>', '') elif 'int32' in line: line2 = 
line2.replace('printSVELogicalImm', 'printSVELogicalImm32') line2 = line2.replace('<int32_t>', '') else: line2 = line2.replace('printSVELogicalImm', 'printSVELogicalImm64') line2 = line2.replace('<int64_t>', '') if 'MI->getOperand(' in line: line2 = replace_getOp(line2) # C++ template if 'printPrefetchOp' in line2: param = extract_brackets(line2) if param == '': param = 'false' line2 = del_brackets(line2) line2 = line2.replace(', O);', ', O, %s);' %param) line2 = line2.replace(', OS);', ', OS, %s);' %param) elif '<false>' in line2: line2 = line2.replace('<false>', '') line2 = line2.replace(', O);', ', O, false);') line2 = line2.replace('STI, ', '') elif '<true>' in line: line2 = line2.replace('<true>', '') line2 = line2.replace(', O);', ', O, true);') line2 = line2.replace('STI, ', '') elif 'printAdrLabelOperand' in line: # C++ template if '<0>' in line: line2 = line2.replace('<0>', '') line2 = line2.replace(', O);', ', O, 0);') elif '<1>' in line: line2 = line2.replace('<1>', '') line2 = line2.replace(', O);', ', O, 1);') elif '<2>' in line: line2 = line2.replace('<2>', '') line2 = line2.replace(', O);', ', O, 2);') elif 'printImm8OptLsl' in line2: param = extract_brackets(line2) line2 = del_brackets(line2) if '8' in param or '16' in param or '32' in param: line2 = line2.replace('printImm8OptLsl', 'printImm8OptLsl32') elif '64' in param: line2 = line2.replace('printImm8OptLsl', 'printImm8OptLsl64') elif 'printLogicalImm' in line2: param = extract_brackets(line2) line2 = del_brackets(line2) if '8' in param or '16' in param or '32' in param: line2 = line2.replace('printLogicalImm', 'printLogicalImm32') elif '64' in param: line2 = line2.replace('printLogicalImm', 'printLogicalImm64') elif 'printSVERegOp' in line2 or 'printGPRSeqPairsClassOperand' in line2 or 'printTypedVectorList' in line2 or 'printPostIncOperand' in line2 or 'printImmScale' in line2 or 'printRegWithShiftExtend' in line2 or 'printUImm12Offset' in line2 or 'printExactFPImm' in line2 or 
'printMemExtend' in line2 or 'printZPRasFPR' in line2: param = extract_brackets(line2) if param == '': param = '0' line2 = del_brackets(line2) line2 = line2.replace(', O);', ', O, %s);' %param) line2 = line2.replace(', OS);', ', OS, %s);' %param) elif 'printComplexRotationOp' in line: # printComplexRotationOp<90, 0>(MI, 5, STI, O); bracket_content = line2[line2.index('<') + 1 : line2.index('>')] line2 = line2.replace('<' + bracket_content + '>', '') line2 = line2.replace(' O);', ' O, %s);' %bracket_content) print_line(line2) elif "static const char AsmStrs[]" in line: print_line("#ifndef CAPSTONE_DIET") print_line(" static const char AsmStrs[] = {") need_endif = True elif "static const char AsmStrsNoRegAltName[]" in line: print_line("#ifndef CAPSTONE_DIET") print_line(" static const char AsmStrsNoRegAltName[] = {") need_endif = True elif line == ' O << "\\t";': print_line(" unsigned int opcode = MCInst_getOpcode(MI);") print_line(' // printf("opcode = %u\\n", opcode);'); elif 'MI->getOpcode()' in line: if 'switch' in line: line2 = line.replace('MI->getOpcode()', 'MCInst_getOpcode(MI)') else: line2 = line.replace('MI->getOpcode()', 'opcode') print_line(line2) elif 'O << ' in line: if '"' in line: line2 = line.lower() line2 = line2.replace('o << ', 'SStream_concat0(O, '); else: line2 = line.replace('O << ', 'SStream_concat0(O, '); line2 = line2.replace("'", '"') line2 = line2.replace(';', ');') if '" : "' in line2: # "segment : offset" in X86 line2 = line2.replace('" : "', '":"') # ARM print_line(line2) if '", #0"' in line2: print_line(' op_addImm(MI, 0);') if '", #1"' in line2: print_line(' op_addImm(MI, 1);') # PowerPC if '", 268"' in line2: print_line(' op_addImm(MI, 268);') elif '", 256"' in line2: print_line(' op_addImm(MI, 256);') elif '", 0, "' in line2 or '", 0"' in line2: print_line(' op_addImm(MI, 0);') elif '", -1"' in line2: print_line(' op_addImm(MI, -1);') if '[' in line2: if not '[]' in line2: print_line(' set_mem_access(MI, true);') if ']' in line2: 
if not '[]' in line2: print_line(' set_mem_access(MI, false);') if '".f64\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F64);') elif '".f32\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F32);') elif '".f16\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F16);') elif '".s64\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S64);') elif '".s32\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S32);') elif '".s16\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S16);') elif '".s8\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S8);') elif '".u64\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U64);') elif '".u32\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U32);') elif '".u16\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U16);') elif '".u8\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U8);') elif '".i64\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_I64);') elif '".i32\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_I32);') elif '".i16\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_I16);') elif '".i8\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_I8);') elif '".f16.f64\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F16F64);') elif '".f64.f16\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F64F16);') elif '".f16.f32\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F16F32);') elif '".f32.f16\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F32F16);') elif '".f64.f32\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F64F32);') elif '".f32.f64\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F32F64);') elif '".s32.f32\\t"' in line2: 
print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S32F32);') elif '".f32.s32\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F32S32);') elif '".u32.f32\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U32F32);') elif '".f32.u32\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F32U32);') elif '".p8\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_P8);') elif '".f64.s16\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F64S16);') elif '".s16.f64\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S16F64);') elif '".f32.s16\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F32S16);') elif '".s16.f32\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S16F32);') elif '".f64.s32\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F64S32);') elif '".s32.f64\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_S32F64);') elif '".f64.u16\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F64U16);') elif '".u16.f64\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U16F64);') elif '".f32.u16\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F32U16);') elif '".u16.f32\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U16F32);') elif '".f64.u32\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F64U32);') elif '".u32.f64\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U32F64);') elif '".f16.u32\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F16U32);') elif '".u32.f16\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U32F16);') elif '".f16.u16\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_F16U16);') elif '".u16.f16\\t"' in line2: print_line(' ARM_addVectorDataType(MI, ARM_VECTORDATA_U16F16);') elif '"\\tlr"' in 
line2: print_line(' ARM_addReg(MI, ARM_REG_LR);') elif '"\\tapsr_nzcv, fpscr"' in line2: print_line(' ARM_addReg(MI, ARM_REG_APSR_NZCV);') print_line(' ARM_addReg(MI, ARM_REG_FPSCR);') elif '"\\tpc, lr"' in line2: print_line(' ARM_addReg(MI, ARM_REG_PC);') print_line(' ARM_addReg(MI, ARM_REG_LR);') elif '"\\tfpscr, "' in line2: print_line(' ARM_addReg(MI, ARM_REG_FPSCR);') elif '"\\tfpexc, "' in line2: print_line(' ARM_addReg(MI, ARM_REG_FPEXC);') elif '"\\tfpinst, "' in line2: print_line(' ARM_addReg(MI, ARM_REG_FPINST);') elif '"\\tfpinst2, "' in line2: print_line(' ARM_addReg(MI, ARM_REG_FPINST2);') elif '"\\tfpsid, "' in line2: print_line(' ARM_addReg(MI, ARM_REG_FPSID);') elif '"\\tsp, "' in line2: print_line(' ARM_addReg(MI, ARM_REG_SP);') elif '"\\tsp!, "' in line2: print_line(' ARM_addReg(MI, ARM_REG_SP);') elif '", apsr"' in line2: print_line(' ARM_addReg(MI, ARM_REG_APSR);') elif '", spsr"' in line2: print_line(' ARM_addReg(MI, ARM_REG_SPSR);') elif '", fpscr"' in line2: print_line(' ARM_addReg(MI, ARM_REG_FPSCR);') elif '", fpscr"' in line2: print_line(' ARM_addReg(MI, ARM_REG_FPSCR);') elif '", fpexc"' in line2: print_line(' ARM_addReg(MI, ARM_REG_FPEXC);') elif '", fpinst"' in line2: print_line(' ARM_addReg(MI, ARM_REG_FPINST);') elif '", fpinst2"' in line2: print_line(' ARM_addReg(MI, ARM_REG_FPINST2);') elif '", fpsid"' in line2: print_line(' ARM_addReg(MI, ARM_REG_FPSID);') elif '", mvfr0"' in line2: print_line(' ARM_addReg(MI, ARM_REG_MVFR0);') elif '", mvfr1"' in line2: print_line(' ARM_addReg(MI, ARM_REG_MVFR1);') elif '", mvfr2"' in line2: print_line(' ARM_addReg(MI, ARM_REG_MVFR2);') elif '.8\\t' in line2: print_line(' ARM_addVectorDataSize(MI, 8);') elif '.16\\t' in line2: print_line(' ARM_addVectorDataSize(MI, 16);') elif '.32\\t' in line2: print_line(' ARM_addVectorDataSize(MI, 32);') elif '.64\\t' in line2: print_line(' ARM_addVectorDataSize(MI, 64);') elif '" ^"' in line2: print_line(' ARM_addUserMode(MI);') if '.16b' in line2: 
print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_16B);') elif '.8b' in line2: print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_8B);') elif '.4b' in line2: print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_4B);') elif '.b' in line2: print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_1B);') elif '.8h' in line2: print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_8H);') elif '.4h' in line2: print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_4H);') elif '.2h' in line2: print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_2H);') elif '.h' in line2: print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_1H);') elif '.4s' in line2: print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_4S);') elif '.2s' in line2: print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_2S);') elif '.s' in line2: print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_1S);') elif '.2d' in line2: print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_2D);') elif '.1d' in line2: print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_1D);') elif '.1q' in line2: print_line(' arm64_op_addVectorArrSpecifier(MI, ARM64_VAS_1Q);') if '#0.0' in line2: print_line(' arm64_op_addFP(MI, 0);') elif '#0' in line2: print_line(' arm64_op_addImm(MI, 0);') elif '#8' in line2: print_line(' arm64_op_addImm(MI, 8);') elif '#16' in line2: print_line(' arm64_op_addImm(MI, 16);') elif '#32' in line2: print_line(' arm64_op_addImm(MI, 32);') # X86 if '", %rax"' in line2 or '", rax"' in line2: print_line(' op_addReg(MI, X86_REG_RAX);') elif '", %eax"' in line2 or '", eax"' in line2: print_line(' op_addReg(MI, X86_REG_EAX);') elif '", %ax"' in line2 or '", ax"' in line2: print_line(' op_addReg(MI, X86_REG_AX);') elif '", %al"' in line2 or '", al"' in line2: print_line(' op_addReg(MI, X86_REG_AL);') elif '", %dx"' in line2 or '", dx"' in line2: print_line(' op_addReg(MI, X86_REG_DX);') elif '", %st(0)"' in line2 or '", st(0)"' in line2: print_line(' 
op_addReg(MI, X86_REG_ST0);') elif '", 1"' in line2: print_line(' op_addImm(MI, 1);') elif '", cl"' in line2: print_line(' op_addReg(MI, X86_REG_CL);') elif '"{1to2}, "' in line2: print_line(' op_addAvxBroadcast(MI, X86_AVX_BCAST_2);') elif '"{1to4}, "' in line2: print_line(' op_addAvxBroadcast(MI, X86_AVX_BCAST_4);') elif '"{1to8}, "' in line2: print_line(' op_addAvxBroadcast(MI, X86_AVX_BCAST_8);') elif '"{1to16}, "' in line2: print_line(' op_addAvxBroadcast(MI, X86_AVX_BCAST_16);') elif '{z}{sae}' in line2: print_line(' op_addAvxSae(MI);') print_line(' op_addAvxZeroOpmask(MI);') elif ('{z}' in line2): print_line(' op_addAvxZeroOpmask(MI);') elif '{sae}' in line2: print_line(' op_addAvxSae(MI);') elif 'llvm_unreachable("Invalid command number.");' in line: line2 = line.replace('llvm_unreachable("Invalid command number.");', '// unreachable') print_line(line2) elif ('assert(' in line) or ('assert (' in line): pass elif 'Invalid alt name index' in line: pass elif '::' in line and 'case ' in line: #print_line(line2) print_line(line) elif 'MI->getNumOperands()' in line: line2 = line.replace('MI->getNumOperands()', 'MCInst_getNumOperands(MI)') print_line(line2) elif 'const MCOperand &MCOp' in line: line2 = line.replace('const MCOperand &MCOp', 'MCOperand *MCOp') print_line(line2) elif 'MI->getOperand(0).isImm()' in line: line2 = line.replace('MI->getOperand(0).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 0))') print_line(line2) elif 'MI->getOperand(1).isImm()' in line: line2 = line.replace('MI->getOperand(1).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 1))') print_line(line2) elif 'MI->getOperand(2).isImm()' in line: line2 = line.replace('MI->getOperand(2).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 2))') print_line(line2) elif 'MI->getOperand(3).isImm()' in line: line2 = line.replace('MI->getOperand(3).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 3))') print_line(line2) elif 'MI->getOperand(4).isImm()' in line: line2 = 
line.replace('MI->getOperand(4).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 4))') print_line(line2) elif 'MI->getOperand(5).isImm()' in line: line2 = line.replace('MI->getOperand(5).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 5))') print_line(line2) elif 'MI->getOperand(6).isImm()' in line: line2 = line.replace('MI->getOperand(6).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 6))') print_line(line2) elif 'MI->getOperand(7).isImm()' in line: line2 = line.replace('MI->getOperand(7).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 7))') print_line(line2) elif 'MI->getOperand(8).isImm()' in line: line2 = line.replace('MI->getOperand(8).isImm()', 'MCOperand_isImm(MCInst_getOperand(MI, 8))') print_line(line2) elif 'MI->getOperand(0).getImm()' in line: line2 = line.replace('MI->getOperand(0).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 0))') print_line(line2) elif 'MI->getOperand(1).getImm()' in line: line2 = line.replace('MI->getOperand(1).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 1))') print_line(line2) elif 'MI->getOperand(2).getImm()' in line: line2 = line.replace('MI->getOperand(2).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 2))') print_line(line2) elif 'MI->getOperand(3).getImm()' in line: line2 = line.replace('MI->getOperand(3).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 3))') print_line(line2) elif 'MI->getOperand(4).getImm()' in line: line2 = line.replace('MI->getOperand(4).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 4))') print_line(line2) elif 'MI->getOperand(5).getImm()' in line: line2 = line.replace('MI->getOperand(5).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 5))') print_line(line2) elif 'MI->getOperand(6).getImm()' in line: line2 = line.replace('MI->getOperand(6).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 6))') print_line(line2) elif 'MI->getOperand(7).getImm()' in line: line2 = line.replace('MI->getOperand(7).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 7))') print_line(line2) elif 
'MI->getOperand(8).getImm()' in line: line2 = line.replace('MI->getOperand(8).getImm()', 'MCOperand_getImm(MCInst_getOperand(MI, 8))') print_line(line2) elif 'MRI.getRegClass(' in line: classid = extract_paren(line, 'getRegClass(') operand = extract_paren(line, 'getOperand') line2 = line.replace('MI->getNumOperands()', 'MCInst_getNumOperands(MI)') line2 = ' GETREGCLASS_CONTAIN(%s, %s)' %(classid, operand) if line.endswith('())) {'): line2 += ') {' elif line.endswith(' {'): line2 += ' {' elif line.endswith(' &&'): line2 += ' &&' print_line(line2) elif 'MI->getOperand(' in line and 'isReg' in line: operand = extract_paren(line, 'getOperand') line2 = ' MCOperand_isReg(MCInst_getOperand(MI, %s))' %(operand) # MI->getOperand(1).isReg() && if line.endswith(' {'): line2 += ' {' elif line.endswith(' &&'): line2 += ' &&' print_line(line2) elif 'MI->getOperand(' in line and 'getReg' in line: line2 = replace_getReg(line) # one more time line2 = replace_getReg(line2) print_line(line2) elif ' return false;' in line and in_printAliasInstr: print_line(' return NULL;') elif 'MCOp.isImm()' in line: line2 = line.replace('MCOp.isImm()', 'MCOperand_isImm(MCOp)') print_line(line2) elif 'MCOp.getImm()' in line: line2 = line.replace('MCOp.getImm()', 'MCOperand_getImm(MCOp)') if 'int64_t Val =' in line: line2 = line2.replace('int64_t Val =', 'Val =') print_line(line2) elif 'isSVEMaskOfIdenticalElements<' in line: if 'int8' in line: line2 = line.replace('isSVEMaskOfIdenticalElements', 'isSVEMaskOfIdenticalElements8') line2 = line2.replace('<int8_t>', '') elif 'int16' in line: line2 = line.replace('isSVEMaskOfIdenticalElements', 'isSVEMaskOfIdenticalElements16') line2 = line2.replace('<int16_t>', '') elif 'int32' in line: line2 = line.replace('isSVEMaskOfIdenticalElements', 'isSVEMaskOfIdenticalElements32') line2 = line2.replace('<int32_t>', '') else: line2 = line.replace('isSVEMaskOfIdenticalElements', 'isSVEMaskOfIdenticalElements64') line2 = line2.replace('<int64_t>', '') 
print_line(line2) elif 'switch (PredicateIndex) {' in line: print_line(' int64_t Val;') print_line(line) elif 'unsigned I = 0;' in line and in_printAliasInstr: print_line(""" tmpString = cs_strdup(AsmString); while (AsmString[I] != ' ' && AsmString[I] != '\\t' && AsmString[I] != '$' && AsmString[I] != '\\0') ++I; tmpString[I] = 0; SStream_concat0(OS, tmpString); if (AsmString[I] != '\\0') { if (AsmString[I] == ' ' || AsmString[I] == '\\t') { SStream_concat0(OS, " "); ++I; } do { if (AsmString[I] == '$') { ++I; if (AsmString[I] == (char)0xff) { ++I; OpIdx = AsmString[I++] - 1; PrintMethodIdx = AsmString[I++] - 1; printCustomAliasOperand(MI, OpIdx, PrintMethodIdx, OS); } else printOperand(MI, (unsigned)(AsmString[I++]) - 1, OS); } else { SStream_concat1(OS, AsmString[I++]); } } while (AsmString[I] != '\\0'); } return tmpString; } """) in_printAliasInstr = False # skip next few lines skip_printing = True elif '::printCustomAliasOperand' in line: # print again skip_printing = False print_line('static void printCustomAliasOperand(') elif 'const MCSubtargetInfo &STI' in line: pass elif 'const MCInst *MI' in line: line2 = line.replace('const MCInst *MI', 'MCInst *MI') print_line(line2) elif 'llvm_unreachable("' in line: if 'default: ' in line: print_line(' default:') elif 'llvm_unreachable("Unknown MCOperandPredicate kind")' in line: print_line(' return false; // never reach') else: pass elif 'raw_ostream &' in line: line2 = line.replace('raw_ostream &', 'SStream *') if line2.endswith(' {'): line2 = line2.replace(' {', '\n{') print_line(line2) elif 'printPredicateOperand(' in line and 'STI, ' in line: line2 = line.replace('STI, ', '') print_line(line2) elif '// Fragment ' in line: # // Fragment 0 encoded into 6 bits for 51 unique commands. 
tmp = line.strip().split(' ') fragment_no = tmp[2] print_line(line) elif ('switch ((' in line or 'if ((' in line) and 'Bits' in line: # switch ((Bits >> 14) & 63) { bits = line.strip() bits = bits.replace('switch ', '') bits = bits.replace('if ', '') bits = bits.replace('{', '') bits = bits.strip() print_line(' // printf("Fragment %s: %%"PRIu64"\\n", %s);' %(fragment_no, bits)) print_line(line) elif not skip_printing: print_line(line) if line == ' };': if need_endif and not in_getRegisterName: # endif only for AsmStrs when we are not inside getRegisterName() print_line("#endif") need_endif = False elif 'return AsmStrs+RegAsmOffset[RegNo-1];' in line: if in_getRegisterName: # return NULL for register name on Diet mode print_line("#else") print_line(" return NULL;") print_line("#endif") print_line("}") need_endif = False in_getRegisterName = False # skip 1 line skip_line = 1 elif line == ' }': # ARM64 if in_getRegisterName: # return NULL for register name on Diet mode print_line("#else") print_line(" return NULL;") print_line("#endif") print_line("}") need_endif = False in_getRegisterName = False # skip 1 line skip_line = 1 elif 'default:' in line: # ARM64 if in_getRegisterName: # get the size of RegAsmOffsetvreg[] print_line(" return (const char *)(sizeof(RegAsmOffsetvreg)/sizeof(RegAsmOffsetvreg[0]));") f1.close() f2.close()
capstone-rust/capstone-rs
capstone-sys/capstone/suite/synctools/asmwriter.py
Python
mit
33,268
const PlotCard = require('../../plotcard.js'); class RiseOfTheKraken extends PlotCard { setupCardAbilities() { this.interrupt({ when: { onUnopposedWin: event => event.challenge.winner === this.controller }, handler: () => { this.game.addMessage('{0} uses {1} to gain an additional power from winning an unopposed challenge', this.controller, this); this.game.addPower(this.controller, 1); } }); } } RiseOfTheKraken.code = '02012'; module.exports = RiseOfTheKraken;
cryogen/gameteki
server/game/cards/02.1-TtB/RiseOfTheKraken.js
JavaScript
mit
588
using System.Drawing; using System.Drawing.Drawing2D; static class DrawHelper { public static GraphicsPath CreateRoundRect(float x, float y, float width, float height, float radius) { GraphicsPath gp = new GraphicsPath(); gp.AddLine(x + radius, y, x + width - (radius * 2), y); gp.AddArc(x + width - (radius * 2), y, radius * 2, radius * 2, 270, 90); gp.AddLine(x + width, y + radius, x + width, y + height - (radius * 2)); gp.AddArc(x + width - (radius * 2), y + height - (radius * 2), radius * 2, radius * 2, 0, 90); gp.AddLine(x + width - (radius * 2), y + height, x + radius, y + height); gp.AddArc(x, y + height - (radius * 2), radius * 2, radius * 2, 90, 90); gp.AddLine(x, y + height - (radius * 2), x, y + radius); gp.AddArc(x, y, radius * 2, radius * 2, 180, 90); gp.CloseFigure(); return gp; } public static GraphicsPath CreateUpRoundRect(float x, float y, float width, float height, float radius) { GraphicsPath gp = new GraphicsPath(); gp.AddLine(x + radius, y, x + width - (radius * 2), y); gp.AddArc(x + width - (radius * 2), y, radius * 2, radius * 2, 270, 90); gp.AddLine(x + width, y + radius, x + width, y + height - (radius * 2) + 1); gp.AddArc(x + width - (radius * 2), y + height - (radius * 2), radius * 2, 2, 0, 90); gp.AddLine(x + width, y + height, x + radius, y + height); gp.AddArc(x, y + height - (radius * 2) + 1, radius * 2, 1, 90, 90); gp.AddLine(x, y + height, x, y + radius); gp.AddArc(x, y, radius * 2, radius * 2, 180, 90); gp.CloseFigure(); return gp; } public static GraphicsPath CreateLeftRoundRect(float x, float y, float width, float height, float radius) { GraphicsPath gp = new GraphicsPath(); gp.AddLine(x + radius, y, x + width - (radius * 2), y); gp.AddArc(x + width - (radius * 2), y, radius * 2, radius * 2, 270, 90); gp.AddLine(x + width, y + 0, x + width, y + height); gp.AddArc(x + width - (radius * 2), y + height - (1), radius * 2, 1, 0, 90); gp.AddLine(x + width - (radius * 2), y + height, x + radius, y + height); gp.AddArc(x, y + height - 
(radius * 2), radius * 2, radius * 2, 90, 90); gp.AddLine(x, y + height - (radius * 2), x, y + radius); gp.AddArc(x, y, radius * 2, radius * 2, 180, 90); gp.CloseFigure(); return gp; } public static Color BlendColor(Color backgroundColor, Color frontColor) { double ratio = 0 / 255d; double invRatio = 1d - ratio; int r = (int)((backgroundColor.R * invRatio) + (frontColor.R * ratio)); int g = (int)((backgroundColor.G * invRatio) + (frontColor.G * ratio)); int b = (int)((backgroundColor.B * invRatio) + (frontColor.B * ratio)); return Color.FromArgb(r, g, b); } }
uit-cs217-g11/smart-search
statics/external_tool/RyeTokenizer/project/UIManagers/DrawHelper.cs
C#
mit
2,938
namespace Perspex.Controls { using Layout; public class RightDocker : Docker { public RightDocker(Size availableSize) : base(availableSize) { } public Rect GetDockingRect(Size sizeToDock, Margins margins, Alignments alignments) { var marginsCutout = margins.AsThickness(); var withoutMargins = OriginalRect.Deflate(marginsCutout); var finalRect = withoutMargins.AlignChild(sizeToDock, Alignment.End, alignments.Vertical); AccumulatedOffset += sizeToDock.Width; margins.HorizontalMargin = margins.HorizontalMargin.Offset(0, sizeToDock.Width); return finalRect; } } }
DavidKarlas/Perspex
src/Perspex.Controls/DockPanel/RightDocker.cs
C#
mit
705
class CreateCollectSalaries < ActiveRecord::Migration def change create_table :collect_salaries do |t| t.belongs_to :user t.decimal :money, :precision => 10, :scale => 2 t.date :collect_date t.string :notes t.timestamps null: false end end end
mumaoxi/contract_works_api
db/migrate/20160204101820_create_collect_salaries.rb
Ruby
mit
286
<?php /** * Amon: Integrate FuelPHP with Amon Exception & Logging * * @package Amon * @version v0.1 * @author Matthew McConnell * @license MIT License * @link http://github.com/maca134/fuelphp-amon */ Autoloader::add_core_namespace('Amon'); Autoloader::add_classes(array( 'Amon\\Error' => __DIR__ . '/classes/error.php', 'Amon\\Log' => __DIR__ . '/classes/log.php', 'Amon\\Amon_Data' => __DIR__ . '/classes/amon/data.php', 'Amon\\Amon_Request' => __DIR__ . '/classes/amon/request.php', 'Amon\\Amon_Request_Http' => __DIR__ . '/classes/amon/request/http.php', 'Amon\\Amon_Request_Zeromq' => __DIR__ . '/classes/amon/request/zeromq.php', ));
fuel-packages/fuel-amon
bootstrap.php
PHP
mit
708
#include "machineoperand.h" #include "basicblock.h" #include <cassert> #include <iostream> #include <new> using namespace TosLang::BackEnd; MachineOperand::MachineOperand() : mKind{ OperandKind::UNKNOWN } { } MachineOperand::MachineOperand(const unsigned op, const OperandKind kind) { assert((kind == OperandKind::IMMEDIATE) || (kind == OperandKind::STACK_SLOT) || (kind == OperandKind::REGISTER)); mKind = kind; switch (kind) { case OperandKind::IMMEDIATE: imm = op; break; case OperandKind::STACK_SLOT: stackslot = op; break; case OperandKind::REGISTER: reg = op; break; default: assert(false && "Unexpected error while building a virtual instruction"); break; } } std::ostream& TosLang::BackEnd::operator<<(std::ostream& stream, const MachineOperand& op) { switch (op.mKind) { case MachineOperand::OperandKind::IMMEDIATE: return stream << op.imm; case MachineOperand::OperandKind::STACK_SLOT: return stream << "S" << op.stackslot; case MachineOperand::OperandKind::REGISTER: return stream << "R" << op.reg; default: return stream; } }
faouellet/TosLang
TosLang/CodeGen/machineoperand.cpp
C++
mit
1,230
module Packet class Reactor include Core #set_thread_pool_size(20) attr_accessor :fd_writers, :msg_writers,:msg_reader attr_accessor :result_hash attr_accessor :live_workers #after_connection :provide_workers def self.server_logger= (log_file_name) @@server_logger = log_file_name end def self.run master_reactor_instance = new master_reactor_instance.result_hash = {} master_reactor_instance.live_workers = DoubleKeyedHash.new yield(master_reactor_instance) master_reactor_instance.load_workers master_reactor_instance.start_reactor end # end of run method def set_result_hash(hash) @result_hash = hash end def update_result(worker_key,result) @result_hash ||= {} @result_hash[worker_key.to_sym] = result end def handle_internal_messages(t_sock) sock_fd = t_sock.fileno worker_instance = @live_workers[sock_fd] begin raw_data = read_data(t_sock) worker_instance.receive_data(raw_data) if worker_instance.respond_to?(:receive_data) rescue DisconnectError => sock_error worker_instance.receive_data(sock_error.data) if worker_instance.respond_to?(:receive_data) remove_worker(t_sock) end end def remove_worker(t_sock) @live_workers.delete(t_sock.fileno) read_ios.delete(t_sock) end def delete_worker(worker_options = {}) worker_name = worker_options[:worker] worker_name_key = gen_worker_key(worker_name,worker_options[:worker_key]) worker_options[:method] = :exit @live_workers[worker_name_key].send_request(worker_options) end def load_workers worker_root = defined?(WORKER_ROOT) ? WORKER_ROOT : "#{PACKET_APP}/worker" t_workers = Dir["#{worker_root}/**/*.rb"] return if t_workers.empty? 
t_workers.each do |b_worker| worker_name = File.basename(b_worker,".rb") require worker_name worker_klass = Object.const_get(packet_classify(worker_name)) next if worker_klass.no_auto_load fork_and_load(worker_klass) end end def start_worker(worker_options = { }) worker_name = worker_options[:worker].to_s worker_name_key = gen_worker_key(worker_name,worker_options[:worker_key]) return if @live_workers[worker_name_key] worker_options.delete(:worker) begin require worker_name worker_klass = Object.const_get(packet_classify(worker_name)) fork_and_load(worker_klass,worker_options) rescue LoadError puts "no such worker #{worker_name}" return end end def enable_nonblock io f = io.fcntl(Fcntl::F_GETFL,0) io.fcntl(Fcntl::F_SETFL,Fcntl::O_NONBLOCK | f) end # method should use worker_key if provided in options hash. def fork_and_load(worker_klass,worker_options = { }) t_worker_name = worker_klass.worker_name worker_pimp = worker_klass.worker_proxy.to_s # socket from which master process is going to read master_read_end,worker_write_end = UNIXSocket.pair(Socket::SOCK_STREAM) # socket to which master process is going to write worker_read_end,master_write_end = UNIXSocket.pair(Socket::SOCK_STREAM) option_dump = Marshal.dump(worker_options) option_dump_length = option_dump.length master_write_end.write(option_dump) worker_name_key = gen_worker_key(t_worker_name,worker_options[:worker_key]) if(!(pid = fork)) [master_write_end,master_read_end].each { |x| x.close } [worker_read_end,worker_write_end].each { |x| enable_nonblock(x) } begin if(ARGV[0] == 'start' && Object.const_defined?(:SERVER_LOGGER)) redirect_io(SERVER_LOGGER) end rescue puts $!.backtrace end exec form_cmd_line(worker_read_end.fileno,worker_write_end.fileno,t_worker_name,option_dump_length) end Process.detach(pid) [master_read_end,master_write_end].each { |x| enable_nonblock(x) } if worker_pimp && !worker_pimp.empty? 
require worker_pimp pimp_klass = Object.const_get(packet_classify(worker_pimp)) @live_workers[worker_name_key,master_read_end.fileno] = pimp_klass.new(master_write_end,pid,self) else t_pimp = Packet::MetaPimp.new(master_write_end,pid,self) t_pimp.worker_key = worker_name_key t_pimp.worker_name = t_worker_name t_pimp.invokable_worker_methods = worker_klass.instance_methods @live_workers[worker_name_key,master_read_end.fileno] = t_pimp end worker_read_end.close worker_write_end.close read_ios << master_read_end end # end of fork_and_load method # Free file descriptors and # point them somewhere sensible # STDOUT/STDERR should go to a logfile def redirect_io(logfile_name) begin; STDIN.reopen "/dev/null"; rescue ::Exception; end if logfile_name begin STDOUT.reopen logfile_name, "a" STDOUT.sync = true rescue ::Exception begin; STDOUT.reopen "/dev/null"; rescue ::Exception; end end else begin; STDOUT.reopen "/dev/null"; rescue ::Exception; end end begin; STDERR.reopen STDOUT; rescue ::Exception; end STDERR.sync = true end def form_cmd_line *args min_string = "packet_worker_runner #{args[0]}:#{args[1]}:#{args[2]}:#{args[3]}" min_string << ":#{WORKER_ROOT}" if defined? WORKER_ROOT min_string << ":#{WORKER_LOAD_ENV}" if defined? WORKER_LOAD_ENV min_string end end # end of Reactor class end # end of Packet module
openwisp/packet-legacy
lib/packet/packet_master.rb
Ruby
mit
5,702
// // Reflect.cs: Creates Element classes from an instance // // Author: // Miguel de Icaza (miguel@gnome.org) // // Copyright 2010, Novell, Inc. // // Code licensed under the MIT X11 license // using System; using System.Collections; using System.Collections.Generic; using System.Reflection; using System.Text; using MonoTouch.UIKit; using System.Drawing; using MonoTouch.Foundation; namespace MonoTouch.Dialog { [AttributeUsage (AttributeTargets.Field | AttributeTargets.Property, Inherited=false)] public class EntryAttribute : Attribute { public EntryAttribute () : this (null) { } public EntryAttribute (string placeholder) { Placeholder = placeholder; } public string Placeholder; public UIKeyboardType KeyboardType; public UITextAutocorrectionType AutocorrectionType; public UITextAutocapitalizationType AutocapitalizationType; public UITextFieldViewMode ClearButtonMode; } [AttributeUsage (AttributeTargets.Field | AttributeTargets.Property, Inherited=false)] public class DateAttribute : Attribute { } [AttributeUsage (AttributeTargets.Field | AttributeTargets.Property, Inherited=false)] public class TimeAttribute : Attribute { } [AttributeUsage (AttributeTargets.Field | AttributeTargets.Property, Inherited=false)] public class CheckboxAttribute : Attribute {} [AttributeUsage (AttributeTargets.Field | AttributeTargets.Property, Inherited=false)] public class MultilineAttribute : Attribute {} [AttributeUsage (AttributeTargets.Field | AttributeTargets.Property, Inherited=false)] public class HtmlAttribute : Attribute {} [AttributeUsage (AttributeTargets.Field | AttributeTargets.Property, Inherited=false)] public class SkipAttribute : Attribute {} [AttributeUsage (AttributeTargets.Field | AttributeTargets.Property, Inherited=false)] public class PasswordAttribute : EntryAttribute { public PasswordAttribute (string placeholder) : base (placeholder) {} } [AttributeUsage (AttributeTargets.Field | AttributeTargets.Property, Inherited=false)] public class AlignmentAttribute : 
Attribute { public AlignmentAttribute (UITextAlignment alignment) { Alignment = alignment; } public UITextAlignment Alignment; } [AttributeUsage (AttributeTargets.Field | AttributeTargets.Property, Inherited=false)] public class RadioSelectionAttribute : Attribute { public string Target; public RadioSelectionAttribute (string target) { Target = target; } } [AttributeUsage (AttributeTargets.Field | AttributeTargets.Property, Inherited=false)] public class OnTapAttribute : Attribute { public OnTapAttribute (string method) { Method = method; } public string Method; } [AttributeUsage (AttributeTargets.Field | AttributeTargets.Property, Inherited=false)] public class CaptionAttribute : Attribute { public CaptionAttribute (string caption) { Caption = caption; } public string Caption; } [AttributeUsage (AttributeTargets.Field | AttributeTargets.Property, Inherited=false)] public class SectionAttribute : Attribute { public SectionAttribute () {} public SectionAttribute (string caption) { Caption = caption; } public SectionAttribute (string caption, string footer) { Caption = caption; Footer = footer; } public string Caption, Footer; } public class RangeAttribute : Attribute { public RangeAttribute (float low, float high) { Low = low; High = high; } public float Low, High; public bool ShowCaption; } public class BindingContext : IDisposable { public RootElement Root; Dictionary<Element,MemberAndInstance> mappings; class MemberAndInstance { public MemberAndInstance (MemberInfo mi, object o) { Member = mi; Obj = o; } public MemberInfo Member; public object Obj; } static object GetValue (MemberInfo mi, object o) { var fi = mi as FieldInfo; if (fi != null) return fi.GetValue (o); var pi = mi as PropertyInfo; var getMethod = pi.GetGetMethod (); return getMethod.Invoke (o, new object [0]); } static void SetValue (MemberInfo mi, object o, object val) { var fi = mi as FieldInfo; if (fi != null){ fi.SetValue (o, val); return; } var pi = mi as PropertyInfo; var setMethod = 
pi.GetSetMethod (); setMethod.Invoke (o, new object [] { val }); } static string MakeCaption (string name) { var sb = new StringBuilder (name.Length); bool nextUp = true; foreach (char c in name){ if (nextUp){ sb.Append (Char.ToUpper (c)); nextUp = false; } else { if (c == '_'){ sb.Append (' '); continue; } if (Char.IsUpper (c)) sb.Append (' '); sb.Append (c); } } return sb.ToString (); } // Returns the type for fields and properties and null for everything else static Type GetTypeForMember (MemberInfo mi) { if (mi is FieldInfo) return ((FieldInfo) mi).FieldType; else if (mi is PropertyInfo) return ((PropertyInfo) mi).PropertyType; return null; } public BindingContext (object callbacks, object o, string title) { if (o == null) throw new ArgumentNullException ("o"); mappings = new Dictionary<Element,MemberAndInstance> (); Root = new RootElement (title); Populate (callbacks, o, Root); } void Populate (object callbacks, object o, RootElement root) { MemberInfo last_radio_index = null; var members = o.GetType ().GetMembers (BindingFlags.DeclaredOnly | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); Section section = null; foreach (var mi in members){ Type mType = GetTypeForMember (mi); if (mType == null) continue; string caption = null; object [] attrs = mi.GetCustomAttributes (false); bool skip = false; foreach (var attr in attrs){ if (attr is SkipAttribute || attr is System.Runtime.CompilerServices.CompilerGeneratedAttribute) skip = true; else if (attr is CaptionAttribute) caption = ((CaptionAttribute) attr).Caption; else if (attr is SectionAttribute){ if (section != null) root.Add (section); var sa = attr as SectionAttribute; section = new Section (sa.Caption, sa.Footer); } } if (skip) continue; if (caption == null) caption = MakeCaption (mi.Name); if (section == null) section = new Section (); Element element = null; if (mType == typeof (string)){ PasswordAttribute pa = null; AlignmentAttribute align = null; EntryAttribute ea = null; object 
html = null; NSAction invoke = null; bool multi = false; foreach (object attr in attrs){ if (attr is PasswordAttribute) pa = attr as PasswordAttribute; else if (attr is EntryAttribute) ea = attr as EntryAttribute; else if (attr is MultilineAttribute) multi = true; else if (attr is HtmlAttribute) html = attr; else if (attr is AlignmentAttribute) align = attr as AlignmentAttribute; if (attr is OnTapAttribute){ string mname = ((OnTapAttribute) attr).Method; if (callbacks == null){ throw new Exception ("Your class contains [OnTap] attributes, but you passed a null object for `context' in the constructor"); } var method = callbacks.GetType ().GetMethod (mname); if (method == null) throw new Exception ("Did not find method " + mname); invoke = delegate { method.Invoke (method.IsStatic ? null : callbacks, new object [0]); }; } } string value = (string) GetValue (mi, o); if (pa != null) element = new EntryElement (caption, pa.Placeholder, value, true); else if (ea != null) element = new EntryElement (caption, ea.Placeholder, value) { KeyboardType = ea.KeyboardType, AutocapitalizationType = ea.AutocapitalizationType, AutocorrectionType = ea.AutocorrectionType, ClearButtonMode = ea.ClearButtonMode }; else if (multi) element = new MultilineElement (caption, value); else if (html != null) element = new HtmlElement (caption, value); else { var selement = new StringElement (caption, value); element = selement; if (align != null) selement.Alignment = align.Alignment; } if (invoke != null) ((StringElement) element).Tapped += invoke; } else if (mType == typeof (float)){ var floatElement = new FloatElement (null, null, (float) GetValue (mi, o)); floatElement.Caption = caption; element = floatElement; foreach (object attr in attrs){ if (attr is RangeAttribute){ var ra = attr as RangeAttribute; floatElement.MinValue = ra.Low; floatElement.MaxValue = ra.High; floatElement.ShowCaption = ra.ShowCaption; } } } else if (mType == typeof (bool)){ bool checkbox = false; foreach (object attr 
in attrs){ if (attr is CheckboxAttribute) checkbox = true; } if (checkbox) element = new CheckboxElement (caption, (bool) GetValue (mi, o)); else element = new BooleanElement (caption, (bool) GetValue (mi, o)); } else if (mType == typeof (DateTime)){ var dateTime = (DateTime) GetValue (mi, o); bool asDate = false, asTime = false; foreach (object attr in attrs){ if (attr is DateAttribute) asDate = true; else if (attr is TimeAttribute) asTime = true; } if (asDate) element = new DateElement (caption, dateTime); else if (asTime) element = new TimeElement (caption, dateTime); else element = new DateTimeElement (caption, dateTime); } else if (mType.IsEnum){ var csection = new Section (); ulong evalue = Convert.ToUInt64 (GetValue (mi, o), null); int idx = 0; int selected = 0; foreach (var fi in mType.GetFields (BindingFlags.Public | BindingFlags.Static)){ ulong v = Convert.ToUInt64 (GetValue (fi, null)); if (v == evalue) selected = idx; CaptionAttribute ca = Attribute.GetCustomAttribute(fi, typeof(CaptionAttribute)) as CaptionAttribute; csection.Add (new RadioElement (ca != null ? 
ca.Caption : MakeCaption (fi.Name))); idx++; } element = new RootElement (caption, new RadioGroup (null, selected)) { csection }; } else if (mType == typeof (UIImage)){ element = new ImageElement ((UIImage) GetValue (mi, o)); } else if (typeof (System.Collections.IEnumerable).IsAssignableFrom (mType)){ var csection = new Section (); int count = 0; if (last_radio_index == null) throw new Exception ("IEnumerable found, but no previous int found"); foreach (var e in (IEnumerable) GetValue (mi, o)){ csection.Add (new RadioElement (e.ToString ())); count++; } int selected = (int) GetValue (last_radio_index, o); if (selected >= count || selected < 0) selected = 0; element = new RootElement (caption, new MemberRadioGroup (null, selected, last_radio_index)) { csection }; last_radio_index = null; } else if (typeof (int) == mType){ foreach (object attr in attrs){ if (attr is RadioSelectionAttribute){ last_radio_index = mi; break; } } } else { var nested = GetValue (mi, o); if (nested != null){ var newRoot = new RootElement (caption); Populate (callbacks, nested, newRoot); element = newRoot; } } if (element == null) continue; section.Add (element); mappings [element] = new MemberAndInstance (mi, o); } root.Add (section); } class MemberRadioGroup : RadioGroup { public MemberInfo mi; public MemberRadioGroup (string key, int selected, MemberInfo mi) : base (key, selected) { this.mi = mi; } } public void Dispose () { Dispose (true); } protected virtual void Dispose (bool disposing) { if (disposing){ foreach (var element in mappings.Keys){ element.Dispose (); } mappings = null; } } public void Fetch () { foreach (var dk in mappings){ Element element = dk.Key; MemberInfo mi = dk.Value.Member; object obj = dk.Value.Obj; if (element is DateTimeElement) SetValue (mi, obj, ((DateTimeElement) element).DateValue); else if (element is FloatElement) SetValue (mi, obj, ((FloatElement) element).Value); else if (element is BooleanElement) SetValue (mi, obj, ((BooleanElement) element).Value); 
else if (element is CheckboxElement) SetValue (mi, obj, ((CheckboxElement) element).Value); else if (element is EntryElement){ var entry = (EntryElement) element; entry.FetchValue (); SetValue (mi, obj, entry.Value); } else if (element is ImageElement) SetValue (mi, obj, ((ImageElement) element).Value); else if (element is RootElement){ var re = element as RootElement; if (re.group as MemberRadioGroup != null){ var group = re.group as MemberRadioGroup; SetValue (group.mi, obj, re.RadioSelected); } else if (re.group as RadioGroup != null){ var mType = GetTypeForMember (mi); var fi = mType.GetFields (BindingFlags.Public | BindingFlags.Static) [re.RadioSelected]; SetValue (mi, obj, fi.GetValue (null)); } } } } } }
danmiser/MonoTouch.Dialog
MonoTouch.Dialog/Reflect.cs
C#
mit
13,498
using Microsoft.AspNet.Identity; using Microsoft.AspNet.Identity.EntityFramework; using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.Linq; using System.Security.Claims; using System.Threading.Tasks; using System.Web; namespace Blog.Models { // You can add profile data for the user by adding more properties to your ApplicationUser class, please visit http://go.microsoft.com/fwlink/?LinkID=317594 to learn more. public class ApplicationUser : IdentityUser { [Required] public string FullName { get; set; } public async Task<ClaimsIdentity> GenerateUserIdentityAsync(UserManager<ApplicationUser> manager) { // Note the authenticationType must match the one defined in CookieAuthenticationOptions.AuthenticationType var userIdentity = await manager.CreateIdentityAsync(this, DefaultAuthenticationTypes.ApplicationCookie); // Add custom user claims here return userIdentity; } } }
yangra/SoftUni
TechModule/Software Technologies/SoftUniBlog-CSharp-Admin/Blog/Models/ApplicationUser.cs
C#
mit
1,041
#!/usr/bin/env ruby require './lib/metalbird/authenticators/twitter.rb' url = 'https://api.twitter.com' authenticator = Metalbird::Authenticator::Twitter.new(url) authenticator.authenticate
hubtee/post_publisher
bin/auth_twitter.rb
Ruby
mit
192
import createSvgIcon from './utils/createSvgIcon'; import { jsx as _jsx } from "react/jsx-runtime"; export default createSvgIcon( /*#__PURE__*/_jsx("path", { d: "M19 3H5c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h14c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2zM8 17c-.55 0-1-.45-1-1v-5c0-.55.45-1 1-1s1 .45 1 1v5c0 .55-.45 1-1 1zm4 0c-.55 0-1-.45-1-1V8c0-.55.45-1 1-1s1 .45 1 1v8c0 .55-.45 1-1 1zm4 0c-.55 0-1-.45-1-1v-2c0-.55.45-1 1-1s1 .45 1 1v2c0 .55-.45 1-1 1z" }), 'AssessmentRounded');
oliviertassinari/material-ui
packages/mui-icons-material/lib/esm/AssessmentRounded.js
JavaScript
mit
471
#pragma once #include "net/uri.hpp" namespace http { using uri = net::uri; }
ruoka/net4cpp
src/http/uri.hpp
C++
mit
82
#!/usr/bin/env python # -*- coding: utf-8 -*- import logging, os logging.basicConfig(level=logging.INFO) from deepy.networks import RecursiveAutoEncoder from deepy.trainers import SGDTrainer, LearningRateAnnealer from util import get_data, VECTOR_SIZE model_path = os.path.join(os.path.dirname(__file__), "models", "rae1.gz") if __name__ == '__main__': model = RecursiveAutoEncoder(input_dim=VECTOR_SIZE, rep_dim=10) trainer = SGDTrainer(model) annealer = LearningRateAnnealer() trainer.run(get_data(), epoch_controllers=[annealer]) model.save_params(model_path)
zomux/deepy
examples/auto_encoders/recursive_auto_encoder.py
Python
mit
593
namespace StringExtensions { using System; using System.Collections.Generic; using System.Globalization; using System.Linq; using System.Reflection; using System.Security.Cryptography; using System.Text; using System.Text.RegularExpressions; /// <summary> /// Static class providing various string extension methods: /// <list type="bullet"> /// <item> /// <description>ToMd5Hash,</description> /// </item> /// <item> /// <description>ToBoolean,</description> /// </item> /// <item> /// <description>ToShort,</description> /// </item> /// <item> /// <description>ToInteger,</description> /// </item> /// <item> /// <description>ToLong,</description> /// </item> /// <item> /// <description>ToDateTime,</description> /// </item> /// <item> /// <description>CapitalizeFirstLetter,</description> /// </item> /// <item> /// <description>ConvertCyrillicToLatinLetters,</description> /// </item> /// <item> /// <description>ConvertLatinToCyrillicKeyboard,</description> /// </item> /// <item> /// <description>ToValidUsername,</description> /// </item> /// <item> /// <description>ToValidLatinFileName,</description> /// </item> /// <item> /// <description>GetFirstCharacters,</description> /// </item> /// <item> /// <description>GetFileExtension,</description> /// </item> /// <item> /// <description>ToContentType,</description> /// </item> /// <item> /// <description>ToByteArray,</description> /// </item> /// </list> /// </summary> public static class StringExtensions { /// <summary> /// A string extension method that converts the target string to a byte array, and /// computes the hashes for each element. /// Then bytes are formatted as a hexadecimal strings and appended to the resulting /// string that is finally returned. 
/// </summary> /// <param name="input">The string the method is called upon.</param> /// <returns>A hexadecimal string</returns> /// <exception cref="TargetInvocationException">The algorithm was used with Federal Information Processing Standards (FIPS) /// mode enabled, but is not FIPS compatible.</exception> public static string ToMd5Hash(this string input) { var md5Hash = MD5.Create(); // Convert the input string to a byte array and compute the hash. var data = md5Hash.ComputeHash(Encoding.UTF8.GetBytes(input)); // Create a new StringBuilder to collect the bytes // and create a string. var builder = new StringBuilder(); // Loop through each byte of the hashed data // and format each one as a hexadecimal string. for (int i = 0; i < data.Length; i++) { builder.Append(data[i].ToString("x2")); } // Return the hexadecimal string. return builder.ToString(); } /// <summary> /// A string extension method that checks whether the target string is contained within a /// predefined collection of true-like values. /// </summary> /// <param name="input">The string the method is called upon.</param> /// <returns>Whether the input is among the given true values (True/False)</returns> public static bool ToBoolean(this string input) { var stringTrueValues = new[] { "true", "ok", "yes", "1", "да" }; return stringTrueValues.Contains(input.ToLower()); } /// <summary> /// Converts the target string to a short value and returns it. /// </summary> /// <param name="input">The string the method is called upon.</param> /// <returns>The short value obtained from parsing the input string</returns> public static short ToShort(this string input) { short shortValue; short.TryParse(input, out shortValue); return shortValue; } /// <summary> /// Converts the target string to an integer value and returns it. 
/// </summary> /// <param name="input">The string the method is called upon.</param> /// <returns>The integer value obtained from parsing the input string</returns> public static int ToInteger(this string input) { int integerValue; int.TryParse(input, out integerValue); return integerValue; } /// <summary> /// Converts the target string to a long value and returns it. /// </summary> /// <param name="input">The string the method is called upon.</param> /// <returns>The long value obtained from parsing the input string</returns> public static long ToLong(this string input) { long longValue; long.TryParse(input, out longValue); return longValue; } /// <summary> /// Converts the target string to a DateTime value and returns it. /// </summary> /// <param name="input">The string the method is called upon.</param> /// <returns>The DateTime value obtained from parsing the input string</returns> public static DateTime ToDateTime(this string input) { DateTime dateTimeValue; DateTime.TryParse(input, out dateTimeValue); return dateTimeValue; } /// <summary> /// Capitalizes the first letter of the target string. /// </summary> /// <param name="input">The string the method is called upon.</param> /// <returns>The string with capital first letter.</returns> public static string CapitalizeFirstLetter(this string input) { if (string.IsNullOrEmpty(input)) { return input; } return input.Substring(0, 1).ToUpper(CultureInfo.CurrentCulture) + input.Substring(1, input.Length - 1); } /// <summary> /// Returns the substring between two given substrings. 
/// </summary> /// <param name="input">The string the method is called upon.</param> /// <param name="startString">The start of the substring</param> /// <param name="endString">The end of the substring</param> /// <param name="startFrom">The index to start the search from</param> /// <returns>The found substring or an empty one</returns> public static string GetStringBetween(this string input, string startString, string endString, int startFrom = 0) { input = input.Substring(startFrom); startFrom = 0; if (!input.Contains(startString) || !input.Contains(endString)) { return string.Empty; } var startPosition = input.IndexOf(startString, startFrom, StringComparison.Ordinal) + startString.Length; if (startPosition == -1) { return string.Empty; } var endPosition = input.IndexOf(endString, startPosition, StringComparison.Ordinal); if (endPosition == -1) { return string.Empty; } return input.Substring(startPosition, endPosition - startPosition); } /// <summary> /// Replaces cyrillic letters in a string with their latin representation. 
/// </summary>
/// <param name="input">The string the method is called upon.</param>
/// <returns>The new string with latin letters.</returns>
public static string ConvertCyrillicToLatinLetters(this string input)
{
    // Bulgarian alphabet (lower-case); the second array holds the latin
    // transliteration for the letter at the same index.
    var bulgarianLetters = new[] { "а", "б", "в", "г", "д", "е", "ж", "з", "и", "й", "к", "л", "м", "н", "о", "п", "р", "с", "т", "у", "ф", "х", "ц", "ч", "ш", "щ", "ъ", "ь", "ю", "я" };
    var latinRepresentationsOfBulgarianLetters = new[] { "a", "b", "v", "g", "d", "e", "j", "z", "i", "y", "k", "l", "m", "n", "o", "p", "r", "s", "t", "u", "f", "h", "c", "ch", "sh", "sht", "u", "i", "yu", "ya" };
    for (var i = 0; i < bulgarianLetters.Length; i++)
    {
        // Replace the lower-case letter, then its capital form; multi-letter
        // transliterations get a capitalized first letter (e.g. "Ч" -> "Ch").
        // NOTE(review): ToUpper() is culture-sensitive — presumably safe for
        // Bulgarian cyrillic letters in any culture, but confirm.
        input = input.Replace(bulgarianLetters[i], latinRepresentationsOfBulgarianLetters[i]);
        input = input.Replace(bulgarianLetters[i].ToUpper(), latinRepresentationsOfBulgarianLetters[i].CapitalizeFirstLetter());
    }

    return input;
}

/// <summary>
/// Replaces latin letters in a string with their cyrillic representation.
/// </summary>
/// <param name="input">The string the method is called upon.</param>
/// <returns>The new string with cyrillic letters.</returns>
public static string ConvertLatinToCyrillicKeyboard(this string input)
{
    // Latin alphabet a-z mapped positionally to the Bulgarian phonetic
    // keyboard layout (not a transliteration of the alphabet).
    var latinLetters = new[] { "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z" };
    var bulgarianRepresentationOfLatinKeyboard = new[] { "а", "б", "ц", "д", "е", "ф", "г", "х", "и", "й", "к", "л", "м", "н", "о", "п", "я", "р", "с", "т", "у", "ж", "в", "ь", "ъ", "з" };
    for (int i = 0; i < latinLetters.Length; i++)
    {
        input = input.Replace(latinLetters[i], bulgarianRepresentationOfLatinKeyboard[i]);
        input = input.Replace(latinLetters[i].ToUpper(), bulgarianRepresentationOfLatinKeyboard[i].ToUpper());
    }

    return input;
}

/// <summary>
/// Converts a string into a valid username
/// </summary>
/// <param name="input">The string the method is called upon.</param>
/// <returns>The string after the cyrillic letters are converted
/// to latin and all characters
/// that are not alpha-numeric, ".", or "_", are removed.</returns>
/// <exception cref="RegexMatchTimeoutException">A time-out occurred. For more information about time-outs, see the Remarks section.</exception>
public static string ToValidUsername(this string input)
{
    input = input.ConvertCyrillicToLatinLetters();

    // BUG FIX: the original pattern used the range A-z, which in ASCII also
    // spans '[', '\', ']', '^', '_' and '`', so characters such as '[' and
    // '^' were never removed. A-Z matches only the upper-case letters, which
    // is what the documented contract requires.
    return Regex.Replace(input, @"[^a-zA-Z0-9_\.]+", string.Empty);
}

/// <summary>
/// Converts a string into a valid latin filename
/// </summary>
/// <param name="input">The string the method is called upon.</param>
/// <returns>The string after the cyrillic letters are converted to latin, spaces are replaced with "-",
/// and all characters that are not alpha-numeric, ".", "-", or "_", are removed.</returns>
/// <exception cref="RegexMatchTimeoutException">A time-out occurred. For more information about time-outs, see the Remarks section.</exception>
public static string ToValidLatinFileName(this string input)
{
    input = input.Replace(" ", "-").ConvertCyrillicToLatinLetters();

    // Same A-z -> A-Z range fix as in ToValidUsername.
    return Regex.Replace(input, @"[^a-zA-Z0-9_\.\-]+", string.Empty);
}

/// <summary>
/// Returns the first n characters from the string, where n is the second parameter.
/// </summary>
/// <param name="input">The string the method is called upon</param>
/// <param name="charsCount">The number of characters to be returned</param>
/// <returns>The first n characters from the string (or the whole string if charsCount
/// is larger than the length of the string).</returns>
public static string GetFirstCharacters(this string input, int charsCount)
{
    return input.Substring(0, Math.Min(input.Length, charsCount));
}

/// <summary>
/// Returns the file extension of the given filename.
/// </summary>
/// <param name="fileName">The string (filename) the method is called upon.</param>
/// <returns>The file extension of the filename</returns>
public static string GetFileExtension(this string fileName)
{
    if (string.IsNullOrWhiteSpace(fileName))
    {
        return string.Empty;
    }

    string[] fileParts = fileName.Split(new[] { "." }, StringSplitOptions.None);

    // IMPROVED: use Length / direct indexing instead of the LINQ Count() and
    // Last() extension methods — arrays expose both directly.
    string lastPart = fileParts[fileParts.Length - 1];
    if (fileParts.Length == 1 || string.IsNullOrEmpty(lastPart))
    {
        // No "." at all, or the name ends with "." — no extension.
        return string.Empty;
    }

    return lastPart.Trim().ToLower();
}

/// <summary>
/// Returns the content type of a file depending on its extension.
/// </summary>
/// <param name="fileExtension">The file extension</param>
/// <returns>The content type associated with the given file extension</returns>
public static string ToContentType(this string fileExtension)
{
    var fileExtensionToContentType = new Dictionary<string, string>
    {
        { "jpg", "image/jpeg" },
        { "jpeg", "image/jpeg" },
        { "png", "image/x-png" },
        { "docx", "application/vnd.openxmlformats-officedocument.wordprocessingml.document" },
        { "doc", "application/msword" },
        { "pdf", "application/pdf" },
        { "txt", "text/plain" },
        { "rtf", "application/rtf" }
    };

    // IMPROVED: single dictionary lookup via TryGetValue instead of
    // ContainsKey followed by the indexer (each of which re-hashes the key).
    string contentType;
    if (fileExtensionToContentType.TryGetValue(fileExtension.Trim(), out contentType))
    {
        return contentType;
    }

    // Generic binary fallback for unknown extensions.
    return "application/octet-stream";
}

/// <summary>
/// Converts a string into an array of bytes
/// </summary>
/// <param name="input">The string the method is called upon</param>
/// <returns>An array of bytes derived from converting every character
/// in the given string to its byte representation</returns>
/// <exception cref="OverflowException">The array is multidimensional and contains more than <see cref="F:System.Int32.MaxValue" /> elements.</exception>
public static byte[] ToByteArray(this string input)
{
    // Each char is two bytes (UTF-16 code unit); BlockCopy copies the raw
    // little-endian bytes without any encoding conversion.
    var bytesArray = new byte[input.Length * sizeof(char)];
    Buffer.BlockCopy(input.ToCharArray(), 0, bytesArray, 0, bytesArray.Length);
    return bytesArray;
}
}
}
NikolaiMishev/Telerik-Academy
Module-2/High-Quality-Code/Code Documentation and Comments/Task 1. Code documentation/Program.cs
C#
mit
16,083
<?php declare(strict_types=1);

namespace WyriMaps\Tests\BattleNet\Resource\Async\WorldOfWarcraft;

use ApiClients\Tools\ResourceTestUtilities\AbstractEmptyResourceTest;
use WyriMaps\BattleNet\Resource\Async\WorldOfWarcraft\EmptyQuest;

/**
 * Runs the shared empty-resource test suite against the async EmptyQuest resource.
 */
final class EmptyQuestTest extends AbstractEmptyResourceTest
{
    /**
     * @return string Fully qualified class name of the resource under test
     */
    public function getClass(): string
    {
        return EmptyQuest::class;
    }

    /**
     * @return string Which resource flavour is being tested ('Async')
     */
    public function getSyncAsync(): string
    {
        return 'Async';
    }
}
WyriMaps/php-battlenet-client
tests/Resource/Async/WorldOfWarcraft/EmptyQuestTest.php
PHP
mit
467
SVG.G = SVG.invent({
  // Initialize node
  create: 'g'

  // Inherit from
, inherit: SVG.Container

  // Add class methods
, extend: {
    // Move over x-axis (getter when called without an argument)
    x: function(x) {
      if (x == null) return this.transform('x')

      return this.transform({ x: x - this.x() }, true)
    }
    // Move over y-axis (getter when called without an argument)
  , y: function(y) {
      if (y == null) return this.transform('y')

      return this.transform({ y: y - this.y() }, true)
    }
    // Move by center over x-axis
  , cx: function(x) {
      if (x == null) return this.gbox().cx

      return this.x(x - this.gbox().width / 2)
    }
    // Move by center over y-axis
  , cy: function(y) {
      if (y == null) return this.gbox().cy

      return this.y(y - this.gbox().height / 2)
    }
    // Bounding box with the group's own translation applied
  , gbox: function() {
      var box = this.bbox()
        , t   = this.transform()

      // Shift every x-related field by the x translation...
      box.x  += t.x
      box.x2 += t.x
      box.cx += t.x

      // ...and every y-related field by the y translation.
      box.y  += t.y
      box.y2 += t.y
      box.cy += t.y

      return box
    }
  }

  // Add parent method
, construct: {
    // Create a group element
    group: function() {
      return this.put(new SVG.G)
    }
  }
})
albohlabs/svg.js
src/group.js
JavaScript
mit
1,102
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Globalization;
using System.IO;
using System.Windows;
using System.Windows.Data;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Runtime.InteropServices;

namespace Wox
{
    /// <summary>
    /// Multi-value converter resolving a (path, pluginDirectory) pair into an
    /// image: shell icons for .exe/.lnk files, bitmaps for other existing
    /// files. Results are cached per combined path.
    /// NOTE(review): the static cache is not synchronized — assumed to be
    /// touched only from the WPF binding (UI) thread; confirm before reuse.
    /// </summary>
    public class ImagePathConverter : IMultiValueConverter
    {
        // Cache keyed by Path.Combine(pluginDirectory, path).
        private static Dictionary<string, object> imageCache = new Dictionary<string, object>();

        /// <summary>
        /// Gets an icon for the given file: first from the shell system image
        /// list, falling back to Icon.ExtractAssociatedIcon.
        /// </summary>
        private static ImageSource GetIcon(string fileName)
        {
            Icon icon = GetFileIcon(fileName);
            if (icon == null) icon = Icon.ExtractAssociatedIcon(fileName);

            if (icon != null)
            {
                return System.Windows.Interop.Imaging.CreateBitmapSourceFromHIcon(icon.Handle,
                    new Int32Rect(0, 0, icon.Width, icon.Height), BitmapSizeOptions.FromEmptyOptions());
            }

            return null;
        }

        /// <summary>
        /// Converts values[0] (path) and values[1] (plugin directory) into an
        /// ImageSource. Absolute paths (containing ":\") win over paths
        /// relative to the plugin directory. Returns null when nothing loads.
        /// </summary>
        public object Convert(object[] values, Type targetType, object parameter, CultureInfo culture)
        {
            object img = null;
            if (values[0] == null) return null;

            string path = values[0].ToString();
            string pluginDirectory = values[1].ToString();
            string fullPath = Path.Combine(pluginDirectory, path);

            // IMPROVED: one dictionary lookup via TryGetValue instead of
            // ContainsKey followed by the indexer (two hash lookups).
            object cachedImage;
            if (imageCache.TryGetValue(fullPath, out cachedImage))
            {
                return cachedImage;
            }

            string resolvedPath = string.Empty;
            if (!string.IsNullOrEmpty(path) && path.Contains(":\\") && File.Exists(path))
            {
                // Looks like an absolute path that exists — use it directly.
                resolvedPath = path;
            }
            else if (!string.IsNullOrEmpty(path) && File.Exists(fullPath))
            {
                // Otherwise resolve relative to the plugin directory.
                resolvedPath = fullPath;
            }

            if (resolvedPath.ToLower().EndsWith(".exe") || resolvedPath.ToLower().EndsWith(".lnk"))
            {
                img = GetIcon(resolvedPath);
            }
            else if (!string.IsNullOrEmpty(resolvedPath) && File.Exists(resolvedPath))
            {
                img = new BitmapImage(new Uri(resolvedPath));
            }

            // Only successful loads are cached; failures are retried next time.
            if (img != null)
            {
                imageCache.Add(fullPath, img);
            }

            return img;
        }

        public object[] ConvertBack(object value, Type[] targetTypes, object parameter, CultureInfo culture)
        {
            // One-way converter.
            return null;
        }

        // http://blogs.msdn.com/b/oldnewthing/archive/2011/01/27/10120844.aspx
        /// <summary>
        /// Fetches the file's small icon from the shell system image list.
        /// The native icon handle is cloned and then destroyed so the returned
        /// Icon owns its handle. Returns null if the shell has no icon.
        /// </summary>
        public static System.Drawing.Icon GetFileIcon(string name)
        {
            SHFILEINFO shfi = new SHFILEINFO();
            uint flags = SHGFI_SYSICONINDEX;

            IntPtr himl = SHGetFileInfo(name,
                FILE_ATTRIBUTE_NORMAL,
                ref shfi,
                (uint)System.Runtime.InteropServices.Marshal.SizeOf(shfi),
                flags);

            if (himl != IntPtr.Zero)
            {
                IntPtr hIcon = ImageList_GetIcon(himl, shfi.iIcon, ILD_NORMAL);
                // Clone so the managed Icon owns its own handle, then free ours.
                System.Drawing.Icon icon = (System.Drawing.Icon)System.Drawing.Icon.FromHandle(hIcon).Clone();
                DestroyIcon(hIcon);
                return icon;
            }

            return null;
        }

        [DllImport("comctl32.dll", SetLastError = true)]
        private static extern IntPtr ImageList_GetIcon(IntPtr himl, int i, uint flags);

        private const int MAX_PATH = 256;

        [StructLayout(LayoutKind.Sequential)]
        private struct SHITEMID
        {
            public ushort cb;
            [MarshalAs(UnmanagedType.LPArray)]
            public byte[] abID;
        }

        [StructLayout(LayoutKind.Sequential)]
        private struct ITEMIDLIST
        {
            public SHITEMID mkid;
        }

        [StructLayout(LayoutKind.Sequential)]
        private struct BROWSEINFO
        {
            public IntPtr hwndOwner;
            public IntPtr pidlRoot;
            public IntPtr pszDisplayName;
            [MarshalAs(UnmanagedType.LPTStr)]
            public string lpszTitle;
            public uint ulFlags;
            public IntPtr lpfn;
            public int lParam;
            public IntPtr iImage;
        }

        // Browsing for directory.
        private const uint BIF_RETURNONLYFSDIRS = 0x0001;
        private const uint BIF_DONTGOBELOWDOMAIN = 0x0002;
        private const uint BIF_STATUSTEXT = 0x0004;
        private const uint BIF_RETURNFSANCESTORS = 0x0008;
        private const uint BIF_EDITBOX = 0x0010;
        private const uint BIF_VALIDATE = 0x0020;
        private const uint BIF_NEWDIALOGSTYLE = 0x0040;
        private const uint BIF_USENEWUI = (BIF_NEWDIALOGSTYLE | BIF_EDITBOX);
        private const uint BIF_BROWSEINCLUDEURLS = 0x0080;
        private const uint BIF_BROWSEFORCOMPUTER = 0x1000;
        private const uint BIF_BROWSEFORPRINTER = 0x2000;
        private const uint BIF_BROWSEINCLUDEFILES = 0x4000;
        private const uint BIF_SHAREABLE = 0x8000;

        [StructLayout(LayoutKind.Sequential)]
        private struct SHFILEINFO
        {
            public const int NAMESIZE = 80;
            public IntPtr hIcon;
            public int iIcon;
            public uint dwAttributes;
            [MarshalAs(UnmanagedType.ByValTStr, SizeConst = MAX_PATH)]
            public string szDisplayName;
            [MarshalAs(UnmanagedType.ByValTStr, SizeConst = NAMESIZE)]
            public string szTypeName;
        };

        private const uint SHGFI_ICON = 0x000000100;     // get icon
        private const uint SHGFI_DISPLAYNAME = 0x000000200;     // get display name
        private const uint SHGFI_TYPENAME = 0x000000400;     // get type name
        private const uint SHGFI_ATTRIBUTES = 0x000000800;     // get attributes
        private const uint SHGFI_ICONLOCATION = 0x000001000;     // get icon location
        private const uint SHGFI_EXETYPE = 0x000002000;     // return exe type
        private const uint SHGFI_SYSICONINDEX = 0x000004000;     // get system icon index
        private const uint SHGFI_LINKOVERLAY = 0x000008000;     // put a link overlay on icon
        private const uint SHGFI_SELECTED = 0x000010000;     // show icon in selected state
        private const uint SHGFI_ATTR_SPECIFIED = 0x000020000;     // get only specified attributes
        private const uint SHGFI_LARGEICON = 0x000000000;     // get large icon
        private const uint SHGFI_SMALLICON = 0x000000001;     // get small icon
        private const uint SHGFI_OPENICON = 0x000000002;     // get open icon
        private const uint SHGFI_SHELLICONSIZE = 0x000000004;     // get shell size icon
        private const uint SHGFI_PIDL = 0x000000008;     // pszPath is a pidl
        private const uint SHGFI_USEFILEATTRIBUTES = 0x000000010;     // use passed dwFileAttribute
        private const uint SHGFI_ADDOVERLAYS = 0x000000020;     // apply the appropriate overlays
        private const uint SHGFI_OVERLAYINDEX = 0x000000040;     // Get the index of the overlay

        private const uint FILE_ATTRIBUTE_DIRECTORY = 0x00000010;
        private const uint FILE_ATTRIBUTE_NORMAL = 0x00000080;

        private const uint ILD_NORMAL = 0x00000000;

        [DllImport("Shell32.dll")]
        private static extern IntPtr SHGetFileInfo(
            string pszPath,
            uint dwFileAttributes,
            ref SHFILEINFO psfi,
            uint cbFileInfo,
            uint uFlags
            );

        [DllImport("User32.dll")]
        private static extern int DestroyIcon(IntPtr hIcon);
    }
}
Rovak/Wox
Wox/ImagePathConverter.cs
C#
mit
7,522
// This file is distributed under the BSD License.
// See "license.txt" for details.
// Copyright 2009-2012, Jonathan Turner (jonathan@emptycrate.com)
// Copyright 2009-2015, Jason Turner (jason@emptycrate.com)
// http://www.chaiscript.com

#ifndef CHAISCRIPT_BOXED_VALUE_HPP_
#define CHAISCRIPT_BOXED_VALUE_HPP_

#include <functional>
#include <map>
#include <memory>
#include <type_traits>

#include "../chaiscript_threading.hpp"
#include "../chaiscript_defines.hpp"
#include "any.hpp"
#include "type_info.hpp"

namespace chaiscript
{
  /// \brief A wrapper for holding any valid C++ type. All types in ChaiScript are Boxed_Value objects
  /// \sa chaiscript::boxed_cast
  class Boxed_Value
  {
    public:
      /// used for explicitly creating a "void" object
      struct Void_Type
      {
      };

    private:
      /// structure which holds the internal state of a Boxed_Value
      /// \todo Get rid of Any and merge it with this, reducing an allocation in the process
      struct Data
      {
        // For const types the mutable data pointer is deliberately left null
        // so no non-const pointer to const data can ever be handed out.
        Data(const Type_Info &ti, chaiscript::detail::Any to, bool tr, const void *t_void_ptr)
          : m_type_info(ti), m_obj(std::move(to)),
            m_data_ptr(ti.is_const()?nullptr:const_cast<void *>(t_void_ptr)),
            m_const_data_ptr(t_void_ptr),
            m_is_ref(tr)
        {
        }

        // Copy-assignment deep-copies the attribute map (if present) so the
        // two Data objects do not share attribute storage afterwards.
        Data &operator=(const Data &rhs)
        {
          m_type_info = rhs.m_type_info;
          m_obj = rhs.m_obj;
          m_is_ref = rhs.m_is_ref;
          m_data_ptr = rhs.m_data_ptr;
          m_const_data_ptr = rhs.m_const_data_ptr;

          if (rhs.m_attrs)
          {
            m_attrs = std::unique_ptr<std::map<std::string, Boxed_Value>>(new std::map<std::string, Boxed_Value>(*rhs.m_attrs));
          }

          return *this;
        }

        Data(const Data &) = delete;

#if !defined(__APPLE__) && (!defined(_MSC_VER) || _MSC_VER != 1800)
        // Defaulted moves are unavailable on toolchains that cannot generate
        // them correctly (older AppleClang / MSVC 2013).
        Data(Data &&) = default;
        Data &operator=(Data &&rhs) = default;
#endif

        Type_Info m_type_info;
        chaiscript::detail::Any m_obj;          // type-erased storage for the held object
        void *m_data_ptr;                       // mutable pointer to the object (null when const)
        const void *m_const_data_ptr;           // always-valid const pointer to the object
        std::unique_ptr<std::map<std::string, Boxed_Value>> m_attrs; // lazily created dynamic attributes
        bool m_is_ref;                          // true when wrapping a std::reference_wrapper
      };

      /// Factory overloads that build the shared Data block for each supported
      /// input category: void, shared_ptr (lvalue/rvalue/pointer-to), raw
      /// pointer, reference_wrapper, and plain value (which is copied to heap).
      struct Object_Data
      {
        static std::shared_ptr<Data> get(Boxed_Value::Void_Type)
        {
          return std::make_shared<Data>(
                detail::Get_Type_Info<void>::get(),
                chaiscript::detail::Any(),
                false,
                nullptr)
              ;
        }

        template<typename T>
          static std::shared_ptr<Data> get(const std::shared_ptr<T> *obj)
          {
            return get(*obj);
          }

        template<typename T>
          static std::shared_ptr<Data> get(const std::shared_ptr<T> &obj)
          {
            return std::make_shared<Data>(
                  detail::Get_Type_Info<T>::get(),
                  chaiscript::detail::Any(obj),
                  false,
                  obj.get()
                );
          }

        template<typename T>
          static std::shared_ptr<Data> get(std::shared_ptr<T> &&obj)
          {
            // Grab the raw pointer before the shared_ptr is moved away.
            auto ptr = obj.get();
            return std::make_shared<Data>(
                  detail::Get_Type_Info<T>::get(),
                  chaiscript::detail::Any(std::move(obj)),
                  false,
                  ptr
                );
          }

        template<typename T>
          static std::shared_ptr<Data> get(T *t)
          {
            // Raw pointers are boxed as references; ownership stays external.
            return get(std::ref(*t));
          }

        template<typename T>
          static std::shared_ptr<Data> get(const T *t)
          {
            return get(std::cref(*t));
          }

        template<typename T>
          static std::shared_ptr<Data> get(std::reference_wrapper<T> obj)
          {
            auto p = &obj.get();
            return std::make_shared<Data>(
                  detail::Get_Type_Info<T>::get(),
                  chaiscript::detail::Any(std::move(obj)),
                  true,
                  p
                );
          }

        template<typename T>
          static std::shared_ptr<Data> get(T t)
          {
            // Plain value: copy/move it onto the heap and own it.
            auto p = std::make_shared<T>(std::move(t));
            auto ptr = p.get();
            return std::make_shared<Data>(
                  detail::Get_Type_Info<T>::get(),
                  chaiscript::detail::Any(std::move(p)),
                  false,
                  ptr
                );
          }

        static std::shared_ptr<Data> get()
        {
          // Undefined/uninitialized value (empty Type_Info).
          return std::make_shared<Data>(
                Type_Info(),
                chaiscript::detail::Any(),
                false,
                nullptr
              );
        }
      };

    public:
      /// Basic Boxed_Value constructor
      // Disabled for Boxed_Value itself so it cannot hijack the copy/move ctors.
      template<typename T,
               typename = typename std::enable_if<!std::is_same<Boxed_Value, typename std::decay<T>::type>::value>::type>
        explicit Boxed_Value(T &&t)
          : m_data(Object_Data::get(std::forward<T>(t)))
        {
        }

      /// Unknown-type constructor
      Boxed_Value()
        : m_data(Object_Data::get())
      {
      }

#if !defined(_MSC_VER) || _MSC_VER != 1800
      Boxed_Value(Boxed_Value&&) = default;
      Boxed_Value& operator=(Boxed_Value&&) = default;
#endif
      // Copies share the internal Data block (shallow, reference semantics).
      Boxed_Value(const Boxed_Value&) = default;
      Boxed_Value& operator=(const Boxed_Value&) = default;

      // O(1): exchanges the shared internal state pointers.
      void swap(Boxed_Value &rhs)
      {
        std::swap(m_data, rhs.m_data);
      }

      /// Copy the values stored in rhs.m_data to m_data.
      /// m_data pointers are not shared in this case
      Boxed_Value assign(const Boxed_Value &rhs)
      {
        (*m_data) = (*rhs.m_data);
        return *this;
      }

      const Type_Info &get_type_info() const CHAISCRIPT_NOEXCEPT
      {
        return m_data->m_type_info;
      }

      /// return true if the object is uninitialized
      bool is_undef() const CHAISCRIPT_NOEXCEPT
      {
        return m_data->m_type_info.is_undef();
      }

      bool is_const() const CHAISCRIPT_NOEXCEPT
      {
        return m_data->m_type_info.is_const();
      }

      bool is_type(const Type_Info &ti) const CHAISCRIPT_NOEXCEPT
      {
        return m_data->m_type_info.bare_equal(ti);
      }

      // True when no object is held at all (both data pointers are null).
      bool is_null() const CHAISCRIPT_NOEXCEPT
      {
        return (m_data->m_data_ptr == nullptr && m_data->m_const_data_ptr == nullptr);
      }

      const chaiscript::detail::Any & get() const CHAISCRIPT_NOEXCEPT
      {
        return m_data->m_obj;
      }

      bool is_ref() const CHAISCRIPT_NOEXCEPT
      {
        return m_data->m_is_ref;
      }

      bool is_pointer() const CHAISCRIPT_NOEXCEPT
      {
        return !is_ref();
      }

      void *get_ptr() const CHAISCRIPT_NOEXCEPT
      {
        return m_data->m_data_ptr;
      }

      const void *get_const_ptr() const CHAISCRIPT_NOEXCEPT
      {
        return m_data->m_const_data_ptr;
      }

      // Lazily creates the attribute map; operator[] default-constructs the
      // attribute on first access, so this never fails for a missing name.
      Boxed_Value get_attr(const std::string &t_name)
      {
        if (!m_data->m_attrs)
        {
          m_data->m_attrs = std::unique_ptr<std::map<std::string, Boxed_Value>>(new std::map<std::string, Boxed_Value>());
        }

        return (*m_data->m_attrs)[t_name];
      }

      // Replaces this object's attributes with a deep copy of t_obj's (no-op
      // when t_obj has none).
      Boxed_Value &copy_attrs(const Boxed_Value &t_obj)
      {
        if (t_obj.m_data->m_attrs)
        {
          m_data->m_attrs = std::unique_ptr<std::map<std::string, Boxed_Value>>(new std::map<std::string, Boxed_Value>(*t_obj.m_data->m_attrs));
        }
        return *this;
      }

      /// \returns true if the two Boxed_Values share the same internal type
      static bool type_match(const Boxed_Value &l, const Boxed_Value &r) CHAISCRIPT_NOEXCEPT
      {
        return l.get_type_info() == r.get_type_info();
      }

    private:
      std::shared_ptr<Data> m_data;
  };

  /// @brief Creates a Boxed_Value. If the object passed in is a value type, it is copied. If it is a pointer, std::shared_ptr, or std::reference_type
  ///        a copy is not made.
  /// @param t The value to box
  ///
  /// Example:
  ///
  /// ~~~{.cpp}
  /// int i;
  /// chaiscript::ChaiScript chai;
  /// chai.add(chaiscript::var(i), "i");
  /// chai.add(chaiscript::var(&i), "ip");
  /// ~~~
  ///
  /// @sa @ref adding_objects
  template<typename T>
    Boxed_Value var(T t)
    {
      return Boxed_Value(t);
    }

  namespace detail
  {
    /// \brief Takes a value, copies it and returns a Boxed_Value object that is immutable
    /// \param[in] t Value to copy and make const
    /// \returns Immutable Boxed_Value
    /// \sa Boxed_Value::is_const
    template<typename T>
      Boxed_Value const_var_impl(const T &t)
      {
        return Boxed_Value(std::make_shared<typename std::add_const<T>::type >(t));
      }

    /// \brief Takes a pointer to a value, adds const to the pointed to type and returns an immutable Boxed_Value.
    ///        Does not copy the pointed to value.
    /// \param[in] t Pointer to make immutable
    /// \returns Immutable Boxed_Value
    /// \sa Boxed_Value::is_const
    template<typename T>
      Boxed_Value const_var_impl(T *t)
      {
        return Boxed_Value( const_cast<typename std::add_const<T>::type *>(t) );
      }

    /// \brief Takes a std::shared_ptr to a value, adds const to the pointed to type and returns an immutable Boxed_Value.
    ///        Does not copy the pointed to value.
    /// \param[in] t Pointer to make immutable
    /// \returns Immutable Boxed_Value
    /// \sa Boxed_Value::is_const
    template<typename T>
      Boxed_Value const_var_impl(const std::shared_ptr<T> &t)
      {
        return Boxed_Value( std::const_pointer_cast<typename std::add_const<T>::type>(t) );
      }

    /// \brief Takes a std::reference_wrapper value, adds const to the referenced type and returns an immutable Boxed_Value.
    ///        Does not copy the referenced value.
    /// \param[in] t Reference object to make immutable
    /// \returns Immutable Boxed_Value
    /// \sa Boxed_Value::is_const
    template<typename T>
      Boxed_Value const_var_impl(const std::reference_wrapper<T> &t)
      {
        return Boxed_Value( std::cref(t.get()) );
      }
  }

  /// \brief Takes an object and returns an immutable Boxed_Value. If the object is a std::reference or pointer type
  ///        the value is not copied. If it is an object type, it is copied.
  /// \param[in] t Object to make immutable
  /// \returns Immutable Boxed_Value
  /// \sa chaiscript::Boxed_Value::is_const
  /// \sa chaiscript::var
  ///
  /// Example:
  /// \code
  /// enum Colors
  /// {
  ///   Blue,
  ///   Green,
  ///   Red
  /// };
  /// chaiscript::ChaiScript chai
  /// chai.add(chaiscript::const_var(Blue), "Blue"); // add immutable constant
  /// chai.add(chaiscript::const_var(Red), "Red");
  /// chai.add(chaiscript::const_var(Green), "Green");
  /// \endcode
  ///
  /// \todo support C++11 strongly typed enums
  /// \sa \ref adding_objects
  template<typename T>
    Boxed_Value const_var(const T &t)
    {
      return detail::const_var_impl(t);
    }

}

#endif
spijdar/dayspring
src/server/chaiscript/dispatchkit/boxed_value.hpp
C++
mit
11,084
/*
 * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.auth;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

/**
 * Simple implementation AWSCredentials that reads in AWS access keys from a
 * properties file. The AWS access key is expected to be in the "accessKey"
 * property and the AWS secret key id is expected to be in the "secretKey"
 * property.
 */
public class PropertiesCredentials implements AWSCredentials {

    private final String accessKey;
    private final String secretAccessKey;

    /**
     * Reads the specified file as a Java properties file and extracts the
     * AWS access key from the "accessKey" property and AWS secret access
     * key from the "secretKey" property. If the specified file doesn't
     * contain the AWS access keys an IOException will be thrown.
     *
     * @param file
     *            The file from which to read the AWS credentials
     *            properties.
     *
     * @throws FileNotFoundException
     *             If the specified file isn't found.
     * @throws IOException
     *             If any problems are encountered reading the AWS access
     *             keys from the specified file.
     * @throws IllegalArgumentException
     *             If the specified properties file does not contain the
     *             required keys.
     */
    public PropertiesCredentials(File file) throws FileNotFoundException, IOException, IllegalArgumentException {
        if (!file.exists()) {
            throw new FileNotFoundException("File doesn't exist: "
                                            + file.getAbsolutePath());
        }

        // Load and close the stream; validation happens afterwards so the
        // stream is released on every path.
        FileInputStream stream = new FileInputStream(file);
        Properties accountProperties;
        try {
            accountProperties = loadProperties(stream);
        } finally {
            closeQuietly(stream);
        }

        if (isMissingRequiredProperties(accountProperties)) {
            throw new IllegalArgumentException(
                "The specified file (" + file.getAbsolutePath()
                + ") doesn't contain the expected properties 'accessKey' "
                + "and 'secretKey'."
            );
        }

        accessKey = accountProperties.getProperty("accessKey");
        secretAccessKey = accountProperties.getProperty("secretKey");
    }

    /**
     * Reads the specified input stream as a stream of Java properties file
     * content and extracts the AWS access key ID and secret access key from the
     * properties.
     *
     * @param inputStream
     *            The input stream containing the AWS credential properties.
     *
     * @throws IOException
     *             If any problems occur while reading from the input stream.
     */
    public PropertiesCredentials(InputStream inputStream) throws IOException {
        // Note: the caller's stream is closed here, matching the original
        // behavior of this constructor.
        Properties accountProperties;
        try {
            accountProperties = loadProperties(inputStream);
        } finally {
            closeQuietly(inputStream);
        }

        if (isMissingRequiredProperties(accountProperties)) {
            throw new IllegalArgumentException("The specified properties data " +
                    "doesn't contain the expected properties 'accessKey' and 'secretKey'.");
        }

        accessKey = accountProperties.getProperty("accessKey");
        secretAccessKey = accountProperties.getProperty("secretKey");
    }

    /** Loads the given stream as a Java properties file; the stream is NOT closed here. */
    private static Properties loadProperties(InputStream in) throws IOException {
        Properties properties = new Properties();
        properties.load(in);
        return properties;
    }

    /** Returns true if either required credential property is absent. */
    private static boolean isMissingRequiredProperties(Properties properties) {
        return properties.getProperty("accessKey") == null
            || properties.getProperty("secretKey") == null;
    }

    /** Closes the stream, suppressing any exception (preserves original best-effort close). */
    private static void closeQuietly(InputStream in) {
        try {
            in.close();
        } catch (Exception e) {
        }
    }

    /* (non-Javadoc)
     * @see com.amazonaws.auth.AWSCredentials#getAWSAccessKeyId()
     */
    public String getAWSAccessKeyId() {
        return accessKey;
    }

    /* (non-Javadoc)
     * @see com.amazonaws.auth.AWSCredentials#getAWSSecretKey()
     */
    public String getAWSSecretKey() {
        return secretAccessKey;
    }
}
loremipsumdolor/CastFast
src/com/amazonaws/auth/PropertiesCredentials.java
Java
mit
4,679
/*
 * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.s3.transfer;

import java.io.File;

import com.amazonaws.services.s3.model.ObjectMetadata;

/**
 * This is the callback interface which is used by TransferManager.uploadDirectory and
 * TransferManager.uploadFileList. The callback is invoked for each file that is uploaded by
 * <code>TransferManager</code> and given an opportunity to specify the metadata for each file.
 */
public interface ObjectMetadataProvider {

    /**
     * This method is called for every file that is uploaded by <code>TransferManager</code>
     * and gives an opportunity to specify the metadata for the file.
     *
     * @param file
     *            The file being uploaded.
     *
     * @param metadata
     *            The default metadata for the file. You can modify this object to specify
     *            your own metadata.
     */
    public void provideObjectMetadata(final File file, final ObjectMetadata metadata);

}
loremipsumdolor/CastFast
src/com/amazonaws/services/s3/transfer/ObjectMetadataProvider.java
Java
mit
1,511
//---------------------------------------------------------------------------------------------- // <copyright file="AppDeployment.cs" company="Microsoft Corporation"> // Licensed under the MIT License. See LICENSE.TXT in the project root license information. // </copyright> //---------------------------------------------------------------------------------------------- using System; using System.Collections.Generic; using System.IO; #if !WINDOWS_UWP using System.Net; using System.Net.Http; #endif // !WINDOWS_UWP using System.Runtime.Serialization; using System.Text; using System.Threading.Tasks; #if WINDOWS_UWP using Windows.Foundation; using Windows.Security.Credentials; using Windows.Storage.Streams; using Windows.Web.Http; using Windows.Web.Http.Filters; using Windows.Web.Http.Headers; #endif namespace Microsoft.Tools.WindowsDevicePortal { /// <content> /// Wrappers for App Deployment methods. /// </content> public partial class DevicePortal { /// <summary> /// API to retrieve list of installed packages. /// </summary> public static readonly string InstalledPackagesApi = "api/app/packagemanager/packages"; /// <summary> /// Install state API. /// </summary> public static readonly string InstallStateApi = "api/app/packagemanager/state"; /// <summary> /// API for package management. /// </summary> public static readonly string PackageManagerApi = "api/app/packagemanager/package"; /// <summary> /// App Install Status handler. /// </summary> public event ApplicationInstallStatusEventHandler AppInstallStatus; /// <summary> /// Gets the collection of applications installed on the device. /// </summary> /// <returns>AppPackages object containing the list of installed application packages.</returns> public async Task<AppPackages> GetInstalledAppPackagesAsync() { return await this.GetAsync<AppPackages>(InstalledPackagesApi); } /// <summary> /// Installs an application /// </summary> /// <param name="appName">Friendly name (ex: Hello World) of the application. 
If this parameter is not provided, the name of the package is assumed to be the app name.</param> /// <param name="packageFileName">Full name of the application package file.</param> /// <param name="dependencyFileNames">List containing the full names of any required dependency files.</param> /// <param name="certificateFileName">Full name of the optional certificate file.</param> /// <param name="stateCheckIntervalMs">How frequently we should check the installation state.</param> /// <param name="timeoutInMinutes">Operation timeout.</param> /// <param name="uninstallPreviousVersion">Indicate whether or not the previous app version should be uninstalled prior to installing.</param> /// <remarks>InstallApplication sends ApplicationInstallStatus events to indicate the current progress in the installation process. /// Some applications may opt to not register for the AppInstallStatus event and await on InstallApplication.</remarks> /// <returns>Task for tracking completion of install initialization.</returns> public async Task InstallApplicationAsync( string appName, string packageFileName, List<string> dependencyFileNames, string certificateFileName = null, short stateCheckIntervalMs = 500, short timeoutInMinutes = 15, bool uninstallPreviousVersion = true) { string installPhaseDescription = string.Empty; try { FileInfo packageFile = new FileInfo(packageFileName); // If appName was not provided, use the package file name if (string.IsNullOrEmpty(appName)) { appName = packageFile.Name; } // Uninstall the application's previous version, if one exists. 
if (uninstallPreviousVersion) { installPhaseDescription = string.Format("Uninstalling any previous version of {0}", appName); this.SendAppInstallStatus( ApplicationInstallStatus.InProgress, ApplicationInstallPhase.UninstallingPreviousVersion, installPhaseDescription); AppPackages installedApps = await this.GetInstalledAppPackagesAsync(); foreach (PackageInfo package in installedApps.Packages) { if (package.Name == appName) { await this.UninstallApplicationAsync(package.FullName); break; } } } // Create the API endpoint and generate a unique boundary string. Uri uri; string boundaryString; this.CreateAppInstallEndpointAndBoundaryString( packageFile.Name, out uri, out boundaryString); installPhaseDescription = string.Format("Copying: {0}", packageFile.Name); this.SendAppInstallStatus( ApplicationInstallStatus.InProgress, ApplicationInstallPhase.CopyingFile, installPhaseDescription); var content = new HttpMultipartFileContent(); content.Add(packageFile.FullName); content.AddRange(dependencyFileNames); content.Add(certificateFileName); await this.PostAsync(uri, content); // Poll the status until complete. 
ApplicationInstallStatus status = ApplicationInstallStatus.InProgress; do { installPhaseDescription = string.Format("Installing {0}", appName); this.SendAppInstallStatus( ApplicationInstallStatus.InProgress, ApplicationInstallPhase.Installing, installPhaseDescription); await Task.Delay(TimeSpan.FromMilliseconds(stateCheckIntervalMs)); status = await this.GetInstallStatusAsync().ConfigureAwait(false); } while (status == ApplicationInstallStatus.InProgress); installPhaseDescription = string.Format("{0} installed successfully", appName); this.SendAppInstallStatus( ApplicationInstallStatus.Completed, ApplicationInstallPhase.Idle, installPhaseDescription); } catch (Exception e) { DevicePortalException dpe = e as DevicePortalException; if (dpe != null) { this.SendAppInstallStatus( ApplicationInstallStatus.Failed, ApplicationInstallPhase.Idle, string.Format("Failed to install {0}: {1}", appName, dpe.Reason)); } else { this.SendAppInstallStatus( ApplicationInstallStatus.Failed, ApplicationInstallPhase.Idle, string.Format("Failed to install {0}: {1}", appName, installPhaseDescription)); } } } /// <summary> /// Uninstalls the specified application. /// </summary> /// <param name="packageName">The name of the application package to uninstall.</param> /// <returns>Task tracking the uninstall operation.</returns> public async Task UninstallApplicationAsync(string packageName) { await this.DeleteAsync( PackageManagerApi, //// NOTE: When uninstalling an app package, the package name is not Hex64 encoded. string.Format("package={0}", packageName)); } /// <summary> /// Builds the application installation Uri and generates a unique boundary string for the multipart form data. 
/// </summary> /// <param name="packageName">The name of the application package.</param> /// <param name="uri">The endpoint for the install request.</param> /// <param name="boundaryString">Unique string used to separate the parts of the multipart form data.</param> private void CreateAppInstallEndpointAndBoundaryString( string packageName, out Uri uri, out string boundaryString) { uri = Utilities.BuildEndpoint( this.deviceConnection.Connection, PackageManagerApi, string.Format("package={0}", packageName)); boundaryString = Guid.NewGuid().ToString(); } /// <summary> /// Sends application install status. /// </summary> /// <param name="status">Status of the installation.</param> /// <param name="phase">Current installation phase (ex: Uninstalling previous version)</param> /// <param name="message">Optional error message describing the install status.</param> private void SendAppInstallStatus( ApplicationInstallStatus status, ApplicationInstallPhase phase, string message = "") { this.AppInstallStatus?.Invoke( this, new ApplicationInstallStatusEventArgs(status, phase, message)); } #region Data contract /// <summary> /// Object representing a list of Application Packages /// </summary> [DataContract] public class AppPackages { /// <summary> /// Gets a list of the packages /// </summary> [DataMember(Name = "InstalledPackages")] public List<PackageInfo> Packages { get; private set; } /// <summary> /// Presents a user readable representation of a list of AppPackages /// </summary> /// <returns>User readable list of AppPackages.</returns> public override string ToString() { string output = "Packages:\n"; foreach (PackageInfo package in this.Packages) { output += package; } return output; } } /// <summary> /// Object representing the install state /// </summary> [DataContract] public class InstallState { /// <summary> /// Gets install state code /// </summary> [DataMember(Name = "Code")] public int Code { get; private set; } /// <summary> /// Gets message text /// 
</summary> [DataMember(Name = "CodeText")] public string CodeText { get; private set; } /// <summary> /// Gets reason for state /// </summary> [DataMember(Name = "Reason")] public string Reason { get; private set; } /// <summary> /// Gets a value indicating whether this was successful /// </summary> [DataMember(Name = "Success")] public bool WasSuccessful { get; private set; } } /// <summary> /// object representing the package information /// </summary> [DataContract] public class PackageInfo { /// <summary> /// Gets package name /// </summary> [DataMember(Name = "Name")] public string Name { get; private set; } /// <summary> /// Gets package family name /// </summary> [DataMember(Name = "PackageFamilyName")] public string FamilyName { get; private set; } /// <summary> /// Gets package full name /// </summary> [DataMember(Name = "PackageFullName")] public string FullName { get; private set; } /// <summary> /// Gets package relative Id /// </summary> [DataMember(Name = "PackageRelativeId")] public string AppId { get; private set; } /// <summary> /// Gets package publisher /// </summary> [DataMember(Name = "Publisher")] public string Publisher { get; private set; } /// <summary> /// Gets package version /// </summary> [DataMember(Name = "Version")] public PackageVersion Version { get; private set; } /// <summary> /// Gets package origin, a measure of how the app was installed. /// PackageOrigin_Unknown            = 0, /// PackageOrigin_Unsigned           = 1, /// PackageOrigin_Inbox              = 2, /// PackageOrigin_Store              = 3, /// PackageOrigin_DeveloperUnsigned  = 4, /// PackageOrigin_DeveloperSigned    = 5, /// PackageOrigin_LineOfBusiness     = 6 /// </summary> [DataMember(Name = "PackageOrigin")] public int PackageOrigin { get; private set; } /// <summary> /// Helper method to determine if the app was sideloaded and therefore can be used with e.g. GetFolderContentsAsync /// </summary> /// <returns> True if the package is sideloaded. 
</returns> public bool IsSideloaded() { return this.PackageOrigin == 4 || this.PackageOrigin == 5; } /// <summary> /// Get a string representation of the package /// </summary> /// <returns>String representation</returns> public override string ToString() { return string.Format("\t{0}\n\t\t{1}\n", this.FullName, this.AppId); } } /// <summary> /// Object representing a package version /// </summary> [DataContract] public class PackageVersion { /// <summary> /// Gets version build /// </summary> [DataMember(Name = "Build")] public int Build { get; private set; } /// <summary> /// Gets package Major number /// </summary> [DataMember(Name = "Major")] public int Major { get; private set; } /// <summary> /// Gets package minor number /// </summary> [DataMember(Name = "Minor")] public int Minor { get; private set; } /// <summary> /// Gets package revision /// </summary> [DataMember(Name = "Revision")] public int Revision { get; private set; } /// <summary> /// Gets package version /// </summary> public Version Version { get { return new Version(this.Major, this.Minor, this.Build, this.Revision); } } /// <summary> /// Get a string representation of a version /// </summary> /// <returns>String representation</returns> public override string ToString() { return Version.ToString(); } } #endregion // Data contract } }
davidkline-ms/WindowsDevicePortalWrapper
WindowsDevicePortalWrapper/WindowsDevicePortalWrapper.Shared/Core/AppDeployment.cs
C#
mit
16,069
'use strict'; module.exports = function generate_format(it, $keyword) { var out = ' '; var $lvl = it.level; var $dataLvl = it.dataLevel; var $schema = it.schema[$keyword]; var $schemaPath = it.schemaPath + it.util.getProperty($keyword); var $errSchemaPath = it.errSchemaPath + '/' + $keyword; var $breakOnError = !it.opts.allErrors; var $data = 'data' + ($dataLvl || ''); if (it.opts.format === false) { if ($breakOnError) { out += ' if (true) { '; } return out; } var $isData = it.opts.v5 && $schema && $schema.$data, $schemaValue; if ($isData) { out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; '; $schemaValue = 'schema' + $lvl; } else { $schemaValue = $schema; } var $unknownFormats = it.opts.unknownFormats, $allowUnknown = Array.isArray($unknownFormats); if ($isData) { var $format = 'format' + $lvl; out += ' var ' + ($format) + ' = formats[' + ($schemaValue) + ']; var isObject' + ($lvl) + ' = typeof ' + ($format) + ' == \'object\' && !(' + ($format) + ' instanceof RegExp) && ' + ($format) + '.validate; if (isObject' + ($lvl) + ') { '; if (it.async) { out += ' var async' + ($lvl) + ' = ' + ($format) + '.async; '; } out += ' ' + ($format) + ' = ' + ($format) + '.validate; } if ( '; if ($isData) { out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'string\') || '; } out += ' ('; if ($unknownFormats === true || $allowUnknown) { out += ' (' + ($schemaValue) + ' && !' + ($format) + ' '; if ($allowUnknown) { out += ' && self._opts.unknownFormats.indexOf(' + ($schemaValue) + ') == -1 '; } out += ') || '; } out += ' (' + ($format) + ' && !(typeof ' + ($format) + ' == \'function\' ? '; if (it.async) { out += ' (async' + ($lvl) + ' ? 
' + (it.yieldAwait) + ' ' + ($format) + '(' + ($data) + ') : ' + ($format) + '(' + ($data) + ')) '; } else { out += ' ' + ($format) + '(' + ($data) + ') '; } out += ' : ' + ($format) + '.test(' + ($data) + '))))) {'; } else { var $format = it.formats[$schema]; if (!$format) { if ($unknownFormats === true || ($allowUnknown && $unknownFormats.indexOf($schema) == -1)) { throw new Error('unknown format "' + $schema + '" is used in schema at path "' + it.errSchemaPath + '"'); } else { if (!$allowUnknown) { console.warn('unknown format "' + $schema + '" ignored in schema at path "' + it.errSchemaPath + '"'); if ($unknownFormats !== 'ignore') console.warn('In the next major version it will throw exception. See option unknownFormats for more information'); } if ($breakOnError) { out += ' if (true) { '; } return out; } } var $isObject = typeof $format == 'object' && !($format instanceof RegExp) && $format.validate; if ($isObject) { var $async = $format.async === true; $format = $format.validate; } if ($async) { if (!it.async) throw new Error('async format in sync schema'); var $formatRef = 'formats' + it.util.getProperty($schema) + '.validate'; out += ' if (!(' + (it.yieldAwait) + ' ' + ($formatRef) + '(' + ($data) + '))) { '; } else { out += ' if (! 
'; var $formatRef = 'formats' + it.util.getProperty($schema); if ($isObject) $formatRef += '.validate'; if (typeof $format == 'function') { out += ' ' + ($formatRef) + '(' + ($data) + ') '; } else { out += ' ' + ($formatRef) + '.test(' + ($data) + ') '; } out += ') { '; } } var $$outStack = $$outStack || []; $$outStack.push(out); out = ''; /* istanbul ignore else */ if (it.createErrors !== false) { out += ' { keyword: \'' + ('format') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { format: '; if ($isData) { out += '' + ($schemaValue); } else { out += '' + (it.util.toQuotedString($schema)); } out += ' } '; if (it.opts.messages !== false) { out += ' , message: \'should match format "'; if ($isData) { out += '\' + ' + ($schemaValue) + ' + \''; } else { out += '' + (it.util.escapeQuotes($schema)); } out += '"\' '; } if (it.opts.verbose) { out += ' , schema: '; if ($isData) { out += 'validate.schema' + ($schemaPath); } else { out += '' + (it.util.toQuotedString($schema)); } out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; } out += ' } '; } else { out += ' {} '; } var __err = out; out = $$outStack.pop(); if (!it.compositeRule && $breakOnError) { /* istanbul ignore if */ if (it.async) { out += ' throw new ValidationError([' + (__err) + ']); '; } else { out += ' validate.errors = [' + (__err) + ']; return false; '; } } else { out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; } out += ' } '; if ($breakOnError) { out += ' else { '; } return out; }
Moccine/global-service-plus.com
web/libariries/bootstrap/node_modules/ajv/lib/dotjs/format.js
JavaScript
mit
5,423
package org.luban.common.plugin; import org.luban.common.network.URL; /** * 服务插件接口 * * @author hexiaofeng * @version 1.0.0 * @since 12-12-12 下午8:47 */ public interface ServicePlugin { /** * 返回类型 * * @return */ String getType(); /** * 设置URL * * @param url */ void setUrl(URL url); }
krisjin/common-base
luban-common/src/main/java/org/luban/common/plugin/ServicePlugin.java
Java
mit
377
#!/usr/bin/env python2 """Example of server-side computations used in global forest change analysis. In this example we will focus on server side computation using NDVI and EVI data. This both metrics are computed bands created by third party companies or directly taken by the satellites. NDVI and EVI are two metrics used in global forest change analysis. They represent the forest concentration in a specific area. We will use the MOD13A1 vegetation indice provided by the NASA [1]. The goal is to generate an RGB image, where reds stands for deforestation, gree for reforestation and blue for masked data (e.g. rivers, oceans...). [1] https://code.earthengine.google.com/dataset/MODIS/MOD13A1 """ import ee # Initialize the Earth Engine ee.Initialize() # Small rectangle used to generate the image, over the Amazonian forest. # The location is above the Rondonia (West of Bresil). rectangle = ee.Geometry.Rectangle(-68, -7, -65, -8) # Get the MODIS dataset. collection = ee.ImageCollection('MODIS/MOD13A1') # Select the EVI, since it is more accurate on this dataset. You can also # use the NDVI band here. collection = collection.select(['EVI']) # Get two dataset, one over the year 2000 and the other one over 2015 ndvi2000 = collection.filterDate('2000-01-01', '2000-12-31').median() ndvi2015 = collection.filterDate('2015-01-01', '2015-12-31').median() # Substract the two datasets to see the evolution between both of them. difference = ndvi2015.subtract(ndvi2000) # Use a mask to avoid showing data on rivers. # TODO(funkysayu) move this mask to blue color. classifiedImage = ee.Image('MODIS/051/MCD12Q1/2001_01_01') mask = classifiedImage.select(['Land_Cover_Type_1']) maskedDifference = difference.updateMask(mask) # Convert it to RGB image. visualized = maskedDifference.visualize( min=-2000, max=2000, palette='FF0000, 000000, 00FF00', ) # Finally generate the PNG. print visualized.getDownloadUrl({ 'region': rectangle.toGeoJSONString(), 'scale': 500, 'format': 'png', })
ArchangelX360/EnvironmentalEventsDetector
computation/imagefetcher/examples/ndvi_difference.py
Python
mit
2,028
require File.expand_path("../../../test_helper", __FILE__) describe Flipflop::Strategies::DefaultStrategy do before do Flipflop::FeatureSet.current.replace do Flipflop.configure do feature :one, default: true feature :two end end end describe "with defaults" do subject do Flipflop::Strategies::DefaultStrategy.new.freeze end it "should have default name" do assert_equal "default", subject.name end it "should have title derived from name" do assert_equal "Default", subject.title end it "should have no default description" do assert_equal "Uses feature default status.", subject.description end it "should not be switchable" do assert_equal false, subject.switchable? end it "should have unique key" do assert_match /^\w+$/, subject.key end describe "with explicitly defaulted feature" do it "should have feature enabled" do assert_equal true, subject.enabled?(:one) end end describe "with implicitly defaulted feature" do it "should not have feature enabled" do assert_equal false, subject.enabled?(:two) end end end end
voormedia/flipflop
test/unit/strategies/default_strategy_test.rb
Ruby
mit
1,218
require 'forwardable' require 'puppet/node' require 'puppet/resource/catalog' require 'puppet/util/errors' require 'puppet/resource/type_collection_helper' # Maintain a graph of scopes, along with a bunch of data # about the individual catalog we're compiling. class Puppet::Parser::Compiler extend Forwardable include Puppet::Util include Puppet::Util::Errors include Puppet::Util::MethodHelper include Puppet::Resource::TypeCollectionHelper def self.compile(node) $env_module_directories = nil node.environment.check_for_reparse if node.environment.conflicting_manifest_settings? errmsg = [ "The 'disable_per_environment_manifest' setting is true, and this '#{node.environment}'", "has an environment.conf manifest that conflicts with the 'default_manifest' setting.", "Compilation has been halted in order to avoid running a catalog which may be using", "unexpected manifests. For more information, see", "http://docs.puppetlabs.com/puppet/latest/reference/environments.html", ] raise(Puppet::Error, errmsg.join(' ')) end new(node).compile {|resulting_catalog| resulting_catalog.to_resource } rescue Puppet::ParseErrorWithIssue => detail detail.node = node.name Puppet.log_exception(detail) raise rescue => detail message = "#{detail} on node #{node.name}" Puppet.log_exception(detail, message) raise Puppet::Error, message, detail.backtrace end attr_reader :node, :facts, :collections, :catalog, :resources, :relationships, :topscope # The injector that provides lookup services, or nil if accessed before the compiler has started compiling and # bootstrapped. The injector is initialized and available before any manifests are evaluated. 
# # @return [Puppet::Pops::Binder::Injector, nil] The injector that provides lookup services for this compiler/environment # @api public # attr_accessor :injector # Access to the configured loaders for 4x # @return [Puppet::Pops::Loader::Loaders] the configured loaders # @api private attr_reader :loaders # The injector that provides lookup services during the creation of the {#injector}. # @return [Puppet::Pops::Binder::Injector, nil] The injector that provides lookup services during injector creation # for this compiler/environment # # @api private # attr_accessor :boot_injector # Add a collection to the global list. def_delegator :@collections, :<<, :add_collection def_delegator :@relationships, :<<, :add_relationship # Store a resource override. def add_override(override) # If possible, merge the override in immediately. if resource = @catalog.resource(override.ref) resource.merge(override) else # Otherwise, store the override for later; these # get evaluated in Resource#finish. @resource_overrides[override.ref] << override end end def add_resource(scope, resource) @resources << resource # Note that this will fail if the resource is not unique. @catalog.add_resource(resource) if not resource.class? and resource[:stage] raise ArgumentError, "Only classes can set 'stage'; normal resources like #{resource} cannot change run stage" end # Stages should not be inside of classes. They are always a # top-level container, regardless of where they appear in the # manifest. return if resource.stage? # This adds a resource to the class it lexically appears in in the # manifest. unless resource.class? return @catalog.add_edge(scope.resource, resource) end end # Do we use nodes found in the code, vs. the external node sources? def_delegator :known_resource_types, :nodes?, :ast_nodes? # Store the fact that we've evaluated a class def add_class(name) @catalog.add_class(name) unless name == "" end # Return a list of all of the defined classes. 
def_delegator :@catalog, :classes, :classlist # Compiler our catalog. This mostly revolves around finding and evaluating classes. # This is the main entry into our catalog. def compile Puppet.override( @context_overrides , "For compiling #{node.name}") do @catalog.environment_instance = environment # Set the client's parameters into the top scope. Puppet::Util::Profiler.profile("Compile: Set node parameters", [:compiler, :set_node_params]) { set_node_parameters } Puppet::Util::Profiler.profile("Compile: Created settings scope", [:compiler, :create_settings_scope]) { create_settings_scope } if is_binder_active? # create injector, if not already created - this is for 3x that does not trigger # lazy loading of injector via context Puppet::Util::Profiler.profile("Compile: Created injector", [:compiler, :create_injector]) { injector } end Puppet::Util::Profiler.profile("Compile: Evaluated main", [:compiler, :evaluate_main]) { evaluate_main } Puppet::Util::Profiler.profile("Compile: Evaluated AST node", [:compiler, :evaluate_ast_node]) { evaluate_ast_node } Puppet::Util::Profiler.profile("Compile: Evaluated node classes", [:compiler, :evaluate_node_classes]) { evaluate_node_classes } Puppet::Util::Profiler.profile("Compile: Evaluated generators", [:compiler, :evaluate_generators]) { evaluate_generators } Puppet::Util::Profiler.profile("Compile: Finished catalog", [:compiler, :finish_catalog]) { finish } fail_on_unevaluated if block_given? yield @catalog else @catalog end end end # Constructs the overrides for the context def context_overrides() if Puppet.future_parser?(environment) { :current_environment => environment, :global_scope => @topscope, # 4x placeholder for new global scope :loaders => lambda {|| loaders() }, # 4x loaders :injector => lambda {|| injector() } # 4x API - via context instead of via compiler } else { :current_environment => environment, } end end def_delegator :@collections, :delete, :delete_collection # Return the node's environment. 
def environment node.environment end # Evaluate all of the classes specified by the node. # Classes with parameters are evaluated as if they were declared. # Classes without parameters or with an empty set of parameters are evaluated # as if they were included. This means classes with an empty set of # parameters won't conflict even if the class has already been included. def evaluate_node_classes if @node.classes.is_a? Hash classes_with_params, classes_without_params = @node.classes.partition {|name,params| params and !params.empty?} # The results from Hash#partition are arrays of pairs rather than hashes, # so we have to convert to the forms evaluate_classes expects (Hash, and # Array of class names) classes_with_params = Hash[classes_with_params] classes_without_params.map!(&:first) else classes_with_params = {} classes_without_params = @node.classes end evaluate_classes(classes_with_params, @node_scope || topscope) evaluate_classes(classes_without_params, @node_scope || topscope) end # Evaluate each specified class in turn. If there are any classes we can't # find, raise an error. This method really just creates resource objects # that point back to the classes, and then the resources are themselves # evaluated later in the process. # # Sometimes we evaluate classes with a fully qualified name already, in which # case, we tell scope.find_hostclass we've pre-qualified the name so it # doesn't need to search its namespaces again. This gets around a weird # edge case of duplicate class names, one at top scope and one nested in our # namespace and the wrong one (or both!) getting selected. See ticket #13349 # for more detail. 
--jeffweiss 26 apr 2012 def evaluate_classes(classes, scope, lazy_evaluate = true, fqname = false) raise Puppet::DevError, "No source for scope passed to evaluate_classes" unless scope.source class_parameters = nil # if we are a param class, save the classes hash # and transform classes to be the keys if classes.class == Hash class_parameters = classes classes = classes.keys end hostclasses = classes.collect do |name| scope.find_hostclass(name, :assume_fqname => fqname) or raise Puppet::Error, "Could not find class #{name} for #{node.name}" end if class_parameters resources = ensure_classes_with_parameters(scope, hostclasses, class_parameters) if !lazy_evaluate resources.each(&:evaluate) end resources else already_included, newly_included = ensure_classes_without_parameters(scope, hostclasses) if !lazy_evaluate newly_included.each(&:evaluate) end already_included + newly_included end end def evaluate_relationships @relationships.each { |rel| rel.evaluate(catalog) } end # Return a resource by either its ref or its type and title. def_delegator :@catalog, :resource, :findresource def initialize(node, options = {}) @node = node set_options(options) initvars end # Create a new scope, with either a specified parent scope or # using the top scope. def newscope(parent, options = {}) parent ||= topscope scope = Puppet::Parser::Scope.new(self, options) scope.parent = parent scope end # Return any overrides for the given resource. def resource_overrides(resource) @resource_overrides[resource.ref] end def injector create_injector if @injector.nil? @injector end def loaders @loaders ||= Puppet::Pops::Loaders.new(environment) end def boot_injector create_boot_injector(nil) if @boot_injector.nil? @boot_injector end # Creates the boot injector from registered system, default, and injector config. # @return [Puppet::Pops::Binder::Injector] the created boot injector # @api private Cannot be 'private' since it is called from the BindingsComposer. 
# def create_boot_injector(env_boot_bindings) assert_binder_active() pb = Puppet::Pops::Binder boot_contribution = pb::SystemBindings.injector_boot_contribution(env_boot_bindings) final_contribution = pb::SystemBindings.final_contribution binder = pb::Binder.new(pb::BindingsFactory.layered_bindings(final_contribution, boot_contribution)) @boot_injector = pb::Injector.new(binder) end # Answers if Puppet Binder should be active or not, and if it should and is not active, then it is activated. # @return [Boolean] true if the Puppet Binder should be activated def is_binder_active? should_be_active = Puppet[:binder] || Puppet.future_parser? if should_be_active # TODO: this should be in a central place, not just for ParserFactory anymore... Puppet::Parser::ParserFactory.assert_rgen_installed() @@binder_loaded ||= false unless @@binder_loaded require 'puppet/pops' require 'puppetx' @@binder_loaded = true end end should_be_active end private def ensure_classes_with_parameters(scope, hostclasses, parameters) hostclasses.collect do |klass| klass.ensure_in_catalog(scope, parameters[klass.name] || {}) end end def ensure_classes_without_parameters(scope, hostclasses) already_included = [] newly_included = [] hostclasses.each do |klass| class_scope = scope.class_scope(klass) if class_scope already_included << class_scope.resource else newly_included << klass.ensure_in_catalog(scope) end end [already_included, newly_included] end # If ast nodes are enabled, then see if we can find and evaluate one. def evaluate_ast_node return unless ast_nodes? # Now see if we can find the node. astnode = nil @node.names.each do |name| break if astnode = known_resource_types.node(name.to_s.downcase) end unless (astnode ||= known_resource_types.node("default")) raise Puppet::ParseError, "Could not find default node or by name with '#{node.names.join(", ")}'" end # Create a resource to model this node, and then add it to the list # of resources. 
resource = astnode.ensure_in_catalog(topscope) resource.evaluate @node_scope = topscope.class_scope(astnode) end # Evaluate our collections and return true if anything returned an object. # The 'true' is used to continue a loop, so it's important. def evaluate_collections return false if @collections.empty? exceptwrap do # We have to iterate over a dup of the array because # collections can delete themselves from the list, which # changes its length and causes some collections to get missed. Puppet::Util::Profiler.profile("Evaluated collections", [:compiler, :evaluate_collections]) do found_something = false @collections.dup.each do |collection| found_something = true if collection.evaluate end found_something end end end # Make sure all of our resources have been evaluated into native resources. # We return true if any resources have, so that we know to continue the # evaluate_generators loop. def evaluate_definitions exceptwrap do Puppet::Util::Profiler.profile("Evaluated definitions", [:compiler, :evaluate_definitions]) do !unevaluated_resources.each do |resource| resource.evaluate end.empty? end end end # Iterate over collections and resources until we're sure that the whole # compile is evaluated. This is necessary because both collections # and defined resources can generate new resources, which themselves could # be defined resources. def evaluate_generators count = 0 loop do done = true Puppet::Util::Profiler.profile("Iterated (#{count + 1}) on generators", [:compiler, :iterate_on_generators]) do # Call collections first, then definitions. done = false if evaluate_collections done = false if evaluate_definitions end break if done count += 1 if count > 1000 raise Puppet::ParseError, "Somehow looped more than 1000 times while evaluating host catalog" end end end # Find and evaluate our main object, if possible. 
def evaluate_main @main = known_resource_types.find_hostclass([""], "") || known_resource_types.add(Puppet::Resource::Type.new(:hostclass, "")) @topscope.source = @main @main_resource = Puppet::Parser::Resource.new("class", :main, :scope => @topscope, :source => @main) @topscope.resource = @main_resource add_resource(@topscope, @main_resource) @main_resource.evaluate end # Make sure the entire catalog is evaluated. def fail_on_unevaluated fail_on_unevaluated_overrides fail_on_unevaluated_resource_collections end # If there are any resource overrides remaining, then we could # not find the resource they were supposed to override, so we # want to throw an exception. def fail_on_unevaluated_overrides remaining = @resource_overrides.values.flatten.collect(&:ref) if !remaining.empty? fail Puppet::ParseError, "Could not find resource(s) #{remaining.join(', ')} for overriding" end end # Make sure we don't have any remaining collections that specifically # look for resources, because we want to consider those to be # parse errors. def fail_on_unevaluated_resource_collections if Puppet.future_parser? remaining = @collections.collect(&:unresolved_resources).flatten.compact else remaining = @collections.collect(&:resources).flatten.compact end if !remaining.empty? raise Puppet::ParseError, "Failed to realize virtual resources #{remaining.join(', ')}" end end # Make sure all of our resources and such have done any last work # necessary. def finish evaluate_relationships resources.each do |resource| # Add in any resource overrides. if overrides = resource_overrides(resource) overrides.each do |over| resource.merge(over) end # Remove the overrides, so that the configuration knows there # are none left. 
overrides.clear end resource.finish if resource.respond_to?(:finish) end add_resource_metaparams end def add_resource_metaparams unless main = catalog.resource(:class, :main) raise "Couldn't find main" end names = Puppet::Type.metaparams.select do |name| !Puppet::Parser::Resource.relationship_parameter?(name) end data = {} catalog.walk(main, :out) do |source, target| if source_data = data[source] || metaparams_as_data(source, names) # only store anything in the data hash if we've actually got # data data[source] ||= source_data source_data.each do |param, value| target[param] = value if target[param].nil? end data[target] = source_data.merge(metaparams_as_data(target, names)) end target.tag(*(source.tags)) end end def metaparams_as_data(resource, params) data = nil params.each do |param| unless resource[param].nil? # Because we could be creating a hash for every resource, # and we actually probably don't often have any data here at all, # we're optimizing a bit by only creating a hash if there's # any data to put in it. data ||= {} data[param] = resource[param] end end data end # Set up all of our internal variables. def initvars # The list of overrides. This is used to cache overrides on objects # that don't exist yet. We store an array of each override. @resource_overrides = Hash.new do |overs, ref| overs[ref] = [] end # The list of collections that have been created. This is a global list, # but they each refer back to the scope that created them. @collections = [] # The list of relationships to evaluate. @relationships = [] # For maintaining the relationship between scopes and their resources. @catalog = Puppet::Resource::Catalog.new(@node.name, @node.environment) # MOVED HERE - SCOPE IS NEEDED (MOVE-SCOPE) # Create the initial scope, it is needed early @topscope = Puppet::Parser::Scope.new(self) # Need to compute overrides here, and remember them, because we are about to # enter the magic zone of known_resource_types and intial import. 
# Expensive entries in the context are bound lazily. @context_overrides = context_overrides() # This construct ensures that initial import (triggered by instantiating # the structure 'known_resource_types') has a configured context # It cannot survive the initvars method, and is later reinstated # as part of compiling... # Puppet.override( @context_overrides , "For initializing compiler") do # THE MAGIC STARTS HERE ! This triggers parsing, loading etc. @catalog.version = known_resource_types.version end @catalog.add_resource(Puppet::Parser::Resource.new("stage", :main, :scope => @topscope)) # local resource array to maintain resource ordering @resources = [] # Make sure any external node classes are in our class list if @node.classes.class == Hash @catalog.add_class(*@node.classes.keys) else @catalog.add_class(*@node.classes) end end # Set the node's parameters into the top-scope as variables. def set_node_parameters node.parameters.each do |param, value| @topscope[param.to_s] = value end # These might be nil. catalog.client_version = node.parameters["clientversion"] catalog.server_version = node.parameters["serverversion"] if Puppet[:trusted_node_data] @topscope.set_trusted(node.trusted_data) end if(Puppet[:immutable_node_data]) facts_hash = node.facts.nil? ? {} : node.facts.values @topscope.set_facts(facts_hash) end end def create_settings_scope settings_type = Puppet::Resource::Type.new :hostclass, "settings" environment.known_resource_types.add(settings_type) settings_resource = Puppet::Parser::Resource.new("class", "settings", :scope => @topscope) @catalog.add_resource(settings_resource) settings_type.evaluate_code(settings_resource) scope = @topscope.class_scope(settings_type) env = environment Puppet.settings.each do |name, setting| next if name == :name scope[name.to_s] = env[name] end end # Return an array of all of the unevaluated resources. These will be definitions, # which need to get evaluated into native resources. 
def unevaluated_resources # The order of these is significant for speed due to short-circuting resources.reject { |resource| resource.evaluated? or resource.virtual? or resource.builtin_type? } end # Creates the injector from bindings found in the current environment. # @return [void] # @api private # def create_injector assert_binder_active() composer = Puppet::Pops::Binder::BindingsComposer.new() layered_bindings = composer.compose(topscope) @injector = Puppet::Pops::Binder::Injector.new(Puppet::Pops::Binder::Binder.new(layered_bindings)) end def assert_binder_active unless is_binder_active? raise ArgumentError, "The Puppet Binder is only available when either '--binder true' or '--parser future' is used" end end end
thejonanshow/my-boxen
vendor/bundle/ruby/2.3.0/gems/puppet-3.8.7/lib/puppet/parser/compiler.rb
Ruby
mit
21,679
package nxt.http; import nxt.Nxt; import nxt.Transaction; import nxt.util.Convert; import org.json.simple.JSONObject; import org.json.simple.JSONStreamAware; import javax.servlet.http.HttpServletRequest; import static nxt.http.JSONResponses.INCORRECT_TRANSACTION; import static nxt.http.JSONResponses.MISSING_TRANSACTION; import static nxt.http.JSONResponses.UNKNOWN_TRANSACTION; public final class GetTransaction extends APIServlet.APIRequestHandler { static final GetTransaction instance = new GetTransaction(); private GetTransaction() { super("transaction", "hash"); } @Override JSONStreamAware processRequest(HttpServletRequest req) { String transactionIdString = Convert.emptyToNull(req.getParameter("transaction")); String transactionHash = Convert.emptyToNull(req.getParameter("hash")); if (transactionIdString == null && transactionHash == null) { return MISSING_TRANSACTION; } Long transactionId = null; Transaction transaction; try { if (transactionIdString != null) { transactionId = Convert.parseUnsignedLong(transactionIdString); transaction = Nxt.getBlockchain().getTransaction(transactionId); } else { transaction = Nxt.getBlockchain().getTransaction(transactionHash); if (transaction == null) { return UNKNOWN_TRANSACTION; } } } catch (RuntimeException e) { return INCORRECT_TRANSACTION; } JSONObject response; if (transaction == null) { transaction = Nxt.getTransactionProcessor().getUnconfirmedTransaction(transactionId); if (transaction == null) { return UNKNOWN_TRANSACTION; } response = transaction.getJSONObject(); } else { response = transaction.getJSONObject(); response.put("block", Convert.toUnsignedLong(transaction.getBlockId())); response.put("confirmations", Nxt.getBlockchain().getLastBlock().getHeight() - transaction.getHeight()); response.put("blockTimestamp", transaction.getBlockTimestamp()); } response.put("sender", Convert.toUnsignedLong(transaction.getSenderId())); response.put("hash", transaction.getHash()); return response; } }
aspnmy/NasCoin
src/java/nxt/http/GetTransaction.java
Java
mit
2,418
# =================================================================== # # Copyright (c) 2015, Legrandin <helderijs@gmail.com> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== """Keccak family of cryptographic hash algorithms. `Keccak`_ is the winning algorithm of the SHA-3 competition organized by NIST. What eventually became SHA-3 is a variant incompatible to Keccak, even though the security principles and margins remain the same. If you are interested in writing SHA-3 compliant code, you must use the modules ``SHA3_224``, ``SHA3_256``, ``SHA3_384`` or ``SHA3_512``. 
This module implements the Keccak hash functions for the 64 bit word length (b=1600) and the fixed digest sizes of 224, 256, 384 and 512 bits. >>> from Cryptodome.Hash import keccak >>> >>> keccak_hash = keccak.new(digest_bits=512) >>> keccak_hash.update(b'Some data') >>> print keccak_hash.hexdigest() .. _Keccak: http://www.keccak.noekeon.org/Keccak-specifications.pdf """ from Cryptodome.Util.py3compat import bord from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, expect_byte_string) _raw_keccak_lib = load_pycryptodome_raw_lib("Cryptodome.Hash._keccak", """ int keccak_init(void **state, size_t capacity_bytes, uint8_t padding_byte); int keccak_destroy(void *state); int keccak_absorb(void *state, const uint8_t *in, size_t len); int keccak_squeeze(const void *state, uint8_t *out, size_t len); int keccak_digest(void *state, uint8_t *digest, size_t len); """) class Keccak_Hash(object): """Class that implements a Keccak hash """ def __init__(self, data, digest_bytes, update_after_digest): #: The size of the resulting hash in bytes. self.digest_size = digest_bytes self._update_after_digest = update_after_digest self._digest_done = False state = VoidPointer() result = _raw_keccak_lib.keccak_init(state.address_of(), c_size_t(self.digest_size * 2), 0x01) if result: raise ValueError("Error %d while instantiating keccak" % result) self._state = SmartPointer(state.get(), _raw_keccak_lib.keccak_destroy) if data: self.update(data) def update(self, data): """Continue hashing of a message by consuming the next chunk of data. Repeated calls are equivalent to a single call with the concatenation of all the arguments. In other words: >>> m.update(a); m.update(b) is equivalent to: >>> m.update(a+b) :Parameters: data : byte string The next chunk of the message being hashed. 
""" if self._digest_done and not self._update_after_digest: raise TypeError("You can only call 'digest' or 'hexdigest' on this object") expect_byte_string(data) result = _raw_keccak_lib.keccak_absorb(self._state.get(), data, c_size_t(len(data))) if result: raise ValueError("Error %d while updating keccak" % result) return self def digest(self): """Return the **binary** (non-printable) digest of the message that has been hashed so far. You cannot update the hash anymore after the first call to ``digest`` (or ``hexdigest``). :Return: A byte string of `digest_size` bytes. It may contain non-ASCII characters, including null bytes. """ self._digest_done = True bfr = create_string_buffer(self.digest_size) result = _raw_keccak_lib.keccak_digest(self._state.get(), bfr, c_size_t(self.digest_size)) if result: raise ValueError("Error %d while squeezing keccak" % result) return get_raw_buffer(bfr) def hexdigest(self): """Return the **printable** digest of the message that has been hashed so far. This method does not change the state of the hash object. :Return: A string of 2* `digest_size` characters. It contains only hexadecimal ASCII digits. """ return "".join(["%02x" % bord(x) for x in self.digest()]) def new(self, **kwargs): if "digest_bytes" not in kwargs and "digest_bits" not in kwargs: kwargs["digest_bytes"] = self.digest_size return new(**kwargs) def new(**kwargs): """Return a fresh instance of the hash object. :Keywords: data : byte string Optional. The very first chunk of the message to hash. It is equivalent to an early call to ``update()``. digest_bytes : integer The size of the digest, in bytes (28, 32, 48, 64). digest_bits : integer The size of the digest, in bits (224, 256, 384, 512). update_after_digest : boolean Optional. By default, a hash object cannot be updated anymore after the digest is computed. When this flag is ``True``, such check is no longer enforced. 
:Return: A `Keccak_Hash` object """ data = kwargs.pop("data", None) update_after_digest = kwargs.pop("update_after_digest", False) digest_bytes = kwargs.pop("digest_bytes", None) digest_bits = kwargs.pop("digest_bits", None) if None not in (digest_bytes, digest_bits): raise TypeError("Only one digest parameter must be provided") if (None, None) == (digest_bytes, digest_bits): raise TypeError("Digest size (bits, bytes) not provided") if digest_bytes is not None: if digest_bytes not in (28, 32, 48, 64): raise ValueError("'digest_bytes' must be: 28, 32, 48 or 64") else: if digest_bits not in (224, 256, 384, 512): raise ValueError("'digest_bytes' must be: 224, 256, 384 or 512") digest_bytes = digest_bits // 8 if kwargs: raise TypeError("Unknown parameters: " + str(kwargs)) return Keccak_Hash(data, digest_bytes, update_after_digest)
mchristopher/PokemonGo-DesktopMap
app/pylibs/win32/Cryptodome/Hash/keccak.py
Python
mit
8,329
module Octokit class Client module Gists # List gists for a user or all public gists # # @param username [String] An optional user to filter listing # @return [Array<Hashie::Mash>] A list of gists # @example Fetch all gists for defunkt # Octokit.gists('defunkt') # @example Fetch all public gists # Octokit.gists # @see http://developer.github.com/v3/gists/#list-gists def gists(username=nil, options={}) if username.nil? get 'gists', options else get "users/#{username}/gists", options end end alias :list_gists :gists # List public gists # # @return [Array<Hashie::Mash>] A list of gists # @example Fetch all public gists # Octokit.public_gists # @see http://developer.github.com/v3/gists/#list-gists def public_gists(options={}) get 'gists/public', options end # List the authenticated user’s starred gists # # @return [Array<Hashie::Mash>] A list of gists def starred_gists(options={}) get 'gists/starred', options end # Get a single gist # # @param gist [String] ID of gist to fetch # @return [Hash::Mash] Gist information # @see http://developer.github.com/v3/gists/#get-a-single-gist def gist(gist, options={}) get "gists/#{Gist.new gist}", options end # Create a gist # # @param options [Hash] Gist information. # @option options [String] :description # @option options [Boolean] :public Sets gist visibility # @option options [Array<Hash>] :files Files that make up this gist. Keys # should be the filename, the value a Hash with a :content key with text # content of the Gist. # @return [Hashie::Mash] Newly created gist info # @see http://developer.github.com/v3/gists/#create-a-gist def create_gist(options={}) post 'gists', options end # Edit a gist # # @param options [Hash] Gist information. # @option options [String] :description # @option options [Boolean] :public Sets gist visibility # @option options [Array<Hash>] :files Files that make up this gist. Keys # should be the filename, the value a Hash with a :content key with text # content of the Gist. 
# # NOTE: All files from the previous version of the # gist are carried over by default if not included in the hash. Deletes # can be performed by including the filename with a null hash. # @return # [Hashie::Mash] Newly created gist info # @see http://developer.github.com/v3/gists/#edit-a-gist def edit_gist(gist, options={}) patch "gists/#{Gist.new gist}", options end # # Star a gist # # @param gist [String] Gist ID # @return [Boolean] Indicates if gist is starred successfully # @see http://developer.github.com/v3/gists/#star-a-gist def star_gist(gist, options={}) boolean_from_response(:put, "gists/#{Gist.new gist}/star", options) end # Unstar a gist # # @param gist [String] Gist ID # @return [Boolean] Indicates if gist is unstarred successfully # @see http://developer.github.com/v3/gists/#unstar-a-gist def unstar_gist(gist, options={}) boolean_from_response(:delete, "gists/#{Gist.new gist}/star", options) end # Check if a gist is starred # # @param gist [String] Gist ID # @return [Boolean] Indicates if gist is starred # @see http://developer.github.com/v3/gists/#check-if-a-gist-is-starred def gist_starred?(gist, options={}) boolean_from_response(:get, "gists/#{Gist.new gist}/star", options) end # Fork a gist # # @param gist [String] Gist ID # @return [Hashie::Mash] Data for the new gist # @see http://developer.github.com/v3/gists/#fork-a-gist def fork_gist(gist, options={}) post "gists/#{Gist.new gist}/forks", options end # Delete a gist # # @param gist [String] Gist ID # @return [Boolean] Indicating success of deletion # @see http://developer.github.com/v3/gists/#delete-a-gist def delete_gist(gist, options={}) boolean_from_response(:delete, "gists/#{Gist.new gist}", options) end # List gist comments # # @param gist_id [String] Gist Id. # @return [Array<Hashie::Mash>] Array of hashes representing comments. 
# @see http://developer.github.com/v3/gists/comments/#list-comments-on-a-gist # @example # Octokit.gist_comments('3528ae645') def gist_comments(gist_id, options={}) get "gists/#{gist_id}/comments", options end # Get gist comment # # @param gist_id [String] Id of the gist. # @param gist_comment_id [Integer] Id of the gist comment. # @return [Hashie::Mash] Hash representing gist comment. # @see http://developer.github.com/v3/gists/comments/#get-a-single-comment # @example # Octokit.gist_comment('208sdaz3', 1451398) def gist_comment(gist_id, gist_comment_id, options={}) get "gists/#{gist_id}/comments/#{gist_comment_id}", options end # Create gist comment # # Requires authenticated client. # # @param gist_id [String] Id of the gist. # @param comment [String] Comment contents. # @return [Hashie::Mash] Hash representing the new comment. # @see Octokit::Client # @see http://developer.github.com/v3/gists/comments/#create-a-comment # @example # @client.create_gist_comment('3528645', 'This is very helpful.') def create_gist_comment(gist_id, comment, options={}) options.merge!({:body => comment}) post "gists/#{gist_id}/comments", options end # Update gist comment # # Requires authenticated client # # @param gist_id [String] Id of the gist. # @param gist_comment_id [Integer] Id of the gist comment to update. # @param comment [String] Updated comment contents. # @return [Hashie::Mash] Hash representing the updated comment. # @see Octokit::Client # @see http://developer.github.com/v3/gists/comments/#edit-a-comment # @example # @client.update_gist_comment('208sdaz3', '3528645', ':heart:') def update_gist_comment(gist_id, gist_comment_id, comment, options={}) options.merge!({:body => comment}) patch "gists/#{gist_id}/comments/#{gist_comment_id}", options end # Delete gist comment # # Requires authenticated client. # # @param gist_id [String] Id of the gist. # @param gist_comment_id [Integer] Id of the gist comment to delete. 
# @return [Boolean] True if comment deleted, false otherwise. # @see Octokit::Client # @see http://developer.github.com/v3/gists/comments/#delete-a-comment # @example # @client.delete_gist_comment('208sdaz3', '586399') def delete_gist_comment(gist_id, gist_comment_id, options={}) boolean_from_response(:delete, "gists/#{gist_id}/comments/#{gist_comment_id}", options) end end end end
phatpenguin/boxen-belgarion
.bundle/ruby/1.9.1/gems/octokit-1.23.0/lib/octokit/client/gists.rb
Ruby
mit
7,325
require( [ 'gui/Button' ], function (Button) { return; var button = new Button({ main: $('#ui-button') }); button.render(); } );
musicode/gui
demo/Popup.js
JavaScript
mit
189
using System; using System.Collections.Generic; using System.Linq; using System.Text; namespace AsanaNet { [Serializable] public class AsanaTeam : AsanaObject, IAsanaData { [AsanaDataAttribute("name")] public string Name { get; private set; } // ------------------------------------------------------ public bool IsObjectLocal { get { return true; } } public void Complete() { throw new NotImplementedException(); } static public implicit operator AsanaTeam(Int64 ID) { return Create(typeof(AsanaTeam), ID) as AsanaTeam; } } }
jfjcn/AsanaNet
AsanaNet/Objects/AsanaTeam.cs
C#
mit
656
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("MultiplicationSign")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("MultiplicationSign")] [assembly: AssemblyCopyright("Copyright © 2015")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("16b9c3ba-c518-4e36-93c4-969cc63ec2bc")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")]
tokera/TelerikAcademyHomeworks
CSharpPartOneHomeworks/ConditionalStatements/MultiplicationSign/Properties/AssemblyInfo.cs
C#
mit
1,448
<?xml version="1.0" ?><!DOCTYPE TS><TS language="sr" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Fuguecoin</source> <translation>О Fuguecoin-у</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Fuguecoin&lt;/b&gt; version</source> <translation>&lt;b&gt;Fuguecoin&lt;/b&gt; верзија</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young (eay@cryptsoft.com) and UPnP software written by Thomas Bernard.</source> <translation type="unfinished"/> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The Fuguecoin developers</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Адресар</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Кликните два пута да промените адресу и/или етикету</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Прави нову адресу</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Копира изабрану адресу на системски клипборд</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>&amp;Нова 
адреса</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your Fuguecoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Ово су Ваше Fuguecoin адресе за примање уплата. Можете да сваком пошиљаоцу дате другачију адресу да би пратили ко је вршио уплате.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Prikaži &amp;QR kod</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a Fuguecoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified Fuguecoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Избриши</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your Fuguecoin addresses for sending payments. 
Always check the amount and the receiving address before sending coins.</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation type="unfinished"/> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Извоз података из адресара</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Зарезом одвојене вредности (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Грешка током извоза</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Није могуће писати у фајл %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Етикета</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Адреса</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(без етикете)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Унесите лозинку</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Нова лозинка</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Поновите нову лозинку</translation> </message> 
<message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Унесите нову лозинку за приступ новчанику.&lt;br/&gt;Молимо Вас да лозинка буде &lt;b&gt;10 или више насумице одабраних знакова&lt;/b&gt;, или &lt;b&gt;осам или више речи&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Шифровање новчаника</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Ова акција захтева лозинку Вашег новчаника да би га откључала.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Откључавање новчаника</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Ова акција захтева да унесете лозинку да би дешифловала новчаник.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Дешифровање новчаника</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Промена лозинке</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Унесите стару и нову лозинку за шифровање новчаника.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Одобрите шифровање новчаника</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR BITCOINS&lt;/b&gt;!</source> <translation>Упозорење: Ако се ваш новчаник шифрује а потом изгубите лозинкзу, ви ћете 
&lt;b&gt;ИЗГУБИТИ СВЕ BITCOIN-Е&lt;/b&gt;!</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Да ли сте сигурни да желите да се новчаник шифује?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation type="unfinished"/> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Новчаник је шифрован</translation> </message> <message> <location line="-56"/> <source>Fuguecoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your bitcoins from being stolen by malware infecting your computer.</source> <translation>Fuguecoin će se sad zatvoriti da bi završio proces enkripcije. Zapamti da enkripcija tvog novčanika ne može u potpunosti da zaštiti tvoje bitcoine da ne budu ukradeni od malawarea koji bi inficirao tvoj kompjuter.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Неуспело шифровање новчаника</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Настала је унутрашња грешка током шифровања новчаника. 
Ваш новчаник није шифрован.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>Лозинке које сте унели се не подударају.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>Неуспело откључавање новчаника</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Лозинка коју сте унели за откључавање новчаника је нетачна.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Неуспело дешифровање новчаника</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Лозинка за приступ новчанику је успешно промењена.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation type="unfinished"/> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Синхронизација са мрежом у току...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>&amp;Општи преглед</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Погледајте општи преглед новчаника</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Трансакције</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Претражите историјат трансакција</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>Уредите запамћене адресе и њихове 
етикете</translation> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Прегледајте листу адреса на којима прихватате уплате</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>I&amp;zlaz</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Напустите програм</translation> </message> <message> <location line="+4"/> <source>Show information about Fuguecoin</source> <translation>Прегледајте информације о Fuguecoin-у</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>О &amp;Qt-у</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Прегледајте информације о Qt-у</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>П&amp;оставке...</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Шифровање новчаника...</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Backup новчаника</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>Промени &amp;лозинку...</translation> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation type="unfinished"/> </message> <message> <location line="-347"/> <source>Send coins to a Fuguecoin address</source> <translation>Пошаљите новац на bitcoin адресу</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for Fuguecoin</source> <translation>Изаберите могућности bitcoin-а</translation> </message> <message> <location line="+9"/> 
<source>Backup wallet to another location</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Мењање лозинке којом се шифрује новчаник</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation type="unfinished"/> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation type="unfinished"/> </message> <message> <location line="-165"/> <location line="+530"/> <source>Fuguecoin</source> <translation type="unfinished"/> </message> <message> <location line="-530"/> <source>Wallet</source> <translation>новчаник</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>&amp;About Fuguecoin</source> <translation>&amp;О Fuguecoin-у</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Sign messages with your Fuguecoin addresses to prove you own them</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified Fuguecoin addresses</source> <translation 
type="unfinished"/> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Фајл</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Подешавања</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>П&amp;омоћ</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Трака са картицама</translation> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+47"/> <source>Fuguecoin client</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to Fuguecoin network</source> <translation><numerusform>%n активна веза са Fuguecoin мрежом</numerusform><numerusform>%n активне везе са Fuguecoin мрежом</numerusform><numerusform>%n активних веза са Fuguecoin мрежом</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation 
type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Error</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. 
Do you want to pay the fee?</source> <translation type="unfinished"/> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Ажурно</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Ажурирање у току...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Послана трансакција</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Придошла трансакција</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Datum: %1⏎ Iznos: %2⏎ Tip: %3⏎ Adresa: %4⏎</translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation type="unfinished"/> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid Fuguecoin address or malformed URI parameters.</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Новчаник је &lt;b&gt;шифрован&lt;/b&gt; и тренутно &lt;b&gt;откључан&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Новчаник је &lt;b&gt;шифрован&lt;/b&gt; и тренутно &lt;b&gt;закључан&lt;/b&gt;</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. 
Fuguecoin can no longer continue safely and will quit.</source> <translation type="unfinished"/> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation type="unfinished"/> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Измени адресу</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Етикета</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Адреса</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation type="unfinished"/> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>New sending address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation type="unfinished"/> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Унешена адреса &quot;%1&quot; се већ налази у адресару.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Fuguecoin address.</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Could not unlock 
wallet.</source> <translation>Немогуће откључати новчаник.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation type="unfinished"/> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>Fuguecoin-Qt</source> <translation type="unfinished"/> </message> <message> <location line="-12"/> <source>version</source> <translation>верзија</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Korišćenje:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>UI options</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation type="unfinished"/> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Поставке</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. 
Most transactions are 1 kB.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Automatically start Fuguecoin after logging in to the system.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Start Fuguecoin on system login</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Automatically open the Fuguecoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Connect to the Fuguecoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 
9050)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation type="unfinished"/> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>The user interface language can be set here. 
This setting will take effect after restarting Fuguecoin.</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Јединица за приказивање износа:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Whether to show Fuguecoin addresses in the transaction list or not.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation type="unfinished"/> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation type="unfinished"/> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation type="unfinished"/> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Fuguecoin.</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation type="unfinished"/> 
</message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Форма</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Fuguecoin network after a connection is established, but this process has not completed yet.</source> <translation type="unfinished"/> </message> <message> <location line="-124"/> <source>Balance:</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Непотврђено:</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>новчаник</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation type="unfinished"/> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Недавне трансакције&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation type="unfinished"/> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation type="unfinished"/> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start bitcoin: click-to-pay handler</source> <translation type="unfinished"/> </message> </context> <context> <name>QRCodeDialog</name> <message> <location 
filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation type="unfinished"/> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Zatraži isplatu</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Iznos:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>&amp;Етикета</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Poruka:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Snimi kao...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation type="unfinished"/> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation type="unfinished"/> </message> <message> <location line="-217"/> <source>Client version</source> <translation type="unfinished"/> </message> 
<message> <location line="-45"/> <source>&amp;Information</source> <translation type="unfinished"/> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation type="unfinished"/> </message> <message> <location line="+49"/> <source>Startup time</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Network</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>On testnet</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Block chain</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Last block time</source> <translation type="unfinished"/> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Show the Fuguecoin-Qt help message to get a list with possible Fuguecoin command-line options.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation type="unfinished"/> </message> <message> <location line="-260"/> <source>Build date</source> <translation type="unfinished"/> </message> <message> <location line="-104"/> <source>Fuguecoin - Debug window</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> 
<source>Fuguecoin Core</source> <translation type="unfinished"/> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Open the Fuguecoin debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation type="unfinished"/> </message> <message> <location line="+102"/> <source>Clear console</source> <translation type="unfinished"/> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the Fuguecoin RPC console.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Слање новца</translation> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation type="unfinished"/> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Ukloni sva polja sa transakcijama</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Balance:</source> <translation type="unfinished"/> 
</message> <message> <location line="+10"/> <source>123.456 BTC</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Потврди акцију слања</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>&amp;Пошаљи</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Да ли сте сигурни да желите да пошаљете %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> и </translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Форма</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation type="unfinished"/> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation type="unfinished"/> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation type="unfinished"/> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;Етикета</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Izaberite adresu iz adresara</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation type="unfinished"/> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a Fuguecoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Unesite Fuguecoin adresu (n.pr. 
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation type="unfinished"/> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation type="unfinished"/> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Signature</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Fuguecoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sign 
&amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation type="unfinished"/> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Fuguecoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation type="unfinished"/> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Fuguecoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Unesite Fuguecoin adresu (n.pr. 
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Enter Fuguecoin signature</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation type="unfinished"/> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation type="unfinished"/> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation type="unfinished"/> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation type="unfinished"/> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation type="unfinished"/> </message> 
</context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The Fuguecoin developers</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Otvorite do %1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/nepotvrdjeno</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 potvrde</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>datum</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Generated</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation type="unfinished"/> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation type="unfinished"/> </message> <message> <location line="-2"/> <source>label</source> <translation>етикета</translation> </message> <message> <location 
line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation type="unfinished"/> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation type="unfinished"/> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Net amount</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Message</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Comment</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. 
This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Debug information</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Transaction</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Inputs</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Amount</source> <translation>iznos</translation> </message> <message> <location line="+1"/> <source>true</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>false</source> <translation type="unfinished"/> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, nije još uvek uspešno emitovan</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>nepoznato</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>detalji transakcije</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Ovaj odeljak pokazuje detaljan opis transakcije</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>datum</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>tip</translation> </message> <message> <location 
line="+0"/> <source>Address</source> <translation>Адреса</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>iznos</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Otvoreno do %1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Offline * van mreže (%1 potvrdjenih)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Nepotvrdjeno (%1 of %2 potvrdjenih)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Potvrdjena (%1 potvrdjenih)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Ovaj blok nije primljen od ostalih čvorova (nodova) i verovatno neće biti prihvaćen!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Generisan ali nije prihvaćen</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>Primljen sa</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Primljeno od</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Poslat ka</translation> </message> <message> <location 
line="+2"/> <source>Payment to yourself</source> <translation>Isplata samom sebi</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Minirano</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(n/a)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Status vaše transakcije. Predjite mišem preko ovog polja da bi ste videli broj konfirmacija</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Datum i vreme primljene transakcije.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Tip transakcije</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Destinacija i adresa transakcije</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Iznos odbijen ili dodat balansu.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Sve</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Danas</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>ove nedelje</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Ovog meseca</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Prošlog meseca</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Ove godine</translation> </message> <message> <location line="+1"/> <source>Range...</source> 
<translation>Opseg...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Primljen sa</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Poslat ka</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>Vama - samom sebi</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Minirano</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Drugi</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Navedite adresu ili naziv koji bi ste potražili</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Min iznos</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>kopiraj adresu</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>kopiraj naziv</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>kopiraj iznos</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>promeni naziv</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation type="unfinished"/> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>Izvezi podatke o transakcijama</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Зарезом одвојене вредности (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Potvrdjen</translation> </message> <message> <location line="+1"/> <source>Date</source> 
<translation>datum</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>tip</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Етикета</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Адреса</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>iznos</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Грешка током извоза</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Није могуће писати у фајл %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Opseg:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>do</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>Слање новца</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation type="unfinished"/> 
</message> <message> <location line="+4"/> <source>Backup Successful</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation type="unfinished"/> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>Fuguecoin version</source> <translation>Fuguecoin верзија</translation> </message> <message> <location line="+102"/> <source>Usage:</source> <translation>Korišćenje:</translation> </message> <message> <location line="-29"/> <source>Send command to -server or bitcoind</source> <translation>Pošalji naredbu na -server ili bitcoinid </translation> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Listaj komande</translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Zatraži pomoć za komande</translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Opcije</translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: bitcoin.conf)</source> <translation>Potvrdi željeni konfiguracioni fajl (podrazumevani:bitcoin.conf)</translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: bitcoind.pid)</source> <translation>Konkretizuj pid fajl (podrazumevani: bitcoind.pid)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Gde je konkretni data direktorijum </translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation type="unfinished"/> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 8333 or testnet: 18333)</source> <translation>Slušaj konekcije na &lt;port&gt; (default: 8333 or testnet: 18333)</translation> 
</message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Održavaj najviše &lt;n&gt; konekcija po priključku (default: 125) </translation> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation type="unfinished"/> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation type="unfinished"/> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 8332 or testnet: 18332)</source> <translation type="unfinished"/> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation>Prihvati komandnu liniju i JSON-RPC komande</translation> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>Radi u pozadini kao daemon servis i prihvati komande</translation> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation>Koristi testnu mrežu</translation> </message> <message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation type="unfinished"/> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random 
password: rpcuser=bitcoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Fuguecoin Alert&quot; admin@foo.com </source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. Fuguecoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Fuguecoin will not work properly.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! 
All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> 
<translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>How thorough the 
block verification is (0-4, default: 3)</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation type="unfinished"/> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation type="unfinished"/> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation type="unfinished"/> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> 
<source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>SSL options: (see the Fuguecoin Wiki for SSL setup instructions)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation 
type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>System error: </source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Korisničko ime za JSON-RPC konekcije</translation> </message> <message> <location line="+4"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation type="unfinished"/> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation>Lozinka za JSON-RPC konekcije</translation> </message> <message> <location line="-67"/> 
<source>Allow JSON-RPC connections from specified IP address</source> <translation>Dozvoli JSON-RPC konekcije sa posebne IP adrese</translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Pošalji komande to nodu koji radi na &lt;ip&gt; (default: 127.0.0.1)</translation> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation type="unfinished"/> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation type="unfinished"/> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Odredi veličinu zaštićenih ključeva na &lt;n&gt; (default: 100)</translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Ponovo skeniraj lanac blokova za nedostajuće transakcije iz novčanika</translation> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Koristi OpenSSL (https) za JSON-RPC konekcije</translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>privatni ključ za Server (podrazumevan: server.pem)</translation> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Prihvatljive cifre (podrazumevano: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Ova poruka Pomoći</translation> </message> <message> <location 
line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation type="unfinished"/> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation type="unfinished"/> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation type="unfinished"/> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation>učitavam adrese....</translation> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of Fuguecoin</source> <translation type="unfinished"/> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart Fuguecoin to complete</source> <translation type="unfinished"/> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation type="unfinished"/> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> 
<source>Invalid amount</source> <translation type="unfinished"/> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Učitavam blok indeksa...</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation type="unfinished"/> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. Fuguecoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation>Новчаник се учитава...</translation> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation type="unfinished"/> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>Ponovo skeniram...</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>Završeno učitavanje</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation type="unfinished"/> </message> <message> <location line="-74"/> <source>Error</source> <translation type="unfinished"/> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation type="unfinished"/> </message> </context> </TS>
gjhiggins/fuguecoin-1
src/qt/locale/bitcoin_sr.ts
TypeScript
mit
103,421
load File.dirname(__FILE__) + '/production.rb' if File.exists? File.dirname(__FILE__) + '/../application.local.rb' require File.dirname(__FILE__) + '/../application.local.rb' end
dtulibrary/gazo
config/environments/unstable.rb
Ruby
mit
181
module SlotMachine # A Slot defines a slot in a slotted. A bit like a variable name but for objects. # # PS: for the interested: A "development" of Smalltalk was the # prototype based language (read: JavaScript equivalent) # called Self https://en.wikipedia.org/wiki/Self_(programming_language) # # Slots are the instance names of objects. But since the language is dynamic # what is it that we can say about instance names at runtime? # Start with a Slotted, like the Message (in register one), we know all it's # variables. But there is a Message in there, and for that we know the instances # too. And off course for _all_ objects we know where the type is. # # The definiion is an array of symbols that we can resolve to SlotLoad # Instructions. Or in the case of constants to ConstantLoad # class Slot attr_reader :name , :next_slot # initialize with just the name of the slot. Add more to the chain with set_next def initialize( name ) raise "No name" unless name @name = name end #set the next_slot , but always at the end of the chain def set_next(slot) if(@next_slot) @next_slot.set_next(slot) else @next_slot = slot end end # return the length of chain, ie 1 plus however many more next_slots there are def length return 1 unless @next_slot 1 + @next_slot.length end # name of all the slots, with dot syntax def to_s names = name.to_s names += ".#{@next_slot}" if @next_slot names end end end
salama/salama
lib/slot_machine/slot.rb
Ruby
mit
1,579
var GUID = (function () { function _GUID() { return UUIDcreatePart(4) + UUIDcreatePart(2) + UUIDcreatePart(2) + UUIDcreatePart(2) + UUIDcreatePart(6); }; function UUIDcreatePart(length) { var uuidpart = ""; for (var i = 0; i < length; i++) { var uuidchar = parseInt((Math.random() * 256), 10).toString(16); if (uuidchar.length == 1) { uuidchar = "0" + uuidchar; } uuidpart += uuidchar; } return uuidpart; } return { newGuid: _GUID }; })(); var dataSource = (function () { var tablesStorageKey = "60AE0285-40EE-4A2D-BA5F-F75D601593DD"; var globalData = []; var tables = [ { Id: 5, Name: "Contact", Schema: [ { k: "name", t: 1 }, { k: "companyname", t: 1 }, { k: "position", t: 1 } ], Created: "2016-08-12T07:32:46.69" }, { Id: 6, Name: "Profile", Schema: [ { k: "Name", t: 1 }, { k: "Age", t: 2 }, { k: "Gender", t: 4 }, { k: "Rating", t: 3 }, { k: "Created", t: 5 } ], Created: "2016-09-28T21:53:40.19" } ]; function _loadData(){ globalData = JSON.parse(localStorage[tablesStorageKey] || "[]"); } function _save() { localStorage[tablesStorageKey] = JSON.stringify(globalData); } function _getSchema(tableId) { for (var t = 0; t < tables.length; t++) if (tableId === tables[t].Id) return tables[t].Schema; } function _find(data) { var skip = data.start; var take = data.length; return { draw: data.draw, recordsTotal: globalData.length, recordsFiltered: globalData.length, data: globalData.slice(skip, take + skip) }; } function _insert(data) { var id = GUID.newGuid(); globalData.push([id, data.Name, data.Age, data.Gender, data.Rating, data.Created]); _save() return { IsOk: true, id: id }; } function _update(data) { for (var t = 0; t < globalData.length; t++) if (data._id === globalData[t][0]) { globalData[t] = [data._id, data.Name, data.Age, data.Gender, data.Rating, data.Created]; _save() return { IsOk: true }; } return { IsOk: false }; } function _delete(id) { for (var t = 0; t < globalData.length; t++) if (id === globalData[t][0]) { globalData = globalData.filter(item => item !== 
globalData[t]); _save(); return { IsOk: true }; } return { IsOk: false }; } _loadData(); return { getSchema: _getSchema, find: _find, insert: _insert, update: _update, delete: _delete }; })();
storyclm/storyCLM.js
presentation/js/dataSource.js
JavaScript
mit
3,648
<?php /** * @link http://www.yiiframework.com/ * @copyright Copyright (c) 2008 Yii Software LLC * @license http://www.yiiframework.com/license/ */ namespace yii\log; use Yii; use yii\base\Component; /** * Logger records logged messages in memory and sends them to different targets if [[dispatcher]] is set. * * A Logger instance can be accessed via `Yii::getLogger()`. You can call the method [[log()]] to record a single log message. * For convenience, a set of shortcut methods are provided for logging messages of various severity levels * via the [[Yii]] class: * * - [[Yii::trace()]] * - [[Yii::error()]] * - [[Yii::warning()]] * - [[Yii::info()]] * - [[Yii::beginProfile()]] * - [[Yii::endProfile()]] * * For more details and usage information on Logger, see the [guide article on logging](guide:runtime-logging). * * When the application ends or [[flushInterval]] is reached, Logger will call [[flush()]] * to send logged messages to different log targets, such as [[FileTarget|file]], [[EmailTarget|email]], * or [[DbTarget|database]], with the help of the [[dispatcher]]. * * @property-read array $dbProfiling The first element indicates the number of SQL statements executed, and * the second element the total time spent in SQL execution. This property is read-only. * @property-read float $elapsedTime The total elapsed time in seconds for current request. This property is * read-only. * @property-read array $profiling The profiling results. Each element is an array consisting of these * elements: `info`, `category`, `timestamp`, `trace`, `level`, `duration`, `memory`, `memoryDiff`. The `memory` * and `memoryDiff` values are available since version 2.0.11. This property is read-only. * * @author Qiang Xue <qiang.xue@gmail.com> * @since 2.0 */ class Logger extends Component { /** * Error message level. An error message is one that indicates the abnormal termination of the * application and may require developer's handling. 
*/ const LEVEL_ERROR = 0x01; /** * Warning message level. A warning message is one that indicates some abnormal happens but * the application is able to continue to run. Developers should pay attention to this message. */ const LEVEL_WARNING = 0x02; /** * Informational message level. An informational message is one that includes certain information * for developers to review. */ const LEVEL_INFO = 0x04; /** * Tracing message level. An tracing message is one that reveals the code execution flow. */ const LEVEL_TRACE = 0x08; /** * Profiling message level. This indicates the message is for profiling purpose. */ const LEVEL_PROFILE = 0x40; /** * Profiling message level. This indicates the message is for profiling purpose. It marks the * beginning of a profiling block. */ const LEVEL_PROFILE_BEGIN = 0x50; /** * Profiling message level. This indicates the message is for profiling purpose. It marks the * end of a profiling block. */ const LEVEL_PROFILE_END = 0x60; /** * @var array logged messages. This property is managed by [[log()]] and [[flush()]]. * Each log message is of the following structure: * * ``` * [ * [0] => message (mixed, can be a string or some complex data, such as an exception object) * [1] => level (integer) * [2] => category (string) * [3] => timestamp (float, obtained by microtime(true)) * [4] => traces (array, debug backtrace, contains the application code call stacks) * [5] => memory usage in bytes (int, obtained by memory_get_usage()), available since version 2.0.11. * ] * ``` */ public $messages = []; /** * @var int how many messages should be logged before they are flushed from memory and sent to targets. * Defaults to 1000, meaning the [[flush]] method will be invoked once every 1000 messages logged. * Set this property to be 0 if you don't want to flush messages until the application terminates. * This property mainly affects how much memory will be taken by the logged messages. 
* A smaller value means less memory, but will increase the execution time due to the overhead of [[flush()]]. */ public $flushInterval = 1000; /** * @var int how much call stack information (file name and line number) should be logged for each message. * If it is greater than 0, at most that number of call stacks will be logged. Note that only application * call stacks are counted. */ public $traceLevel = 0; /** * @var Dispatcher the message dispatcher */ public $dispatcher; /** * Initializes the logger by registering [[flush()]] as a shutdown function. */ public function init() { parent::init(); register_shutdown_function(function () { // make regular flush before other shutdown functions, which allows session data collection and so on $this->flush(); // make sure log entries written by shutdown functions are also flushed // ensure "flush()" is called last when there are multiple shutdown functions register_shutdown_function([$this, 'flush'], true); }); } /** * Logs a message with the given type and category. * If [[traceLevel]] is greater than 0, additional call stack information about * the application code will be logged as well. * @param string|array $message the message to be logged. This can be a simple string or a more * complex data structure that will be handled by a [[Target|log target]]. * @param int $level the level of the message. This must be one of the following: * `Logger::LEVEL_ERROR`, `Logger::LEVEL_WARNING`, `Logger::LEVEL_INFO`, `Logger::LEVEL_TRACE`, * `Logger::LEVEL_PROFILE_BEGIN`, `Logger::LEVEL_PROFILE_END`. * @param string $category the category of the message. 
*/ public function log($message, $level, $category = 'application') { $time = microtime(true); $traces = []; if ($this->traceLevel > 0) { $count = 0; $ts = debug_backtrace(DEBUG_BACKTRACE_IGNORE_ARGS); array_pop($ts); // remove the last trace since it would be the entry script, not very useful foreach ($ts as $trace) { if (isset($trace['file'], $trace['line']) && strpos($trace['file'], YII2_PATH) !== 0) { unset($trace['object'], $trace['args']); $traces[] = $trace; if (++$count >= $this->traceLevel) { break; } } } } $this->messages[] = [$message, $level, $category, $time, $traces, memory_get_usage()]; if ($this->flushInterval > 0 && count($this->messages) >= $this->flushInterval) { $this->flush(); } } /** * Flushes log messages from memory to targets. * @param bool $final whether this is a final call during a request. */ public function flush($final = false) { $messages = $this->messages; // https://github.com/yiisoft/yii2/issues/5619 // new messages could be logged while the existing ones are being handled by targets $this->messages = []; if ($this->dispatcher instanceof Dispatcher) { $this->dispatcher->dispatch($messages, $final); } } /** * Returns the total elapsed time since the start of the current request. * This method calculates the difference between now and the timestamp * defined by constant `YII_BEGIN_TIME` which is evaluated at the beginning * of [[\yii\BaseYii]] class file. * @return float the total elapsed time in seconds for current request. */ public function getElapsedTime() { return microtime(true) - YII_BEGIN_TIME; } /** * Returns the profiling results. * * By default, all profiling results will be returned. You may provide * `$categories` and `$excludeCategories` as parameters to retrieve the * results that you are interested in. * * @param array $categories list of categories that you are interested in. * You can use an asterisk at the end of a category to do a prefix match. 
* For example, 'yii\db\*' will match categories starting with 'yii\db\', * such as 'yii\db\Connection'. * @param array $excludeCategories list of categories that you want to exclude * @return array the profiling results. Each element is an array consisting of these elements: * `info`, `category`, `timestamp`, `trace`, `level`, `duration`, `memory`, `memoryDiff`. * The `memory` and `memoryDiff` values are available since version 2.0.11. */ public function getProfiling($categories = [], $excludeCategories = []) { $timings = $this->calculateTimings($this->messages); if (empty($categories) && empty($excludeCategories)) { return $timings; } foreach ($timings as $i => $timing) { $matched = empty($categories); foreach ($categories as $category) { $prefix = rtrim($category, '*'); if (($timing['category'] === $category || $prefix !== $category) && strpos($timing['category'], $prefix) === 0) { $matched = true; break; } } if ($matched) { foreach ($excludeCategories as $category) { $prefix = rtrim($category, '*'); foreach ($timings as $i => $timing) { if (($timing['category'] === $category || $prefix !== $category) && strpos($timing['category'], $prefix) === 0) { $matched = false; break; } } } } if (!$matched) { unset($timings[$i]); } } return array_values($timings); } /** * Returns the statistical results of DB queries. * The results returned include the number of SQL statements executed and * the total time spent. * @return array the first element indicates the number of SQL statements executed, * and the second element the total time spent in SQL execution. */ public function getDbProfiling() { $timings = $this->getProfiling(['yii\db\Command::query', 'yii\db\Command::execute']); $count = count($timings); $time = 0; foreach ($timings as $timing) { $time += $timing['duration']; } return [$count, $time]; } /** * Calculates the elapsed time for the given log messages. * @param array $messages the log messages obtained from profiling * @return array timings. 
Each element is an array consisting of these elements: * `info`, `category`, `timestamp`, `trace`, `level`, `duration`, `memory`, `memoryDiff`. * The `memory` and `memoryDiff` values are available since version 2.0.11. */ public function calculateTimings($messages) { $timings = []; $stack = []; foreach ($messages as $i => $log) { list($token, $level, $category, $timestamp, $traces) = $log; $memory = isset($log[5]) ? $log[5] : 0; $log[6] = $i; $hash = md5(json_encode($token)); if ($level == self::LEVEL_PROFILE_BEGIN) { $stack[$hash] = $log; } elseif ($level == self::LEVEL_PROFILE_END) { if (isset($stack[$hash])) { $timings[$stack[$hash][6]] = [ 'info' => $stack[$hash][0], 'category' => $stack[$hash][2], 'timestamp' => $stack[$hash][3], 'trace' => $stack[$hash][4], 'level' => count($stack) - 1, 'duration' => $timestamp - $stack[$hash][3], 'memory' => $memory, 'memoryDiff' => $memory - (isset($stack[$hash][5]) ? $stack[$hash][5] : 0), ]; unset($stack[$hash]); } } } ksort($timings); return array_values($timings); } /** * Returns the text display of the specified level. * @param int $level the message level, e.g. [[LEVEL_ERROR]], [[LEVEL_WARNING]]. * @return string the text display of the level */ public static function getLevelName($level) { static $levels = [ self::LEVEL_ERROR => 'error', self::LEVEL_WARNING => 'warning', self::LEVEL_INFO => 'info', self::LEVEL_TRACE => 'trace', self::LEVEL_PROFILE_BEGIN => 'profile begin', self::LEVEL_PROFILE_END => 'profile end', self::LEVEL_PROFILE => 'profile', ]; return isset($levels[$level]) ? $levels[$level] : 'unknown'; } }
yujiandong/simpleforum
core/vendor/yiisoft/yii2/log/Logger.php
PHP
mit
13,222
package com.github.scribejava.apis.examples; import com.github.scribejava.apis.EtsyApi; import com.github.scribejava.core.builder.ServiceBuilder; import com.github.scribejava.core.model.OAuth1AccessToken; import com.github.scribejava.core.model.OAuth1RequestToken; import com.github.scribejava.core.model.OAuthRequest; import com.github.scribejava.core.model.Response; import com.github.scribejava.core.model.Verb; import com.github.scribejava.core.oauth.OAuth10aService; import java.io.IOException; import java.util.Scanner; import java.util.concurrent.ExecutionException; public class EtsyExample { private static final String PROTECTED_RESOURCE_URL = "https://openapi.etsy.com/v2/users/__SELF__"; private EtsyExample() { } @SuppressWarnings("PMD.SystemPrintln") public static void main(String[] args) throws InterruptedException, ExecutionException, IOException { // Replace with your api and secret key final OAuth10aService service = new ServiceBuilder("your api key") .apiSecret("your secret key") .build(EtsyApi.instance()); final Scanner in = new Scanner(System.in); System.out.println("=== Etsy's OAuth Workflow ==="); System.out.println(); // Obtain the Request Token System.out.println("Fetching the Request Token..."); final OAuth1RequestToken requestToken = service.getRequestToken(); System.out.println("Got the Request Token!"); System.out.println(); System.out.println("Now go and authorize ScribeJava here:"); System.out.println(service.getAuthorizationUrl(requestToken)); System.out.println("And paste the verifier here"); System.out.print(">>"); final String oauthVerifier = in.nextLine(); System.out.println(); // Trade the Request Token and Verifier for the Access Token System.out.println("Trading the Request Token for an Access Token..."); final OAuth1AccessToken accessToken = service.getAccessToken(requestToken, oauthVerifier); System.out.println("Got the Access Token!"); System.out.println("(The raw response looks like this: " + accessToken.getRawResponse() + "')"); 
System.out.println(); // Now let's go and ask for a protected resource! System.out.println("Now we're going to access a protected resource..."); final OAuthRequest request = new OAuthRequest(Verb.GET, PROTECTED_RESOURCE_URL); service.signRequest(accessToken, request); final Response response = service.execute(request); System.out.println("Got it! Lets see what we found..."); System.out.println(); System.out.println(response.getBody()); System.out.println(); System.out.println("That's it man! Go and build something awesome with ScribeJava! :)"); } }
fernandezpablo85/scribe-java
scribejava-apis/src/test/java/com/github/scribejava/apis/examples/EtsyExample.java
Java
mit
2,876
<?php namespace App; use Illuminate\Support\Facades\Route; /* * Clearboard Routes */ Route::group(['middleware' => ['web']], function () { Route::get('/', function() { return view('clearboard.index.viewindex', ['forums' => Forum::all()]); }); Route::get('/forum/{fid}-{_}', 'ForumController@view'); Route::get('/thread/{tid}-{_}', 'ThreadController@view'); Route::get('/profile/{uid}-{_}', 'ProfileController@view'); Route::get('/forum', function() { return redirect('/'); }); // Route for processing markdown to HTML. Route::post('/ajax/markdown', 'MarkdownController@postParse'); Route::post('/ajax/markdown_inline', 'MarkdownController@postInlineParse'); // for parsing inline markdown // Posting routes Route::post('/ajax/new_post', 'PostController@createApi')->middleware('auth'); Route::post('/ajax/new_thread', 'ThreadController@createApi')->middleware('auth'); Route::get('/newthread/{forumid}', 'ThreadController@create')->middleware('auth'); // Account Settings Route::get('/settings/{userid}', 'SettingsController@view'); Route::get('/settings', 'SettingsController@view')->middleware('auth'); // Registration Route::get('/register', function(){ return view('clearboard.register.register'); }); Route::post('/ajax/register', 'RegisterController@postRegister'); // Authentication routes Route::group(array('prefix' => '/auth'), function() { Route::post('/login', 'Auth\AuthController@postAjaxLogin'); Route::get('/logout', 'Auth\AuthController@getLogout')->middleware('get_csrf'); Route::post('/sudo', 'Auth\AuthController@postSudo'); Route::get('/ping', function () { return ''; }); // a simple request that returns nothing to update the existence of a user. }); // Introduction route. Probably will be a way to disable at some point. Route::get('/clearboard/welcome', function () { return view('clearboard.welcome'); }); });
clearboard/clearboard
app/Http/routes.php
PHP
mit
2,024
(function() { 'use strict'; angular .module('app.core') .constant('STATIC_URL', '/static/js/'); })();
gopar/OhMyCommand
apps/static/js/core/constants.js
JavaScript
mit
127
// Copyright (c) 2014 AlphaSierraPapa for the SharpDevelop Team // // Permission is hereby granted, free of charge, to any person obtaining a copy of this // software and associated documentation files (the "Software"), to deal in the Software // without restriction, including without limitation the rights to use, copy, modify, merge, // publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons // to whom the Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all copies or // substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, // INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR // PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE // FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq; using Avalonia; using AvaloniaEdit.Document; using AvaloniaEdit.Editing; using AvaloniaEdit.Utils; using Avalonia.Media; namespace AvaloniaEdit.Rendering { /// <summary> /// Helper for creating a PathGeometry. /// </summary> public sealed class BackgroundGeometryBuilder { /// <summary> /// Gets/sets the radius of the rounded corners. /// </summary> public double CornerRadius { get; set; } /// <summary> /// Gets/Sets whether to align to whole pixels. /// /// If BorderThickness is set to 0, the geometry is aligned to whole pixels. /// If BorderThickness is set to a non-zero value, the outer edge of the border is aligned /// to whole pixels. /// /// The default value is <c>false</c>. 
/// </summary> public bool AlignToWholePixels { get; set; } /// <summary> /// Gets/sets the border thickness. /// /// This property only has an effect if <c>AlignToWholePixels</c> is enabled. /// When using the resulting geometry to paint a border, set this property to the border thickness. /// Otherwise, leave the property set to the default value <c>0</c>. /// </summary> public double BorderThickness { get; set; } /// <summary> /// Gets/Sets whether to extend the rectangles to full width at line end. /// </summary> public bool ExtendToFullWidthAtLineEnd { get; set; } /// <summary> /// Adds the specified segment to the geometry. /// </summary> public void AddSegment(TextView textView, ISegment segment) { if (textView == null) throw new ArgumentNullException(nameof(textView)); var pixelSize = PixelSnapHelpers.GetPixelSize(textView); foreach (var r in GetRectsForSegment(textView, segment, ExtendToFullWidthAtLineEnd)) { AddRectangle(pixelSize, r); } } /// <summary> /// Adds a rectangle to the geometry. /// </summary> /// <remarks> /// This overload will align the coordinates according to /// <see cref="AlignToWholePixels"/>. /// Use the <see cref="AddRectangle(double,double,double,double)"/>-overload instead if the coordinates should not be aligned. /// </remarks> public void AddRectangle(TextView textView, Rect rectangle) { AddRectangle(PixelSnapHelpers.GetPixelSize(textView), rectangle); } private void AddRectangle(Size pixelSize, Rect r) { if (AlignToWholePixels) { var halfBorder = 0.5 * BorderThickness; AddRectangle(PixelSnapHelpers.Round(r.X - halfBorder, pixelSize.Width) + halfBorder, PixelSnapHelpers.Round(r.Y - halfBorder, pixelSize.Height) + halfBorder, PixelSnapHelpers.Round(r.Right + halfBorder, pixelSize.Width) - halfBorder, PixelSnapHelpers.Round(r.Bottom + halfBorder, pixelSize.Height) - halfBorder); } else { AddRectangle(r.X, r.Y, r.Right, r.Bottom); } } /// <summary> /// Calculates the list of rectangle where the segment in shown. 
/// This method usually returns one rectangle for each line inside the segment /// (but potentially more, e.g. when bidirectional text is involved). /// </summary> public static IEnumerable<Rect> GetRectsForSegment(TextView textView, ISegment segment, bool extendToFullWidthAtLineEnd = false) { if (textView == null) throw new ArgumentNullException(nameof(textView)); if (segment == null) throw new ArgumentNullException(nameof(segment)); return GetRectsForSegmentImpl(textView, segment, extendToFullWidthAtLineEnd); } private static IEnumerable<Rect> GetRectsForSegmentImpl(TextView textView, ISegment segment, bool extendToFullWidthAtLineEnd) { var segmentStart = segment.Offset; var segmentEnd = segment.Offset + segment.Length; segmentStart = segmentStart.CoerceValue(0, textView.Document.TextLength); segmentEnd = segmentEnd.CoerceValue(0, textView.Document.TextLength); TextViewPosition start; TextViewPosition end; if (segment is SelectionSegment sel) { start = new TextViewPosition(textView.Document.GetLocation(sel.StartOffset), sel.StartVisualColumn); end = new TextViewPosition(textView.Document.GetLocation(sel.EndOffset), sel.EndVisualColumn); } else { start = new TextViewPosition(textView.Document.GetLocation(segmentStart)); end = new TextViewPosition(textView.Document.GetLocation(segmentEnd)); } foreach (var vl in textView.VisualLines) { var vlStartOffset = vl.FirstDocumentLine.Offset; if (vlStartOffset > segmentEnd) break; var vlEndOffset = vl.LastDocumentLine.Offset + vl.LastDocumentLine.Length; if (vlEndOffset < segmentStart) continue; int segmentStartVc; segmentStartVc = segmentStart < vlStartOffset ? 0 : vl.ValidateVisualColumn(start, extendToFullWidthAtLineEnd); int segmentEndVc; if (segmentEnd > vlEndOffset) segmentEndVc = extendToFullWidthAtLineEnd ? 
int.MaxValue : vl.VisualLengthWithEndOfLineMarker; else segmentEndVc = vl.ValidateVisualColumn(end, extendToFullWidthAtLineEnd); foreach (var rect in ProcessTextLines(textView, vl, segmentStartVc, segmentEndVc)) yield return rect; } } /// <summary> /// Calculates the rectangles for the visual column segment. /// This returns one rectangle for each line inside the segment. /// </summary> public static IEnumerable<Rect> GetRectsFromVisualSegment(TextView textView, VisualLine line, int startVc, int endVc) { if (textView == null) throw new ArgumentNullException(nameof(textView)); if (line == null) throw new ArgumentNullException(nameof(line)); return ProcessTextLines(textView, line, startVc, endVc); } private static IEnumerable<Rect> ProcessTextLines(TextView textView, VisualLine visualLine, int segmentStartVc, int segmentEndVc) { var lastTextLine = visualLine.TextLines.Last(); var scrollOffset = textView.ScrollOffset; for (var i = 0; i < visualLine.TextLines.Count; i++) { var line = visualLine.TextLines[i]; var y = visualLine.GetTextLineVisualYPosition(line, VisualYPosition.LineTop); var visualStartCol = visualLine.GetTextLineVisualStartColumn(line); var visualEndCol = visualStartCol + line.Length; if (line == lastTextLine) visualEndCol -= 1; // 1 position for the TextEndOfParagraph // TODO: ? 
//else // visualEndCol -= line.TrailingWhitespaceLength; if (segmentEndVc < visualStartCol) break; if (lastTextLine != line && segmentStartVc > visualEndCol) continue; var segmentStartVcInLine = Math.Max(segmentStartVc, visualStartCol); var segmentEndVcInLine = Math.Min(segmentEndVc, visualEndCol); y -= scrollOffset.Y; var lastRect = Rect.Empty; if (segmentStartVcInLine == segmentEndVcInLine) { // GetTextBounds crashes for length=0, so we'll handle this case with GetDistanceFromCharacterHit // We need to return a rectangle to ensure empty lines are still visible var pos = visualLine.GetTextLineVisualXPosition(line, segmentStartVcInLine); pos -= scrollOffset.X; // The following special cases are necessary to get rid of empty rectangles at the end of a TextLine if "Show Spaces" is active. // If not excluded once, the same rectangle is calculated (and added) twice (since the offset could be mapped to two visual positions; end/start of line), if there is no trailing whitespace. // Skip this TextLine segment, if it is at the end of this line and this line is not the last line of the VisualLine and the selection continues and there is no trailing whitespace. 
if (segmentEndVcInLine == visualEndCol && i < visualLine.TextLines.Count - 1 && segmentEndVc > segmentEndVcInLine && line.TrailingWhitespaceLength == 0) continue; if (segmentStartVcInLine == visualStartCol && i > 0 && segmentStartVc < segmentStartVcInLine && visualLine.TextLines[i - 1].TrailingWhitespaceLength == 0) continue; lastRect = new Rect(pos, y, textView.EmptyLineSelectionWidth, line.Height); } else { if (segmentStartVcInLine <= visualEndCol) { var b = line.GetTextBounds(segmentStartVcInLine, segmentEndVcInLine - segmentStartVcInLine); var left = b.X - scrollOffset.X; var right = b.Right - scrollOffset.X; if (!lastRect.IsEmpty) yield return lastRect; // left>right is possible in RTL languages lastRect = new Rect(Math.Min(left, right), y, Math.Abs(right - left), line.Height); } } // If the segment ends in virtual space, extend the last rectangle with the rectangle the portion of the selection // after the line end. // Also, when word-wrap is enabled and the segment continues into the next line, extend lastRect up to the end of the line. if (segmentEndVc > visualEndCol) { double left, right; if (segmentStartVc > visualLine.VisualLengthWithEndOfLineMarker) { // segmentStartVC is in virtual space left = visualLine.GetTextLineVisualXPosition(lastTextLine, segmentStartVc); } else { // Otherwise, we already processed the rects from segmentStartVC up to visualEndCol, // so we only need to do the remainder starting at visualEndCol. // For word-wrapped lines, visualEndCol doesn't include the whitespace hidden by the wrap, // so we'll need to include it here. // For the last line, visualEndCol already includes the whitespace. left = line == lastTextLine ? line.WidthIncludingTrailingWhitespace : line.Width; } // TODO: !!!!!!!!!!!!!!!!!! SCROLL !!!!!!!!!!!!!!!!!! 
//if (line != lastTextLine || segmentEndVC == int.MaxValue) { // // If word-wrap is enabled and the segment continues into the next line, // // or if the extendToFullWidthAtLineEnd option is used (segmentEndVC == int.MaxValue), // // we select the full width of the viewport. // right = Math.Max(((IScrollInfo)textView).ExtentWidth, ((IScrollInfo)textView).ViewportWidth); //} else { right = visualLine.GetTextLineVisualXPosition(lastTextLine, segmentEndVc); //} var extendSelection = new Rect(Math.Min(left, right), y, Math.Abs(right - left), line.Height); if (!lastRect.IsEmpty) { if (extendSelection.Intersects(lastRect)) { lastRect.Union(extendSelection); yield return lastRect; } else { // If the end of the line is in an RTL segment, keep lastRect and extendSelection separate. yield return lastRect; yield return extendSelection; } } else yield return extendSelection; } else yield return lastRect; } } private readonly PathFigures _figures = new PathFigures(); private PathFigure _figure; private int _insertionIndex; private double _lastTop, _lastBottom; private double _lastLeft, _lastRight; /// <summary> /// Adds a rectangle to the geometry. /// </summary> /// <remarks> /// This overload assumes that the coordinates are aligned properly /// (see <see cref="AlignToWholePixels"/>). /// Use the <see cref="AddRectangle(TextView,Rect)"/>-overload instead if the coordinates are not yet aligned. 
/// </remarks>
public void AddRectangle(double left, double top, double right, double bottom)
{
    // A vertical gap between this rectangle and the previous one means the
    // selection is not contiguous: finish the current figure and start a new one.
    if (!top.IsClose(_lastBottom)) {
        CloseFigure();
    }
    if (_figure == null) {
        // First rectangle of a new figure: emit the top edge (with rounded
        // corners when the rectangle is wide enough) and the right edge.
        _figure = new PathFigure {
            StartPoint = new Point(left, top + CornerRadius)
        };
        if (Math.Abs(left - right) > CornerRadius) {
            _figure.Segments.Add(MakeArc(left + CornerRadius, top, SweepDirection.Clockwise));
            _figure.Segments.Add(MakeLineSegment(right - CornerRadius, top));
            _figure.Segments.Add(MakeArc(right, top + CornerRadius, SweepDirection.Clockwise));
        }
        _figure.Segments.Add(MakeLineSegment(right, bottom - CornerRadius));
        // Segments for the bottom/left side are inserted at this index later,
        // either by subsequent AddRectangle calls or by CloseFigure().
        _insertionIndex = _figure.Segments.Count;
        //figure.Segments.Add(MakeArc(left, bottom - cornerRadius, SweepDirection.Clockwise));
    }
    else {
        // Continuing an existing figure on the next visual line.
        // If the right edge moved, join the two edges with a pair of arcs
        // whose sweep direction depends on whether the edge moved inward
        // (new right < previous right) or outward.
        if (!_lastRight.IsClose(right)) {
            var cr = right < _lastRight ? -CornerRadius : CornerRadius;
            var dir1 = right < _lastRight ?
                SweepDirection.Clockwise : SweepDirection.CounterClockwise;
            var dir2 = right < _lastRight ?
                SweepDirection.CounterClockwise : SweepDirection.Clockwise;
            _figure.Segments.Insert(_insertionIndex++, MakeArc(_lastRight + cr, _lastBottom, dir1));
            _figure.Segments.Insert(_insertionIndex++, MakeLineSegment(right - cr, top));
            _figure.Segments.Insert(_insertionIndex++, MakeArc(right, top + CornerRadius, dir2));
        }
        // Extend the right edge down to this rectangle's bottom, and insert
        // the matching left-edge segment (not post-incremented: left-side
        // segments for later rows are inserted BEFORE it, keeping the path
        // ordered clockwise).
        _figure.Segments.Insert(_insertionIndex++, MakeLineSegment(right, bottom - CornerRadius));
        _figure.Segments.Insert(_insertionIndex, MakeLineSegment(_lastLeft, _lastTop + CornerRadius));
        // Mirror of the right-edge case for a shifted left edge.
        if (!_lastLeft.IsClose(left)) {
            var cr = left < _lastLeft ? CornerRadius : -CornerRadius;
            var dir1 = left < _lastLeft ?
                SweepDirection.CounterClockwise : SweepDirection.Clockwise;
            var dir2 = left < _lastLeft ?
                SweepDirection.Clockwise : SweepDirection.CounterClockwise;
            _figure.Segments.Insert(_insertionIndex, MakeArc(_lastLeft, _lastBottom - CornerRadius, dir1));
            _figure.Segments.Insert(_insertionIndex, MakeLineSegment(_lastLeft - cr, _lastBottom));
            _figure.Segments.Insert(_insertionIndex, MakeArc(left + cr, _lastBottom, dir2));
        }
    }
    // Remember this rectangle so the next AddRectangle/CloseFigure call can
    // connect to it.
    _lastTop = top;
    _lastBottom = bottom;
    _lastLeft = left;
    _lastRight = right;
}

/// <summary>
/// Creates an arc segment of radius <see cref="CornerRadius"/> ending at (x, y).
/// </summary>
private ArcSegment MakeArc(double x, double y, SweepDirection dir)
{
    var arc = new ArcSegment {
        Point = new Point(x, y),
        Size = new Size(CornerRadius, CornerRadius),
        SweepDirection = dir
    };
    return arc;
}

/// <summary>
/// Creates a straight line segment ending at (x, y).
/// </summary>
private static LineSegment MakeLineSegment(double x, double y)
{
    return new LineSegment { Point = new Point(x, y) };
}

/// <summary>
/// Closes the current figure.
/// </summary>
public void CloseFigure()
{
    if (_figure != null) {
        // Emit the bottom and left edges for the last remembered rectangle,
        // then seal the figure. Segments are inserted at _insertionIndex in
        // reverse drawing order so the final path remains contiguous.
        _figure.Segments.Insert(_insertionIndex, MakeLineSegment(_lastLeft, _lastTop + CornerRadius));
        if (Math.Abs(_lastLeft - _lastRight) > CornerRadius) {
            _figure.Segments.Insert(_insertionIndex, MakeArc(_lastLeft, _lastBottom - CornerRadius, SweepDirection.Clockwise));
            _figure.Segments.Insert(_insertionIndex, MakeLineSegment(_lastLeft + CornerRadius, _lastBottom));
            _figure.Segments.Insert(_insertionIndex, MakeArc(_lastRight - CornerRadius, _lastBottom, SweepDirection.Clockwise));
        }
        _figure.IsClosed = true;
        _figures.Add(_figure);
        _figure = null;
    }
}

/// <summary>
/// Creates the geometry.
/// Returns null when the geometry is empty!
/// </summary>
public Geometry CreateGeometry()
{
    // Ensure any in-progress figure is finished before building the geometry.
    CloseFigure();
    return _figures.Count != 0 ? new PathGeometry { Figures = _figures } : null;
}
}
}
AvaloniaUI/AvaloniaEdit
src/AvaloniaEdit/Rendering/BackgroundGeometryBuilder.cs
C#
mit
19,570
# Wraps one activity entry returned by the Fitbit API, turning the raw hash
# into typed, display-ready attributes (see the sample response below).
class Fitbit::Activity < Fitbit::Data
  attr_accessor :activityId, :activityParentId, :activityParentName, :calories,
    :description, :distance, :duration, :hasStartTime, :isFavorite, :logId,
    :name, :startTime, :steps

  # activity_data - one activity hash from the Fitbit API response.
  # unit_measurement_mappings - per-user unit labels, e.g. { distance: 'miles' }.
  def initialize(activity_data, unit_measurement_mappings)
    @activityId = activity_data['activityId']
    @activityParentId = activity_data['activityParentId']
    @activityParentName = activity_data['activityParentName']
    @calories = activity_data['calories']
    @description = activity_data['description']
    # Append the user's distance unit so the value is display-ready.
    @distance = "#{activity_data['distance']} #{unit_measurement_mappings[:distance]}" if activity_data['distance']
    # Fitbit reports duration in milliseconds; render it as HH:MM:SS.
    @duration = Time.at(activity_data['duration']/1000).utc.strftime("%H:%M:%S") if activity_data['duration']
    @hasStartTime = activity_data['hasStartTime']
    # Bug fix: isFavorite was declared as an accessor (and is present in the
    # API response) but was never populated from activity_data.
    @isFavorite = activity_data['isFavorite']
    @logId = activity_data['logId']
    @name = activity_data['name']
    @startTime = activity_data['startTime']
    @steps = activity_data['steps']
    # Uncomment to view the data that is returned by the Fitbit service
    # ActiveRecord::Base.logger.info activity_data
  end

  # Returns an array of Fitbit::Activity objects for everything the linked
  # user logged on +date+; returns [] when the user is absent or not linked.
  def self.fetch_all_on_date(user, date)
    activity_objects = []
    if user.present? && user.linked?
      activities = user.fitbit_data.activities_on_date(date)['activities']
      activity_objects = activities.map {|a| Fitbit::Activity.new(a, user.unit_measurement_mappings) }
    end
    activity_objects
  end

  # Logs +activity+ to Fitbit on behalf of +user+; no-op unless the user is
  # present and linked to a Fitbit account.
  def self.log_activity(user, activity)
    if user.present? && user.linked?
      user.fitbit_data.log_activity(activity)
    end
  end
end

# Sample response from fitbit.com api
#{"activityId"=>17151,
# "activityParentId"=>90013,
# "activityParentName"=>"Walking",
# "calories"=>54,
# "description"=>"less than 2 mph, strolling very slowly",
# "distance"=>0.5,
# "duration"=>1200000,
# "hasStartTime"=>true,
# "isFavorite"=>true,
# "logId"=>21537078,
# "name"=>"Walking",
# "startTime"=>"11:45",
# "steps"=>1107}
whazzmaster/fitgem-client
app/models/fitbit/activity.rb
Ruby
mit
1,937
var fixDate = function(date) { return date.Format('2006-01-02 15:04:05'); }; var entries = executeCommand('getEntries', {}); dbotCommands = []; userCommands = []; for (var i = 0; i < entries.length; i++) { if (typeof entries[i].Contents == 'string' && entries[i].Contents.indexOf('!') === 0 && entries[i].Contents.indexOf('!listExecutedCommands') !== 0) { if (entries[i].Metadata.User) { if (args.source === 'All' || args.source === 'Manual') { userCommands.push({ 'Time': fixDate(entries[i].Metadata.Created), 'Entry ID': entries[i].ID, 'User': entries[i].Metadata.User, 'Command': entries[i].Contents }); } } else { if (args.source === 'All' || args.source === 'Playbook') { dbotCommands.push({ 'Time': fixDate(entries[i].Metadata.Created), 'Entry ID': entries[i].ID, 'Playbook (Task)': entries[i].Metadata.EntryTask.PlaybookName + " (" + entries[i].Metadata.EntryTask.TaskName + ")", 'Command': entries[i].Contents }); } } } } var md = ''; if (dbotCommands.length > 0) { md += tableToMarkdown('DBot Executed Commands', dbotCommands, ['Time', 'Entry ID', 'Playbook (Task)', 'Command']) + '\n'; } if (userCommands.length > 0) { md += tableToMarkdown('User Executed Commands', userCommands, ['Time', 'Entry ID', 'User', 'Command']) + '\n'; } if (md === '') { md = 'No commands found\n'; } return {ContentsFormat: formats.markdown, Type: entryTypes.note, Contents: md};
demisto/content
Packs/CommonScripts/Scripts/ListExecutedCommands/ListExecutedCommands.js
JavaScript
mit
1,692
// Compiler negative-test fixture: A extends B. Judging by the test directory
// name (Je_4_ClassExtendsCyclicClass), B presumably extends A in its own file,
// making the hierarchy cyclic, so compilation is expected to be rejected —
// confirm against the accompanying B.java. Do not "fix" this class.
public class A extends B { public A() {} }
gregwym/joos-compiler-java
testcases/a2/Je_4_ClassExtendsCyclicClass/A.java
Java
mit
47
<?php

// Generated Assetic configuration for AdminBundle:Inquiry:inquiry.html.twig.
// No assets are registered for this template, hence the empty map.
return array (
);
sanofuzir/Royaltransfer.si
app/cache/prod/assetic/config/6/6caf06a2425699c446204888d8d22119.php
PHP
mit
66
package easyupload.entity; import javax.persistence.Entity; import javax.persistence.Id; import javax.persistence.Lob; @Entity public class FileUpload { public FileUpload(String filename, byte[] file, String mimeType) { this.file = file; this.filename = filename; this.mimeType = mimeType; } public FileUpload() { // Default Constructor } @Id private String filename; @Lob private byte[] file; private String mimeType; public String getFilename() { return filename; } public void setFilename(String filename) { this.filename = filename; } public byte[] getFile() { return file; } public void setFile(byte[] file) { this.file = file; } public String getMimeType() { return mimeType; } public void setMimeType(String mimeType) { this.mimeType = mimeType; } }
vberbenetz/EasyUpload
Backend/src/main/java/easyupload/entity/FileUpload.java
Java
mit
936
<?php

/*
 * This file is part of the PHPExifTool package.
 *
 * (c) Alchemy <support@alchemy.fr>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

namespace PHPExiftool\Driver\Tag\MIEGeo;

use JMS\Serializer\Annotation\ExclusionPolicy;
use PHPExiftool\Driver\AbstractTag;

/**
 * Tag descriptor for the MIE-Geo "Country" tag.
 *
 * @ExclusionPolicy("all")
 */
class Country extends AbstractTag
{
    /** ExifTool tag identifier. */
    protected $Id = 'Country';

    /** Short tag name. */
    protected $Name = 'Country';

    /** Fully-qualified ExifTool table name. */
    protected $FullName = 'MIE::Geo';

    /** ExifTool group name. */
    protected $GroupName = 'MIE-Geo';

    /** Family 0/1/2 group assignments. */
    protected $g0 = 'MIE';

    protected $g1 = 'MIE-Geo';

    protected $g2 = 'Location';

    /** Value type of the tag. */
    protected $Type = 'string';

    /** Whether the tag may be written back by ExifTool. */
    protected $Writable = true;

    /** Human-readable description. */
    protected $Description = 'Country';
}
bburnichon/PHPExiftool
lib/PHPExiftool/Driver/Tag/MIEGeo/Country.php
PHP
mit
777
'use strict'; var canUseDOM = require('./canUseDOM'); var one = function() { }; var on = function() { }; var off = function() { }; if (canUseDOM) { var bind = window.addEventListener ? 'addEventListener' : 'attachEvent'; var unbind = window.removeEventListener ? 'removeEventListener' : 'detachEvent'; var prefix = bind !== 'addEventListener' ? 'on' : ''; one = function(node, eventNames, eventListener) { var typeArray = eventNames.split(' '); var recursiveFunction = function(e) { e.target.removeEventListener(e.type, recursiveFunction); return eventListener(e); }; for (var i = typeArray.length - 1; i >= 0; i--) { this.on(node, typeArray[i], recursiveFunction); } }; /** * Bind `node` event `eventName` to `eventListener`. * * @param {Element} node * @param {String} eventName * @param {Function} eventListener * @param {Boolean} capture * @return {Obejct} * @api public */ on = function(node, eventName, eventListener, capture) { node[bind](prefix + eventName, eventListener, capture || false); return { off: function() { node[unbind](prefix + eventName, eventListener, capture || false); } }; } /** * Unbind `node` event `eventName`'s callback `eventListener`. * * @param {Element} node * @param {String} eventName * @param {Function} eventListener * @param {Boolean} capture * @return {Function} * @api public */ off = function(node, eventName, eventListener, capture) { node[unbind](prefix + eventName, eventListener, capture || false); return eventListener; }; } module.exports = { one: one, on: on, off: off };
minwe/amazeui-react
src/utils/Events.js
JavaScript
mit
1,693