text
stringlengths
2
1.04M
meta
dict
package com.example.repository;

import java.util.List;

import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.Repository;
import org.springframework.data.repository.query.Param;

import com.example.entity.User;

/**
 * Spring Data repository exposing read-only finder methods for {@link User}.
 * Extends the marker {@link Repository} (not CrudRepository), so only the
 * methods declared here are available to callers.
 */
public interface UserRepository extends Repository<User, Long> {

    // Derived query: WHERE name = ?1 AND address = ?2
    List<User> findByNameAndAddress(String name, String address);

    // Explicit JPQL query bound via the named parameter :name.
    @Query(value = "from User u where u.name=:name")
    List<User> findByName1(@Param("name") String name);

    // @Query(value = "select * from #{#entityName} u where u.name=?1", nativeQuery = true)
    // List<User> findByName2(String name);

    // Derived query: WHERE name = ?1
    List<User> findByName(String name);
}
{ "content_hash": "9f5f802d5dc21bc479802fc4a6444bbe", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 90, "avg_line_length": 29.391304347826086, "alnum_prop": 0.7440828402366864, "repo_name": "diwang011/spring-data-jpa-demo", "id": "8f722d16710e738f4792fede25b8cbdfbc272d0e", "size": "676", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/example/repository/UserRepository.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "45923" } ], "symlink_target": "" }
package com.google.android.voiceime; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.content.ServiceConnection; import android.os.IBinder; import com.anysoftkeyboard.utils.Logger; /** * Handles the connection, and the method call, and the call backs between the IME and the activity. */ class ServiceBridge { @SuppressWarnings("unused") private static final String TAG = "ServiceBridge"; private final IntentApiTrigger.Callback mCallback; public ServiceBridge() { this(null); } public ServiceBridge(IntentApiTrigger.Callback callback) { mCallback = callback; } /** * Start a voice search recognition. */ public void startVoiceRecognition(final Context context, final String languageCode) { final ConnectionRequest conReq = new ConnectionRequest(languageCode); conReq.setServiceCallback(new ServiceHelper.Callback() { @Override public void onResult(final String recognitionResult) { mCallback.onRecognitionResult(recognitionResult); try { context.unbindService(conReq); } catch (IllegalArgumentException e) { //https://github.com/AnySoftKeyboard/AnySoftKeyboard/issues/432 Logger.w(TAG, "Failed to unbind from service! Swallowing.", e); } } }); context.bindService(new Intent(context, ServiceHelper.class), conReq, Context.BIND_AUTO_CREATE); } public void notifyResult(Context context, String recognitionResult) { ServiceConnection conn = new ConnectionResponse(context, recognitionResult); context.bindService(new Intent(context, ServiceHelper.class), conn, Context.BIND_AUTO_CREATE); } /** * Service connection for requesting a recognition. 
*/ private class ConnectionRequest implements ServiceConnection { private final String mLanguageCode; private ServiceHelper.Callback mServiceCallback; private ConnectionRequest(String languageCode) { mLanguageCode = languageCode; } private void setServiceCallback(ServiceHelper.Callback callback) { mServiceCallback = callback; } @Override public void onServiceConnected(ComponentName className, IBinder service) { ServiceHelper serviceHelper = ((ServiceHelper.ServiceHelperBinder) service).getService(); serviceHelper.startRecognition(mLanguageCode, mServiceCallback); } @Override public void onServiceDisconnected(ComponentName className) { // Empty } } /** * Service connection for notifying a recognition result. */ private class ConnectionResponse implements ServiceConnection { private final String mRecognitionResult; private final Context mContext; private ConnectionResponse(Context context, String recognitionResult) { mRecognitionResult = recognitionResult; mContext = context; } @Override public void onServiceDisconnected(ComponentName name) { // Empty } @Override public void onServiceConnected(ComponentName name, IBinder service) { ServiceHelper serviceHelper = ((ServiceHelper.ServiceHelperBinder) service).getService(); serviceHelper.notifyResult(mRecognitionResult); mContext.unbindService(this); } } }
{ "content_hash": "1ee1120750c52ebccee33c15eb673770", "timestamp": "", "source": "github", "line_count": 116, "max_line_length": 100, "avg_line_length": 31.594827586206897, "alnum_prop": 0.6545702592087312, "repo_name": "OmerMachluf/Mykeyboard", "id": "1a1c35281c7ca76104f5c9cc7e7c8c59c012950c", "size": "4259", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/com/google/android/voiceime/ServiceBridge.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "36897" }, { "name": "C++", "bytes": "202686" }, { "name": "CSS", "bytes": "3864" }, { "name": "Groovy", "bytes": "1692" }, { "name": "HTML", "bytes": "38013011" }, { "name": "Java", "bytes": "2023091" }, { "name": "JavaScript", "bytes": "275" }, { "name": "Makefile", "bytes": "701" }, { "name": "Python", "bytes": "1737" }, { "name": "Shell", "bytes": "9399" } ], "symlink_target": "" }
//============================================================================================================= //************************************************************************************************************* //============================================================================================================= // INCLUDES //============================================================================================================= #include "metatreeitem.h" //************************************************************************************************************* //============================================================================================================= // Qt INCLUDES //============================================================================================================= #include <QList> #include <QVariant> #include <QStringList> #include <QColor> #include <QStandardItem> #include <QStandardItemModel> #include <QVector3D> //************************************************************************************************************* //============================================================================================================= // Eigen INCLUDES //============================================================================================================= #include <Eigen/Core> //************************************************************************************************************* //============================================================================================================= // USED NAMESPACES //============================================================================================================= using namespace DISP3DLIB; //************************************************************************************************************* //============================================================================================================= // DEFINE MEMBER METHODS 
//============================================================================================================= MetaTreeItem::MetaTreeItem(int iType, const QString& text) : AbstractTreeItem(iType, text) { QString sToolTip; switch(m_iType) { case MetaTreeItemTypes::FileName: sToolTip = "File name"; break; case MetaTreeItemTypes::FilePath: sToolTip = "File path"; break; case MetaTreeItemTypes::SurfaceType: sToolTip = "Surface type"; break; case MetaTreeItemTypes::SurfaceColorGyri: sToolTip = "Color Gyri"; break; case MetaTreeItemTypes::SurfaceColorSulci: sToolTip = "Color Sulci"; break; case MetaTreeItemTypes::RTDataStreamStatus: sToolTip = "Turn real time data streaming on/off"; break; case MetaTreeItemTypes::RTDataSourceSpaceType: sToolTip = "The source space type"; break; case MetaTreeItemTypes::RTDataColormapType: sToolTip = "The color map type"; break; case MetaTreeItemTypes::RTDataTimeInterval: sToolTip = "The m seconds waited in between each sample"; break; case MetaTreeItemTypes::RTDataLoopedStreaming: sToolTip = "Turn looped streaming on/off"; break; case MetaTreeItemTypes::RTDataNumberAverages: sToolTip = "The number of samples averaged together (downsampling)"; break; case MetaTreeItemTypes::RTDataNormalizationValue: sToolTip = "The value to normalize the source localization result"; break; case MetaTreeItemTypes::RTDataVisualizationType: sToolTip = "The visualization type"; break; case MetaTreeItemTypes::SurfaceColor: sToolTip = "Surface color item"; break; case MetaTreeItemTypes::PointColor: sToolTip = "Point color item"; break; case MetaTreeItemTypes::SurfaceAlpha: sToolTip = "Surface alpha value"; break; case MetaTreeItemTypes::SurfaceTranslateX: sToolTip = "Surface x translation value"; break; case MetaTreeItemTypes::SurfaceTranslateY: sToolTip = "Surface y translation value"; break; case MetaTreeItemTypes::SurfaceTranslateZ: sToolTip = "Surface z translation value"; break; default: // do nothing; break; } this->setToolTip(sToolTip); } 
//************************************************************************************************************* MetaTreeItem::~MetaTreeItem() { } //************************************************************************************************************* QVariant MetaTreeItem::data(int role) const { return AbstractTreeItem::data(role); } //************************************************************************************************************* void MetaTreeItem::setData(const QVariant& value, int role) { AbstractTreeItem::setData(value, role); switch(role) { case MetaTreeItemRoles::SurfaceColorSulci: { emit curvColorsChanged(); break; } case MetaTreeItemRoles::SurfaceColorGyri: { emit curvColorsChanged(); break; } case MetaTreeItemRoles::RTDataTimeInterval: { emit rtDataTimeIntervalChanged(value.toInt()); break; } case MetaTreeItemRoles::RTDataNormalizationValue: { QVector3D vecTemp = value.value<QVector3D>(); emit rtDataNormalizationValueChanged(vecTemp); break; } case MetaTreeItemRoles::RTDataColormapType: { emit rtDataColormapTypeChanged(value.toString()); break; } case MetaTreeItemRoles::RTDataVisualizationType: { emit rtDataVisualizationTypeChanged(value.toString()); break; } case MetaTreeItemRoles::SurfaceColor: { emit surfaceColorChanged(value.value<QColor>()); break; } case MetaTreeItemRoles::PointColor: { emit surfaceColorChanged(value.value<QColor>()); break; } case MetaTreeItemRoles::RTDataNumberAverages: { emit rtDataNumberAveragesChanged(value.toInt()); break; } case MetaTreeItemRoles::SurfaceAlpha: { emit surfaceAlphaChanged(value.toFloat()); break; } case MetaTreeItemRoles::SurfaceTranslateX: { emit surfaceTranslationXChanged(value.toFloat()); break; } case MetaTreeItemRoles::SurfaceTranslateY: { emit surfaceTranslationYChanged(value.toFloat()); break; } case MetaTreeItemRoles::SurfaceTranslateZ: { emit surfaceTranslationZChanged(value.toFloat()); break; } default: // do nothing; break; } }
{ "content_hash": "19c7454409a31aec0e7531444d44369e", "timestamp": "", "source": "github", "line_count": 210, "max_line_length": 111, "avg_line_length": 34.2, "alnum_prop": 0.42133110554163183, "repo_name": "rickytjen/mne-cpp", "id": "5328ae0c189008d2208de0ab1617846483261b0e", "size": "8994", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "MNE/disp3D/3DObjects/common/metatreeitem.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "163512" }, { "name": "C++", "bytes": "10966590" }, { "name": "GLSL", "bytes": "8518" }, { "name": "Prolog", "bytes": "22902" }, { "name": "QMake", "bytes": "240276" } ], "symlink_target": "" }
/* Test case for bug in dlsym accessing dependency objects' symbols. */

#include <stdlib.h>
#include <stdio.h>
#include <unistd.h>
#include <dlfcn.h>

int
main(void)
{
	void *dl;
	char *sym;

	/* Open lib1.so, which has the unresolved test symbol and a DT_NEEDED
	   on lib2.so, which provides the symbol.  */
	dl = dlopen("bug-dlsym1-lib1.so", RTLD_NOW);
	if (dl == NULL) {
		printf("dlopen(\"bug-dlsym1-lib1.so\"): %s\n", dlerror());
		abort();
	}

	/* The symbol must be resolvable through the dependency object.  */
	sym = dlsym(dl, "dlopen_test_variable");
	if (sym == NULL) {
		printf("dlsym(handle, \"dlopen_test_variable\"): %s\n", dlerror());
		abort();
	}

	(void) dlclose(dl);

	return 0;
}
{ "content_hash": "869d80a53060dbaf49c5bbb01ef39c99", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 73, "avg_line_length": 23.714285714285715, "alnum_prop": 0.6129518072289156, "repo_name": "andrewjylee/omniplay", "id": "3bbf6a2f02695286533c141262e2a180cb38d6f9", "size": "664", "binary": false, "copies": "94", "ref": "refs/heads/master", "path": "eglibc-2.15/dlfcn/bug-dlsym1.c", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "ASP", "bytes": "4528" }, { "name": "Assembly", "bytes": "8662249" }, { "name": "Awk", "bytes": "79791" }, { "name": "Batchfile", "bytes": "903" }, { "name": "C", "bytes": "451499135" }, { "name": "C++", "bytes": "6338106" }, { "name": "Groff", "bytes": "2522798" }, { "name": "HTML", "bytes": "47935" }, { "name": "Java", "bytes": "2193" }, { "name": "Lex", "bytes": "44513" }, { "name": "Logos", "bytes": "97869" }, { "name": "Makefile", "bytes": "1700085" }, { "name": "Objective-C", "bytes": "1148023" }, { "name": "Perl", "bytes": "530370" }, { "name": "Perl6", "bytes": "3727" }, { "name": "Python", "bytes": "493452" }, { "name": "Scilab", "bytes": "21433" }, { "name": "Shell", "bytes": "409014" }, { "name": "SourcePawn", "bytes": "11760" }, { "name": "TeX", "bytes": "283872" }, { "name": "UnrealScript", "bytes": "6143" }, { "name": "XS", "bytes": "1240" }, { "name": "Yacc", "bytes": "93190" } ], "symlink_target": "" }
namespace ash {
namespace quick_pair {

// This class is used to represent server errors (both network and HTTP errors)
// we encounter in the repository component.
class COMPONENT_EXPORT(QUICK_PAIR_COMMON) FastPairHttpResult {
 public:
  // NOTE(review): implementation lives in the .cc; presumably derives type_
  // and the error fields from |net_error| and |head| -- confirm there.
  FastPairHttpResult(const int net_error,
                     const network::mojom::URLResponseHead* head);
  // Non-copyable and non-move-assignable.
  FastPairHttpResult(const FastPairHttpResult&) = delete;
  FastPairHttpResult& operator=(const FastPairHttpResult&) = delete;
  FastPairHttpResult& operator=(FastPairHttpResult&&) = delete;
  ~FastPairHttpResult();

  // Error accessors; per the comment on the fields below, these are only
  // populated for the corresponding failure kind.
  absl::optional<int> net_error() const { return net_error_; }
  absl::optional<int> http_response_error() const {
    return http_response_error_;
  }

  bool IsSuccess() const;

  // Human-readable description, e.g. for logging (defined in the .cc).
  std::string ToString() const;

 private:
  // Trichotomy of outcomes; exactly one applies per result.
  enum class Type { kSuccess, kNetworkFailure, kHttpFailure } type_;

  // Only set if the code is an error, i.e., not set on success.
  absl::optional<int> net_error_;
  absl::optional<int> http_response_error_;
};

}  // namespace quick_pair
}  // namespace ash

#endif  // ASH_QUICK_PAIR_COMMON_FAST_PAIR_FAST_PAIR_HTTP_RESULT_H_
{ "content_hash": "2a42376c91636c1af9923fb48cbf20f6", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 79, "avg_line_length": 32.88235294117647, "alnum_prop": 0.7191413237924866, "repo_name": "nwjs/chromium.src", "id": "168925234d62ecec47c6d0d3fb64fbcabbba7493", "size": "1560", "binary": false, "copies": "6", "ref": "refs/heads/nw70", "path": "ash/quick_pair/common/fast_pair/fast_pair_http_result.h", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
<?php

namespace InstagramAPI\Exception;

/**
 * Marker exception type; adds no state or behavior of its own beyond what
 * {@see RequestException} provides. Thrown where the API client needs to
 * distinguish an empty response from other request failures.
 */
class EmptyResponseException extends RequestException
{
}
{ "content_hash": "d84e72f5212e52f2a8f1307acc8c3b72", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 53, "avg_line_length": 14.285714285714286, "alnum_prop": 0.83, "repo_name": "MoritzGruber/instabot", "id": "ad8a6c29241933d1889a1a25d1a72d1bae619088", "size": "100", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "app/Services/Content/phpapi/vendor/mgp25/instagram-php/src/Exception/EmptyResponseException.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "6780" }, { "name": "HTML", "bytes": "5766" }, { "name": "JavaScript", "bytes": "11712" }, { "name": "PHP", "bytes": "12300" }, { "name": "Python", "bytes": "17024" }, { "name": "Shell", "bytes": "3527" } ], "symlink_target": "" }
// Captured base pointers, written by the hooks below and read by the getters.
DWORD player_base = 0, camera_base = 0, uncap_base = 0;

// Trampolines to the original game functions, filled in by TrampolineHook().
int(__thiscall *UpdatePlayerOriginal)(int, int, int);
void(*UpdateCameraOriginal)();
void(*UpdateMenuOriginal)();
void(*UpdateCapOriginal)();  // NOTE(review): declared but never hooked below.

// Pattern-scanned static address, resolved in SetupPlayer().
DWORD static_base = 0;

// __thiscall hook expressed as __fastcall: this_ arrives in ECX, idle_ soaks
// up the unused EDX register. Records the player object's address, then
// forwards to the original update function.
int __fastcall UpdatePlayerHook(int this_, void *idle_, int a1, int a2) {
	GetData()->player_base = player_base = this_;
	return UpdatePlayerOriginal(this_, a1, a2);
}

// Naked mid-function hook: grabs the player base from EBX (register layout is
// specific to the hooked code at the address in SetupPlayer), preserves the
// registers the C call below may clobber, then jumps to the original.
__declspec(naked) void UpdateMenuHook() {
	__asm {
		mov player_base, ebx
		push ecx
		push eax
	}
	GetData()->player_base = player_base;
	__asm {
		pop eax
		pop ecx
		jmp UpdateMenuOriginal
	}
}

// Same pattern as UpdateMenuHook, but the camera object lives in EBP here.
__declspec(naked) void UpdateCameraHook() {
	__asm {
		mov camera_base, ebp
		push ecx
		push eax
	}
	GetData()->camera_base = camera_base;
	__asm {
		pop eax
		pop ecx
		jmp UpdateCameraOriginal
	}
}

// Returns the last player base captured by the hooks.
DWORD GetPlayerBase() {
	return player_base;
	// return strcmp(GetData()->level, "TdMainMenu") == 0 ? player_base : (GetData()->player_base = (DWORD)GetPointer(GetCurrentProcess(), 5, static_base, 0xCC, 0x4A4, 0x214, 0x00));
}

// Returns the last camera base captured by UpdateCameraHook.
DWORD GetCameraBase() {
	return camera_base;
}

// Converts a 16-bit angle unit (full turn == 0x10000) to degrees in [0, 360).
float IntToDegrees(int i) {
	float r = (float)fmod(((float)i / 0x10000) * 360.0, 360);
	return r < 0 ? r + 360 : r;
}

// Inverse of IntToDegrees: degrees to the game's 16-bit angle units.
// Note: the % result is negative for negative input, mirroring IntToDegrees'
// handling of negative angle units.
int DegreesToInt(float i) {
	return (int)((i / 360) * 0x10000) % 0x10000;
}

// Installs all three hooks. The literal addresses and the byte pattern are
// presumably specific to one binary version of the game -- TODO confirm they
// are kept in sync with the targeted build.
void SetupPlayer() {
	// Scan the module for "mov [imm32], ecx; mov ecx, imm32; ..." and read the
	// imm32 operand (offset +2) as the static base address.
	static_base = (DWORD)FindPattern((void *)((DWORD)GetModuleHandle(0)), 0x12800000, "\x89\x0D\x00\x00\x00\x00\xB9\x00\x00\x00\x00\xFF", "xx????x????x");
	static_base = *(DWORD *)(static_base + 0x2);

	DWORD addr;
	addr = 0x12B5690;
	TrampolineHook(UpdatePlayerHook, (void *)addr, (void **)&UpdatePlayerOriginal);
	addr = 0xE41667;
	TrampolineHook(UpdateMenuHook, (void *)addr, (void **)&UpdateMenuOriginal);
	addr = 0xB5C050;
	TrampolineHook(UpdateCameraHook, (void *)addr, (void **)&UpdateCameraOriginal);
}
{ "content_hash": "58fbc3ce8b3e4be5d3f19765d95b5feb", "timestamp": "", "source": "github", "line_count": 80, "max_line_length": 179, "avg_line_length": 23.5, "alnum_prop": 0.6462765957446809, "repo_name": "btbd/megem", "id": "9f55b8626ee00aa4b688dd1f39787619d6f2cdf8", "size": "1901", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "DLL/player.cpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "51368" }, { "name": "C++", "bytes": "339485" }, { "name": "Objective-C", "bytes": "14" } ], "symlink_target": "" }
package org.apache.camel.model;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;

import org.apache.camel.ExchangePattern;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.spi.Metadata;

/**
 * Sends the message to a dynamic endpoint
 * <p/>
 * You can specify multiple languages in the uri separated by the plus sign,
 * such as <tt>mock:+language:xpath:/order/@uri</tt> where <tt>mock:</tt> would
 * be a prefix to a xpath expression.
 * <p/>
 * For more dynamic behavior use
 * <a href="http://camel.apache.org/recipient-list.html">Recipient List</a> or
 * <a href="http://camel.apache.org/dynamic-router.html">Dynamic Router</a> EIP
 * instead.
 */
@Metadata(label = "eip,endpoint,routing")
@XmlRootElement(name = "toD")
@XmlAccessorType(XmlAccessType.FIELD)
public class ToDynamicDefinition extends NoOutputDefinition<ToDynamicDefinition> {

    // Programmatic alternative to the plain uri string; excluded from XML binding.
    @XmlTransient
    protected EndpointProducerBuilder endpointProducerBuilder;

    // Options below are declared as String; the @Metadata javaType records the
    // effective type for tooling.
    @XmlAttribute
    @Metadata(required = true)
    private String uri;
    @XmlAttribute
    @Metadata(javaType = "org.apache.camel.ExchangePattern", enums = "InOnly,InOut,InOptionalOut")
    private String pattern;
    @XmlAttribute
    @Metadata(javaType = "java.lang.Integer")
    private String cacheSize;
    @XmlAttribute
    @Metadata(javaType = "java.lang.Boolean")
    private String ignoreInvalidEndpoint;
    @XmlAttribute
    @Metadata(defaultValue = "true", javaType = "java.lang.Boolean")
    private String allowOptimisedComponents;

    public ToDynamicDefinition() {
    }

    public ToDynamicDefinition(String uri) {
        this.uri = uri;
    }

    @Override
    public String getShortName() {
        return "toD";
    }

    @Override
    public String toString() {
        return "DynamicTo[" + getLabel() + "]";
    }

    @Override
    public String getLabel() {
        // The (possibly expression-based) uri doubles as the label.
        return uri;
    }

    // Fluent API
    // -------------------------------------------------------------------------

    /**
     * Sets the optional {@link ExchangePattern} used to invoke this endpoint
     */
    public ToDynamicDefinition pattern(ExchangePattern pattern) {
        // Convenience overload: delegates to the String variant using the enum name.
        return pattern(pattern.name());
    }

    /**
     * Sets the optional {@link ExchangePattern} used to invoke this endpoint
     */
    public ToDynamicDefinition pattern(String pattern) {
        setPattern(pattern);
        return this;
    }

    /**
     * Sets the maximum size used by the
     * {@link org.apache.camel.spi.ProducerCache} which is used to cache and
     * reuse producers when using this recipient list, when uris are reused.
     *
     * Beware that when using dynamic endpoints then it affects how well the cache can be utilized.
     * If each dynamic endpoint is unique then its best to turn of caching by setting this to -1, which
     * allows Camel to not cache both the producers and endpoints; they are regarded as prototype scoped
     * and will be stopped and discarded after use. This reduces memory usage as otherwise producers/endpoints
     * are stored in memory in the caches.
     *
     * However if there are a high degree of dynamic endpoints that have been used before, then it can
     * benefit to use the cache to reuse both producers and endpoints and therefore the cache size
     * can be set accordingly or rely on the default size (1000).
     *
     * If there is a mix of unique and used before dynamic endpoints, then setting a reasonable cache size
     * can help reduce memory usage to avoid storing too many non frequent used producers.
     *
     * @param cacheSize the cache size, use <tt>0</tt> for default cache size,
     *                  or <tt>-1</tt> to turn cache off.
     * @return the builder
     */
    public ToDynamicDefinition cacheSize(int cacheSize) {
        // Convenience overload: delegates to the String variant.
        return cacheSize(Integer.toString(cacheSize));
    }

    /**
     * Sets the maximum size used by the
     * {@link org.apache.camel.spi.ProducerCache} which is used to cache and
     * reuse producers when using this recipient list, when uris are reused.
     *
     * Beware that when using dynamic endpoints then it affects how well the cache can be utilized.
     * If each dynamic endpoint is unique then its best to turn of caching by setting this to -1, which
     * allows Camel to not cache both the producers and endpoints; they are regarded as prototype scoped
     * and will be stopped and discarded after use. This reduces memory usage as otherwise producers/endpoints
     * are stored in memory in the caches.
     *
     * However if there are a high degree of dynamic endpoints that have been used before, then it can
     * benefit to use the cache to reuse both producers and endpoints and therefore the cache size
     * can be set accordingly or rely on the default size (1000).
     *
     * If there is a mix of unique and used before dynamic endpoints, then setting a reasonable cache size
     * can help reduce memory usage to avoid storing too many non frequent used producers.
     *
     * @param cacheSize the cache size, use <tt>0</tt> for default cache size,
     *                  or <tt>-1</tt> to turn cache off.
     * @return the builder
     */
    public ToDynamicDefinition cacheSize(String cacheSize) {
        setCacheSize(cacheSize);
        return this;
    }

    /**
     * Ignore the invalidate endpoint exception when try to create a producer
     * with that endpoint
     *
     * @return the builder
     */
    public ToDynamicDefinition ignoreInvalidEndpoint(boolean ignoreInvalidEndpoint) {
        return ignoreInvalidEndpoint(Boolean.toString(ignoreInvalidEndpoint));
    }

    /**
     * Ignore the invalidate endpoint exception when try to create a producer
     * with that endpoint
     *
     * @return the builder
     */
    public ToDynamicDefinition ignoreInvalidEndpoint(String ignoreInvalidEndpoint) {
        setIgnoreInvalidEndpoint(ignoreInvalidEndpoint);
        return this;
    }

    /**
     * Whether to allow components to optimise toD if they are
     * {@link org.apache.camel.spi.SendDynamicAware}.
     *
     * @return the builder
     */
    public ToDynamicDefinition allowOptimisedComponents() {
        return allowOptimisedComponents(true);
    }

    /**
     * Whether to allow components to optimise toD if they are
     * {@link org.apache.camel.spi.SendDynamicAware}.
     *
     * @return the builder
     */
    public ToDynamicDefinition allowOptimisedComponents(boolean allowOptimisedComponents) {
        return allowOptimisedComponents(Boolean.toString(allowOptimisedComponents));
    }

    /**
     * Whether to allow components to optimise toD if they are
     * {@link org.apache.camel.spi.SendDynamicAware}.
     *
     * @return the builder
     */
    public ToDynamicDefinition allowOptimisedComponents(String allowOptimisedComponents) {
        setAllowOptimisedComponents(allowOptimisedComponents);
        return this;
    }

    // Properties
    // -------------------------------------------------------------------------

    public String getUri() {
        return uri;
    }

    /**
     * The uri of the endpoint to send to. The uri can be dynamic computed using
     * the {@link org.apache.camel.language.simple.SimpleLanguage} expression.
     */
    public void setUri(String uri) {
        this.uri = uri;
    }

    public EndpointProducerBuilder getEndpointProducerBuilder() {
        return endpointProducerBuilder;
    }

    public void setEndpointProducerBuilder(EndpointProducerBuilder endpointProducerBuilder) {
        this.endpointProducerBuilder = endpointProducerBuilder;
    }

    public String getPattern() {
        return pattern;
    }

    public void setPattern(String pattern) {
        this.pattern = pattern;
    }

    public String getCacheSize() {
        return cacheSize;
    }

    public void setCacheSize(String cacheSize) {
        this.cacheSize = cacheSize;
    }

    public String getIgnoreInvalidEndpoint() {
        return ignoreInvalidEndpoint;
    }

    public void setIgnoreInvalidEndpoint(String ignoreInvalidEndpoint) {
        this.ignoreInvalidEndpoint = ignoreInvalidEndpoint;
    }

    public String getAllowOptimisedComponents() {
        return allowOptimisedComponents;
    }

    public void setAllowOptimisedComponents(String allowOptimisedComponents) {
        this.allowOptimisedComponents = allowOptimisedComponents;
    }
}
{ "content_hash": "1afdc3ae1a209abe5582eb16c8908bb1", "timestamp": "", "source": "github", "line_count": 249, "max_line_length": 110, "avg_line_length": 34.46987951807229, "alnum_prop": 0.6802982640102528, "repo_name": "ullgren/camel", "id": "4afe0954a03d73c97a022e186060ef6ef08ad5f7", "size": "9385", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "core/camel-core-engine/src/main/java/org/apache/camel/model/ToDynamicDefinition.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Apex", "bytes": "6519" }, { "name": "Batchfile", "bytes": "1518" }, { "name": "CSS", "bytes": "16394" }, { "name": "Elm", "bytes": "10852" }, { "name": "FreeMarker", "bytes": "11410" }, { "name": "Groovy", "bytes": "14490" }, { "name": "HTML", "bytes": "896075" }, { "name": "Java", "bytes": "69929414" }, { "name": "JavaScript", "bytes": "90399" }, { "name": "Makefile", "bytes": "513" }, { "name": "Shell", "bytes": "17108" }, { "name": "Tcl", "bytes": "4974" }, { "name": "Thrift", "bytes": "6979" }, { "name": "XQuery", "bytes": "546" }, { "name": "XSLT", "bytes": "270186" } ], "symlink_target": "" }
ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
{ "content_hash": "2305672bd07c05d6d441ec94d1e8d8f4", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 0.7063492063492064, "repo_name": "mdoering/backbone", "id": "cb3eb19b0b0c09063de44cfce1761b5a44e9a390", "size": "180", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Erigeron/Erigeron strictissimus/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
import socket

from test_framework.socks5 import Socks5Configuration, Socks5Command, Socks5Server, AddressType
from test_framework.test_framework import NavCoinTestFramework
# NOTE(review): `os`, `start_nodes` and `assert_equal` are pulled in by this
# wildcard import — TODO confirm against test_framework.util.
from test_framework.util import *
from test_framework.netutil import test_ipv6_local
'''
Test plan:
- Start navcoind's with different proxy configurations
- Use addnode to initiate connections
- Verify that proxies are connected to, and the right connection command is given
- Proxy configurations to test on navcoind side:
    - `-proxy` (proxy everything)
    - `-onion` (proxy just onions)
    - `-proxyrandomize` Circuit randomization
- Proxy configurations to test on proxy side,
    - support no authentication (other proxy)
    - support no authentication + user/pass authentication (Tor)
    - proxy on IPv6

- Create various proxies (as threads)
- Create navcoinds that connect to them
- Manipulate the navcoinds using addnode (onetry) and observe effects

addnode connect to IPv4
addnode connect to IPv6
addnode connect to onion
addnode connect to generic DNS name
'''

class ProxyTest(NavCoinTestFramework):
    def __init__(self):
        """Configure the test and spin up the local SOCKS5 proxy servers."""
        super().__init__()
        self.num_nodes = 4
        self.setup_clean_chain = False

        self.have_ipv6 = test_ipv6_local()
        # Create two proxies on different ports
        # Ports are derived from the PID, presumably so that concurrent test
        # runs do not collide — NOTE(review): confirm this convention.
        # ... one unauthenticated
        self.conf1 = Socks5Configuration()
        self.conf1.addr = ('127.0.0.1', 13000 + (os.getpid() % 1000))
        self.conf1.unauth = True
        self.conf1.auth = False
        # ... one supporting authenticated and unauthenticated (Tor)
        self.conf2 = Socks5Configuration()
        self.conf2.addr = ('127.0.0.1', 14000 + (os.getpid() % 1000))
        self.conf2.unauth = True
        self.conf2.auth = True
        if self.have_ipv6:
            # ... one on IPv6 with similar configuration
            self.conf3 = Socks5Configuration()
            self.conf3.af = socket.AF_INET6
            self.conf3.addr = ('::1', 15000 + (os.getpid() % 1000))
            self.conf3.unauth = True
            self.conf3.auth = True
        else:
            print("Warning: testing without local IPv6 support")

        # Start the proxy servers; each records received SOCKS5 commands on a queue.
        self.serv1 = Socks5Server(self.conf1)
        self.serv1.start()
        self.serv2 = Socks5Server(self.conf2)
        self.serv2.start()
        if self.have_ipv6:
            self.serv3 = Socks5Server(self.conf3)
            self.serv3.start()

    def setup_nodes(self):
        """Start four navcoind nodes, each with a different proxy configuration."""
        # Note: proxies are not used to connect to local nodes
        # this is because the proxy to use is based on CService.GetNetwork(), which return NET_UNROUTABLE for localhost
        args = [
            ['-listen', '-debug=net', '-debug=proxy', '-proxy=%s:%i' % (self.conf1.addr),'-proxyrandomize=1'],
            ['-listen', '-debug=net', '-debug=proxy', '-proxy=%s:%i' % (self.conf1.addr),'-onion=%s:%i' % (self.conf2.addr),'-proxyrandomize=0'],
            ['-listen', '-debug=net', '-debug=proxy', '-proxy=%s:%i' % (self.conf2.addr),'-proxyrandomize=1'],
            []
            ]
        if self.have_ipv6:
            # Node 3 uses the IPv6 proxy (only when local IPv6 is available).
            args[3] = ['-listen', '-debug=net', '-debug=proxy', '-proxy=[%s]:%i' % (self.conf3.addr),'-proxyrandomize=0', '-noonion']
        return start_nodes(self.num_nodes, self.options.tmpdir, extra_args=args)

    def node_test(self, node, proxies, auth, test_onion=True):
        """Exercise one node's outgoing connections through the given proxies.

        Issues `addnode ... onetry` for an IPv4 literal, an IPv6 literal
        (when available), a .onion name (unless test_onion is False) and a
        generic DNS name, asserting that the corresponding proxy in `proxies`
        received the expected SOCKS5 connect command.  When `auth` is False,
        also asserts no credentials were sent.  Returns the list of captured
        Socks5Command objects.
        """
        rv = []
        # Test: outgoing IPv4 connection through node
        node.addnode("15.61.23.23:1234", "onetry")
        cmd = proxies[0].queue.get()
        assert(isinstance(cmd, Socks5Command))
        # Note: navcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
        assert_equal(cmd.atyp, AddressType.DOMAINNAME)
        assert_equal(cmd.addr, b"15.61.23.23")
        assert_equal(cmd.port, 1234)
        if not auth:
            assert_equal(cmd.username, None)
            assert_equal(cmd.password, None)
        rv.append(cmd)

        if self.have_ipv6:
            # Test: outgoing IPv6 connection through node
            node.addnode("[1233:3432:2434:2343:3234:2345:6546:4534]:5443", "onetry")
            cmd = proxies[1].queue.get()
            assert(isinstance(cmd, Socks5Command))
            # Note: navcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
            assert_equal(cmd.atyp, AddressType.DOMAINNAME)
            assert_equal(cmd.addr, b"1233:3432:2434:2343:3234:2345:6546:4534")
            assert_equal(cmd.port, 5443)
            if not auth:
                assert_equal(cmd.username, None)
                assert_equal(cmd.password, None)
            rv.append(cmd)

        if test_onion:
            # Test: outgoing onion connection through node
            node.addnode("navcoinostk4e4re.onion:5556", "onetry")
            cmd = proxies[2].queue.get()
            assert(isinstance(cmd, Socks5Command))
            assert_equal(cmd.atyp, AddressType.DOMAINNAME)
            assert_equal(cmd.addr, b"navcoinostk4e4re.onion")
            assert_equal(cmd.port, 5556)
            if not auth:
                assert_equal(cmd.username, None)
                assert_equal(cmd.password, None)
            rv.append(cmd)

        # Test: outgoing DNS name connection through node
        node.addnode("node.noumenon:5556", "onetry")
        cmd = proxies[3].queue.get()
        assert(isinstance(cmd, Socks5Command))
        assert_equal(cmd.atyp, AddressType.DOMAINNAME)
        assert_equal(cmd.addr, b"node.noumenon")
        assert_equal(cmd.port, 5556)
        if not auth:
            assert_equal(cmd.username, None)
            assert_equal(cmd.password, None)
        rv.append(cmd)

        return rv

    def run_test(self):
        """Run node_test against each node, then verify RPC getnetworkinfo output."""
        # basic -proxy
        self.node_test(self.nodes[0], [self.serv1, self.serv1, self.serv1, self.serv1], False)

        # -proxy plus -onion
        self.node_test(self.nodes[1], [self.serv1, self.serv1, self.serv2, self.serv1], False)

        # -proxy plus -onion, -proxyrandomize
        rv = self.node_test(self.nodes[2], [self.serv2, self.serv2, self.serv2, self.serv2], True)
        # Check that credentials as used for -proxyrandomize connections are unique
        credentials = set((x.username,x.password) for x in rv)
        assert_equal(len(credentials), len(rv))

        if self.have_ipv6:
            # proxy on IPv6 localhost
            self.node_test(self.nodes[3], [self.serv3, self.serv3, self.serv3, self.serv3], False, False)

        def networks_dict(d):
            # Re-key getnetworkinfo's 'networks' list by network name for easy lookup.
            r = {}
            for x in d['networks']:
                r[x['name']] = x
            return r

        # test RPC getnetworkinfo
        n0 = networks_dict(self.nodes[0].getnetworkinfo())
        for net in ['ipv4','ipv6','onion']:
            assert_equal(n0[net]['proxy'], '%s:%i' % (self.conf1.addr))
            assert_equal(n0[net]['proxy_randomize_credentials'], True)
        assert_equal(n0['onion']['reachable'], True)

        n1 = networks_dict(self.nodes[1].getnetworkinfo())
        for net in ['ipv4','ipv6']:
            assert_equal(n1[net]['proxy'], '%s:%i' % (self.conf1.addr))
            assert_equal(n1[net]['proxy_randomize_credentials'], False)
        assert_equal(n1['onion']['proxy'], '%s:%i' % (self.conf2.addr))
        assert_equal(n1['onion']['proxy_randomize_credentials'], False)
        assert_equal(n1['onion']['reachable'], True)

        n2 = networks_dict(self.nodes[2].getnetworkinfo())
        for net in ['ipv4','ipv6','onion']:
            assert_equal(n2[net]['proxy'], '%s:%i' % (self.conf2.addr))
            assert_equal(n2[net]['proxy_randomize_credentials'], True)
        assert_equal(n2['onion']['reachable'], True)

        if self.have_ipv6:
            n3 = networks_dict(self.nodes[3].getnetworkinfo())
            for net in ['ipv4','ipv6']:
                assert_equal(n3[net]['proxy'], '[%s]:%i' % (self.conf3.addr))
                assert_equal(n3[net]['proxy_randomize_credentials'], False)
            assert_equal(n3['onion']['reachable'], False)

if __name__ == '__main__':
    ProxyTest().main()
{ "content_hash": "89eea0ba37c799e0d600cba1ffc83baa", "timestamp": "", "source": "github", "line_count": 190, "max_line_length": 146, "avg_line_length": 42.584210526315786, "alnum_prop": 0.6051167964404894, "repo_name": "navcoindev/navcoin-core", "id": "3e0a6f50f4f8958f925250c65e6bd3e4f774041e", "size": "8306", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "qa/rpc-tests/proxy_test.py", "mode": "33261", "license": "mit", "language": [ { "name": "C", "bytes": "3655915" }, { "name": "C++", "bytes": "4954999" }, { "name": "CSS", "bytes": "1127" }, { "name": "HTML", "bytes": "50621" }, { "name": "Java", "bytes": "2100" }, { "name": "M4", "bytes": "176582" }, { "name": "Makefile", "bytes": "105930" }, { "name": "Objective-C", "bytes": "3771" }, { "name": "Objective-C++", "bytes": "7240" }, { "name": "Protocol Buffer", "bytes": "2308" }, { "name": "Python", "bytes": "946426" }, { "name": "QMake", "bytes": "2020" }, { "name": "Roff", "bytes": "3792" }, { "name": "Shell", "bytes": "426873" } ], "symlink_target": "" }
# Specs for RestChain::API rule definitions: registering rules through the
# `describe`/`define` DSL, chaining rules with `apply_rule`, aborting a rule
# with `next`, and deriving suggestion keys from a parsed resource.
describe "API Rules" do

  # Drop any rules previously registered for the :demo API so each
  # example starts from a clean definition.
  before do
    RestChain::API.clear :demo
  end

  describe "Interface" do

    it "should create 1 definition" do
      RestChain::API::Definition.describe :demo do
        define :properties do |resource|
          resource.properties.each { |k, v| resource.set(k, v) }
        end
      end
      RestChain::API.definition_for(:demo).rules.count.should ==1
      RestChain::API.definition_for(:demo).rules.first.name.should == :properties
    end

    it "should create 2 descriptions" do
      RestChain::API::Definition.describe :demo do
        define :properties do |resource|
          resource.properties.each { |k, v| resource.set(k, v) }
        end
        define :entities do |resource|
          resource.properties.each { |k, v| resource.set(k, v) }
        end
      end
      rules = RestChain::API.definition_for(:demo).rules
      rules.count.should ==2
      # Rules are kept in registration order.
      rules.first.name.should == :properties
      rules.last.name.should == :entities
    end

    it "should join rules" do
      RestChain::API::Definition.describe :demo do
        define :properties do |resource|
          resource['properties'].each { |k, v| resource[k] = v }
        end
        # :some delegates to the :properties rule via apply_rule.
        define :some do |resource|
          apply_rule :properties, resource
        end
      end
      resource = { 'properties' => { "name" => "mile" } }
      RestChain::API.definition_for(:demo).rules.last.apply_on(resource,:some_method)
      # The chained :properties rule copied the nested keys onto the resource.
      resource.should have_key('name')
    end

    it "should skip rule" do
      RestChain::API::Definition.describe :demo do
        define :properties do |resource|
          # `next` leaves the block immediately, so the copy below never runs.
          next
          resource['properties'].each { |k, v| resource[k] = v }
        end
      end
      resource = { 'properties' => { "name" => "mile" } }
      # `rescue nil` keeps the example alive even if apply_on raises here.
      RestChain::API.definition_for(:demo).rules.first.apply_on(resource,:some_method) rescue nil
      resource.should_not have_key('name')
    end

    describe "Suggestions" do
      # NOTE(review): SIREN_YML is a shared fixture — presumably loaded by the
      # spec helper; confirm there.
      let(:item) { SIREN_YML['item'].dup }

      it "should return :user,:create,:customer" do
        RestChain::API::Definition.describe :demo do
          # Empty-bodied rules: only the suggestion key (name/rel) matters here.
          define( :actions, suggest: :name ){|*|}
          define( :entities, suggest: :rel ){|*|}
        end
        resource = item.to_rest_chain
        resource.suggestions.should == [:user, :customer, :items, :owner, :create, :update]
      end
    end
  end
end
{ "content_hash": "003e5019e1ba6ad47cff08176d252a24", "timestamp": "", "source": "github", "line_count": 76, "max_line_length": 98, "avg_line_length": 31.19736842105263, "alnum_prop": 0.6001687051876845, "repo_name": "avstudio/rest-chain", "id": "f87d88a0728bbf36ad3a0be05a39902bede21795", "size": "2371", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spec/api_definition_spec.rb", "mode": "33261", "license": "mit", "language": [ { "name": "Perl", "bytes": "1048" }, { "name": "Ruby", "bytes": "42002" }, { "name": "Shell", "bytes": "37" } ], "symlink_target": "" }
[![Build Status](https://secure.travis-ci.org/SlexAxton/messageformat.js.png)](http://travis-ci.org/SlexAxton/messageformat.js) # messageformat.js The experience and subtlety of your program's text can be important. MessageFormat (PluralFormat + SelectFormat) is a mechanism for handling both *pluralization* and *gender* in your applications. It can also lead to much better translations, as it was built by [ICU](http://icu-project.org/apiref/icu4j/com/ibm/icu/text/MessageFormat.html) to help solve those two problems for all known [CLDR](http://cldr.unicode.org/) languages - likely all the ones you care about. There is a good slide-deck on [Plural and Gender in Translated Messages](https://docs.google.com/presentation/d/1ZyN8-0VXmod5hbHveq-M1AeQ61Ga3BmVuahZjbmbBxo/pub?start=false&loop=false&delayms=3000#slide=id.g1bc43a82_2_14) by Markus Scherer and Mark Davis. But, again, remember that many of these problems apply even if you're only outputting english. [See just how many different pluralization rules there are.](http://unicode.org/repos/cldr-tmp/trunk/diff/supplemental/language_plural_rules.html) MessageFormat in Java-land technically incorporates all other type formatting (and the older ChoiceFormat) directly into its messages, however, in the name of filesize, messageformat.js only strives to implement **SelectFormat** and **PluralFormat**. There are plans to pull in locale-aware **NumberFormat** parsing as a "plugin" to this library, but as of right now, it's best to pass things in preformatted (as suggested in the ICU docs). We have also ported the Google Closure implementation of [NumberFormat](https://github.com/jedtoolkit/numberformat.js), but there is no direct integration of these two libraries. (They work well together!) ## What problems does it solve? A progression of strings in programs: > There are 1 results. > There are 1 result(s). > Number of results: 5. These are generally unacceptable in this day and age. 
Not to mention the problem expands when you consider languages with 6 different pluralization rules. You may be using something like Gettext to solve this across multiple languages, but even Gettext falls flat. ## What does it look like? ICU bills the format as easy to read and write. It may be _more_ easy to read and write, but I'd still suggest a tool for non-programmers. It looks a lot like Java's `ChoiceFormat` - but is different in a few significant ways, most notably its addition of the `plural` keyword, and more friendly `select` syntax. ``` {GENDER, select, male {He} female {She} other {They} } found {NUM_RESULTS, plural, one {1 result} other {# results} } in {NUM_CATEGORIES, plural, one {1 category} other {# categories} }. ``` Here's a few data sets against this message: ```javascript { "GENDER" : "male", "NUM_RESULTS" : 1, "NUM_CATEGORIES" : 2 } > "He found 1 result in 2 categories." { "GENDER" : "female", "NUM_RESULTS" : 1, "NUM_CATEGORIES" : 2 } > "She found 1 result in 2 categories." { "GENDER" : "male", "NUM_RESULTS" : 2, "NUM_CATEGORIES" : 1 } > "He found 2 results in 1 category." { "NUM_RESULTS" : 2, "NUM_CATEGORIES" : 2 } > "They found 2 results in 2 categories." ``` There is very little that needs to be repeated (until gender modifies more than one word), and there are equivalent/appropriate plural keys for every single language in the CLDR database. The syntax highlighting is less than ideal, but parsing a string like this gives you flexibility for your messages even if you're _only_ dealing with english. ## Features * Handles arbitrary nesting of pluralization and select rules. * Works on the server and the client. * No i18n necessary - you can use it for just well-formed english sentences. `UX++;` * Speed: Compiles messages to JavaScript code. * This is great for speed. Message formatting is just string concatenation. * Run a precompiler at build time and remove the need for most of the library. 
`filesize--;` * Compatible with other languages that support MessageFormat * Very whitespace tolerant. * Supports unicode characters * Most translation companies are beginning to be exposed to translations like this, even if it's not their core business. ## Install ### Node ```javascript > npm install messageformat var MessageFormat = require('messageformat'); ``` ### Browser ```html <script src="/path/to/messageformat-v0.x.x.js"></script> ``` ### Adding Provided Locales ```html <!-- after the messageformat.js include, but before you need to use the locale --> <script src="/path/to/messageformat/locales/xx_xx.js"></script> ``` TODO:: In node, we can automatically pull in all known locales for you. ### Adding Custom locales ```javascript // Any time after MessageFormat is included MessageFormat.locale["locale_name"] = function () { ... }; // Or during instantiation var mf = new MessageFormat( 'locale_name', function () { ... } ); ``` ## Tests These require node: ```sh > make test > make test-browser ``` ## Build Time Compilation You **really** should take advantage of this. It is _much_ faster than parsing in real-time. I will eventually release a Handlebars and Require.js (r.js) plugin to do this automatically. For now you can grab the raw javascript function the following way: ```javascript > var mf = new MessageFormat('en'); > var messages = { simple: 'A simple message.', var: 'Message with {X}.', plural: 'You have {N, plural, =0{no messages} one{1 message} other{# messages}}.', select: '{GENDER, select, male{He has} female{She has} other{They have}} sent you a message.', ordinal: 'The {N, selectordinal, one{1st} two{2nd} few{3rd} other{#th}} message.' }; > var vfunc = mf.compile(messages.var); > vfunc({X:'a variable'}) 'Message with a variable.' > vfunc.toString() 'function (d){return "Message with "+d["X"]+"."}' > var mfunc = mf.compile(messages); > mfunc().ordinal({N:3}) 'The 3rd message.' 
> console.log(mfunc.toString()) function anonymous() { var n=function(v,o){ if (isNaN(v)) throw new Error("'"+v+"' isn't a number."); return v - (o||0) }, p=function(v,o,l,p,s){ return v in p ? p[v] : ( v = l(o ? v-o : v, s), v in p ? p[v] : p.other ) }, s=function(v,p){ return v in p ? p[v] : p.other }, pf={"en":function(n,ord) { var s = String(n).split('.'), v0 = !s[1], t0 = Number(s[0]) == n, n10 = t0 && s[0].substr(-1), n100 = t0 && s[0].substr(-2); if (ord) return (n10 == 1 && n100 != 11) ? 'one' : (n10 == 2 && n100 != 12) ? 'two' : (n10 == 3 && n100 != 13) ? 'few' : 'other'; return (n == 1 && v0) ? 'one' : 'other'; }}, fmt={}; return { simple:function(d){return "A simple message."}, var:function(d){return "Message with "+d["X"]+"."}, plural:function(d){return "You have "+p(d["N"],0,pf["en"],{0:"no messages",one:"1 message",other:n(d["N"])+" messages"})+"."}, select:function(d){return s(d["GENDER"],{male:"He has",female:"She has",other:"They have"})+" sent you a message."}, ordinal:function(d){return "The "+p(d["N"],0,pf["en"],{one:"1st",two:"2nd",few:"3rd",other:n(d["N"])+"th"},1)+" message."}} } ``` ### The CLI compiler If you don't want to compile your templates programmatically, you can use the built in CLI compiler. This tool is in early stage. It was tested on Linux and Windows, but if you find a bug, please create an issue. 
#### Usage > [sudo] npm install -g messageformat > messageformat Usage: messageformat -l [locale] [INPUT_DIR] [OUTPUT_DIR] --locale, -l locale to use [mandatory] --inputdir, -i directory containings messageformat files to compile $PWD --output, -o output where messageformat will be compiled $PWD --watch, -w watch `inputdir` for change false --namespace, -ns object in the browser containing the templates window.i18n --include, -I Glob patterns for files to include in `inputdir` **/*.json --stdout, -s Print the result in stdout instead of writing in a file false --module, -m create a commonJS module, instead of a window variable false --verbose, -v Print logs for debug false If your prefer looking at an example [go there](https://github.com/SlexAxton/messageformat.js/tree/master/example/en). `messageformat` will read every JSON files in `inputdir` and compile them to `output`. When using the CLI, the following commands will works exactly the same: > messageformat --locale en ./example/en > messageformat --locale en ./example/en ./i18n.js > messageformat --locale en --inputdir ./example/en --output ./i18n.js or even shorter > cd example/en > messageformat -l en You can also do it with a unix pipe > messageformat -l en --stdout > i18n.js Take a look at the example [inputdir](https://github.com/SlexAxton/messageformat.js/tree/master/example/en) and [output](https://github.com/SlexAxton/messageformat.js/tree/master/example/en/i18n.js) A watch mode is available with the `--watch` or `-w` option. #### The JSON messageformat files The original JSON files are simple objects, with a key and a messageformat string as value, like [this one](https://github.com/SlexAxton/messageformat.js/blob/master/example/en/sub/folder/plural.json): { "test": "Your {NUM, plural, one{message} other{messages}} go here." } The CLI walks into `inputdir` recursively so you can structure your messageformat with [dirs and subdirs](https://github.com/SlexAxton/messageformat.js/tree/master/example/en). 
#### In the browser Now that you have compiled your messageformat, you can use it in your [html](https://github.com/SlexAxton/messageformat.js/blob/master/example/index.html) by adding a `<script src="index.js"></script>`. In the browser, the global `window.i18n` is an object containing the messageformat compiled functions. > i18n Object colors: Object blue: [ Function ] green: [ Function ] red: [ Function ] "sub/folder/plural": Object test: [ Function ] You could then use it: $('<div>').text( window.i18n[ 'sub/folder/plural' ].test( { NUM: 1 } ) ).appendTo('#content'); The namespace `window.i18n` could be changed with the `--namespace` or `-ns` option. Subdirectories messageformat are available in the `window.i18n` namespace, prefixed with their relative path : > window.i18n['sub/folder/plural'] Object * test: [ Function ] `sub/folder` is the path, `plural` is the name of [the JSON file](https://github.com/SlexAxton/messageformat.js/blob/master/example/en/sub/folder/plural.json), `test` is the key used. A working example is available [here](https://github.com/SlexAxton/messageformat.js/tree/master/example). ### No Frills The most simple case of MessageFormat would involve no formatting. Just a string passthrough. This sounds silly, but often it's nice to always use the same i18n system when doing translations, and not everything takes variables. ```javascript // Insantiate a MessageFormat object on your locale var mf = new MessageFormat('en'); // Compile a message var message = mf.compile( 'This is a message.' ); // returns a function // You can call the function to get data out > message(); "This is a message." // NOTE:: if a message _does_ require data to be passed in, an error is thrown if you do not. ``` ### Simple Variable Replacement The second most simple way to use MessageFormat is for simple variable replacement. MessageFormat looks odd at first, but it's actually fairly simple. 
One way to think about the `{` and `}` is that every level of them brings you into and out of `literal` and `code` mode.

By default (like in the previous example), you are just writing a literal. Then the first level of brackets brings you into one of several data-driven situations. The most simple is variable replacement.

Simply putting a variable name in between `{` and `}` will place that variable there in the output.

```javascript
// Instantiate new MessageFormat object for your locale
var mf = new MessageFormat('en');

// Compile a message
var message = mf.compile('His name is {NAME}.');

// Then send that data into the function
> message({ "NAME" : "Jed" });
"His name is Jed."

// NOTE:: it's best to try and stick to keys that would be natively
// tolerant in your JS runtimes (think valid JS variable names).
```

### SelectFormat

`SelectFormat` is a lot like a switch statement for your messages. Most often it's used to select gender in a string. Here's an example:

```javascript
// Instantiate an instance with your language settings
var mf = new MessageFormat('en');

// Compile a message - returns a function
var message = mf.compile('{GENDER, select, male{He} female{She} other{They}} liked this.');

// Run your message function with your data
> message({"GENDER" : "male"});
"He liked this."

> message({"GENDER" : "female"});
"She liked this."

// The 'other' key is **required** and in the case of GENDER
// it should be phrased as if you are too far away to tell the gender of the subject.
> message({});
"They liked this."
```

### PluralFormat

`PluralFormat` is a similar mechanism to `SelectFormat` (especially syntax wise), but it's specific to numbers, and the key that is chosen is generated by a _Plural Function_.
```javascript
// Instantiate a new MessageFormat object
var mf = new MessageFormat('en');

// You can use the provided locales in the `/locale` folder
// (include the file directly after including messageformat.js)
var mf = new MessageFormat( 'sl' );

// OR - you can pass a custom plural function to the MessageFormat constructor function.
var mf = new MessageFormat( 'requiredCustomName', function (n) {
  if ( n === 42 ) {
    return 'many';
  }
  return 'other';
});

// Then the numbers that are passed into a compiled message will run through this function to select
// the keys. This is for the 'en' locale:
var message = mf.compile('There {NUM_RESULTS, plural, one{is one result} other{are # results}}.');

// Then the data causes the function to output:
> message({"NUM_RESULTS" : 0});
"There are 0 results."

> message({"NUM_RESULTS" : 1});
"There is one result."

> message({"NUM_RESULTS" : 100});
"There are 100 results."
```

#### Named Keys

ICU declares the 6 named keys that CLDR defines for their plural form data. Those are:

* zero
* one
* two
* few
* many
* other (**required**)

All of them are fairly straight-forward, but do remember, that for some languages, they are more loose "guidelines" than they are exact. The only **required** key is `other`. Your compilation will throw an error if you forget this.

In english, and many other languages, the logic is simple: `If N equals 1, then ONE, otherwise OTHER`

Other languages (take a peek at `ar.js` or `sl.js`) can get much more complicated.

**Remember. English only uses `one` and `other` - so including `zero` will never get called, even when the number is 0**

The most simple (to pluralize) languages have no pluralization rules and rely solely on the `other` named key.
``` {NUM, plural, zero {There are zero - in a lang that needs it.} one {There is one - in a lang that has it.} two {There is two - in a lang that has it.} few {There are a few - in a lang that has it.} many {There are many - in a lang that has it.} other {There is a different amount than all the other stuff above.} } ``` #### Literal Numeric Keys There also exists the capability to put literal numbers as keys in a select statement. These are delimited by prefixing them with the `=` character. These will match single, specific numbers. If there is a match, that branch will immediately run, and the corresponding named key **will not** also run. There are plenty of legitimate uses for this, especially when considering base cases and more pleasant language. But if you're a Douglas Adams fan, might use it like so: ``` You have {NUM_TASKS, plural, one {one task} other {# tasks} =42 {the answer to the life, the universe and everything tasks} } remaining. ``` When `NUM_TASKS` is 42, this outputs smiles. Remember, these have priority over the named keys. ### PluralFormat - offset extension ICU provided the ability to extend existing select and plural functionality, and the only official extension (that I could find) is the `offset` extension. It goes after the `plural` declaration, and is used to generate sentences that break up a number into multiple sections. For instance: > You and 4 others added this to their profiles. In this case, the total number of people who added 'this' to their profiles is actually 5. We can use the `offset` extension to help us with this. ```javascript var mf = new MessageFormat('en'); // For simplicity's sake, let's assume the base case here isn't silly. 
// The test suite has a bigger offset example at the bottom // Let's also assume neutral gender for the same reason // Set the offset to 1 var message = mf.compile( 'You {NUM_ADDS, plural, offset:1' + '=0{didnt add this to your profile}' + // Number literals, with a `=` do **NOT** use '=1{added this to your profile}' + // the offset value 'one{and one other person added this to their profile}' + 'other{and # others added this to their profiles}' + '}.' ); // Tip: I like to consider the `=` prefixed number literals as more of an "inductive step" // e.g. in this case, since (0 - 1) is _negative_ 1, we want to handle that base case. > message({"NUM_ADDS" : 0 }); "You didnt add this to your profile." > message({"NUM_ADDS" : 1 }); "You added this to your profile." > message({"NUM_ADDS" : 2 }); "You and one other person added this to their profile." > message({"NUM_ADDS" : 3 }); "You and 2 others added this to their profile." ``` ### Nesting Very simply, you can nest both `SelectFormat` blocks into `PluralFormat` blocks, and visa-versa, as deeply as you'd like. Simply start the new block directly inside: ``` {SEL1, select, other {{PLUR1, plural, one {1} other {{SEL2, select, other {deep in the heart.} }} }} } ``` ### Escaping messageformat.js tries to a good job of being tolerant of as much as possible, but some characters, like the ones used the actual MessageFormat spec itself, must be escaped to be a part of your string. For `{`, `}` and `#` (only inside of a select value) literals, just escape them with a backslash. (If you are in a JS string, you'll need to escape the escape backslash so it'll look like two). ```javascript // Technically, it's just: \{\}\# // But in practice, since you're often dealing with string literals, it looks more like var msg = mf.compile("\\{ {S, select, other{# is a \\#}} \\}"); > msg({S:5}); "{ 5 is a # }" ``` ## Why not Gettext? Gettext can generally go only one level deep without hitting some serious roadblocks. 
For example, two plural elements in a sentence, or the combination of gender and plurals. ### This would be prohibitively difficult with Gettext > He found 5 results in 2 categories. > She found 1 result in 1 category. > He found 2 results in 1 category. It can likely be done with contexts/domains for gender and some extra plural forms work to pick contexts for the plurals, but it's less than ideal. Not to mention every translation must be completed in its entirety for every combination. That stinks too. You can easily mix Gettext and MessageFormat by storing MessageFormat strings in your .po files. However, I would stop using the built in plural functions of Gettext. I tend to only use Gettext on projects that are already using it in other languages, so we can share translations, otherwise, I like to live on the wild-side and use PluralFormat and SelectFormat. Most Gettext tools will look up the Plural Forms for a given locale for you. This is also the opinion of PluralFormat. The library should just contain the known plural forms of every locale, and not force translators to reinput this information each time. ## Version `0.3.0-0` ## TODO * Update the documentation * Create a tool to help translators understand and use this format. * Template integration - I specifically want to make a build time handlebars.js plugin to build this logic into the template builds. ## License You may use this software under the MIT License. You may contribute to this software under the Dojo CLA - <http://dojofoundation.org/about/cla> ## Author * Alex Sexton - [@SlexAxton](http://twitter.com/SlexAxton) - <http://alexsexton.com/> ## Major Contributors * Eemeli Aro - [@eemeli](https://github.com/eemeli) ## Credits Thanks to: * [Bazaarvoice](https://github.com/Bazaarvoice) - my employer - for letting me do cool stuff like this. * Google has an implementation that is similar in Google Closure, I tried to vet my code against many of their tests. 
* Norbert Lindenberg for showing me how good it can be. ## Implementations in other languages [Jeff Hansen](https://github.com/jeffijoe) ([@jeffijoe](https://twitter.com/jeffijoe)) wrote an implementation for .NET: https://github.com/jeffijoe/messageformat.net - it's a Portable Class Library, making it possible to use on iOS, Android, Windows Phone, and pretty much any other .NET target.
{ "content_hash": "ee9d23249f46d69ac69a49a7682128e2", "timestamp": "", "source": "github", "line_count": 556, "max_line_length": 468, "avg_line_length": 38.710431654676256, "alnum_prop": 0.6994842726385727, "repo_name": "nooks/messageformat.js", "id": "116bbdaa102aa3f16d64e5b5d9944ede911c9508", "size": "21523", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "1760" }, { "name": "JavaScript", "bytes": "73873" }, { "name": "Makefile", "bytes": "1238" } ], "symlink_target": "" }
# Homebrew cask for Rogue Amoeba's SoundSource audio controller.
cask "soundsource" do
  version "5.5.4"
  # Upstream serves a single unversioned download URL, so the archive
  # cannot be pinned to a checksum.
  sha256 :no_check

  url "https://rogueamoeba.com/soundsource/download/SoundSource.zip"
  name "SoundSource"
  desc "Sound and audio controller"
  homepage "https://rogueamoeba.com/soundsource/"

  # New versions are discovered via the vendor's Sparkle appcast endpoint.
  livecheck do
    url "https://rogueamoeba.net/ping/versionCheck.cgi?format=sparkle&system=1231&bundleid=com.rogueamoeba.soundsource&platform=osx&version=#{version.no_dots}8000"
    strategy :sparkle
  end

  # The app declares its own self-update mechanism.
  auto_updates true
  depends_on macos: ">= :high_sierra"

  app "SoundSource.app"

  # Leftover files removed by `brew uninstall --zap`.
  zap trash: [
    "~/Library/Application Support/SoundSource",
    "~/Library/Caches/com.rogueamoeba.soundsource",
    "~/Library/Preferences/com.rogueamoeba.soundsource.plist",
    "~/Library/WebKit/com.rogueamoeba.soundsource",
  ]
end
{ "content_hash": "107e6d3892b6d493e50631b308c93032", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 163, "avg_line_length": 29.53846153846154, "alnum_prop": 0.7330729166666666, "repo_name": "tjnycum/homebrew-cask", "id": "29a94f6ac9620baa60632fcc5b415f184f0688f8", "size": "768", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "Casks/soundsource.rb", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Python", "bytes": "3630" }, { "name": "Ruby", "bytes": "3059535" }, { "name": "Shell", "bytes": "32035" } ], "symlink_target": "" }
// Mongoose models for a restaurant menu: Dish documents with embedded
// per-user comment subdocuments.
const mongoose = require('mongoose');
const Schema = mongoose.Schema;

// Registers the Currency type on mongoose (exposed as mongoose.Types.Currency).
require('mongoose-currency').loadType(mongoose);

// A single review left on a dish: a 1-5 star rating, free-text comment,
// and a reference to the authoring User. Timestamps are added automatically.
const commentSchema = new Schema({
    rating: {
        type: Number,
        min: 1,
        max: 5,
        required: true
    },
    comment: {
        type: String,
        required: true
    },
    postBy: {
        type: mongoose.Schema.Types.ObjectId,
        ref: 'User'
    }
}, {
    timestamps: true
});

// A menu item. `comments` embeds the reviews directly in the dish document;
// `price` uses the currency type (stored as an integer number of cents).
const dishSchema = new Schema({
    name: {
        type: String,
        required: true,
        unique: true
    },
    comments: [commentSchema],
    image: {
        type: String,
        required: true
    },
    category: {
        type: String,
        required: true
    },
    label: {
        type: String,
        default: ''
    },
    price: {
        type: mongoose.Types.Currency,
        required: true
    },
    feature: {
        type: Boolean,
        default: false
    },
    description: {
        type: String,
        required: true
    }
}, {
    timestamps: true
});

// Compile and export the model (collection name derives from 'Dish').
const Dishes = mongoose.model('Dish', dishSchema);

module.exports = Dishes;
{ "content_hash": "b5d05d6bb801bc2b808842ca845eb76b", "timestamp": "", "source": "github", "line_count": 62, "max_line_length": 48, "avg_line_length": 15.5, "alnum_prop": 0.5910509885535901, "repo_name": "kimochg/node-examples", "id": "ab9fcc67f81ed632435ab7cfd31e751483265d10", "size": "961", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "rest-server-passport-oauth/models/dishes.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "444" }, { "name": "HTML", "bytes": "2180" }, { "name": "JavaScript", "bytes": "94979" } ], "symlink_target": "" }
/* Dashboard-widget skin for the MooTools date picker. The whole control is
   absolutely positioned over a fixed-size PNG frame; views (days / months /
   years / time) are swapped inside .body. */
.datepicker_dashboard {
  position: absolute;
  font-size: 10px;
  font-family: "Lucida Grande", LucidaGrande, "Lucida Sans", Geneva, Verdana, sans-serif;
  color: #fff;
  line-height: normal;
  width: 172px;
  height: 135px;
  padding: 14px;
  background: url(frame.png) no-repeat;
}

/* header — title bar with prev/next arrows and an (optional) close button
********************************************************/
.datepicker_dashboard .header {
  position: relative;
  height: 15px;
  margin-bottom: 5px;
  padding-top: 1px;
}

.datepicker_dashboard .header .title {
  text-align: center;
  margin: 2px 18px 0 18px;
}

.datepicker_dashboard .header .titleText {
  color: #ccff00;
}

/* The three header buttons share one sprite sheet (buttons.png); the large
   negative text-indent hides their text labels. */
.datepicker_dashboard .header .previous,
.datepicker_dashboard .header .next,
.datepicker_dashboard .header .closeButton {
  position: absolute;
  cursor: pointer;
  text-indent: -40px;
  overflow: hidden;
  width: 12px;
  height: 12px;
  top: 2px;
  background-image: url(buttons.png);
  background-position: left top;
  background-repeat: no-repeat;
}

.datepicker_dashboard .header .previous {
  left: 4px;
}

/* hover states use the bottom row of the sprite sheet */
.datepicker_dashboard .header .previous:hover {
  background-position: left bottom;
}

.datepicker_dashboard .header .next {
  right: 4px;
  background-position: -13px top;
}

.datepicker_dashboard .header .next:hover {
  background-position: -13px bottom;
}

/* hidden by default; the script shows it when the picker is closable */
.datepicker_dashboard .header .closeButton {
  display: none;
  right: 0px;
  top: 0px;
  background-position: right top;
}

.datepicker_dashboard .header .closeButton:hover {
  background-position: right bottom;
}

/* body — clipping viewport that hosts the current view
********************************************************/
.datepicker_dashboard .body {
  position: relative;
  top: 0px;
  left: 2px;
  width: 168px;
  height: 112px;
  overflow: hidden;
}

/* time — hour/minute spinner inputs plus an OK button
********************************************************/
.datepicker_dashboard .time {
  position: relative;
  width: 100%;
  height: 100%;
}

.datepicker_dashboard .time .hour,
.datepicker_dashboard .time .separator,
.datepicker_dashboard .time .minutes {
  background: #333;
  border: 0px;
  width: 50px;
  font-size: 32px;
  color: #fff;
  position: absolute;
  top: 10px;
  text-align: center;
  padding: 2px;
}

.datepicker_dashboard .time .hour {
  left: 15px;
}

/* the ":" between hour and minutes — transparent, narrower than the inputs */
.datepicker_dashboard .time .separator {
  background: transparent;
  width: 10px;
  left: 76px;
}

.datepicker_dashboard .time .minutes {
  left: 95px;
}

.datepicker_dashboard .time .ok {
  position: absolute;
  top: 65px;
  height: 32px;
  width: 136px;
  left: 15px;
  font-size: 20px;
}

/* days-grid — 7-column float grid; .dayN / .weekN classes are generated
   by the script to kill trailing margins on the last column/row
********************************************************/
.datepicker_dashboard .days .day {
  float: left;
  text-align: center;
  overflow: hidden;
  width: 23px;
  padding-top: 1px;
  height: 14px;
  margin: 0 1px 1px 0;
}

/* weekday abbreviations row */
.datepicker_dashboard .days .titles {
  height: 15px;
  margin-bottom: 2px;
  text-transform: uppercase;
  color: #aaa;
}

.datepicker_dashboard .days .day0 {
  margin-right: 0;
}

.datepicker_dashboard .days .week5 .day {
  margin-bottom: 0;
}

/* days-colors
********************************************************/
.datepicker_dashboard .days .week .day {
  cursor: pointer;
}

.datepicker_dashboard .days .week .day:hover {
  color: #ccff00;
}

/* filler days belonging to the previous/next month */
.datepicker_dashboard .days .otherMonth {
  color: #444444;
}

.datepicker_dashboard .days .selected {
  color: #ccff00;
}

/* months-grid — 3x4 grid of month names
********************************************************/
.datepicker_dashboard .months .month {
  float: left;
  cursor: pointer;
  text-align: center;
  padding-top: 6px;
  width: 55px;
  overflow: hidden;
  height: 21px;
  margin: 0 1px 1px 0;
}

.datepicker_dashboard .months .month3,
.datepicker_dashboard .months .month6,
.datepicker_dashboard .months .month9,
.datepicker_dashboard .months .month12 {
  margin-right: 0;
}

.datepicker_dashboard .months .month10,
.datepicker_dashboard .months .month11,
.datepicker_dashboard .months .month12 {
  margin-bottom: 0;
}

/* months-colors
********************************************************/
.datepicker_dashboard .months .month:hover {
  color: #ccff00;
}

.datepicker_dashboard .months .selected {
  color: #ccff00;
}

/* years-grid — 5x4 grid covering a 20-year span
********************************************************/
.datepicker_dashboard .years .year {
  float: left;
  cursor: pointer;
  text-align: center;
  padding-top: 6px;
  width: 32px;
  overflow: hidden;
  height: 21px;
  margin: 0 1px 1px 0;
}

.datepicker_dashboard .years .year4,
.datepicker_dashboard .years .year9,
.datepicker_dashboard .years .year14,
.datepicker_dashboard .years .year19 {
  margin-right: 0;
}

.datepicker_dashboard .years .year15,
.datepicker_dashboard .years .year16,
.datepicker_dashboard .years .year17,
.datepicker_dashboard .years .year18,
.datepicker_dashboard .years .year19 {
  margin-bottom: 0;
}

/* years-colors
********************************************************/
.datepicker_dashboard .years .year:hover {
  color: #ccff00;
}

.datepicker_dashboard .years .selected {
  color: #ccff00
}

/* global — dates outside the allowed range in any view
********************************************************/
.datepicker_dashboard .unavailable {
  color: #533 !important;
  cursor: default !important;
  text-decoration: line-through;
}
{ "content_hash": "47cdee81f02aa1089c21260e1633e68c", "timestamp": "", "source": "github", "line_count": 245, "max_line_length": 88, "avg_line_length": 20.473469387755102, "alnum_prop": 0.6349681020733652, "repo_name": "epsi-rns/AlumniBook-SF", "id": "3b8f06f0acf4b81e4439af7c9bd3a159038a2232", "size": "5016", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "plugins/sfMooDatePickerPlugin/web/css/datepicker_dashboard/datepicker_dashboard.css", "mode": "33261", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "340811" }, { "name": "PHP", "bytes": "728063" } ], "symlink_target": "" }
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE122_Heap_Based_Buffer_Overflow__c_CWE805_int_memmove_15.c
Label Definition File: CWE122_Heap_Based_Buffer_Overflow__c_CWE805.label.xml
Template File: sources-sink-15.tmpl.c
*/
/*
 * @description
 * CWE: 122 Heap Based Buffer Overflow
 * BadSource:  Allocate using malloc() and set data pointer to a small buffer
 * GoodSource: Allocate using malloc() and set data pointer to a large buffer
 * Sink: memmove
 *    BadSink : Copy int array to data using memmove
 * Flow Variant: 15 Control flow: switch(6)
 *
 * */

/* NOTE(review): this is a Juliet/SARD benchmark file. The overflow in the
 * bad() variant is the *intended* defect that static-analysis tools are
 * scored against — it must NOT be "fixed". */

#include "std_testcase.h"

#ifndef OMITBAD

/* bad: allocates a 50-int buffer, then the sink copies 100 ints into it —
 * a heap buffer overflow. The switch(6) wrapper is a control-flow
 * obfuscation pattern (variant 15); only case 6 ever executes. */
void CWE122_Heap_Based_Buffer_Overflow__c_CWE805_int_memmove_15_bad()
{
    int * data;
    data = NULL;
    switch(6)
    {
    case 6:
        /* FLAW: Allocate and point data to a small buffer that is smaller than the large buffer used in the sinks */
        data = (int *)malloc(50*sizeof(int));
        if (data == NULL) {exit(-1);}
        break;
    default:
        /* INCIDENTAL: CWE 561 Dead Code, the code below will never run */
        printLine("Benign, fixed string");
        break;
    }
    {
        int source[100] = {0}; /* fill with 0's */
        /* POTENTIAL FLAW: Possible buffer overflow if data < 100 */
        memmove(data, source, 100*sizeof(int));
        printIntLine(data[0]);
        free(data);
    }
}

#endif /* OMITBAD */

#ifndef OMITGOOD

/* goodG2B1() - use goodsource and badsink by changing the switch to switch(5) */
static void goodG2B1()
{
    int * data;
    data = NULL;
    switch(5)
    {
    case 6:
        /* INCIDENTAL: CWE 561 Dead Code, the code below will never run */
        printLine("Benign, fixed string");
        break;
    default:
        /* FIX: Allocate and point data to a large buffer that is at least as large as the large buffer used in the sink */
        data = (int *)malloc(100*sizeof(int));
        if (data == NULL) {exit(-1);}
        break;
    }
    {
        int source[100] = {0}; /* fill with 0's */
        /* POTENTIAL FLAW: Possible buffer overflow if data < 100 */
        memmove(data, source, 100*sizeof(int));
        printIntLine(data[0]);
        free(data);
    }
}

/* goodG2B2() - use goodsource and badsink by reversing the blocks in the switch */
static void goodG2B2()
{
    int * data;
    data = NULL;
    switch(6)
    {
    case 6:
        /* FIX: Allocate and point data to a large buffer that is at least as large as the large buffer used in the sink */
        data = (int *)malloc(100*sizeof(int));
        if (data == NULL) {exit(-1);}
        break;
    default:
        /* INCIDENTAL: CWE 561 Dead Code, the code below will never run */
        printLine("Benign, fixed string");
        break;
    }
    {
        int source[100] = {0}; /* fill with 0's */
        /* POTENTIAL FLAW: Possible buffer overflow if data < 100 */
        memmove(data, source, 100*sizeof(int));
        printIntLine(data[0]);
        free(data);
    }
}

/* good: runs both safe variants (100-int allocation matches the copy size). */
void CWE122_Heap_Based_Buffer_Overflow__c_CWE805_int_memmove_15_good()
{
    goodG2B1();
    goodG2B2();
}

#endif /* OMITGOOD */

/* Below is the main(). It is only used when building this testcase on
 * its own for testing or for building a binary to use in testing binary
 * analysis tools. It is not used when compiling all the testcases as one
 * application, which is how source code analysis tools are tested. */
#ifdef INCLUDEMAIN

int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    CWE122_Heap_Based_Buffer_Overflow__c_CWE805_int_memmove_15_good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    CWE122_Heap_Based_Buffer_Overflow__c_CWE805_int_memmove_15_bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}

#endif
{ "content_hash": "f98f613e1121b2ced898d5ea351bd099", "timestamp": "", "source": "github", "line_count": 135, "max_line_length": 123, "avg_line_length": 29.651851851851852, "alnum_prop": 0.5998001498875843, "repo_name": "JianpingZeng/xcc", "id": "865c6d494eac7e31d1d16f52cac8a99557a15753", "size": "4003", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "xcc/test/juliet/testcases/CWE122_Heap_Based_Buffer_Overflow/s08/CWE122_Heap_Based_Buffer_Overflow__c_CWE805_int_memmove_15.c", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_91) on Mon Jun 06 14:51:11 EDT 2016 --> <title>UpdateParameters (apache-cassandra API)</title> <meta name="date" content="2016-06-06"> <link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="UpdateParameters (apache-cassandra API)"; } } catch(err) { } //--> var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10}; var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; var tableTab = "tableTab"; var activeTableTab = "activeTableTab"; </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li class="navBarCell1Rev">Class</li> <li><a href="class-use/UpdateParameters.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../org/apache/cassandra/cql3/UntypedResultSet.Row.html" 
title="class in org.apache.cassandra.cql3"><span class="typeNameLink">Prev&nbsp;Class</span></a></li> <li><a href="../../../../org/apache/cassandra/cql3/UserTypes.html" title="class in org.apache.cassandra.cql3"><span class="typeNameLink">Next&nbsp;Class</span></a></li> </ul> <ul class="navList"> <li><a href="../../../../index.html?org/apache/cassandra/cql3/UpdateParameters.html" target="_top">Frames</a></li> <li><a href="UpdateParameters.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <div> <ul class="subNavList"> <li>Summary:&nbsp;</li> <li>Nested&nbsp;|&nbsp;</li> <li><a href="#field.summary">Field</a>&nbsp;|&nbsp;</li> <li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method.summary">Method</a></li> </ul> <ul class="subNavList"> <li>Detail:&nbsp;</li> <li><a href="#field.detail">Field</a>&nbsp;|&nbsp;</li> <li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method.detail">Method</a></li> </ul> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <!-- ======== START OF CLASS DATA ======== --> <div class="header"> <div class="subTitle">org.apache.cassandra.cql3</div> <h2 title="Class UpdateParameters" class="title">Class UpdateParameters</h2> </div> <div class="contentContainer"> <ul class="inheritance"> <li>java.lang.Object</li> <li> <ul class="inheritance"> <li>org.apache.cassandra.cql3.UpdateParameters</li> </ul> </li> </ul> <div class="description"> <ul class="blockList"> <li class="blockList"> <hr> <br> <pre>public class <span class="typeNameLabel">UpdateParameters</span> extends 
java.lang.Object</pre> <div class="block">Groups the parameters of an update query, and make building updates easier.</div> </li> </ul> </div> <div class="summary"> <ul class="blockList"> <li class="blockList"> <!-- =========== FIELD SUMMARY =========== --> <ul class="blockList"> <li class="blockList"><a name="field.summary"> <!-- --> </a> <h3>Field Summary</h3> <table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Field Summary table, listing fields, and an explanation"> <caption><span>Fields</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Field and Description</th> </tr> <tr class="altColor"> <td class="colFirst"><code><a href="../../../../org/apache/cassandra/config/CFMetaData.html" title="class in org.apache.cassandra.config">CFMetaData</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#metadata">metadata</a></span></code>&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><code><a href="../../../../org/apache/cassandra/cql3/QueryOptions.html" title="class in org.apache.cassandra.cql3">QueryOptions</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#options">options</a></span></code>&nbsp;</td> </tr> <tr class="altColor"> <td class="colFirst"><code><a href="../../../../org/apache/cassandra/db/PartitionColumns.html" title="class in org.apache.cassandra.db">PartitionColumns</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#updatedColumns">updatedColumns</a></span></code>&nbsp;</td> </tr> </table> </li> </ul> <!-- ======== CONSTRUCTOR SUMMARY ======== --> <ul class="blockList"> <li class="blockList"><a name="constructor.summary"> <!-- --> </a> <h3>Constructor Summary</h3> 
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation"> <caption><span>Constructors</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colOne" scope="col">Constructor and Description</th> </tr> <tr class="altColor"> <td class="colOne"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#UpdateParameters-org.apache.cassandra.config.CFMetaData-org.apache.cassandra.db.PartitionColumns-org.apache.cassandra.cql3.QueryOptions-long-int-java.util.Map-">UpdateParameters</a></span>(<a href="../../../../org/apache/cassandra/config/CFMetaData.html" title="class in org.apache.cassandra.config">CFMetaData</a>&nbsp;metadata, <a href="../../../../org/apache/cassandra/db/PartitionColumns.html" title="class in org.apache.cassandra.db">PartitionColumns</a>&nbsp;updatedColumns, <a href="../../../../org/apache/cassandra/cql3/QueryOptions.html" title="class in org.apache.cassandra.cql3">QueryOptions</a>&nbsp;options, long&nbsp;timestamp, int&nbsp;ttl, java.util.Map&lt;<a href="../../../../org/apache/cassandra/db/DecoratedKey.html" title="class in org.apache.cassandra.db">DecoratedKey</a>,<a href="../../../../org/apache/cassandra/db/partitions/Partition.html" title="interface in org.apache.cassandra.db.partitions">Partition</a>&gt;&nbsp;prefetchedRows)</code>&nbsp;</td> </tr> </table> </li> </ul> <!-- ========== METHOD SUMMARY =========== --> <ul class="blockList"> <li class="blockList"><a name="method.summary"> <!-- --> </a> <h3>Method Summary</h3> <table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation"> <caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span 
class="tabEnd">&nbsp;</span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tr id="i0" class="altColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#addCell-org.apache.cassandra.config.ColumnDefinition-java.nio.ByteBuffer-">addCell</a></span>(<a href="../../../../org/apache/cassandra/config/ColumnDefinition.html" title="class in org.apache.cassandra.config">ColumnDefinition</a>&nbsp;column, java.nio.ByteBuffer&nbsp;value)</code>&nbsp;</td> </tr> <tr id="i1" class="rowColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#addCell-org.apache.cassandra.config.ColumnDefinition-org.apache.cassandra.db.rows.CellPath-java.nio.ByteBuffer-">addCell</a></span>(<a href="../../../../org/apache/cassandra/config/ColumnDefinition.html" title="class in org.apache.cassandra.config">ColumnDefinition</a>&nbsp;column, <a href="../../../../org/apache/cassandra/db/rows/CellPath.html" title="class in org.apache.cassandra.db.rows">CellPath</a>&nbsp;path, java.nio.ByteBuffer&nbsp;value)</code>&nbsp;</td> </tr> <tr id="i2" class="altColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#addCounter-org.apache.cassandra.config.ColumnDefinition-long-">addCounter</a></span>(<a href="../../../../org/apache/cassandra/config/ColumnDefinition.html" title="class in org.apache.cassandra.config">ColumnDefinition</a>&nbsp;column, long&nbsp;increment)</code>&nbsp;</td> </tr> <tr id="i3" class="rowColor"> <td 
class="colFirst"><code>void</code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#addPrimaryKeyLivenessInfo--">addPrimaryKeyLivenessInfo</a></span>()</code>&nbsp;</td> </tr> <tr id="i4" class="altColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#addRowDeletion--">addRowDeletion</a></span>()</code>&nbsp;</td> </tr> <tr id="i5" class="rowColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#addTombstone-org.apache.cassandra.config.ColumnDefinition-">addTombstone</a></span>(<a href="../../../../org/apache/cassandra/config/ColumnDefinition.html" title="class in org.apache.cassandra.config">ColumnDefinition</a>&nbsp;column)</code>&nbsp;</td> </tr> <tr id="i6" class="altColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#addTombstone-org.apache.cassandra.config.ColumnDefinition-org.apache.cassandra.db.rows.CellPath-">addTombstone</a></span>(<a href="../../../../org/apache/cassandra/config/ColumnDefinition.html" title="class in org.apache.cassandra.config">ColumnDefinition</a>&nbsp;column, <a href="../../../../org/apache/cassandra/db/rows/CellPath.html" title="class in org.apache.cassandra.db.rows">CellPath</a>&nbsp;path)</code>&nbsp;</td> </tr> <tr id="i7" class="rowColor"> <td class="colFirst"><code><a href="../../../../org/apache/cassandra/db/rows/Row.html" title="interface in org.apache.cassandra.db.rows">Row</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#buildRow--">buildRow</a></span>()</code>&nbsp;</td> </tr> <tr id="i8" 
class="altColor"> <td class="colFirst"><code><a href="../../../../org/apache/cassandra/db/Clustering.html" title="interface in org.apache.cassandra.db">Clustering</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#currentClustering--">currentClustering</a></span>()</code>&nbsp;</td> </tr> <tr id="i9" class="rowColor"> <td class="colFirst"><code><a href="../../../../org/apache/cassandra/db/DeletionTime.html" title="class in org.apache.cassandra.db">DeletionTime</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#deletionTime--">deletionTime</a></span>()</code>&nbsp;</td> </tr> <tr id="i10" class="altColor"> <td class="colFirst"><code><a href="../../../../org/apache/cassandra/db/rows/Row.html" title="interface in org.apache.cassandra.db.rows">Row</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#getPrefetchedRow-org.apache.cassandra.db.DecoratedKey-org.apache.cassandra.db.Clustering-">getPrefetchedRow</a></span>(<a href="../../../../org/apache/cassandra/db/DecoratedKey.html" title="class in org.apache.cassandra.db">DecoratedKey</a>&nbsp;key, <a href="../../../../org/apache/cassandra/db/Clustering.html" title="interface in org.apache.cassandra.db">Clustering</a>&nbsp;clustering)</code>&nbsp;</td> </tr> <tr id="i11" class="rowColor"> <td class="colFirst"><code><a href="../../../../org/apache/cassandra/db/RangeTombstone.html" title="class in org.apache.cassandra.db">RangeTombstone</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#makeRangeTombstone-org.apache.cassandra.db.ClusteringComparator-org.apache.cassandra.db.Clustering-">makeRangeTombstone</a></span>(<a 
href="../../../../org/apache/cassandra/db/ClusteringComparator.html" title="class in org.apache.cassandra.db">ClusteringComparator</a>&nbsp;comparator, <a href="../../../../org/apache/cassandra/db/Clustering.html" title="interface in org.apache.cassandra.db">Clustering</a>&nbsp;clustering)</code>&nbsp;</td> </tr> <tr id="i12" class="altColor"> <td class="colFirst"><code><a href="../../../../org/apache/cassandra/db/RangeTombstone.html" title="class in org.apache.cassandra.db">RangeTombstone</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#makeRangeTombstone-org.apache.cassandra.db.Slice-">makeRangeTombstone</a></span>(<a href="../../../../org/apache/cassandra/db/Slice.html" title="class in org.apache.cassandra.db">Slice</a>&nbsp;slice)</code>&nbsp;</td> </tr> <tr id="i13" class="rowColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#newRow-org.apache.cassandra.db.Clustering-">newRow</a></span>(<a href="../../../../org/apache/cassandra/db/Clustering.html" title="interface in org.apache.cassandra.db">Clustering</a>&nbsp;clustering)</code>&nbsp;</td> </tr> <tr id="i14" class="altColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#setComplexDeletionTime-org.apache.cassandra.config.ColumnDefinition-">setComplexDeletionTime</a></span>(<a href="../../../../org/apache/cassandra/config/ColumnDefinition.html" title="class in org.apache.cassandra.config">ColumnDefinition</a>&nbsp;column)</code>&nbsp;</td> </tr> <tr id="i15" class="rowColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><span class="memberNameLink"><a 
href="../../../../org/apache/cassandra/cql3/UpdateParameters.html#setComplexDeletionTimeForOverwrite-org.apache.cassandra.config.ColumnDefinition-">setComplexDeletionTimeForOverwrite</a></span>(<a href="../../../../org/apache/cassandra/config/ColumnDefinition.html" title="class in org.apache.cassandra.config">ColumnDefinition</a>&nbsp;column)</code>&nbsp;</td> </tr> </table> <ul class="blockList"> <li class="blockList"><a name="methods.inherited.from.class.java.lang.Object"> <!-- --> </a> <h3>Methods inherited from class&nbsp;java.lang.Object</h3> <code>clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li> </ul> </li> </ul> </li> </ul> </div> <div class="details"> <ul class="blockList"> <li class="blockList"> <!-- ============ FIELD DETAIL =========== --> <ul class="blockList"> <li class="blockList"><a name="field.detail"> <!-- --> </a> <h3>Field Detail</h3> <a name="metadata"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>metadata</h4> <pre>public final&nbsp;<a href="../../../../org/apache/cassandra/config/CFMetaData.html" title="class in org.apache.cassandra.config">CFMetaData</a> metadata</pre> </li> </ul> <a name="updatedColumns"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>updatedColumns</h4> <pre>public final&nbsp;<a href="../../../../org/apache/cassandra/db/PartitionColumns.html" title="class in org.apache.cassandra.db">PartitionColumns</a> updatedColumns</pre> </li> </ul> <a name="options"> <!-- --> </a> <ul class="blockListLast"> <li class="blockList"> <h4>options</h4> <pre>public final&nbsp;<a href="../../../../org/apache/cassandra/cql3/QueryOptions.html" title="class in org.apache.cassandra.cql3">QueryOptions</a> options</pre> </li> </ul> </li> </ul> <!-- ========= CONSTRUCTOR DETAIL ======== --> <ul class="blockList"> <li class="blockList"><a name="constructor.detail"> <!-- --> </a> <h3>Constructor Detail</h3> <a 
name="UpdateParameters-org.apache.cassandra.config.CFMetaData-org.apache.cassandra.db.PartitionColumns-org.apache.cassandra.cql3.QueryOptions-long-int-java.util.Map-"> <!-- --> </a> <ul class="blockListLast"> <li class="blockList"> <h4>UpdateParameters</h4> <pre>public&nbsp;UpdateParameters(<a href="../../../../org/apache/cassandra/config/CFMetaData.html" title="class in org.apache.cassandra.config">CFMetaData</a>&nbsp;metadata, <a href="../../../../org/apache/cassandra/db/PartitionColumns.html" title="class in org.apache.cassandra.db">PartitionColumns</a>&nbsp;updatedColumns, <a href="../../../../org/apache/cassandra/cql3/QueryOptions.html" title="class in org.apache.cassandra.cql3">QueryOptions</a>&nbsp;options, long&nbsp;timestamp, int&nbsp;ttl, java.util.Map&lt;<a href="../../../../org/apache/cassandra/db/DecoratedKey.html" title="class in org.apache.cassandra.db">DecoratedKey</a>,<a href="../../../../org/apache/cassandra/db/partitions/Partition.html" title="interface in org.apache.cassandra.db.partitions">Partition</a>&gt;&nbsp;prefetchedRows) throws <a href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></pre> <dl> <dt><span class="throwsLabel">Throws:</span></dt> <dd><code><a href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></code></dd> </dl> </li> </ul> </li> </ul> <!-- ============ METHOD DETAIL ========== --> <ul class="blockList"> <li class="blockList"><a name="method.detail"> <!-- --> </a> <h3>Method Detail</h3> <a name="newRow-org.apache.cassandra.db.Clustering-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>newRow</h4> <pre>public&nbsp;void&nbsp;newRow(<a href="../../../../org/apache/cassandra/db/Clustering.html" title="interface in org.apache.cassandra.db">Clustering</a>&nbsp;clustering) throws <a 
href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></pre> <dl> <dt><span class="throwsLabel">Throws:</span></dt> <dd><code><a href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></code></dd> </dl> </li> </ul> <a name="currentClustering--"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>currentClustering</h4> <pre>public&nbsp;<a href="../../../../org/apache/cassandra/db/Clustering.html" title="interface in org.apache.cassandra.db">Clustering</a>&nbsp;currentClustering()</pre> </li> </ul> <a name="addPrimaryKeyLivenessInfo--"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>addPrimaryKeyLivenessInfo</h4> <pre>public&nbsp;void&nbsp;addPrimaryKeyLivenessInfo()</pre> </li> </ul> <a name="addRowDeletion--"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>addRowDeletion</h4> <pre>public&nbsp;void&nbsp;addRowDeletion()</pre> </li> </ul> <a name="addTombstone-org.apache.cassandra.config.ColumnDefinition-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>addTombstone</h4> <pre>public&nbsp;void&nbsp;addTombstone(<a href="../../../../org/apache/cassandra/config/ColumnDefinition.html" title="class in org.apache.cassandra.config">ColumnDefinition</a>&nbsp;column) throws <a href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></pre> <dl> <dt><span class="throwsLabel">Throws:</span></dt> <dd><code><a href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></code></dd> </dl> </li> </ul> <a name="addTombstone-org.apache.cassandra.config.ColumnDefinition-org.apache.cassandra.db.rows.CellPath-"> <!-- --> </a> <ul 
class="blockList"> <li class="blockList"> <h4>addTombstone</h4> <pre>public&nbsp;void&nbsp;addTombstone(<a href="../../../../org/apache/cassandra/config/ColumnDefinition.html" title="class in org.apache.cassandra.config">ColumnDefinition</a>&nbsp;column, <a href="../../../../org/apache/cassandra/db/rows/CellPath.html" title="class in org.apache.cassandra.db.rows">CellPath</a>&nbsp;path) throws <a href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></pre> <dl> <dt><span class="throwsLabel">Throws:</span></dt> <dd><code><a href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></code></dd> </dl> </li> </ul> <a name="addCell-org.apache.cassandra.config.ColumnDefinition-java.nio.ByteBuffer-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>addCell</h4> <pre>public&nbsp;void&nbsp;addCell(<a href="../../../../org/apache/cassandra/config/ColumnDefinition.html" title="class in org.apache.cassandra.config">ColumnDefinition</a>&nbsp;column, java.nio.ByteBuffer&nbsp;value) throws <a href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></pre> <dl> <dt><span class="throwsLabel">Throws:</span></dt> <dd><code><a href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></code></dd> </dl> </li> </ul> <a name="addCell-org.apache.cassandra.config.ColumnDefinition-org.apache.cassandra.db.rows.CellPath-java.nio.ByteBuffer-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>addCell</h4> <pre>public&nbsp;void&nbsp;addCell(<a href="../../../../org/apache/cassandra/config/ColumnDefinition.html" title="class in 
org.apache.cassandra.config">ColumnDefinition</a>&nbsp;column, <a href="../../../../org/apache/cassandra/db/rows/CellPath.html" title="class in org.apache.cassandra.db.rows">CellPath</a>&nbsp;path, java.nio.ByteBuffer&nbsp;value) throws <a href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></pre> <dl> <dt><span class="throwsLabel">Throws:</span></dt> <dd><code><a href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></code></dd> </dl> </li> </ul> <a name="addCounter-org.apache.cassandra.config.ColumnDefinition-long-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>addCounter</h4> <pre>public&nbsp;void&nbsp;addCounter(<a href="../../../../org/apache/cassandra/config/ColumnDefinition.html" title="class in org.apache.cassandra.config">ColumnDefinition</a>&nbsp;column, long&nbsp;increment) throws <a href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></pre> <dl> <dt><span class="throwsLabel">Throws:</span></dt> <dd><code><a href="../../../../org/apache/cassandra/exceptions/InvalidRequestException.html" title="class in org.apache.cassandra.exceptions">InvalidRequestException</a></code></dd> </dl> </li> </ul> <a name="setComplexDeletionTime-org.apache.cassandra.config.ColumnDefinition-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>setComplexDeletionTime</h4> <pre>public&nbsp;void&nbsp;setComplexDeletionTime(<a href="../../../../org/apache/cassandra/config/ColumnDefinition.html" title="class in org.apache.cassandra.config">ColumnDefinition</a>&nbsp;column)</pre> </li> </ul> <a name="setComplexDeletionTimeForOverwrite-org.apache.cassandra.config.ColumnDefinition-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> 
<h4>setComplexDeletionTimeForOverwrite</h4> <pre>public&nbsp;void&nbsp;setComplexDeletionTimeForOverwrite(<a href="../../../../org/apache/cassandra/config/ColumnDefinition.html" title="class in org.apache.cassandra.config">ColumnDefinition</a>&nbsp;column)</pre> </li> </ul> <a name="buildRow--"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>buildRow</h4> <pre>public&nbsp;<a href="../../../../org/apache/cassandra/db/rows/Row.html" title="interface in org.apache.cassandra.db.rows">Row</a>&nbsp;buildRow()</pre> </li> </ul> <a name="deletionTime--"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>deletionTime</h4> <pre>public&nbsp;<a href="../../../../org/apache/cassandra/db/DeletionTime.html" title="class in org.apache.cassandra.db">DeletionTime</a>&nbsp;deletionTime()</pre> </li> </ul> <a name="makeRangeTombstone-org.apache.cassandra.db.ClusteringComparator-org.apache.cassandra.db.Clustering-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>makeRangeTombstone</h4> <pre>public&nbsp;<a href="../../../../org/apache/cassandra/db/RangeTombstone.html" title="class in org.apache.cassandra.db">RangeTombstone</a>&nbsp;makeRangeTombstone(<a href="../../../../org/apache/cassandra/db/ClusteringComparator.html" title="class in org.apache.cassandra.db">ClusteringComparator</a>&nbsp;comparator, <a href="../../../../org/apache/cassandra/db/Clustering.html" title="interface in org.apache.cassandra.db">Clustering</a>&nbsp;clustering)</pre> </li> </ul> <a name="makeRangeTombstone-org.apache.cassandra.db.Slice-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>makeRangeTombstone</h4> <pre>public&nbsp;<a href="../../../../org/apache/cassandra/db/RangeTombstone.html" title="class in org.apache.cassandra.db">RangeTombstone</a>&nbsp;makeRangeTombstone(<a href="../../../../org/apache/cassandra/db/Slice.html" title="class in org.apache.cassandra.db">Slice</a>&nbsp;slice)</pre> </li> </ul> <a 
name="getPrefetchedRow-org.apache.cassandra.db.DecoratedKey-org.apache.cassandra.db.Clustering-"> <!-- --> </a> <ul class="blockListLast"> <li class="blockList"> <h4>getPrefetchedRow</h4> <pre>public&nbsp;<a href="../../../../org/apache/cassandra/db/rows/Row.html" title="interface in org.apache.cassandra.db.rows">Row</a>&nbsp;getPrefetchedRow(<a href="../../../../org/apache/cassandra/db/DecoratedKey.html" title="class in org.apache.cassandra.db">DecoratedKey</a>&nbsp;key, <a href="../../../../org/apache/cassandra/db/Clustering.html" title="interface in org.apache.cassandra.db">Clustering</a>&nbsp;clustering)</pre> </li> </ul> </li> </ul> </li> </ul> </div> </div> <!-- ========= END OF CLASS DATA ========= --> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li class="navBarCell1Rev">Class</li> <li><a href="class-use/UpdateParameters.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../org/apache/cassandra/cql3/UntypedResultSet.Row.html" title="class in org.apache.cassandra.cql3"><span class="typeNameLink">Prev&nbsp;Class</span></a></li> <li><a href="../../../../org/apache/cassandra/cql3/UserTypes.html" title="class in org.apache.cassandra.cql3"><span class="typeNameLink">Next&nbsp;Class</span></a></li> </ul> <ul class="navList"> <li><a href="../../../../index.html?org/apache/cassandra/cql3/UpdateParameters.html" 
target="_top">Frames</a></li> <li><a href="UpdateParameters.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <div> <ul class="subNavList"> <li>Summary:&nbsp;</li> <li>Nested&nbsp;|&nbsp;</li> <li><a href="#field.summary">Field</a>&nbsp;|&nbsp;</li> <li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method.summary">Method</a></li> </ul> <ul class="subNavList"> <li>Detail:&nbsp;</li> <li><a href="#field.detail">Field</a>&nbsp;|&nbsp;</li> <li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method.detail">Method</a></li> </ul> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &copy; 2016 The Apache Software Foundation</small></p> </body> </html>
{ "content_hash": "18bacc1a50118899380deaef31f68c1f", "timestamp": "", "source": "github", "line_count": 589, "max_line_length": 452, "avg_line_length": 53.38370118845501, "alnum_prop": 0.6760487230862195, "repo_name": "jasonwee/videoOnCloud", "id": "efabd02ac4754c2c7196930005b1ecc733e2547b", "size": "31443", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/cassandra/apache-cassandra-3.7/javadoc/org/apache/cassandra/cql3/UpdateParameters.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "116270" }, { "name": "C", "bytes": "2209717" }, { "name": "C++", "bytes": "375267" }, { "name": "CSS", "bytes": "1134648" }, { "name": "Dockerfile", "bytes": "1656" }, { "name": "HTML", "bytes": "306558398" }, { "name": "Java", "bytes": "1465506" }, { "name": "JavaScript", "bytes": "9028509" }, { "name": "Jupyter Notebook", "bytes": "30907" }, { "name": "Less", "bytes": "107003" }, { "name": "PHP", "bytes": "856" }, { "name": "PowerShell", "bytes": "77807" }, { "name": "Pug", "bytes": "2968" }, { "name": "Python", "bytes": "1001861" }, { "name": "R", "bytes": "7390" }, { "name": "Roff", "bytes": "3553" }, { "name": "Shell", "bytes": "206191" }, { "name": "Thrift", "bytes": "80564" }, { "name": "XSLT", "bytes": "4740" } ], "symlink_target": "" }
require "cases/helper"
require "models/book"
require "models/post"
require "models/author"
require "models/event"

module ActiveRecord
  # Exercises the connection-adapter API surface that every adapter must
  # implement: schema introspection, index management, error translation,
  # and the select_* query helpers.
  class AdapterTest < ActiveRecord::TestCase
    def setup
      @connection = ActiveRecord::Base.connection
    end

    ##
    # PostgreSQL does not support null bytes in strings
    unless current_adapter?(:PostgreSQLAdapter) ||
        (current_adapter?(:SQLite3Adapter) && !ActiveRecord::Base.connection.prepared_statements)
      def test_update_prepared_statement
        b = Book.create(name: "my \x00 book")
        b.reload
        assert_equal "my \x00 book", b.name
        b.update_attributes(name: "my other \x00 book")
        b.reload
        assert_equal "my other \x00 book", b.name
      end
    end

    def test_create_record_with_pk_as_zero
      Book.create(id: 0)
      assert_equal 0, Book.find(0).id
      assert_nothing_raised { Book.destroy(0) }
    end

    def test_valid_column
      # Every type the adapter advertises must round-trip through valid_type?.
      @connection.native_database_types.each_key do |type|
        assert @connection.valid_type?(type)
      end
    end

    def test_invalid_column
      assert_not @connection.valid_type?(:foobar)
    end

    def test_tables
      tables = @connection.tables
      assert_includes tables, "accounts"
      assert_includes tables, "authors"
      assert_includes tables, "tasks"
      assert_includes tables, "topics"
    end

    def test_table_exists?
      assert @connection.table_exists?("accounts")
      assert @connection.table_exists?(:accounts)
      assert_not @connection.table_exists?("nonexistingtable")
      assert_not @connection.table_exists?("'")
      assert_not @connection.table_exists?(nil)
    end

    def test_data_sources
      data_sources = @connection.data_sources
      assert_includes data_sources, "accounts"
      assert_includes data_sources, "authors"
      assert_includes data_sources, "tasks"
      assert_includes data_sources, "topics"
    end

    def test_data_source_exists?
      assert @connection.data_source_exists?("accounts")
      assert @connection.data_source_exists?(:accounts)
      assert_not @connection.data_source_exists?("nonexistingtable")
      assert_not @connection.data_source_exists?("'")
      assert_not @connection.data_source_exists?(nil)
    end

    def test_indexes
      idx_name = "accounts_idx"

      indexes = @connection.indexes("accounts")
      assert indexes.empty?

      @connection.add_index :accounts, :firm_id, name: idx_name
      indexes = @connection.indexes("accounts")
      assert_equal "accounts", indexes.first.table
      assert_equal idx_name, indexes.first.name
      assert !indexes.first.unique
      assert_equal ["firm_id"], indexes.first.columns
    ensure
      # Best-effort cleanup; the index may not exist if add_index failed.
      @connection.remove_index(:accounts, name: idx_name) rescue nil
    end

    def test_remove_index_when_name_and_wrong_column_name_specified
      index_name = "accounts_idx"

      @connection.add_index :accounts, :firm_id, name: index_name
      assert_raises ArgumentError do
        @connection.remove_index :accounts, name: index_name, column: :wrong_column_name
      end
    ensure
      @connection.remove_index(:accounts, name: index_name)
    end

    def test_current_database
      # Not every adapter implements current_database; only assert when present.
      if @connection.respond_to?(:current_database)
        assert_equal ARTest.connection_config["arunit"]["database"], @connection.current_database
      end
    end

    if current_adapter?(:Mysql2Adapter)
      def test_charset
        assert_not_nil @connection.charset
        assert_not_equal "character_set_database", @connection.charset
        assert_equal @connection.show_variable("character_set_database"), @connection.charset
      end

      def test_collation
        assert_not_nil @connection.collation
        assert_not_equal "collation_database", @connection.collation
        assert_equal @connection.show_variable("collation_database"), @connection.collation
      end

      def test_show_nonexistent_variable_returns_nil
        assert_nil @connection.show_variable("foo_bar_baz")
      end

      def test_not_specifying_database_name_for_cross_database_selects
        begin
          assert_nothing_raised do
            ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations["arunit"].except(:database))

            config = ARTest.connection_config
            ActiveRecord::Base.connection.execute(
              "SELECT #{config['arunit']['database']}.pirates.*, #{config['arunit2']['database']}.courses.* " \
              "FROM #{config['arunit']['database']}.pirates, #{config['arunit2']['database']}.courses"
            )
          end
        ensure
          ActiveRecord::Base.establish_connection :arunit
        end
      end
    end

    def test_table_alias
      # Temporarily shrink the alias length on this connection's singleton
      # class so truncation behavior can be observed, then restore it.
      def @connection.test_table_alias_length() 10; end
      class << @connection
        alias_method :old_table_alias_length, :table_alias_length
        alias_method :table_alias_length, :test_table_alias_length
      end

      assert_equal "posts", @connection.table_alias_for("posts")
      assert_equal "posts_comm", @connection.table_alias_for("posts_comments")
      assert_equal "dbo_posts", @connection.table_alias_for("dbo.posts")

      class << @connection
        remove_method :table_alias_length
        alias_method :table_alias_length, :old_table_alias_length
      end
    end

    def test_uniqueness_violations_are_translated_to_specific_exception
      @connection.execute "INSERT INTO subscribers(nick) VALUES('me')"
      error = assert_raises(ActiveRecord::RecordNotUnique) do
        @connection.execute "INSERT INTO subscribers(nick) VALUES('me')"
      end

      assert_not_nil error.cause
    end

    def test_not_null_violations_are_translated_to_specific_exception
      error = assert_raises(ActiveRecord::NotNullViolation) do
        Post.create
      end

      assert_not_nil error.cause
    end

    unless current_adapter?(:SQLite3Adapter)
      def test_value_limit_violations_are_translated_to_specific_exception
        error = assert_raises(ActiveRecord::ValueTooLong) do
          Event.create(title: "abcdefgh")
        end

        assert_not_nil error.cause
      end

      def test_numeric_value_out_of_ranges_are_translated_to_specific_exception
        error = assert_raises(ActiveRecord::RangeError) do
          Book.connection.create("INSERT INTO books(author_id) VALUES (9223372036854775808)")
        end

        assert_not_nil error.cause
      end
    end

    def test_exceptions_from_notifications_are_not_translated
      original_error = StandardError.new("This StandardError shouldn't get translated")
      subscriber = ActiveSupport::Notifications.subscribe("sql.active_record") { raise original_error }
      actual_error = assert_raises(StandardError) do
        @connection.execute("SELECT * FROM posts")
      end

      assert_equal original_error, actual_error
    ensure
      ActiveSupport::Notifications.unsubscribe(subscriber) if subscriber
    end

    def test_database_related_exceptions_are_translated_to_statement_invalid
      error = assert_raises(ActiveRecord::StatementInvalid) do
        @connection.execute("This is a syntax error")
      end

      assert_instance_of ActiveRecord::StatementInvalid, error
      assert_kind_of Exception, error.cause
    end

    def test_select_all_always_return_activerecord_result
      result = @connection.select_all "SELECT * FROM posts"
      assert result.is_a?(ActiveRecord::Result)
    end

    if ActiveRecord::Base.connection.prepared_statements
      def test_select_all_with_legacy_binds
        post = Post.create!(title: "foo", body: "bar")
        expected = @connection.select_all("SELECT * FROM posts WHERE id = #{post.id}")
        result = @connection.select_all("SELECT * FROM posts WHERE id = #{Arel::Nodes::BindParam.new(nil).to_sql}", nil, [[nil, post.id]])
        assert_equal expected.to_hash, result.to_hash
      end
    end

    def test_select_methods_passing_a_association_relation
      author = Author.create!(name: "john")
      Post.create!(author: author, title: "foo", body: "bar")
      query = author.posts.where(title: "foo").select(:title)
      assert_equal({ "title" => "foo" }, @connection.select_one(query))
      assert @connection.select_all(query).is_a?(ActiveRecord::Result)
      assert_equal "foo", @connection.select_value(query)
      assert_equal ["foo"], @connection.select_values(query)
    end

    def test_select_methods_passing_a_relation
      Post.create!(title: "foo", body: "bar")
      query = Post.where(title: "foo").select(:title)
      assert_equal({ "title" => "foo" }, @connection.select_one(query))
      assert @connection.select_all(query).is_a?(ActiveRecord::Result)
      assert_equal "foo", @connection.select_value(query)
      assert_equal ["foo"], @connection.select_values(query)
    end

    test "type_to_sql returns a String for unmapped types" do
      assert_equal "special_db_type", @connection.type_to_sql(:special_db_type)
    end

    unless current_adapter?(:PostgreSQLAdapter)
      def test_log_invalid_encoding
        # A non-UTF-8 exception message must not blow up the SQL logger.
        error = assert_raises RuntimeError do
          @connection.send :log, "SELECT 'ы' FROM DUAL" do
            raise "ы".force_encoding(Encoding::ASCII_8BIT)
          end
        end

        assert_not_nil error.message
      end
    end
  end

  # Foreign-key constraint violations require committed rows, so these run
  # without the usual wrapping transaction.
  class AdapterForeignKeyTest < ActiveRecord::TestCase
    self.use_transactional_tests = false

    def setup
      @connection = ActiveRecord::Base.connection
    end

    def test_foreign_key_violations_are_translated_to_specific_exception_with_validate_false
      klass_has_fk = Class.new(ActiveRecord::Base) do
        self.table_name = "fk_test_has_fk"
      end

      error = assert_raises(ActiveRecord::InvalidForeignKey) do
        has_fk = klass_has_fk.new
        has_fk.fk_id = 1231231231
        has_fk.save(validate: false)
      end

      assert_not_nil error.cause
    end

    def test_foreign_key_violations_are_translated_to_specific_exception
      error = assert_raises(ActiveRecord::InvalidForeignKey) do
        insert_into_fk_test_has_fk
      end

      assert_not_nil error.cause
    end

    def test_disable_referential_integrity
      assert_nothing_raised do
        @connection.disable_referential_integrity do
          insert_into_fk_test_has_fk
          # should delete created record as otherwise disable_referential_integrity will try to enable constraints
          # after executed block and will fail (at least on Oracle)
          @connection.execute "DELETE FROM fk_test_has_fk"
        end
      end
    end

    private

      def insert_into_fk_test_has_fk
        # Oracle adapter uses prefetched primary key values from sequence and passes them to connection adapter insert method
        if @connection.prefetch_primary_key?
          id_value = @connection.next_sequence_value(@connection.default_sequence_name("fk_test_has_fk", "id"))
          @connection.execute "INSERT INTO fk_test_has_fk (id,fk_id) VALUES (#{id_value},0)"
        else
          @connection.execute "INSERT INTO fk_test_has_fk (fk_id) VALUES (0)"
        end
      end
  end

  # Connection lifecycle tests (reconnect/disconnect) cannot run inside a
  # wrapping transaction either.
  class AdapterTestWithoutTransaction < ActiveRecord::TestCase
    self.use_transactional_tests = false

    class Klass < ActiveRecord::Base
    end

    def setup
      Klass.establish_connection :arunit
      @connection = Klass.connection
    end

    teardown do
      Klass.remove_connection
    end

    unless in_memory_db?
      test "transaction state is reset after a reconnect" do
        @connection.begin_transaction
        assert @connection.transaction_open?
        @connection.reconnect!
        assert !@connection.transaction_open?
      end

      test "transaction state is reset after a disconnect" do
        @connection.begin_transaction
        assert @connection.transaction_open?
        @connection.disconnect!
        assert !@connection.transaction_open?
      end
    end

    # test resetting sequences in odd tables in PostgreSQL
    if ActiveRecord::Base.connection.respond_to?(:reset_pk_sequence!)
      require "models/movie"
      require "models/subscriber"

      def test_reset_empty_table_with_custom_pk
        Movie.delete_all
        Movie.connection.reset_pk_sequence! "movies"
        assert_equal 1, Movie.create(name: "fight club").id
      end

      def test_reset_table_with_non_integer_pk
        Subscriber.delete_all
        Subscriber.connection.reset_pk_sequence! "subscribers"
        sub = Subscriber.new(name: "robert drake")
        sub.id = "bob drake"
        assert_nothing_raised { sub.save! }
      end
    end
  end
end
{ "content_hash": "2fa64f61ed0b7ee3aa5a37908a43a46d", "timestamp": "", "source": "github", "line_count": 370, "max_line_length": 138, "avg_line_length": 34.07027027027027, "alnum_prop": 0.6658733936220848, "repo_name": "kaspth/rails", "id": "6e045785769dbe9fe66d879cdd14507a2c284839", "size": "12639", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "activerecord/test/cases/adapter_test.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "34354" }, { "name": "CoffeeScript", "bytes": "40518" }, { "name": "HTML", "bytes": "202986" }, { "name": "JavaScript", "bytes": "98605" }, { "name": "Ruby", "bytes": "11010123" }, { "name": "Yacc", "bytes": "968" } ], "symlink_target": "" }
require_relative '../../test_helper' require_relative 'flow_test_helper' require "smart_answer_flows/register-a-birth" class RegisterABirthTest < ActiveSupport::TestCase include FlowTestHelper setup do @location_slugs = %w(afghanistan algeria andorra australia bangladesh barbados belize cambodia cameroon democratic-republic-of-the-congo el-salvador estonia germany guatemala grenada india iran iraq israel laos libya maldives morocco netherlands north-korea pakistan philippines pitcairn-island saint-barthelemy serbia sierra-leone somalia spain sri-lanka st-kitts-and-nevis st-martin thailand turkey uganda united-arab-emirates venezuela) stub_world_locations(@location_slugs) setup_for_testing_flow SmartAnswer::RegisterABirthFlow end should "ask which country the child was born in" do assert_current_node :country_of_birth? end context "answer Turkey" do setup do add_response 'turkey' end should "ask which parent has british nationality" do assert_current_node :who_has_british_nationality? end context "answer mother" do setup do add_response 'mother' end should "ask if you are married or civil partnered" do assert_current_node :married_couple_or_civil_partnership? end context "answer no" do setup do add_response 'no' end should "ask where you are now and go to oru result" do add_response "same_country" assert_current_node :oru_result assert current_state.calculator.send(:document_return_fees).present? 
end end # not married/cp end # mother end # Turkey context "answer with a commonwealth country" do should "give the commonwealth result" do add_response 'australia' assert_current_node :commonwealth_result end end # commonweath result context "answer Andorra" do should "store the correct registration country" do add_response 'andorra' add_response 'father' add_response 'yes' add_response 'same_country' assert_equal 'spain', current_state.calculator.registration_country end end # Andorra context "answer Israel" do should "show correct document variants" do add_response 'israel' add_response 'father' add_response 'yes' add_response 'same_country' assert_current_node :oru_result end end # Andorra context "answer Iran" do should "ask who has British nationality" do add_response 'iran' assert_current_node :who_has_british_nationality? end end # Iran context "answer Spain" do setup do add_response 'spain' end should "store this as the registration country" do assert_equal 'spain', current_state.calculator.registration_country end should "ask which parent has british nationality" do assert_current_node :who_has_british_nationality? end context "answer father" do setup do add_response 'father' end should "ask if you are married or civil partnered" do assert_current_node :married_couple_or_civil_partnership? end context "answer no" do setup do add_response 'no' end should "ask when the child was born" do assert_current_node :childs_date_of_birth? end context "answer pre 1st July 2006" do should "give the homeoffice result" do add_response '2006-06-30' assert_current_node :homeoffice_result end end context "answer on or after 1st July 2006" do setup do add_response '2006-07-01' end should "ask where you are now" do assert_current_node :where_are_you_now? 
end end end # not married/cp end # father is british citizen context "answer mother and father" do setup do add_response 'mother_and_father' end should "ask if you are married or civil partnered" do assert_current_node :married_couple_or_civil_partnership? end context "answer yes" do setup do add_response 'yes' end should "ask where you are now" do assert_current_node :where_are_you_now? end context "answer back in the UK" do should "give the oru result" do add_response 'in_the_uk' assert_equal 'spain', current_state.calculator.registration_country assert_current_node :oru_result assert_equal "http://www.exteriores.gob.es/Portal/en/ServiciosAlCiudadano/Paginas/Traductoresas---Int%C3%A9rpretes-Juradosas.aspx", current_state.calculator.translator_link_url end end end # married end # Spain end context "answer Afghanistan" do setup do add_response "afghanistan" end should "give the ORU result and phase-5-specific intro and custom documents return waiting time" do add_response "mother_and_father" add_response "yes" add_response "same_country" assert_current_node :oru_result assert_equal 'Afghanistan', current_state.calculator.registration_country_name_lowercase_prefix assert_equal 'mother_and_father', current_state.calculator.british_national_parent assert_equal '6 months', current_state.calculator.custom_waiting_time assert_equal '/government/publications/afghanistan-list-of-lawyers', current_state.calculator.translator_link_url end should "give the no_birth_certificate_result if the child born outside of marriage" do add_response "mother" add_response "no" add_response "same_country" assert_current_node :no_birth_certificate_result end end context "answer Iraq" do setup do add_response "iraq" end should "give the no_birth_certificate_result if the child born outside of marriage" do add_response "mother" add_response "no" add_response "same_country" assert_current_node :no_birth_certificate_result end should "give the no_birth_certificate_result if the child born 
outside of marriage and currently in another country" do add_response "mother" add_response "no" add_response "another_country" assert_current_node :no_birth_certificate_result end end context "born in Bangladesh but currently in Pakistan" do should "give the ORU result" do add_response "bangladesh" add_response "mother_and_father" add_response "yes" add_response "another_country" add_response "pakistan" assert_current_node :oru_result assert_equal '8 months', current_state.calculator.custom_waiting_time end end # Afghanistan context "answer Pakistan" do setup do add_response "pakistan" end should "give the oru result if currently in the UK" do add_response "father" add_response "yes" add_response "in_the_uk" assert_current_node :oru_result assert_equal '6 months', current_state.calculator.custom_waiting_time end should "give the oru result with phase-5-specific introduction if currently in Pakistan" do add_response "father" add_response "yes" add_response "same_country" assert_current_node :oru_result end should "give the no_birth_certificate_result if the child born outside of marriage" do add_response "mother" add_response "no" add_response "same_country" assert_current_node :no_birth_certificate_result end should "give the no_birth_certificate_result if the child born outside of marriage and currently in another country" do add_response "mother" add_response "no" add_response "another_country" assert_current_node :no_birth_certificate_result end end # Pakistan context "answer Belize" do should "give the embassy result" do add_response "belize" add_response "father" add_response "no" add_response "2006-07-01" add_response "same_country" assert_current_node :oru_result end # Not married or CP end # Belize context "answer libya" do should "give the no embassy result" do add_response "libya" assert_current_node :no_embassy_result end end # Libya context 'answer Somalia' do should 'give the no embassy result' do add_response 'somalia' assert_current_node 
:no_embassy_result end end # Somalia context "answer barbados" do should "give the oru result" do add_response "barbados" add_response "father" add_response "yes" add_response "same_country" assert_current_node :oru_result assert_equal 'father', current_state.calculator.british_national_parent end # Not married or CP end # Barbados context "answer united arab emirates" do setup do add_response "united-arab-emirates" end should "give the no birth certificate result with same country phrase" do add_response "mother_and_father" add_response "no" add_response "same_country" assert_current_node :no_birth_certificate_result end # Not married or CP should "give the no birth certificate result with another country phrase" do add_response "mother_and_father" add_response "no" add_response "another_country" assert_current_node :no_birth_certificate_result end # Not married or CP should "give the oru result" do add_response "father" add_response "yes" add_response "same_country" assert_current_node :oru_result assert_equal 'father', current_state.calculator.british_national_parent assert_equal '/government/publications/united-arab-emirates-list-of-lawyers', current_state.calculator.translator_link_url end end # UAE context "el-salvador, where you have to register in guatemala" do setup do add_response "el-salvador" end should "calculate the registration country as Guatemala" do add_response 'father' add_response 'yes' add_response 'same_country' assert_equal 'guatemala', current_state.calculator.registration_country assert_equal 'Guatemala', current_state.calculator.registration_country_name_lowercase_prefix end end context "laos, no longer have to register in thailand" do setup do add_response "laos" end should "calculate the registration country as Laos" do add_response 'father' add_response 'yes' add_response 'same_country' assert_equal 'laos', current_state.calculator.registration_country assert_equal 'Laos', 
current_state.calculator.registration_country_name_lowercase_prefix end end context "maldives, where you have to register in sri lanka" do setup do add_response "maldives" end should "calculate the registration country as Sri Lanka" do add_response 'father' add_response 'yes' add_response 'same_country' assert_equal 'sri-lanka', current_state.calculator.registration_country end end context "Sri Lanka" do setup do add_response "sri-lanka" end should "show a custom documents variant" do add_response 'mother' add_response 'no' add_response 'same_country' assert_current_node :oru_result end end context "India" do setup do add_response "india" end should "show a custom documents variant" do add_response 'mother' add_response 'no' add_response 'same_country' assert_current_node :oru_result end end context "child born in grenada, parent in St kitts" do should "calculate the registration country as barbados" do add_response 'grenada' add_response 'mother' add_response 'yes' add_response 'another_country' add_response 'st-kitts-and-nevis' assert_current_node :oru_result end end context "answer Netherlands" do should "go to oru result" do add_response 'netherlands' add_response 'father' add_response 'yes' add_response 'same_country' assert_current_node :oru_result assert_equal '/government/publications/netherlands-list-of-lawyers', current_state.calculator.translator_link_url end end # Netherlands context "answer serbia" do should "check for clickbook and give embassy result" do add_response "serbia" add_response "father" add_response "yes" add_response "same_country" assert_current_node :oru_result assert_equal '/government/publications/list-of-translators-and-interpreters-in-serbia', current_state.calculator.translator_link_url end end # Serbia context "answer estonia" do should "show cash, credit card or cheque condition and give embassy result" do add_response "estonia" add_response "mother_and_father" add_response "yes" add_response "same_country" assert_current_node 
:oru_result end end # Estonia context "answer united-arab-emirates, married" do should "go to oru result" do add_response "united-arab-emirates" add_response "mother_and_father" add_response "yes" add_response "same_country" assert_current_node :oru_result assert_equal '/government/publications/united-arab-emirates-list-of-lawyers', current_state.calculator.translator_link_url end end # UAE context "answer oru country and in another country" do should "go to oru result" do add_response "united-arab-emirates" add_response "mother_and_father" add_response "yes" add_response "another_country" add_response "germany" assert_current_node :oru_result end end context "answer Morocco and in another country " do should "show :oru_result outcome" do add_response "morocco" add_response "mother_and_father" add_response "no" add_response "another_country" add_response "germany" assert_current_node :oru_result end end context "answer Germany and in Cameroon" do should "show oru_result outcome" do add_response "germany" add_response "mother_and_father" add_response "no" add_response "another_country" add_response "cameroon" assert_current_node :oru_result end end context "answer Venezuela and still in Venezuela" do should "show oru_result outcome" do add_response "venezuela" add_response "mother_and_father" add_response "no" add_response "same_country" assert_current_node :oru_result end end context "answer Philippines" do setup do add_response "philippines" end should "show ORU outcome and require extra documents regardles of the current location" do add_response "mother" add_response "no" add_response "another_country" add_response "australia" assert_current_node :oru_result end should "show ORU outcome and require even more extra documents if only the father is british" do add_response "father" add_response "no" add_response "2014-03-04" add_response "same_country" assert_current_node :oru_result end end context "answer Uganda" do should "show ORU outcome and require extra 
documents" do add_response "uganda" add_response "mother" add_response "no" add_response "same_country" assert_current_node :oru_result end end context "North Korea" do setup do add_response "north-korea" add_response "mother_and_father" add_response "yes" end should "lead to the North Korea-specific result if the user is still there" do add_response "same_country" assert_current_node :north_korea_result end should "lead to the ORU result if in the UK" do add_response "in_the_uk" assert_current_node :oru_result end should "lead to the ORU result if in another country" do add_response "another_country" add_response "netherlands" assert_current_node :oru_result end end context "Democratic Republic of the Congo" do should "lead to an ORU outcome with a custom translator link" do add_response "democratic-republic-of-the-congo" add_response "mother" add_response "no" add_response "same_country" assert_current_node :oru_result assert_equal '/government/publications/democratic-republic-of-congo-list-of-lawyers', current_state.calculator.translator_link_url end end context "Pitcairn Island" do should "lead to the ORU result" do add_response 'pitcairn-island' add_response 'mother' add_response 'no' add_response 'same_country' assert_current_node :oru_result end end context "St Martin" do should "lead to the ORU result" do add_response 'st-martin' add_response 'mother' add_response 'no' add_response 'same_country' assert_current_node :oru_result end end context "Saint Barthelemy" do should "lead to the ORU result" do add_response 'saint-barthelemy' add_response 'mother' add_response 'no' add_response 'same_country' assert_current_node :oru_result end end context "Registration duration" do should "display custom duration if child born in a lower risk (non phase-5) country and currently in North Korea" do add_response "netherlands" add_response "mother" add_response "yes" add_response "another_country" add_response "north-korea" assert_current_node :north_korea_result end 
should "display 3 months if child born in a lower risk (non phase-5) country and currently in Cambodia" do add_response "netherlands" add_response "mother" add_response "yes" add_response "another_country" add_response "cambodia" assert_current_node :oru_result end end context "ORU payment options" do should "display a custom payment message if currently in Algeria" do add_response "netherlands" add_response "mother" add_response "yes" add_response "another_country" add_response "algeria" assert_current_node :oru_result end should "display a default payment message if currently not in Algeria" do add_response "algeria" add_response "mother" add_response "yes" add_response "another_country" add_response "netherlands" assert_current_node :oru_result end should "display a default payment message if child was born in Algeria but currently in the UK" do add_response "algeria" add_response "mother" add_response "yes" add_response "in_the_uk" assert_current_node :oru_result end end end
{ "content_hash": "c96834c1d2271355612594e6533b5b98", "timestamp": "", "source": "github", "line_count": 605, "max_line_length": 443, "avg_line_length": 31.312396694214875, "alnum_prop": 0.6745143581081081, "repo_name": "aledelcueto/smart-answers", "id": "81da3667dbd339ce5d38451fe472a5fccf512a1f", "size": "18944", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/integration/smart_answer_flows/register_a_birth_test.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "7922" }, { "name": "HTML", "bytes": "1601786" }, { "name": "JavaScript", "bytes": "5110" }, { "name": "Ruby", "bytes": "2060581" }, { "name": "Shell", "bytes": "4231" } ], "symlink_target": "" }
module SmartAnswer::Calculators class ChildMaintenanceCalculator include ActiveModel::Model attr_accessor :number_of_children attr_accessor :benefits attr_accessor :paying_or_receiving attr_accessor :income, :number_of_other_children, :number_of_shared_care_nights SCHEME_BASE_AMOUNT = 7.00 REDUCED_RATE_THRESHOLD = 100 BASIC_PLUS_RATE_THRESHOLD = 800 SHARED_CARE_MAX_RELIEF_EXTRA_AMOUNT = 7.00 SCHEME_MAX_INCOME = 3000 def initialize(attributes = {}) super @calculator_data = self.class.child_maintenance_data end # called after we enter income (we know benefits == no) def rate_type if @benefits == 'yes' if @number_of_shared_care_nights > 0 :nil else :flat end else # work out the rate based on income @calculator_data[:rates].find { |r| capped_income <= r[:max] }[:rate] end end def calculate_maintenance_payment if @benefits == 'no' send("calculate_#{rate_type}_rate_payment") else 0 #irrelevant what we return, with benefits rate is either nil or flat end end def calculate_reduced_rate_payment reduced_rate = ((@income - REDUCED_RATE_THRESHOLD) * reduced_rate_multiplier) + base_amount reduced_rate_decreased = (reduced_rate - (reduced_rate * shared_care_multiplier)).round(0) if shared_care_multiplier == 0.5 reduced_rate_decreased = reduced_rate_decreased - (@number_of_children * SHARED_CARE_MAX_RELIEF_EXTRA_AMOUNT) end #reduced rate can never be less than 7 pounds reduced_rate_decreased > SCHEME_BASE_AMOUNT ? 
reduced_rate_decreased : SCHEME_BASE_AMOUNT end def calculate_basic_rate_payment basic_rate = capped_income - (capped_income * relevant_other_child_multiplier) basic_rate = (basic_rate * basic_rate_multiplier) basic_rate_decreased = (basic_rate - (basic_rate * shared_care_multiplier)).round(0) # for maximum shared care relief, subtract additional £7 per child if shared_care_multiplier == 0.5 basic_rate_decreased = basic_rate_decreased - (@number_of_children * SHARED_CARE_MAX_RELIEF_EXTRA_AMOUNT) end #basic rate can never be less than 7 pounds basic_rate_decreased > SCHEME_BASE_AMOUNT ? basic_rate_decreased : SCHEME_BASE_AMOUNT end #only used in the 2012 scheme def calculate_basic_plus_rate_payment basic_plus_rate = capped_income - (capped_income * relevant_other_child_multiplier) basic_qualifying_child_amount = (BASIC_PLUS_RATE_THRESHOLD * basic_rate_multiplier) additional_qualifying_child_amount = ((basic_plus_rate - BASIC_PLUS_RATE_THRESHOLD) * basic_plus_rate_multiplier) child_amounts_total = basic_qualifying_child_amount + additional_qualifying_child_amount total = (child_amounts_total - (child_amounts_total * shared_care_multiplier)) if shared_care_multiplier == 0.5 total = total - (@number_of_children * SHARED_CARE_MAX_RELIEF_EXTRA_AMOUNT) end total.round(2) end def reduced_rate_multiplier matrix = @calculator_data[:reduced_rate_multipliers] matrix[number_of_other_children_index][number_of_qualifying_children_index] end def basic_rate_multiplier matrix = @calculator_data[:basic_rate_multipliers] matrix[number_of_qualifying_children_index] end def shared_care_multiplier @calculator_data[:shared_care_reductions][@number_of_shared_care_nights] end def relevant_other_child_multiplier @calculator_data[:relevant_other_child_reductions][number_of_other_children_index] end def basic_plus_rate_multiplier @calculator_data[:basic_plus_rate_multipliers][number_of_qualifying_children_index] end # never use more than the net (or gross) income maximum in calculations def 
capped_income max_income = SCHEME_MAX_INCOME @income > max_income ? max_income : @income end def number_of_other_children_index @number_of_other_children > 3 ? 3 : @number_of_other_children end def number_of_qualifying_children @number_of_children > 3 ? 3 : @number_of_children end def number_of_qualifying_children_index number_of_qualifying_children - 1 end def base_amount SCHEME_BASE_AMOUNT end def paying? @paying_or_receiving == "pay" end def receiving? @paying_or_receiving == "receive" end def collect_fees if paying? (base_amount * 0.2).round(2) else (base_amount * 0.04).round(2) end end def collect_fees_cmp(child_maintenance_payment) child_maintenance_payment = child_maintenance_payment.to_f if paying? (child_maintenance_payment * 0.2).round(2) else (child_maintenance_payment * 0.04).round(2) end end def total_fees(flat_rate_amount, collect_fees) flat_rate_amount = flat_rate_amount.to_f collect_fees = collect_fees.to_f if paying? (flat_rate_amount + collect_fees).round(2) else (flat_rate_amount - collect_fees).round(2) end end def total_fees_cmp(child_maintenance_payment, collect_fees) child_maintenance_payment = child_maintenance_payment.to_f collect_fees = collect_fees.to_f if paying? (child_maintenance_payment + collect_fees).round(2) else (child_maintenance_payment - collect_fees).round(2) end end def total_yearly_fees(collect_fees) collect_fees = collect_fees.to_f (collect_fees * 52).round(2) end def self.child_maintenance_data @child_maintenance_data ||= YAML.load_file(Rails.root.join("lib/data/child_maintenance_data.yml")) end end end
{ "content_hash": "a0cd83a35856cf1c47ba12d711d3439d", "timestamp": "", "source": "github", "line_count": 177, "max_line_length": 119, "avg_line_length": 32.887005649717516, "alnum_prop": 0.6643188455591823, "repo_name": "stwalsh/smart-answers", "id": "eb89086f55ac1eb98b9f2328039cfa2dce50bb59", "size": "5822", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "lib/smart_answer/calculators/child_maintenance_calculator.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "8386" }, { "name": "HTML", "bytes": "2948544" }, { "name": "JavaScript", "bytes": "8814" }, { "name": "Ruby", "bytes": "1939196" }, { "name": "Shell", "bytes": "3673" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>hardware: Not compatible 👼</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / extra-dev</a></li> <li class="active"><a href="">dev / hardware - 8.5.0</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> hardware <small> 8.5.0 <span class="label label-info">Not compatible 👼</span> </small> </h1> <p>📅 <em><script>document.write(moment("2022-03-10 06:33:29 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-03-10 06:33:29 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-threads base base-unix base conf-findutils 1 Virtual package relying on findutils conf-gmp 4 Virtual package relying on a GMP lib system installation coq dev Formal proof management system dune 
3.0.3 Fast, portable, and opinionated build system ocaml 4.08.1 The OCaml compiler (virtual package) ocaml-base-compiler 4.08.1 Official release 4.08.1 ocaml-config 1 OCaml Switch Configuration ocamlfind 1.9.3 A library manager for OCaml zarith 1.12 Implements arithmetic and logical operations over arbitrary-precision integers # opam file: opam-version: &quot;2.0&quot; maintainer: &quot;matej.kosik@inria.fr&quot; homepage: &quot;https://github.com/coq-contribs/hardware&quot; license: &quot;LGPL 2&quot; build: [make &quot;-j%{jobs}%&quot;] install: [make &quot;install&quot;] remove: [&quot;rm&quot; &quot;-R&quot; &quot;%{lib}%/coq/user-contrib/Hardware&quot;] depends: [ &quot;ocaml&quot; &quot;coq&quot; {&gt;= &quot;8.5&quot; &amp; &lt; &quot;8.6~&quot;} ] tags: [ &quot;keyword:hardware verification&quot; &quot;keyword:comparator circuit&quot; &quot;category:Computer Science/Architecture&quot; &quot;category:Miscellaneous/Extracted Programs/Hardware&quot; ] authors: [ &quot;Solange Coupet-Grimal &lt;&gt;&quot; &quot;Line Jakubiec &lt;&gt;&quot; ] bug-reports: &quot;https://github.com/coq-contribs/hardware/issues&quot; dev-repo: &quot;git+https://github.com/coq-contribs/hardware.git&quot; synopsis: &quot;Verification and synthesis of hardware linear arithmetic structures&quot; description: &quot;&quot;&quot; Verification and synthesis of hardware linear arithmetic structures. Example of a left-to-right comparator. 
Three approaches are tackled : - the usual verification of a circuit, consisting in proving that the description satisfies the specification, - the synthesis of a circuit from its specification using the Coq extractor, - the same approach as above but using the Program tactic.&quot;&quot;&quot; flags: light-uninstall url { src: &quot;https://github.com/coq-contribs/hardware/archive/v8.5.0.tar.gz&quot; checksum: &quot;md5=906f833475ea927d3c1b6afb1353421c&quot; } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install 🏜️</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-hardware.8.5.0 coq.dev</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is dev). The following dependencies couldn&#39;t be met: - coq-hardware -&gt; coq &lt; 8.6~ -&gt; ocaml &lt; 4.06.0 base of this switch (use `--unlock-base&#39; to force) Your request can&#39;t be satisfied: - No available version of coq satisfies the constraints No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-hardware.8.5.0</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install 🚀</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall 🧹</h2> <dl class="dl-horizontal"> <dt>Command</dt> 
<dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣 </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
{ "content_hash": "ef20136cc28a820080a14ae9583ae2b4", "timestamp": "", "source": "github", "line_count": 171, "max_line_length": 204, "avg_line_length": 42.98830409356725, "alnum_prop": 0.5592436403210448, "repo_name": "coq-bench/coq-bench.github.io", "id": "561bd2eea8ff7e4329a3b07f416d45dcc34bc7a5", "size": "7376", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.08.1-2.0.5/extra-dev/dev/hardware/8.5.0.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
require 'sequel' require 'ghtorrent/migrations/mysql_defaults' Sequel.migration do up do puts("Adding organization descriminator field to table users") alter_table :users do add_column :type, String, :null => false, :default => 'USR' add_constraint(:type_allowed_values, :type => %w[USR ORG]) end puts("Updating users with default values") transaction(:rollback => :reraise, :isolation => :committed) do self[:users].update(:type => "USR") end puts("Creating table organization-members") create_table :organization_members do foreign_key :org_id, :users, :null => false foreign_key :user_id, :users, :null => false primary_key [:org_id, :user_id] DateTime :created_at, :null => false, :default => Sequel::CURRENT_TIMESTAMP end end down do puts("Droping table organization-members") drop_table :organization_members puts("Droping organization descriminator field to table users") alter_table :users do drop_column :type end end end
{ "content_hash": "f889d4a841c9ce5a0c8e647decf30528", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 67, "avg_line_length": 26.65, "alnum_prop": 0.6566604127579737, "repo_name": "pombredanne/github-mirror", "id": "29665d0976d897077539c66d2006e2cdb0d6a9a1", "size": "1066", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "lib/ghtorrent/migrations/003_add_orgs.rb", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Ruby", "bytes": "225777" }, { "name": "Shell", "bytes": "4171" } ], "symlink_target": "" }
import { getParentIds } from './helper'; import { PAGE, PRODUCT_CATEGORY, PRODUCT, RESERVED } from '../pageTypes'; const getBreadcrumbsForProduct = (product, categories) => { if (product && product.category_id) { let ids = [product.category_id]; let parentIds = getParentIds(categories, product.category_id); ids.push(...parentIds); let index = 0; const breadcrumbs = ids.reverse().map(categoryId => { const category = categories.find(item => item.id === categoryId); if (category) { index++; return getBreadcrumbItem(category.url, category.name, index); } }); return { '@context': 'http://schema.org', '@type': 'BreadcrumbList', itemListElement: breadcrumbs }; } else { return null; } }; const getBreadcrumbsForCategory = (currentCategoryId, categories) => { if (currentCategoryId) { let ids = getParentIds(categories, currentCategoryId); let index = 0; const breadcrumbs = ids.reverse().map(categoryId => { const category = categories.find(item => item.id === categoryId); if (category) { index++; return getBreadcrumbItem(category.url, category.name, index); } }); return { '@context': 'http://schema.org', '@type': 'BreadcrumbList', itemListElement: breadcrumbs }; } else { return null; } }; const getBreadcrumbItem = (url, name, position) => ({ '@type': 'ListItem', position: position, item: { '@id': url, name: name } }); const getProduct = (product, settings) => { let imageUrl = product.images && product.images.length > 0 ? product.images[0].url : null; return { '@context': 'http://schema.org/', '@type': 'Product', name: product.name, description: product.meta_description, image: imageUrl, sku: product.sku, offers: { '@type': 'Offer', priceCurrency: settings.currency_code, price: product.price, availability: product.stock_status === 'available' ? 
'http://schema.org/InStock' : 'http://schema.org/OutOfStock' } }; }; const getProductJSONLD = (product, categories, settings) => { let jsonldArray = []; const breadcrumbs = getBreadcrumbsForProduct(product, categories); if (breadcrumbs) { jsonldArray.push(breadcrumbs); } jsonldArray.push(getProduct(product, settings)); return jsonldArray.length > 0 ? JSON.stringify(jsonldArray) : ''; }; const getCategoryJSONLD = (categoryId, categories) => { let jsonldArray = []; const breadcrumbs = getBreadcrumbsForCategory(categoryId, categories); if (breadcrumbs) { jsonldArray.push(breadcrumbs); } return jsonldArray.length > 0 ? JSON.stringify(jsonldArray) : ''; }; export const getJSONLD = state => { if (typeof window === 'undefined') { switch (state.currentPage.type) { case PRODUCT: return getProductJSONLD( state.productDetails, state.categories, state.settings ); break; case PRODUCT_CATEGORY: return getCategoryJSONLD(state.categoryDetails.id, state.categories); break; default: return ''; } } else { return ''; } };
{ "content_hash": "9893ddfda7d052f403782cfb26c2465b", "timestamp": "", "source": "github", "line_count": 123, "max_line_length": 77, "avg_line_length": 24.471544715447155, "alnum_prop": 0.6654485049833887, "repo_name": "cezerin/cezerin", "id": "3806dce2bc494e9497e8ca7438dfcd6b7f38e481", "size": "3010", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/store/shared/lib/jsonld.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "62407" }, { "name": "HTML", "bytes": "6210" }, { "name": "JavaScript", "bytes": "1712641" }, { "name": "Shell", "bytes": "969" } ], "symlink_target": "" }
namespace mscl { public class CV7ContinuousBIT_Filter_General : Bitfield { private global::System.Runtime.InteropServices.HandleRef swigCPtr; internal CV7ContinuousBIT_Filter_General(global::System.IntPtr cPtr, bool cMemoryOwn) : base(msclPINVOKE.CV7ContinuousBIT_Filter_General_SWIGUpcast(cPtr), cMemoryOwn) { swigCPtr = new global::System.Runtime.InteropServices.HandleRef(this, cPtr); } internal static global::System.Runtime.InteropServices.HandleRef getCPtr(CV7ContinuousBIT_Filter_General obj) { return (obj == null) ? new global::System.Runtime.InteropServices.HandleRef(null, global::System.IntPtr.Zero) : obj.swigCPtr; } protected override void Dispose(bool disposing) { lock(this) { if (swigCPtr.Handle != global::System.IntPtr.Zero) { if (swigCMemOwn) { swigCMemOwn = false; msclPINVOKE.delete_CV7ContinuousBIT_Filter_General(swigCPtr); } swigCPtr = new global::System.Runtime.InteropServices.HandleRef(null, global::System.IntPtr.Zero); } base.Dispose(disposing); } } public CV7ContinuousBIT_Filter_General() : this(msclPINVOKE.new_CV7ContinuousBIT_Filter_General__SWIG_0(), true) { if (msclPINVOKE.SWIGPendingException.Pending) throw msclPINVOKE.SWIGPendingException.Retrieve(); } public CV7ContinuousBIT_Filter_General(byte bits) : this(msclPINVOKE.new_CV7ContinuousBIT_Filter_General__SWIG_1(bits), true) { if (msclPINVOKE.SWIGPendingException.Pending) throw msclPINVOKE.SWIGPendingException.Retrieve(); } public byte flags() { byte ret = msclPINVOKE.CV7ContinuousBIT_Filter_General_flags(swigCPtr); if (msclPINVOKE.SWIGPendingException.Pending) throw msclPINVOKE.SWIGPendingException.Retrieve(); return ret; } public bool fault() { bool ret = msclPINVOKE.CV7ContinuousBIT_Filter_General_fault(swigCPtr); if (msclPINVOKE.SWIGPendingException.Pending) throw msclPINVOKE.SWIGPendingException.Retrieve(); return ret; } public bool timingOverrun() { bool ret = msclPINVOKE.CV7ContinuousBIT_Filter_General_timingOverrun(swigCPtr); if (msclPINVOKE.SWIGPendingException.Pending) throw 
msclPINVOKE.SWIGPendingException.Retrieve(); return ret; } public bool timingUnderrun() { bool ret = msclPINVOKE.CV7ContinuousBIT_Filter_General_timingUnderrun(swigCPtr); if (msclPINVOKE.SWIGPendingException.Pending) throw msclPINVOKE.SWIGPendingException.Retrieve(); return ret; } public static readonly byte FAULT = msclPINVOKE.CV7ContinuousBIT_Filter_General_FAULT_get(); public static readonly byte TIMING_OVERRUN = msclPINVOKE.CV7ContinuousBIT_Filter_General_TIMING_OVERRUN_get(); public static readonly byte TIMING_UNDERRUN = msclPINVOKE.CV7ContinuousBIT_Filter_General_TIMING_UNDERRUN_get(); } }
{ "content_hash": "c32213e24e64b01a592ac7e72539069c", "timestamp": "", "source": "github", "line_count": 65, "max_line_length": 170, "avg_line_length": 43.87692307692308, "alnum_prop": 0.7422861150070126, "repo_name": "LORD-MicroStrain/MSCL", "id": "1a3228ed8f3a26bd5b2f1002441942bee5b760c4", "size": "3255", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "MSCL_Managed/mscl/CV7ContinuousBIT_Filter_General.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "15074" }, { "name": "C", "bytes": "19995" }, { "name": "C#", "bytes": "2883968" }, { "name": "C++", "bytes": "6340918" }, { "name": "CSS", "bytes": "45608" }, { "name": "MATLAB", "bytes": "4449" }, { "name": "Python", "bytes": "45086" }, { "name": "SWIG", "bytes": "136945" } ], "symlink_target": "" }
namespace Rsdn.Text { using System; public sealed class Number : Token { internal Number(string text, int index, int length) : base(text, index, length) { } public override TokenCategory Category => TokenCategory.Number; internal static bool TestCharacter(char c) { return Char.IsDigit(c); } } }
{ "content_hash": "6bd406cf2dc506d49e9b5a6c74649737", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 71, "avg_line_length": 21.68421052631579, "alnum_prop": 0.5436893203883495, "repo_name": "vborovikov/rsdn", "id": "11ce667aaa8191543cb69acecc8760e8f2faf2dd", "size": "412", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/RsdnCore/Text/Number.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "601099" }, { "name": "Smalltalk", "bytes": "435" } ], "symlink_target": "" }
/** * @file rand_stream.h * * @brief The concept of random streams * * @author Dahua Lin */ #ifdef _MSC_VER #pragma once #endif #ifndef LIGHTMAT_RAND_STREAM_H_ #define LIGHTMAT_RAND_STREAM_H_ #include <light_mat/common/basic_defs.h> #include <light_mat/simd/simd_base.h> namespace lmat { namespace random { /******************************************** * * The concept of random streams * ********************************************/ template<class RStream> struct rand_stream_traits; template<class Derived> class IRandStream { public: LMAT_CRTP_REF typedef typename rand_stream_traits<Derived>::seed_type seed_type; LMAT_ENSURE_INLINE void set_seed(const seed_type& seed) { derived().set_seed(seed); } LMAT_ENSURE_INLINE size_t state_size() const // in terms of bytes { return derived().state_size(); } LMAT_ENSURE_INLINE uint32_t rand_u32() { return derived().rand_u32(); } LMAT_ENSURE_INLINE uint64_t rand_u64() { return derived().rand_u64(); } LMAT_ENSURE_INLINE void rand_seq(size_t nbytes, void *buf) { derived().rand_seq(nbytes, buf); } }; /******************************************** * * pre-defined random stream classes * ********************************************/ template<unsigned int MEXP> class sfmt_rand_stream; typedef sfmt_rand_stream<19937> default_rand_stream; } } #endif /* RAND_STREAM_H_ */
{ "content_hash": "3bbf2afe25adf2db86d0d1af94fb7dae", "timestamp": "", "source": "github", "line_count": 82, "max_line_length": 68, "avg_line_length": 17.463414634146343, "alnum_prop": 0.577513966480447, "repo_name": "lindahua/light-matrix", "id": "a4ed4a12e4d644e8882722c38b74250f07d2382d", "size": "1432", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "light_mat/random/rand_stream.h", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "C", "bytes": "319450" }, { "name": "C++", "bytes": "931851" }, { "name": "Objective-C", "bytes": "1400" }, { "name": "Python", "bytes": "7812" } ], "symlink_target": "" }
<!DOCTYPE html> <!-- Copyright (c) 2012 Intel Corporation. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of works must retain the original copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the original copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Intel Corporation nor the names of its contributors may be used to endorse or promote products derived from this work without specific prior written permission. THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
Authors: Hao, Yunfei <yunfenx.hao@intel.com> --> <html> <head> <title>WRT Test: multiple-other-contact-write</title> <link rel="author" title="Intel" href="http://www.intel.com"/> <link rel="help" href=""/> <meta name="flags" content=""/> <meta name="assert" content="Test file."/> <meta charset="utf-8"> <script src="resources/testharness.js"></script> <script src="resources/testharnessreport.js"></script> </head> <body> <div id="log"></div> <script type="text/javascript"> test(function() { var addressbook = tizen.contact.getDefaultAddressBook(); assert_true("add" in addressbook); }, "WRT Test: multiple-other-contact-write"); </script> </body> </html>
{ "content_hash": "10123ac0de60feffa50cee59a7f2a871", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 80, "avg_line_length": 40.907407407407405, "alnum_prop": 0.736985061113626, "repo_name": "jiajiax/crosswalk-test-suite", "id": "5bb9b29c4da1eeb9432b4d73dbdde0474c0e62a8", "size": "2209", "binary": false, "copies": "9", "ref": "refs/heads/master", "path": "wrt/tct-ui01-wrt-tests/multiple-other-contact-write/multiple-other-contact-write-test.html", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "28136" }, { "name": "CSS", "bytes": "664274" }, { "name": "CoffeeScript", "bytes": "18978" }, { "name": "Cucumber", "bytes": "63597" }, { "name": "GLSL", "bytes": "3495" }, { "name": "Groff", "bytes": "12" }, { "name": "HTML", "bytes": "39925954" }, { "name": "Java", "bytes": "651355" }, { "name": "JavaScript", "bytes": "16846788" }, { "name": "Makefile", "bytes": "1044" }, { "name": "PHP", "bytes": "44946" }, { "name": "Python", "bytes": "4403877" }, { "name": "Shell", "bytes": "1100341" }, { "name": "XSLT", "bytes": "785898" } ], "symlink_target": "" }
package Interfaz;

import java.awt.Canvas;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.Iterator;
import java.util.LinkedList;

import javax.swing.JOptionPane;

import Estructuras.Arista;
import Estructuras.Grafo;
import Estructuras.Vertice;

/**
 * Interactive drawing canvas for editing a graph ({@link Grafo}) with the mouse.
 *
 * <p>The canvas works as a small modal state machine: exactly one of the
 * {@code dibujando*}/{@code eliminando*} flags is armed via the {@code set*}
 * methods, and the next mouse press (or pair of presses, for edges) performs
 * that single operation and then disarms the mode. Edge operations need two
 * clicks (origin vertex, then destination vertex); {@code primerVertice}
 * tracks which of the two clicks is expected next.</p>
 */
@SuppressWarnings("serial")
public class Lienzo extends Canvas implements MouseListener{

	// ATTRIBUTES
	// **********************************************
	final static int tamVertices = 20;      // diameter, in pixels, used to draw every vertex oval
	boolean dibujandoVertices;              // armed: next click creates a vertex
	boolean dibujandoAristas;               // armed: next two clicks create an edge
	boolean eliminandoVertices;             // armed: next click deletes a vertex
	boolean eliminandoAristas;              // armed: next two clicks delete an edge
	boolean primerVertice;                  // true while waiting for the FIRST click of a two-click edge operation
	Grafo grafoActual;                      // the graph currently being edited/rendered
	Arista aristaActual;                    // edge under construction between the two clicks of "add edge"
	Vertice verticeAux;                     // origin vertex remembered between the two clicks of "delete edge"

	// CONSTRUCTOR
	// **********************************************

	/** Creates an empty white canvas with all editing modes disarmed. */
	public Lienzo(){
		dibujandoVertices = false;
		dibujandoAristas = false;
		eliminandoVertices = false;
		eliminandoAristas = false;
		primerVertice = true;
		grafoActual = new Grafo();
		this.setBackground(new Color(255,255,255));
	}

	// MUTATORS
	// **********************************************

	/** Arms "add vertex" mode and disarms every other mode. */
	public void setDibujandoVertices(){
		dibujandoVertices = true;
		dibujandoAristas = false;
		eliminandoVertices = false;
		eliminandoAristas = false;
		primerVertice = true;
	}

	/** Arms "add edge" mode (two clicks: origin, destination) and disarms every other mode. */
	public void setDibujandoAristas(){
		dibujandoAristas= true;
		dibujandoVertices = false;
		eliminandoVertices = false;
		eliminandoAristas = false;
		primerVertice = true;
	}

	/** Arms "delete vertex" mode and disarms every other mode. */
	public void setEliminandoVertices(){
		eliminandoVertices = true;
		dibujandoVertices = false;
		dibujandoAristas = false;
		eliminandoAristas = false;
		primerVertice = true;
	}

	/** Arms "delete edge" mode (two clicks: origin, destination) and disarms every other mode. */
	public void setEliminandoAristas(){
		eliminandoAristas = true;
		eliminandoVertices = false;
		dibujandoVertices = false;
		dibujandoAristas = false;
		primerVertice = true;
	}

	/** Replaces the graph being edited and repaints the canvas. */
	public void setGrafo(Grafo g){
		grafoActual = g;
		repaint();
	}

	// ACCESSORS
	// **********************************************

	/** Returns the graph currently shown on the canvas. */
	public Grafo getGrafo(){
		return grafoActual;
	}

	// MOUSE EVENTS
	// **********************************************

	@Override
	public void mouseClicked(MouseEvent e) {
	}

	@Override
	public void mouseEntered(MouseEvent arg0) {
	}

	@Override
	public void mouseExited(MouseEvent arg0) {
	}

	/**
	 * Dispatches the press to whichever editing mode is currently armed.
	 * All user feedback goes through modal {@link JOptionPane} dialogs;
	 * dialog texts are intentionally kept in Spanish (user-facing strings).
	 */
	@Override
	public void mousePressed(MouseEvent e) {
		int x = e.getX();
		int y = e.getY();
		if (dibujandoVertices){
			// Ask for a name, then try to add the vertex at the clicked point.
			// anyadirVertice return codes: 0 = added, 1 = too close to an
			// existing vertex, -1 = duplicate name.
			String nombreVertice = JOptionPane.showInputDialog("Inserte el nombre del vertice:", JOptionPane.QUESTION_MESSAGE);
			int res = 0;
			if (nombreVertice != null) res = grafoActual.anyadirVertice(x, y, nombreVertice);
			switch (res){
				case 0 : this.repaint(); break;
				case 1 : JOptionPane.showMessageDialog(null, "El vertice esta demasido cerca de otro\nEscoja otro punto"); break;
				case -1 : JOptionPane.showMessageDialog(null, "Ya existe un vertice con el nombre: "+nombreVertice+"\nPor favor, escoja otro diferente"); break;
			}
			dibujandoVertices = false;  // single-shot: mode disarms after one attempt
		}
		if (dibujandoAristas){
			Vertice v = grafoActual.buscaVertice(x,y);
			if (primerVertice){
				// First click: remember the origin vertex of the new edge.
				if (v != null){
					aristaActual = new Arista();
					aristaActual.setOrigen(v);
					primerVertice = false;
					JOptionPane.showMessageDialog(null, "Origen arista: "+v.getNombre()+"\nSeleccione vertice de destino");
				}
				else JOptionPane.showMessageDialog(null, "No ha seleccionado ningun vertice para el origen\nPulsa sobre un vertice para seleccionarlo");
			}
			else{
				// Second click: set the destination, then loop asking for an
				// integer weight until the input parses (cancel aborts).
				if (v != null){
					aristaActual.setDestino(v);
					boolean valorValido = false;
					boolean cancelarArista = false;
					int valorArco = 0;
					while (!valorValido){
						try {
							String valorArista = JOptionPane.showInputDialog("Introduzca un valor para la arista:", JOptionPane.QUESTION_MESSAGE);
							if (valorArista !=null) valorArco = Integer.parseInt(valorArista);
							else cancelarArista = true;  // user pressed Cancel
							valorValido = true;
						}
						catch(Exception ex){
							JOptionPane.showMessageDialog(null, "No se ha introducido un valor valido para el valor de la arista!\nPor favor, introduzca un valor entero");
							valorValido = false;
						} // catch
					} // while
					if (!cancelarArista){
						aristaActual.setValorArco(valorArco);
						primerVertice = true;
						dibujandoAristas = false;
						if (grafoActual.anyadirArista(aristaActual)){
							aristaActual = null;
							JOptionPane.showMessageDialog(null, "Destino arista: "+v.getNombre());
							repaint();
						}
						else JOptionPane.showMessageDialog(null, "Ya existe esta arista\nNo se añadira al conjunto de aristas");
					} // else cancelarArista
				}
				else JOptionPane.showMessageDialog(null, "No ha seleccionado ningun vertice para el destino\nPulsa sobre un vertice para seleccionarlo");
			} // else primerVertice
		} // if dibujandoAristas
		if (eliminandoVertices){
			Vertice v = grafoActual.buscaVertice(x, y);
			if (v == null) JOptionPane.showMessageDialog(null, "No ha seleccionado un vertice\nPara seleccionar un vertice, haga clic sobre el");
			else {
				grafoActual.eliminarVertice(v);
				eliminandoVertices = false;
				grafoActual.decrementaVertices();
				repaint();
			}
		} // if eliminandoVertices
		if (eliminandoAristas){
			// Two-click flow mirroring edge creation: first click selects the
			// origin (stored in verticeAux), second click deletes the edge.
			Vertice v = grafoActual.buscaVertice(x, y);
			if (primerVertice){
				if (v != null){
					verticeAux = v;
					primerVertice = false;
					JOptionPane.showMessageDialog(null, "Origen arista: "+v.getNombre()+"\nSeleccione vertice de destino");
				}
				else JOptionPane.showMessageDialog(null, "No ha seleccionado ningun vertice para el origen\nPulsa sobre un vertice para seleccionarlo");
			}
			else{
				if (v != null){
					// NOTE(review): the "deleted" dialog is shown before
					// eliminarArista runs, even if no such edge exists —
					// confirm against Grafo.eliminarArista semantics.
					JOptionPane.showMessageDialog(null, "Destino arista: "+v.getNombre()+"\nArista eliminada correctamente!");
					grafoActual.eliminarArista(verticeAux, v);
					primerVertice = true;
					eliminandoAristas = false;
					repaint();
				}
				else JOptionPane.showMessageDialog(null, "No ha seleccionado ningun vertice para el destino\nPulsa sobre un vertice para seleccionarlo");
			} // else primerVertice
		} // if eliminandoVertices
	}

	@Override
	public void mouseReleased(MouseEvent arg0) {
	}

	// METHODS
	// *************************************************************

	/**
	 * Paints the whole graph: every edge (line plus weight label at its
	 * midpoint) first, then every vertex (filled oval plus name label) on top.
	 * The +10 offsets aim lines at the center of the 20px vertex ovals.
	 */
	public void paint(Graphics canvas){
		canvas.setFont(new Font("Arial", Font.BOLD, 12));  // set the canvas font
		Iterator<Arista> iteradorAristas = grafoActual.getAristas();
		while (iteradorAristas.hasNext()){
			Arista a = iteradorAristas.next();
			canvas.setColor(a.getColor());
			String valor = String.valueOf(a.getValorArco());
			canvas.drawString(valor, ((a.getDestinoX()+a.getOrigenX())/2)+5, (a.getOrigenY()+a.getDestinoY())/2);
			canvas.drawLine(a.getOrigenX()+10, a.getOrigenY()+10, a.getDestinoX()+10, a.getDestinoY()+10);
		}
		Iterator<Vertice> iteradorVertices = grafoActual.getVertices();
		while (iteradorVertices.hasNext()){
			Vertice v = iteradorVertices.next();
			canvas.setColor(v.getColor());
			canvas.drawString(v.getNombre(), v.getCoordX()+20, v.getCoordY());
			canvas.fillOval(v.getCoordX(), v.getCoordY(), tamVertices, tamVertices);
		}
	}

	/**
	 * Overlays the given solution edges in red on top of the current drawing.
	 * Draws directly with getGraphics(), so the overlay is transient: any
	 * subsequent repaint() erases it.
	 */
	public void pintarSolucion (LinkedList<Arista> listaSolucion) {
		Graphics canvas = getGraphics();
		canvas.setColor(new Color(255,0,0));
		Iterator<Arista> iteradorListaAristas = listaSolucion.iterator();
		while (iteradorListaAristas.hasNext()){
			Arista a = iteradorListaAristas.next();
			canvas.drawLine(a.getOrigenX()+10, a.getOrigenY()+10, a.getDestinoX()+10, a.getDestinoY()+10);
		}
	}

	/**
	 * Discards the current graph and starts over with an empty one.
	 * NOTE(review): unlike the constructor, this does not reset the
	 * eliminandoVertices/eliminandoAristas flags — confirm whether that is
	 * intentional.
	 */
	public void reiniciar(){
		dibujandoVertices = false;
		dibujandoAristas = false;
		primerVertice = true;
		grafoActual = new Grafo();
		repaint();
	}
}
{ "content_hash": "409120d5ef91b1cb6dd2b2fd3cb70ca2", "timestamp": "", "source": "github", "line_count": 285, "max_line_length": 151, "avg_line_length": 28.333333333333332, "alnum_prop": 0.6530030959752322, "repo_name": "carlosrd/LP3", "id": "e32145d70c22173c092c6aa0721e51fbc026c296", "size": "8075", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "P2/src/Interfaz/Lienzo.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "245825" } ], "symlink_target": "" }
// Application entry point: wraps startup in an autorelease pool and hands
// control to UIKit's run loop. The app delegate class is passed by name so
// UIKit can instantiate it once the application object exists.
int main(int argc, char * argv[])
{
    @autoreleasepool {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
    }
}
{ "content_hash": "862f69c30961e5cb41872da6e6711e1d", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 90, "avg_line_length": 31.6, "alnum_prop": 0.6582278481012658, "repo_name": "dzenbot/DZNEmptyDataSet", "id": "ac1c3017ab62c062b2a2b66813ba3ac8bcd4189f", "size": "328", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "DZNEmptyDataSet/Colors/main.m", "mode": "33188", "license": "mit", "language": [ { "name": "Objective-C", "bytes": "134981" }, { "name": "Ruby", "bytes": "4725" } ], "symlink_target": "" }
'use strict';

import $ from './jquery';
import * as deprecate from './internal/deprecation';
import globalize from './internal/globalize';

/**
 * Does nothing because legacy code.
 *
 * Kept only so existing callers of the deprecated global keep working.
 *
 * @returns {undefined}
 */
function warnAboutFirebug () {}

/**
 * Includes Firebug Lite for debugging in IE. Especially in IE.
 *
 * Appends the Firebug Lite script once, then polls until the script has
 * installed its global (`window.firebug`) and initializes it.
 *
 * @returns {undefined}
 */
function firebug () {
    var script = $(document.createElement('script'));
    script.attr('src', 'https://getfirebug.com/releases/lite/1.2/firebug-lite-compressed.js');
    $('head').append(script);
    // Bug fix: the original retry called the module-level `firebug` function
    // again (appending a new <script> tag on every tick) and `firebug.init()`
    // resolved to this loader function — shadowing the `window.firebug`
    // object created by the loaded script — so init was never reached.
    // Poll with a dedicated function and address the global explicitly.
    (function poll () {
        if (window.firebug) {
            window.firebug.init();
        } else {
            setTimeout(poll, 0);
        }
    })();
}

firebug = deprecate.fn(firebug, 'firebug', { sinceVersion: '5.1.0' });
warnAboutFirebug = deprecate.fn(warnAboutFirebug, 'warnAboutFirebug', { sinceVersion: '5.8.0' });

globalize('firebug', firebug);
globalize('warnAboutFirebug', warnAboutFirebug);

export { firebug, warnAboutFirebug };
{ "content_hash": "daa01fe03c71268d2a1a1ed3323c9267", "timestamp": "", "source": "github", "line_count": 48, "max_line_length": 94, "avg_line_length": 20.979166666666668, "alnum_prop": 0.634558093346574, "repo_name": "parambirs/aui-demos", "id": "2f0f990df92ef86f02f90a5d7b77e2292a887b99", "size": "1007", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "node_modules/@atlassian/aui/src/js/aui/firebug.js", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "132297" }, { "name": "JavaScript", "bytes": "84263" } ], "symlink_target": "" }
<?php

namespace Symfony\Component\HttpKernel\EventListener;

use Symfony\Component\HttpKernel\Event\GetResponseEvent;
use Symfony\Component\HttpKernel\Event\FinishRequestEvent;
use Symfony\Component\HttpKernel\KernelEvents;
use Symfony\Component\HttpFoundation\RequestStack;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\Routing\RequestContextAwareInterface;
use Symfony\Component\EventDispatcher\EventSubscriberInterface;

/**
 * Initializes the locale based on the current request.
 *
 * This listener works in 2 modes:
 *
 *  * 2.3 compatibility mode where you must call setRequest whenever the Request changes.
 *  * 2.4+ mode where you must pass a RequestStack instance in the constructor.
 *
 * @author Fabien Potencier <fabien@symfony.com>
 */
class LocaleListener implements EventSubscriberInterface
{
    private $router;        // optional RequestContextAwareInterface kept in sync with the request locale
    private $defaultLocale; // fallback locale applied to every incoming request
    private $requestStack;  // null only in deprecated 2.3 compatibility mode

    /**
     * RequestStack will become required in 3.0.
     *
     * @param RequestStack                        $requestStack  A RequestStack instance
     * @param string                              $defaultLocale The default locale
     * @param RequestContextAwareInterface|null   $router        The router
     *
     * @throws \InvalidArgumentException
     */
    public function __construct($requestStack = null, $defaultLocale = 'en', $router = null)
    {
        // BC shim: detect the legacy 2.3 signature (defaultLocale, router) by
        // type-sniffing the arguments, and shift them into the new positions.
        if ((null !== $requestStack && !$requestStack instanceof RequestStack) || $defaultLocale instanceof RequestContextAwareInterface || $router instanceof RequestStack) {
            $tmp = $router;
            $router = func_num_args() < 2 ? null : $defaultLocale;
            $defaultLocale = $requestStack;
            $requestStack = func_num_args() < 3 ? null : $tmp;

            @trigger_error('The '.__METHOD__.' method now requires a RequestStack to be given as first argument as '.__CLASS__.'::setRequest method will not be supported anymore in 3.0.', E_USER_DEPRECATED);
        } elseif (!$requestStack instanceof RequestStack) {
            // Called with the new argument order but without a RequestStack:
            // still allowed (2.3 mode) yet deprecated.
            @trigger_error('The '.__METHOD__.' method now requires a RequestStack instance as '.__CLASS__.'::setRequest method will not be supported anymore in 3.0.', E_USER_DEPRECATED);
        }

        // After the shuffle, validate whatever ended up in each slot.
        if (null !== $requestStack && !$requestStack instanceof RequestStack) {
            throw new \InvalidArgumentException('RequestStack instance expected.');
        }
        if (null !== $router && !$router instanceof RequestContextAwareInterface) {
            throw new \InvalidArgumentException('Router must implement RequestContextAwareInterface.');
        }

        $this->defaultLocale = $defaultLocale;
        $this->requestStack = $requestStack;
        $this->router = $router;
    }

    /**
     * Sets the current Request.
     *
     * This method was used to synchronize the Request, but as the HttpKernel
     * is doing that automatically now, you should never call it directly.
     * It is kept public for BC with the 2.3 version.
     *
     * @param Request|null $request A Request instance
     *
     * @deprecated since version 2.4, to be removed in 3.0.
     */
    public function setRequest(Request $request = null)
    {
        @trigger_error('The '.__METHOD__.' method is deprecated since Symfony 2.4 and will be removed in 3.0.', E_USER_DEPRECATED);

        if (null === $request) {
            return;
        }

        $this->setLocale($request);
        $this->setRouterContext($request);
    }

    // Applies the default locale, then the request's own _locale attribute
    // (if any), and mirrors the result into the router context.
    public function onKernelRequest(GetResponseEvent $event)
    {
        $request = $event->getRequest();
        $request->setDefaultLocale($this->defaultLocale);

        $this->setLocale($request);
        $this->setRouterContext($request);
    }

    // When a sub-request finishes, restore the parent request's locale in the
    // router context so URL generation keeps using the right locale.
    public function onKernelFinishRequest(FinishRequestEvent $event)
    {
        if (null === $this->requestStack) {
            return; // removed when requestStack is required
        }

        if (null !== $parentRequest = $this->requestStack->getParentRequest()) {
            $this->setRouterContext($parentRequest);
        }
    }

    // Promotes the routing attribute _locale to the request's locale.
    private function setLocale(Request $request)
    {
        if ($locale = $request->attributes->get('_locale')) {
            $request->setLocale($locale);
        }
    }

    // Pushes the request's locale into the router context (no-op without a router).
    private function setRouterContext(Request $request)
    {
        if (null !== $this->router) {
            $this->router->getContext()->setParameter('_locale', $request->getLocale());
        }
    }

    public static function getSubscribedEvents()
    {
        return array(
            // must be registered after the Router to have access to the _locale
            KernelEvents::REQUEST => array(array('onKernelRequest', 16)),
            KernelEvents::FINISH_REQUEST => array(array('onKernelFinishRequest', 0)),
        );
    }
}
{ "content_hash": "3d9b26b50f1be585f21415333c747d49", "timestamp": "", "source": "github", "line_count": 130, "max_line_length": 207, "avg_line_length": 36.723076923076924, "alnum_prop": 0.645370758273984, "repo_name": "mihai-stancu/symfony", "id": "8d389150b03cbd5e4d41e6b88f92a3357f5a232f", "size": "5003", "binary": false, "copies": "1", "ref": "refs/heads/2.8", "path": "src/Symfony/Component/HttpKernel/EventListener/LocaleListener.php", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "8656" }, { "name": "CSS", "bytes": "10278" }, { "name": "HTML", "bytes": "266144" }, { "name": "JavaScript", "bytes": "345" }, { "name": "M4", "bytes": "2250" }, { "name": "PHP", "bytes": "12858288" }, { "name": "Shell", "bytes": "643" } ], "symlink_target": "" }
// Central typedefs shared by the StatCalc module: label/category selection,
// term-document matrices and the id <-> name mappings built around them.
#ifndef _MAIN_DATA_TYPES_
#define _MAIN_DATA_TYPES_

#include <string>
#include <vector>
#include <unordered_map>
#include <utility>

#include "category-processor.h"
#include "eigen-data-types.h"

using std::string;
using std::wstring;
using std::unordered_map;
using std::vector;
using std::pair;

namespace statcalc
{

// Compile-time selection of the active label type; exactly one of these
// typedefs is uncommented at a time.
//typedef WikiCategoryFullLabel Category;
typedef GoogleAppsLabel Category;
//typedef PatentLabel Category;
//typedef PatentLabelVec Category;

/**
 * Structure for storing word and list documents in which this word appears
 * pair<unsigned int, unsigned int> store id of document and number of occurrences this word
 */
typedef eigentools::SparseMat TDMatrix;

/**
 * Structure for mapping name of document into id
 */
typedef unordered_map<string,unsigned int> DocID;

/**
 * Structure for mapping word into id
 */
typedef unordered_map<wstring,int> Vocabulary;

/**
 * Structure for mapping document id into type of domain
 */
typedef unordered_map<unsigned int,Category> IDCategory;

/**
 * Structure storing map of term and its double valued statistic
 */
typedef eigentools::DenseVec WordDoubleStat;

/**
 * Structure for storing term and its integer statistics
 */
typedef Eigen::Matrix<int,Eigen::Dynamic,1> WordIntStat;

// Plain (untokenized-id) representation of one document; presumably
// sentence -> token positions -> word ids — TODO confirm against usage.
typedef vector<vector<vector<int> > > DocPlainByWords;

// Collection of plain documents keyed by document id.
typedef unordered_map<unsigned int, DocPlainByWords> DocCollectionPlainIdx;

// Bag-of-words triplets (row, col, count) ready to fill an Eigen sparse matrix.
typedef vector<Eigen::Triplet<int> > DocCollectionBOW;

// Forward declarations for the module's main classes.
class StatCalc;
class DocumentProcessor;

}

#endif //_MAIN_DATA_TYPES_
{ "content_hash": "26b47fa0161a160d8e43514b18aa61e3", "timestamp": "", "source": "github", "line_count": 69, "max_line_length": 93, "avg_line_length": 21.942028985507246, "alnum_prop": 0.76221928665786, "repo_name": "zy4kamu/Coda", "id": "9e3bacc6fe3e2a726e53b8b388fe3ef10edc201e", "size": "2686", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/utils/StatCalc/statcalc-data-types.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "48" }, { "name": "C", "bytes": "6587769" }, { "name": "C++", "bytes": "20957504" }, { "name": "CMake", "bytes": "50801" }, { "name": "Makefile", "bytes": "778300" }, { "name": "Objective-C", "bytes": "78643" }, { "name": "Python", "bytes": "46732" }, { "name": "Shell", "bytes": "40147" } ], "symlink_target": "" }
namespace components
{
    /**
     * Component describing how fast an entity may move and rotate.
     * Both rates are expressed per millisecond of simulation time.
     */
    class Movement : public Component
    {
      public:
        Movement(float moveRate, float rotateRate) :
            m_moveRate(moveRate),
            m_rotateRate(rotateRate)
        {
        }

        // Fix: the original returned `const float` by value; a top-level const
        // on a by-value return is meaningless and compiler-ignored, so this is
        // source- and ABI-compatible for all callers.
        float getMoveRate() const { return m_moveRate; }
        float getRotateRate() const { return m_rotateRate; }

      private:
        float m_moveRate;   // unit distance per millisecond
        float m_rotateRate; // degrees per millisecond
    };
} // namespace components
{ "content_hash": "2d882a9c29306dac0d70e92abb44de0c", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 66, "avg_line_length": 27.42105263157895, "alnum_prop": 0.6065259117082533, "repo_name": "ProfPorkins/GameTech", "id": "29e1bf6e94e1b56a2e7319d10445e62d990145a6", "size": "758", "binary": false, "copies": "2", "ref": "refs/heads/trunk", "path": "C++/Multiplayer/Step 2 - Client Prediction/shared/components/Movement.hpp", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "665029" }, { "name": "CMake", "bytes": "72697" }, { "name": "CSS", "bytes": "12672" }, { "name": "HTML", "bytes": "9024" }, { "name": "JavaScript", "bytes": "1039429" }, { "name": "Rich Text Format", "bytes": "45920" } ], "symlink_target": "" }
package com.hadlink.library.base.presenter; import android.annotation.SuppressLint; import android.content.Context; import android.os.Bundle; import android.os.Handler; import android.os.Looper; import android.support.v7.widget.Toolbar; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import com.hadlink.library.R; import com.hadlink.library.base.view.IDelegate; import com.hadlink.library.model.Event; import com.hadlink.library.util.rx.RxBus; import com.hadlink.library.util.varyview.VaryViewHelper; import com.joanzapata.iconify.Icon; import com.joanzapata.iconify.IconDrawable; import com.trello.rxlifecycle.components.support.RxAppCompatActivity; import rx.Subscription; import rx.functions.Action1; /** * Presenter base class for Activity * Presenter层的实现基类 * * @param <T> View delegate class type */ public abstract class ActivityPresenter<T extends IDelegate> extends RxAppCompatActivity { protected T viewDelegate; protected Context context; protected VaryViewHelper varyViewHelper; protected Handler handler = new Handler(Looper.getMainLooper()); protected String netTag; /** * 这里toolBar的配置可以配合策略模式设置几种模板,这里不展开 */ protected Toolbar toolbar; private Subscription rxSubscribe; public ActivityPresenter() { try { viewDelegate = getDelegateClass().newInstance(); } catch (InstantiationException e) { throw new RuntimeException("create IDelegate error"); } catch (IllegalAccessException e) { throw new RuntimeException("create IDelegate error"); } } @SuppressLint("InflateParams") @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); context = this; viewDelegate.create(getLayoutInflater(), null, savedInstanceState); setContentView(viewDelegate.getRootView()); initToolbar(); viewDelegate.initWidget(); bindEvenListener(); if (bindBus()) { rxSubscribe = RxBus.getDefault().take(Event.class) .subscribe(new Action1<Event>() { @Override public void call(Event event) { 
onEvent(event.arg, event.getObject()); } }, new Action1<Throwable>() { @Override public void call(Throwable throwable) { } }); } if (viewDelegate.getLoadingTargetView() != null) { varyViewHelper = new VaryViewHelper.Builder() .setDataView(viewDelegate.getLoadingTargetView()) .setLoadingView(LayoutInflater.from(context).inflate(R.layout.layout_loadingview, null)) .setEmptyView(LayoutInflater.from(context).inflate(R.layout.layout_emptyview, null)) .setErrorView(LayoutInflater.from(context).inflate(R.layout.layout_errorview, null)) .setRefreshListener(new View.OnClickListener() { @Override public void onClick(View v) { onRetryListener(); } }) .build(); } Bundle extras = getIntent().getExtras(); if (null != extras) { getBundleExtras(extras); } } @Override protected void onRestoreInstanceState(Bundle savedInstanceState) { super.onRestoreInstanceState(savedInstanceState); if (viewDelegate == null) { try { viewDelegate = getDelegateClass().newInstance(); } catch (InstantiationException e) { throw new RuntimeException("create IDelegate error"); } catch (IllegalAccessException e) { throw new RuntimeException("create IDelegate error"); } } } protected void initToolbar() { if (getToolbarAvailable()) { toolbar = viewDelegate.getToolbar(); if (toolbar != null) { setSupportActionBar(toolbar); } } } @Override public boolean onCreateOptionsMenu(Menu menu) { if (viewDelegate.getOptionsMenuId() != 0) { getMenuInflater().inflate(viewDelegate.getOptionsMenuId(), menu); onUseIconifySetMenuItem(menu); } return super.onCreateOptionsMenu(menu); } @Override protected void onDestroy() { super.onDestroy(); if (bindBus()) { if (rxSubscribe != null && rxSubscribe.isUnsubscribed()) rxSubscribe.unsubscribe(); } if (viewDelegate.getLoadingTargetView() != null) varyViewHelper.releaseVaryView(); viewDelegate.destroy(); viewDelegate = null; handler.removeCallbacksAndMessages(null); } protected final void setMenuItem(Icon icon, int color, MenuItem menuItem) { menuItem.setIcon( new IconDrawable(this, 
icon) .colorRes(color) .actionBarSize()); } /** * ------------------------------------------------开放以下方法------------------------------------------------------------- */ /** * 绑定哪个视图类 */ protected abstract Class<T> getDelegateClass(); /** * 是否需要rxBus */ protected boolean bindBus() { return false; } /** * 如果有设置loadingView,加载失败时重试点击的回调 * * @see IDelegate #getLoadingTargetView() */ protected void onRetryListener() { } /** * rxBus事件回调,根据what判断时间类型 * * @param what 事件类型 * @param obj 携带的对象 */ protected void onEvent(int what, Object obj) { } /** * 初始化一些监听等 */ protected void bindEvenListener() { } /** * 接收bundle回调 */ protected void getBundleExtras(Bundle extras) { } /** * 是否启用toolBar */ protected boolean getToolbarAvailable() { return true; } /** * only Iconify use */ protected void onUseIconifySetMenuItem(Menu menu) { } }
{ "content_hash": "303028a7b3dc5833ea4d81aa520142ec", "timestamp": "", "source": "github", "line_count": 214, "max_line_length": 122, "avg_line_length": 29.11214953271028, "alnum_prop": 0.5876404494382023, "repo_name": "vihuela/Lay-s", "id": "2e14f1eea4f4d4198a0940b1324ad9f1333e8118", "size": "7187", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "library/src/main/java/com/hadlink/library/base/presenter/ActivityPresenter.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "526000" } ], "symlink_target": "" }
<?php

namespace WhichBrowser\Data;

use WhichBrowser\Constants;

/**
 * Lookup tables that normalize raw device manufacturer strings into
 * canonical manufacturer names.
 */
class Manufacturers
{
    /** Generic manufacturer name map, populated by data/manufacturer-names.php. */
    public static $GENERIC = [];

    /** Television-specific manufacturer name map, populated by the same data file. */
    public static $TELEVISION = [];

    /**
     * Map a raw manufacturer string to its canonical name.
     *
     * @param string $type Device type constant (Constants\DeviceType::*)
     * @param string $name Raw manufacturer name as reported by the device
     *
     * @return string Canonical name, or the cleaned input when unknown
     */
    public static function identify($type, $name)
    {
        // Strip the "CUS:" customization prefix some devices prepend.
        $name = preg_replace('/^CUS\:/u', '', trim($name));

        // Loads the lookup tables into the static maps; require_once makes
        // this a one-time cost across repeated calls.
        require_once __DIR__ . '/../../data/manufacturer-names.php';

        // Consistency fix: use self:: throughout instead of mixing the
        // explicit class name and self:: for the same static properties.
        if ($type == Constants\DeviceType::TELEVISION) {
            if (isset(self::$TELEVISION[$name])) {
                return self::$TELEVISION[$name];
            }
        }

        if (isset(self::$GENERIC[$name])) {
            return self::$GENERIC[$name];
        }

        return $name;
    }
}
{ "content_hash": "5a4c72ed268d3beeb2a26cc513421337", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 68, "avg_line_length": 22.4, "alnum_prop": 0.5461309523809523, "repo_name": "WhichBrowser/Parser", "id": "8dbf289b6ad911805ebd6f5bf70a48027e4986d8", "size": "672", "binary": false, "copies": "18", "ref": "refs/heads/master", "path": "src/Data/Manufacturers.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "4048067" } ], "symlink_target": "" }
<table> <tr> <td> <a href="http://lattice.cf"><img src="https://github.com/cloudfoundry-incubator/lattice/raw/develop/logos/lattice.png" align="left" width="200" ></a> </td> <td> Website: <a href="http://lattice.cf">http://lattice.cf</a><br> Mailing List: <a href="https://groups.google.com/a/cloudfoundry.org/forum/#!forum/lattice">Google Groups</a> </td> </tr> </table> Lattice is an open source project for running containerized workloads on a cluster. Lattice bundles up http load-balancing, a cluster scheduler, log aggregation/streaming and health management into an easy-to-deploy and easy-to-use package. Lattice is based on a number of open source [Cloud Foundry](http://cloudfoundry.org) components: - [Diego](https://github.com/cloudfoundry-incubator/diego-design-notes) schedules and monitors containerized workloads - [Doppler](https://github.com/cloudfoundry/loggregator) aggregates and streams application logs - [Gorouter](https://github.com/cloudfoundry/gorouter) provides http load-balancing ## Deploy Lattice A [local deployment](#local-deployment) of Lattice can be launched with Vagrant. A scalable [cluster deployment](#clustered-deployment) of Lattice can be launched with Terraform. We currently support [AWS](#amazon-web-services), [DigitalOcean](#digitalocean), and [Google Cloud](#google-cloud) ## Use Lattice The [Lattice CLI `ltc`](https://github.com/cloudfoundry-incubator/lattice/tree/master/ltc) provides a command line interface for launching docker-based applications. More complex workloads can be constructed and submitted directly to Lattice's Receptor API which is fully documented [here](https://github.com/cloudfoundry-incubator/receptor/blob/master/doc/README.md). 
# Local Deployment

## Launching with Vagrant

Make sure you have [Vagrant](https://vagrantup.com/) installed, then:

```bash
git clone git@github.com:cloudfoundry-incubator/lattice.git
cd lattice
git checkout <VERSION>
vagrant up
```

This spins up a virtual environment that is accessible at `192.168.11.11`. Here, `VERSION` refers to the tagged version you wish to deploy. These tagged versions are known to be stable.

Use the [Lattice CLI](https://github.com/cloudfoundry-incubator/lattice/tree/master/ltc) to target Lattice:

```bash
ltc target 192.168.11.11.xip.io
```

## Using Different Providers

You can do this with either VirtualBox or VMware Fusion (version 7 or later):

VirtualBox:

```bash
vagrant up --provider virtualbox
```

VMware Fusion:

```bash
vagrant up --provider vmware_fusion
```

### Networking Conflicts

If you are trying to run both the VirtualBox and VMware providers on the same machine, you'll need to run them on different private networks (subnets) that do not conflict.

Set the System IP to an address that does not conflict with the host networking configuration by passing the LATTICE_SYSTEM_IP environment variable to the vagrant up command:

```bash
LATTICE_SYSTEM_IP=192.168.80.100 vagrant up
ltc target 192.168.80.100.xip.io
```

## Updating

Currently, Lattice does not support updating via provision. So to update, you have to destroy the box and bring it back up:

```bash
vagrant destroy --force
git pull
vagrant up
```

## Troubleshooting

- xip.io is sometimes flaky, resulting in "no such host" errors.
- The alternative that we have found is to use dnsmasq configured to resolve all xip.io addresses to 192.168.11.11.
- This also requires creating a /etc/resolvers/io file that points to 127.0.0.1. See further instructions [here](http://passingcuriosity.com/2013/dnsmasq-dev-osx/).

## Running Vagrant with a custom Lattice tar

By default, `vagrant up` will fetch the latest Lattice binary tarball.
To use a particular tarball: ```bash VAGRANT_LATTICE_TAR_PATH=/path/to/lattice.tgz vagrant up ``` # Clustered Deployment This repository contains several [Terraform](https://www.terraform.io/) templates to help you deploy on your choice of IaaS. To deploy Lattice in this way you will need: * [Terraform](https://www.terraform.io/intro/getting-started/install.html) == 0.3.7 installed on your machine (Terraform 0.4.0 is currently unsupported, we are looking into fixing this) * Credentials for your choice of IaaS ## Deploying Here are some step-by-step instructions for deploying a Lattice cluster via Terraform: 1. Visit the [Lattice GitHub Releases page](https://github.com/cloudfoundry-incubator/lattice/releases#) 2. Select the Lattice version you wish to deploy and download the Terraform example file for your target platform. The filename will be `lattice.<platform>.tf` 3. Create an empty folder and place the `lattice.<platform>.tf` file in that folder. 4. Update the `lattice.<platform>.tf` by filling in the values for the variables. Instructions for each supported platform are here: - [Amazon Web Services](https://github.com/cloudfoundry-incubator/lattice/blob/master/terraform/aws/README.md#configure) - [DigitalOcean](https://github.com/cloudfoundry-incubator/lattice/blob/master/terraform/digitalocean/README.md#configure) - [Google Cloud](https://github.com/cloudfoundry-incubator/lattice/blob/master/terraform/google/README.md#configure) 5. Run the following commands in the folder containing the `lattice.<platform>.tf` file ```bash terraform get -update terraform apply ``` This will deploy the cluster. Upon success, terraform will print the Lattice target: ``` Outputs: lattice_target = x.x.x.x.xip.io lattice_username = xxxxxxxx lattice_password = xxxxxxxx ``` which you can use with the Lattice CLI to `ltc target x.x.x.x.xip.io`. Terraform will generate a `terraform.tfstate` file. 
This file describes the cluster that was built - keep it around in order to modify/tear down the cluster. ## Destroying To destroy the cluster go to the folder containing the `terraform.tfstate` file and run: ```bash terraform destroy ``` # Contributing In the spirit of [free software](http://www.fsf.org/licensing/essays/free-sw.html), **everyone** is encouraged to help improve this project. Here are some ways *you* can contribute: * by using alpha, beta, and prerelease versions * by reporting bugs * by suggesting new features * by writing or editing documentation * by writing specifications * by writing code (**no patch is too small**: fix typos, add comments, clean up inconsistent whitespace) * by refactoring code * by closing [issues](https://github.com/cloudfoundry-incubator/lattice/issues) * by reviewing patches Also see the [Development Readme](https://github.com/cloudfoundry-incubator/lattice/tree/master/development-readme.md) ## Development Workflow Development work should be done on the develop branch. As a general rule, only CI should commit to master. ## Submitting an Issue We use the [GitHub issue tracker](https://github.com/cloudfoundry-incubator/lattice/issues) to track bugs and features. Before submitting a bug report or feature request, check to make sure it hasn't already been submitted. You can indicate support for an existing issue by voting it up. When submitting a bug report, please include a [Gist](http://gist.github.com/) that includes a stack trace and any details that may be necessary to reproduce the bug including the Lattice version. ## Submitting a Pull Request 1. Propose a change by opening an issue. 2. Fork the project. 3. Create a topic branch. 4. Implement your feature or bug fix. 5. Commit and push your changes. 6. Submit a pull request. # Copyright See [LICENSE](https://github.com/cloudfoundry-incubator/lattice/blob/master/LICENSE) for details. Copyright (c) 2015 [Pivotal Software, Inc](http://www.pivotal.io/).
{ "content_hash": "e9897b0c3227ba093e66807b65a26909", "timestamp": "", "source": "github", "line_count": 197, "max_line_length": 240, "avg_line_length": 38.89340101522843, "alnum_prop": 0.764682850430697, "repo_name": "mikegehard/lattice", "id": "cd7bb49f3a185f015bafe1882e85807b3503a249", "size": "7702", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "345389" }, { "name": "Shell", "bytes": "43093" } ], "symlink_target": "" }
#include "SquashInstancedGeometryFilter.hpp"
#include <sirikata/mesh/Meshdata.hpp>
#include <boost/lexical_cast.hpp>
#include <algorithm>
#include <cassert>

namespace Sirikata {
namespace Mesh {

namespace {

// An ordered list of materials used to render an object. This is essentially a
// resolved and flattened version of the material binding map.
class MaterialList : public std::vector<uint32> {
  public:
    MaterialList(const GeometryInstance::MaterialBindingMap& mbm) {
        // Material binding maps are unordered and their keys need not start
        // at 0, so first collect the source indices, sort them, and then emit
        // the bound materials in that canonical order.
        std::vector<uint32> source_ids;
        for(GeometryInstance::MaterialBindingMap::const_iterator mbm_it = mbm.begin(); mbm_it != mbm.end(); mbm_it++)
            source_ids.push_back(mbm_it->first);
        std::sort(source_ids.begin(), source_ids.end());

        for(uint32 i = 0; i < source_ids.size(); i++)
            this->push_back( mbm.find(source_ids[i])->second );
    }

    // Strict weak ordering so MaterialList can key a std::map.
    //
    // FIX: the previous hand-rolled loop iterated up to this->size() while
    // indexing rhs, reading past the end of rhs whenever *this was the longer
    // list, and was non-const (so it could not be invoked through std::less on
    // const map keys). Lexicographical comparison is safe for unequal lengths
    // and matches std::vector's own operator<, so existing map ordering is
    // preserved.
    bool operator<(const MaterialList& rhs) const {
        return std::lexicographical_compare(begin(), end(), rhs.begin(), rhs.end());
    }
};

}

// The args string is currently unused; accepted for filter-factory symmetry.
SquashInstancedGeometryFilter::SquashInstancedGeometryFilter(const String& args) {
}

// Squash each (non-animated) Meshdata in the input: all instanced geometry
// sharing the same resolved material list is merged into a single
// SubMeshGeometry with each instance's transform pre-applied, reducing the
// output to one geometry instance per unique material list. Animated meshes
// and non-Meshdata visuals are passed through untouched.
FilterDataPtr SquashInstancedGeometryFilter::apply(FilterDataPtr input) {
    MutableFilterDataPtr output(new FilterData());

    for(FilterData::const_iterator md_it = input->begin(); md_it != input->end(); md_it++) {
        VisualPtr vis = *md_it;
        MeshdataPtr md( std::tr1::dynamic_pointer_cast<Meshdata>(vis) );
        // Only know how to process Meshdata and those that don't have
        // animations; everything else is passed through unchanged.
        if (!md || md->hasAnimations) {
            output->push_back(vis);
            continue;
        }

        // Our basic approach for each Meshdata is to generate one
        // SubMeshGeometry for each set of materials. We scan through all
        // instanced geometry and determine which group it belongs to based on
        // its material. Then, we take the original SubMeshGeometry, apply the
        // necessary transformation, and add the transformed version to the end
        // of the new merged SubMeshGeometry.

        // This will track our new, merged SubMeshGeometries. These are unique
        // based on the materials used to render them.
        typedef std::map<MaterialList, SubMeshGeometry> MergedMeshMap;
        MergedMeshMap merged_meshes;
        // And this will track the new material binding map for each merged
        // SubMeshGeometry
        typedef std::map<MaterialList, GeometryInstance::MaterialBindingMap> MergedMaterialMapMap;
        MergedMaterialMapMap merged_material_maps;

        // This will hold our new, generated geometry. A few fields are copied
        // over directly since they won't change.
        MeshdataPtr new_md(new Meshdata());
        new_md->textures = md->textures;
        //new_md->lights = md->lights;
        new_md->materials = md->materials;
        new_md->uri = md->uri;
        new_md->hash = md->hash;
        new_md->id = md->id;
        new_md->progressiveData = md->progressiveData;
        //new_md->lightInstances = md->lightInstances;
        // Old globalTransform will have already been applied
        new_md->globalTransform = Matrix4x4f::identity();
        //new_md->joints = md->joints;

        // Scan through all instanced geometry, building up the new models. The
        // iterator yields each instance along with its fully-resolved
        // node-hierarchy transform.
        Meshdata::GeometryInstanceIterator geoinst_it = md->getGeometryInstanceIterator();
        uint32 geoinst_idx;
        Matrix4x4f pos_xform;
        while( geoinst_it.next(&geoinst_idx, &pos_xform) ) {
            GeometryInstance& geo_inst = md->instances[geoinst_idx];
            SubMeshGeometry& geo = md->geometry[ geo_inst.geometryIndex ];

            MaterialList matlist(geo_inst.materialBindingMap);
            if (merged_meshes.find(matlist) == merged_meshes.end()) {
                merged_meshes[matlist] = SubMeshGeometry();
                merged_meshes[matlist].name = "mesh" + boost::lexical_cast<String>(merged_meshes.size()) + "-geometry";
                merged_material_maps[matlist] = GeometryInstance::MaterialBindingMap();
            }
            SubMeshGeometry& merged_mesh = merged_meshes[matlist];
            GeometryInstance::MaterialBindingMap& merged_material_map = merged_material_maps[matlist];

            // Tack transformed info onto the end of this primitive. We need to
            // transform each one according to this instances positioning.
            merged_mesh.append(geo, pos_xform);

            // Add new material mapping info. Instances grouped together share
            // the same MaterialList, so any overlapping bindings must agree.
            for(GeometryInstance::MaterialBindingMap::iterator mm_it = geo_inst.materialBindingMap.begin(); mm_it != geo_inst.materialBindingMap.end(); mm_it++) {
                if ( merged_material_map.find(mm_it->first) == merged_material_map.end())
                    merged_material_map[mm_it->first] = mm_it->second;
                assert(merged_material_map[mm_it->first] == mm_it->second);
            }
        }

        // Create one root node with no transformation to hold these
        // pre-transformed aggregate objects
        Node rn(Matrix4x4f::identity());
        NodeIndex root_node_idx = new_md->nodes.size();
        new_md->nodes.push_back(rn);
        new_md->rootNodes.push_back(root_node_idx);

        // Set up new geometries, instance geometries, and nodes
        for(MergedMeshMap::iterator merged_it = merged_meshes.begin(); merged_it != merged_meshes.end(); merged_it++) {
            SubMeshGeometry& merged_mesh = merged_it->second;
            merged_mesh.recomputeBounds();

            int geo_idx = new_md->geometry.size();
            new_md->geometry.push_back(merged_mesh);

            GeometryInstance geo_inst;
            geo_inst.materialBindingMap = merged_material_maps[merged_it->first];
            geo_inst.geometryIndex = geo_idx;
            geo_inst.parentNode = root_node_idx;
            new_md->instances.push_back(geo_inst);
        }

        output->push_back(new_md);
    }

    return output;
}

} // namespace Mesh
} // namespace Sirikata
{ "content_hash": "65613c4e76892470b2a1c4d99e608a50", "timestamp": "", "source": "github", "line_count": 148, "max_line_length": 162, "avg_line_length": 42.58108108108108, "alnum_prop": 0.6339257378609965, "repo_name": "sirikata/sirikata", "id": "8b174bb881afbf22f9736f80361f9eb033197958", "size": "7937", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "libmesh/plugins/common-filters/SquashInstancedGeometryFilter.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "402297" }, { "name": "C++", "bytes": "13009496" }, { "name": "CMake", "bytes": "287559" }, { "name": "CSS", "bytes": "4961" }, { "name": "EmberScript", "bytes": "951536" }, { "name": "GAP", "bytes": "83739" }, { "name": "HTML", "bytes": "7887" }, { "name": "JavaScript", "bytes": "328077" }, { "name": "Makefile", "bytes": "3874" }, { "name": "PHP", "bytes": "5259" }, { "name": "Perl", "bytes": "503" }, { "name": "Protocol Buffer", "bytes": "2030" }, { "name": "Python", "bytes": "251439" }, { "name": "Shell", "bytes": "12168" } ], "symlink_target": "" }
// Demo view controller subclassing IMOStyledTableViewController; the name
// suggests it showcases a plain-style table configuration — the styling
// itself is presumably supplied in the implementation file (TODO confirm).
@interface ICExampleStylePlainViewController : IMOStyledTableViewController

@end
{ "content_hash": "cc4cb4e12b04a4c69d4df78a18890de5", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 75, "avg_line_length": 27.333333333333332, "alnum_prop": 0.8902439024390244, "repo_name": "fredericcormier/IMOStyledTableViewController", "id": "d6f451aa5d6a070e408ee18c130cceda84ec4c91", "size": "342", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Demo/IMOStyledTableViewControllerDemo/ICExampleStylePlainViewController.h", "mode": "33188", "license": "mit", "language": [ { "name": "Objective-C", "bytes": "70088" }, { "name": "Ruby", "bytes": "713" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8" ?> <rss version="2.0" xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:wikidot="http://www.wikidot.com/rss-namespace"> <channel> <title>Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with</link> <description>Posts in the discussion thread &quot;Cana seeking rivals/friends to spar with&quot; - Hello ppl</description> <copyright></copyright> <lastBuildDate>Sun, 10 Jul 2022 05:12:29 +0000</lastBuildDate> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-236577</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-236577</link> <description></description> <pubDate>Fri, 08 Aug 2008 12:57:15 +0000</pubDate> <wikidot:authorUserId>102351</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>Re opened see first post</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-147847</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-147847</link> <description></description> <pubDate>Fri, 18 Apr 2008 07:53:43 +0000</pubDate> <wikidot:authorUserId>102351</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>Thats it ppl I ve got no more spaces since Strider is pending so thankies one and all</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-147380</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-147380</link> <description></description> <pubDate>Thu, 17 Apr 2008 15:29:14 +0000</pubDate> <wikidot:authorName>BrightMorn</wikidot:authorName> <wikidot:authorUserId>89749</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>Taliesan &amp; Darkshard</p> ]]> 
</content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-147371</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-147371</link> <description></description> <pubDate>Thu, 17 Apr 2008 15:17:41 +0000</pubDate> <wikidot:authorName>Anonymous</wikidot:authorName> <content:encoded> <![CDATA[ <p>Ayako added</p> <p>4 left!!</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-147225</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-147225</link> <description></description> <pubDate>Thu, 17 Apr 2008 10:53:46 +0000</pubDate> <wikidot:authorName>Anonymous</wikidot:authorName> <content:encoded> <![CDATA[ <p>Ayako added Cana</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-147168</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-147168</link> <description></description> <pubDate>Thu, 17 Apr 2008 08:14:36 +0000</pubDate> <wikidot:authorUserId>102351</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>All added thankies</p> <p>25 places taken up 5 left!!!</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-146956</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-146956</link> <description></description> <pubDate>Wed, 16 Apr 2008 22:33:55 +0000</pubDate> <wikidot:authorName>Anonymous</wikidot:authorName> <content:encoded> <![CDATA[ <p>Nikolai, Salocin and Dranem added Cana.</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-146297</guid> <title>Re: Cana seeking rivals/friends to spar 
with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-146297</link> <description></description> <pubDate>Wed, 16 Apr 2008 11:03:54 +0000</pubDate> <wikidot:authorUserId>102351</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>And Cana added ave maria thankies look fprward to sparring</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-145975</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-145975</link> <description></description> <pubDate>Tue, 15 Apr 2008 22:45:06 +0000</pubDate> <wikidot:authorName>ave maria</wikidot:authorName> <wikidot:authorUserId>112444</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>ave maria added Cana !…</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-145678</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-145678</link> <description></description> <pubDate>Tue, 15 Apr 2008 15:54:44 +0000</pubDate> <wikidot:authorUserId>102351</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>Toshiro added thankies</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-145269</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-145269</link> <description></description> <pubDate>Tue, 15 Apr 2008 05:32:16 +0000</pubDate> <wikidot:authorName>Silvos</wikidot:authorName> <wikidot:authorUserId>92278</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>Toshiro added Cana</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-143574</guid> <title>Re: Cana seeking rivals/friends to spar with</title> 
<link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-143574</link> <description></description> <pubDate>Sat, 12 Apr 2008 11:23:37 +0000</pubDate> <wikidot:authorUserId>102351</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>List updated ppl and Thankies</p> <p>Still got 11 (9 if pending ppl answer) places so any1 who wants to spar come on down</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-136622</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-136622</link> <description></description> <pubDate>Wed, 02 Apr 2008 07:59:05 +0000</pubDate> <wikidot:authorUserId>102351</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>Cana added tom black</p> <p>Thankies ^.^</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-136107</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-136107</link> <description></description> <pubDate>Tue, 01 Apr 2008 17:15:19 +0000</pubDate> <wikidot:authorName>tom black</wikidot:authorName> <wikidot:authorUserId>87181</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>please add me,my name is tom black<br /> and as a friend,not a rivel (-:</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-135934</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-135934</link> <description></description> <pubDate>Tue, 01 Apr 2008 12:43:50 +0000</pubDate> <wikidot:authorUserId>102351</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>naito and geff check<br /> thanks</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-135202</guid> <title>Re: Cana seeking 
rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-135202</link> <description></description> <pubDate>Mon, 31 Mar 2008 16:41:04 +0000</pubDate> <wikidot:authorName>Geff</wikidot:authorName> <wikidot:authorUserId>101148</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>geff added Cana</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-134601</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-134601</link> <description></description> <pubDate>Sun, 30 Mar 2008 19:47:31 +0000</pubDate> <wikidot:authorName>naito</wikidot:authorName> <wikidot:authorUserId>95487</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>naito added Cana</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-134123</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-134123</link> <description></description> <pubDate>Sat, 29 Mar 2008 19:52:40 +0000</pubDate> <wikidot:authorUserId>102351</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>Your added Kyaebe Thanks</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-133172</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-133172</link> <description></description> <pubDate>Fri, 28 Mar 2008 10:21:21 +0000</pubDate> <wikidot:authorName>Kamaitatchi</wikidot:authorName> <wikidot:authorUserId>90780</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>Kyaebe added Cana</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-133125</guid> <title>Re: Cana seeking rivals/friends to spar with</title> 
<link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-133125</link> <description></description> <pubDate>Fri, 28 Mar 2008 08:50:30 +0000</pubDate> <wikidot:authorUserId>102351</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>OKs your added kor thanks</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-133119</guid> <title>Re: Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-133119</link> <description></description> <pubDate>Fri, 28 Mar 2008 08:39:13 +0000</pubDate> <wikidot:authorName>Anonymous</wikidot:authorName> <content:encoded> <![CDATA[ <p>add Cana , my alt is kor</p> ]]> </content:encoded> </item> <item> <guid>http://bvs.wikidot.com/forum/t-49431#post-132634</guid> <title>Cana seeking rivals/friends to spar with</title> <link>http://bvs.wikidot.com/forum/t-49431/cana-seeking-rivals-friends-to-spar-with#post-132634</link> <description></description> <pubDate>Thu, 27 Mar 2008 16:59:41 +0000</pubDate> <wikidot:authorUserId>102351</wikidot:authorUserId> <content:encoded> <![CDATA[ <p>I've gotten to 608 fights so far and I got Bruce jr lv 2</p> <p>Abovewas s1 now s2 and a lot of my sparring partners have left the game so I have spaces left and I am aiming for lv2 again</p> <p>this is my list so far</p> <p>tPad<br /> skip08<br /> chinchilla<br /> Walms<br /> White Wing<br /> Jupiah<br /> kor<br /> Darkslayer<br /> Kyaebe<br /> kibibyte<br /> naito<br /> geff<br /> tom black<br /> Keiran<br /> Samurai<br /> Thark<br /> Lear<br /> Toshiro<br /> Dranem<br /> Salocin<br /> Nikolai<br /> Taliesan<br /> Dresden</p> <p><span style="text-decoration: underline;">pending</span><br /> Gandalf</p> <p>Thanks ppl</p> ]]> </content:encoded> </item> </channel> </rss>
{ "content_hash": "9e32a27b2ae3a3f39bcedfa518b858ac", "timestamp": "", "source": "github", "line_count": 254, "max_line_length": 640, "avg_line_length": 51.94881889763779, "alnum_prop": 0.6587343690791967, "repo_name": "tn5421/tn5421.github.io", "id": "311c1d789a757eb9977d6d9ff5a4ac186a37554d", "size": "13197", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "bvs.wikidot.com/feed/forum/t-49431.xml", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "400301089" } ], "symlink_target": "" }
import { applyMiddleware, compose, createStore } from 'redux'; import { install } from 'redux-loop'; import { initialState, reducer } from '../reducers'; export default function createLoopStore(state: any = {}) { const enhancer = compose( applyMiddleware(...[]), install(), ); return createStore( reducer, { ...initialState, ...state }, enhancer ); }
{ "content_hash": "335e3abcaf4b5dc35c21c7fcf1267db7", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 62, "avg_line_length": 22.470588235294116, "alnum_prop": 0.643979057591623, "repo_name": "DrPandemic/TwiolioRN", "id": "416664f5f8b76bb390fafbc5ef80c188682d4be6", "size": "392", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/test_helpers/store_helper.js", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "1688" }, { "name": "JavaScript", "bytes": "110093" }, { "name": "Objective-C", "bytes": "4411" }, { "name": "Python", "bytes": "1726" } ], "symlink_target": "" }
# Balena base image: Python 3.9 build variant for ARMv7 devices on Alpine 3.12.
# Installs a prebuilt CPython, pip/setuptools, virtualenv, and dbus-python,
# then runs the balena stack self-test and embeds image metadata.
FROM balenalib/generic-armv7ahf-alpine:3.12-build

# remove several traces of python
RUN apk del python*

# http://bugs.python.org/issue19846
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8

# key 63C7CC90: public key "Simon McVittie <smcv@pseudorandom.co.uk>" imported
# key 3372DCFA: public key "Donald Stufft (dstufft) <donald@stufft.io>" imported
RUN gpg --keyserver keyring.debian.org --recv-keys 4DE8FF2A63C7CC90 \
	&& gpg --keyserver keyserver.ubuntu.com --recv-key 6E3CBCE93372DCFA \
	&& gpg --keyserver keyserver.ubuntu.com --recv-keys 0x52a43a1e4b77b059

# point Python at a system-provided certificate database. Otherwise, we might hit CERTIFICATE_VERIFY_FAILED.
# https://www.python.org/dev/peps/pep-0476/#trust-database
ENV SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt

ENV PYTHON_VERSION 3.9.7

# if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value '<VERSION>'"
ENV PYTHON_PIP_VERSION 21.2.4

ENV SETUPTOOLS_VERSION 58.0.0

# Fetch the prebuilt interpreter tarball, verify its checksum, unpack it over
# the image root, bootstrap pip if missing, pin pip/setuptools, and strip
# test dirs and bytecode caches to shrink the layer.
RUN set -x \
	&& curl -SLO "http://resin-packages.s3.amazonaws.com/python/v$PYTHON_VERSION/Python-$PYTHON_VERSION.linux-alpine-armv7hf-libffi3.3.tar.gz" \
	&& echo "ac2bb1a87f649ab92d472e5fa6899205dc4a49d5ada39bb6a6a0702c1b8b1cfa Python-$PYTHON_VERSION.linux-alpine-armv7hf-libffi3.3.tar.gz" | sha256sum -c - \
	&& tar -xzf "Python-$PYTHON_VERSION.linux-alpine-armv7hf-libffi3.3.tar.gz" --strip-components=1 \
	&& rm -rf "Python-$PYTHON_VERSION.linux-alpine-armv7hf-libffi3.3.tar.gz" \
	&& if [ ! -e /usr/local/bin/pip3 ]; then : \
		&& curl -SLO "https://raw.githubusercontent.com/pypa/get-pip/430ba37776ae2ad89f794c7a43b90dc23bac334c/get-pip.py" \
		&& echo "19dae841a150c86e2a09d475b5eb0602861f2a5b7761ec268049a662dbd2bd0c get-pip.py" | sha256sum -c - \
		&& python3 get-pip.py \
		&& rm get-pip.py \
	; fi \
	&& pip3 install --no-cache-dir --upgrade --force-reinstall pip=="$PYTHON_PIP_VERSION" setuptools=="$SETUPTOOLS_VERSION" \
	&& find /usr/local \
		\( -type d -a -name test -o -name tests \) \
		-o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \
		-exec rm -rf '{}' + \
	&& cd / \
	&& rm -rf /usr/src/python ~/.cache

# install "virtualenv", since the vast majority of users of this image will want it
RUN pip3 install --no-cache-dir virtualenv

ENV PYTHON_DBUS_VERSION 1.2.18

# install dbus-python dependencies
RUN apk add --no-cache \
		dbus-dev \
		dbus-glib-dev

# install dbus-python (source build, GPG-verified against the keys imported above)
RUN set -x \
	&& mkdir -p /usr/src/dbus-python \
	&& curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz" -o dbus-python.tar.gz \
	&& curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz.asc" -o dbus-python.tar.gz.asc \
	&& gpg --verify dbus-python.tar.gz.asc \
	&& tar -xzC /usr/src/dbus-python --strip-components=1 -f dbus-python.tar.gz \
	&& rm dbus-python.tar.gz* \
	&& cd /usr/src/dbus-python \
	&& PYTHON_VERSION=$(expr match "$PYTHON_VERSION" '\([0-9]*\.[0-9]*\)') ./configure \
	&& make -j$(nproc) \
	&& make install -j$(nproc) \
	&& cd / \
	&& rm -rf /usr/src/dbus-python

# make some useful symlinks that are expected to exist
RUN cd /usr/local/bin \
	&& ln -sf pip3 pip \
	&& { [ -e easy_install ] || ln -s easy_install-* easy_install; } \
	&& ln -sf idle3 idle \
	&& ln -sf pydoc3 pydoc \
	&& ln -sf python3 python \
	&& ln -sf python3-config python-config

CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]

# Run balena's Python stack self-test at build time, then delete it.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@python.sh" \
	&& echo "Running test-stack@python" \
	&& chmod +x test-stack@python.sh \
	&& bash test-stack@python.sh \
	&& rm -rf test-stack@python.sh

# Record human-readable image details for the `balena-info` command.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Alpine Linux 3.12 \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v3.9.7, Pip v21.2.4, Setuptools v58.0.0 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info

# Shim /bin/sh so each shell invocation prints balena-info first, then
# restores busybox's sh and delegates to it.
RUN echo $'#!/bin/bash\nbalena-info\nbusybox ln -sf /bin/busybox /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& ln -f /bin/sh /bin/sh.real \
	&& ln -f /bin/sh-shim /bin/sh
{ "content_hash": "cb2870da8aef51f8a77b22dacf0bfc86", "timestamp": "", "source": "github", "line_count": 93, "max_line_length": 715, "avg_line_length": 51.81720430107527, "alnum_prop": 0.7078231998339904, "repo_name": "resin-io-library/base-images", "id": "e66f00bf7ac85e6eab956967edebaac5bbc3ce41", "size": "4840", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "balena-base-images/python/generic-armv7ahf/alpine/3.12/3.9.7/build/Dockerfile", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "71234697" }, { "name": "JavaScript", "bytes": "13096" }, { "name": "Shell", "bytes": "12051936" }, { "name": "Smarty", "bytes": "59789" } ], "symlink_target": "" }
import { UPDATE_CODE } from '../actions';

/**
 * Reducer for the source code currently held in the editor.
 *
 * On UPDATE_CODE the payload's `code` replaces the state wholesale; any
 * other action leaves the state untouched. Initial state is the empty string.
 */
export function currentSourceCode(state = '', action) {
  return action.type === UPDATE_CODE ? action.code : state;
}
{ "content_hash": "f3fc4e0f0aa3cc4d606194692c1fac25", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 55, "avg_line_length": 21.1, "alnum_prop": 0.6303317535545023, "repo_name": "yograterol/viperid", "id": "18e04abdff2761389687fceb3c3443e0f003fd0a", "size": "211", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "frontend/reducers/code.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "520" }, { "name": "JavaScript", "bytes": "27354" }, { "name": "Python", "bytes": "7134" }, { "name": "Shell", "bytes": "66" } ], "symlink_target": "" }
# Build the "cohorte/base-devices:1.0" Docker image by delegating to
# build_image.sh in the same directory.
#
# Usage: build_base-device.sh <docker-user> <docker-password> <docker-arm-host> <id-rsa>

# Absolute path of the directory containing this script.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Build context directory and image coordinates.
DIRECTORY="$DIR/base-devices"
DOCKER_REPOSITORY="cohorte/base-devices"
DOCKER_TAG="1.0"

# Positional arguments supplied by the caller (likely CI).
DOCKER_USER=$1
DOCKER_PASSWORD=$2
DOCKER_ARM_HOST=$3 # NOTE(review): read and echoed but never passed to build_image.sh — confirm intended
ID_RSA=$4 # NOTE(review): likewise unused below — confirm intended

# NOTE(review): echoing DOCKER_PASSWORD prints the secret into build logs.
echo "DOCKER_USER=$DOCKER_USER"
echo "DOCKER_PASSWORD=$DOCKER_PASSWORD"
echo "DOCKER_ARM_HOST=$DOCKER_ARM_HOST"
echo "ID_RSA=$ID_RSA"

# Target registry the image is pushed to.
DOCKER_REGISTRY="dr.cohorte.tech"

# Green [INFO] banner, then hand off to the shared build script.
echo -e "\x1B[1;32m[INFO] Building Image [$DOCKER_REPOSITORY:$DOCKER_TAG] located on [$DIRECTORY]\x1B[0m"
bash $DIR/build_image.sh "$DIRECTORY" "$DOCKER_REPOSITORY" "$DOCKER_TAG" "$DOCKER_USER" "$DOCKER_PASSWORD" "$DOCKER_REGISTRY"
{ "content_hash": "b788944c71e7c7fb2fb9bf102ac684de", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 125, "avg_line_length": 32.1578947368421, "alnum_prop": 0.7119476268412439, "repo_name": "isandlaTech/cohorte-platforms", "id": "048eab0b30fed15d9f107bae8284063ddc32c163", "size": "624", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cohorte-home/docker/build_base-device.sh", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "3772" }, { "name": "CSS", "bytes": "174119" }, { "name": "HTML", "bytes": "113064" }, { "name": "JavaScript", "bytes": "2611368" }, { "name": "Python", "bytes": "463194" }, { "name": "Shell", "bytes": "7442" } ], "symlink_target": "" }
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// NOTE(review): Rhino.PlugIns is not referenced by any attribute below —
// presumably kept because the Guid attribute doubles as the Rhino plug-in id
// (see comment on the Guid attribute); confirm before removing.
using Rhino.PlugIns;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Wind_GH")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Wind_GH")]
[assembly: AssemblyCopyright("Copyright © 2017")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("19233bac-718a-4504-b07a-3abf9c0f6050")] // This will also be the Guid of the Rhino plug-in

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
{ "content_hash": "af79f24b6853c45024d9d74c379a3c14", "timestamp": "", "source": "github", "line_count": 38, "max_line_length": 107, "avg_line_length": 38.421052631578945, "alnum_prop": 0.7417808219178083, "repo_name": "interopxyz/Aviary", "id": "f03ede3528349fe3569ca1f73a1dfdcf9751a823", "size": "1463", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Wind_GH/Properties/AssemblyInfo.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "2042749" } ], "symlink_target": "" }
<?php

namespace Immortal\Support;

use Immortal\Filesystem\Filesystem;
use Symfony\Component\Process\Process;
use Symfony\Component\Process\ProcessUtils;
use Symfony\Component\Process\PhpExecutableFinder;

class Composer
{
    /**
     * Filesystem used to probe for a local composer.phar.
     *
     * @var \Immortal\Filesystem\Filesystem
     */
    protected $files;

    /**
     * Directory Composer commands are executed in.
     *
     * @var string
     */
    protected $workingPath;

    /**
     * Create a new Composer manager instance.
     *
     * @param  \Immortal\Filesystem\Filesystem  $files
     * @param  string|null  $workingPath
     * @return void
     */
    public function __construct(Filesystem $files, $workingPath = null)
    {
        $this->workingPath = $workingPath;
        $this->files = $files;
    }

    /**
     * Regenerate the Composer autoloader files.
     *
     * @param  string  $extra  extra flags appended to "dump-autoload"
     * @return void
     */
    public function dumpAutoloads($extra = '')
    {
        $command = trim($this->findComposer().' dump-autoload '.$extra);

        $process = $this->getProcess();
        $process->setCommandLine($command);
        $process->run();
    }

    /**
     * Regenerate the optimized Composer autoloader files.
     *
     * @return void
     */
    public function dumpOptimized()
    {
        $this->dumpAutoloads('--optimize');
    }

    /**
     * Get the composer command for the environment.
     *
     * A composer.phar found in the working path is preferred (invoked through
     * the detected PHP binary); otherwise the global "composer" is used.
     *
     * @return string
     */
    protected function findComposer()
    {
        if ($this->files->exists($this->workingPath.'/composer.phar')) {
            $binary = ProcessUtils::escapeArgument((new PhpExecutableFinder)->find(false));

            return "{$binary} composer.phar";
        }

        return 'composer';
    }

    /**
     * Get a new Symfony process instance bound to the working path,
     * with no execution timeout.
     *
     * @return \Symfony\Component\Process\Process
     */
    protected function getProcess()
    {
        $process = new Process('', $this->workingPath);

        return $process->setTimeout(null);
    }

    /**
     * Set the working path used by the class.
     *
     * @param  string  $path
     * @return $this
     */
    public function setWorkingPath($path)
    {
        $this->workingPath = realpath($path);

        return $this;
    }
}
{ "content_hash": "362e2213b5f914c2242fe181ba9659da", "timestamp": "", "source": "github", "line_count": 102, "max_line_length": 87, "avg_line_length": 21.735294117647058, "alnum_prop": 0.5809652683806946, "repo_name": "zatxm120/framework", "id": "4c11f0d04911d0267b68f6213f44fc5aa8112a63", "size": "2217", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Immortal/Support/Composer.php", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "14569" }, { "name": "PHP", "bytes": "2384697" } ], "symlink_target": "" }
# Locate an OpenCV installation rooted at OpenCV_DIR.
# Prefers the OpenCVConfig.cmake shipped with OpenCV; otherwise falls back to
# guessing: probe include dirs, parse cvver.h for the version, and search the
# classic component libraries (cxcore, cv, ml, highgui, cvaux) under lib/.
find_path(OpenCV_DIR "OpenCVConfig.cmake" DOC "Root directory of OpenCV")

### Nathan, my OpenCV_DIR on my mac should be: "/Users/erlebach/Documents/src/OpenCV-2.4.3"
### How to set this up in CMake? I don't have the time to figure this out.
message("GE, OPENCV_DIR: ${OpenCV_DIR}")

##====================================================
## Find OpenCV libraries
##----------------------------------------------------
if(EXISTS "${OpenCV_DIR}")

    #When its possible to use the Config script use it.
    if(EXISTS "${OpenCV_DIR}/OpenCVConfig.cmake")

        ## Include the standard CMake script
        include("${OpenCV_DIR}/OpenCVConfig.cmake")

        ## Search for a specific version
        # CVLIB_SUFFIX is e.g. "243" for OpenCV 2.4.3; versioned library file
        # names embed this suffix.
        set(CVLIB_SUFFIX "${OpenCV_VERSION_MAJOR}${OpenCV_VERSION_MINOR}${OpenCV_VERSION_PATCH}")
        message("CVLIB_SUFFIX: ${CVLIB_SUFFIX}")

    #Otherwise it try to guess it.
    else(EXISTS "${OpenCV_DIR}/OpenCVConfig.cmake")
        set(OPENCV_LIB_COMPONENTS cxcore cv ml highgui cvaux)
        find_path(OpenCV_INCLUDE_DIR "cv.h" PATHS "${OpenCV_DIR}" PATH_SUFFIXES "include" "include/opencv" DOC "")

        if(EXISTS ${OpenCV_INCLUDE_DIR})
            include_directories(${OpenCV_INCLUDE_DIR})
        endif(EXISTS ${OpenCV_INCLUDE_DIR})

        #Find OpenCV version by looking at cvver.h
        file(STRINGS ${OpenCV_INCLUDE_DIR}/cvver.h OpenCV_VERSIONS_TMP REGEX "^#define CV_[A-Z]+_VERSION[ \t]+[0-9]+$")
        string(REGEX REPLACE ".*#define CV_MAJOR_VERSION[ \t]+([0-9]+).*" "\\1" OpenCV_VERSION_MAJOR ${OpenCV_VERSIONS_TMP})
        string(REGEX REPLACE ".*#define CV_MINOR_VERSION[ \t]+([0-9]+).*" "\\1" OpenCV_VERSION_MINOR ${OpenCV_VERSIONS_TMP})
        string(REGEX REPLACE ".*#define CV_SUBMINOR_VERSION[ \t]+([0-9]+).*" "\\1" OpenCV_VERSION_PATCH ${OpenCV_VERSIONS_TMP})
        set(OpenCV_VERSION ${OpenCV_VERSION_MAJOR}.${OpenCV_VERSION_MINOR}.${OpenCV_VERSION_PATCH} CACHE STRING "" FORCE)
        set(CVLIB_SUFFIX "${OpenCV_VERSION_MAJOR}${OpenCV_VERSION_MINOR}${OpenCV_VERSION_PATCH}")

    endif(EXISTS "${OpenCV_DIR}/OpenCVConfig.cmake")

    ## Initiate the variable before the loop
    # NOTE(review): this actually defines a variable literally named "GLOBAL"
    # holding "OpenCV_LIBS;" — presumably the intent was set(OpenCV_LIBS "").
    # Kept byte-identical; OpenCV_LIBS is still accumulated correctly below
    # because an unset variable expands to empty. Confirm before changing.
    set(GLOBAL OpenCV_LIBS "")
    set(OpenCV_FOUND_TMP true)

    ## Loop over each components
    foreach(__CVLIB ${OPENCV_LIB_COMPONENTS})

        # Look for both debug ("...d") and release variants of the versioned
        # library, restricted to ${OpenCV_DIR}/lib (NO_DEFAULT_PATH).
        find_library(OpenCV_${__CVLIB}_LIBRARY_DEBUG NAMES "${__CVLIB}${CVLIB_SUFFIX}d" "lib${__CVLIB}${CVLIB_SUFFIX}d" PATHS "${OpenCV_DIR}/lib" NO_DEFAULT_PATH)
        find_library(OpenCV_${__CVLIB}_LIBRARY_RELEASE NAMES "${__CVLIB}${CVLIB_SUFFIX}" "lib${__CVLIB}${CVLIB_SUFFIX}" PATHS "${OpenCV_DIR}/lib" NO_DEFAULT_PATH)

        #Remove the cache value
        set(OpenCV_${__CVLIB}_LIBRARY "" CACHE STRING "" FORCE)

        #both debug/release
        if(OpenCV_${__CVLIB}_LIBRARY_DEBUG AND OpenCV_${__CVLIB}_LIBRARY_RELEASE)
            set(OpenCV_${__CVLIB}_LIBRARY debug ${OpenCV_${__CVLIB}_LIBRARY_DEBUG} optimized ${OpenCV_${__CVLIB}_LIBRARY_RELEASE} CACHE STRING "" FORCE)
        #only debug
        elseif(OpenCV_${__CVLIB}_LIBRARY_DEBUG)
            set(OpenCV_${__CVLIB}_LIBRARY ${OpenCV_${__CVLIB}_LIBRARY_DEBUG} CACHE STRING "" FORCE)
        #only release
        elseif(OpenCV_${__CVLIB}_LIBRARY_RELEASE)
            set(OpenCV_${__CVLIB}_LIBRARY ${OpenCV_${__CVLIB}_LIBRARY_RELEASE} CACHE STRING "" FORCE)
        #no library found
        else()
            set(OpenCV_FOUND_TMP false)
        endif()

        #Add to the general list
        if(OpenCV_${__CVLIB}_LIBRARY)
            set(OpenCV_LIBS ${OpenCV_LIBS} ${OpenCV_${__CVLIB}_LIBRARY})
        endif(OpenCV_${__CVLIB}_LIBRARY)

    endforeach(__CVLIB)

    set(OpenCV_FOUND ${OpenCV_FOUND_TMP} CACHE BOOL "" FORCE)

else(EXISTS "${OpenCV_DIR}")
    set(ERR_MSG "Please specify OpenCV directory using OpenCV_DIR env. variable")
endif(EXISTS "${OpenCV_DIR}")
##====================================================

##====================================================
## Print message
##----------------------------------------------------
if(NOT OpenCV_FOUND)
    # make FIND_PACKAGE friendly
    if(NOT OpenCV_FIND_QUIETLY)
        if(OpenCV_FIND_REQUIRED)
            message(FATAL_ERROR "OpenCV required but some headers or libs not found. ${ERR_MSG}")
        else(OpenCV_FIND_REQUIRED)
            message(STATUS "WARNING: OpenCV was not found. ${ERR_MSG}")
        endif(OpenCV_FIND_REQUIRED)
    endif(NOT OpenCV_FIND_QUIETLY)
endif(NOT OpenCV_FOUND)
##====================================================

##====================================================
## Backward compatibility
##----------------------------------------------------
if(OpenCV_FOUND)
    option(OpenCV_BACKWARD_COMPA "Add some variable to make this script compatible with the other version of FindOpenCV.cmake" false)
    if(OpenCV_BACKWARD_COMPA)
        find_path(OpenCV_INCLUDE_DIRS "cv.h" PATHS "${OpenCV_DIR}" PATH_SUFFIXES "include" "include/opencv" DOC "Include directory")
        find_path(OpenCV_INCLUDE_DIR "cv.h" PATHS "${OpenCV_DIR}" PATH_SUFFIXES "include" "include/opencv" DOC "Include directory")
        set(OpenCV_LIBRARIES "${OpenCV_LIBS}" CACHE STRING "" FORCE)
    endif(OpenCV_BACKWARD_COMPA)
endif(OpenCV_FOUND)
##====================================================
{ "content_hash": "c7aaa6462e84381186d5a49307fd5345", "timestamp": "", "source": "github", "line_count": 116, "max_line_length": 170, "avg_line_length": 49.80172413793103, "alnum_prop": 0.5340141942184525, "repo_name": "mathnathan/EEMD", "id": "113cef1a301c0a28bd3fbfb9c74fe2c4e791fe79", "size": "7449", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cmake/find/FindOpenCV.cmake", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "C", "bytes": "203301" }, { "name": "C++", "bytes": "155728" }, { "name": "CMake", "bytes": "44026" }, { "name": "Fortran", "bytes": "35959" }, { "name": "Makefile", "bytes": "35265" }, { "name": "Shell", "bytes": "198" } ], "symlink_target": "" }
package org.freethinking.keyphrase;

import java.util.Map;

/**
 * Mutable value holder for the statistics tracked per key phrase:
 * an occurrence count, a position value, and the raw phrase variants
 * mapped to per-variant counts.
 */
public class KeyPhraseStats {

    // Occurrence count for the phrase.
    private long count;
    // Position of the phrase — presumably its offset/rank in the source text;
    // TODO confirm semantics with callers.
    private long position;
    // Raw surface forms of the phrase mapped to a per-form long value
    // (presumably a frequency — confirm with callers).
    private Map<String, Long> rawPhrases;

    /** @return the occurrence count */
    public long getCount() {
        return count;
    }

    /** @param count the occurrence count to set */
    public void setCount(long count) {
        this.count = count;
    }

    /** @return the position value */
    public long getPosition() {
        return position;
    }

    /** @param position the position value to set */
    public void setPosition(long position) {
        this.position = position;
    }

    /** @return the raw phrase variants with their associated values */
    public Map<String, Long> getRawPhrases() {
        return rawPhrases;
    }

    /** @param rawPhrases the raw phrase variants to set */
    public void setRawPhrases(Map<String, Long> rawPhrases) {
        this.rawPhrases = rawPhrases;
    }
}
{ "content_hash": "4c469071336693d9c6616fd926f56187", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 61, "avg_line_length": 18.771428571428572, "alnum_prop": 0.6301369863013698, "repo_name": "krusheel/KeyphraseExtraction", "id": "0b146afaf0814c768775759c319818d8d3c82d82", "size": "657", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/org/freethinking/keyphrase/KeyPhraseStats.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "22827" } ], "symlink_target": "" }
<instance description="" template="weapon"> <group name="weapon_bag"> <group name="accuracy"> <float name="near" value="0" /> <float name="far" value="0" /> <float name="mid" value="0" /> </group> <group name="aim"> <group name="fire_aim_time"> <float name="max" value="0.5" /> <float name="min" value="0.125" /> </group> <float name="post_firing_aim_time" value="0" /> <float name="post_firing_cooldown_interval" value="0" /> <group name="ready_aim_time"> <float name="max" value="0.5" /> <float name="min" value="0.125" /> </group> <group name="aim_time_multiplier"> <float name="near" value="1" /> <float name="far" value="1" /> <float name="mid" value="1" /> </group> </group> <group name="anim_table"> <string name="cooldown_time_name" value="" /> <string name="state_name" value="" /> <string name="track_horizontal" value="" /> <string name="track_horizontal_speed" value="" /> <string name="track_vertical" value="" /> <string name="track_vertical_speed" value="" /> <string name="variety_name" value="" /> <string name="visibility_name" value="rockets_visible" /> <string name="target_range_name" value="" /> </group> <group name="area_effect"> <group name="accuracy"> <float name="far" value="5" /> <float name="near" value="5" /> <float name="mid" value="5" /> </group> <group name="area_info"> <float name="angle_left" value="0" /> <float name="angle_right" value="0" /> <enum name="area_type" value="circle" /> <float name="radius" value="6" /> </group> <group name="damage"> <float name="far" value="0.05" /> <float name="near" value="1" /> <float name="mid" value="0.15" /> </group> <group name="damage_friendly"> <float name="far" value="0.025" /> <float name="near" value="0.5" /> <float name="mid" value="0.075" /> </group> <group name="distance"> <float name="far" value="4.5" /> <float name="near" value="1.5" /> <float name="mid" value="3" /> </group> <bool name="has_friendly_fire" value="True" /> <bool name="can_harm_shooter" value="False" /> <group name="suppression"> 
<float name="far" value="0.1" /> <float name="near" value="0.2" /> <float name="mid" value="0.15" /> </group> <group name="suppression_friendly"> <float name="far" value="0.1" /> <float name="near" value="0.2" /> <float name="mid" value="0.15" /> </group> <bool name="damage_all_in_hold" value="True" /> <template_reference name="aoe_penetration" value="tables\range_table"> <float name="far" value="255.15" /> <float name="mid" value="283.5" /> <float name="near" value="315" /> </template_reference> <group name="building_damage"> <float name="near" value="1" /> <float name="mid" value="1" /> <float name="far" value="1" /> </group> <instance_reference name="weapon_building_damage" value="weapon_building_damage\aoe_profile_no_change" /> <enum name="aoe_origin_and_direction" value="hit_position_and_direction" /> </group> <group name="behaviour"> <bool name="aa_weapon" value="False" /> <bool name="aa_weapon_shoot_through" value="False" /> <bool name="artillery_force_obey_los" value="False" /> <bool name="attack_team_weapon_user" value="False" /> <bool name="can_be_offhanded" value="False" /> <bool name="can_be_substituted" value="False" /> <float name="combat_slot_offset" value="0" /> <bool name="enable_auto_target_search" value="False" /> <bool name="fire_at_building_combat_slot" value="False" /> <float name="ground_hit_rate" value="1" /> <bool name="ignore_shot_blocking" value="True" /> <bool name="non_moving_setup" value="False" /> <bool name="point_blank" value="False" /> <bool name="prevents_prone" value="False" /> <bool name="reset_rotation_on_teardown" value="False" /> <bool name="share_parent_anim" value="False" /> <bool name="single_handed_weapon" value="False" /> <bool name="substitute_weapon" value="False" /> <bool name="support_weapon" value="False" /> <bool name="surprises_idle" value="False" /> <bool name="piercing" value="False" /> <float name="reaction_radius" value="8" /> <bool name="can_be_pilfered" value="True" /> <bool name="causes_combat" value="True" 
/> <bool name="can_abort_winddown" value="False" /> <enum name="reaction_type" value="normal" /> <bool name="ignore_relations" value="False" /> <template_reference name="wants_prone_firing_option" value="options\none"> </template_reference> <enum name="attack_ground_type" value="outside_hold_only" /> </group> <group name="burst"> <bool name="can_burst" value="True" /> <group name="duration"> <float name="max" value="2" /> <float name="min" value="1" /> </group> <group name="incremental_target_table"> <float name="accuracy_multiplier" value="1" /> <group name="search_radius"> <float name="far" value="10" /> <float name="near" value="10" /> <float name="mid" value="10" /> </group> </group> <group name="rate_of_fire"> <float name="max" value="1" /> <float name="min" value="1" /> </group> <group name="duration_multiplier"> <float name="near" value="1" /> <float name="far" value="1" /> <float name="mid" value="1" /> </group> <group name="rate_of_fire_multiplier"> <float name="near" value="1" /> <float name="far" value="1" /> <float name="mid" value="1" /> </group> <bool name="focus_fire" value="False" /> </group> <group name="cooldown"> <group name="duration"> <float name="max" value="0" /> <float name="min" value="0" /> </group> <group name="duration_multiplier"> <float name="near" value="1" /> <float name="far" value="1" /> <float name="mid" value="1" /> </group> </group> <group name="cover_table"> <group name="tp_defcover"> <float name="accuracy_multiplier" value="1" /> <float name="damage_multiplier" value="1" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="1" /> </group> <group name="tp_defcover_narrow"> <float name="accuracy_multiplier" value="1" /> <float name="damage_multiplier" value="1" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="1" /> </group> <group name="tp_garrison_cover"> <float name="accuracy_multiplier" value="0.4" /> <float name="damage_multiplier" 
value="0.4" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="0" /> </group> <group name="tp_garrison_halftrack"> <float name="accuracy_multiplier" value="0.5" /> <float name="damage_multiplier" value="0.5" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="0" /> </group> <group name="tp_heavy"> <float name="accuracy_multiplier" value="0.5" /> <float name="damage_multiplier" value="0.5" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="0.1" /> </group> <group name="tp_light"> <float name="accuracy_multiplier" value="0.5" /> <float name="damage_multiplier" value="1" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="0.5" /> </group> <group name="tp_negative"> <float name="accuracy_multiplier" value="1.25" /> <float name="damage_multiplier" value="1.25" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="1.5" /> </group> <group name="tp_open"> <float name="accuracy_multiplier" value="1.25" /> <float name="damage_multiplier" value="1" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="1" /> </group> <group name="tp_smoke"> <float name="accuracy_multiplier" value="0.5" /> <float name="damage_multiplier" value="1" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="1" /> </group> <group name="tp_trench"> <float name="accuracy_multiplier" value="0.15" /> <float name="damage_multiplier" value="0.1" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="0" /> </group> <group name="tp_water"> <float name="accuracy_multiplier" value="1" /> <float name="damage_multiplier" value="1" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="1" /> </group> <group name="tp_z_bunker"> 
<float name="accuracy_multiplier" value="0.15" /> <float name="damage_multiplier" value="1" /> <float name="penetration_multiplier" value="0.25" /> <float name="suppression_multiplier" value="0" /> </group> <group name="tp_z_emplacement"> <float name="accuracy_multiplier" value="0.5" /> <float name="damage_multiplier" value="1" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="0.75" /> </group> <group name="tp_z_ice"> <float name="accuracy_multiplier" value="1" /> <float name="damage_multiplier" value="1" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="1" /> </group> <group name="tp_z_snow"> <float name="accuracy_multiplier" value="1" /> <float name="damage_multiplier" value="1" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="1" /> </group> <group name="tp_zz_deep_snow"> <float name="accuracy_multiplier" value="1" /> <float name="damage_multiplier" value="1" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="1" /> </group> <group name="tp_zz_mud"> <float name="accuracy_multiplier" value="1" /> <float name="damage_multiplier" value="1" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="1" /> </group> <group name="tp_zz_team_weapon_heavy"> <float name="accuracy_multiplier" value="1" /> <float name="damage_multiplier" value="0.5" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="0.1" /> </group> </group> <group name="damage"> <float name="max" value="240" /> <float name="min" value="240" /> <list name="on_penetrated_actions"> </list> </group> <group name="damage_over_time"> <template_reference name="damage_over_time" value="dot_type\none"> </template_reference> </group> <group name="deflection"> <float name="deflection_damage_multiplier" value="0.5" /> <bool name="has_deflection_damage" 
value="True" /> <list name="on_deflected_actions"> </list> </group> <group name="fire"> <float name="wind_down" value="0" /> <float name="wind_up" value="0" /> <list name="on_fire_actions"> </list> </group> <float name="flinch_radius" value="10" /> <string name="fx_action_target_name" value="" /> <bool name="fx_always_visible" value="True" /> <float name="fx_building_hit_delay" value="0" /> <float name="fx_delay_in_building" value="0" /> <string name="fx_munition_name" value="shell_heavy_he" /> <string name="fx_tracer_name" value="" /> <float name="fx_tracer_speed" value="0" /> <bool name="fx_use_building_panel_normal" value="True" /> <locstring name="help_text" value="0" /> <icon name="icon_name" value="" /> <group name="moving"> <float name="accuracy_multiplier" value="0.5" /> <float name="burst_multiplier" value="1" /> <float name="cooldown_multiplier" value="1" /> <bool name="disable_moving_firing" value="False" /> <float name="moving_end_time" value="0" /> <float name="moving_start_time" value="0" /> </group> <string name="name" value="IL2 rockets" /> <group name="offhand"> <float name="offhand_end_time" value="0" /> <float name="offhand_start_time" value="0" /> </group> <group name="priority"> <float name="current_target" value="8" /> <group name="distance"> <float name="far" value="1" /> <float name="near" value="20" /> <float name="mid" value="10.5" /> </group> <float name="rotation" value="-0.25" /> <float name="window_bonus" value="0" /> <float name="threat" value="20" /> <float name="penetration" value="80" /> <float name="suggested_target" value="1000" /> <bool name="over_penetration_priority_penalty" value="True" /> </group> <group name="projectile"> <bool name="delete_previous_on_hit" value="False" /> <instance_reference name="projectile" value="ebps\projectile\p47_rocket" /> </group> <group name="range"> <float name="max" value="200" /> <float name="min" value="0" /> <group name="distance"> <float name="near" value="-1" /> <float name="far" 
value="-1" /> <float name="mid" value="-1" /> </group> </group> <group name="reload"> <group name="duration"> <float name="max" value="999" /> <float name="min" value="999" /> </group> <group name="duration_multiplier"> <float name="far" value="1" /> <float name="near" value="1" /> <float name="mid" value="1" /> </group> <group name="frequency"> <float name="max" value="111" /> <float name="min" value="111" /> </group> <list name="on_reload_actions"> </list> </group> <group name="scatter"> <float name="angle_scatter" value="1" /> <bool name="burst_pattern_enable" value="False" /> <float name="delay_bracket_change_chance" value="0" /> <float name="distance_bracket_count_air" value="0" /> <float name="distance_bracket_count_ground" value="1" /> <float name="distance_scatter_max" value="0" /> <float name="distance_scatter_obj_hit_min" value="0" /> <float name="distance_scatter_offset" value="0" /> <float name="distance_scatter_ratio" value="1" /> <float name="fow_angle_multiplier" value="1" /> <float name="fow_distance_multiplier" value="1" /> <float name="max_tilt_angle" value="0" /> <float name="min_tilt_angle" value="0" /> <float name="tilt_max_distance" value="0" /> <float name="tilt_scatter_chance" value="0" /> </group> <group name="setup"> <float name="duration" value="0" /> <bool name="has_instant_setup" value="False" /> <bool name="can_interrupt_setup" value="False" /> <float name="attach_duration" value="0" /> </group> <group name="suppressed"> <float name="pinned_burst_multiplier" value="1" /> <float name="pinned_cooldown_multiplier" value="1" /> <float name="pinned_reload_multiplier" value="1" /> <float name="suppressed_burst_multiplier" value="1" /> <float name="suppressed_cooldown_multiplier" value="1" /> <float name="suppressed_reload_multiplier" value="1" /> </group> <group name="suppression"> <float name="nearby_suppression_multiplier" value="0" /> <float name="nearby_suppression_radius" value="0" /> <group name="target_pinned_multipliers"> <float 
name="accuracy_multiplier" value="0.25" /> <float name="damage_multiplier" value="1" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="0.1" /> </group> <group name="target_suppressed_multipliers"> <float name="accuracy_multiplier" value="0.5" /> <float name="damage_multiplier" value="1" /> <float name="penetration_multiplier" value="1" /> <float name="suppression_multiplier" value="0.5" /> </group> <float name="amount" value="0" /> </group> <group name="teardown"> <float name="duration" value="0" /> </group> <group name="tracking"> <float name="fire_cone_angle" value="20" /> <group name="normal"> <float name="max_down" value="-90" /> <float name="max_left" value="-90" /> <float name="max_right" value="90" /> <float name="max_up" value="90" /> <float name="speed_horizontal" value="10000" /> <float name="speed_vertical" value="10000" /> </group> <float name="pivot_end_time" value="0" /> <bool name="pivot_only" value="False" /> <float name="pivot_start_time" value="0" /> </group> <locstring name="ui_name" value="0" /> <enum name="weapon_type" value="ballistic" /> <instance_reference name="ui_range" value="" /> <instance_reference name="ui_setfacing" value="" /> <group name="penetration"> <float name="near" value="420" /> <float name="far" value="340" /> <float name="mid" value="380" /> </group> <group name="ui_map_colour"> <int name="red" value="0" /> <int name="green" value="0" /> <int name="blue" value="0" /> <int name="alpha" value="0" /> </group> </group> <uniqueid name="pbgid" value="17863" /> </instance>
{ "content_hash": "5fb87504dfa8bfd1d662fff699c4f934", "timestamp": "", "source": "github", "line_count": 444, "max_line_length": 108, "avg_line_length": 38.1981981981982, "alnum_prop": 0.6193985849056604, "repo_name": "LastCrusadeModTeam/LastCrusade", "id": "d437d132dca8edb1dea9f9b2f2c9ae953099ca82", "size": "16960", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/instances/weapon/aef/pm/explosive_weapons/heavy_artillery/p47_rocket_strafe.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "AMPL", "bytes": "5218" }, { "name": "CSS", "bytes": "2640" }, { "name": "HTML", "bytes": "117223" }, { "name": "Lua", "bytes": "87140" } ], "symlink_target": "" }
package cn.xishan.oftenporter.demo.core.test4.porter;

import cn.xishan.oftenporter.demo.core.test4.sth.ID;
import cn.xishan.oftenporter.demo.core.test4.sth.User;
import cn.xishan.oftenporter.porter.core.annotation.PortIn;
import cn.xishan.oftenporter.porter.core.annotation.param.BindEntities;
import cn.xishan.oftenporter.porter.core.base.PortMethod;
import cn.xishan.oftenporter.porter.core.base.OftenObject;

/**
 * Demo porter showing entity binding: {@code @BindEntities(ID.class)} at class
 * level binds an {@link ID} entity, and {@code send} additionally binds a
 * {@link User} at method level.
 */
@PortIn
@BindEntities(ID.class)
public class BindSth2Porter
{

    /**
     * <pre>
     * 1. The request method is specified via the {@code method} attribute of @PortIn.
     * 2. {@code @BindEntities} binds request data to entity objects; within an
     *    entity, fields annotated with @Nece are required values and fields
     *    annotated with @Unece are optional values. By default the field types
     *    are bound automatically.
     * </pre>
     *
     * @param oftenObject request context; the class-level entity is read with
     *                    {@code centity(0)} and the method-level one with
     *                    {@code fentity(0)} (presumably "class" vs "function"
     *                    scope — confirm against the framework docs)
     * @return the bound User and ID joined with a comma
     */
    @PortIn(method = PortMethod.POST)
    @BindEntities({
            User.class
    })
    public Object send(OftenObject oftenObject)
    {
        ID id = oftenObject.centity(0);
        User user = oftenObject.fentity(0);
        return user + "," + id;
    }
}
{ "content_hash": "8b3e2660041326a4ca5b47657bb38095", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 75, "avg_line_length": 26.852941176470587, "alnum_prop": 0.7130339539978094, "repo_name": "gzxishan/OftenPorter", "id": "052925dfe8a5d34826b7b46276dba07ab2117d90", "size": "1027", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Demo/src/main/java/cn/xishan/oftenporter/demo/core/test4/porter/BindSth2Porter.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "1964613" } ], "symlink_target": "" }
"""XML-RPC bridge exposing the IDA Pro scripting API (idautils/idc) to a
remote client. Runs inside IDA (Python 2); all arguments and results are
exchanged as cPickle-serialized blobs."""

from idaapi import *
from idautils import *
from idc import *

from SimpleXMLRPCServer import SimpleXMLRPCServer
import cPickle


def is_connected():
    # Trivial ping so the remote client can check the XML-RPC link is alive.
    return True


def wrapper_get_raw(oops):
    """Dump every function in the database and return the pickled result.

    For each function start address the value is a list of:
      - the function name,
      - one (address, mnemonic, op0, op1, op2) tuple per instruction,
      - a final ((begin, end) basic-block list, sorted CFG edge list) tuple.
    The `oops` argument is unused.
    """
    F = {}
    for function_ea in Functions():
        F[ function_ea ] = []

        f_start = function_ea
        f_end = GetFunctionAttr(function_ea, FUNCATTR_END)

        edges = set()
        boundaries = set((f_start,))

        F[ function_ea ].append( GetFunctionName(function_ea) )

        for head in Heads(f_start, f_end):
            if isCode( GetFlags( head ) ):
                # (address, mnemonic, operand 0..2) for every instruction
                F[ function_ea ].append( (head, GetMnem(head), GetOpnd(head, 0), GetOpnd(head, 1), GetOpnd(head, 2)) )

                refs = CodeRefsFrom(head, 0)
                # only keep code references that stay inside this function
                refs = set(filter(lambda x: x >= f_start and x <= f_end, refs))

                if refs:
                    next_head = NextHead(head, f_end)
                    if isFlow(GetFlags(next_head)):
                        refs.add(next_head)

                    # Update the boundaries found so far.
                    boundaries.update(refs)

                    # For each of the references found, and edge is
                    # created.
                    for r in refs:
                        # If the flow could also come from the address
                        # previous to the destination of the branching
                        # an edge is created.
                        if isFlow(GetFlags(r)):
                            edges.add((PrevHead(r, f_start), r))
                        edges.add((head, r))

        #print edges, boundaries

        # Let's build the list of (startEA, startEA) couples
        # for each basic block
        sorted_boundaries = sorted(boundaries, reverse = True)
        end_addr = PrevHead(f_end, f_start)
        bb_addr = []
        for begin_addr in sorted_boundaries:
            bb_addr.append((begin_addr, end_addr))
            # search the next end_addr which could be
            # farther than just the previous head
            # if data are interlaced in the code
            # WARNING: it assumes it won't epicly fail ;)
            end_addr = PrevHead(begin_addr, f_start)
            while not isCode(GetFlags(end_addr)):
                end_addr = PrevHead(end_addr, f_start)

        # And finally return the result
        bb_addr.reverse()
        F[ function_ea ].append( (bb_addr, sorted(edges)) )

    return cPickle.dumps( F )


def wrapper_Heads(oops):
    # `oops` is a pickled (start, end) address pair; returns the pickled
    # list of item heads in that range.
    start, end = cPickle.loads(oops)
    return cPickle.dumps( [ x for x in Heads( start, end ) ] )


def wrapper_Functions(oops):
    # `oops` is ignored; returns the pickled list of all function start
    # addresses in the database.
    return cPickle.dumps( [ x for x in Functions() ] )


def wrapper_get_function(oops):
    # `oops` is a pickled function name; returns the pickled start address
    # of the matching function, or -1 if no function has that name.
    name = cPickle.loads(oops)
    for function_ea in Functions():
        if GetFunctionName(function_ea) == name:
            return cPickle.dumps( function_ea )
    return cPickle.dumps( -1 )


def wrapper_quit(oops):
    # Terminate IDA with exit code 0; the argument is ignored.
    qexit(0)


class IDAWrapper:
    """Generic XML-RPC dispatcher: forwards any unknown method name to the
    matching function in idautils or idc (idautils is searched first)."""

    def _dispatch(self, x, params):
        # x: name of the IDA API function to call.
        # params: a 1-tuple holding the pickled argument tuple.
        #fd = open("toto.txt", "w")
        #fd.write( x + "\n" )
        #fd.write( str(type(params[0])) + "\n" )
        #fd.close()

        params = cPickle.loads( *params )
        if isinstance(params, tuple) == False:
            # normalize a single argument into a 1-tuple for the *params call
            params = (params,)

        import types
        import idautils
        import idc

        #[getattr(idautils, a, None) for a in dir(idautils) if isinstance(getattr(idautils, a, None) , types.FunctionType)]
        for a in dir(idautils):
            #fd.write( "\t" + a + "\n" )
            if a == x:
                z = getattr(idautils, a, None)
                ret = z( *params )
                # generators cannot be pickled; expand them into a list first
                if type(ret).__name__ == 'generator':
                    return cPickle.dumps( [ i for i in ret ] )
                return cPickle.dumps( ret )

        for a in dir(idc):
            #fd.write( "\t" + a + "\n" )
            if a == x:
                z = getattr(idc, a, None)
                ret = z( *params )
                if type(ret).__name__ == 'generator':
                    return cPickle.dumps( [ i for i in ret ] )
                return cPickle.dumps( ret )

        # unknown name: reply with an empty (pickled) list
        return cPickle.dumps( [] )


def main():
    # Wait until IDA's auto-analysis is finished before serving requests.
    autoWait()
    ea = ScreenEA()

    server = SimpleXMLRPCServer(("localhost", 9000))
    server.register_function(is_connected, "is_connected")
    server.register_function(wrapper_get_raw, "get_raw")
    server.register_function(wrapper_get_function, "get_function")
    server.register_function(wrapper_Heads, "Heads")
    server.register_function(wrapper_Functions, "Functions")
    # catch-all dispatcher for any other idautils/idc call
    server.register_instance(IDAWrapper())
    server.register_function(wrapper_quit, "quit")
    server.serve_forever()

    qexit(0)

main()
{ "content_hash": "9a6813bdfb2688ac9ebffd44b609eac7", "timestamp": "", "source": "github", "line_count": 143, "max_line_length": 123, "avg_line_length": 34.18881118881119, "alnum_prop": 0.5199427285743505, "repo_name": "d9w/6858-android-intents", "id": "0bf8c805137b7e9b1695c527a8686208d58cafa1", "size": "5659", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "analyzer/androguard/core/binaries/idawrapper.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "1911" }, { "name": "C++", "bytes": "73382" }, { "name": "Java", "bytes": "4143" }, { "name": "PHP", "bytes": "1263" }, { "name": "Python", "bytes": "1237389" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="nl"> <head> <!-- Generated by javadoc (1.8.0_51) on Fri Jan 13 16:45:46 CET 2017 --> <title>com.xml2j.tutorial.choice.handlers</title> <meta name="date" content="2017-01-13"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="com.xml2j.tutorial.choice.handlers"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-use.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../../index-files/index-1.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../com/xml2j/tutorial/choice/application/package-summary.html">Prev&nbsp;Package</a></li> <li><a href="../../../../../com/xml2j/tutorial/choice/processor/package-summary.html">Next&nbsp;Package</a></li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?com/xml2j/tutorial/choice/handlers/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a 
href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h1 title="Package" class="title">Package&nbsp;com.xml2j.tutorial.choice.handlers</h1> </div> <div class="contentContainer"> <ul class="blockList"> <li class="blockList"> <table class="typeSummary" border="0" cellpadding="3" cellspacing="0" summary="Class Summary table, listing classes, and an explanation"> <caption><span>Class Summary</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Class</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="../../../../../com/xml2j/tutorial/choice/handlers/ChoiceMessageHandler.html" title="class in com.xml2j.tutorial.choice.handlers">ChoiceMessageHandler</a></td> <td class="colLast"> <div class="block">This class reads the XML document from an XML inputsource.</div> </td> </tr> </tbody> </table> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-use.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../../index-files/index-1.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul 
class="navList"> <li><a href="../../../../../com/xml2j/tutorial/choice/application/package-summary.html">Prev&nbsp;Package</a></li> <li><a href="../../../../../com/xml2j/tutorial/choice/processor/package-summary.html">Next&nbsp;Package</a></li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?com/xml2j/tutorial/choice/handlers/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small> <b>Copyright © Lolke B. Dijkstra - 2016</b> </small></p> </body> </html>
{ "content_hash": "32209e6033b512475ac804aa2b567696", "timestamp": "", "source": "github", "line_count": 145, "max_line_length": 188, "avg_line_length": 35.26896551724138, "alnum_prop": 0.6222135314822057, "repo_name": "lolkedijkstra/xml2j-gen", "id": "1efa8a7162c55aa185bb3c2b7231301acd4a856b", "size": "5114", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tutorial/choice/docs/com/xml2j/tutorial/choice/handlers/package-summary.html", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "3329" }, { "name": "CSS", "bytes": "12808" }, { "name": "HTML", "bytes": "392171" }, { "name": "Java", "bytes": "553779" }, { "name": "JavaScript", "bytes": "827" }, { "name": "Shell", "bytes": "2649" }, { "name": "XSLT", "bytes": "138654" } ], "symlink_target": "" }
<?php

namespace Symfony\Component\HttpFoundation\Tests;

use Symfony\Component\HttpFoundation\BinaryFileResponse;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\ResponseHeaderBag;
use Symfony\Component\HttpFoundation\Tests\File\FakeFile;

/**
 * Tests for BinaryFileResponse: construction, content restrictions, HTTP range
 * requests, X-Sendfile / X-Accel-Redirect delegation and post-send file deletion.
 */
class BinaryFileResponseTest extends ResponseTestCase
{
    /** Constructor and factory must apply status, headers, ETag/Last-Modified flags and disposition. */
    public function testConstruction()
    {
        $file = __DIR__.'/../README.md';
        $response = new BinaryFileResponse($file, 404, array('X-Header' => 'Foo'), true, null, true, true);
        $this->assertEquals(404, $response->getStatusCode());
        $this->assertEquals('Foo', $response->headers->get('X-Header'));
        $this->assertTrue($response->headers->has('ETag'));
        $this->assertTrue($response->headers->has('Last-Modified'));
        $this->assertFalse($response->headers->has('Content-Disposition'));

        $response = BinaryFileResponse::create($file, 404, array(), true, ResponseHeaderBag::DISPOSITION_INLINE);
        $this->assertEquals(404, $response->getStatusCode());
        $this->assertFalse($response->headers->has('ETag'));
        $this->assertEquals('inline; filename="README.md"', $response->headers->get('Content-Disposition'));
    }

    /** A file whose name contains non-ASCII characters must be accepted unchanged. */
    public function testConstructWithNonAsciiFilename()
    {
        touch(sys_get_temp_dir().'/fööö.html');

        $response = new BinaryFileResponse(sys_get_temp_dir().'/fööö.html', 200, array(), true, 'attachment');

        @unlink(sys_get_temp_dir().'/fööö.html');

        $this->assertSame('fööö.html', $response->getFile()->getFilename());
    }

    /**
     * Setting a string body on a file response must be rejected.
     *
     * @expectedException \LogicException
     */
    public function testSetContent()
    {
        $response = new BinaryFileResponse(__FILE__);
        $response->setContent('foo');
    }

    /** getContent() always returns false for a file-backed response. */
    public function testGetContent()
    {
        $response = new BinaryFileResponse(__FILE__);
        $this->assertFalse($response->getContent());
    }

    /** Non-ASCII filenames get an ASCII fallback plus an RFC 5987 encoded variant. */
    public function testSetContentDispositionGeneratesSafeFallbackFilename()
    {
        $response = new BinaryFileResponse(__FILE__);
        $response->setContentDisposition(ResponseHeaderBag::DISPOSITION_ATTACHMENT, 'föö.html');

        $this->assertSame('attachment; filename="f__.html"; filename*=utf-8\'\'f%C3%B6%C3%B6.html', $response->headers->get('Content-Disposition'));
    }

    /**
     * A Range request whose If-Range matches the response ETag yields 206 partial content.
     *
     * @dataProvider provideRanges
     */
    public function testRequests($requestRange, $offset, $length, $responseRange)
    {
        $response = BinaryFileResponse::create(__DIR__.'/File/Fixtures/test.gif', 200, array('Content-Type' => 'application/octet-stream'))->setAutoEtag();

        // do a request to get the ETag
        $request = Request::create('/');
        $response->prepare($request);
        $etag = $response->headers->get('ETag');

        // prepare a request for a range of the testing file
        $request = Request::create('/');
        $request->headers->set('If-Range', $etag);
        $request->headers->set('Range', $requestRange);

        $file = fopen(__DIR__.'/File/Fixtures/test.gif', 'r');
        fseek($file, $offset);
        $data = fread($file, $length);
        fclose($file);

        $this->expectOutputString($data);
        $response = clone $response;
        $response->prepare($request);
        $response->sendContent();

        $this->assertEquals(206, $response->getStatusCode());
        $this->assertEquals($responseRange, $response->headers->get('Content-Range'));
    }

    /**
     * Same as testRequests but conditioning If-Range on Last-Modified instead of ETag.
     *
     * @dataProvider provideRanges
     */
    public function testRequestsWithoutEtag($requestRange, $offset, $length, $responseRange)
    {
        $response = BinaryFileResponse::create(__DIR__.'/File/Fixtures/test.gif', 200, array('Content-Type' => 'application/octet-stream'));

        // do a request to get the LastModified
        $request = Request::create('/');
        $response->prepare($request);
        $lastModified = $response->headers->get('Last-Modified');

        // prepare a request for a range of the testing file
        $request = Request::create('/');
        $request->headers->set('If-Range', $lastModified);
        $request->headers->set('Range', $requestRange);

        $file = fopen(__DIR__.'/File/Fixtures/test.gif', 'r');
        fseek($file, $offset);
        $data = fread($file, $length);
        fclose($file);

        $this->expectOutputString($data);
        $response = clone $response;
        $response->prepare($request);
        $response->sendContent();

        $this->assertEquals(206, $response->getStatusCode());
        $this->assertEquals($responseRange, $response->headers->get('Content-Range'));
    }

    /** Range header / expected offset / length / Content-Range for the 35-byte test.gif fixture. */
    public function provideRanges()
    {
        return array(
            array('bytes=1-4', 1, 4, 'bytes 1-4/35'),
            array('bytes=-5', 30, 5, 'bytes 30-34/35'),
            array('bytes=30-', 30, 5, 'bytes 30-34/35'),
            array('bytes=30-30', 30, 1, 'bytes 30-30/35'),
            array('bytes=30-34', 30, 5, 'bytes 30-34/35'),
        );
    }

    /** Without a Last-Modified header, an If-Range condition fails and the full file is served. */
    public function testRangeRequestsWithoutLastModifiedDate()
    {
        // prevent auto last modified
        $response = BinaryFileResponse::create(__DIR__.'/File/Fixtures/test.gif', 200, array('Content-Type' => 'application/octet-stream'), true, null, false, false);

        // prepare a request for a range of the testing file
        $request = Request::create('/');
        $request->headers->set('If-Range', date('D, d M Y H:i:s').' GMT');
        $request->headers->set('Range', 'bytes=1-4');

        $this->expectOutputString(file_get_contents(__DIR__.'/File/Fixtures/test.gif'));
        $response = clone $response;
        $response->prepare($request);
        $response->sendContent();

        $this->assertEquals(200, $response->getStatusCode());
        $this->assertNull($response->headers->get('Content-Range'));
    }

    /**
     * Ranges covering the whole file (or syntactically invalid ones) yield 200 with full content.
     *
     * @dataProvider provideFullFileRanges
     */
    public function testFullFileRequests($requestRange)
    {
        $response = BinaryFileResponse::create(__DIR__.'/File/Fixtures/test.gif', 200, array('Content-Type' => 'application/octet-stream'))->setAutoEtag();

        // prepare a request for a range of the testing file
        $request = Request::create('/');
        $request->headers->set('Range', $requestRange);

        $file = fopen(__DIR__.'/File/Fixtures/test.gif', 'r');
        $data = fread($file, 35);
        fclose($file);

        $this->expectOutputString($data);
        $response = clone $response;
        $response->prepare($request);
        $response->sendContent();

        $this->assertEquals(200, $response->getStatusCode());
    }

    public function provideFullFileRanges()
    {
        return array(
            array('bytes=0-'),
            array('bytes=0-34'),
            array('bytes=-35'),
            // Syntactical invalid range-request should also return the full resource
            array('bytes=20-10'),
            array('bytes=50-40'),
        );
    }

    /**
     * Unsatisfiable ranges must produce 416 with a "bytes *" Content-Range.
     *
     * @dataProvider provideInvalidRanges
     */
    public function testInvalidRequests($requestRange)
    {
        $response = BinaryFileResponse::create(__DIR__.'/File/Fixtures/test.gif', 200, array('Content-Type' => 'application/octet-stream'))->setAutoEtag();

        // prepare a request for a range of the testing file
        $request = Request::create('/');
        $request->headers->set('Range', $requestRange);

        $response = clone $response;
        $response->prepare($request);
        $response->sendContent();

        $this->assertEquals(416, $response->getStatusCode());
        $this->assertEquals('bytes */35', $response->headers->get('Content-Range'));
    }

    public function provideInvalidRanges()
    {
        return array(
            array('bytes=-40'),
            array('bytes=30-40'),
        );
    }

    /**
     * When X-Sendfile is trusted, the body is delegated to the web server via header (empty output).
     *
     * @dataProvider provideXSendfileFiles
     */
    public function testXSendfile($file)
    {
        $request = Request::create('/');
        $request->headers->set('X-Sendfile-Type', 'X-Sendfile');

        BinaryFileResponse::trustXSendfileTypeHeader();
        $response = BinaryFileResponse::create($file, 200, array('Content-Type' => 'application/octet-stream'));
        $response->prepare($request);

        $this->expectOutputString('');
        $response->sendContent();

        $this->assertContains('README.md', $response->headers->get('X-Sendfile'));
    }

    /** Plain path and file:// URL variants of the same file. */
    public function provideXSendfileFiles()
    {
        return array(
            array(__DIR__.'/../README.md'),
            array('file://'.__DIR__.'/../README.md'),
        );
    }

    /**
     * nginx X-Accel-Mapping prefixes must be translated into the virtual redirect path.
     *
     * @dataProvider getSampleXAccelMappings
     */
    public function testXAccelMapping($realpath, $mapping, $virtual)
    {
        $request = Request::create('/');
        $request->headers->set('X-Sendfile-Type', 'X-Accel-Redirect');
        $request->headers->set('X-Accel-Mapping', $mapping);

        $file = new FakeFile($realpath, __DIR__.'/File/Fixtures/test');

        BinaryFileResponse::trustXSendfileTypeHeader();
        $response = new BinaryFileResponse($file, 200, array('Content-Type' => 'application/octet-stream'));
        // inject the fake file via reflection so the mapping is applied to $realpath
        $reflection = new \ReflectionObject($response);
        $property = $reflection->getProperty('file');
        $property->setAccessible(true);
        $property->setValue($response, $file);

        $response->prepare($request);
        $this->assertEquals($virtual, $response->headers->get('X-Accel-Redirect'));
    }

    /** deleteFileAfterSend(true) must remove the file once the content was sent. */
    public function testDeleteFileAfterSend()
    {
        $request = Request::create('/');

        $path = __DIR__.'/File/Fixtures/to_delete';
        touch($path);
        $realPath = realpath($path);
        $this->assertFileExists($realPath);

        $response = new BinaryFileResponse($realPath, 200, array('Content-Type' => 'application/octet-stream'));
        $response->deleteFileAfterSend(true);

        $response->prepare($request);
        $response->sendContent();

        $this->assertFileNotExists($path);
    }

    /** Non-safe HTTP methods (e.g. POST) must advertise Accept-Ranges: none. */
    public function testAcceptRangeOnUnsafeMethods()
    {
        $request = Request::create('/', 'POST');
        $response = BinaryFileResponse::create(__DIR__.'/File/Fixtures/test.gif', 200, array('Content-Type' => 'application/octet-stream'));
        $response->prepare($request);

        $this->assertEquals('none', $response->headers->get('Accept-Ranges'));
    }

    /** An explicitly set Accept-Ranges header must not be overwritten by prepare(). */
    public function testAcceptRangeNotOverriden()
    {
        $request = Request::create('/', 'POST');
        $response = BinaryFileResponse::create(__DIR__.'/File/Fixtures/test.gif', 200, array('Content-Type' => 'application/octet-stream'));
        $response->headers->set('Accept-Ranges', 'foo');
        $response->prepare($request);

        $this->assertEquals('foo', $response->headers->get('Accept-Ranges'));
    }

    /** real path / X-Accel-Mapping header / expected virtual redirect path. */
    public function getSampleXAccelMappings()
    {
        return array(
            array('/var/www/var/www/files/foo.txt', '/var/www/=/files/', '/files/var/www/files/foo.txt'),
            array('/home/foo/bar.txt', '/var/www/=/files/,/home/foo/=/baz/', '/baz/bar.txt'),
        );
    }

    /** Response used by the shared assertions in ResponseTestCase. */
    protected function provideResponse()
    {
        return new BinaryFileResponse(__DIR__.'/../README.md', 200, array('Content-Type' => 'application/octet-stream'));
    }

    /** Clean up the temp fixture left over if testDeleteFileAfterSend failed mid-way. */
    public static function tearDownAfterClass()
    {
        $path = __DIR__.'/../Fixtures/to_delete';
        if (file_exists($path)) {
            @unlink($path);
        }
    }
}
{ "content_hash": "3deb135063054f69487e13ebfeb2a613", "timestamp": "", "source": "github", "line_count": 323, "max_line_length": 166, "avg_line_length": 36.46439628482972, "alnum_prop": 0.5862625233486161, "repo_name": "Condors/TunisiaMall", "id": "d84fb62c388e4df36d78dc33cfebc8fa77b9339c", "size": "12028", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vendor/symfony/symfony/src/Symfony/Component/HttpFoundation/Tests/BinaryFileResponseTest.php", "mode": "33261", "license": "mit", "language": [ { "name": "ActionScript", "bytes": "16927" }, { "name": "ApacheConf", "bytes": "3688" }, { "name": "Batchfile", "bytes": "690" }, { "name": "CSS", "bytes": "836798" }, { "name": "HTML", "bytes": "917753" }, { "name": "JavaScript", "bytes": "1079135" }, { "name": "PHP", "bytes": "196744" }, { "name": "Shell", "bytes": "4247" } ], "symlink_target": "" }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace x2C
{
    /// <summary>
    /// Classification of a test result: pass, warning, or failure.
    /// </summary>
    public enum TestingOutcomeType
    {
        Pass,
        Warning,
        Fail
    }
}
{ "content_hash": "8be8fb26633c3ff5aa0df4b433e3b382", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 33, "avg_line_length": 10.75, "alnum_prop": 0.7267441860465116, "repo_name": "riveryc/DimensionData.ComputeClient", "id": "6de2c59b7dc07211f2f08de6c9b7a0e40a059e53", "size": "174", "binary": false, "copies": "2", "ref": "refs/heads/develop", "path": "x2C/x2C.lib/TestingOutcomeType.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "703065" }, { "name": "CSS", "bytes": "103203" }, { "name": "HTML", "bytes": "26794" }, { "name": "JavaScript", "bytes": "104171" }, { "name": "PowerShell", "bytes": "159130" }, { "name": "Smalltalk", "bytes": "2384" } ], "symlink_target": "" }
// Creates an empty level part bound to its owning LocationPart, with a fresh Scene.
LevelPart::LevelPart(LocationPart* locPart) : locPart(locPart), scene(new Scene), lights_dt(1.0f)
{
}

//=================================================================================================
// Destroys the scene and every owned transient effect.
LevelPart::~LevelPart()
{
	delete scene;
	DeleteElements(explos);
	DeleteElements(electros);
	DeleteElements(bullets);
	DeleteElements(pes);
	DeleteElements(tpes);
}

//=================================================================================================
// Writes all effect containers to the save file: each as a count followed by its elements.
// NOTE: the order here (pes, tpes, explos, electros, drains, bullets) must match Load().
void LevelPart::Save(GameWriter& f)
{
	f << pes.size();
	for(ParticleEmitter* pe : pes)
		pe->Save(f);

	f << tpes.size();
	for(TrailParticleEmitter* tpe : tpes)
		tpe->Save(f);

	f << explos.size();
	for(Explo* explo : explos)
		explo->Save(f);

	f << electros.size();
	for(Electro* electro : electros)
		electro->Save(f);

	f << drains.size();
	for(Drain& drain : drains)
		drain.Save(f);

	f << bullets.size();
	for(Bullet* bullet : bullets)
		bullet->Save(f);
}

//=================================================================================================
// Reads back the containers written by Save(), allocating elements as needed.
// The particle format version is derived from the save's game version.
void LevelPart::Load(GameReader& f)
{
	const int particle_version = (LOAD_VERSION >= V_0_13 ? 2 : (LOAD_VERSION >= V_0_12 ? 1 : 0));

	pes.resize(f.Read<uint>());
	for(ParticleEmitter*& pe : pes)
	{
		pe = new ParticleEmitter;
		pe->Load(f, particle_version);
	}

	tpes.resize(f.Read<uint>());
	for(TrailParticleEmitter*& tpe : tpes)
	{
		tpe = new TrailParticleEmitter;
		tpe->Load(f, particle_version);
	}

	explos.resize(f.Read<uint>());
	for(Explo*& explo : explos)
	{
		explo = new Explo;
		explo->Load(f);
	}

	electros.resize(f.Read<uint>());
	for(Electro*& electro : electros)
	{
		electro = new Electro;
		electro->locPart = locPart; // back-reference is not serialized; restore it here
		electro->Load(f);
	}

	drains.resize(f.Read<uint>());
	for(Drain& drain : drains)
		drain.Load(f);

	bullets.resize(f.Read<uint>());
	for(Bullet*& bullet : bullets)
	{
		bullet = new Bullet;
		bullet->Load(f);
	}
}

//=================================================================================================
// Serializes bullets, explosions and electros into a bit stream (counts + elements).
// NOTE: the order must match Read().
void LevelPart::Write(BitStreamWriter& f)
{
	// bullets
	f.Write(bullets.size());
	for(Bullet* bullet : bullets)
		bullet->Write(f);

	// explosions
	f.Write(explos.size());
	for(Explo* explo : explos)
		explo->Write(f);

	// electros
	f.Write(electros.size());
	for(Electro* electro : electros)
		electro->Write(f);
}

//=================================================================================================
// Deserializes the data produced by Write(). Each count is sanity-checked against the
// stream's remaining size (MIN_SIZE per element) before allocating; returns false and
// logs an error on any malformed section.
bool LevelPart::Read(BitStreamReader& f)
{
	// bullets
	uint count;
	f >> count;
	if(!f.Ensure(count * Bullet::MIN_SIZE))
	{
		Error("Read level part: Broken bullet count.");
		return false;
	}
	bullets.resize(count);
	for(Bullet*& bullet : bullets)
	{
		bullet = new Bullet;
		if(!bullet->Read(f, *this))
		{
			Error("Read level part: Broken bullet.");
			return false;
		}
	}

	// explosions
	f >> count;
	if(!f.Ensure(count * Explo::MIN_SIZE))
	{
		Error("Read level part: Broken explosion count.");
		return false;
	}
	explos.resize(count);
	for(Explo*& explo : explos)
	{
		explo = new Explo;
		if(!explo->Read(f))
		{
			Error("Read level part: Broken explosion.");
			return false;
		}
	}

	// electro effects
	f >> count;
	if(!f.Ensure(count * Electro::MIN_SIZE))
	{
		Error("Read level part: Broken electro count.");
		return false;
	}
	electros.resize(count);
	for(Electro*& electro : electros)
	{
		electro = new Electro;
		electro->locPart = locPart; // back-reference is not transmitted; restore it here
		if(!electro->Read(f))
		{
			Error("Read level part: Broken electro.");
			return false;
		}
	}

	return true;
}
{ "content_hash": "407310737a34715f32bf8aa2f0195f69", "timestamp": "", "source": "github", "line_count": 170, "max_line_length": 99, "avg_line_length": 20.576470588235296, "alnum_prop": 0.5511720983419096, "repo_name": "Tomash667/carpg", "id": "5e19acb0fca6e5b813fa5f166b1d065e36a74730", "size": "3870", "binary": false, "copies": "1", "ref": "refs/heads/dev", "path": "source/entity/LevelPart.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "156787" }, { "name": "C#", "bytes": "4957" }, { "name": "C++", "bytes": "3337021" }, { "name": "PowerShell", "bytes": "908" } ], "symlink_target": "" }
/**
 * Application delegate; owns the app's main window.
 */
@interface AppDelegate : UIResponder <UIApplicationDelegate>

/// The application's main window.
@property (strong, nonatomic) UIWindow *window;

@end
{ "content_hash": "246d425832045ee0a6ca3b27ad6c8592", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 60, "avg_line_length": 16.857142857142858, "alnum_prop": 0.7796610169491526, "repo_name": "SanggeonPark/SGTouchPointer", "id": "c10f39538afa0194857d7abf58bf9dd97992a0f2", "size": "279", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "SGTouchPointer/AppDelegate.h", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Objective-C", "bytes": "11836" }, { "name": "Ruby", "bytes": "870" } ], "symlink_target": "" }
using System;

namespace Overriding
{
    /// <summary>
    /// A snake that may or may not be venomous; overrides the base eating behavior.
    /// </summary>
    public class Snake : Animal
    {
        /// <summary>Whether this snake is venomous.</summary>
        public bool IsVenomous { get; set; }

        /// <summary>Writes a description of the snake eating the given food.</summary>
        /// <param name="food">What the snake is eating.</param>
        public override void Eat(string food)
        {
            string message;
            if (IsVenomous)
            {
                message = String.Format("{0} venomous snake eating {1}", Color, food);
            }
            else
            {
                message = String.Format("harmless {0} snake eating {1}", Color, food);
            }
            Console.WriteLine(message);
        }

        /// <summary>Writes a description of the snake biting.</summary>
        public void Bite()
        {
            string message;
            if (IsVenomous)
            {
                message = String.Format("{0} venomous snake biting", Color);
            }
            else
            {
                message = String.Format("harmless {0} snake biting", Color);
            }
            Console.WriteLine(message);
        }

        /// <summary>Creates a snake with default property values.</summary>
        public Snake()
        {
        }

        /// <summary>Creates a snake with the given color and venom flag.</summary>
        /// <param name="color">Color passed through to the base class.</param>
        /// <param name="isVenomous">Whether the snake is venomous.</param>
        public Snake(string color, bool isVenomous) : base(color)
        {
            IsVenomous = isVenomous;
        }
    }
}
{ "content_hash": "7b34ff51792ef6219e0a847ec44084b2", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 162, "avg_line_length": 26.433333333333334, "alnum_prop": 0.5485498108448928, "repo_name": "microsoft-dx/csharp-fundamentals", "id": "4fa965e7bda542b773438d8ee61ee2eef3bf66b3", "size": "795", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "CSharpFundamentals/csharp07 - Overriding/Snake.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "40011" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <project version="4"> <component name="ProjectRootManager" version="2" languageLevel="JDK_1_9" default="true" project-jdk-name="1.8" project-jdk-type="JavaSDK"> <output url="file://$PROJECT_DIR$/build" /> </component> <component name="SbtLocalSettings"> <option name="modificationStamps"> <map> <entry key="$PROJECT_DIR$/../../Waves" value="1467672867000" /> </map> </option> <option name="externalProjectsViewState"> <projects_view /> </option> </component> </project>
{ "content_hash": "b83e88a84ce688f78724cd1820e5d274", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 140, "avg_line_length": 35.125, "alnum_prop": 0.6405693950177936, "repo_name": "loxal/FreeEthereum", "id": "77994650426f430bcffb2b0d1d2f8c0f63527455", "size": "562", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": ".idea/misc.xml", "mode": "33261", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1126" }, { "name": "Java", "bytes": "2604200" }, { "name": "Kotlin", "bytes": "1658683" }, { "name": "Shell", "bytes": "123" } ], "symlink_target": "" }
@implementation HJMOHandler @synthesize state; @synthesize oid; @synthesize url; @synthesize urlConn; @synthesize moData; @synthesize moReadyDataFilename; @synthesize moLoadingDataFile; @synthesize managedObj; @synthesize objManager; @synthesize ownPolicy; -(HJMOHandler*)initWithOid:(id)oid_ url:(NSURL*)url_ objManager:objManager_{ [super init]; state = stateNew; self.oid = oid_; self.url = url_ ; self.objManager = objManager_; if (oid==nil) { self.oid = url_; } users = [[HJWeakMutableArray alloc] initWithCapacity:1]; //it can expand automatically. return self; } -(void)dealloc { //NSLog(@"dealloc %@",self); [urlConn cancel]; [self clearLoadingState]; [users release]; [url release]; [moReadyDataFilename release]; //NSLog(@"managed Obj retain count before handler dealloc %i",[managedObj retainCount]); [managedObj release]; [ownPolicy release]; [oid release]; [super dealloc]; } -(BOOL) isEqual:(id)object { if (![object isKindOfClass:[HJMOHandler class]]) { return NO; } return [oid isEqual:[(HJMOHandler*)object oid]]; } -(HJMOPolicy*)policy { if (ownPolicy) { return ownPolicy; } else { return [objManager policy]; } } -(void)addUser:(id<HJMOUser>)user { //check if already managing for this user (should not be if being used right) if (nil==[users findObject:user]) { [users addObject:user]; //did not already have user, so remember it (with a weak reference) } else { //can happen if users reused, and recycling code is lazy clearing old state, eg with UITableCellView //NSLog(@"HJMOHandler was already managing for user"); } if (user.moHandler==nil) { //this is the normal case, so set the state user.moHandler=self; } else { if (user.moHandler==self) { //this is not what we expect, addUser has been called twice, but thats OK - do nothing } else { //user was pointing to another handler, this can happen when user is reused. 
//have to make sure that the old handler knows it should no longer manage for this user //otherwise it might send it callbacks, and it won't be able to become inactive (have no users) [user.moHandler removeUser:user]; //now can assign the current state, which will also release the old one. user.moHandler=self; } } } -(void) deleteFileIfExistsAtPath:(NSString*)path { if ([[NSFileManager defaultManager] fileExistsAtPath:path]) { NSError* e = nil; [[NSFileManager defaultManager] removeItemAtPath:path error:&e]; if (e) { NSLog(@"HJMOHandler error deleting file %@",path); } } } -(void)clearLoadingState { self.urlConn=nil; [moLoadingDataFile closeFile]; self.moLoadingDataFile = nil; self.moData = nil; } -(void)cancelLoading { if (state==stateLoading) { [urlConn cancel]; [self clearLoadingState]; state=stateNew; } } -(void)becameNotInUse { //TODO is there more policy decisions here? //[self cancelLoading]; //don't cancel loading, do that in dealloc. because object manager might be holding on to //this handler in loadingHandlers to keep loading going } -(void)removeUser:(id<HJMOUser>)user { [users removeObject:user]; [self retain]; //because the next line could dealloc self. user.moHandler = nil; if (![self isInUse]) { [self becameNotInUse]; } [self autorelease]; } -(BOOL)isInUse { return [users count]>0; } -(BOOL)isLoading { return urlConn!=nil; } -(BOOL)isReady { return managedObj!=nil; } -(void)touchFile:(NSString*)path { HJMOFileCache* fileCache = objManager.fileCache; if (fileCache==nil) { return; } NSTimeInterval ageLimit = fileCache.fileAgeLimit; if (ageLimit<=0) { return; } NSFileManager* fileMan = [NSFileManager defaultManager]; NSError* e; NSDictionary* fsAttributes = [[NSFileManager defaultManager] attributesOfItemAtPath:path error:&e]; double ageSeconds = -1* [[fsAttributes fileModificationDate] timeIntervalSinceNow]; if (ageSeconds>(ageLimit/4)) { //to save writes, file age modification date isn't changed on every access, only if 1/4 of age limit old. 
NSString *keyArray[1] = {NSFileModificationDate}; id objectArray[1] = {[NSDate dateWithTimeIntervalSinceNow:0]}; NSDictionary* attributes = [NSDictionary dictionaryWithObjects:objectArray forKeys:keyArray count:1]; NSError* err; [fileMan setAttributes:attributes ofItemAtPath:path error:&err]; } } -(void)activateNewHandlerForUser:(id<HJMOUser>)user { HJMOFileCache* fileCache = objManager.fileCache; if (fileCache) { //File caching is in use NSString* readyFile = [fileCache readyFilePathForOid:oid]; if ([[NSFileManager defaultManager] fileExistsAtPath:readyFile]) { //NSLog(@"HJCache loading from fileCache"); //mo is loaded as a file in file cache self.moReadyDataFilename = readyFile; if (self.policy.readsUpdateFileDate) { [self touchFile:readyFile]; } state = stateLoaded; [self goFromLoadedToReady]; if (state == stateReady || state==stateLoaded) { [objManager addHandlerToMemCache:self]; } return; } else { //not loaded yet, so load to file because file cache in use //NSLog(@"HJCache loading from url"); NSString* loadingFile = [fileCache loadingFilePathForOid:oid]; BOOL ok = [[NSFileManager defaultManager] createFileAtPath:loadingFile contents:nil attributes:nil]; if (!ok) { state = stateFailed; NSLog(@"HJMOHandler error creating loading file %@",loadingFile); loadingFile = nil; [self clearLoadingState]; [self callbackFailedToUsers]; return; } else { self.moLoadingDataFile = [NSFileHandle fileHandleForWritingAtPath:loadingFile]; } } } //if file cache is in use temporary file name is prepared, either way now load from url [self startDownloadingFromURL]; } -(void)activateHandlerForUser:(id<HJMOUser>)user { //stateNew, stateLoading, stateLoaded, stateReady, stateFailed switch (state) { case stateNew: [self activateNewHandlerForUser:user]; return; case stateLoading: //handler is still loading, have to wait for it to load, so nop. return; case stateLoaded: //for some reason it didn't go to ready when it was loaded, so try again now. 
[self goFromLoadedToReady]; return; case stateReady: [user managedObjReady]; return; case stateFailed: [user managedObjFailed]; return; default: //not supposed to get here NSLog(@"HJMOHandler activateHandlerForUser error, no recognized state"); break; } } -(void)startDownloadingFromURL { //NSLog(@"HJMOHandler starting download for %@",self); HJMOPolicy* policy = [self policy]; NSURLRequest* request = [NSURLRequest requestWithURL:url cachePolicy:NSURLRequestReloadIgnoringLocalCacheData timeoutInterval:policy.urlTimeoutTime]; self.urlConn = [[NSURLConnection alloc] initWithRequest:request delegate:self]; [urlConn release]; if (urlConn==nil) { NSLog(@"HJMOHandler nil URLConnection for %@",url); state=stateFailed; } else { state=stateLoading; //TODO if app is showing a network activity monitor in the status bar, here is where a call needs to be // made to increment the number of active URLs } } -(void) goFromLoadedToReady { if ([users count]==0) { //can't go to stateReady because there's no user to do it. stay in stateLoaded. //this is not a bug, it can happen if the object has already been deleted before its content was //loaded over the net, eg because scrolled off the top of a table. //NSLog(@"HJMOHandler no user object to make it ready"); return; } self.managedObj=nil; //just to be sure there's not some old one around //pick _one_ and only one user to take mo from loaded to ready id<HJMOUser> user = [users objectAtIndex:0]; @try { [user changeManagedObjStateFromLoadedToReady]; if (managedObj!=nil) { state = stateReady; //because it worked [self callbackReadyToUsers]; } } @catch (id exception) { NSLog(@"%@",exception); self.managedObj=nil; } @finally { if (managedObj==nil) { //managedObj was still nil, ie going from loaded to ready failed. 
go to stateFailed and clean up from caches state = stateFailed; self.moReadyDataFilename = nil; self.moData=nil; [objManager removeFromHandlerFromCaches:self]; [self callbackFailedToUsers]; } } } -(void) callbackReadyToUsers { for (id<HJMOUser> user in [users objectEnumerator]) { [user managedObjReady]; } } -(void) callbackFailedToUsers { for (id<HJMOUser> user in [users objectEnumerator]) { [user managedObjFailed]; } } - (void)connection:(NSURLConnection *)connection didReceiveData:(NSData *)data { if (state!=stateLoading) { return; } if (!moLoadingDataFile) { //loading direct to memory if (moData==nil) { self.moData = [NSMutableData dataWithCapacity:1024*10]; } [moData appendData:data]; } else { [moLoadingDataFile writeData:data]; } } - (void)connectionDidFinishLoading:(NSURLConnection *)connection { [self retain]; //ensure that self isn't released in this method when the connection is finished with it. //NSLog(@"finishedLoading %@",self); state = stateLoaded; if (moLoadingDataFile) { //was downloading to file [moLoadingDataFile closeFile]; self.moLoadingDataFile = nil; self.urlConn = nil; NSString* readyFilename = [self.objManager.fileCache loadingFinishedForOid:oid]; if (readyFilename==nil) { state = stateFailed; [self callbackFailedToUsers]; return; } else { self.moReadyDataFilename = readyFilename; } } //TODO if app is showing a network activity monitor in the status bar, here is where a call needs to be // made to decrement the count of active URLs [objManager handlerFinishedDownloading:self]; [self goFromLoadedToReady]; if (state==stateReady || state==stateLoaded) { [objManager addHandlerToMemCache:self]; } [self release]; } - (void)connection:(NSURLConnection *)connection didFailWithError:(NSError *)error { state = stateFailed; NSLog(@"HJMOHandler URLConnection failed %@",error); //TODO if app is showing a network activity monitor in the status bar, here is where a call needs to be // made to decrement the count of active URLs [self clearLoadingState]; 
self.moReadyDataFilename = nil; self.moData=nil; [objManager removeFromHandlerFromCaches:self]; [self callbackFailedToUsers]; } -(NSString*)description { return [NSString stringWithFormat:@"HJMOHandler %@ users:%i retains:%i",oid,[users count],[self retainCount]]; } @end
{ "content_hash": "d4110f6195708db00c72ca01d7db8ab4", "timestamp": "", "source": "github", "line_count": 366, "max_line_length": 115, "avg_line_length": 28.502732240437158, "alnum_prop": 0.7152032208588958, "repo_name": "ZeusbaseObjectiveC/ZeusbaseObjCLibs", "id": "fb8643e798389925ae97c938240923fca29af540", "size": "10841", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "Library/HJCache/HJCacheClasses/HJMOHandler.m", "mode": "33261", "license": "mit", "language": [ { "name": "C", "bytes": "1505326" }, { "name": "C++", "bytes": "3874" }, { "name": "CSS", "bytes": "6947" }, { "name": "Objective-C", "bytes": "2664993" }, { "name": "Python", "bytes": "1031" }, { "name": "Shell", "bytes": "15807" }, { "name": "XSLT", "bytes": "22838" } ], "symlink_target": "" }
// Copyright (c) 2011 The LevelDB Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. See the AUTHORS file for names of contributors. #ifndef ROCKSDB_LITE #include "table/plain_table_reader.h" #include <string> #include <vector> #include "db/dbformat.h" #include "rocksdb/cache.h" #include "rocksdb/comparator.h" #include "rocksdb/env.h" #include "rocksdb/filter_policy.h" #include "rocksdb/options.h" #include "rocksdb/statistics.h" #include "table/block.h" #include "table/bloom_block.h" #include "table/filter_block.h" #include "table/format.h" #include "table/internal_iterator.h" #include "table/meta_blocks.h" #include "table/two_level_iterator.h" #include "table/plain_table_factory.h" #include "table/plain_table_key_coding.h" #include "table/get_context.h" #include "monitoring/histogram.h" #include "monitoring/perf_context_imp.h" #include "util/arena.h" #include "util/coding.h" #include "util/dynamic_bloom.h" #include "util/hash.h" #include "util/murmurhash.h" #include "util/stop_watch.h" #include "util/string_util.h" namespace rocksdb { namespace { // Safely getting a uint32_t element from a char array, where, starting from // `base`, every 4 bytes are considered as an fixed 32 bit integer. 
inline uint32_t GetFixed32Element(const char* base, size_t offset) { return DecodeFixed32(base + offset * sizeof(uint32_t)); } } // namespace // Iterator to iterate IndexedTable class PlainTableIterator : public InternalIterator { public: explicit PlainTableIterator(PlainTableReader* table, bool use_prefix_seek); ~PlainTableIterator(); bool Valid() const override; void SeekToFirst() override; void SeekToLast() override; void Seek(const Slice& target) override; void SeekForPrev(const Slice& target) override; void Next() override; void Prev() override; Slice key() const override; Slice value() const override; Status status() const override; private: PlainTableReader* table_; PlainTableKeyDecoder decoder_; bool use_prefix_seek_; uint32_t offset_; uint32_t next_offset_; Slice key_; Slice value_; Status status_; // No copying allowed PlainTableIterator(const PlainTableIterator&) = delete; void operator=(const Iterator&) = delete; }; extern const uint64_t kPlainTableMagicNumber; PlainTableReader::PlainTableReader(const ImmutableCFOptions& ioptions, unique_ptr<RandomAccessFileReader>&& file, const EnvOptions& storage_options, const InternalKeyComparator& icomparator, EncodingType encoding_type, uint64_t file_size, const TableProperties* table_properties) : internal_comparator_(icomparator), encoding_type_(encoding_type), full_scan_mode_(false), user_key_len_(static_cast<uint32_t>(table_properties->fixed_key_len)), prefix_extractor_(ioptions.prefix_extractor), enable_bloom_(false), bloom_(6, nullptr), file_info_(std::move(file), storage_options, static_cast<uint32_t>(table_properties->data_size)), ioptions_(ioptions), file_size_(file_size), table_properties_(nullptr) {} PlainTableReader::~PlainTableReader() { } Status PlainTableReader::Open(const ImmutableCFOptions& ioptions, const EnvOptions& env_options, const InternalKeyComparator& internal_comparator, unique_ptr<RandomAccessFileReader>&& file, uint64_t file_size, unique_ptr<TableReader>* table_reader, const int 
bloom_bits_per_key, double hash_table_ratio, size_t index_sparseness, size_t huge_page_tlb_size, bool full_scan_mode) { if (file_size > PlainTableIndex::kMaxFileSize) { return Status::NotSupported("File is too large for PlainTableReader!"); } TableProperties* props = nullptr; auto s = ReadTableProperties(file.get(), file_size, kPlainTableMagicNumber, ioptions, &props); if (!s.ok()) { return s; } assert(hash_table_ratio >= 0.0); auto& user_props = props->user_collected_properties; auto prefix_extractor_in_file = props->prefix_extractor_name; if (!full_scan_mode && !prefix_extractor_in_file.empty() /* old version sst file*/ && prefix_extractor_in_file != "nullptr") { if (!ioptions.prefix_extractor) { return Status::InvalidArgument( "Prefix extractor is missing when opening a PlainTable built " "using a prefix extractor"); } else if (prefix_extractor_in_file.compare( ioptions.prefix_extractor->Name()) != 0) { return Status::InvalidArgument( "Prefix extractor given doesn't match the one used to build " "PlainTable"); } } EncodingType encoding_type = kPlain; auto encoding_type_prop = user_props.find(PlainTablePropertyNames::kEncodingType); if (encoding_type_prop != user_props.end()) { encoding_type = static_cast<EncodingType>( DecodeFixed32(encoding_type_prop->second.c_str())); } std::unique_ptr<PlainTableReader> new_reader(new PlainTableReader( ioptions, std::move(file), env_options, internal_comparator, encoding_type, file_size, props)); s = new_reader->MmapDataIfNeeded(); if (!s.ok()) { return s; } if (!full_scan_mode) { s = new_reader->PopulateIndex(props, bloom_bits_per_key, hash_table_ratio, index_sparseness, huge_page_tlb_size); if (!s.ok()) { return s; } } else { // Flag to indicate it is a full scan mode so that none of the indexes // can be used. 
new_reader->full_scan_mode_ = true; } *table_reader = std::move(new_reader); return s; } void PlainTableReader::SetupForCompaction() { } InternalIterator* PlainTableReader::NewIterator(const ReadOptions& options, Arena* arena, const InternalKeyComparator*, bool skip_filters) { bool use_prefix_seek = !IsTotalOrderMode() && !options.total_order_seek; if (arena == nullptr) { return new PlainTableIterator(this, use_prefix_seek); } else { auto mem = arena->AllocateAligned(sizeof(PlainTableIterator)); return new (mem) PlainTableIterator(this, use_prefix_seek); } } Status PlainTableReader::PopulateIndexRecordList( PlainTableIndexBuilder* index_builder, vector<uint32_t>* prefix_hashes) { Slice prev_key_prefix_slice; std::string prev_key_prefix_buf; uint32_t pos = data_start_offset_; bool is_first_record = true; Slice key_prefix_slice; PlainTableKeyDecoder decoder(&file_info_, encoding_type_, user_key_len_, ioptions_.prefix_extractor); while (pos < file_info_.data_end_offset) { uint32_t key_offset = pos; ParsedInternalKey key; Slice value_slice; bool seekable = false; Status s = Next(&decoder, &pos, &key, nullptr, &value_slice, &seekable); if (!s.ok()) { return s; } key_prefix_slice = GetPrefix(key); if (enable_bloom_) { bloom_.AddHash(GetSliceHash(key.user_key)); } else { if (is_first_record || prev_key_prefix_slice != key_prefix_slice) { if (!is_first_record) { prefix_hashes->push_back(GetSliceHash(prev_key_prefix_slice)); } if (file_info_.is_mmap_mode) { prev_key_prefix_slice = key_prefix_slice; } else { prev_key_prefix_buf = key_prefix_slice.ToString(); prev_key_prefix_slice = prev_key_prefix_buf; } } } index_builder->AddKeyPrefix(GetPrefix(key), key_offset); if (!seekable && is_first_record) { return Status::Corruption("Key for a prefix is not seekable"); } is_first_record = false; } prefix_hashes->push_back(GetSliceHash(key_prefix_slice)); auto s = index_.InitFromRawData(index_builder->Finish()); return s; } void PlainTableReader::AllocateAndFillBloom(int 
bloom_bits_per_key, int num_prefixes, size_t huge_page_tlb_size, vector<uint32_t>* prefix_hashes) { if (!IsTotalOrderMode()) { uint32_t bloom_total_bits = num_prefixes * bloom_bits_per_key; if (bloom_total_bits > 0) { enable_bloom_ = true; bloom_.SetTotalBits(&arena_, bloom_total_bits, ioptions_.bloom_locality, huge_page_tlb_size, ioptions_.info_log); FillBloom(prefix_hashes); } } } void PlainTableReader::FillBloom(vector<uint32_t>* prefix_hashes) { assert(bloom_.IsInitialized()); for (auto prefix_hash : *prefix_hashes) { bloom_.AddHash(prefix_hash); } } Status PlainTableReader::MmapDataIfNeeded() { if (file_info_.is_mmap_mode) { // Get mmapped memory. return file_info_.file->Read(0, file_size_, &file_info_.file_data, nullptr); } return Status::OK(); } Status PlainTableReader::PopulateIndex(TableProperties* props, int bloom_bits_per_key, double hash_table_ratio, size_t index_sparseness, size_t huge_page_tlb_size) { assert(props != nullptr); table_properties_.reset(props); BlockContents index_block_contents; Status s = ReadMetaBlock( file_info_.file.get(), file_size_, kPlainTableMagicNumber, ioptions_, PlainTableIndexBuilder::kPlainTableIndexBlock, &index_block_contents); bool index_in_file = s.ok(); BlockContents bloom_block_contents; bool bloom_in_file = false; // We only need to read the bloom block if index block is in file. if (index_in_file) { s = ReadMetaBlock(file_info_.file.get(), file_size_, kPlainTableMagicNumber, ioptions_, BloomBlockBuilder::kBloomBlock, &bloom_block_contents); bloom_in_file = s.ok() && bloom_block_contents.data.size() > 0; } Slice* bloom_block; if (bloom_in_file) { // If bloom_block_contents.allocation is not empty (which will be the case // for non-mmap mode), it holds the alloated memory for the bloom block. // It needs to be kept alive to keep `bloom_block` valid. 
bloom_block_alloc_ = std::move(bloom_block_contents.allocation); bloom_block = &bloom_block_contents.data; } else { bloom_block = nullptr; } Slice* index_block; if (index_in_file) { // If index_block_contents.allocation is not empty (which will be the case // for non-mmap mode), it holds the alloated memory for the index block. // It needs to be kept alive to keep `index_block` valid. index_block_alloc_ = std::move(index_block_contents.allocation); index_block = &index_block_contents.data; } else { index_block = nullptr; } if ((ioptions_.prefix_extractor == nullptr) && (hash_table_ratio != 0)) { // ioptions.prefix_extractor is requried for a hash-based look-up. return Status::NotSupported( "PlainTable requires a prefix extractor enable prefix hash mode."); } // First, read the whole file, for every kIndexIntervalForSamePrefixKeys rows // for a prefix (starting from the first one), generate a record of (hash, // offset) and append it to IndexRecordList, which is a data structure created // to store them. if (!index_in_file) { // Allocate bloom filter here for total order mode. 
if (IsTotalOrderMode()) { uint32_t num_bloom_bits = static_cast<uint32_t>(table_properties_->num_entries) * bloom_bits_per_key; if (num_bloom_bits > 0) { enable_bloom_ = true; bloom_.SetTotalBits(&arena_, num_bloom_bits, ioptions_.bloom_locality, huge_page_tlb_size, ioptions_.info_log); } } } else if (bloom_in_file) { enable_bloom_ = true; auto num_blocks_property = props->user_collected_properties.find( PlainTablePropertyNames::kNumBloomBlocks); uint32_t num_blocks = 0; if (num_blocks_property != props->user_collected_properties.end()) { Slice temp_slice(num_blocks_property->second); if (!GetVarint32(&temp_slice, &num_blocks)) { num_blocks = 0; } } // cast away const qualifier, because bloom_ won't be changed bloom_.SetRawData( const_cast<unsigned char*>( reinterpret_cast<const unsigned char*>(bloom_block->data())), static_cast<uint32_t>(bloom_block->size()) * 8, num_blocks); } else { // Index in file but no bloom in file. Disable bloom filter in this case. enable_bloom_ = false; bloom_bits_per_key = 0; } PlainTableIndexBuilder index_builder(&arena_, ioptions_, index_sparseness, hash_table_ratio, huge_page_tlb_size); std::vector<uint32_t> prefix_hashes; if (!index_in_file) { s = PopulateIndexRecordList(&index_builder, &prefix_hashes); if (!s.ok()) { return s; } } else { s = index_.InitFromRawData(*index_block); if (!s.ok()) { return s; } } if (!index_in_file) { // Calculated bloom filter size and allocate memory for // bloom filter based on the number of prefixes, then fill it. AllocateAndFillBloom(bloom_bits_per_key, index_.GetNumPrefixes(), huge_page_tlb_size, &prefix_hashes); } // Fill two table properties. 
if (!index_in_file) { props->user_collected_properties["plain_table_hash_table_size"] = ToString(index_.GetIndexSize() * PlainTableIndex::kOffsetLen); props->user_collected_properties["plain_table_sub_index_size"] = ToString(index_.GetSubIndexSize()); } else { props->user_collected_properties["plain_table_hash_table_size"] = ToString(0); props->user_collected_properties["plain_table_sub_index_size"] = ToString(0); } return Status::OK(); } Status PlainTableReader::GetOffset(PlainTableKeyDecoder* decoder, const Slice& target, const Slice& prefix, uint32_t prefix_hash, bool& prefix_matched, uint32_t* offset) const { prefix_matched = false; uint32_t prefix_index_offset; auto res = index_.GetOffset(prefix_hash, &prefix_index_offset); if (res == PlainTableIndex::kNoPrefixForBucket) { *offset = file_info_.data_end_offset; return Status::OK(); } else if (res == PlainTableIndex::kDirectToFile) { *offset = prefix_index_offset; return Status::OK(); } // point to sub-index, need to do a binary search uint32_t upper_bound; const char* base_ptr = index_.GetSubIndexBasePtrAndUpperBound(prefix_index_offset, &upper_bound); uint32_t low = 0; uint32_t high = upper_bound; ParsedInternalKey mid_key; ParsedInternalKey parsed_target; if (!ParseInternalKey(target, &parsed_target)) { return Status::Corruption(Slice()); } // The key is between [low, high). Do a binary search between it. while (high - low > 1) { uint32_t mid = (high + low) / 2; uint32_t file_offset = GetFixed32Element(base_ptr, mid); uint32_t tmp; Status s = decoder->NextKeyNoValue(file_offset, &mid_key, nullptr, &tmp); if (!s.ok()) { return s; } int cmp_result = internal_comparator_.Compare(mid_key, parsed_target); if (cmp_result < 0) { low = mid; } else { if (cmp_result == 0) { // Happen to have found the exact key or target is smaller than the // first key after base_offset. 
prefix_matched = true; *offset = file_offset; return Status::OK(); } else { high = mid; } } } // Both of the key at the position low or low+1 could share the same // prefix as target. We need to rule out one of them to avoid to go // to the wrong prefix. ParsedInternalKey low_key; uint32_t tmp; uint32_t low_key_offset = GetFixed32Element(base_ptr, low); Status s = decoder->NextKeyNoValue(low_key_offset, &low_key, nullptr, &tmp); if (!s.ok()) { return s; } if (GetPrefix(low_key) == prefix) { prefix_matched = true; *offset = low_key_offset; } else if (low + 1 < upper_bound) { // There is possible a next prefix, return it prefix_matched = false; *offset = GetFixed32Element(base_ptr, low + 1); } else { // target is larger than a key of the last prefix in this bucket // but with a different prefix. Key does not exist. *offset = file_info_.data_end_offset; } return Status::OK(); } bool PlainTableReader::MatchBloom(uint32_t hash) const { if (!enable_bloom_) { return true; } if (bloom_.MayContainHash(hash)) { PERF_COUNTER_ADD(bloom_sst_hit_count, 1); return true; } else { PERF_COUNTER_ADD(bloom_sst_miss_count, 1); return false; } } Status PlainTableReader::Next(PlainTableKeyDecoder* decoder, uint32_t* offset, ParsedInternalKey* parsed_key, Slice* internal_key, Slice* value, bool* seekable) const { if (*offset == file_info_.data_end_offset) { *offset = file_info_.data_end_offset; return Status::OK(); } if (*offset > file_info_.data_end_offset) { return Status::Corruption("Offset is out of file size"); } uint32_t bytes_read; Status s = decoder->NextKey(*offset, parsed_key, internal_key, value, &bytes_read, seekable); if (!s.ok()) { return s; } *offset = *offset + bytes_read; return Status::OK(); } void PlainTableReader::Prepare(const Slice& target) { if (enable_bloom_) { uint32_t prefix_hash = GetSliceHash(GetPrefix(target)); bloom_.Prefetch(prefix_hash); } } Status PlainTableReader::Get(const ReadOptions& ro, const Slice& target, GetContext* get_context, bool skip_filters) 
{ // Check bloom filter first. Slice prefix_slice; uint32_t prefix_hash; if (IsTotalOrderMode()) { if (full_scan_mode_) { status_ = Status::InvalidArgument("Get() is not allowed in full scan mode."); } // Match whole user key for bloom filter check. if (!MatchBloom(GetSliceHash(GetUserKey(target)))) { return Status::OK(); } // in total order mode, there is only one bucket 0, and we always use empty // prefix. prefix_slice = Slice(); prefix_hash = 0; } else { prefix_slice = GetPrefix(target); prefix_hash = GetSliceHash(prefix_slice); if (!MatchBloom(prefix_hash)) { return Status::OK(); } } uint32_t offset; bool prefix_match; PlainTableKeyDecoder decoder(&file_info_, encoding_type_, user_key_len_, ioptions_.prefix_extractor); Status s = GetOffset(&decoder, target, prefix_slice, prefix_hash, prefix_match, &offset); if (!s.ok()) { return s; } ParsedInternalKey found_key; ParsedInternalKey parsed_target; if (!ParseInternalKey(target, &parsed_target)) { return Status::Corruption(Slice()); } Slice found_value; while (offset < file_info_.data_end_offset) { s = Next(&decoder, &offset, &found_key, nullptr, &found_value); if (!s.ok()) { return s; } if (!prefix_match) { // Need to verify prefix for the first key found if it is not yet // checked. if (GetPrefix(found_key) != prefix_slice) { return Status::OK(); } prefix_match = true; } // TODO(ljin): since we know the key comparison result here, // can we enable the fast path? 
if (internal_comparator_.Compare(found_key, parsed_target) >= 0) { if (!get_context->SaveValue(found_key, found_value)) { break; } } } return Status::OK(); } uint64_t PlainTableReader::ApproximateOffsetOf(const Slice& key) { return 0; } PlainTableIterator::PlainTableIterator(PlainTableReader* table, bool use_prefix_seek) : table_(table), decoder_(&table_->file_info_, table_->encoding_type_, table_->user_key_len_, table_->prefix_extractor_), use_prefix_seek_(use_prefix_seek) { next_offset_ = offset_ = table_->file_info_.data_end_offset; } PlainTableIterator::~PlainTableIterator() { } bool PlainTableIterator::Valid() const { return offset_ < table_->file_info_.data_end_offset && offset_ >= table_->data_start_offset_; } void PlainTableIterator::SeekToFirst() { next_offset_ = table_->data_start_offset_; if (next_offset_ >= table_->file_info_.data_end_offset) { next_offset_ = offset_ = table_->file_info_.data_end_offset; } else { Next(); } } void PlainTableIterator::SeekToLast() { assert(false); status_ = Status::NotSupported("SeekToLast() is not supported in PlainTable"); } void PlainTableIterator::Seek(const Slice& target) { if (use_prefix_seek_ != !table_->IsTotalOrderMode()) { // This check is done here instead of NewIterator() to permit creating an // iterator with total_order_seek = true even if we won't be able to Seek() // it. This is needed for compaction: it creates iterator with // total_order_seek = true but usually never does Seek() on it, // only SeekToFirst(). status_ = Status::InvalidArgument( "total_order_seek not implemented for PlainTable."); offset_ = next_offset_ = table_->file_info_.data_end_offset; return; } // If the user doesn't set prefix seek option and we are not able to do a // total Seek(). assert failure. 
if (table_->IsTotalOrderMode()) { if (table_->full_scan_mode_) { status_ = Status::InvalidArgument("Seek() is not allowed in full scan mode."); offset_ = next_offset_ = table_->file_info_.data_end_offset; return; } else if (table_->GetIndexSize() > 1) { assert(false); status_ = Status::NotSupported( "PlainTable cannot issue non-prefix seek unless in total order " "mode."); offset_ = next_offset_ = table_->file_info_.data_end_offset; return; } } Slice prefix_slice = table_->GetPrefix(target); uint32_t prefix_hash = 0; // Bloom filter is ignored in total-order mode. if (!table_->IsTotalOrderMode()) { prefix_hash = GetSliceHash(prefix_slice); if (!table_->MatchBloom(prefix_hash)) { offset_ = next_offset_ = table_->file_info_.data_end_offset; return; } } bool prefix_match; status_ = table_->GetOffset(&decoder_, target, prefix_slice, prefix_hash, prefix_match, &next_offset_); if (!status_.ok()) { offset_ = next_offset_ = table_->file_info_.data_end_offset; return; } if (next_offset_ < table_->file_info_.data_end_offset) { for (Next(); status_.ok() && Valid(); Next()) { if (!prefix_match) { // Need to verify the first key's prefix if (table_->GetPrefix(key()) != prefix_slice) { offset_ = next_offset_ = table_->file_info_.data_end_offset; break; } prefix_match = true; } if (table_->internal_comparator_.Compare(key(), target) >= 0) { break; } } } else { offset_ = table_->file_info_.data_end_offset; } } void PlainTableIterator::SeekForPrev(const Slice& target) { assert(false); status_ = Status::NotSupported("SeekForPrev() is not supported in PlainTable"); } void PlainTableIterator::Next() { offset_ = next_offset_; if (offset_ < table_->file_info_.data_end_offset) { Slice tmp_slice; ParsedInternalKey parsed_key; status_ = table_->Next(&decoder_, &next_offset_, &parsed_key, &key_, &value_); if (!status_.ok()) { offset_ = next_offset_ = table_->file_info_.data_end_offset; } } } void PlainTableIterator::Prev() { assert(false); } Slice PlainTableIterator::key() const { 
assert(Valid()); return key_; } Slice PlainTableIterator::value() const { assert(Valid()); return value_; } Status PlainTableIterator::status() const { return status_; } } // namespace rocksdb #endif // ROCKSDB_LITE
{ "content_hash": "025b2733c8a82eac1a7cdfdd8771cf56", "timestamp": "", "source": "github", "line_count": 747, "max_line_length": 80, "avg_line_length": 32.805890227576974, "alnum_prop": 0.6231535134252836, "repo_name": "hkernbach/arangodb", "id": "0f9449e8669a0100bd1e6d53c85aa0afd23230b5", "size": "24506", "binary": false, "copies": "4", "ref": "refs/heads/devel", "path": "3rdParty/rocksdb/v5.6.X/table/plain_table_reader.cc", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Ada", "bytes": "89079" }, { "name": "Assembly", "bytes": "391227" }, { "name": "Awk", "bytes": "7502" }, { "name": "Batchfile", "bytes": "62496" }, { "name": "C", "bytes": "9184899" }, { "name": "C#", "bytes": "96431" }, { "name": "C++", "bytes": "278343201" }, { "name": "CMake", "bytes": "664691" }, { "name": "CSS", "bytes": "650173" }, { "name": "CWeb", "bytes": "174166" }, { "name": "Cuda", "bytes": "52444" }, { "name": "DIGITAL Command Language", "bytes": "259402" }, { "name": "Emacs Lisp", "bytes": "14637" }, { "name": "Fortran", "bytes": "1856" }, { "name": "Groovy", "bytes": "51836" }, { "name": "HTML", "bytes": "2415724" }, { "name": "Java", "bytes": "1048556" }, { "name": "JavaScript", "bytes": "54219725" }, { "name": "LLVM", "bytes": "24019" }, { "name": "Lex", "bytes": "1231" }, { "name": "Lua", "bytes": "17899" }, { "name": "M4", "bytes": "658700" }, { "name": "Makefile", "bytes": "522586" }, { "name": "Max", "bytes": "36857" }, { "name": "Module Management System", "bytes": "1545" }, { "name": "NSIS", "bytes": "42998" }, { "name": "Objective-C", "bytes": "98866" }, { "name": "Objective-C++", "bytes": "2503" }, { "name": "PHP", "bytes": "118092" }, { "name": "Pascal", "bytes": "150599" }, { "name": "Perl", "bytes": "906737" }, { "name": "Perl 6", "bytes": "25883" }, { "name": "PowerShell", "bytes": "20434" }, { "name": "Python", "bytes": "4557865" }, { "name": "QMake", "bytes": "16692" }, { "name": "R", "bytes": "5123" }, { "name": "Rebol", "bytes": "354" }, { 
"name": "Roff", "bytes": "1089418" }, { "name": "Ruby", "bytes": "1141022" }, { "name": "SAS", "bytes": "1847" }, { "name": "Scheme", "bytes": "10604" }, { "name": "Shell", "bytes": "508528" }, { "name": "Swift", "bytes": "116" }, { "name": "Tcl", "bytes": "1172" }, { "name": "TeX", "bytes": "32117" }, { "name": "Visual Basic", "bytes": "11568" }, { "name": "XSLT", "bytes": "567028" }, { "name": "Yacc", "bytes": "53063" } ], "symlink_target": "" }
#include "config.h"

#include "core/rendering/RenderLayerModelObject.h"

#include "core/frame/LocalFrame.h"
#include "core/rendering/RenderLayer.h"
#include "core/rendering/RenderView.h"

namespace blink {

// Scratch flag carried from styleWillChange() to styleDidChange() so the
// post-change code knows whether the renderer was floating before the change.
bool RenderLayerModelObject::s_wasFloating = false;

RenderLayerModelObject::RenderLayerModelObject(ContainerNode* node)
    : RenderObject(node)
{
}

RenderLayerModelObject::~RenderLayerModelObject()
{
    // Our layer should have been destroyed and cleared by now
    ASSERT(!hasLayer());
    ASSERT(!m_layer);
}

// Releases ownership of the RenderLayer and clears the has-layer flag.
void RenderLayerModelObject::destroyLayer()
{
    setHasLayer(false);
    m_layer = nullptr;
}

// Allocates a RenderLayer of |type| for this renderer and links it into the
// layer tree.
void RenderLayerModelObject::createLayer(LayerType type)
{
    ASSERT(!m_layer);
    m_layer = adoptPtr(new RenderLayer(this, type));
    setHasLayer(true);
    m_layer->insertOnlyThisLayer();
}

bool RenderLayerModelObject::hasSelfPaintingLayer() const
{
    return m_layer && m_layer->isSelfPaintingLayer();
}

ScrollableArea* RenderLayerModelObject::scrollableArea() const
{
    return m_layer ? m_layer->scrollableArea() : 0;
}

void RenderLayerModelObject::willBeDestroyed()
{
    // Unregister from the FrameView's viewport-constrained set before the
    // base class tears the renderer down.
    if (isPositioned()) {
        // Don't use this->view() because the document's renderView has been set to 0 during destruction.
        if (LocalFrame* frame = this->frame()) {
            if (FrameView* frameView = frame->view()) {
                if (style()->hasViewportConstrainedPosition())
                    frameView->removeViewportConstrainedObject(this);
            }
        }
    }

    RenderObject::willBeDestroyed();

    destroyLayer();
}

// Records the pre-change floating state and clears cached clip rects when the
// clip style is about to change.
void RenderLayerModelObject::styleWillChange(StyleDifference diff, const RenderStyle& newStyle)
{
    s_wasFloating = isFloating();

    if (RenderStyle* oldStyle = style()) {
        if (parent() && diff.needsPaintInvalidationLayer()) {
            if (oldStyle->hasAutoClip() != newStyle.hasAutoClip()
                || oldStyle->clip() != newStyle.clip())
                layer()->clipper().clearClipRectsIncludingDescendants();
        }
    }

    RenderObject::styleWillChange(diff, newStyle);
}

// Creates or removes this renderer's RenderLayer as required by the new style
// and keeps the FrameView's viewport-constrained object set in sync.
void RenderLayerModelObject::styleDidChange(StyleDifference diff, const RenderStyle* oldStyle)
{
    bool hadTransform = hasTransform();

    RenderObject::styleDidChange(diff, oldStyle);
    updateFromStyle();

    LayerType type = layerTypeRequired();
    if (type != NoLayer) {
        if (!layer() && layerCreationAllowedForSubtree()) {
            // A floating object gaining a layer changes layout; relayout children.
            if (s_wasFloating && isFloating())
                setChildNeedsLayout();
            createLayer(type);
            if (parent() && !needsLayout()) {
                // FIXME: This invalidation is overly broad. We should update to
                // do the correct invalidation at RenderStyle::diff time. crbug.com/349061
                layer()->renderer()->setShouldDoFullPaintInvalidation(true);
                // FIXME: We should call a specialized version of this function.
                layer()->updateLayerPositionsAfterLayout();
            }
        }
    } else if (layer() && layer()->parent()) {
        setHasTransform(false); // Either a transform wasn't specified or the object doesn't support transforms, so just null out the bit.
        setHasReflection(false);
        layer()->removeOnlyThisLayer(); // calls destroyLayer() which clears m_layer
        if (s_wasFloating && isFloating())
            setChildNeedsLayout();
        if (hadTransform)
            setNeedsLayoutAndPrefWidthsRecalcAndFullPaintInvalidation();
    }

    if (layer()) {
        // FIXME: Ideally we shouldn't need this setter but we can't easily infer an overflow-only layer
        // from the style.
        layer()->setLayerType(type);
        layer()->styleChanged(diff, oldStyle);
    }

    // NOTE(review): 'newStyleIsViewportConstained' is misspelled ("Constained");
    // renaming would be a code change, so it is left as-is here.
    if (FrameView *frameView = view()->frameView()) {
        bool newStyleIsViewportConstained = style()->hasViewportConstrainedPosition();
        bool oldStyleIsViewportConstrained = oldStyle && oldStyle->hasViewportConstrainedPosition();
        if (newStyleIsViewportConstained != oldStyleIsViewportConstrained) {
            if (newStyleIsViewportConstained && layer())
                frameView->addViewportConstrainedObject(this);
            else
                frameView->removeViewportConstrainedObject(this);
        }
    }
}

void RenderLayerModelObject::addLayerHitTestRects(LayerHitTestRects& rects, const RenderLayer* currentLayer, const LayoutPoint& layerOffset, const LayoutRect& containerRect) const
{
    if (hasLayer()) {
        if (isRenderView()) {
            // RenderView is handled with a special fast-path, but it needs to know the current layer.
            RenderObject::addLayerHitTestRects(rects, layer(), LayoutPoint(), LayoutRect());
        } else {
            // Since a RenderObject never lives outside it's container RenderLayer, we can switch
            // to marking entire layers instead. This may sometimes mark more than necessary (when
            // a layer is made of disjoint objects) but in practice is a significant performance
            // savings.
            layer()->addLayerHitTestRects(rects);
        }
    } else {
        RenderObject::addLayerHitTestRects(rects, currentLayer, layerOffset, containerRect);
    }
}

// Updates the cached paint-invalidation geometry for this object, then decides
// whether an invalidation must actually be issued.
InvalidationReason RenderLayerModelObject::invalidatePaintIfNeeded(const PaintInvalidationState& paintInvalidationState, const RenderLayerModelObject& newPaintInvalidationContainer)
{
    const LayoutRect oldPaintInvalidationRect = previousPaintInvalidationRect();
    const LayoutPoint oldPositionFromPaintInvalidationContainer = previousPositionFromPaintInvalidationContainer();
    setPreviousPaintInvalidationRect(boundsRectForPaintInvalidation(&newPaintInvalidationContainer, &paintInvalidationState));
    setPreviousPositionFromPaintInvalidationContainer(RenderLayer::positionFromPaintInvalidationContainer(this, &newPaintInvalidationContainer, &paintInvalidationState));

    // If we are set to do a full paint invalidation that means the RenderView will issue
    // paint invalidations. We can then skip issuing of paint invalidations for the child
    // renderers as they'll be covered by the RenderView.
    if (view()->doingFullPaintInvalidation())
        return InvalidationNone;

    return RenderObject::invalidatePaintIfNeeded(newPaintInvalidationContainer, oldPaintInvalidationRect, oldPositionFromPaintInvalidationContainer, paintInvalidationState);
}

// Walks this subtree after layout, invalidating paint where needed and
// propagating a "force check" flag to children when this object moved or was
// fully invalidated.
void RenderLayerModelObject::invalidateTreeIfNeeded(const PaintInvalidationState& paintInvalidationState)
{
    // FIXME: SVG should probably also go through this unified paint invalidation system.
    ASSERT(!needsLayout());

    if (!shouldCheckForPaintInvalidation(paintInvalidationState))
        return;

    bool establishesNewPaintInvalidationContainer = isPaintInvalidationContainer();
    const RenderLayerModelObject& newPaintInvalidationContainer = *adjustCompositedContainerForSpecialAncestors(establishesNewPaintInvalidationContainer ? this : &paintInvalidationState.paintInvalidationContainer());
    ASSERT(&newPaintInvalidationContainer == containerForPaintInvalidation());

    InvalidationReason reason = invalidatePaintIfNeeded(paintInvalidationState, newPaintInvalidationContainer);

    PaintInvalidationState childTreeWalkState(paintInvalidationState, *this, newPaintInvalidationContainer);
    if (reason == InvalidationLocationChange || reason == InvalidationFull)
        childTreeWalkState.setForceCheckForPaintInvalidation();
    RenderObject::invalidateTreeIfNeeded(childTreeWalkState);
}

} // namespace blink
{ "content_hash": "c84e7c77bbf916c350451ab71cf7a39f", "timestamp": "", "source": "github", "line_count": 187, "max_line_length": 216, "avg_line_length": 40.17112299465241, "alnum_prop": 0.713258785942492, "repo_name": "xin3liang/platform_external_chromium_org_third_party_WebKit", "id": "e340de3fd4ebb33283c95641b5379b1fb386d4aa", "size": "8665", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Source/core/rendering/RenderLayerModelObject.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "14584" }, { "name": "C", "bytes": "1474298" }, { "name": "C++", "bytes": "40172832" }, { "name": "CSS", "bytes": "381605" }, { "name": "Java", "bytes": "66510" }, { "name": "JavaScript", "bytes": "9259993" }, { "name": "Objective-C", "bytes": "23525" }, { "name": "Objective-C++", "bytes": "377761" }, { "name": "PHP", "bytes": "3941" }, { "name": "Perl", "bytes": "492247" }, { "name": "Python", "bytes": "3698359" }, { "name": "Ruby", "bytes": "141818" }, { "name": "Shell", "bytes": "8806" } ], "symlink_target": "" }
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

cr.define('options', function() {
  'use strict';

  var OptionsPage = options.OptionsPage;
  var ArrayDataModel = cr.ui.ArrayDataModel;

  /**
   * ManageProfileOverlay class
   * Encapsulated handling of the 'Manage profile...' overlay page.
   * @constructor
   * @class
   */
  function ManageProfileOverlay() {
    OptionsPage.call(this, 'manageProfile',
                     loadTimeData.getString('manageProfileTabTitle'),
                     'manage-profile-overlay');
  };

  cr.addSingletonGetter(ManageProfileOverlay);

  ManageProfileOverlay.prototype = {
    // Inherit from OptionsPage.
    __proto__: OptionsPage.prototype,

    // Info about the currently managed/deleted profile.
    profileInfo_: null,

    // An object containing all known profile names.
    profileNames_: {},

    // The currently selected icon in the icon grid.
    iconGridSelectedURL_: null,

    /**
     * Initialize the page. Wires up the icon grids, the name fields, and the
     * OK/cancel buttons for the create, manage and delete sub-dialogs.
     */
    initializePage: function() {
      // Call base class implementation to start preference initialization.
      OptionsPage.prototype.initializePage.call(this);

      var self = this;
      var iconGrid = $('manage-profile-icon-grid');
      var createIconGrid = $('create-profile-icon-grid');
      options.ProfilesIconGrid.decorate(iconGrid);
      options.ProfilesIconGrid.decorate(createIconGrid);
      iconGrid.addEventListener('change', function(e) {
        self.onIconGridSelectionChanged_('manage');
      });
      createIconGrid.addEventListener('change', function(e) {
        self.onIconGridSelectionChanged_('create');
      });

      $('manage-profile-name').oninput = function(event) {
        self.onNameChanged_(event, 'manage');
      };
      $('create-profile-name').oninput = function(event) {
        self.onNameChanged_(event, 'create');
      };
      // One shared cancel handler closes whichever sub-dialog is open.
      $('manage-profile-cancel').onclick =
          $('delete-profile-cancel').onclick =
              $('create-profile-cancel').onclick = function(event) {
        OptionsPage.closeOverlay();
      };
      $('manage-profile-ok').onclick = function(event) {
        OptionsPage.closeOverlay();
        self.submitManageChanges_();
      };
      $('delete-profile-ok').onclick = function(event) {
        OptionsPage.closeOverlay();
        chrome.send('deleteProfile', [self.profileInfo_.filePath]);
      };
      $('create-profile-ok').onclick = function(event) {
        OptionsPage.closeOverlay();
        // Get the user's chosen name and icon, or default if they do not
        // wish to customize their profile.
        var name = $('create-profile-name').value;
        var icon_url = createIconGrid.selectedItem;
        chrome.send('createProfile', [name, icon_url]);
      };
    },

    /** @inheritDoc */
    didShowPage: function() {
      chrome.send('requestDefaultProfileIcons');

      // Just ignore the manage profile dialog on Chrome OS, they use /accounts.
      if (!cr.isChromeOS && window.location.pathname == '/manageProfile')
        ManageProfileOverlay.getInstance().prepareForManageDialog_();

      $('manage-profile-name').focus();
      $('create-profile-name').focus();
    },

    /**
     * Set the profile info used in the dialog.
     * @param {Object} profileInfo An object of the form:
     *     profileInfo = {
     *       name: "Profile Name",
     *       iconURL: "chrome://path/to/icon/image",
     *       filePath: "/path/to/profile/data/on/disk"
     *       isCurrentProfile: false,
     *     };
     * @param {String} mode A label that specifies the type of dialog
     *     box which is currently being viewed (i.e. 'create' or
     *     'manage').
     * @private
     */
    setProfileInfo_: function(profileInfo, mode) {
      this.iconGridSelectedURL_ = profileInfo.iconURL;
      this.profileInfo_ = profileInfo;
      $(mode + '-profile-name').value = profileInfo.name;
      $(mode + '-profile-icon-grid').selectedItem = profileInfo.iconURL;
    },

    /**
     * Sets the name of the currently edited profile.
     * @private
     */
    setProfileName_: function(name) {
      if (this.profileInfo_)
        this.profileInfo_.name = name;
      $('manage-profile-name').value = name;
    },

    /**
     * Populates the given icon grid with the default profile icons that
     * the user will use to choose their profile icon.
     * @param {string} iconGrid The DOM id of the icon grid to populate.
     * @param {Array.<string>} iconURLs An array of icon URLs.
     * @private
     */
    receiveDefaultProfileIcons_: function(iconGrid, iconURLs) {
      $(iconGrid).dataModel = new ArrayDataModel(iconURLs);

      if (this.profileInfo_)
        $(iconGrid).selectedItem = this.profileInfo_.iconURL;

      var grid = $(iconGrid);
      // Recalculate the measured item size.
      grid.measured_ = null;
      grid.columns = 0;
      grid.redraw();
    },

    /**
     * Set a dictionary of all profile names. These are used to prevent the
     * user from naming two profiles the same.
     * @param {Object} profileNames A dictionary of profile names.
     * @private
     */
    receiveProfileNames_: function(profileNames) {
      this.profileNames_ = profileNames;
    },

    /**
     * Display the error bubble, with |errorText| in the bubble.
     * @param {string} errorText The localized string id to display as an error.
     * @param {String} mode A label that specifies the type of dialog
     *     box which is currently being viewed (i.e. 'create' or
     *     'manage').
     * @private
     */
    showErrorBubble_: function(errorText, mode) {
      var nameErrorEl = $(mode + '-profile-error-bubble');
      nameErrorEl.hidden = false;
      nameErrorEl.textContent = loadTimeData.getString(errorText);

      // Disallow submitting while the error is showing.
      $(mode + '-profile-ok').disabled = true;
    },

    /**
     * Hide the error bubble.
     * @param {String} mode A label that specifies the type of dialog
     *     box which is currently being viewed (i.e. 'create' or
     *     'manage').
     * @private
     */
    hideErrorBubble_: function(mode) {
      $(mode + '-profile-error-bubble').hidden = true;
      $(mode + '-profile-ok').disabled = false;
    },

    /**
     * oninput callback for <input> field. Shows a duplicate-name error when
     * the new name collides with an existing profile, otherwise enables the
     * OK button only if the field's HTML validity check passes.
     * @param {Event} event The event object.
     * @param {String} mode A label that specifies the type of dialog
     *     box which is currently being viewed (i.e. 'create' or
     *     'manage').
     * @private
     */
    onNameChanged_: function(event, mode) {
      var newName = event.target.value;
      var oldName = this.profileInfo_.name;

      if (newName == oldName) {
        this.hideErrorBubble_(mode);
      } else if (this.profileNames_[newName] != undefined) {
        this.showErrorBubble_('manageProfilesDuplicateNameError', mode);
      } else {
        this.hideErrorBubble_(mode);

        var nameIsValid = $(mode + '-profile-name').validity.valid;
        $(mode + '-profile-ok').disabled = !nameIsValid;
      }
    },

    /**
     * Called when the user clicks "OK". Saves the newly changed profile info.
     * @private
     */
    submitManageChanges_: function() {
      var name = $('manage-profile-name').value;
      var iconURL = $('manage-profile-icon-grid').selectedItem;
      chrome.send('setProfileNameAndIcon',
                  [this.profileInfo_.filePath, name, iconURL]);
    },

    /**
     * Called when the selected icon in the icon grid changes.
     * @param {String} mode A label that specifies the type of dialog
     *     box which is currently being viewed (i.e. 'create' or
     *     'manage').
     * @private
     */
    onIconGridSelectionChanged_: function(mode) {
      var iconURL = $(mode + '-profile-icon-grid').selectedItem;
      // Ignore spurious change events that don't alter the selection.
      if (!iconURL || iconURL == this.iconGridSelectedURL_)
        return;
      this.iconGridSelectedURL_ = iconURL;
      chrome.send('profileIconSelectionChanged',
                  [this.profileInfo_.filePath, iconURL]);
    },

    /**
     * Updates the contents of the "Manage Profile" section of the dialog,
     * and shows that section.
     * @private
     */
    prepareForManageDialog_: function() {
      var profileInfo = BrowserOptions.getCurrentProfile();
      ManageProfileOverlay.setProfileInfo(profileInfo, 'manage');
      $('manage-profile-overlay-create').hidden = true;
      $('manage-profile-overlay-manage').hidden = false;
      $('manage-profile-overlay-delete').hidden = true;
      this.hideErrorBubble_('manage');
    },

    /**
     * Display the "Manage Profile" dialog.
     * @private
     */
    showManageDialog_: function() {
      this.prepareForManageDialog_();
      OptionsPage.navigateToPage('manageProfile');
    },

    /**
     * Display the "Delete Profile" dialog.
     * @param {Object} profileInfo The profile object of the profile to delete.
     * @private
     */
    showDeleteDialog_: function(profileInfo) {
      ManageProfileOverlay.setProfileInfo(profileInfo, 'manage');
      $('manage-profile-overlay-create').hidden = true;
      $('manage-profile-overlay-manage').hidden = true;
      $('manage-profile-overlay-delete').hidden = false;
      $('delete-profile-message').textContent =
          loadTimeData.getStringF('deleteProfileMessage', profileInfo.name);
      $('delete-profile-message').style.backgroundImage = 'url("' +
          profileInfo.iconURL + '")';

      // Because this dialog isn't useful when refreshing or as part of the
      // history, don't create a history entry for it when showing.
      OptionsPage.showPageByName('manageProfile', false);
    },

    /**
     * Display the "Create Profile" dialog.
     * @param {Object} profileInfo The profile object of the profile to
     *     create. Upon creation, this object only needs a name and an avatar.
     * @private
     */
    showCreateDialog_: function(profileInfo) {
      ManageProfileOverlay.setProfileInfo(profileInfo, 'create');
      $('manage-profile-overlay-create').hidden = false;
      $('manage-profile-overlay-manage').hidden = true;
      $('manage-profile-overlay-delete').hidden = true;
      $('create-profile-instructions').textContent =
          loadTimeData.getStringF('createProfileInstructions');
      ManageProfileOverlay.getInstance().hideErrorBubble_('create');

      OptionsPage.showPageByName('manageProfile', false);
    },
  };

  // Forward public APIs to private implementations.
  [
    'receiveDefaultProfileIcons',
    'receiveProfileNames',
    'setProfileInfo',
    'setProfileName',
    'showManageDialog',
    'showDeleteDialog',
    'showCreateDialog',
  ].forEach(function(name) {
    ManageProfileOverlay[name] = function() {
      var instance = ManageProfileOverlay.getInstance();
      return instance[name + '_'].apply(instance, arguments);
    };
  });

  // Export
  return {
    ManageProfileOverlay: ManageProfileOverlay
  };
});
{ "content_hash": "6d0780e4c5654634c954dd98c28c2e72", "timestamp": "", "source": "github", "line_count": 317, "max_line_length": 80, "avg_line_length": 34.13564668769716, "alnum_prop": 0.6276684225117827, "repo_name": "Crystalnix/BitPop", "id": "c4997f71d46068658554035bd2e0787825c8c77b", "size": "10821", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "chrome/browser/resources/options2/manage_profile_overlay.js", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "AppleScript", "bytes": "6973" }, { "name": "Arduino", "bytes": "464" }, { "name": "Assembly", "bytes": "1871" }, { "name": "C", "bytes": "1472539" }, { "name": "C++", "bytes": "68615409" }, { "name": "Java", "bytes": "465810" }, { "name": "JavaScript", "bytes": "17052804" }, { "name": "Objective-C", "bytes": "5073580" }, { "name": "PHP", "bytes": "97817" }, { "name": "Perl", "bytes": "64450" }, { "name": "Python", "bytes": "2794547" }, { "name": "Ruby", "bytes": "650" }, { "name": "Shell", "bytes": "262004" }, { "name": "XSLT", "bytes": "418" } ], "symlink_target": "" }
//------------------------------------------------------------------------------
// <auto-generated>
//     This code was generated by a tool.
//     Runtime Version:4.0.30319.42000
//
//     Changes to this file may cause incorrect behavior and will be lost if
//     the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------

namespace EmpiricalListBoxDragAndDrop.Properties {
    using System;


    /// <summary>
    ///   A strongly-typed resource class, for looking up localized strings, etc.
    /// </summary>
    // This class was auto-generated by the StronglyTypedResourceBuilder
    // class via a tool like ResGen or Visual Studio.
    // To add or remove a member, edit your .ResX file then rerun ResGen
    // with the /str option, or rebuild your VS project.
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    internal class Resources {

        private static global::System.Resources.ResourceManager resourceMan;

        private static global::System.Globalization.CultureInfo resourceCulture;

        [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
        internal Resources() {
        }

        /// <summary>
        ///   Returns the cached ResourceManager instance used by this class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Resources.ResourceManager ResourceManager {
            get {
                if ((resourceMan == null)) {
                    global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("EmpiricalListBoxDragAndDrop.Properties.Resources", typeof(Resources).Assembly);
                    resourceMan = temp;
                }
                return resourceMan;
            }
        }

        /// <summary>
        ///   Overrides the current thread's CurrentUICulture property for all
        ///   resource lookups using this strongly typed resource class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Globalization.CultureInfo Culture {
            get {
                return resourceCulture;
            }
            set {
                resourceCulture = value;
            }
        }
    }
}
{ "content_hash": "3d8d650265932357d5b0ad99520a7483", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 178, "avg_line_length": 32.028169014084504, "alnum_prop": 0.6978891820580475, "repo_name": "daydreammodel3/EmpiricalListBoxDragAndDrop", "id": "3599a80226cedcd5420469819770d469b18d7f90", "size": "2920", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "EmpiricalListBoxDragAndDrop/Properties/Resources.Designer.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "6798" } ], "symlink_target": "" }
namespace llvm {

class BlockFrequencyInfo;
class Function;

// Returns the maximum frequency of a BB in a function.
// Uses the supplied BlockFrequencyInfo to query per-block frequencies.
uint64_t getMaxFreq(const Function &F, const BlockFrequencyInfo *BFI);

// Calculates heat color based on current and maximum frequencies.
// Returns the color encoded as a string.
std::string getHeatColor(uint64_t freq, uint64_t maxFreq);

// Calculates heat color based on percent of "hotness".
std::string getHeatColor(double percent);

} // namespace llvm

#endif
{ "content_hash": "28b0873c62268a2b87530adadab06dd3", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 70, "avg_line_length": 26.058823529411764, "alnum_prop": 0.7720090293453724, "repo_name": "endlessm/chromium-browser", "id": "9ecca6a69097f416a55b29ec95015e5745d9aa87", "size": "1047", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "third_party/llvm/llvm/include/llvm/Analysis/HeatUtils.h", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
package gitfile import ( "fmt" "github.com/hashicorp/errwrap" "github.com/hashicorp/terraform/helper/hashcode" "os/exec" "strings" "sync" ) func gitCommand(checkout_dir string, args ...string) ([]byte, error) { command := exec.Command("git", args...) command.Dir = checkout_dir out, err := command.CombinedOutput() if err != nil { return out, errwrap.Wrapf(fmt.Sprintf("Error while running git %s: {{err}}\nWorking dir: %s\nOutput: %s", strings.Join(args, " "), checkout_dir, string(out)), err) } else { return out, err } } func flatten(args ...interface{}) []string { ret := make([]string, 0, len(args)) for _, arg := range args { switch arg := arg.(type) { default: panic("can only handle strings and []strings") case string: ret = append(ret, arg) case []string: ret = append(ret, arg...) } } return ret } func hashString(v interface{}) int { switch v := v.(type) { default: panic(fmt.Sprintf("unexpectedtype %T", v)) case string: return hashcode.String(v) } } // map of checkout_dir to lock. file, commit, and checkout should grab the lock corresponding to a checkout dir // around create/read/update/delete operations. var checkoutLocks map[string]*sync.Mutex func lockCheckout(checkout_dir string) { if checkoutLocks == nil { checkoutLocks = map[string]*sync.Mutex{} } if checkoutLocks[checkout_dir] == nil { checkoutLocks[checkout_dir] = new(sync.Mutex) } checkoutLocks[checkout_dir].Lock() } func unlockCheckout(checkout_dir string) { checkoutLocks[checkout_dir].Unlock() }
{ "content_hash": "847fb8dc74de661be9a8634f66fbf3a1", "timestamp": "", "source": "github", "line_count": 65, "max_line_length": 165, "avg_line_length": 23.815384615384616, "alnum_prop": 0.6821705426356589, "repo_name": "Yelp/terraform-provider-gitfile", "id": "49cd4b830320e902239331d4b24059bbfc363801", "size": "1548", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "gitfile/helpers.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "14323" }, { "name": "HCL", "bytes": "1729" }, { "name": "Makefile", "bytes": "2348" }, { "name": "Shell", "bytes": "3187" } ], "symlink_target": "" }
define(function(require) {
    'use strict';

    var domready = require('vendor/domready');

    /**
     * Helper that creates a terminal with a default display and shell.
     *
     * @param {String|Element} element DOM element or selector where display will be rendered
     * @param {Object} [displaySettings] Optional display parameters
     * @constructor
     */
    var terminal = function(element, displaySettings) {
        var self = this;

        // Fix: displaySettings is documented as optional but was dereferenced
        // unconditionally; default it so `new terminal(el)` doesn't throw.
        displaySettings = displaySettings || {};

        // Setup shell: use the caller-provided shell or create a default one.
        this.shell = displaySettings.shell || (new terminal.Shell());
        if(!this.shell || !(this.shell instanceof terminal.Shell)) {
            console.error('terminal.constructor: Provided shell not valid');
            return;
        }
        displaySettings.shell = this.shell;

        // Setup display once the DOM is ready.
        domready(function(){
            element = (typeof element === 'string') ? document.querySelector(element) : element;
            if(!element) {
                // Robustness: report a missing mount point instead of handing
                // a null element to the Display constructor.
                console.error('terminal.constructor: Display element not found');
                return;
            }
            self.display = new terminal.Display(element, displaySettings);
        });
    };

    terminal.prototype = {
        /**
         * @property {terminal.Shell} shell
         * @readonly  (write-once: assignment is ignored after first set)
         */
        set shell(value) {
            if(!this._shell)
                this._shell = value;
        },
        get shell() {
            return this._shell;
        },

        /**
         * @property {terminal.Display} display
         * @readonly  (write-once: assignment is ignored after first set)
         */
        set display(value) {
            if(!this._display)
                this._display = value;
        },
        get display() {
            return this._display;
        }
    };

    terminal.version = '0.1';

    terminal.Display = require('ui/display');
    terminal.Shell = require('system/shell');
    terminal.Process = require('system/process');

    return terminal;
});
{ "content_hash": "7e2d856b50e7c4c7ee93421b3d492091", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 82, "avg_line_length": 20.859154929577464, "alnum_prop": 0.6529372045914922, "repo_name": "StevenTso/terminal.js", "id": "f39e00ae2beb9f1ab2169d9eea8df7896421b747", "size": "1535", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/terminal.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1746" }, { "name": "JavaScript", "bytes": "78878" }, { "name": "Makefile", "bytes": "841" } ], "symlink_target": "" }
package stack.isa.core; import stack.excetpion.OverflowException; import stack.excetpion.SimulatorException; import stack.excetpion.UnderflowException; import stack.isa.OneImmediateInstruction; import stack.simulator.Context; import stack.simulator.machine.models.CoreModel; import stack.simulator.machine.models.StackModel; public class InstructionTUCK_CP extends OneImmediateInstruction { public InstructionTUCK_CP(int imm16, String comment){ super(imm16, comment); } @Override public String toString() { return get_string(StackOP.TUCK_CP, getImmediate(), getComment()); } @Override public int toBinary() { return assemble(StackOP.TUCK_CP, getImmediate()); } @Override public void execute(Context context, CoreModel core) throws SimulatorException, UnderflowException, OverflowException { StackModel stack0 = core.getStack(0, context); stack0.checkDepth(1); stack0.checkDeepAccess(getImmediate()+1); // copy the first element, and store it at stack0[imm16] int datum = stack0.remove(0); stack0.add(getImmediate(), datum); stack0.add(0, datum); context.cycle(); } }
{ "content_hash": "854673f42763b4860bcc847a570a71e0", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 120, "avg_line_length": 27.24390243902439, "alnum_prop": 0.7663384064458371, "repo_name": "ilebedev/stacktool", "id": "230bab28926ec53370eac093527d40f5e79e015c", "size": "1117", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/stack/isa/core/InstructionTUCK_CP.java", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "54152" }, { "name": "C", "bytes": "21410" }, { "name": "Java", "bytes": "275451" }, { "name": "R", "bytes": "1814" } ], "symlink_target": "" }
import EditorEvents = require("editor/EditorEvents");
import EditorUI = require("ui/EditorUI");
import ModalWindow = require("../ModalWindow");
import ProgressModal = require("../ProgressModal");

/**
 * Modal dialog shown when the user targets a platform that requires an
 * Atomic Game Engine Pro license. Lists installed vs. available platforms.
 */
class ProPlatformWindow extends ModalWindow {

    constructor() {

        super(false);

        this.init("Platform License Required", "AtomicEditor/editor/ui/platformsinfo.tb.txt");

        var infoField = <Atomic.UIEditField> this.getWidget("info");
        var licenseSystem = ToolCore.licenseSystem;

        // Desktop and HTML5 platforms ship with every license.
        var installed = " <widget TBSkinImage: skin: 'LogoMac-Small'> <widget TBSkinImage: skin: 'LogoWindows-Small'> <widget TBSkinImage: skin: 'LogoHTML5-Small'> ";
        var available = " ";

        // Mobile platforms move between the two lists based on the license.
        if (licenseSystem.licenseAndroid) {
            installed += "<widget TBSkinImage: skin: 'LogoAndroid-Small'> ";
        } else {
            available += "<widget TBSkinImage: skin: 'LogoAndroid-Small'> ";
        }

        if (licenseSystem.licenseIOS) {
            installed += "<widget TBSkinImage: skin: 'LogoIOS-Small'> ";
        } else {
            available += "<widget TBSkinImage: skin: 'LogoIOS-Small'> ";
        }

        var message = "\nAtomic Game Engine Pro is required to deploy apps to this platform.\n\n<color #D4FB79>Installed platforms:</color>\n\n";
        message += installed + "\n\n\n";

        // Only show the "available" section when something is still locked.
        if (!licenseSystem.licenseIOS || !licenseSystem.licenseAndroid || !licenseSystem.licenseModule3D) {
            message += "<color #76D6FF>Available platforms:</color>\n\n";
            message += available + "\n\n\n";
        }

        infoField.text = message;

        this.resizeToFitContent();
        this.center();

        infoField.reformat();
    }

    /** Handles clicks on the purchase and ok buttons. */
    handleWidgetEvent(ev: Atomic.UIWidgetEvent) {

        if (ev.type != Atomic.UI_EVENT_TYPE_CLICK)
            return;

        var widgetId = ev.target.id;

        if (widgetId == "purchase") {
            Atomic.fileSystem.systemOpen("https://store.atomicgameengine.com/site");
        } else if (widgetId == "ok") {
            this.hide();
            return true;
        }

        return false;
    }

}

export = ProPlatformWindow;
{ "content_hash": "db122cc0ec5939eb39753bfd9e153157", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 173, "avg_line_length": 29.06756756756757, "alnum_prop": 0.602045560204556, "repo_name": "rsredsq/AtomicGameEngine", "id": "a1148cbdaa041df2d06ff2728cdeef9ffc414842", "size": "2431", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Script/AtomicEditor/ui/modal/license/ProPlatformWindow.ts", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "96044" }, { "name": "Batchfile", "bytes": "53136" }, { "name": "C", "bytes": "47231561" }, { "name": "C#", "bytes": "155477" }, { "name": "C++", "bytes": "29838729" }, { "name": "CMake", "bytes": "356497" }, { "name": "DIGITAL Command Language", "bytes": "312789" }, { "name": "Emacs Lisp", "bytes": "1639" }, { "name": "GLSL", "bytes": "109885" }, { "name": "Groff", "bytes": "5" }, { "name": "HTML", "bytes": "14807" }, { "name": "Java", "bytes": "41271" }, { "name": "JavaScript", "bytes": "1853804" }, { "name": "Makefile", "bytes": "735141" }, { "name": "NSIS", "bytes": "4282" }, { "name": "Objective-C", "bytes": "379639" }, { "name": "Objective-C++", "bytes": "25917" }, { "name": "Perl", "bytes": "2260409" }, { "name": "Perl6", "bytes": "27602" }, { "name": "Prolog", "bytes": "42455" }, { "name": "Protocol Buffer", "bytes": "2764" }, { "name": "Python", "bytes": "1974" }, { "name": "QMake", "bytes": "1782" }, { "name": "Scheme", "bytes": "4249" }, { "name": "Shell", "bytes": "139025" }, { "name": "TypeScript", "bytes": "313212" }, { "name": "XS", "bytes": "4319" }, { "name": "eC", "bytes": "5127" } ], "symlink_target": "" }
// Code generated by go-swagger; DO NOT EDIT.

package s_a_n

// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command

import (
	"context"
	"net/http"
	"time"

	"github.com/go-openapi/errors"
	"github.com/go-openapi/runtime"
	cr "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/swag"
)

// NewFcpCollectionPerformanceMetricsGetParams creates a new FcpCollectionPerformanceMetricsGetParams object,
// with the default timeout for this client.
//
// Default values are not hydrated, since defaults are normally applied by the API server side.
//
// To enforce default values in parameter, use SetDefaults or WithDefaults.
func NewFcpCollectionPerformanceMetricsGetParams() *FcpCollectionPerformanceMetricsGetParams {
	return &FcpCollectionPerformanceMetricsGetParams{
		timeout: cr.DefaultTimeout,
	}
}

// NewFcpCollectionPerformanceMetricsGetParamsWithTimeout creates a new FcpCollectionPerformanceMetricsGetParams object
// with the ability to set a timeout on a request.
func NewFcpCollectionPerformanceMetricsGetParamsWithTimeout(timeout time.Duration) *FcpCollectionPerformanceMetricsGetParams {
	return &FcpCollectionPerformanceMetricsGetParams{
		timeout: timeout,
	}
}

// NewFcpCollectionPerformanceMetricsGetParamsWithContext creates a new FcpCollectionPerformanceMetricsGetParams object
// with the ability to set a context for a request.
func NewFcpCollectionPerformanceMetricsGetParamsWithContext(ctx context.Context) *FcpCollectionPerformanceMetricsGetParams {
	return &FcpCollectionPerformanceMetricsGetParams{
		Context: ctx,
	}
}

// NewFcpCollectionPerformanceMetricsGetParamsWithHTTPClient creates a new FcpCollectionPerformanceMetricsGetParams object
// with the ability to set a custom HTTPClient for a request.
func NewFcpCollectionPerformanceMetricsGetParamsWithHTTPClient(client *http.Client) *FcpCollectionPerformanceMetricsGetParams {
	return &FcpCollectionPerformanceMetricsGetParams{
		HTTPClient: client,
	}
}

/*
FcpCollectionPerformanceMetricsGetParams contains all the parameters to send to the API endpoint

	for the fcp collection performance metrics get operation.

	Typically these are written to a http.Request.
*/
type FcpCollectionPerformanceMetricsGetParams struct {

	/* Duration.

	   Filter by duration
	*/
	DurationQueryParameter *string

	/* Fields.

	   Specify the fields to return.
	*/
	FieldsQueryParameter []string

	/* Interval.

	     The time range for the data. Examples can be 1h, 1d, 1m, 1w, 1y. The period for each time range is as follows: * 1h: Metrics over the most recent hour sampled over 15 seconds. * 1d: Metrics over the most recent day sampled over 5 minutes. * 1w: Metrics over the most recent week sampled over 30 minutes. * 1m: Metrics over the most recent month sampled over 2 hours. * 1y: Metrics over the most recent year sampled over a day.

	     Default: "1h"
	*/
	IntervalQueryParameter *string

	/* IopsOther.

	   Filter by iops.other
	*/
	IopsOtherQueryParameter *int64

	/* IopsRead.

	   Filter by iops.read
	*/
	IopsReadQueryParameter *int64

	/* IopsTotal.

	   Filter by iops.total
	*/
	IopsTotalQueryParameter *int64

	/* IopsWrite.

	   Filter by iops.write
	*/
	IopsWriteQueryParameter *int64

	/* LatencyOther.

	   Filter by latency.other
	*/
	LatencyOtherQueryParameter *int64

	/* LatencyRead.

	   Filter by latency.read
	*/
	LatencyReadQueryParameter *int64

	/* LatencyTotal.

	   Filter by latency.total
	*/
	LatencyTotalQueryParameter *int64

	/* LatencyWrite.

	   Filter by latency.write
	*/
	LatencyWriteQueryParameter *int64

	/* MaxRecords.

	   Limit the number of records returned.
	*/
	MaxRecordsQueryParameter *int64

	/* OrderBy.

	   Order results by specified fields and optional [asc|desc] direction. Default direction is 'asc' for ascending.
	*/
	OrderByQueryParameter []string

	/* ReturnRecords.

	     The default is true for GET calls.  When set to false, only the number of records is returned.

	     Default: true
	*/
	ReturnRecordsQueryParameter *bool

	/* ReturnTimeout.

	     The number of seconds to allow the call to execute before returning. When iterating over a collection, the default is 15 seconds.  ONTAP returns earlier if either max records or the end of the collection is reached.

	     Default: 15
	*/
	ReturnTimeoutQueryParameter *int64

	/* Status.

	   Filter by status
	*/
	StatusQueryParameter *string

	/* SvmUUID.

	   The unique identifier of the SVM.
	*/
	SVMUUIDPathParameter string

	/* ThroughputRead.

	   Filter by throughput.read
	*/
	ThroughputReadQueryParameter *int64

	/* ThroughputTotal.

	   Filter by throughput.total
	*/
	ThroughputTotalQueryParameter *int64

	/* ThroughputWrite.

	   Filter by throughput.write
	*/
	ThroughputWriteQueryParameter *int64

	/* Timestamp.

	   Filter by timestamp
	*/
	TimestampQueryParameter *string

	// Transport configuration (not serialized into the query/body):
	// request timeout, request context, and the HTTP client to use.
	timeout    time.Duration
	Context    context.Context
	HTTPClient *http.Client
}

// WithDefaults hydrates default values in the fcp collection performance metrics get params (not the query body).
//
// All values with no default are reset to their zero value.
func (o *FcpCollectionPerformanceMetricsGetParams) WithDefaults() *FcpCollectionPerformanceMetricsGetParams {
	o.SetDefaults()
	return o
}

// SetDefaults hydrates default values in the fcp collection performance metrics get params (not the query body).
//
// All values with no default are reset to their zero value.
func (o *FcpCollectionPerformanceMetricsGetParams) SetDefaults() { var ( intervalQueryParameterDefault = string("1h") returnRecordsQueryParameterDefault = bool(true) returnTimeoutQueryParameterDefault = int64(15) ) val := FcpCollectionPerformanceMetricsGetParams{ IntervalQueryParameter: &intervalQueryParameterDefault, ReturnRecordsQueryParameter: &returnRecordsQueryParameterDefault, ReturnTimeoutQueryParameter: &returnTimeoutQueryParameterDefault, } val.timeout = o.timeout val.Context = o.Context val.HTTPClient = o.HTTPClient *o = val } // WithTimeout adds the timeout to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithTimeout(timeout time.Duration) *FcpCollectionPerformanceMetricsGetParams { o.SetTimeout(timeout) return o } // SetTimeout adds the timeout to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetTimeout(timeout time.Duration) { o.timeout = timeout } // WithContext adds the context to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithContext(ctx context.Context) *FcpCollectionPerformanceMetricsGetParams { o.SetContext(ctx) return o } // SetContext adds the context to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetContext(ctx context.Context) { o.Context = ctx } // WithHTTPClient adds the HTTPClient to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithHTTPClient(client *http.Client) *FcpCollectionPerformanceMetricsGetParams { o.SetHTTPClient(client) return o } // SetHTTPClient adds the HTTPClient to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } // WithDurationQueryParameter adds the duration to the fcp collection performance metrics get params func (o 
*FcpCollectionPerformanceMetricsGetParams) WithDurationQueryParameter(duration *string) *FcpCollectionPerformanceMetricsGetParams { o.SetDurationQueryParameter(duration) return o } // SetDurationQueryParameter adds the duration to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetDurationQueryParameter(duration *string) { o.DurationQueryParameter = duration } // WithFieldsQueryParameter adds the fields to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithFieldsQueryParameter(fields []string) *FcpCollectionPerformanceMetricsGetParams { o.SetFieldsQueryParameter(fields) return o } // SetFieldsQueryParameter adds the fields to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetFieldsQueryParameter(fields []string) { o.FieldsQueryParameter = fields } // WithIntervalQueryParameter adds the interval to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithIntervalQueryParameter(interval *string) *FcpCollectionPerformanceMetricsGetParams { o.SetIntervalQueryParameter(interval) return o } // SetIntervalQueryParameter adds the interval to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetIntervalQueryParameter(interval *string) { o.IntervalQueryParameter = interval } // WithIopsOtherQueryParameter adds the iopsOther to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithIopsOtherQueryParameter(iopsOther *int64) *FcpCollectionPerformanceMetricsGetParams { o.SetIopsOtherQueryParameter(iopsOther) return o } // SetIopsOtherQueryParameter adds the iopsOther to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetIopsOtherQueryParameter(iopsOther *int64) { o.IopsOtherQueryParameter = iopsOther } // 
WithIopsReadQueryParameter adds the iopsRead to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithIopsReadQueryParameter(iopsRead *int64) *FcpCollectionPerformanceMetricsGetParams { o.SetIopsReadQueryParameter(iopsRead) return o } // SetIopsReadQueryParameter adds the iopsRead to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetIopsReadQueryParameter(iopsRead *int64) { o.IopsReadQueryParameter = iopsRead } // WithIopsTotalQueryParameter adds the iopsTotal to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithIopsTotalQueryParameter(iopsTotal *int64) *FcpCollectionPerformanceMetricsGetParams { o.SetIopsTotalQueryParameter(iopsTotal) return o } // SetIopsTotalQueryParameter adds the iopsTotal to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetIopsTotalQueryParameter(iopsTotal *int64) { o.IopsTotalQueryParameter = iopsTotal } // WithIopsWriteQueryParameter adds the iopsWrite to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithIopsWriteQueryParameter(iopsWrite *int64) *FcpCollectionPerformanceMetricsGetParams { o.SetIopsWriteQueryParameter(iopsWrite) return o } // SetIopsWriteQueryParameter adds the iopsWrite to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetIopsWriteQueryParameter(iopsWrite *int64) { o.IopsWriteQueryParameter = iopsWrite } // WithLatencyOtherQueryParameter adds the latencyOther to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithLatencyOtherQueryParameter(latencyOther *int64) *FcpCollectionPerformanceMetricsGetParams { o.SetLatencyOtherQueryParameter(latencyOther) return o } // SetLatencyOtherQueryParameter adds the latencyOther to the fcp collection performance metrics 
get params func (o *FcpCollectionPerformanceMetricsGetParams) SetLatencyOtherQueryParameter(latencyOther *int64) { o.LatencyOtherQueryParameter = latencyOther } // WithLatencyReadQueryParameter adds the latencyRead to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithLatencyReadQueryParameter(latencyRead *int64) *FcpCollectionPerformanceMetricsGetParams { o.SetLatencyReadQueryParameter(latencyRead) return o } // SetLatencyReadQueryParameter adds the latencyRead to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetLatencyReadQueryParameter(latencyRead *int64) { o.LatencyReadQueryParameter = latencyRead } // WithLatencyTotalQueryParameter adds the latencyTotal to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithLatencyTotalQueryParameter(latencyTotal *int64) *FcpCollectionPerformanceMetricsGetParams { o.SetLatencyTotalQueryParameter(latencyTotal) return o } // SetLatencyTotalQueryParameter adds the latencyTotal to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetLatencyTotalQueryParameter(latencyTotal *int64) { o.LatencyTotalQueryParameter = latencyTotal } // WithLatencyWriteQueryParameter adds the latencyWrite to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithLatencyWriteQueryParameter(latencyWrite *int64) *FcpCollectionPerformanceMetricsGetParams { o.SetLatencyWriteQueryParameter(latencyWrite) return o } // SetLatencyWriteQueryParameter adds the latencyWrite to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetLatencyWriteQueryParameter(latencyWrite *int64) { o.LatencyWriteQueryParameter = latencyWrite } // WithMaxRecordsQueryParameter adds the maxRecords to the fcp collection performance metrics get params func (o 
*FcpCollectionPerformanceMetricsGetParams) WithMaxRecordsQueryParameter(maxRecords *int64) *FcpCollectionPerformanceMetricsGetParams { o.SetMaxRecordsQueryParameter(maxRecords) return o } // SetMaxRecordsQueryParameter adds the maxRecords to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetMaxRecordsQueryParameter(maxRecords *int64) { o.MaxRecordsQueryParameter = maxRecords } // WithOrderByQueryParameter adds the orderBy to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithOrderByQueryParameter(orderBy []string) *FcpCollectionPerformanceMetricsGetParams { o.SetOrderByQueryParameter(orderBy) return o } // SetOrderByQueryParameter adds the orderBy to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetOrderByQueryParameter(orderBy []string) { o.OrderByQueryParameter = orderBy } // WithReturnRecordsQueryParameter adds the returnRecords to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithReturnRecordsQueryParameter(returnRecords *bool) *FcpCollectionPerformanceMetricsGetParams { o.SetReturnRecordsQueryParameter(returnRecords) return o } // SetReturnRecordsQueryParameter adds the returnRecords to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetReturnRecordsQueryParameter(returnRecords *bool) { o.ReturnRecordsQueryParameter = returnRecords } // WithReturnTimeoutQueryParameter adds the returnTimeout to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithReturnTimeoutQueryParameter(returnTimeout *int64) *FcpCollectionPerformanceMetricsGetParams { o.SetReturnTimeoutQueryParameter(returnTimeout) return o } // SetReturnTimeoutQueryParameter adds the returnTimeout to the fcp collection performance metrics get params func (o 
*FcpCollectionPerformanceMetricsGetParams) SetReturnTimeoutQueryParameter(returnTimeout *int64) { o.ReturnTimeoutQueryParameter = returnTimeout } // WithStatusQueryParameter adds the status to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithStatusQueryParameter(status *string) *FcpCollectionPerformanceMetricsGetParams { o.SetStatusQueryParameter(status) return o } // SetStatusQueryParameter adds the status to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetStatusQueryParameter(status *string) { o.StatusQueryParameter = status } // WithSVMUUIDPathParameter adds the svmUUID to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithSVMUUIDPathParameter(svmUUID string) *FcpCollectionPerformanceMetricsGetParams { o.SetSVMUUIDPathParameter(svmUUID) return o } // SetSVMUUIDPathParameter adds the svmUuid to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetSVMUUIDPathParameter(svmUUID string) { o.SVMUUIDPathParameter = svmUUID } // WithThroughputReadQueryParameter adds the throughputRead to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithThroughputReadQueryParameter(throughputRead *int64) *FcpCollectionPerformanceMetricsGetParams { o.SetThroughputReadQueryParameter(throughputRead) return o } // SetThroughputReadQueryParameter adds the throughputRead to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetThroughputReadQueryParameter(throughputRead *int64) { o.ThroughputReadQueryParameter = throughputRead } // WithThroughputTotalQueryParameter adds the throughputTotal to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithThroughputTotalQueryParameter(throughputTotal *int64) 
*FcpCollectionPerformanceMetricsGetParams { o.SetThroughputTotalQueryParameter(throughputTotal) return o } // SetThroughputTotalQueryParameter adds the throughputTotal to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetThroughputTotalQueryParameter(throughputTotal *int64) { o.ThroughputTotalQueryParameter = throughputTotal } // WithThroughputWriteQueryParameter adds the throughputWrite to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithThroughputWriteQueryParameter(throughputWrite *int64) *FcpCollectionPerformanceMetricsGetParams { o.SetThroughputWriteQueryParameter(throughputWrite) return o } // SetThroughputWriteQueryParameter adds the throughputWrite to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetThroughputWriteQueryParameter(throughputWrite *int64) { o.ThroughputWriteQueryParameter = throughputWrite } // WithTimestampQueryParameter adds the timestamp to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) WithTimestampQueryParameter(timestamp *string) *FcpCollectionPerformanceMetricsGetParams { o.SetTimestampQueryParameter(timestamp) return o } // SetTimestampQueryParameter adds the timestamp to the fcp collection performance metrics get params func (o *FcpCollectionPerformanceMetricsGetParams) SetTimestampQueryParameter(timestamp *string) { o.TimestampQueryParameter = timestamp } // WriteToRequest writes these params to a swagger request func (o *FcpCollectionPerformanceMetricsGetParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { if err := r.SetTimeout(o.timeout); err != nil { return err } var res []error if o.DurationQueryParameter != nil { // query param duration var qrDuration string if o.DurationQueryParameter != nil { qrDuration = *o.DurationQueryParameter } qDuration := qrDuration if qDuration != "" { if err := 
r.SetQueryParam("duration", qDuration); err != nil { return err } } } if o.FieldsQueryParameter != nil { // binding items for fields joinedFields := o.bindParamFields(reg) // query array param fields if err := r.SetQueryParam("fields", joinedFields...); err != nil { return err } } if o.IntervalQueryParameter != nil { // query param interval var qrInterval string if o.IntervalQueryParameter != nil { qrInterval = *o.IntervalQueryParameter } qInterval := qrInterval if qInterval != "" { if err := r.SetQueryParam("interval", qInterval); err != nil { return err } } } if o.IopsOtherQueryParameter != nil { // query param iops.other var qrIopsOther int64 if o.IopsOtherQueryParameter != nil { qrIopsOther = *o.IopsOtherQueryParameter } qIopsOther := swag.FormatInt64(qrIopsOther) if qIopsOther != "" { if err := r.SetQueryParam("iops.other", qIopsOther); err != nil { return err } } } if o.IopsReadQueryParameter != nil { // query param iops.read var qrIopsRead int64 if o.IopsReadQueryParameter != nil { qrIopsRead = *o.IopsReadQueryParameter } qIopsRead := swag.FormatInt64(qrIopsRead) if qIopsRead != "" { if err := r.SetQueryParam("iops.read", qIopsRead); err != nil { return err } } } if o.IopsTotalQueryParameter != nil { // query param iops.total var qrIopsTotal int64 if o.IopsTotalQueryParameter != nil { qrIopsTotal = *o.IopsTotalQueryParameter } qIopsTotal := swag.FormatInt64(qrIopsTotal) if qIopsTotal != "" { if err := r.SetQueryParam("iops.total", qIopsTotal); err != nil { return err } } } if o.IopsWriteQueryParameter != nil { // query param iops.write var qrIopsWrite int64 if o.IopsWriteQueryParameter != nil { qrIopsWrite = *o.IopsWriteQueryParameter } qIopsWrite := swag.FormatInt64(qrIopsWrite) if qIopsWrite != "" { if err := r.SetQueryParam("iops.write", qIopsWrite); err != nil { return err } } } if o.LatencyOtherQueryParameter != nil { // query param latency.other var qrLatencyOther int64 if o.LatencyOtherQueryParameter != nil { qrLatencyOther = 
*o.LatencyOtherQueryParameter } qLatencyOther := swag.FormatInt64(qrLatencyOther) if qLatencyOther != "" { if err := r.SetQueryParam("latency.other", qLatencyOther); err != nil { return err } } } if o.LatencyReadQueryParameter != nil { // query param latency.read var qrLatencyRead int64 if o.LatencyReadQueryParameter != nil { qrLatencyRead = *o.LatencyReadQueryParameter } qLatencyRead := swag.FormatInt64(qrLatencyRead) if qLatencyRead != "" { if err := r.SetQueryParam("latency.read", qLatencyRead); err != nil { return err } } } if o.LatencyTotalQueryParameter != nil { // query param latency.total var qrLatencyTotal int64 if o.LatencyTotalQueryParameter != nil { qrLatencyTotal = *o.LatencyTotalQueryParameter } qLatencyTotal := swag.FormatInt64(qrLatencyTotal) if qLatencyTotal != "" { if err := r.SetQueryParam("latency.total", qLatencyTotal); err != nil { return err } } } if o.LatencyWriteQueryParameter != nil { // query param latency.write var qrLatencyWrite int64 if o.LatencyWriteQueryParameter != nil { qrLatencyWrite = *o.LatencyWriteQueryParameter } qLatencyWrite := swag.FormatInt64(qrLatencyWrite) if qLatencyWrite != "" { if err := r.SetQueryParam("latency.write", qLatencyWrite); err != nil { return err } } } if o.MaxRecordsQueryParameter != nil { // query param max_records var qrMaxRecords int64 if o.MaxRecordsQueryParameter != nil { qrMaxRecords = *o.MaxRecordsQueryParameter } qMaxRecords := swag.FormatInt64(qrMaxRecords) if qMaxRecords != "" { if err := r.SetQueryParam("max_records", qMaxRecords); err != nil { return err } } } if o.OrderByQueryParameter != nil { // binding items for order_by joinedOrderBy := o.bindParamOrderBy(reg) // query array param order_by if err := r.SetQueryParam("order_by", joinedOrderBy...); err != nil { return err } } if o.ReturnRecordsQueryParameter != nil { // query param return_records var qrReturnRecords bool if o.ReturnRecordsQueryParameter != nil { qrReturnRecords = *o.ReturnRecordsQueryParameter } qReturnRecords := 
swag.FormatBool(qrReturnRecords) if qReturnRecords != "" { if err := r.SetQueryParam("return_records", qReturnRecords); err != nil { return err } } } if o.ReturnTimeoutQueryParameter != nil { // query param return_timeout var qrReturnTimeout int64 if o.ReturnTimeoutQueryParameter != nil { qrReturnTimeout = *o.ReturnTimeoutQueryParameter } qReturnTimeout := swag.FormatInt64(qrReturnTimeout) if qReturnTimeout != "" { if err := r.SetQueryParam("return_timeout", qReturnTimeout); err != nil { return err } } } if o.StatusQueryParameter != nil { // query param status var qrStatus string if o.StatusQueryParameter != nil { qrStatus = *o.StatusQueryParameter } qStatus := qrStatus if qStatus != "" { if err := r.SetQueryParam("status", qStatus); err != nil { return err } } } // path param svm.uuid if err := r.SetPathParam("svm.uuid", o.SVMUUIDPathParameter); err != nil { return err } if o.ThroughputReadQueryParameter != nil { // query param throughput.read var qrThroughputRead int64 if o.ThroughputReadQueryParameter != nil { qrThroughputRead = *o.ThroughputReadQueryParameter } qThroughputRead := swag.FormatInt64(qrThroughputRead) if qThroughputRead != "" { if err := r.SetQueryParam("throughput.read", qThroughputRead); err != nil { return err } } } if o.ThroughputTotalQueryParameter != nil { // query param throughput.total var qrThroughputTotal int64 if o.ThroughputTotalQueryParameter != nil { qrThroughputTotal = *o.ThroughputTotalQueryParameter } qThroughputTotal := swag.FormatInt64(qrThroughputTotal) if qThroughputTotal != "" { if err := r.SetQueryParam("throughput.total", qThroughputTotal); err != nil { return err } } } if o.ThroughputWriteQueryParameter != nil { // query param throughput.write var qrThroughputWrite int64 if o.ThroughputWriteQueryParameter != nil { qrThroughputWrite = *o.ThroughputWriteQueryParameter } qThroughputWrite := swag.FormatInt64(qrThroughputWrite) if qThroughputWrite != "" { if err := r.SetQueryParam("throughput.write", qThroughputWrite); err != 
nil { return err } } } if o.TimestampQueryParameter != nil { // query param timestamp var qrTimestamp string if o.TimestampQueryParameter != nil { qrTimestamp = *o.TimestampQueryParameter } qTimestamp := qrTimestamp if qTimestamp != "" { if err := r.SetQueryParam("timestamp", qTimestamp); err != nil { return err } } } if len(res) > 0 { return errors.CompositeValidationError(res...) } return nil } // bindParamFcpCollectionPerformanceMetricsGet binds the parameter fields func (o *FcpCollectionPerformanceMetricsGetParams) bindParamFields(formats strfmt.Registry) []string { fieldsIR := o.FieldsQueryParameter var fieldsIC []string for _, fieldsIIR := range fieldsIR { // explode []string fieldsIIV := fieldsIIR // string as string fieldsIC = append(fieldsIC, fieldsIIV) } // items.CollectionFormat: "csv" fieldsIS := swag.JoinByFormat(fieldsIC, "csv") return fieldsIS } // bindParamFcpCollectionPerformanceMetricsGet binds the parameter order_by func (o *FcpCollectionPerformanceMetricsGetParams) bindParamOrderBy(formats strfmt.Registry) []string { orderByIR := o.OrderByQueryParameter var orderByIC []string for _, orderByIIR := range orderByIR { // explode []string orderByIIV := orderByIIR // string as string orderByIC = append(orderByIC, orderByIIV) } // items.CollectionFormat: "csv" orderByIS := swag.JoinByFormat(orderByIC, "csv") return orderByIS }
{ "content_hash": "177d08973f009434fe4bb4a0e982c865", "timestamp": "", "source": "github", "line_count": 884, "max_line_length": 220, "avg_line_length": 31.130090497737555, "alnum_prop": 0.7875649551219158, "repo_name": "NetApp/trident", "id": "de8290b995fdda00c1b6c9a82e72bc657ac8a2e4", "size": "27519", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "storage_drivers/ontap/api/rest/client/s_a_n/fcp_collection_performance_metrics_get_parameters.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "1429" }, { "name": "Go", "bytes": "5153619" }, { "name": "Makefile", "bytes": "14158" }, { "name": "Mustache", "bytes": "4119" }, { "name": "Python", "bytes": "43952" }, { "name": "Shell", "bytes": "2483" } ], "symlink_target": "" }
package anime import ( "net/http" "github.com/animenotifier/notify.moe/arn" "github.com/aerogo/aero" ) // SyncEpisodes syncs the episodes with an external site. func SyncEpisodes(ctx aero.Context) error { user := arn.GetUserFromContext(ctx) animeID := ctx.Get("id") if user == nil { return ctx.Error(http.StatusUnauthorized, "Not logged in") } if user.Role != "editor" && user.Role != "admin" { return ctx.Error(http.StatusUnauthorized, "Not authorized") } anime, err := arn.GetAnime(animeID) if err != nil { return ctx.Error(http.StatusNotFound, "Anime not found", err) } return anime.RefreshEpisodes() }
{ "content_hash": "eb833fadb40050beed1ea1339acb8e27", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 63, "avg_line_length": 20.451612903225808, "alnum_prop": 0.7003154574132492, "repo_name": "animenotifier/notify.moe", "id": "6a15b2349cdc6dcbe3c37d6793d1e345468fc24f", "size": "634", "binary": false, "copies": "1", "ref": "refs/heads/go", "path": "pages/anime/sync.go", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "776515" }, { "name": "Makefile", "bytes": "1951" }, { "name": "Shell", "bytes": "4485" }, { "name": "TypeScript", "bytes": "149263" } ], "symlink_target": "" }
import en_US from './en_US'; import zh from './zh'; import getI18nModel from './getModel'; import translate from './translate'; export const messages = { zh, en_US, }; export { getI18nModel, translate, };
{ "content_hash": "f63a6207ffa2b6e9a86d3a655ad3bf3c", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 38, "avg_line_length": 15.285714285714286, "alnum_prop": 0.6635514018691588, "repo_name": "shrimpliu/shradmin", "id": "efe4ea97eb55084b1d681a5d30847c94b1ad7daa", "size": "214", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/i18n/index.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "63722" } ], "symlink_target": "" }
package edu.ua.eng.software.mallard; //import java.io.FileOutputStream; //import java.io.PrintStream; import java.io.FileWriter; import java.util.concurrent.ConcurrentMap; /** * @author Nicholas A. Kraft <nkraft@cs.ua.edu> * @version $Id: Mallard.java 9 2012-04-06 20:39:29Z nkraft $ */ public class Mallard { private static void usage() { System.out.println("Usage: java edu.ua.cs.mallard.Mallard JAR ZIP"); } public static void main (String [] args) throws Exception { if (2 != args.length) { usage(); System.exit(1); } // Redirect System.out and System.err to a tmp file. /* File ifile = new File(args[0]); File ofile = File.createTempFile("mallard-",".log"); FileOutputStream ofs = new FileOutputStream(ofile); System.setOut(new PrintStream(ofs,true)); System.setErr(new PrintStream(ofs,true)); */ long start = System.nanoTime(); ConcurrentMap<String,Method> methods = Methods.merge( CallGraph.build(args[0]), Methods.extract(args[1]) ); computeMaxFanInFanOut(methods); XMLPrinter xp = new XMLPrinter(new FileWriter("out.xml")); xp.writeXML(methods); long elapsed = System.nanoTime() - start; System.out.println("Elapsed time in Mallard.main: " + (elapsed / 1000000000.0) + " seconds"); } private static void computeMaxFanInFanOut(ConcurrentMap<String,Method> methods) { int maxFanIn = 0; String maxFanInName = null; int maxFanOut = 0; String maxFanOutName = null; for (String qname : methods.keySet()) { Method method = methods.get(qname); int fanIn = method.getCallers().size(); if (fanIn > maxFanIn) { maxFanIn = fanIn; maxFanInName = qname; } int fanOut = method.getCallees().size(); if (fanOut > maxFanOut) { maxFanOut = fanOut; maxFanOutName = qname; } } System.out.println("Max Fan-In: " + maxFanIn + " --- " + maxFanInName); System.out.println("Max Fan-Out: " + maxFanOut + " --- " + maxFanOutName); } }
{ "content_hash": "c52dda2b80738d7eff21159471356959", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 101, "avg_line_length": 32.32394366197183, "alnum_prop": 0.5721132897603486, "repo_name": "software-eng-ua-edu/mallard", "id": "2ba03a288ba1aad6eaa51d6f9b45845e046aa12e", "size": "2456", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/java/edu/ua/eng/software/mallard/Mallard.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "1511" }, { "name": "Java", "bytes": "31393" }, { "name": "Shell", "bytes": "624" } ], "symlink_target": "" }
<!DOCTYPE html> <title>Canvas test: 2d.composite.globalAlpha.canvaspattern</title> <script src="../tests.js"></script> <link rel="stylesheet" href="../tests.css"> <body class="framed show_output"> <h1> <a href="2d.composite.globalAlpha.canvaspattern.html" target="_parent">2d.&#8203;composite.&#8203;globalAlpha.&#8203;canvaspattern</a> </h1> <p><a href="#" id="show_output" onclick="document.body.className += ' show_output'; return false">[show output]</a> <p class="output">Actual output:</p> <canvas id="c" class="output" width="100" height="50"><p class="fallback">FAIL (fallback content)</p></canvas> <p class="output expectedtext">Expected output:<p><img src="green-100x50.png" class="output expected" id="expected" alt=""> <ul id="d"></ul> <script> _addTest(function(canvas, ctx) { var canvas2 = document.createElement('canvas'); canvas2.width = 100; canvas2.height = 50; var ctx2 = canvas2.getContext('2d'); ctx2.fillStyle = '#f00'; ctx2.fillRect(0, 0, 100, 50); ctx.fillStyle = '#0f0'; ctx.fillRect(0, 0, 100, 50); ctx.fillStyle = ctx.createPattern(canvas2, 'no-repeat'); ctx.globalAlpha = 0.01; // avoid any potential alpha=0 optimisations ctx.fillRect(0, 0, 100, 50); _assertPixelApprox(canvas, 50,25, 2,253,0,255, "50,25", "2,253,0,255", 2); }); </script>
{ "content_hash": "1368f95b67cd1e7bb1bd7a17b5a73efb", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 134, "avg_line_length": 37.470588235294116, "alnum_prop": 0.695447409733124, "repo_name": "cjcliffe/webgl-2d", "id": "d6e27e61f1394068d6390990daa7fca2e8e283ee", "size": "1274", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "test/philip.html5.org/tests/framed.2d.composite.globalAlpha.canvaspattern.html", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "269372" } ], "symlink_target": "" }
package org.cipres.treebase.domain.matrix; import javax.persistence.AttributeOverride; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.Table; import org.hibernate.annotations.Cache; import org.hibernate.annotations.CacheConcurrencyStrategy; import org.cipres.treebase.domain.AbstractPersistedObject; import org.cipres.treebase.domain.TBPersistable; /** * GeneticCode.java * * Created on Mar 27, 2006 * * @author Jin Ruan * */ @Entity @Table(name = "GeneticCode") @AttributeOverride(name = "id", column = @Column(name = "GeneticCode_ID")) @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "matrixCache") public class GeneticCode extends AbstractPersistedObject { private static final long serialVersionUID = 5320812309978672110L; private String mTitle; private String mCodeOrder; private String mNucOrder; private String mExtensions; private String mCodeDescription; private boolean mPredefined; /** * Constructor. */ public GeneticCode() { super(); } /** * Return the CodeDescription field. * * @return String mCodeDescription */ @Column(name = "CodeDescription", length = TBPersistable.COLUMN_LENGTH_STRING_1K) public String getCodeDescription() { return mCodeDescription; } /** * Set the CodeDescription field. */ public void setCodeDescription(String pNewCodeDescription) { mCodeDescription = pNewCodeDescription; } /** * Return the Extensions field. * * @return String mExtensions */ @Column(name = "Extensions", length = TBPersistable.COLUMN_LENGTH_STRING) public String getExtensions() { return mExtensions; } /** * Set the Extensions field. */ public void setExtensions(String pNewExtensions) { mExtensions = pNewExtensions; } /** * Return the NucOrder field. * * @return String mNucOrder */ @Column(name = "NucOrder", length = TBPersistable.COLUMN_LENGTH_STRING) public String getNucOrder() { return mNucOrder; } /** * Set the NucOrder field. 
*/ public void setNucOrder(String pNewNucOrder) { mNucOrder = pNewNucOrder; } /** * Return the CodeOrder field. * * @return String mCodeOrder */ @Column(name = "CodeOrder", length = TBPersistable.COLUMN_LENGTH_STRING) public String getCodeOrder() { return mCodeOrder; } /** * Set the CodeOrder field. */ public void setCodeOrder(String pNewCodeOrder) { mCodeOrder = pNewCodeOrder; } /** * Return the Predefined field. * * @return boolean */ @Column(name = "Predefined") public boolean isPredefined() { return mPredefined; } /** * Set the Predefined field. */ public void setPredefined(boolean pNewPredefined) { mPredefined = pNewPredefined; } /** * Return the Title field. * * @return String */ @Column(name = "Title", nullable = true, length = TBPersistable.COLUMN_LENGTH_STRING) public String getTitle() { return mTitle; } /** * Set the Title field. */ public void setTitle(String pNewTitle) { mTitle = pNewTitle; } }
{ "content_hash": "3f1f5a09f908968dac699d131b958fbe", "timestamp": "", "source": "github", "line_count": 147, "max_line_length": 86, "avg_line_length": 21.58503401360544, "alnum_prop": 0.6804286164513079, "repo_name": "TreeBASE/treebasetest", "id": "202cdeddb7ed351fd925c964ae868f8e855dc173", "size": "3173", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "treebase-core/src/main/java/org/cipres/treebase/domain/matrix/GeneticCode.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ApacheConf", "bytes": "172" }, { "name": "Batchfile", "bytes": "62" }, { "name": "CSS", "bytes": "39246" }, { "name": "HTML", "bytes": "1330382" }, { "name": "Java", "bytes": "3078667" }, { "name": "JavaScript", "bytes": "107821" }, { "name": "PHP", "bytes": "127834" }, { "name": "PLSQL", "bytes": "653" }, { "name": "PLpgSQL", "bytes": "31601" }, { "name": "Perl", "bytes": "393086" }, { "name": "Perl6", "bytes": "38254" }, { "name": "Shell", "bytes": "9031" }, { "name": "Web Ontology Language", "bytes": "7720" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <PRONOM-Report xmlns="http://pronom.nationalarchives.gov.uk"> <report_format_detail> <FileFormat> <FormatID>1452</FormatID> <FormatName>INTERLIS Transfer File</FormatName> <FormatVersion>2.3</FormatVersion> <FormatAliases> </FormatAliases> <FormatFamilies> </FormatFamilies> <FormatTypes>Text (Mark-up)</FormatTypes> <FormatDisclosure> </FormatDisclosure> <FormatDescription>INTERLIS is a description and transfer mechanism for spatial data. INTERLIS 2 Transfer files are XML files that contain data, e.g. geodata, which is structured according to certain rules. These rules are described in another file (or multiple files) called a model file.</FormatDescription> <BinaryFileFormat> </BinaryFileFormat> <ByteOrders> </ByteOrders> <ReleaseDate> </ReleaseDate> <WithdrawnDate> </WithdrawnDate> <ProvenanceSourceID>206</ProvenanceSourceID> <ProvenanceName>Swiss Federal Office of Topography / Federal Office of Topography</ProvenanceName> <ProvenanceSourceDate>14 Jul 2014</ProvenanceSourceDate> <ProvenanceDescription>http://www.interlis.ch</ProvenanceDescription> <LastUpdatedDate>14 Jul 2014</LastUpdatedDate> <FormatNote> </FormatNote> <FormatRisk> </FormatRisk> <TechnicalEnvironment> </TechnicalEnvironment> <FileFormatIdentifier> <Identifier>fmt/653</Identifier> <IdentifierType>PUID</IdentifierType> </FileFormatIdentifier> <ExternalSignature> <ExternalSignatureID>1510</ExternalSignatureID> <Signature>xtf</Signature> <SignatureType>File extension</SignatureType> </ExternalSignature> <InternalSignature> <SignatureID>986</SignatureID> <SignatureName>INTERLIS 2.3 Transfer File</SignatureName> <SignatureNote>BOF: &lt;?xml version=('|")1.0('|") encoding=('|")UTF-8('|"){0-2}?&gt;{0-256}&lt;TRANSFER xmlns=('|")http://www.interlis.ch/INTERLIS2.3('|")</SignatureNote> <ByteSequence> <ByteSequenceID>1218</ByteSequenceID> <PositionType>Absolute from BOF</PositionType> <Offset>0</Offset> <MaxOffset>3</MaxOffset> <IndirectOffsetLocation> 
</IndirectOffsetLocation> <IndirectOffsetLength> </IndirectOffsetLength> <Endianness> </Endianness> <ByteSequenceValue>3C3F786D6C2076657273696F6E3D(22|27)312E30(22|27)20656E636F64696E673D(22|27)5554462D38(22|27){0-2}3F3E{0-256}3C5452414E5346455220786D6C6E733D(22|27)687474703A2F2F7777772E696E7465726C69732E63682F494E5445524C4953322E33(22|27)</ByteSequenceValue> </ByteSequence> </InternalSignature> <InternalSignature> <SignatureID>1005</SignatureID> <SignatureName>Interlis 2.3 Transfer File 2</SignatureName> <SignatureNote>Max Offset: 2 &lt;.?.x.m.l. .v.e.r.s.i.o.n.=.'.1...0.'. .e.n.c.o.d.i.n.g.=.'.U.T.F.-.8.'.?.&gt;{0-512}&lt;.T.R.A.N.S.F.E.R. .x.m.l.n.s.=.".h.t.t.p.:././.w.w.w...i.n.t.e.r.l.i.s...c.h./.I.N.T.E.R.L.I.S.2...3</SignatureNote> <ByteSequence> <ByteSequenceID>1239</ByteSequenceID> <PositionType>Absolute from BOF</PositionType> <Offset>0</Offset> <MaxOffset>2</MaxOffset> <IndirectOffsetLocation> </IndirectOffsetLocation> <IndirectOffsetLength> </IndirectOffsetLength> <Endianness> </Endianness> <ByteSequenceValue>3C003F0078006D006C002000760065007200730069006F006E003D00(22|27)0031002E003000(22|27)00200065006E0063006F00640069006E0067003D00(22|27)005500540046002D003800(22|27)003F003E{0-512}3C005400520041004E005300460045005200200078006D006C006E0073003D00(22|27)0068007400740070003A002F002F007700770077002E0069006E007400650072006C00690073002E00630068002F0049004E005400450052004C004900530032002E0033</ByteSequenceValue> </ByteSequence> </InternalSignature> <RelatedFormat> <RelationshipType>Has priority over</RelationshipType> <RelatedFormatID>638</RelatedFormatID> <RelatedFormatName>Extensible Markup Language</RelatedFormatName> <RelatedFormatVersion>1.0</RelatedFormatVersion> </RelatedFormat> </FileFormat> <SearchCriteria>Criteria</SearchCriteria> </report_format_detail> </PRONOM-Report>
{ "content_hash": "9b772670549f8c9192fad3b3d34f5adb", "timestamp": "", "source": "github", "line_count": 89, "max_line_length": 433, "avg_line_length": 50.752808988764045, "alnum_prop": 0.6750055346468895, "repo_name": "richardlehane/siegfried", "id": "b46acc31940391b254978da8a1fd36b945c5e505", "size": "4517", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "cmd/roy/data/pronom/fmt653.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "887382" }, { "name": "Shell", "bytes": "710" } ], "symlink_target": "" }
package com.artemis;

import java.util.BitSet;

import com.artemis.utils.Bag;
import com.artemis.utils.ImmutableBag;
import com.artemis.utils.IntBag;
import com.artemis.utils.reflect.ClassReflection;
import com.artemis.utils.reflect.Constructor;
import com.artemis.utils.reflect.ReflectionException;

/**
 * Handles the association between entities and their components.
 * <p>
 * Only one component manager exists per {@link World} instance,
 * managed by the world.
 * </p>
 *
 * @author Arni Arent
 */
public class ComponentManager extends Manager {
	/** Holds all components grouped by type. */
	private final Bag<Bag<Component>> componentsByType;
	/** Holds all packed components sorted by type index. */
	private final Bag<PackedComponent> packedComponents;
	/** Per packed component type index, the ids of entities owning that component. */
	private final Bag<BitSet> packedComponentOwners;
	/** Collects all entities marked for deletion from this ComponentManager. */
	private final IntBag deleted;
	/** Recycles {@link PooledComponent} instances freed on removal/deletion. */
	private final ComponentPool pooledComponents;
	// Highest entity id seen so far; packed component storage is grown to cover it.
	private int highestSeenEntityId;
	/** Resolves {@link ComponentType} instances for component classes. */
	protected final ComponentTypeFactory typeFactory;

	/**
	 * Creates a new instance of {@link ComponentManager}.
	 *
	 * @param entityContainerSize initial entity capacity; seeds
	 *        {@code highestSeenEntityId} and sizes per-type component bags
	 */
	protected ComponentManager(int entityContainerSize) {
		this.highestSeenEntityId = entityContainerSize;
		componentsByType = new Bag<Bag<Component>>();
		packedComponents = new Bag<PackedComponent>();
		packedComponentOwners = new Bag<BitSet>();
		pooledComponents = new ComponentPool();
		deleted = new IntBag();

		typeFactory = new ComponentTypeFactory();
	}

	/**
	 * Creates a component of the given class and associates it with the owner
	 * entity, resolving the {@link ComponentType} first.
	 */
	protected <T extends Component> T create(Entity owner, Class<T> componentClass) {
		ComponentType type = typeFactory.getTypeFor(componentClass);
		T component = create(owner, type);
		return component;
	}

	/**
	 * Creates (or reuses, depending on taxonomy) a component of the given type
	 * for the owner entity and registers it with this manager.
	 *
	 * @throws InvalidComponentException if instantiation fails or the taxonomy
	 *         is unknown
	 */
	@SuppressWarnings("unchecked")
	<T extends Component> T create(Entity owner, ComponentType type) {
		Class<T> componentClass = (Class<T>)type.getType();
		T component = null;

		switch (type.getTaxonomy()) {
			case BASIC:
				component = newInstance(componentClass, false);
				break;
			case PACKED:
				// A single PackedComponent instance is shared per type; it is
				// pointed at the owner entity via forEntity before handing out.
				PackedComponent packedComponent = packedComponents.safeGet(type.getIndex());
				if (packedComponent == null) {
					packedComponent = (PackedComponent)newInstance(
							componentClass, type.packedHasWorldConstructor);
					packedComponents.set(type.getIndex(), packedComponent);
				}
				getPackedComponentOwners(type).set(owner.getId());
				ensurePackedComponentCapacity(owner.id);
				packedComponent.forEntity(owner.id);
				component = (T)packedComponent;
				break;
			case POOLED:
				try {
					// Return any existing component of this type to the pool,
					// then obtain a (possibly recycled) instance from it.
					reclaimPooled(owner, type);
					component = (T)pooledComponents.obtain((Class<PooledComponent>)componentClass, type);
					break;
				} catch (ReflectionException e) {
					throw new InvalidComponentException(componentClass, "Unable to instantiate component.", e);
				}
			default:
				throw new InvalidComponentException(componentClass, " unknown component type: " + type.getTaxonomy());
		}

		addComponent(owner, type, component);
		return component;
	}

	/**
	 * Frees the owner's existing pooled component of the given type back to
	 * the pool, if one is present.
	 */
	private void reclaimPooled(Entity owner, ComponentType type) {
		Bag<Component> components = componentsByType.safeGet(type.getIndex());
		if (components == null)
			return;

		Component old = components.safeGet(owner.id);
		if (old != null)
			pooledComponents.free((PooledComponent)old, type);
	}

	/**
	 * Grows every registered packed component's backing storage so that
	 * entityId is addressable, and records it as the highest id seen.
	 */
	private void ensurePackedComponentCapacity(int entityId) {
		if ((highestSeenEntityId - 1) < entityId) {
			highestSeenEntityId = entityId;
			for (int i = 0, s = packedComponents.size(); s > i; i++) {
				PackedComponent component = packedComponents.get(i);
				if (component == null)
					continue;

				component.ensureCapacity(entityId + 1);
			}
		}
	}

	/**
	 * Returns the owner bitset for the given packed component type, creating
	 * it lazily on first access.
	 */
	protected BitSet getPackedComponentOwners(ComponentType type) {
		BitSet owners = packedComponentOwners.safeGet(type.getIndex());
		if (owners == null) {
			owners = new BitSet();
			packedComponentOwners.set(type.getIndex(), owners);
		}
		return owners;
	}

	/**
	 * Reflectively instantiates a component, passing the {@link World} to the
	 * constructor when constructorHasWorldParameter is true.
	 *
	 * @throws InvalidComponentException if reflective instantiation fails
	 */
	@SuppressWarnings("unchecked")
	<T extends Component> T newInstance(Class<T> componentClass, boolean constructorHasWorldParameter) {
		try {
			if (constructorHasWorldParameter) {
				Constructor constructor = ClassReflection.getConstructor(componentClass, World.class);
				return (T) constructor.newInstance(world);
			} else {
				return ClassReflection.newInstance(componentClass);
			}
		} catch (ReflectionException e) {
			throw new InvalidComponentException(componentClass, "Unable to instantiate component.", e);
		}
	}

	/**
	 * Removes all components from the entity associated in this manager.
	 *
	 * @param entityId
	 *			the entity to remove components from
	 */
	private void removeComponents(int entityId) {
		BitSet componentBits = world.getEntityManager().componentBits(entityId);
		for (int i = componentBits.nextSetBit(0); i >= 0; i = componentBits.nextSetBit(i+1)) {
			switch (typeFactory.getTaxonomy(i)) {
				case BASIC:
					componentsByType.get(i).set(entityId, null);
					break;
				case POOLED:
					// Recycle the instance before clearing the slot.
					Component pooled = componentsByType.get(i).get(entityId);
					pooledComponents.free((PooledComponent)pooled, i);
					componentsByType.get(i).set(entityId, null);
					break;
				case PACKED:
					// Packed instances are shared per type: point the shared
					// instance at this entity and reset its slot.
					PackedComponent pc = packedComponents.get(i);
					pc.forEntity(entityId);
					pc.reset();
					break;
				default:
					throw new InvalidComponentException(Component.class, " unknown component type: " + typeFactory.getTaxonomy(i));
			}
		}
	}

	/**
	 * Frees packed component storage tied to the world when the world is
	 * disposed; only components implementing DisposedWithWorld are freed.
	 */
	@Override
	protected void dispose() {
		for (int i = 0, s = packedComponents.size(); s > i; i++) {
			PackedComponent component = packedComponents.get(i);
			if (component == null)
				continue;

			if (component instanceof PackedComponent.DisposedWithWorld) {
				((PackedComponent.DisposedWithWorld)component).free(world);
			}
		}
	}

	/**
	 * Adds the component of the given type to the entity.
	 * <p>
	 * Only one component of given type can be associated with a entity at the
	 * same time.
	 * </p>
	 *
	 * @param e
	 *			the entity to add to
	 * @param type
	 *			the type of component being added
	 * @param component
	 *			the component to add
	 */
	protected void addComponent(Entity e, ComponentType type, Component component) {
		if (type.isPackedComponent())
			addPackedComponent(type, (PackedComponent)component);
		else
			addBasicComponent(e, type, component); // pooled components are handled the same
	}

	/**
	 * Creates and adds one component per type declared by the archetype.
	 */
	protected void addComponents(Entity e, Archetype archetype) {
		ComponentType[] types = archetype.types;
		for (int i = 0, s = types.length; s > i; i++) {
			create(e, types[i]);
		}
	}

	/**
	 * Registers the shared packed component instance for its type, if none is
	 * registered yet.
	 */
	private void addPackedComponent(ComponentType type, PackedComponent component) {
		PackedComponent packed = packedComponents.safeGet(type.getIndex());
		if (packed == null) {
			packedComponents.set(type.getIndex(), component);
		}
	}

	/**
	 * Stores a basic (or pooled) component in the per-type bag, indexed by
	 * entity id; the bag is created lazily.
	 */
	private void addBasicComponent(Entity e, ComponentType type, Component component) {
		Bag<Component> components = componentsByType.safeGet(type.getIndex());
		if (components == null) {
			components = new Bag<Component>(highestSeenEntityId);
			componentsByType.set(type.getIndex(), components);
		}

		components.set(e.id, component);
	}

	/**
	 * Removes the component of given type from the entity.
	 *
	 * @param e
	 *			the entity to remove from
	 * @param type
	 *			the type of component being removed
	 */
	protected void removeComponent(Entity e, ComponentType type) {
		int index = type.getIndex();
		switch (type.getTaxonomy()) {
			case BASIC:
				componentsByType.get(index).set(e.id, null);
				break;
			case POOLED:
				Component pooled = componentsByType.get(index).get(e.id);
				pooledComponents.free((PooledComponent)pooled, type);
				componentsByType.get(index).set(e.id, null);
				break;
			case PACKED:
				// Reset the entity's slot in the shared instance and clear its
				// ownership bit.
				PackedComponent pc = packedComponents.get(index);
				pc.forEntity(e.id);
				pc.reset();
				getPackedComponentOwners(type).clear(e.id);
				break;
			default:
				throw new InvalidComponentException(type.getType(), " unknown component type: " + type.getTaxonomy());
		}
	}

	/**
	 * Get all components from all entities for a given type.
	 *
	 * @param type
	 *			the type of components to get
	 * @return a bag containing all components of the given type
	 * @throws InvalidComponentException for packed component types, which are
	 *         not stored per entity and therefore unsupported here
	 */
	protected Bag<Component> getComponentsByType(ComponentType type) {
		if (type.isPackedComponent())
			throw new InvalidComponentException(type.getType(), "PackedComponent types aren't supported.");

		Bag<Component> components = componentsByType.safeGet(type.getIndex());
		if(components == null) {
			components = new Bag<Component>();
			componentsByType.set(type.getIndex(), components);
		}
		return components;
	}

	/**
	 * Returns all component types registered with this manager's type factory.
	 */
	public ImmutableBag<ComponentType> getComponentTypes() {
		return typeFactory.types;
	}

	/**
	 * Get a component of an entity.
	 *
	 * @param e
	 *			the entity associated with the component
	 * @param type
	 *			the type of component to get
	 * @return the component of given type, or {@code null} if absent
	 */
	protected Component getComponent(Entity e, ComponentType type) {
		if (type.isPackedComponent()) {
			PackedComponent component = packedComponents.safeGet(type.getIndex());
			if (component != null)
				component.forEntity(e.id);
			return component;
		} else {
			Bag<Component> components = componentsByType.safeGet(type.getIndex());
			if (components != null && components.isIndexWithinBounds(e.id)) {
				return components.get(e.id);
			}
		}
		return null;
	}

	/**
	 * Get all component associated with an entity.
	 *
	 * @param e
	 *			the entity to get components from
	 * @param fillBag
	 *			a bag to be filled with components
	 * @return the {@code fillBag}, filled with the entities components
	 */
	public Bag<Component> getComponentsFor(Entity e, Bag<Component> fillBag) {
		BitSet componentBits = e.getComponentBits();

		for (int i = componentBits.nextSetBit(0); i >= 0; i = componentBits.nextSetBit(i+1)) {
			if (typeFactory.isPackedComponent(i)) {
				fillBag.add(packedComponents.get(i));
			} else {
				fillBag.add(componentsByType.get(i).get(e.id));
			}
		}

		return fillBag;
	}

	/**
	 * Queues the entity's components for removal; the actual removal happens
	 * in {@link #clean()}.
	 */
	@Override
	public void deleted(int entityId) {
		deleted.add(entityId);
	}

	/**
	 * Grows packed component storage when a new, higher entity id appears.
	 */
	@Override
	public void added(int entityId) {
		if ((highestSeenEntityId - 1) < entityId) {
			ensurePackedComponentCapacity(entityId);
		}
	}

	/**
	 * Removes all components from entities marked for deletion.
	 */
	protected void clean() {
		int s = deleted.size();
		if(s > 0) {
			int[] ids = deleted.getData();
			for(int i = 0; s > i; i++) {
				removeComponents(ids[i]);
			}
			deleted.setSize(0);
		}
	}
}
{ "content_hash": "9b7f13031cd481f2cd93f5f8bb2cd652", "timestamp": "", "source": "github", "line_count": 354, "max_line_length": 116, "avg_line_length": 29.502824858757062, "alnum_prop": 0.7065300651091536, "repo_name": "antag99/artemis-odb", "id": "d23c2c97098ea1748c4a3482cc90263f1adb781b", "size": "10444", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "artemis/src/main/java/com/artemis/ComponentManager.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "831243" }, { "name": "Shell", "bytes": "119" } ], "symlink_target": "" }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Taro.Persistence;

namespace Taro
{
    /// <summary>
    /// Base class for aggregate roots. Events appended by subclasses are
    /// collected in an internal <see cref="EventStream"/>, which is exposed
    /// only through the explicit <see cref="IEventSource"/> implementation.
    /// </summary>
    public abstract class AggregateRoot : IEventSource
    {
        // Events recorded by this aggregate instance.
        private EventStream _recordedEvents = new EventStream();

        /// <summary>
        /// Exposes the recorded events; implemented explicitly so the stream
        /// stays off the public surface of the aggregate.
        /// </summary>
        EventStream IEventSource.Events
        {
            get
            {
                return _recordedEvents;
            }
        }

        /// <summary>
        /// Records a domain event raised by this aggregate.
        /// </summary>
        protected virtual void AppendEvent(IEvent theEvent)
        {
            _recordedEvents.Append(theEvent);
        }
    }
}
{ "content_hash": "4cfa411cca69ff69255dd3d9c407cd57", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 59, "avg_line_length": 19.653846153846153, "alnum_prop": 0.5753424657534246, "repo_name": "mouhong/Taro", "id": "6c56b872bdec03a16320af1172787f3db0c03740", "size": "513", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Taro/AggregateRoot.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "55845" } ], "symlink_target": "" }
namespace net {

class X509Certificate;

namespace ct {

// Represents a MerkleTreeLeaf as defined in RFC6962, section 3.4.
// The goal of this struct is to represent the Merkle tree entry such that
// all details are easily accessible and a leaf hash can be easily calculated
// for the entry.
//
// As such, it has all the data as the MerkleTreeLeaf defined in the RFC,
// but it is not identical to the structure in the RFC for the following
// reasons:
// * The version is implicit - it is only used for V1 leaves currently.
// * the leaf_type is also implicit: There's exactly one leaf type and no
//   new types are planned.
// * The timestamped_entry's |timestamp| and |extensions| fields are directly
//   accessible.
// * The timestamped_entry's entry_type can be deduced from |log_entry|.type
struct NET_EXPORT MerkleTreeLeaf {
  // Copyable and movable; special members are declared here and defined out
  // of line.
  MerkleTreeLeaf();
  MerkleTreeLeaf(const MerkleTreeLeaf& other);
  MerkleTreeLeaf(MerkleTreeLeaf&&);
  ~MerkleTreeLeaf();

  // Certificate / Precertificate and indication of entry type.
  LogEntry log_entry;

  // Timestamp from the SCT.
  base::Time timestamp;

  // Extensions from the SCT.
  std::string extensions;
};

// Given a |cert| and an |sct| for that certificate, constructs the
// representation of this entry in the Merkle tree by filling in
// |merkle_tree_leaf|.
// Returns false if it failed to construct the |merkle_tree_leaf|.
NET_EXPORT bool GetMerkleTreeLeaf(const X509Certificate* cert,
                                  const SignedCertificateTimestamp* sct,
                                  MerkleTreeLeaf* merkle_tree_leaf);

// Sets |*out| to the hash of the Merkle |tree_leaf|, as defined in RFC6962,
// section 3.4. Returns true if the hash was generated, false if an error
// occurred.
NET_EXPORT bool HashMerkleTreeLeaf(const MerkleTreeLeaf& tree_leaf,
                                   std::string* out);

}  // namespace ct

}  // namespace net

#endif  // NET_CERT_MERKLE_TREE_LEAF_H_
{ "content_hash": "f84ab1ad23cd97800b1798db8d03ded4", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 77, "avg_line_length": 35.58181818181818, "alnum_prop": 0.7015840572304548, "repo_name": "ssaroha/node-webrtc", "id": "21217bb6ec5c454e53c12e82cf2ff6e0b8cdf4f4", "size": "2348", "binary": false, "copies": "7", "ref": "refs/heads/develop", "path": "third_party/webrtc/include/chromium/src/net/cert/merkle_tree_leaf.h", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Batchfile", "bytes": "6179" }, { "name": "C", "bytes": "2679" }, { "name": "C++", "bytes": "54327" }, { "name": "HTML", "bytes": "434" }, { "name": "JavaScript", "bytes": "42707" }, { "name": "Python", "bytes": "3835" } ], "symlink_target": "" }
Interface is running in an empty domain where you have edit rights.

### Steps
#### Step 1
- Run the [renderState.js script](./renderState.js?raw=true) (from Menu/Edit/Open and Run scripts From URL...). Move your mouse on and off the wall.
- Expected:
![](./mouse1.jpg)
![](./mouse2.jpg)
{ "content_hash": "a028e5b6d8db3f5077016da7a6755d31", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 143, "avg_line_length": 31.444444444444443, "alnum_prop": 0.6855123674911661, "repo_name": "highfidelity/hifi_tests", "id": "9b91b23e9c2ada579acf9f20ebdfc6ab3d2b459b", "size": "319", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/engine/interaction/pointer/laser/mouse/testStory.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "2633" }, { "name": "CSS", "bytes": "1354" }, { "name": "F*", "bytes": "7176" }, { "name": "GLSL", "bytes": "1368" }, { "name": "HTML", "bytes": "2407" }, { "name": "JavaScript", "bytes": "972684" }, { "name": "PowerShell", "bytes": "246" }, { "name": "Python", "bytes": "6298" } ], "symlink_target": "" }
import os import numpy as np import pandas as pd import subprocess import optparse from sklearn.svm import SVC from sklearn import cross_validation from sklearn import grid_search from sklearn.metrics import precision_score from sklearn.metrics import recall_score from sklearn.metrics import f1_score from sklearn import grid_search from sklearn.cross_validation import train_test_split from stacking_create_training_set import stacking_create_training_set import xml.etree.ElementTree as ET ################################################### # Testing the model on pure test set of 0.5 size ## ################################################### ########## OUTPUT: p,r,f1 on test set ############# ################################################### #defining the options of the script #INPUTS: -i duke_config.xml, -N number_of_configurations, -a amplitude_of_perturbation, -g gold_standard_name parser = optparse.OptionParser() parser.add_option('-i','--input', dest = 'file_name', help = 'file_name') parser.add_option('-N','--number', dest = 'N', help = 'number of classifiers',type = int) parser.add_option('-a','--amplitude', dest = 'a', help = 'amplitude of perturbation',type = float) parser.add_option('-g','--gold', dest = 'gold_standard_name', help = 'gold_standard_name') (options, args) = parser.parse_args() if options.file_name is None: options.file_name = raw_input('Enter file name:') if options.N is None: options.N = raw_input('Enter number of classifiers:') if options.a is None: options.a = 0.05 #default to 0.05 if options.gold_standard_name is None: options.gold_standard_name = raw_input('Enter gold standard file name:') file_name = options.file_name #define the variables gold_standard_name = options.gold_standard_name N = int(options.N) a = float(options.a) #open files for writing output_file_raw = open('ensemble_duke_output_raw_T2_n%d.txt' %N,'w') #output_file = open('ensemble_duke_stacking_output_T2_n%d.txt' %N,'w') gold_standard_read = 
open(gold_standard_name,'rU') #iterate for each tweaked configuration #read actual threshold tree = ET.parse(file_name) root = tree.getroot() for thresh in root.iter('threshold'): central_thresh = float(thresh.text) #central value of the threshold thresholds = np.linspace(central_thresh - a/2, central_thresh + a/2, N) for threshold in thresholds: for thresh in root.iter('threshold'): thresh.text = str(threshold) thresh.set('updated','yes') tree.write('../../../config/FEIII2016/copy_T2.xml') java_command = ["java","-Xmx5000m", "-cp", "../../../lib/Duke/duke-core/target/*:../../../lib/Duke/duke-dist/target/*:../../../lib/Duke/duke-es/target/*:../../../lib/Duke/duke-json/target/*:../../../lib/Duke/duke-lucene/target/*:../../../lib/Duke/duke-mapdb/target/*:../../../lib/Duke/duke-mongodb/target/*:../../../lib/Duke/duke-server/target/*:../../../lib/Duke/lucene_jar/*", "no.priv.garshol.duke.Duke", "--showmatches","--batchsize=100000", "--threads=4", "../../../config/FEIII2016/copy_T2.xml"] output_file_raw.write(subprocess.check_output(java_command)) #call duke on the copy.xml file and write the raw output on file output_file_raw.write('\n') output_file_raw.write('End of run\n') output_file_raw.close() #duke_output_parser('ensemble_duke_output_raw_T2_n%d.txt' %N, 'ensemble_duke_output_union_T2_n%d.txt' %N,'FFIEC','SEC') #create the training set, named training_set_T1_n%d.csv stacking_create_training_set('ensemble_duke_output_raw_T2_n%d.txt' %N,'training_set_T2_n%d.csv' %N, gold_standard_name, N) #read it and make machine learning on it data = pd.read_csv('training_set_T2_n%d.csv' %N) #turn data into arrays X = data.values[:,2:(N+2)] #x variables y = np.array(data['y']) #class variables #p_scores = [] #r_scores = [] #f1_scores = [] #T = 5 #repeat the split many times and average the results in order to cancel random fluctuations #for i in range(T): #stratified split in train and test set X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.50, stratify = y, 
random_state = 20) # fit an SVM with rbf kernel clf = SVC( kernel = 'rbf',cache_size = 1000) #hyper-parameter optimization through grid-search cross validation parameters = {'gamma' : np.logspace(-9,3,30),'C': np.logspace(-2,10,30)} gs_rbf = grid_search.GridSearchCV(clf,param_grid=parameters,cv = 4) gs_rbf.fit(X_train,y_train) #select the best hyper-parameters clf = gs_rbf.best_estimator_ #save the output y_predict = np.reshape(clf.predict(X_test),(len(X_test),1)) #p_scores.append(precision_score(y_test,y_predict,average = 'binary')) #r_scores.append(recall_score(y_test,y_predict,average = 'binary')) #f1_scores.append(f1_score(y_test,y_predict,average = 'binary')) #p = np.mean(p_scores) #r = np.mean(r_scores) #f1 = np.mean(f1_scores) p = precision_score(y_test,y_predict,average = 'binary') r = recall_score(y_test,y_predict,average = 'binary') f1 = f1_score(y_test,y_predict,average = 'binary') print "%.3f,%.3f,%.3f" %(p,r,f1)
{ "content_hash": "97bb6c4cc8afdf6502c41ec6d40885f0", "timestamp": "", "source": "github", "line_count": 151, "max_line_length": 505, "avg_line_length": 33.384105960264904, "alnum_prop": 0.6712953779012101, "repo_name": "enricopal/STEM", "id": "3ef5bfec754f2886c2ce61bd7503ce7d971221f1", "size": "5041", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "validation/FEIII2016/precision_recall_threshold_curve/ensemble_duke_T2_stacking_prfoutput.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "847" }, { "name": "Java", "bytes": "636761" }, { "name": "Python", "bytes": "149144" }, { "name": "R", "bytes": "11363" }, { "name": "Shell", "bytes": "7880" } ], "symlink_target": "" }
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Dblp.Domain.Interfaces")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Dblp.Domain.Interfaces")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly invisible to
// COM components. If you need to access a type in this assembly from COM,
// set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is the ID of the typelib if this project is exposed to COM.
[assembly: Guid("5c0be9ce-8981-42e2-87e6-8608a545c1aa")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values, or accept the default Build and Revision
// numbers by using '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
{ "content_hash": "3b24e5a01d8be119d135cf474e66c172", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 106, "avg_line_length": 41.888888888888886, "alnum_prop": 0.7599469496021221, "repo_name": "Nexusger/GetTheBibTeX2", "id": "c33f804bfddb0130c57db18d8ca18d8f9d45b73e", "size": "1526", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Dblp.Domain.Interfaces/Properties/AssemblyInfo.cs", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "93" }, { "name": "C#", "bytes": "146412" }, { "name": "CSS", "bytes": "6442" }, { "name": "JavaScript", "bytes": "157339" } ], "symlink_target": "" }
using System; using System.Diagnostics; using System.Runtime.InteropServices; using System.Security; using System.Text; using Khronos; // ReSharper disable CheckNamespace // ReSharper disable InconsistentNaming // ReSharper disable JoinDeclarationAndInitializer namespace OpenGL { public partial class Egl { /// <summary> /// [EGL] Value of EGL_SYNC_PRIOR_COMMANDS_COMPLETE_NV symbol. /// </summary> [RequiredByFeature("EGL_NV_sync")] public const int SYNC_PRIOR_COMMANDS_COMPLETE_NV = 0x30E6; /// <summary> /// [EGL] Value of EGL_SYNC_STATUS_NV symbol. /// </summary> [RequiredByFeature("EGL_NV_sync")] public const int SYNC_STATUS_NV = 0x30E7; /// <summary> /// [EGL] Value of EGL_SIGNALED_NV symbol. /// </summary> [RequiredByFeature("EGL_NV_sync")] public const int SIGNALED_NV = 0x30E8; /// <summary> /// [EGL] Value of EGL_UNSIGNALED_NV symbol. /// </summary> [RequiredByFeature("EGL_NV_sync")] public const int UNSIGNALED_NV = 0x30E9; /// <summary> /// [EGL] Value of EGL_ALREADY_SIGNALED_NV symbol. /// </summary> [RequiredByFeature("EGL_NV_sync")] public const int ALREADY_SIGNALED_NV = 0x30EA; /// <summary> /// [EGL] Value of EGL_TIMEOUT_EXPIRED_NV symbol. /// </summary> [RequiredByFeature("EGL_NV_sync")] public const int TIMEOUT_EXPIRED_NV = 0x30EB; /// <summary> /// [EGL] Value of EGL_CONDITION_SATISFIED_NV symbol. /// </summary> [RequiredByFeature("EGL_NV_sync")] public const int CONDITION_SATISFIED_NV = 0x30EC; /// <summary> /// [EGL] Value of EGL_SYNC_TYPE_NV symbol. /// </summary> [RequiredByFeature("EGL_NV_sync")] public const int SYNC_TYPE_NV = 0x30ED; /// <summary> /// [EGL] Value of EGL_SYNC_CONDITION_NV symbol. /// </summary> [RequiredByFeature("EGL_NV_sync")] public const int SYNC_CONDITION_NV = 0x30EE; /// <summary> /// [EGL] Value of EGL_SYNC_FENCE_NV symbol. /// </summary> [RequiredByFeature("EGL_NV_sync")] public const int SYNC_FENCE_NV = 0x30EF; /// <summary> /// [EGL] eglCreateFenceSyncNV: Binding for eglCreateFenceSyncNV. 
/// </summary> /// <param name="dpy"> /// A <see cref="T:IntPtr"/>. /// </param> /// <param name="condition"> /// A <see cref="T:uint"/>. /// </param> /// <param name="attrib_list"> /// A <see cref="T:int[]"/>. /// </param> [RequiredByFeature("EGL_NV_sync")] public static IntPtr CreateFenceSyncNV(IntPtr dpy, uint condition, int[] attrib_list) { IntPtr retValue; unsafe { fixed (int* p_attrib_list = attrib_list) { Debug.Assert(Delegates.peglCreateFenceSyncNV != null, "peglCreateFenceSyncNV not implemented"); retValue = Delegates.peglCreateFenceSyncNV(dpy, condition, p_attrib_list); LogCommand("eglCreateFenceSyncNV", retValue, dpy, condition, attrib_list ); } } DebugCheckErrors(retValue); return (retValue); } /// <summary> /// [EGL] eglDestroySyncNV: Binding for eglDestroySyncNV. /// </summary> /// <param name="sync"> /// A <see cref="T:IntPtr"/>. /// </param> [RequiredByFeature("EGL_NV_sync")] public static bool DestroySyncNV(IntPtr sync) { bool retValue; Debug.Assert(Delegates.peglDestroySyncNV != null, "peglDestroySyncNV not implemented"); retValue = Delegates.peglDestroySyncNV(sync); LogCommand("eglDestroySyncNV", retValue, sync ); DebugCheckErrors(retValue); return (retValue); } /// <summary> /// [EGL] eglFenceNV: Binding for eglFenceNV. /// </summary> /// <param name="sync"> /// A <see cref="T:IntPtr"/>. /// </param> [RequiredByFeature("EGL_NV_sync")] public static bool FenceNV(IntPtr sync) { bool retValue; Debug.Assert(Delegates.peglFenceNV != null, "peglFenceNV not implemented"); retValue = Delegates.peglFenceNV(sync); LogCommand("eglFenceNV", retValue, sync ); DebugCheckErrors(retValue); return (retValue); } /// <summary> /// [EGL] eglClientWaitSyncNV: Binding for eglClientWaitSyncNV. /// </summary> /// <param name="sync"> /// A <see cref="T:IntPtr"/>. /// </param> /// <param name="flags"> /// A <see cref="T:int"/>. /// </param> /// <param name="timeout"> /// A <see cref="T:ulong"/>. 
/// </param> [RequiredByFeature("EGL_NV_sync")] public static int ClientWaitSyncNV(IntPtr sync, int flags, ulong timeout) { int retValue; Debug.Assert(Delegates.peglClientWaitSyncNV != null, "peglClientWaitSyncNV not implemented"); retValue = Delegates.peglClientWaitSyncNV(sync, flags, timeout); LogCommand("eglClientWaitSyncNV", retValue, sync, flags, timeout ); DebugCheckErrors(retValue); return (retValue); } /// <summary> /// [EGL] eglSignalSyncNV: Binding for eglSignalSyncNV. /// </summary> /// <param name="sync"> /// A <see cref="T:IntPtr"/>. /// </param> /// <param name="mode"> /// A <see cref="T:uint"/>. /// </param> [RequiredByFeature("EGL_NV_sync")] public static bool SignalSyncNV(IntPtr sync, uint mode) { bool retValue; Debug.Assert(Delegates.peglSignalSyncNV != null, "peglSignalSyncNV not implemented"); retValue = Delegates.peglSignalSyncNV(sync, mode); LogCommand("eglSignalSyncNV", retValue, sync, mode ); DebugCheckErrors(retValue); return (retValue); } /// <summary> /// [EGL] eglGetSyncAttribNV: Binding for eglGetSyncAttribNV. /// </summary> /// <param name="sync"> /// A <see cref="T:IntPtr"/>. /// </param> /// <param name="attribute"> /// A <see cref="T:int"/>. /// </param> /// <param name="value"> /// A <see cref="T:int[]"/>. 
/// </param> [RequiredByFeature("EGL_NV_sync")] public static bool GetSyncAttribNV(IntPtr sync, int attribute, [Out] int[] value) { bool retValue; unsafe { fixed (int* p_value = value) { Debug.Assert(Delegates.peglGetSyncAttribNV != null, "peglGetSyncAttribNV not implemented"); retValue = Delegates.peglGetSyncAttribNV(sync, attribute, p_value); LogCommand("eglGetSyncAttribNV", retValue, sync, attribute, value ); } } DebugCheckErrors(retValue); return (retValue); } internal static unsafe partial class Delegates { [RequiredByFeature("EGL_NV_sync")] [SuppressUnmanagedCodeSecurity] internal delegate IntPtr eglCreateFenceSyncNV(IntPtr dpy, uint condition, int* attrib_list); [RequiredByFeature("EGL_NV_sync")] internal static eglCreateFenceSyncNV peglCreateFenceSyncNV; [RequiredByFeature("EGL_NV_sync")] [SuppressUnmanagedCodeSecurity] internal delegate bool eglDestroySyncNV(IntPtr sync); [RequiredByFeature("EGL_NV_sync")] internal static eglDestroySyncNV peglDestroySyncNV; [RequiredByFeature("EGL_NV_sync")] [SuppressUnmanagedCodeSecurity] internal delegate bool eglFenceNV(IntPtr sync); [RequiredByFeature("EGL_NV_sync")] internal static eglFenceNV peglFenceNV; [RequiredByFeature("EGL_NV_sync")] [SuppressUnmanagedCodeSecurity] internal delegate int eglClientWaitSyncNV(IntPtr sync, int flags, ulong timeout); [RequiredByFeature("EGL_NV_sync")] internal static eglClientWaitSyncNV peglClientWaitSyncNV; [RequiredByFeature("EGL_NV_sync")] [SuppressUnmanagedCodeSecurity] internal delegate bool eglSignalSyncNV(IntPtr sync, uint mode); [RequiredByFeature("EGL_NV_sync")] internal static eglSignalSyncNV peglSignalSyncNV; [RequiredByFeature("EGL_NV_sync")] [SuppressUnmanagedCodeSecurity] internal delegate bool eglGetSyncAttribNV(IntPtr sync, int attribute, int* value); [RequiredByFeature("EGL_NV_sync")] internal static eglGetSyncAttribNV peglGetSyncAttribNV; } } }
{ "content_hash": "2ab2a69725de25c47554d766f0067b73", "timestamp": "", "source": "github", "line_count": 269, "max_line_length": 100, "avg_line_length": 28.267657992565056, "alnum_prop": 0.6741188847974751, "repo_name": "luca-piccioni/OpenGL.Net", "id": "b968cb579188897b6620de21daa2abb38d9f1790", "size": "8874", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "OpenGL.Net/NV/Egl.NV_sync.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "12888" }, { "name": "C", "bytes": "996087" }, { "name": "C#", "bytes": "14717564" }, { "name": "GLSL", "bytes": "72069" }, { "name": "XSLT", "bytes": "7523" } ], "symlink_target": "" }
This PHP class provides a simple interface for translating text with the Google Translate service. [![endorse](http://api.coderwall.com/8xx8ru/endorsecount.png)](http://coderwall.com/8xx8ru)
{ "content_hash": "ad519e3609968bd4f3c8a5f37d7dda92", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 98, "avg_line_length": 63.666666666666664, "alnum_prop": 0.7958115183246073, "repo_name": "Andrew8xx8/GoogleTranslater", "id": "26f6dee913ec0757341b3cb84bb6ea0dd8fa03be", "size": "191", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "6052" } ], "symlink_target": "" }
"""Eager mode TF policy built using build_tf_policy(). It supports both traced and non-traced eager execution modes.""" import functools import logging import threading from typing import Dict, List, Optional, Tuple from ray.util.debug import log_once from ray.rllib.models.catalog import ModelCatalog from ray.rllib.models.repeated_values import RepeatedValues from ray.rllib.policy.policy import Policy, LEARNER_STATS_KEY from ray.rllib.policy.rnn_sequencing import pad_batch_to_sequences_of_same_size from ray.rllib.policy.sample_batch import SampleBatch from ray.rllib.utils import add_mixins, force_list from ray.rllib.utils.annotations import override from ray.rllib.utils.deprecation import deprecation_warning, DEPRECATED_VALUE from ray.rllib.utils.framework import try_import_tf from ray.rllib.utils.threading import with_lock from ray.rllib.utils.typing import TensorType tf1, tf, tfv = try_import_tf() logger = logging.getLogger(__name__) def _convert_to_tf(x, dtype=None): if isinstance(x, SampleBatch): dict_ = {k: v for k, v in x.items() if k != SampleBatch.INFOS} return tf.nest.map_structure(_convert_to_tf, dict_) elif isinstance(x, Policy): return x # Special handling of "Repeated" values. elif isinstance(x, RepeatedValues): return RepeatedValues( tf.nest.map_structure(_convert_to_tf, x.values), x.lengths, x.max_len) if x is not None: d = dtype x = tf.nest.map_structure( lambda f: _convert_to_tf(f, d) if isinstance(f, RepeatedValues) else tf.convert_to_tensor(f, d) if f is not None else None, x) return x def _convert_to_numpy(x): def _map(x): if isinstance(x, tf.Tensor): return x.numpy() return x try: return tf.nest.map_structure(_map, x) except AttributeError: raise TypeError( ("Object of type {} has no method to convert to numpy.").format( type(x))) def convert_eager_inputs(func): @functools.wraps(func) def _func(*args, **kwargs): if tf.executing_eagerly(): args = [_convert_to_tf(x) for x in args] # TODO: (sven) find a way to remove key-specific hacks. 
kwargs = { k: _convert_to_tf( v, dtype=tf.int64 if k == "timestep" else None) for k, v in kwargs.items() if k not in {"info_batch", "episodes"} } return func(*args, **kwargs) return _func def convert_eager_outputs(func): @functools.wraps(func) def _func(*args, **kwargs): out = func(*args, **kwargs) if tf.executing_eagerly(): out = tf.nest.map_structure(_convert_to_numpy, out) return out return _func def _disallow_var_creation(next_creator, **kw): v = next_creator(**kw) raise ValueError("Detected a variable being created during an eager " "forward pass. Variables should only be created during " "model initialization: {}".format(v.name)) def traced_eager_policy(eager_policy_cls): """Wrapper that enables tracing for all eager policy methods. This is enabled by the --trace / "eager_tracing" config.""" class TracedEagerPolicy(eager_policy_cls): def __init__(self, *args, **kwargs): self._traced_learn_on_batch = None self._traced_compute_actions = None self._traced_compute_gradients = None self._traced_apply_gradients = None super(TracedEagerPolicy, self).__init__(*args, **kwargs) @override(eager_policy_cls) @convert_eager_inputs @convert_eager_outputs def _learn_on_batch_eager(self, samples): if self._traced_learn_on_batch is None: self._traced_learn_on_batch = tf.function( super(TracedEagerPolicy, self)._learn_on_batch_eager, autograph=False, experimental_relax_shapes=True) return self._traced_learn_on_batch(samples) @override(Policy) @convert_eager_inputs @convert_eager_outputs def compute_actions(self, obs_batch, state_batches=None, prev_action_batch=None, prev_reward_batch=None, info_batch=None, episodes=None, explore=None, timestep=None, **kwargs): obs_batch = tf.convert_to_tensor(obs_batch) state_batches = _convert_to_tf(state_batches) prev_action_batch = _convert_to_tf(prev_action_batch) prev_reward_batch = _convert_to_tf(prev_reward_batch) if self._traced_compute_actions is None: self._traced_compute_actions = tf.function( super(TracedEagerPolicy, 
self).compute_actions, autograph=False, experimental_relax_shapes=True) return self._traced_compute_actions( obs_batch, state_batches, prev_action_batch, prev_reward_batch, info_batch, episodes, explore, timestep, **kwargs) @override(eager_policy_cls) @convert_eager_inputs @convert_eager_outputs def _compute_gradients_eager(self, samples): if self._traced_compute_gradients is None: self._traced_compute_gradients = tf.function( super(TracedEagerPolicy, self).compute_gradients, autograph=False, experimental_relax_shapes=True) return self._traced_compute_gradients(samples) @override(Policy) @convert_eager_inputs @convert_eager_outputs def apply_gradients(self, grads): if self._traced_apply_gradients is None: self._traced_apply_gradients = tf.function( super(TracedEagerPolicy, self).apply_gradients, autograph=False, experimental_relax_shapes=True) return self._traced_apply_gradients(grads) TracedEagerPolicy.__name__ = eager_policy_cls.__name__ TracedEagerPolicy.__qualname__ = eager_policy_cls.__qualname__ return TracedEagerPolicy def build_eager_tf_policy( name, loss_fn, get_default_config=None, postprocess_fn=None, stats_fn=None, optimizer_fn=None, compute_gradients_fn=None, apply_gradients_fn=None, grad_stats_fn=None, extra_learn_fetches_fn=None, extra_action_out_fn=None, validate_spaces=None, before_init=None, before_loss_init=None, after_init=None, make_model=None, action_sampler_fn=None, action_distribution_fn=None, mixins=None, get_batch_divisibility_req=None, # Deprecated args. obs_include_prev_action_reward=DEPRECATED_VALUE, extra_action_fetches_fn=None, gradients_fn=None, ): """Build an eager TF policy. An eager policy runs all operations in eager mode, which makes debugging much simpler, but has lower performance. You shouldn't need to call this directly. Rather, prefer to build a TF graph policy and use set {"framework": "tfe"} in the trainer config to have it automatically be converted to an eager policy. 
This has the same signature as build_tf_policy().""" base = add_mixins(Policy, mixins) if obs_include_prev_action_reward != DEPRECATED_VALUE: deprecation_warning(old="obs_include_prev_action_reward", error=False) if extra_action_fetches_fn is not None: deprecation_warning( old="extra_action_fetches_fn", new="extra_action_out_fn", error=False) extra_action_out_fn = extra_action_fetches_fn if gradients_fn is not None: deprecation_warning( old="gradients_fn", new="compute_gradients_fn", error=False) compute_gradients_fn = gradients_fn class eager_policy_cls(base): def __init__(self, observation_space, action_space, config): assert tf.executing_eagerly() self.framework = config.get("framework", "tfe") Policy.__init__(self, observation_space, action_space, config) # Log device and worker index. from ray.rllib.evaluation.rollout_worker import get_global_worker worker = get_global_worker() worker_idx = worker.worker_index if worker else 0 if tf.config.list_physical_devices("GPU"): logger.info( "TF-eager Policy (worker={}) running on GPU.".format( worker_idx if worker_idx > 0 else "local")) else: logger.info( "TF-eager Policy (worker={}) running on CPU.".format( worker_idx if worker_idx > 0 else "local")) self._is_training = False self._loss_initialized = False self._sess = None self._loss = loss_fn self.batch_divisibility_req = get_batch_divisibility_req(self) if \ callable(get_batch_divisibility_req) else \ (get_batch_divisibility_req or 1) self._max_seq_len = config["model"]["max_seq_len"] if get_default_config: config = dict(get_default_config(), **config) if validate_spaces: validate_spaces(self, observation_space, action_space, config) if before_init: before_init(self, observation_space, action_space, config) self.config = config self.dist_class = None if action_sampler_fn or action_distribution_fn: if not make_model: raise ValueError( "`make_model` is required if `action_sampler_fn` OR " "`action_distribution_fn` is given") else: self.dist_class, logit_dim = 
ModelCatalog.get_action_dist( action_space, self.config["model"]) if make_model: self.model = make_model(self, observation_space, action_space, config) else: self.model = ModelCatalog.get_model_v2( observation_space, action_space, logit_dim, config["model"], framework=self.framework, ) # Lock used for locking some methods on the object-level. # This prevents possible race conditions when calling the model # first, then its value function (e.g. in a loss function), in # between of which another model call is made (e.g. to compute an # action). self._lock = threading.RLock() # Auto-update model's inference view requirements, if recurrent. self._update_model_view_requirements_from_init_state() self.exploration = self._create_exploration() self._state_inputs = self.model.get_initial_state() self._is_recurrent = len(self._state_inputs) > 0 # Combine view_requirements for Model and Policy. self.view_requirements.update(self.model.view_requirements) if before_loss_init: before_loss_init(self, observation_space, action_space, config) if optimizer_fn: optimizers = optimizer_fn(self, config) else: optimizers = tf.keras.optimizers.Adam(config["lr"]) optimizers = force_list(optimizers) if getattr(self, "exploration", None): optimizers = self.exploration.get_exploration_optimizer( optimizers) # TODO: (sven) Allow tf policy to have more than 1 optimizer. # Just like torch Policy does. self._optimizer = optimizers[0] if optimizers else None self._initialize_loss_from_dummy_batch( auto_remove_unneeded_view_reqs=True, stats_fn=stats_fn, ) self._loss_initialized = True if after_init: after_init(self, observation_space, action_space, config) # Got to reset global_timestep again after fake run-throughs. self.global_timestep = 0 @override(Policy) def postprocess_trajectory(self, sample_batch, other_agent_batches=None, episode=None): assert tf.executing_eagerly() # Call super's postprocess_trajectory first. 
sample_batch = Policy.postprocess_trajectory(self, sample_batch) if postprocess_fn: return postprocess_fn(self, sample_batch, other_agent_batches, episode) return sample_batch @with_lock @override(Policy) def learn_on_batch(self, postprocessed_batch): # Callback handling. learn_stats = {} self.callbacks.on_learn_on_batch( policy=self, train_batch=postprocessed_batch, result=learn_stats) if not isinstance(postprocessed_batch, SampleBatch) or \ not postprocessed_batch.zero_padded: pad_batch_to_sequences_of_same_size( postprocessed_batch, max_seq_len=self._max_seq_len, shuffle=False, batch_divisibility_req=self.batch_divisibility_req, view_requirements=self.view_requirements, ) self._is_training = True postprocessed_batch["is_training"] = True stats = self._learn_on_batch_eager(postprocessed_batch) stats.update({"custom_metrics": learn_stats}) return stats @convert_eager_inputs @convert_eager_outputs def _learn_on_batch_eager(self, samples): with tf.variable_creator_scope(_disallow_var_creation): grads_and_vars, stats = self._compute_gradients(samples) self._apply_gradients(grads_and_vars) return stats @override(Policy) def compute_gradients(self, samples): pad_batch_to_sequences_of_same_size( samples, shuffle=False, max_seq_len=self._max_seq_len, batch_divisibility_req=self.batch_divisibility_req) self._is_training = True samples["is_training"] = True return self._compute_gradients_eager(samples) @convert_eager_inputs @convert_eager_outputs def _compute_gradients_eager(self, samples): with tf.variable_creator_scope(_disallow_var_creation): grads_and_vars, stats = self._compute_gradients(samples) grads = [g for g, v in grads_and_vars] return grads, stats @override(Policy) def compute_actions(self, obs_batch, state_batches=None, prev_action_batch=None, prev_reward_batch=None, info_batch=None, episodes=None, explore=None, timestep=None, **kwargs): self._is_training = False self._is_recurrent = \ state_batches is not None and state_batches != [] if not 
tf1.executing_eagerly(): tf1.enable_eager_execution() input_dict = { SampleBatch.CUR_OBS: tf.convert_to_tensor(obs_batch), "is_training": tf.constant(False), } if prev_action_batch is not None: input_dict[SampleBatch.PREV_ACTIONS] = \ tf.convert_to_tensor(prev_action_batch) if prev_reward_batch is not None: input_dict[SampleBatch.PREV_REWARDS] = \ tf.convert_to_tensor(prev_reward_batch) return self._compute_action_helper(input_dict, state_batches, episodes, explore, timestep) @override(Policy) def compute_actions_from_input_dict( self, input_dict: Dict[str, TensorType], explore: bool = None, timestep: Optional[int] = None, **kwargs ) -> Tuple[TensorType, List[TensorType], Dict[str, TensorType]]: if not tf1.executing_eagerly(): tf1.enable_eager_execution() # Pass lazy (eager) tensor dict to Model as `input_dict`. input_dict = self._lazy_tensor_dict(input_dict) # Pack internal state inputs into (separate) list. state_batches = [ input_dict[k] for k in input_dict.keys() if "state_in" in k[:8] ] return self._compute_action_helper(input_dict, state_batches, None, explore, timestep) @with_lock @convert_eager_inputs @convert_eager_outputs def _compute_action_helper(self, input_dict, state_batches, episodes, explore, timestep): explore = explore if explore is not None else \ self.config["explore"] timestep = timestep if timestep is not None else \ self.global_timestep if isinstance(timestep, tf.Tensor): timestep = int(timestep.numpy()) self._is_training = False self._state_in = state_batches or [] # Calculate RNN sequence lengths. batch_size = input_dict[SampleBatch.CUR_OBS].shape[0] seq_lens = tf.ones(batch_size, dtype=tf.int32) if state_batches \ else None # Add default and custom fetches. extra_fetches = {} # Use Exploration object. 
with tf.variable_creator_scope(_disallow_var_creation): if action_sampler_fn: dist_inputs = None state_out = [] actions, logp = action_sampler_fn( self, self.model, input_dict[SampleBatch.CUR_OBS], explore=explore, timestep=timestep, episodes=episodes) else: # Exploration hook before each forward pass. self.exploration.before_compute_actions( timestep=timestep, explore=explore) if action_distribution_fn: # Try new action_distribution_fn signature, supporting # state_batches and seq_lens. try: dist_inputs, self.dist_class, state_out = \ action_distribution_fn( self, self.model, input_dict=input_dict, state_batches=state_batches, seq_lens=seq_lens, explore=explore, timestep=timestep, is_training=False) # Trying the old way (to stay backward compatible). # TODO: Remove in future. except TypeError as e: if "positional argument" in e.args[0] or \ "unexpected keyword argument" in e.args[0]: dist_inputs, self.dist_class, state_out = \ action_distribution_fn( self, self.model, input_dict[SampleBatch.CUR_OBS], explore=explore, timestep=timestep, is_training=False) else: raise e elif isinstance(self.model, tf.keras.Model): input_dict = SampleBatch(input_dict, seq_lens=seq_lens) if state_batches and "state_in_0" not in input_dict: for i, s in enumerate(state_batches): input_dict[f"state_in_{i}"] = s self._lazy_tensor_dict(input_dict) dist_inputs, state_out, extra_fetches = \ self.model(input_dict) else: dist_inputs, state_out = self.model( input_dict, state_batches, seq_lens) action_dist = self.dist_class(dist_inputs, self.model) # Get the exploration action from the forward results. actions, logp = self.exploration.get_exploration_action( action_distribution=action_dist, timestep=timestep, explore=explore) # Action-logp and action-prob. if logp is not None: extra_fetches[SampleBatch.ACTION_PROB] = tf.exp(logp) extra_fetches[SampleBatch.ACTION_LOGP] = logp # Action-dist inputs. 
if dist_inputs is not None: extra_fetches[SampleBatch.ACTION_DIST_INPUTS] = dist_inputs # Custom extra fetches. if extra_action_out_fn: extra_fetches.update(extra_action_out_fn(self)) # Update our global timestep by the batch size. self.global_timestep += int(batch_size) return actions, state_out, extra_fetches @with_lock @override(Policy) def compute_log_likelihoods(self, actions, obs_batch, state_batches=None, prev_action_batch=None, prev_reward_batch=None): if action_sampler_fn and action_distribution_fn is None: raise ValueError("Cannot compute log-prob/likelihood w/o an " "`action_distribution_fn` and a provided " "`action_sampler_fn`!") seq_lens = tf.ones(len(obs_batch), dtype=tf.int32) input_dict = { SampleBatch.CUR_OBS: tf.convert_to_tensor(obs_batch), "is_training": tf.constant(False), } if prev_action_batch is not None: input_dict[SampleBatch.PREV_ACTIONS] = \ tf.convert_to_tensor(prev_action_batch) if prev_reward_batch is not None: input_dict[SampleBatch.PREV_REWARDS] = \ tf.convert_to_tensor(prev_reward_batch) # Exploration hook before each forward pass. self.exploration.before_compute_actions(explore=False) # Action dist class and inputs are generated via custom function. if action_distribution_fn: dist_inputs, dist_class, _ = action_distribution_fn( self, self.model, input_dict[SampleBatch.CUR_OBS], explore=False, is_training=False) # Default log-likelihood calculation. 
else: dist_inputs, _ = self.model(input_dict, state_batches, seq_lens) dist_class = self.dist_class action_dist = dist_class(dist_inputs, self.model) log_likelihoods = action_dist.logp(actions) return log_likelihoods @override(Policy) def apply_gradients(self, gradients): self._apply_gradients( zip([(tf.convert_to_tensor(g) if g is not None else None) for g in gradients], self.model.trainable_variables())) @override(Policy) def get_exploration_state(self): return _convert_to_numpy(self.exploration.get_state()) @override(Policy) def get_weights(self, as_dict=False): variables = self.variables() if as_dict: return {v.name: v.numpy() for v in variables} return [v.numpy() for v in variables] @override(Policy) def set_weights(self, weights): variables = self.variables() assert len(weights) == len(variables), (len(weights), len(variables)) for v, w in zip(variables, weights): v.assign(w) @override(Policy) def get_state(self): state = super().get_state() if self._optimizer and \ len(self._optimizer.variables()) > 0: state["_optimizer_variables"] = \ self._optimizer.variables() # Add exploration state. state["_exploration_state"] = self.exploration.get_state() return state @override(Policy) def set_state(self, state): state = state.copy() # shallow copy # Set optimizer vars first. optimizer_vars = state.get("_optimizer_variables", None) if optimizer_vars and self._optimizer.variables(): logger.warning( "Cannot restore an optimizer's state for tf eager! Keras " "is not able to save the v1.x optimizers (from " "tf.compat.v1.train) since they aren't compatible with " "checkpoints.") for opt_var, value in zip(self._optimizer.variables(), optimizer_vars): opt_var.assign(value) # Set exploration's state. if hasattr(self, "exploration") and "_exploration_state" in state: self.exploration.set_state(state=state["_exploration_state"]) # Then the Policy's (NN) weights. 
super().set_state(state) def variables(self): """Return the list of all savable variables for this policy.""" if isinstance(self.model, tf.keras.Model): return self.model.variables else: return self.model.variables() @override(Policy) def is_recurrent(self): return self._is_recurrent @override(Policy) def num_state_tensors(self): return len(self._state_inputs) @override(Policy) def get_initial_state(self): if hasattr(self, "model"): return self.model.get_initial_state() return [] def get_session(self): return None # None implies eager def get_placeholder(self, ph): raise ValueError( "get_placeholder() is not allowed in eager mode. Try using " "rllib.utils.tf_ops.make_tf_callable() to write " "functions that work in both graph and eager mode.") def loss_initialized(self): return self._loss_initialized @override(Policy) def export_model(self, export_dir): pass # TODO: (sven) Deprecate this in favor of `save()`. @override(Policy) def export_checkpoint(self, export_dir): deprecation_warning("export_checkpoint", "save") def _get_is_training_placeholder(self): return tf.convert_to_tensor(self._is_training) def _apply_gradients(self, grads_and_vars): if apply_gradients_fn: apply_gradients_fn(self, self._optimizer, grads_and_vars) else: self._optimizer.apply_gradients( [(g, v) for g, v in grads_and_vars if g is not None]) @with_lock def _compute_gradients(self, samples): """Computes and returns grads as eager tensors.""" with tf.GradientTape(persistent=compute_gradients_fn is not None) \ as tape: loss = loss_fn(self, self.model, self.dist_class, samples) if isinstance(self.model, tf.keras.Model): variables = self.model.trainable_variables else: variables = self.model.trainable_variables() if compute_gradients_fn: class OptimizerWrapper: def __init__(self, tape): self.tape = tape def compute_gradients(self, loss, var_list): return list( zip(self.tape.gradient(loss, var_list), var_list)) grads_and_vars = compute_gradients_fn(self, OptimizerWrapper(tape), loss) else: 
grads_and_vars = list( zip(tape.gradient(loss, variables), variables)) if log_once("grad_vars"): for _, v in grads_and_vars: logger.info("Optimizing variable {}".format(v.name)) grads = [g for g, v in grads_and_vars] stats = self._stats(self, samples, grads) return grads_and_vars, stats def _stats(self, outputs, samples, grads): fetches = {} if stats_fn: fetches[LEARNER_STATS_KEY] = { k: v for k, v in stats_fn(outputs, samples).items() } else: fetches[LEARNER_STATS_KEY] = {} if extra_learn_fetches_fn: fetches.update( {k: v for k, v in extra_learn_fetches_fn(self).items()}) if grad_stats_fn: fetches.update({ k: v for k, v in grad_stats_fn(self, samples, grads).items() }) return fetches def _lazy_tensor_dict(self, postprocessed_batch: SampleBatch): # TODO: (sven): Keep for a while to ensure backward compatibility. if not isinstance(postprocessed_batch, SampleBatch): postprocessed_batch = SampleBatch(postprocessed_batch) postprocessed_batch.set_get_interceptor(_convert_to_tf) return postprocessed_batch @classmethod def with_tracing(cls): return traced_eager_policy(cls) eager_policy_cls.__name__ = name + "_eager" eager_policy_cls.__qualname__ = name + "_eager" return eager_policy_cls
{ "content_hash": "9b717d742a3022cb524633b04980844b", "timestamp": "", "source": "github", "line_count": 794, "max_line_length": 79, "avg_line_length": 39.885390428211586, "alnum_prop": 0.5285926300167356, "repo_name": "pcmoritz/ray-1", "id": "edc6eaafa26e994b2054c2960f81b19cbd4a446f", "size": "31669", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "rllib/policy/eager_tf_policy.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "70670" }, { "name": "C++", "bytes": "4670851" }, { "name": "CSS", "bytes": "10912" }, { "name": "Dockerfile", "bytes": "14159" }, { "name": "HTML", "bytes": "30414" }, { "name": "Java", "bytes": "1338604" }, { "name": "JavaScript", "bytes": "914" }, { "name": "Jupyter Notebook", "bytes": "1615" }, { "name": "Makefile", "bytes": "234" }, { "name": "Python", "bytes": "10523389" }, { "name": "Shell", "bytes": "117557" }, { "name": "Smarty", "bytes": "239" }, { "name": "Starlark", "bytes": "238506" }, { "name": "TypeScript", "bytes": "259269" } ], "symlink_target": "" }
package resourcefakes import ( "sync" "github.com/cloudfoundry/bosh-cli/release/resource" ) type FakeFingerprinter struct { CalculateStub func([]resource.File, []string) (string, error) calculateMutex sync.RWMutex calculateArgsForCall []struct { arg1 []resource.File arg2 []string } calculateReturns struct { result1 string result2 error } calculateReturnsOnCall map[int]struct { result1 string result2 error } invocations map[string][][]interface{} invocationsMutex sync.RWMutex } func (fake *FakeFingerprinter) Calculate(arg1 []resource.File, arg2 []string) (string, error) { var arg1Copy []resource.File if arg1 != nil { arg1Copy = make([]resource.File, len(arg1)) copy(arg1Copy, arg1) } var arg2Copy []string if arg2 != nil { arg2Copy = make([]string, len(arg2)) copy(arg2Copy, arg2) } fake.calculateMutex.Lock() ret, specificReturn := fake.calculateReturnsOnCall[len(fake.calculateArgsForCall)] fake.calculateArgsForCall = append(fake.calculateArgsForCall, struct { arg1 []resource.File arg2 []string }{arg1Copy, arg2Copy}) fake.recordInvocation("Calculate", []interface{}{arg1Copy, arg2Copy}) fake.calculateMutex.Unlock() if fake.CalculateStub != nil { return fake.CalculateStub(arg1, arg2) } if specificReturn { return ret.result1, ret.result2 } return fake.calculateReturns.result1, fake.calculateReturns.result2 } func (fake *FakeFingerprinter) CalculateCallCount() int { fake.calculateMutex.RLock() defer fake.calculateMutex.RUnlock() return len(fake.calculateArgsForCall) } func (fake *FakeFingerprinter) CalculateArgsForCall(i int) ([]resource.File, []string) { fake.calculateMutex.RLock() defer fake.calculateMutex.RUnlock() return fake.calculateArgsForCall[i].arg1, fake.calculateArgsForCall[i].arg2 } func (fake *FakeFingerprinter) CalculateReturns(result1 string, result2 error) { fake.CalculateStub = nil fake.calculateReturns = struct { result1 string result2 error }{result1, result2} } func (fake *FakeFingerprinter) CalculateReturnsOnCall(i int, result1 string, result2 error) { 
fake.CalculateStub = nil if fake.calculateReturnsOnCall == nil { fake.calculateReturnsOnCall = make(map[int]struct { result1 string result2 error }) } fake.calculateReturnsOnCall[i] = struct { result1 string result2 error }{result1, result2} } func (fake *FakeFingerprinter) Invocations() map[string][][]interface{} { fake.invocationsMutex.RLock() defer fake.invocationsMutex.RUnlock() fake.calculateMutex.RLock() defer fake.calculateMutex.RUnlock() return fake.invocations } func (fake *FakeFingerprinter) recordInvocation(key string, args []interface{}) { fake.invocationsMutex.Lock() defer fake.invocationsMutex.Unlock() if fake.invocations == nil { fake.invocations = map[string][][]interface{}{} } if fake.invocations[key] == nil { fake.invocations[key] = [][]interface{}{} } fake.invocations[key] = append(fake.invocations[key], args) } var _ resource.Fingerprinter = new(FakeFingerprinter)
{ "content_hash": "fbc8fb08ed8ec1d292146238d286ddae", "timestamp": "", "source": "github", "line_count": 110, "max_line_length": 95, "avg_line_length": 27.554545454545455, "alnum_prop": 0.744968657208842, "repo_name": "cppforlife/bosh-lint", "id": "99b610b1a1f9d1a926658a59ed7f305b84769ff2", "size": "3080", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/github.com/cloudfoundry/bosh-cli/release/resource/resourcefakes/fake_fingerprinter.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "271" }, { "name": "CSS", "bytes": "16688" }, { "name": "Gherkin", "bytes": "29789" }, { "name": "Go", "bytes": "28880639" }, { "name": "HTML", "bytes": "37244" }, { "name": "JavaScript", "bytes": "26158" }, { "name": "Makefile", "bytes": "11031" }, { "name": "PowerShell", "bytes": "2578" }, { "name": "Python", "bytes": "780" }, { "name": "Roff", "bytes": "598" }, { "name": "Ruby", "bytes": "22040" }, { "name": "Shell", "bytes": "60987" } ], "symlink_target": "" }
package com.mongodb.internal.async; import com.mongodb.async.SingleResultCallback; import com.mongodb.diagnostics.logging.Logger; import static com.mongodb.assertions.Assertions.notNull; /** * This class is not part of the public API and may be removed or changed at any time. * * @param <T> the result type */ public class ErrorHandlingResultCallback<T> implements SingleResultCallback<T> { private final SingleResultCallback<T> wrapped; private final Logger logger; public static <T> SingleResultCallback<T> errorHandlingCallback(final SingleResultCallback<T> callback) { return errorHandlingCallback(callback, null); } public static <T> SingleResultCallback<T> errorHandlingCallback(final SingleResultCallback<T> callback, final Logger logger) { if (callback instanceof ErrorHandlingResultCallback) { return callback; } else { return new ErrorHandlingResultCallback<T>(callback, logger); } } ErrorHandlingResultCallback(final SingleResultCallback<T> wrapped, final Logger logger) { this.wrapped = notNull("wrapped", wrapped); this.logger = logger; } @Override public void onResult(final T result, final Throwable t) { try { wrapped.onResult(result, t); } catch (Exception e) { if (logger != null) { logger.warn("Callback onResult call produced an error", e); } } } }
{ "content_hash": "d2805807f08c8a00d59a3d9c66884776", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 130, "avg_line_length": 31.404255319148938, "alnum_prop": 0.6802168021680217, "repo_name": "gianpaj/mongo-java-driver", "id": "b43f2d4fa48f906996f0c69681157e93dd476cd4", "size": "2079", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "driver-core/src/main/com/mongodb/internal/async/ErrorHandlingResultCallback.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Groovy", "bytes": "1336021" }, { "name": "Java", "bytes": "4254825" } ], "symlink_target": "" }
using namespace std ;

// Build an axis-aligned rectangular polygon covering `bbox` expanded by
// `buffer` on every side, tagged with the given SRID.
gaiaGeomCollAutoPtr makeBoxGeometry(const BBox &bbox, double buffer, int srid)
{
    gaiaGeomCollPtr box = gaiaAllocGeomColl();
    box->Srid = srid;
    box->DeclaredType = GAIA_POLYGON ;
    gaiaPolygonPtr g_poly = gaiaAddPolygonToGeomColl (box, 5, 0) ;
    gaiaRingPtr er = g_poly->Exterior ;
    // Closed exterior ring: the 5th vertex repeats the 1st.
    gaiaSetPoint (er->Coords, 0, bbox.minx_-buffer, bbox.miny_-buffer);
    gaiaSetPoint (er->Coords, 1, bbox.maxx_+buffer, bbox.miny_-buffer);
    gaiaSetPoint (er->Coords, 2, bbox.maxx_+buffer, bbox.maxy_+buffer);
    gaiaSetPoint (er->Coords, 3, bbox.minx_-buffer, bbox.maxy_+buffer);
    gaiaSetPoint (er->Coords, 4, bbox.minx_-buffer, bbox.miny_-buffer);

    return gaiaGeomCollAutoPtr(box) ;
}

// Build a LINESTRING from the node chain of an OSM way; coordinates are
// resolved through `reader`. Returns nullptr if any node coordinate is
// missing (callback visited fewer points than the way declares).
gaiaGeomCollAutoPtr makeLineString(const OSM::Way &way, OSM::Storage &reader, int srid)
{
    gaiaGeomCollPtr geo_line = gaiaAllocGeomColl();
    geo_line->Srid = srid;
    geo_line->DeclaredType = GAIA_LINESTRING ;

    gaiaLinestringPtr ls = gaiaAddLinestringToGeomColl (geo_line, way.nodes_.size());

    size_t j = 0 ;
    reader.forAllWayCoords(way.id_, [&](osm_id_t id, double lat, double lon) {
        gaiaSetPoint (ls->Coords, j, lon, lat);
        ++j ;
    }) ;

    if ( j != way.nodes_.size() ) {
        // Incomplete coordinate data: release the partially built geometry
        // before signalling failure (was previously leaked).
        gaiaFreeGeomColl(geo_line) ;
        return nullptr ;
    }

    return gaiaGeomCollAutoPtr(geo_line) ;
}

// Build a MULTILINESTRING from several OSM ways; ways with fewer than two
// nodes are skipped. Returns nullptr if any way has missing coordinates.
gaiaGeomCollAutoPtr makeMultiLineString(const vector<OSM::Way> &ways, OSM::Storage &reader, int srid)
{
    gaiaGeomCollPtr geo_mline = gaiaAllocGeomColl();
    geo_mline->Srid = srid ;
    geo_mline->DeclaredType = GAIA_MULTILINESTRING ;

    for( auto &way: ways )
    {
        if ( way.nodes_.size() < 2 ) continue ;

        gaiaLinestringPtr ls = gaiaAddLinestringToGeomColl (geo_mline, way.nodes_.size());

        size_t j=0 ;
        reader.forAllNodeCoordList(way.nodes_, [&](double lat, double lon) {
            gaiaSetPoint (ls->Coords, j, lon, lat);
            ++j ;
        }) ;

        if ( j != way.nodes_.size() ) {
            // Incomplete coordinate data: free the whole collection before
            // signalling failure (was previously leaked).
            gaiaFreeGeomColl(geo_mline) ;
            return nullptr ;
        }
    }

    return gaiaGeomCollAutoPtr(geo_mline) ;
}

// Build a POLYGON consisting of a single exterior ring (no holes).
// NOTE(review): unlike the line builders above, this does not verify that
// all node coordinates were resolved — confirm callers guarantee that.
gaiaGeomCollAutoPtr makeSimplePolygon(const OSM::Ring &ring, OSM::Storage &reader, int srid)
{
    gaiaGeomCollPtr geom = gaiaAllocGeomColl();
    geom->Srid = srid;
    geom->DeclaredType = GAIA_POLYGON ;

    gaiaPolygonPtr g_poly = gaiaAddPolygonToGeomColl(geom, ring.nodes_.size(), 0);

    int j=0 ;
    reader.forAllNodeCoordList(ring.nodes_, [&](double lat, double lon) {
        gaiaSetPoint (g_poly->Exterior->Coords, j, lon, lat);
        ++j ;
    }) ;

    return gaiaGeomCollAutoPtr(geom) ;
}

// Assemble a (multi-)polygon from the rings of an OSM multipolygon relation:
// the rings are collected as linestrings, sanitized, and polygonized so that
// spatialite determines exterior/interior nesting. Returns nullptr when
// sanitization fails.
gaiaGeomCollAutoPtr makeMultiPolygon(const OSM::Polygon &poly, OSM::Storage &reader, int srid)
{
    gaiaGeomCollPtr geom = nullptr ;
    gaiaGeomCollPtr ls_geom = gaiaAllocGeomColl();
    ls_geom->Srid = srid;

    for( uint i=0 ; i<poly.rings_.size() ; i++ )
    {
        const OSM::Ring &ring = poly.rings_[i] ;

        gaiaLinestringPtr ls = gaiaAddLinestringToGeomColl(ls_geom, ring.nodes_.size()) ;

        int j=0 ;
        reader.forAllNodeCoordList(ring.nodes_, [&](double lat, double lon) {
            gaiaSetPoint (ls->Coords, j, lon, lat);
            ++j ;
        }) ;
    }

    // at the moment the functions bellow ignore invalid (self-intersecting) polygons
    // self-intersection can be handled at the level of the multi-polygon parsing function (makePolygonsFromRelation)

    gaiaGeomCollPtr ps = gaiaSanitize(ls_geom) ;
    gaiaFreeGeomColl(ls_geom) ;

    if ( ps ) {
        geom = gaiaPolygonize(ps, 1) ;
        gaiaFreeGeomColl(ps) ;
    }

    return gaiaGeomCollAutoPtr(geom) ;
}

// Build a POINT geometry from a single OSM node.
gaiaGeomCollAutoPtr makePoint(const OSM::Node &poi, int srid)
{
    gaiaGeomCollPtr geo_pt = gaiaAllocGeomColl();
    geo_pt->DeclaredType = GAIA_POINT ;
    geo_pt->Srid = srid ;

    gaiaAddPointToGeomColl (geo_pt, poi.lon_, poi.lat_);

    return gaiaGeomCollAutoPtr(geo_pt) ;
}

// Decode a spatialite WKB blob into a geometry collection; the auto ptr may
// wrap nullptr when the blob is not a valid spatialite geometry.
gaiaGeomCollAutoPtr readWKB(const SQLite::Blob &blob)
{
    return gaiaGeomCollAutoPtr(gaiaFromSpatiaLiteBlobWkb((unsigned char *)blob.data(), blob.size())) ;
}
{ "content_hash": "083f26dbe14c388531f85b761cde1556", "timestamp": "", "source": "github", "line_count": 125, "max_line_length": 117, "avg_line_length": 31.44, "alnum_prop": 0.6615776081424937, "repo_name": "malasiot/maplite", "id": "99319955ad7a8849c606ed6fa100b42c8d0721f2", "size": "3957", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/convert/geom_utils.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "208786" }, { "name": "C++", "bytes": "1872244" }, { "name": "CMake", "bytes": "59377" }, { "name": "Lex", "bytes": "4103" }, { "name": "Python", "bytes": "7212" }, { "name": "Shell", "bytes": "7559" }, { "name": "Yacc", "bytes": "11572" } ], "symlink_target": "" }
<!DOCTYPE html> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/> <link rel="SHORTCUT ICON" href="../../../../../img/clover.ico" /> <link rel="stylesheet" href="../../../../../aui/css/aui.min.css" media="all"/> <link rel="stylesheet" href="../../../../../aui/css/aui-experimental.min.css" media="all"/> <!--[if IE 9]><link rel="stylesheet" href="../../../../../aui/css/aui-ie9.min.css" media="all"/><![endif]--> <style type="text/css" media="all"> @import url('../../../../../style.css'); @import url('../../../../../tree.css'); </style> <script src="../../../../../jquery-1.8.3.min.js" type="text/javascript"></script> <script src="../../../../../aui/js/aui.min.js" type="text/javascript"></script> <script src="../../../../../aui/js/aui-experimental.min.js" type="text/javascript"></script> <script src="../../../../../aui/js/aui-soy.min.js" type="text/javascript"></script> <script src="../../../../../package-nodes-tree.js" type="text/javascript"></script> <script src="../../../../../clover-tree.js" type="text/javascript"></script> <script src="../../../../../clover.js" type="text/javascript"></script> <script src="../../../../../clover-descriptions.js" type="text/javascript"></script> <script src="../../../../../cloud.js" type="text/javascript"></script> <title>ABA Route Transit Number Validator 1.0.1-SNAPSHOT</title> </head> <body> <div id="page"> <header id="header" role="banner"> <nav class="aui-header aui-dropdown2-trigger-group" role="navigation"> <div class="aui-header-inner"> <div class="aui-header-primary"> <h1 id="logo" class="aui-header-logo aui-header-logo-clover"> <a href="http://openclover.org" title="Visit OpenClover home page"><span class="aui-header-logo-device">OpenClover</span></a> </h1> </div> <div class="aui-header-secondary"> <ul class="aui-nav"> <li id="system-help-menu"> <a class="aui-nav-link" title="Open online documentation" target="_blank" 
href="http://openclover.org/documentation"> <span class="aui-icon aui-icon-small aui-iconfont-help">&#160;Help</span> </a> </li> </ul> </div> </div> </nav> </header> <div class="aui-page-panel"> <div class="aui-page-panel-inner"> <div class="aui-page-panel-nav aui-page-panel-nav-clover"> <div class="aui-page-header-inner" style="margin-bottom: 20px;"> <div class="aui-page-header-image"> <a href="http://cardatechnologies.com" target="_top"> <div class="aui-avatar aui-avatar-large aui-avatar-project"> <div class="aui-avatar-inner"> <img src="../../../../../img/clover_logo_large.png" alt="Clover icon"/> </div> </div> </a> </div> <div class="aui-page-header-main" > <h1> <a href="http://cardatechnologies.com" target="_top"> ABA Route Transit Number Validator 1.0.1-SNAPSHOT </a> </h1> </div> </div> <nav class="aui-navgroup aui-navgroup-vertical"> <div class="aui-navgroup-inner"> <ul class="aui-nav"> <li class=""> <a href="../../../../../dashboard.html">Project overview</a> </li> </ul> <div class="aui-nav-heading packages-nav-heading"> <strong>Packages</strong> </div> <div class="aui-nav project-packages"> <form method="get" action="#" class="aui package-filter-container"> <input type="text" autocomplete="off" class="package-filter text" placeholder="Type to filter packages..." name="package-filter" id="package-filter" title="Start typing package name (or part of the name) to search through the tree. 
Use arrow keys and the Enter key to navigate."/> </form> <p class="package-filter-no-results-message hidden"> <small>No results found.</small> </p> <div class="packages-tree-wrapper" data-root-relative="../../../../../" data-package-name="com.cardatechnologies.utils.validators.abaroutevalidator"> <div class="packages-tree-container"></div> <div class="clover-packages-lozenges"></div> </div> </div> </div> </nav> </div> <section class="aui-page-panel-content"> <div class="aui-page-panel-content-clover"> <div class="aui-page-header-main"><ol class="aui-nav aui-nav-breadcrumbs"> <li><a href="../../../../../dashboard.html"> Project Clover database Sat Aug 7 2021 12:29:33 MDT</a></li> <li><a href="test-pkg-summary.html">Package com.cardatechnologies.utils.validators.abaroutevalidator</a></li> <li><a href="test-Test_AbaRouteValidator_08.html">Class Test_AbaRouteValidator_08</a></li> </ol></div> <h1 class="aui-h2-clover"> Test testAbaNumberCheck_16495_good </h1> <table class="aui"> <thead> <tr> <th>Test</th> <th><label title="The test result. 
Either a Pass, Fail or Error.">Status</label></th> <th><label title="When the test execution was started">Start time</label></th> <th><label title="The total time in seconds taken to run this test.">Time (seconds)</label></th> <th><label title="A failure or error message if the test is not successful.">Message</label></th> </tr> </thead> <tbody> <tr> <td> <a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/Test_AbaRouteValidator_08.html?line=35944#src-35944" >testAbaNumberCheck_16495_good</a> </td> <td> <span class="sortValue">1</span><span class="aui-lozenge aui-lozenge-success">PASS</span> </td> <td> 7 Aug 12:37:40 </td> <td> 0.0 </td> <td> <div></div> <div class="errorMessage"></div> </td> </tr> </tbody> </table> <div>&#160;</div> <table class="aui aui-table-sortable"> <thead> <tr> <th style="white-space:nowrap;"><label title="A class that was directly hit by this test.">Target Class</label></th> <th colspan="4"><label title="The percentage of coverage contributed by each single test.">Coverage contributed by</label> testAbaNumberCheck_16495_good</th> </tr> </thead> <tbody> <tr> <td> <span class="sortValue">com.cardatechnologies.utils.validators.abaroutevalidator.AbaRouteValidator</span> &#160;&#160;<a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/AbaRouteValidator.html?id=26537#AbaRouteValidator" title="AbaRouteValidator" name="sl-47">com.cardatechnologies.utils.validators.abaroutevalidator.AbaRouteValidator</a> </td> <td> <span class="sortValue">0.7352941</span>73.5% </td> <td class="align-middle" style="width: 100%" colspan="3"> <div> <div title="73.5% Covered" style="min-width:40px;" class="barNegative contribBarNegative contribBarNegative"><div class="barPositive contribBarPositive contribBarPositive" style="width:73.5%"></div></div></div> </td> </tr> </tbody> </table> </div> <!-- class="aui-page-panel-content-clover" --> <footer id="footer" role="contentinfo"> <section class="footer-body"> <ul> 
<li> Report generated by <a target="_new" href="http://openclover.org">OpenClover</a> v 4.4.1 on Sat Aug 7 2021 12:49:26 MDT using coverage data from Sat Aug 7 2021 12:47:23 MDT. </li> </ul> <ul> <li>OpenClover is free and open-source software. </li> </ul> </section> </footer> </section> <!-- class="aui-page-panel-content" --> </div> <!-- class="aui-page-panel-inner" --> </div> <!-- class="aui-page-panel" --> </div> <!-- id="page" --> </body> </html>
{ "content_hash": "263d3a125bd99d5eb260ff8af94e9f24", "timestamp": "", "source": "github", "line_count": 209, "max_line_length": 297, "avg_line_length": 43.92822966507177, "alnum_prop": 0.5097483934211959, "repo_name": "dcarda/aba.route.validator", "id": "5397a5d99a1c755d717e1dc964ebaf60f3375a18", "size": "9181", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "target13/site/clover/com/cardatechnologies/utils/validators/abaroutevalidator/Test_AbaRouteValidator_08_testAbaNumberCheck_16495_good_kh5.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "18715254" } ], "symlink_target": "" }
<?php

/**
 * Pre-transform that migrates the deprecated name attribute onto id.
 *
 * The name attribute is always removed when present; its value becomes the
 * element's id only when no id was already set.
 */
class HTMLPurifier_AttrTransform_Name extends HTMLPurifier_AttrTransform
{

    public function transform($attr, $config, $context) {
        if (isset($attr['name'])) {
            // Confiscate unconditionally so the deprecated attribute is
            // stripped even when an id already exists.
            $name = $this->confiscateAttr($attr, 'name');
            if (!isset($attr['id'])) {
                $attr['id'] = $name;
            }
        }
        return $attr;
    }

}

// vim: et sw=4 sts=4
{ "content_hash": "7cdf49f3f384856c15f2fddf005912cf", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 74, "avg_line_length": 24.94736842105263, "alnum_prop": 0.5822784810126582, "repo_name": "raspi/zfcomicengine", "id": "d5e0303a74f9e859b213f93fbd9385f8de513423", "size": "474", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "library/htmlpurifier/HTMLPurifier/AttrTransform/Name.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ApacheConf", "bytes": "1283" }, { "name": "CSS", "bytes": "7138" }, { "name": "HTML", "bytes": "55310" }, { "name": "JavaScript", "bytes": "1546" }, { "name": "PHP", "bytes": "9918747" }, { "name": "Python", "bytes": "1974" } ], "symlink_target": "" }
// Entry point for refreshing Twunch data.
// NOTE(review): presumably performs an asynchronous network fetch — confirm
// against the implementation file.
@interface TwunchAPI : NSObject

// Starts a fetch; completionHandler is invoked with YES on success and NO on
// failure. The threading of the callback is not documented here — verify in
// the implementation before relying on it from UI code.
+ (void)fetchWithCompletion:(void(^)(BOOL success))completionHandler;

@end
{ "content_hash": "ec52bba24b5d0dd699d221f1c7f6f13a", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 69, "avg_line_length": 35.666666666666664, "alnum_prop": 0.7850467289719626, "repo_name": "fousa/twunch-2.0", "id": "905a650030cd050fd7770eeb8ece27ed21688980", "size": "240", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "twunch/API/TwunchAPI.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "6407" }, { "name": "Objective-C", "bytes": "285877" } ], "symlink_target": "" }
// Boost.Test suite covering bencode string decoding (load), comparison
// operators, and encoding (dump) for both narrow and wide character types.
BOOST_AUTO_TEST_SUITE(TestString)

// A "<length>:<bytes>" token must decode into the string value, for both
// char and wchar_t specializations.
BOOST_AUTO_TEST_CASE(test_load)
{
    BOOST_TEST_MESSAGE("--- Char string value should be decoded.");
    std::stringstream ss1("20:internationalization");
    bencode::string s1;
    s1.load(ss1);
    BOOST_CHECK(s1 == "internationalization");

    BOOST_TEST_MESSAGE("--- Wchar string value should be decoded.");
    std::wstringstream ss2(L"19:интернационализация");
    bencode::wstring s2;
    s2.load(ss2);
    BOOST_CHECK(s2 == L"интернационализация");
}

// Malformed tokens must raise the dedicated bencode exception types, each
// carrying a diagnostic message that names the specific problem.
BOOST_AUTO_TEST_CASE(test_load_exceptions)
{
    // Length not followed by the `:` delimiter.
    BOOST_TEST_MESSAGE("--- Exception should be raised on "
        "missing the delimiter symbol.");
    std::stringstream ss1("44The randomness comes from atmospheric noise");
    bencode::string s1;

    BOOST_CHECK_EXCEPTION(s1.load(ss1), bencode::encoding_error,
        [](bencode::encoding_error const& exception) {
            auto pos = std::string(exception.what()).find(
                "the delimiter `:` expected, but `e` found");
            return pos != std::string::npos;
        });

    // Non-numeric length prefix.
    BOOST_TEST_MESSAGE("--- Exception should be raised on "
        "invalid string length.");
    std::stringstream ss2("inf:This should be an infinite string");
    bencode::string s2;

    BOOST_CHECK_EXCEPTION(s2.load(ss2), bencode::value_error,
        [](bencode::value_error const& exception) {
            auto pos = std::string(exception.what()).find(
                "the specified string length is not a number");
            return pos != std::string::npos;
        });

    // Negative length prefix.
    BOOST_TEST_MESSAGE("--- Exception should be raised on "
        "negative string length.");
    std::stringstream ss3("-29:Each string should be unique");
    bencode::string s3;

    BOOST_CHECK_EXCEPTION(s3.load(ss3), bencode::value_error,
        [](bencode::value_error const& exception) {
            auto pos = std::string(exception.what()).find(
                "the length of the string value must be a "
                "positive integer: `-29`");
            return pos != std::string::npos;
        });

    // Declared length exceeds the actual payload length.
    BOOST_TEST_MESSAGE("--- Exception should be raised on "
        "too small string length.");
    std::stringstream ss4("100:This string should be much longer.");
    bencode::string s4;

    BOOST_CHECK_EXCEPTION(s4.load(ss4), bencode::value_error,
        [](bencode::value_error const& exception) {
            auto pos = std::string(exception.what()).find(
                "decoded length is not equal to the real one: `100` != `34`");
            return pos != std::string::npos;
        });
}

// bencode strings must compare equal to both C strings and std::string.
BOOST_AUTO_TEST_CASE(test_operators)
{
    BOOST_TEST_MESSAGE("--- Validate string comparison operators.");
    bencode::string s1("The forward heat shield, or apex cover, is "
        "made of brazed stainless steel honeycomb and covered with "
        "ablative material.");

    BOOST_CHECK(s1 == "The forward heat shield, or apex cover, is "
        "made of brazed stainless steel honeycomb and covered with "
        "ablative material.");
    BOOST_CHECK(s1 == std::string("The forward heat shield, or apex "
        "cover, is made of brazed stainless steel honeycomb and covered "
        "with ablative material."));
}

// Encoding must produce the "<length>:<bytes>" form; note the wide-string
// length counts wchar_t code units, not bytes.
BOOST_AUTO_TEST_CASE(test_dump)
{
    BOOST_TEST_MESSAGE("--- String value should be encoded into Char.");
    std::stringstream ss1;
    bencode::string s1("Machiavellianism");
    s1.dump(ss1);
    BOOST_CHECK(ss1.str() == "16:Machiavellianism");

    BOOST_TEST_MESSAGE("--- String value should be encoded into WChar.");
    std::wstringstream ss2;
    bencode::wstring s2(L"グレートブリテンおよび北アイルランド連合王国");
    s2.dump(ss2);
    BOOST_CHECK(ss2.str() == L"22:グレートブリテンおよび北アイルランド連合王国");
}

BOOST_AUTO_TEST_SUITE_END()
{ "content_hash": "d4ff9bfb184cde6aebcf0ba5c64d8493", "timestamp": "", "source": "github", "line_count": 113, "max_line_length": 78, "avg_line_length": 32.796460176991154, "alnum_prop": 0.6300593631948193, "repo_name": "yashkin/libbencode", "id": "664f9e0d68b3aa6244742a12b8851f74b39e91b8", "size": "3975", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "test/string_test.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "59906" }, { "name": "Makefile", "bytes": "1045" } ], "symlink_target": "" }
namespace CIK.News.Framework.Extensions
{
    using System.IO;

    /// <summary>
    /// Extension helpers for <see cref="MemoryStream"/>.
    /// </summary>
    public static class MemoryStreamExtension
    {
        /// <summary>
        /// Writes the entire contents of <paramref name="memoryStream"/> to the
        /// file at <paramref name="fileName"/>. The file is opened for writing
        /// (created if absent); like the previous implementation this does not
        /// truncate an existing longer file.
        /// </summary>
        /// <param name="memoryStream">The stream whose bytes are written out.</param>
        /// <param name="fileName">Destination file path.</param>
        public static void WriteTo(this MemoryStream memoryStream, string fileName)
        {
            // `using` guarantees the file handle is flushed and released even
            // if WriteTo throws; the old Flush/Close pair leaked the handle on
            // exception.
            using (var outStream = File.OpenWrite(fileName))
            {
                memoryStream.WriteTo(outStream);
            }
        }
    }
}
{ "content_hash": "c6ff5b919c62cf6b2602ed0d54aa88a5", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 84, "avg_line_length": 26.4, "alnum_prop": 0.6060606060606061, "repo_name": "zeusro/MagazineWeb", "id": "d08fbd9d9f45778483e56ab04d302384d9c212fa", "size": "398", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Framework/Extensions/MemoryStreamExtension.cs", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "103" }, { "name": "C#", "bytes": "405787" }, { "name": "CSS", "bytes": "35135" }, { "name": "HTML", "bytes": "2954" }, { "name": "JavaScript", "bytes": "77159" }, { "name": "PowerShell", "bytes": "20844" } ], "symlink_target": "" }
#ifndef R600_PIPE_H #define R600_PIPE_H #include "../radeon/r600_pipe_common.h" #include "../radeon/r600_cs.h" #include "r600_llvm.h" #include "r600_public.h" #include "util/u_suballoc.h" #include "util/u_double_list.h" #include "util/u_transfer.h" #define R600_NUM_ATOMS 73 /* the number of CS dwords for flushing and drawing */ #define R600_MAX_FLUSH_CS_DWORDS 16 #define R600_MAX_DRAW_CS_DWORDS 40 #define R600_TRACE_CS_DWORDS 7 #define R600_MAX_USER_CONST_BUFFERS 13 #define R600_MAX_DRIVER_CONST_BUFFERS 4 #define R600_MAX_CONST_BUFFERS (R600_MAX_USER_CONST_BUFFERS + R600_MAX_DRIVER_CONST_BUFFERS) /* start driver buffers after user buffers */ #define R600_UCP_CONST_BUFFER (R600_MAX_USER_CONST_BUFFERS) #define R600_TXQ_CONST_BUFFER (R600_MAX_USER_CONST_BUFFERS + 1) #define R600_BUFFER_INFO_CONST_BUFFER (R600_MAX_USER_CONST_BUFFERS + 2) #define R600_GS_RING_CONST_BUFFER (R600_MAX_USER_CONST_BUFFERS + 3) #define R600_MAX_CONST_BUFFER_SIZE (4096 * sizeof(float[4])) #ifdef PIPE_ARCH_BIG_ENDIAN #define R600_BIG_ENDIAN 1 #else #define R600_BIG_ENDIAN 0 #endif struct r600_context; struct r600_bytecode; struct r600_shader_key; /* This is an atom containing GPU commands that never change. * This is supposed to be copied directly into the CS. 
*/ struct r600_command_buffer { uint32_t *buf; unsigned num_dw; unsigned max_num_dw; unsigned pkt_flags; }; struct r600_db_state { struct r600_atom atom; struct r600_surface *rsurf; }; struct r600_db_misc_state { struct r600_atom atom; bool occlusion_query_enabled; bool flush_depthstencil_through_cb; bool flush_depthstencil_in_place; bool copy_depth, copy_stencil; unsigned copy_sample; unsigned log_samples; unsigned db_shader_control; bool htile_clear; }; struct r600_cb_misc_state { struct r600_atom atom; unsigned cb_color_control; /* this comes from blend state */ unsigned blend_colormask; /* 8*4 bits for 8 RGBA colorbuffers */ unsigned nr_cbufs; unsigned nr_ps_color_outputs; bool multiwrite; bool dual_src_blend; }; struct r600_clip_misc_state { struct r600_atom atom; unsigned pa_cl_clip_cntl; /* from rasterizer */ unsigned pa_cl_vs_out_cntl; /* from vertex shader */ unsigned clip_plane_enable; /* from rasterizer */ unsigned clip_dist_write; /* from vertex shader */ boolean clip_disable; /* from vertex shader */ }; struct r600_alphatest_state { struct r600_atom atom; unsigned sx_alpha_test_control; /* this comes from dsa state */ unsigned sx_alpha_ref; /* this comes from dsa state */ bool bypass; bool cb0_export_16bpc; /* from set_framebuffer_state */ }; struct r600_vgt_state { struct r600_atom atom; uint32_t vgt_multi_prim_ib_reset_en; uint32_t vgt_multi_prim_ib_reset_indx; uint32_t vgt_indx_offset; }; struct r600_blend_color { struct r600_atom atom; struct pipe_blend_color state; }; struct r600_clip_state { struct r600_atom atom; struct pipe_clip_state state; }; struct r600_cs_shader_state { struct r600_atom atom; unsigned kernel_index; struct r600_pipe_compute *shader; }; struct r600_framebuffer { struct r600_atom atom; struct pipe_framebuffer_state state; unsigned compressed_cb_mask; unsigned nr_samples; bool export_16bpc; bool cb0_is_integer; bool is_msaa_resolve; }; struct r600_sample_mask { struct r600_atom atom; uint16_t sample_mask; /* there are only 8 
bits on EG, 16 bits on Cayman */ }; struct r600_config_state { struct r600_atom atom; unsigned sq_gpr_resource_mgmt_1; unsigned sq_gpr_resource_mgmt_2; }; struct r600_stencil_ref { ubyte ref_value[2]; ubyte valuemask[2]; ubyte writemask[2]; }; struct r600_stencil_ref_state { struct r600_atom atom; struct r600_stencil_ref state; struct pipe_stencil_ref pipe_state; }; struct r600_viewport_state { struct r600_atom atom; struct pipe_viewport_state state; int idx; }; struct r600_shader_stages_state { struct r600_atom atom; unsigned geom_enable; }; struct r600_gs_rings_state { struct r600_atom atom; unsigned enable; struct pipe_constant_buffer esgs_ring; struct pipe_constant_buffer gsvs_ring; }; /* This must start from 16. */ /* features */ #define DBG_LLVM (1 << 17) #define DBG_NO_CP_DMA (1 << 18) /* shader backend */ #define DBG_NO_SB (1 << 21) #define DBG_SB_CS (1 << 22) #define DBG_SB_DRY_RUN (1 << 23) #define DBG_SB_STAT (1 << 24) #define DBG_SB_DUMP (1 << 25) #define DBG_SB_NO_FALLBACK (1 << 26) #define DBG_SB_DISASM (1 << 27) #define DBG_SB_SAFEMATH (1 << 28) struct r600_screen { struct r600_common_screen b; bool has_msaa; bool has_compressed_msaa_texturing; /*for compute global memory binding, we allocate stuff here, instead of * buffers. * XXX: Not sure if this is the best place for global_pool. Also, * it's not thread safe, so it won't work with multiple contexts. 
*/ struct compute_memory_pool *global_pool; }; struct r600_pipe_sampler_view { struct pipe_sampler_view base; struct list_head list; struct r600_resource *tex_resource; uint32_t tex_resource_words[8]; bool skip_mip_address_reloc; }; struct r600_rasterizer_state { struct r600_command_buffer buffer; boolean flatshade; boolean two_side; unsigned sprite_coord_enable; unsigned clip_plane_enable; unsigned pa_sc_line_stipple; unsigned pa_cl_clip_cntl; unsigned pa_su_sc_mode_cntl; float offset_units; float offset_scale; bool offset_enable; bool scissor_enable; bool multisample_enable; }; struct r600_poly_offset_state { struct r600_atom atom; enum pipe_format zs_format; float offset_units; float offset_scale; }; struct r600_blend_state { struct r600_command_buffer buffer; struct r600_command_buffer buffer_no_blend; unsigned cb_target_mask; unsigned cb_color_control; unsigned cb_color_control_no_blend; bool dual_src_blend; bool alpha_to_one; }; struct r600_dsa_state { struct r600_command_buffer buffer; unsigned alpha_ref; ubyte valuemask[2]; ubyte writemask[2]; unsigned zwritemask; unsigned sx_alpha_test_control; }; struct r600_pipe_shader; struct r600_pipe_shader_selector { struct r600_pipe_shader *current; struct tgsi_token *tokens; struct pipe_stream_output_info so; unsigned num_shaders; /* PIPE_SHADER_[VERTEX|FRAGMENT|...] 
*/ unsigned type; unsigned nr_ps_max_color_exports; }; struct r600_pipe_sampler_state { uint32_t tex_sampler_words[3]; union pipe_color_union border_color; bool border_color_use; bool seamless_cube_map; }; /* needed for blitter save */ #define NUM_TEX_UNITS 16 struct r600_seamless_cube_map { struct r600_atom atom; bool enabled; }; struct r600_samplerview_state { struct r600_atom atom; struct r600_pipe_sampler_view *views[NUM_TEX_UNITS]; uint32_t enabled_mask; uint32_t dirty_mask; uint32_t compressed_depthtex_mask; /* which textures are depth */ uint32_t compressed_colortex_mask; boolean dirty_txq_constants; boolean dirty_buffer_constants; }; struct r600_sampler_states { struct r600_atom atom; struct r600_pipe_sampler_state *states[NUM_TEX_UNITS]; uint32_t enabled_mask; uint32_t dirty_mask; uint32_t has_bordercolor_mask; /* which states contain the border color */ }; struct r600_textures_info { struct r600_samplerview_state views; struct r600_sampler_states states; bool is_array_sampler[NUM_TEX_UNITS]; /* cube array txq workaround */ uint32_t *txq_constants; /* buffer related workarounds */ uint32_t *buffer_constants; }; struct r600_constbuf_state { struct r600_atom atom; struct pipe_constant_buffer cb[PIPE_MAX_CONSTANT_BUFFERS]; uint32_t enabled_mask; uint32_t dirty_mask; }; struct r600_vertexbuf_state { struct r600_atom atom; struct pipe_vertex_buffer vb[PIPE_MAX_ATTRIBS]; uint32_t enabled_mask; /* non-NULL buffers */ uint32_t dirty_mask; }; /* CSO (constant state object, in other words, immutable state). */ struct r600_cso_state { struct r600_atom atom; void *cso; /* e.g. 
r600_blend_state */ struct r600_command_buffer *cb; }; struct r600_scissor_state { struct r600_atom atom; struct pipe_scissor_state scissor; bool enable; /* r6xx only */ int idx; }; struct r600_fetch_shader { struct r600_resource *buffer; unsigned offset; }; struct r600_shader_state { struct r600_atom atom; struct r600_pipe_shader *shader; }; struct r600_context { struct r600_common_context b; struct r600_screen *screen; struct blitter_context *blitter; struct u_suballocator *allocator_fetch_shader; /* Hardware info. */ boolean has_vertex_cache; boolean keep_tiling_flags; unsigned default_ps_gprs, default_vs_gprs; unsigned r6xx_num_clause_temp_gprs; /* Miscellaneous state objects. */ void *custom_dsa_flush; void *custom_blend_resolve; void *custom_blend_decompress; void *custom_blend_fastclear; /* With rasterizer discard, there doesn't have to be a pixel shader. * In that case, we bind this one: */ void *dummy_pixel_shader; /* These dummy CMASK and FMASK buffers are used to get around the R6xx hardware * bug where valid CMASK and FMASK are required to be present to avoid * a hardlock in certain operations but aren't actually used * for anything useful. */ struct r600_resource *dummy_fmask; struct r600_resource *dummy_cmask; /* State binding slots are here. */ struct r600_atom *atoms[R600_NUM_ATOMS]; /* States for CS initialization. */ struct r600_command_buffer start_cs_cmd; /* invariant state mostly */ /** Compute specific registers initializations. The start_cs_cmd atom * must be emitted before start_compute_cs_cmd. */ struct r600_command_buffer start_compute_cs_cmd; /* Register states. 
*/ struct r600_alphatest_state alphatest_state; struct r600_cso_state blend_state; struct r600_blend_color blend_color; struct r600_cb_misc_state cb_misc_state; struct r600_clip_misc_state clip_misc_state; struct r600_clip_state clip_state; struct r600_db_misc_state db_misc_state; struct r600_db_state db_state; struct r600_cso_state dsa_state; struct r600_framebuffer framebuffer; struct r600_poly_offset_state poly_offset_state; struct r600_cso_state rasterizer_state; struct r600_sample_mask sample_mask; struct r600_scissor_state scissor[16]; struct r600_seamless_cube_map seamless_cube_map; struct r600_config_state config_state; struct r600_stencil_ref_state stencil_ref; struct r600_vgt_state vgt_state; struct r600_viewport_state viewport[16]; /* Shaders and shader resources. */ struct r600_cso_state vertex_fetch_shader; struct r600_shader_state vertex_shader; struct r600_shader_state pixel_shader; struct r600_shader_state geometry_shader; struct r600_shader_state export_shader; struct r600_cs_shader_state cs_shader_state; struct r600_shader_stages_state shader_stages; struct r600_gs_rings_state gs_rings; struct r600_constbuf_state constbuf_state[PIPE_SHADER_TYPES]; struct r600_textures_info samplers[PIPE_SHADER_TYPES]; /** Vertex buffers for fetch shaders */ struct r600_vertexbuf_state vertex_buffer_state; /** Vertex buffers for compute shaders */ struct r600_vertexbuf_state cs_vertex_buffer_state; /* Additional context states. */ unsigned compute_cb_target_mask; struct r600_pipe_shader_selector *ps_shader; struct r600_pipe_shader_selector *vs_shader; struct r600_pipe_shader_selector *gs_shader; struct r600_rasterizer_state *rasterizer; bool alpha_to_one; bool force_blend_disable; boolean dual_src_blend; unsigned zwritemask; /* Index buffer. */ struct pipe_index_buffer index_buffer; /* Last draw state (-1 = unset). */ int last_primitive_type; /* Last primitive type used in draw_vbo. 
*/ int last_start_instance; void *sb_context; struct r600_isa *isa; }; static INLINE void r600_emit_command_buffer(struct radeon_winsys_cs *cs, struct r600_command_buffer *cb) { assert(cs->cdw + cb->num_dw <= RADEON_MAX_CMDBUF_DWORDS); memcpy(cs->buf + cs->cdw, cb->buf, 4 * cb->num_dw); cs->cdw += cb->num_dw; } void r600_trace_emit(struct r600_context *rctx); static INLINE void r600_emit_atom(struct r600_context *rctx, struct r600_atom *atom) { atom->emit(&rctx->b, atom); atom->dirty = false; if (rctx->screen->b.trace_bo) { r600_trace_emit(rctx); } } static INLINE void r600_set_cso_state(struct r600_cso_state *state, void *cso) { state->cso = cso; state->atom.dirty = cso != NULL; } static INLINE void r600_set_cso_state_with_cb(struct r600_cso_state *state, void *cso, struct r600_command_buffer *cb) { state->cb = cb; state->atom.num_dw = cb ? cb->num_dw : 0; r600_set_cso_state(state, cso); } /* compute_memory_pool.c */ struct compute_memory_pool; void compute_memory_pool_delete(struct compute_memory_pool* pool); struct compute_memory_pool* compute_memory_pool_new( struct r600_screen *rscreen); /* evergreen_compute.c */ void evergreen_set_cs_sampler_view(struct pipe_context *ctx_, unsigned start_slot, unsigned count, struct pipe_sampler_view **views); /* evergreen_state.c */ struct pipe_sampler_view * evergreen_create_sampler_view_custom(struct pipe_context *ctx, struct pipe_resource *texture, const struct pipe_sampler_view *state, unsigned width0, unsigned height0, unsigned force_level); void evergreen_init_common_regs(struct r600_command_buffer *cb, enum chip_class ctx_chip_class, enum radeon_family ctx_family, int ctx_drm_minor); void cayman_init_common_regs(struct r600_command_buffer *cb, enum chip_class ctx_chip_class, enum radeon_family ctx_family, int ctx_drm_minor); void evergreen_init_state_functions(struct r600_context *rctx); void evergreen_init_atom_start_cs(struct r600_context *rctx); void evergreen_update_ps_state(struct pipe_context *ctx, struct 
r600_pipe_shader *shader); void evergreen_update_es_state(struct pipe_context *ctx, struct r600_pipe_shader *shader); void evergreen_update_gs_state(struct pipe_context *ctx, struct r600_pipe_shader *shader); void evergreen_update_vs_state(struct pipe_context *ctx, struct r600_pipe_shader *shader); void *evergreen_create_db_flush_dsa(struct r600_context *rctx); void *evergreen_create_resolve_blend(struct r600_context *rctx); void *evergreen_create_decompress_blend(struct r600_context *rctx); void *evergreen_create_fastclear_blend(struct r600_context *rctx); boolean evergreen_is_format_supported(struct pipe_screen *screen, enum pipe_format format, enum pipe_texture_target target, unsigned sample_count, unsigned usage); void evergreen_init_color_surface(struct r600_context *rctx, struct r600_surface *surf); void evergreen_init_color_surface_rat(struct r600_context *rctx, struct r600_surface *surf); void evergreen_update_db_shader_control(struct r600_context * rctx); /* r600_blit.c */ void r600_init_blit_functions(struct r600_context *rctx); void r600_decompress_depth_textures(struct r600_context *rctx, struct r600_samplerview_state *textures); void r600_decompress_color_textures(struct r600_context *rctx, struct r600_samplerview_state *textures); /* r600_shader.c */ int r600_pipe_shader_create(struct pipe_context *ctx, struct r600_pipe_shader *shader, struct r600_shader_key key); void r600_pipe_shader_destroy(struct pipe_context *ctx, struct r600_pipe_shader *shader); /* r600_state.c */ struct pipe_sampler_view * r600_create_sampler_view_custom(struct pipe_context *ctx, struct pipe_resource *texture, const struct pipe_sampler_view *state, unsigned width_first_level, unsigned height_first_level); void r600_init_state_functions(struct r600_context *rctx); void r600_init_atom_start_cs(struct r600_context *rctx); void r600_update_ps_state(struct pipe_context *ctx, struct r600_pipe_shader *shader); void r600_update_es_state(struct pipe_context *ctx, struct 
r600_pipe_shader *shader); void r600_update_gs_state(struct pipe_context *ctx, struct r600_pipe_shader *shader); void r600_update_vs_state(struct pipe_context *ctx, struct r600_pipe_shader *shader); void *r600_create_db_flush_dsa(struct r600_context *rctx); void *r600_create_resolve_blend(struct r600_context *rctx); void *r700_create_resolve_blend(struct r600_context *rctx); void *r600_create_decompress_blend(struct r600_context *rctx); bool r600_adjust_gprs(struct r600_context *rctx); boolean r600_is_format_supported(struct pipe_screen *screen, enum pipe_format format, enum pipe_texture_target target, unsigned sample_count, unsigned usage); void r600_update_db_shader_control(struct r600_context * rctx); /* r600_hw_context.c */ void r600_context_gfx_flush(void *context, unsigned flags, struct pipe_fence_handle **fence); void r600_begin_new_cs(struct r600_context *ctx); void r600_flush_emit(struct r600_context *ctx); void r600_need_cs_space(struct r600_context *ctx, unsigned num_dw, boolean count_draw_in); void r600_cp_dma_copy_buffer(struct r600_context *rctx, struct pipe_resource *dst, uint64_t dst_offset, struct pipe_resource *src, uint64_t src_offset, unsigned size); void evergreen_cp_dma_clear_buffer(struct r600_context *rctx, struct pipe_resource *dst, uint64_t offset, unsigned size, uint32_t clear_value); void r600_dma_copy_buffer(struct r600_context *rctx, struct pipe_resource *dst, struct pipe_resource *src, uint64_t dst_offset, uint64_t src_offset, uint64_t size); /* * evergreen_hw_context.c */ void evergreen_dma_copy_buffer(struct r600_context *rctx, struct pipe_resource *dst, struct pipe_resource *src, uint64_t dst_offset, uint64_t src_offset, uint64_t size); /* r600_state_common.c */ void r600_init_common_state_functions(struct r600_context *rctx); void r600_emit_cso_state(struct r600_context *rctx, struct r600_atom *atom); void r600_emit_alphatest_state(struct r600_context *rctx, struct r600_atom *atom); void r600_emit_blend_color(struct r600_context 
*rctx, struct r600_atom *atom); void r600_emit_vgt_state(struct r600_context *rctx, struct r600_atom *atom); void r600_emit_clip_misc_state(struct r600_context *rctx, struct r600_atom *atom); void r600_emit_stencil_ref(struct r600_context *rctx, struct r600_atom *atom); void r600_emit_viewport_state(struct r600_context *rctx, struct r600_atom *atom); void r600_emit_shader(struct r600_context *rctx, struct r600_atom *a); void r600_init_atom(struct r600_context *rctx, struct r600_atom *atom, unsigned id, void (*emit)(struct r600_context *ctx, struct r600_atom *state), unsigned num_dw); void r600_vertex_buffers_dirty(struct r600_context *rctx); void r600_sampler_views_dirty(struct r600_context *rctx, struct r600_samplerview_state *state); void r600_sampler_states_dirty(struct r600_context *rctx, struct r600_sampler_states *state); void r600_constant_buffers_dirty(struct r600_context *rctx, struct r600_constbuf_state *state); uint32_t r600_translate_stencil_op(int s_op); uint32_t r600_translate_fill(uint32_t func); unsigned r600_tex_wrap(unsigned wrap); unsigned r600_tex_filter(unsigned filter); unsigned r600_tex_mipfilter(unsigned filter); unsigned r600_tex_compare(unsigned compare); bool sampler_state_needs_border_color(const struct pipe_sampler_state *state); struct pipe_surface *r600_create_surface_custom(struct pipe_context *pipe, struct pipe_resource *texture, const struct pipe_surface *templ, unsigned width, unsigned height); unsigned r600_get_swizzle_combined(const unsigned char *swizzle_format, const unsigned char *swizzle_view, boolean vtx); uint32_t r600_translate_texformat(struct pipe_screen *screen, enum pipe_format format, const unsigned char *swizzle_view, uint32_t *word4_p, uint32_t *yuv_format_p); uint32_t r600_translate_colorformat(enum chip_class chip, enum pipe_format format); uint32_t r600_colorformat_endian_swap(uint32_t colorformat); /* r600_uvd.c */ struct pipe_video_codec *r600_uvd_create_decoder(struct pipe_context *context, const struct 
pipe_video_codec *decoder); struct pipe_video_buffer *r600_video_buffer_create(struct pipe_context *pipe, const struct pipe_video_buffer *tmpl); /* * Helpers for building command buffers */ #define PKT3_SET_CONFIG_REG 0x68 #define PKT3_SET_CONTEXT_REG 0x69 #define PKT3_SET_CTL_CONST 0x6F #define PKT3_SET_LOOP_CONST 0x6C #define R600_CONFIG_REG_OFFSET 0x08000 #define R600_CONTEXT_REG_OFFSET 0x28000 #define R600_CTL_CONST_OFFSET 0x3CFF0 #define R600_LOOP_CONST_OFFSET 0X0003E200 #define EG_LOOP_CONST_OFFSET 0x0003A200 #define PKT_TYPE_S(x) (((x) & 0x3) << 30) #define PKT_COUNT_S(x) (((x) & 0x3FFF) << 16) #define PKT3_IT_OPCODE_S(x) (((x) & 0xFF) << 8) #define PKT3_PREDICATE(x) (((x) >> 0) & 0x1) #define PKT3(op, count, predicate) (PKT_TYPE_S(3) | PKT_COUNT_S(count) | PKT3_IT_OPCODE_S(op) | PKT3_PREDICATE(predicate)) #define RADEON_CP_PACKET3_COMPUTE_MODE 0x00000002 /*Evergreen Compute packet3*/ #define PKT3C(op, count, predicate) (PKT_TYPE_S(3) | PKT3_IT_OPCODE_S(op) | PKT_COUNT_S(count) | PKT3_PREDICATE(predicate) | RADEON_CP_PACKET3_COMPUTE_MODE) static INLINE void r600_store_value(struct r600_command_buffer *cb, unsigned value) { cb->buf[cb->num_dw++] = value; } static INLINE void r600_store_array(struct r600_command_buffer *cb, unsigned num, unsigned *ptr) { assert(cb->num_dw+num <= cb->max_num_dw); memcpy(&cb->buf[cb->num_dw], ptr, num * sizeof(ptr[0])); cb->num_dw += num; } static INLINE void r600_store_config_reg_seq(struct r600_command_buffer *cb, unsigned reg, unsigned num) { assert(reg < R600_CONTEXT_REG_OFFSET); assert(cb->num_dw+2+num <= cb->max_num_dw); cb->buf[cb->num_dw++] = PKT3(PKT3_SET_CONFIG_REG, num, 0); cb->buf[cb->num_dw++] = (reg - R600_CONFIG_REG_OFFSET) >> 2; } /** * Needs cb->pkt_flags set to RADEON_CP_PACKET3_COMPUTE_MODE for compute * shaders. 
*/ static INLINE void r600_store_context_reg_seq(struct r600_command_buffer *cb, unsigned reg, unsigned num) { assert(reg >= R600_CONTEXT_REG_OFFSET && reg < R600_CTL_CONST_OFFSET); assert(cb->num_dw+2+num <= cb->max_num_dw); cb->buf[cb->num_dw++] = PKT3(PKT3_SET_CONTEXT_REG, num, 0) | cb->pkt_flags; cb->buf[cb->num_dw++] = (reg - R600_CONTEXT_REG_OFFSET) >> 2; } /** * Needs cb->pkt_flags set to RADEON_CP_PACKET3_COMPUTE_MODE for compute * shaders. */ static INLINE void r600_store_ctl_const_seq(struct r600_command_buffer *cb, unsigned reg, unsigned num) { assert(reg >= R600_CTL_CONST_OFFSET); assert(cb->num_dw+2+num <= cb->max_num_dw); cb->buf[cb->num_dw++] = PKT3(PKT3_SET_CTL_CONST, num, 0) | cb->pkt_flags; cb->buf[cb->num_dw++] = (reg - R600_CTL_CONST_OFFSET) >> 2; } static INLINE void r600_store_loop_const_seq(struct r600_command_buffer *cb, unsigned reg, unsigned num) { assert(reg >= R600_LOOP_CONST_OFFSET); assert(cb->num_dw+2+num <= cb->max_num_dw); cb->buf[cb->num_dw++] = PKT3(PKT3_SET_LOOP_CONST, num, 0); cb->buf[cb->num_dw++] = (reg - R600_LOOP_CONST_OFFSET) >> 2; } /** * Needs cb->pkt_flags set to RADEON_CP_PACKET3_COMPUTE_MODE for compute * shaders. 
*/ static INLINE void eg_store_loop_const_seq(struct r600_command_buffer *cb, unsigned reg, unsigned num) { assert(reg >= EG_LOOP_CONST_OFFSET); assert(cb->num_dw+2+num <= cb->max_num_dw); cb->buf[cb->num_dw++] = PKT3(PKT3_SET_LOOP_CONST, num, 0) | cb->pkt_flags; cb->buf[cb->num_dw++] = (reg - EG_LOOP_CONST_OFFSET) >> 2; } static INLINE void r600_store_config_reg(struct r600_command_buffer *cb, unsigned reg, unsigned value) { r600_store_config_reg_seq(cb, reg, 1); r600_store_value(cb, value); } static INLINE void r600_store_context_reg(struct r600_command_buffer *cb, unsigned reg, unsigned value) { r600_store_context_reg_seq(cb, reg, 1); r600_store_value(cb, value); } static INLINE void r600_store_ctl_const(struct r600_command_buffer *cb, unsigned reg, unsigned value) { r600_store_ctl_const_seq(cb, reg, 1); r600_store_value(cb, value); } static INLINE void r600_store_loop_const(struct r600_command_buffer *cb, unsigned reg, unsigned value) { r600_store_loop_const_seq(cb, reg, 1); r600_store_value(cb, value); } static INLINE void eg_store_loop_const(struct r600_command_buffer *cb, unsigned reg, unsigned value) { eg_store_loop_const_seq(cb, reg, 1); r600_store_value(cb, value); } void r600_init_command_buffer(struct r600_command_buffer *cb, unsigned num_dw); void r600_release_command_buffer(struct r600_command_buffer *cb); static INLINE void r600_write_compute_context_reg_seq(struct radeon_winsys_cs *cs, unsigned reg, unsigned num) { r600_write_context_reg_seq(cs, reg, num); /* Set the compute bit on the packet header */ cs->buf[cs->cdw - 2] |= RADEON_CP_PACKET3_COMPUTE_MODE; } static INLINE void r600_write_ctl_const_seq(struct radeon_winsys_cs *cs, unsigned reg, unsigned num) { assert(reg >= R600_CTL_CONST_OFFSET); assert(cs->cdw+2+num <= RADEON_MAX_CMDBUF_DWORDS); cs->buf[cs->cdw++] = PKT3(PKT3_SET_CTL_CONST, num, 0); cs->buf[cs->cdw++] = (reg - R600_CTL_CONST_OFFSET) >> 2; } static INLINE void r600_write_compute_context_reg(struct radeon_winsys_cs *cs, unsigned 
reg, unsigned value) { r600_write_compute_context_reg_seq(cs, reg, 1); radeon_emit(cs, value); } static INLINE void r600_write_context_reg_flag(struct radeon_winsys_cs *cs, unsigned reg, unsigned value, unsigned flag) { if (flag & RADEON_CP_PACKET3_COMPUTE_MODE) { r600_write_compute_context_reg(cs, reg, value); } else { r600_write_context_reg(cs, reg, value); } } static INLINE void r600_write_ctl_const(struct radeon_winsys_cs *cs, unsigned reg, unsigned value) { r600_write_ctl_const_seq(cs, reg, 1); radeon_emit(cs, value); } /* * common helpers */ static INLINE uint32_t S_FIXED(float value, uint32_t frac_bits) { return value * (1 << frac_bits); } #define ALIGN_DIVUP(x, y) (((x) + (y) - 1) / (y)) /* 12.4 fixed-point */ static INLINE unsigned r600_pack_float_12p4(float x) { return x <= 0 ? 0 : x >= 4096 ? 0xffff : x * 16; } /* Return if the depth format can be read without the DB->CB copy on r6xx-r7xx. */ static INLINE bool r600_can_read_depth(struct r600_texture *rtex) { return rtex->resource.b.b.nr_samples <= 1 && (rtex->resource.b.b.format == PIPE_FORMAT_Z16_UNORM || rtex->resource.b.b.format == PIPE_FORMAT_Z32_FLOAT); } #define V_028A6C_OUTPRIM_TYPE_POINTLIST 0 #define V_028A6C_OUTPRIM_TYPE_LINESTRIP 1 #define V_028A6C_OUTPRIM_TYPE_TRISTRIP 2 static INLINE unsigned r600_conv_prim_to_gs_out(unsigned mode) { static const int prim_conv[] = { V_028A6C_OUTPRIM_TYPE_POINTLIST, V_028A6C_OUTPRIM_TYPE_LINESTRIP, V_028A6C_OUTPRIM_TYPE_LINESTRIP, V_028A6C_OUTPRIM_TYPE_LINESTRIP, V_028A6C_OUTPRIM_TYPE_TRISTRIP, V_028A6C_OUTPRIM_TYPE_TRISTRIP, V_028A6C_OUTPRIM_TYPE_TRISTRIP, V_028A6C_OUTPRIM_TYPE_TRISTRIP, V_028A6C_OUTPRIM_TYPE_TRISTRIP, V_028A6C_OUTPRIM_TYPE_TRISTRIP, V_028A6C_OUTPRIM_TYPE_LINESTRIP, V_028A6C_OUTPRIM_TYPE_LINESTRIP, V_028A6C_OUTPRIM_TYPE_TRISTRIP, V_028A6C_OUTPRIM_TYPE_TRISTRIP, V_028A6C_OUTPRIM_TYPE_TRISTRIP }; assert(mode < Elements(prim_conv)); return prim_conv[mode]; } #endif
{ "content_hash": "648f3e0aedfe25ee2b7bfa211391e614", "timestamp": "", "source": "github", "line_count": 849, "max_line_length": 156, "avg_line_length": 32.352179034157835, "alnum_prop": 0.7120544653584301, "repo_name": "execunix/vinos", "id": "e277269cccb56c0d9965cbef9f27000912aa73eb", "size": "28685", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "xsrc/external/mit/MesaLib/dist/src/gallium/drivers/r600/r600_pipe.h", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
'use strict'; /** * Tests for the various proxies and instances once the project has initialized * its addons */ const expect = require('chai').expect; const FixturifyProject = require('../../helpers/fixturify-project'); describe('Unit | host-addons-utils', function () { let fixturifyProject; beforeEach(function () { fixturifyProject = new FixturifyProject('awesome-proj', '1.0.0'); fixturifyProject.addDevDependency('ember-cli', '*'); }); afterEach(function () { fixturifyProject.dispose(); }); it('multiple lazy engines in project, including nested lazy engines', function () { fixturifyProject.addEngine('lazy-engine-a', '1.0.0', { enableLazyLoading: true }); fixturifyProject.addAddon('addon-a', '1.0.0', { enableLazyLoading: true, callback: (addon) => { addon.addEngine('lazy-engine-b', '1.0.0', { enableLazyLoading: true, callback: (engine) => { engine.addReferenceDependency('lazy-engine-a'); engine.addEngine('lazy-engine-c', '1.0.0', { enableLazyLoading: true }); }, }); }, }); fixturifyProject.writeSync(); let project = fixturifyProject.buildProjectModel(); project.initializeAddons(); const app = {}; const lazyEngineA = project.addons.find((addon) => addon.name === 'lazy-engine-a'); lazyEngineA.app = app; const pkgInfoLazyEngineA = lazyEngineA._packageInfo; const addonA = project.addons.find((addon) => addon.name === 'addon-a'); addonA.app = app; const pkgInfoAddonA = addonA._packageInfo; let { hostPackageInfo, hostAndAncestorBundledPackageInfos } = project.hostInfoCache.getHostAddonInfo( pkgInfoLazyEngineA ); expect(hostPackageInfo).to.equal(project._packageInfo, 'host package-info for lazy-engine A is the project'); expect(project.hostInfoCache.findLCAHost(lazyEngineA)).to.equal(lazyEngineA.app, 'LCA host is the app'); expect(hostAndAncestorBundledPackageInfos).to.deep.equal( new Set([pkgInfoAddonA]), 'host packge-infos for lazy-engine A includes only addon-a' ); const lazyEngineB = project.addons .find((addon) => addon.name === 'addon-a') .addons.find((addon) => 
addon.name === 'lazy-engine-b'); const pkgInfoLazyEngineB = lazyEngineB._packageInfo; ({ hostPackageInfo, hostAndAncestorBundledPackageInfos } = project.hostInfoCache.getHostAddonInfo( pkgInfoLazyEngineB )); expect(hostPackageInfo).to.equal(project._packageInfo, 'host package-info for lazy-engine B is the project'); expect(project.hostInfoCache.findLCAHost(lazyEngineB)).to.equal(lazyEngineA.app, 'LCA host is the app'); expect(hostAndAncestorBundledPackageInfos).to.deep.equal( new Set([pkgInfoAddonA]), 'host packge-infos for lazy-engine B includes only addon-a' ); const lazyEngineC = project.addons .find((addon) => addon.name === 'addon-a') .addons.find((addon) => addon.name === 'lazy-engine-b') .addons.find((addon) => addon.name === 'lazy-engine-c'); const pkgInfoLazyEngineC = lazyEngineC._packageInfo; ({ hostPackageInfo, hostAndAncestorBundledPackageInfos } = project.hostInfoCache.getHostAddonInfo( pkgInfoLazyEngineC )); expect(hostPackageInfo).to.equal(pkgInfoLazyEngineB, 'host package-info for lazy-engine C is lazy engine B'); expect(project.hostInfoCache.findLCAHost(lazyEngineC)).to.equal( lazyEngineB, 'LCA host for lazy engine C is lazy engine B' ); expect(hostAndAncestorBundledPackageInfos).to.deep.equal( new Set([pkgInfoAddonA]), 'host packge-infos for lazy-engine C includes addon-a' ); }); it('multiple lazy engines in project, including nested lazy engines; some nested lazy engines have non-lazy deps', function () { fixturifyProject.addEngine('lazy-engine-a', '1.0.0', { enableLazyLoading: true }); fixturifyProject.addAddon('addon-a', '1.0.0', { enableLazyLoading: true, callback: (addon) => { addon.addEngine('lazy-engine-b', '1.0.0', { enableLazyLoading: true, callback: (engine) => { engine.addReferenceDependency('lazy-engine-a'); engine.addAddon('addon-b', '1.0.0'); engine.addEngine('lazy-engine-c', '1.0.0', { enableLazyLoading: true }); }, }); }, }); fixturifyProject.writeSync(); let project = fixturifyProject.buildProjectModel(); 
project.initializeAddons(); const pkgInfoAddonA = project.addons.find((addon) => addon.name === 'addon-a')._packageInfo; const pkgInfoLazyEngineB = project.addons .find((addon) => addon.name === 'addon-a') .addons.find((addon) => addon.name === 'lazy-engine-b')._packageInfo; const pkgInfoAddonB = project.addons .find((addon) => addon.name === 'addon-a') .addons.find((addon) => addon.name === 'lazy-engine-b') .addons.find((addon) => addon.name === 'addon-b')._packageInfo; const pkgInfoLazyEngineC = project.addons .find((addon) => addon.name === 'addon-a') .addons.find((addon) => addon.name === 'lazy-engine-b') .addons.find((addon) => addon.name === 'lazy-engine-c')._packageInfo; let { hostPackageInfo, hostAndAncestorBundledPackageInfos } = project.hostInfoCache.getHostAddonInfo( pkgInfoLazyEngineC ); expect(hostPackageInfo).to.equal(pkgInfoLazyEngineB, 'host package-info for lazy-engine C is lazy engine B'); expect(hostAndAncestorBundledPackageInfos).to.deep.equal( new Set([pkgInfoAddonA, pkgInfoAddonB]), 'host packge-infos for lazy-engine C includes addon-a, addon-b' ); }); it('multiple lazy engines at same level with a common ancestor host', function () { fixturifyProject.addInRepoEngine('lazy-engine-a', '1.0.0', { enableLazyLoading: true }); fixturifyProject.pkg['ember-addon'].paths = []; fixturifyProject.addInRepoEngine('lazy-engine-b', '1.0.0', { enableLazyLoading: true, callback: (engine) => { engine.pkg['ember-addon'].paths = ['../lazy-engine-a']; }, }); fixturifyProject.addInRepoEngine('lazy-engine-c', '1.0.0', { enableLazyLoading: true, callback: (engine) => { engine.pkg['ember-addon'].paths = ['../lazy-engine-a']; }, }); fixturifyProject.writeSync(); let project = fixturifyProject.buildProjectModel(); project.initializeAddons(); const pkgInfoLazyEngineA = project.addons .find((addon) => addon.name === 'lazy-engine-b') .addons.find((addon) => addon.name === 'lazy-engine-a')._packageInfo; let { hostPackageInfo, hostAndAncestorBundledPackageInfos } = 
project.hostInfoCache.getHostAddonInfo( pkgInfoLazyEngineA ); expect(hostPackageInfo).to.equal(project._packageInfo, 'host package-info for lazy-engine A is the project'); expect(hostAndAncestorBundledPackageInfos).to.deep.equal( new Set([]), 'host packge-infos for lazy-engine A has no non-lazy deps' ); }); });
{ "content_hash": "3fac0f52a502985da5a5d24019899230", "timestamp": "", "source": "github", "line_count": 192, "max_line_length": 130, "avg_line_length": 36.71875, "alnum_prop": 0.6614184397163121, "repo_name": "jrjohnson/ember-cli", "id": "99d8c1611662a57ca70aa9fe516e3fc98aa6c329", "size": "7050", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "tests/unit/models/host-info-cache-test.js", "mode": "33188", "license": "mit", "language": [ { "name": "Dockerfile", "bytes": "237" }, { "name": "HTML", "bytes": "2000" }, { "name": "Handlebars", "bytes": "254" }, { "name": "JavaScript", "bytes": "1593375" }, { "name": "PowerShell", "bytes": "604" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <project version="4"> <component name="EntryPointsManager"> <entry_points version="2.0" /> </component> <component name="NullableNotNullManager"> <option name="myDefaultNullable" value="android.support.annotation.Nullable" /> <option name="myDefaultNotNull" value="android.support.annotation.NonNull" /> <option name="myNullables"> <value> <list size="4"> <item index="0" class="java.lang.String" itemvalue="org.jetbrains.annotations.Nullable" /> <item index="1" class="java.lang.String" itemvalue="javax.annotation.Nullable" /> <item index="2" class="java.lang.String" itemvalue="edu.umd.cs.findbugs.annotations.Nullable" /> <item index="3" class="java.lang.String" itemvalue="android.support.annotation.Nullable" /> </list> </value> </option> <option name="myNotNulls"> <value> <list size="4"> <item index="0" class="java.lang.String" itemvalue="org.jetbrains.annotations.NotNull" /> <item index="1" class="java.lang.String" itemvalue="javax.annotation.Nonnull" /> <item index="2" class="java.lang.String" itemvalue="edu.umd.cs.findbugs.annotations.NonNull" /> <item index="3" class="java.lang.String" itemvalue="android.support.annotation.NonNull" /> </list> </value> </option> </component> <component name="ProjectInspectionProfilesVisibleTreeState"> <entry key="Project Default"> <profile-state> <expanded-state> <State> <id /> </State> <State> <id>Abstraction issuesJava</id> </State> <State> <id>Android Lint</id> </State> <State> <id>Groovy</id> </State> <State> <id>Internationalization issuesJava</id> </State> <State> <id>J2ME issuesJava</id> </State> <State> <id>JUnit issuesJava</id> </State> <State> <id>Java</id> </State> <State> <id>Javadoc issuesJava</id> </State> <State> <id>Memory issuesJava</id> </State> <State> <id>Performance issuesJava</id> </State> <State> <id>Probable bugsGroovy</id> </State> <State> <id>Probable bugsJava</id> </State> <State> <id>Security issuesJava</id> </State> <State> <id>Spelling</id> </State> <State> 
<id>Visibility issuesJava</id> </State> <State> <id>XML</id> </State> </expanded-state> </profile-state> </entry> </component> <component name="ProjectLevelVcsManager" settingsEditedManually="false"> <OptionsSetting value="true" id="Add" /> <OptionsSetting value="true" id="Remove" /> <OptionsSetting value="true" id="Checkout" /> <OptionsSetting value="true" id="Update" /> <OptionsSetting value="true" id="Status" /> <OptionsSetting value="true" id="Edit" /> <ConfirmationsSetting value="0" id="Add" /> <ConfirmationsSetting value="0" id="Remove" /> </component> <component name="ProjectRootManager" version="2" languageLevel="JDK_1_7" default="true" assert-keyword="true" jdk-15="true" project-jdk-name="1.8" project-jdk-type="JavaSDK"> <output url="file://$PROJECT_DIR$/build/classes" /> </component> <component name="ProjectType"> <option name="id" value="Android" /> </component> <component name="masterDetails"> <states> <state key="ScopeChooserConfigurable.UI"> <settings> <splitter-proportions> <option name="proportions"> <list> <option value="0.2" /> </list> </option> </splitter-proportions> </settings> </state> </states> </component> </project>
{ "content_hash": "79efa37f251e1e108bdb4b97e5f41cf2", "timestamp": "", "source": "github", "line_count": 120, "max_line_length": 176, "avg_line_length": 34.05833333333333, "alnum_prop": 0.5554196231954979, "repo_name": "5uperbug/CopyCat", "id": "64b8a6dead757b00551dd68e791aea2d0f07e172", "size": "4087", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clients/android/.idea/misc.xml", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "16789" }, { "name": "JavaScript", "bytes": "3045" } ], "symlink_target": "" }
<annotation> <folder>ring_binder</folder> <filename>ring_binder_1_view_1_pose_1_lit_1.jpg</filename> <source> <database>photorealistic rendering</database> </source> <size> <width>640</width> <height>480</height> <depth>3</depth> </size> <segmented>0</segmented> <object> <name>ring_binder</name> <pose>Unspecified</pose> <truncated>0</truncated> <difficult>0</difficult> <bndbox> <xmin>144</xmin> <ymin>160</ymin> <xmax>464</xmax> <ymax>352</ymax> </bndbox> </object> </annotation>
{ "content_hash": "58689c4b04f438aa77b2d271ed900f28", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 58, "avg_line_length": 19.32, "alnum_prop": 0.7163561076604554, "repo_name": "UMassLowell-Vision-Group/From-Virtual-to-Reality", "id": "9715e5ac06eed70386a4d5e75aebb22cb10a478a", "size": "483", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "virtual/Annotations/ring_binder_1_view_1_pose_1_lit_1.xml", "mode": "33261", "license": "bsd-2-clause", "language": [ { "name": "MAXScript", "bytes": "3874" }, { "name": "Matlab", "bytes": "5696" }, { "name": "Shell", "bytes": "576" } ], "symlink_target": "" }
package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.LimitTokenCountFilter; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.assistedinject.Assisted; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.Index; import org.elasticsearch.index.settings.IndexSettingsService; public class LimitTokenCountFilterFactory extends AbstractTokenFilterFactory { public static final int DEFAULT_MAX_TOKEN_COUNT = 1; public static final boolean DEFAULT_CONSUME_ALL_TOKENS = false; final int maxTokenCount; final boolean consumeAllTokens; @Inject public LimitTokenCountFilterFactory(Index index, IndexSettingsService indexSettingsService, Environment env, @Assisted String name, @Assisted Settings settings) { super(index, indexSettingsService.getSettings(), name, settings); this.maxTokenCount = settings.getAsInt("max_token_count", DEFAULT_MAX_TOKEN_COUNT); this.consumeAllTokens = settings.getAsBoolean("consume_all_tokens", DEFAULT_CONSUME_ALL_TOKENS); } @Override public TokenStream create(TokenStream tokenStream) { return new LimitTokenCountFilter(tokenStream, maxTokenCount, consumeAllTokens); } }
{ "content_hash": "be1bd6e029f75ce90e6a4d91670de588", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 166, "avg_line_length": 41.303030303030305, "alnum_prop": 0.7945707997065297, "repo_name": "Kamapcuc/elasticsearch", "id": "93d019636dc64ef2242e7dae047eac82f574c8c8", "size": "2151", "binary": false, "copies": "6", "ref": "refs/heads/2.3.4_patched", "path": "core/src/main/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactory.java", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
package com.javadeobfuscator.deobfuscator.iterablematcher; import com.javadeobfuscator.deobfuscator.utils.Utils; import org.objectweb.asm.Type; import org.objectweb.asm.tree.AbstractInsnNode; import org.objectweb.asm.tree.LdcInsnNode; public class NoSideEffectLoad1SlotStep extends IterableStep<AbstractInsnNode> { private final boolean alsoMatchStackLoad; public NoSideEffectLoad1SlotStep(boolean alsoMatchStackLoad) { this.alsoMatchStackLoad = alsoMatchStackLoad; } @Override public boolean tryMatch(AbstractInsnNode ain) { switch (ain.getOpcode()) { case ACONST_NULL: case ICONST_M1: case ICONST_0: case ICONST_1: case ICONST_2: case ICONST_3: case ICONST_4: case ICONST_5: case FCONST_0: case FCONST_1: case FCONST_2: case BIPUSH: case SIPUSH: return true; case ILOAD: case FLOAD: case ALOAD: return alsoMatchStackLoad; case LDC: Object obj = ((LdcInsnNode) ain).cst; return !(obj instanceof Double || obj instanceof Long || obj instanceof Type); default: return false; } } @Override public String toString() { return "NoSideEffectLoad1SlotStep{" + "captured=" + Utils.prettyprint(this.getCaptured()) + '}'; } }
{ "content_hash": "659684003b4963d3a76562490a2addc5", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 94, "avg_line_length": 29.725490196078432, "alnum_prop": 0.5844327176781002, "repo_name": "java-deobfuscator/deobfuscator", "id": "59a60d664b31c893ab7605412e2f5f74157b4972", "size": "1516", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/javadeobfuscator/deobfuscator/iterablematcher/NoSideEffectLoad1SlotStep.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Jasmin", "bytes": "41372" }, { "name": "Java", "bytes": "2032443" } ], "symlink_target": "" }
"""Support for Tile device trackers.""" import logging from homeassistant.components.device_tracker.config_entry import TrackerEntity from homeassistant.components.device_tracker.const import SOURCE_TYPE_GPS from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ATTR_ATTRIBUTION, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import DATA_COORDINATOR, DATA_TILE, DOMAIN _LOGGER = logging.getLogger(__name__) ATTR_ALTITUDE = "altitude" ATTR_CONNECTION_STATE = "connection_state" ATTR_IS_DEAD = "is_dead" ATTR_IS_LOST = "is_lost" ATTR_LAST_LOST_TIMESTAMP = "last_lost_timestamp" ATTR_RING_STATE = "ring_state" ATTR_TILE_NAME = "tile_name" ATTR_VOIP_STATE = "voip_state" DEFAULT_ATTRIBUTION = "Data provided by Tile" DEFAULT_ICON = "mdi:view-grid" async def async_setup_entry(hass, entry, async_add_entities): """Set up Tile device trackers.""" async_add_entities( [ TileDeviceTracker( hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id][tile_uuid], tile ) for tile_uuid, tile in hass.data[DOMAIN][DATA_TILE][entry.entry_id].items() ] ) async def async_setup_scanner(hass, config, async_see, discovery_info=None): """Detect a legacy configuration and import it.""" hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data={ CONF_USERNAME: config[CONF_USERNAME], CONF_PASSWORD: config[CONF_PASSWORD], }, ) ) _LOGGER.info( "Your Tile configuration has been imported into the UI; " "please remove it from configuration.yaml" ) return True class TileDeviceTracker(CoordinatorEntity, TrackerEntity): """Representation of a network infrastructure device.""" def __init__(self, coordinator, tile): """Initialize.""" super().__init__(coordinator) self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION} self._tile = tile @property def available(self): """Return if entity is available.""" return 
self.coordinator.last_update_success and not self._tile.dead @property def battery_level(self): """Return the battery level of the device. Percentage from 0-100. """ return None @property def device_state_attributes(self): """Return the device state attributes.""" return self._attrs @property def icon(self): """Return the icon.""" return DEFAULT_ICON @property def location_accuracy(self): """Return the location accuracy of the device. Value in meters. """ return self._tile.accuracy @property def latitude(self) -> float: """Return latitude value of the device.""" return self._tile.latitude @property def longitude(self) -> float: """Return longitude value of the device.""" return self._tile.longitude @property def name(self): """Return the name.""" return self._tile.name @property def unique_id(self): """Return the unique ID of the entity.""" return f"tile_{self._tile.uuid}" @property def source_type(self): """Return the source type, eg gps or router, of the device.""" return SOURCE_TYPE_GPS @callback def _handle_coordinator_update(self): """Respond to a DataUpdateCoordinator update.""" self._update_from_latest_data() self.async_write_ha_state() @callback def _update_from_latest_data(self): """Update the entity from the latest data.""" self._attrs.update( { ATTR_ALTITUDE: self._tile.altitude, ATTR_IS_LOST: self._tile.lost, ATTR_LAST_LOST_TIMESTAMP: self._tile.lost_timestamp, ATTR_RING_STATE: self._tile.ring_state, ATTR_VOIP_STATE: self._tile.voip_state, } ) async def async_added_to_hass(self): """Handle entity which will be added.""" await super().async_added_to_hass() self._update_from_latest_data()
{ "content_hash": "9dabd4226b3faafa8d638e77e8866301", "timestamp": "", "source": "github", "line_count": 148, "max_line_length": 87, "avg_line_length": 29.37162162162162, "alnum_prop": 0.6252587991718427, "repo_name": "partofthething/home-assistant", "id": "f7cc4e1736e6cd23ef7b81e77a1b91a5837d063a", "size": "4347", "binary": false, "copies": "2", "ref": "refs/heads/dev", "path": "homeassistant/components/tile/device_tracker.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "1720" }, { "name": "Python", "bytes": "31051838" }, { "name": "Shell", "bytes": "4832" } ], "symlink_target": "" }
package diskscollector import ( "errors" "fmt" "regexp" "testing" "time" "github.com/kyma-project/test-infra/development/tools/pkg/diskscollector/automock" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" compute "google.golang.org/api/compute/v1" ) const regexPattern = "^gke-gkeint.*[-]pvc[-]" const sampleDiskName = "gke-gkeint-abc-pvc-xyz" var ( diskNameRegex = regexp.MustCompile(regexPattern) filterFunc = NewDiskFilter(diskNameRegex, 1) //age is 1 hour timeNow = time.Now() timeNowFormatted = timeNow.Format(time.RFC3339Nano) timeTwoHoursAgo = timeNow.Add(time.Duration(-1) * time.Hour) timeTwoHoursAgoFormatted = timeTwoHoursAgo.Format(time.RFC3339Nano) nonEmptyUsers = []string{"someUser"} emptyUsers = []string{} ) func TestNewDiskFilter(t *testing.T) { //given var testCases = []struct { name string expectedFilterValue bool diskName string diskCreationTimestamp string diskUsers []string }{ {name: "Should filter matching disk", expectedFilterValue: true, diskName: sampleDiskName, diskCreationTimestamp: timeTwoHoursAgoFormatted, diskUsers: emptyUsers}, {name: "Should skip disk with non matching name", expectedFilterValue: false, diskName: "otherName", diskCreationTimestamp: timeTwoHoursAgoFormatted, diskUsers: emptyUsers}, {name: "Should skip disk recently created", expectedFilterValue: false, diskName: sampleDiskName, diskCreationTimestamp: timeNowFormatted, diskUsers: emptyUsers}, {name: "Should skip disk with users", expectedFilterValue: false, diskName: sampleDiskName, diskCreationTimestamp: timeTwoHoursAgoFormatted, diskUsers: nonEmptyUsers}, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { //when disk := createDisk(testCase.diskName, testCase.diskCreationTimestamp, testCase.diskUsers) collected, err := filterFunc(disk) //then require.NoError(t, err) assert.Equal(t, testCase.expectedFilterValue, collected) }) } t.Run("Should return error on invalid timestamp", func(t *testing.T) { //given badTimestamp := 
"@@@" disk := compute.Disk{ CreationTimestamp: badTimestamp, } _, err := filterFunc(&disk) assert.Contains(t, err.Error(), fmt.Sprintf("parsing time \"%s\" as", badTimestamp)) }) } func TestGarbageDiskCollector(t *testing.T) { diskMatching1 := createDisk(sampleDiskName+"1", timeTwoHoursAgoFormatted, []string{}) //matches removal filter diskNonMatchingName := createDisk("otherName"+"2", timeTwoHoursAgoFormatted, []string{}) //non matching name diskHasUsers := createDisk(sampleDiskName+"3", timeTwoHoursAgoFormatted, nonEmptyUsers) //still has users diskCreatedTooRecently := createDisk(sampleDiskName+"4", timeNowFormatted, []string{}) //not old enough diskMatching2 := createDisk(sampleDiskName+"5", timeTwoHoursAgoFormatted, []string{}) //matches removal filter t.Run("list() should find disks to remove", func(t *testing.T) { mockZoneAPI := &automock.ZoneAPI{} defer mockZoneAPI.AssertExpectations(t) mockDiskAPI := &automock.DiskAPI{} defer mockDiskAPI.AssertExpectations(t) testProject := "testProject" mockZoneAPI.On("ListZones", testProject).Return([]string{"a", "b", "c"}, nil) mockDiskAPI.On("ListDisks", testProject, "a").Return([]*compute.Disk{diskMatching1}, nil) mockDiskAPI.On("ListDisks", testProject, "b").Return([]*compute.Disk{diskNonMatchingName, diskHasUsers, diskCreatedTooRecently}, nil) mockDiskAPI.On("ListDisks", testProject, "c").Return([]*compute.Disk{diskMatching2}, nil) gdc := NewDisksGarbageCollector(mockZoneAPI, mockDiskAPI, filterFunc) res, err := gdc.list(testProject) require.NoError(t, err) assert.Len(t, res, 2) assert.Equal(t, "a", res[0].zone) assert.Equal(t, diskMatching1, res[0].disk) assert.Equal(t, "c", res[1].zone) assert.Equal(t, diskMatching2, res[1].disk) }) t.Run("list() should collect disks even when some zones fail", func(t *testing.T) { mockZoneAPI := &automock.ZoneAPI{} defer mockZoneAPI.AssertExpectations(t) mockDiskAPI := &automock.DiskAPI{} defer mockDiskAPI.AssertExpectations(t) testProject := "testProject" 
mockZoneAPI.On("ListZones", testProject).Return([]string{"a", "b", "c", "d"}, nil) mockDiskAPI.On("ListDisks", testProject, "a").Return(nil, errors.New("No such zone")) mockDiskAPI.On("ListDisks", testProject, "b").Return([]*compute.Disk{diskMatching2, diskCreatedTooRecently, diskHasUsers}, nil) mockDiskAPI.On("ListDisks", testProject, "c").Return(nil, errors.New("No such zone")) mockDiskAPI.On("ListDisks", testProject, "d").Return([]*compute.Disk{diskNonMatchingName, diskMatching1}, nil) gdc := NewDisksGarbageCollector(mockZoneAPI, mockDiskAPI, filterFunc) res, err := gdc.list(testProject) require.NoError(t, err) assert.Len(t, res, 2) assert.Equal(t, "b", res[0].zone) assert.Equal(t, diskMatching2, res[0].disk) assert.Equal(t, "d", res[1].zone) assert.Equal(t, diskMatching1, res[1].disk) }) t.Run("Run(makeChanges=true) should remove matching disks", func(t *testing.T) { mockZoneAPI := &automock.ZoneAPI{} defer mockZoneAPI.AssertExpectations(t) mockDiskAPI := &automock.DiskAPI{} defer mockDiskAPI.AssertExpectations(t) testProject := "testProject" mockZoneAPI.On("ListZones", testProject).Return([]string{"a", "b"}, nil) mockDiskAPI.On("ListDisks", testProject, "a").Return([]*compute.Disk{diskMatching1, diskNonMatchingName, diskHasUsers}, nil) mockDiskAPI.On("ListDisks", testProject, "b").Return([]*compute.Disk{diskCreatedTooRecently, diskMatching2}, nil) mockDiskAPI.On("RemoveDisk", testProject, "a", diskMatching1.Name).Return(nil) mockDiskAPI.On("RemoveDisk", testProject, "b", diskMatching2.Name).Return(nil) gdc := NewDisksGarbageCollector(mockZoneAPI, mockDiskAPI, filterFunc) allSucceeded, err := gdc.Run(testProject, true) require.NoError(t, err) assert.True(t, allSucceeded) }) t.Run("Run(makeChanges=true) should continue process if a call fails", func(t *testing.T) { mockZoneAPI := &automock.ZoneAPI{} defer mockZoneAPI.AssertExpectations(t) mockDiskAPI := &automock.DiskAPI{} defer mockDiskAPI.AssertExpectations(t) testProject := "testProject" 
mockZoneAPI.On("ListZones", testProject).Return([]string{"a", "b"}, nil) mockDiskAPI.On("ListDisks", testProject, "a").Return([]*compute.Disk{diskMatching1, diskNonMatchingName, diskHasUsers}, nil) mockDiskAPI.On("ListDisks", testProject, "b").Return([]*compute.Disk{diskCreatedTooRecently, diskMatching2}, nil) mockDiskAPI.On("RemoveDisk", testProject, "a", diskMatching1.Name).Return(errors.New("test error")) //Called first, returns error mockDiskAPI.On("RemoveDisk", testProject, "b", diskMatching2.Name).Return(nil) //Called although the previous call failed gdc := NewDisksGarbageCollector(mockZoneAPI, mockDiskAPI, filterFunc) allSucceeded, err := gdc.Run(testProject, true) require.NoError(t, err) assert.False(t, allSucceeded) }) t.Run("Run(makeChanges=false) should not remove anything (dry run)", func(t *testing.T) { mockZoneAPI := &automock.ZoneAPI{} defer mockZoneAPI.AssertExpectations(t) mockDiskAPI := &automock.DiskAPI{} defer mockDiskAPI.AssertExpectations(t) testProject := "testProject" mockZoneAPI.On("ListZones", testProject).Return([]string{"a", "b"}, nil) mockDiskAPI.On("ListDisks", testProject, "a").Return([]*compute.Disk{diskMatching1, diskNonMatchingName, diskHasUsers}, nil) mockDiskAPI.On("ListDisks", testProject, "b").Return([]*compute.Disk{diskCreatedTooRecently, diskMatching2}, nil) gdc := NewDisksGarbageCollector(mockZoneAPI, mockDiskAPI, filterFunc) allSucceeded, err := gdc.Run(testProject, false) require.NoError(t, err) assert.True(t, allSucceeded) }) } func createDisk(name, creationTimestamp string, users []string) *compute.Disk { return &compute.Disk{ Name: name, CreationTimestamp: creationTimestamp, Users: users, } }
{ "content_hash": "6122cbf2ee6bf503202d43a1c3efb9a9", "timestamp": "", "source": "github", "line_count": 218, "max_line_length": 144, "avg_line_length": 37.87614678899082, "alnum_prop": 0.7086108756206855, "repo_name": "kyma-project/test-infra", "id": "6b0e55c7cae7452c73a76acb9c206fd049e3b973", "size": "8257", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "development/tools/pkg/diskscollector/collector_test.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "29768" }, { "name": "Go", "bytes": "1320295" }, { "name": "Jsonnet", "bytes": "86522" }, { "name": "Makefile", "bytes": "21421" }, { "name": "Mustache", "bytes": "1403" }, { "name": "Python", "bytes": "19305" }, { "name": "Shell", "bytes": "592845" } ], "symlink_target": "" }
package org.apache.flex.compiler.internal.tree.mxml;

import java.io.Reader;
import java.util.Collection;

import org.apache.flex.compiler.internal.parsing.ISourceFragment;
import org.apache.flex.compiler.internal.parsing.SourceFragment;
import org.apache.flex.compiler.internal.parsing.SourceFragmentsReader;
import org.apache.flex.compiler.internal.parsing.as.ASParser;
import org.apache.flex.compiler.internal.parsing.as.OffsetLookup;
import org.apache.flex.compiler.internal.scopes.MXMLFileScope;
import org.apache.flex.compiler.internal.tree.as.ExpressionNodeBase;
import org.apache.flex.compiler.internal.tree.as.NodeBase;
import org.apache.flex.compiler.internal.workspaces.Workspace;
import org.apache.flex.compiler.mxml.IMXMLTagAttributeData;
import org.apache.flex.compiler.problems.ICompilerProblem;
import org.apache.flex.compiler.tree.ASTNodeID;
import org.apache.flex.compiler.tree.as.IASNode;
import org.apache.flex.compiler.tree.as.IExpressionNode;
import org.apache.flex.compiler.tree.mxml.IMXMLBindingAttributeNode;

/**
 * Implementation of the {@code IMXMLBindingAttributeNode} interface.
 * <p>
 * Represents a {@code source} or {@code destination} attribute of an MXML
 * {@code <Binding>} tag; its single child is the parsed databinding
 * expression.
 */
class MXMLBindingAttributeNode extends MXMLNodeBase implements IMXMLBindingAttributeNode
{
    /**
     * Constructor
     * 
     * @param parent The parent node of this node, or <code>null</code> if there
     * is no parent.
     */
    MXMLBindingAttributeNode(NodeBase parent)
    {
        super(parent);
    }

    /**
     * Constructor
     * 
     * @param parent The parent node of this node
     * @param expr The IExpressionNode for this BindingAttributeNode
     */
    MXMLBindingAttributeNode(NodeBase parent, IExpressionNode expr)
    {
        super(parent);
        this.expressionNode = expr;
    }

    // Attribute name: "source" or "destination".
    private String attributeName;

    // Parsed databinding expression; the node's only child (may be null).
    private IExpressionNode expressionNode;

    @Override
    public ASTNodeID getNodeID()
    {
        return ASTNodeID.MXMLBindingAttributeID;
    }

    @Override
    public String getName()
    {
        return attributeName;
    }

    @Override
    public int getChildCount()
    {
        // The expression node, when present, is the only child.
        return expressionNode != null ? 1 : 0;
    }

    @Override
    public IASNode getChild(int i)
    {
        return i == 0 ? expressionNode : null;
    }

    @Override
    public IExpressionNode getExpressionNode()
    {
        return expressionNode;
    }

    /**
     * Initializes this node from an MXML tag attribute: records the attribute
     * name, copies its source location, and parses its value into an
     * expression child.
     *
     * @param builder The {@link MXMLTreeBuilder} driving tree construction.
     * @param attribute The {@code source} or {@code destination} attribute.
     */
    public void initializeFromAttribute(MXMLTreeBuilder builder,
                                        IMXMLTagAttributeData attribute)
    {
        attributeName = attribute.getName();
        setLocation(attribute);
        expressionNode = createExpressionNode(builder, attribute);

        // ASParser creates the ExpressionNodeBase as a child of a FileNode.
        // Make it a child of the this MXMLBindingAttributeNode.
        if (expressionNode != null)
            ((ExpressionNodeBase)expressionNode).setParent(this);
    }

    /**
     * Parses the attribute's value fragments into a databinding expression.
     * Fragment offsets are rebased from local to absolute before parsing so
     * the resulting nodes carry correct positions.
     *
     * @return The parsed expression node, or <code>null</code> if parsing
     * produced no expression. Parse problems are reported via the builder.
     */
    private IExpressionNode createExpressionNode(MXMLTreeBuilder builder,
                                                 IMXMLTagAttributeData attribute)
    {
        Workspace workspace = builder.getWorkspace();
        Collection<ICompilerProblem> problems = builder.getProblems();

        ISourceFragment[] fragments = attribute.getValueFragments(problems);

        // Adjust fragment offsets from local to absolute,
        // to take into account include files, source attributes, etc.
        final MXMLFileScope fileScope = builder.getFileScope();
        final OffsetLookup offsetLookup = fileScope.getOffsetLookup();
        assert offsetLookup != null : "Expected OffsetLookup on FileScope.";
        for (ISourceFragment fragment : fragments)
        {
            int physicalStart = fragment.getPhysicalStart();
            final int[] absoluteOffsets =
                offsetLookup.getAbsoluteOffset(attribute.getSourcePath(), physicalStart);
            // NOTE(review): only the first absolute offset is used here —
            // presumably one offset per include occurrence; confirm.
            ((SourceFragment)fragment).setPhysicalStart(absoluteOffsets[0]);
        }

        // Parse the fragments inside the databinding expression.
        Reader reader = new SourceFragmentsReader(attribute.getSourcePath(), fragments);
        return ASParser.parseDataBinding(workspace, reader, problems);
    }

    /**
     * For debugging only. Builds a string such as <code>"source"</code> from
     * the attribute name.
     */
    @Override
    protected boolean buildInnerString(StringBuilder sb)
    {
        sb.append('"');
        sb.append(getName());
        sb.append('"');

        return true;
    }
}
{ "content_hash": "848288a6d948abbea8075776f383030e", "timestamp": "", "source": "github", "line_count": 138, "max_line_length": 115, "avg_line_length": 32.70289855072464, "alnum_prop": 0.6897850653667184, "repo_name": "adufilie/flex-falcon", "id": "91055214083bd4f6500e977d1ac521cc33604333", "size": "5332", "binary": false, "copies": "2", "ref": "refs/heads/develop", "path": "compiler/src/org/apache/flex/compiler/internal/tree/mxml/MXMLBindingAttributeNode.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ActionScript", "bytes": "310711" }, { "name": "Batchfile", "bytes": "13714" }, { "name": "CSS", "bytes": "102901" }, { "name": "GAP", "bytes": "131269" }, { "name": "HTML", "bytes": "102114" }, { "name": "Java", "bytes": "16260432" }, { "name": "JavaScript", "bytes": "12551351" }, { "name": "Lex", "bytes": "37784" }, { "name": "Shell", "bytes": "21262" } ], "symlink_target": "" }