text
stringlengths
2
14k
meta
dict
using System;

namespace GoodAI.BasicNodes.DyBM
{
    /// <summary>
    /// Generates normally distributed random numbers using the Box-Muller transform.
    /// </summary>
    public class MyRandom
    {
        // Shared generator. NOTE(review): System.Random is not thread-safe; callers
        // must serialize access if this is used from multiple threads.
        static Random rand = new Random();

        /// <summary>
        /// Returns one random sample drawn from the normal distribution N(mean, stdDev^2).
        /// </summary>
        /// <param name="mean">Mean of the distribution.</param>
        /// <param name="stdDev">Standard deviation of the distribution.</param>
        /// <returns>A normally distributed random double.</returns>
        public double NextDouble(float mean, float stdDev)
        {
            // Random.NextDouble() returns a value in [0, 1). Map u1 into (0, 1]
            // so that Math.Log(u1) below can never be called with 0 (which would
            // yield -Infinity and propagate NaN/Infinity into the result).
            // The original code also cast both uniforms to float, needlessly
            // discarding precision; the casts are removed.
            double u1 = 1.0 - rand.NextDouble();
            double u2 = rand.NextDouble();

            // Box-Muller transform: standard normal N(0, 1) sample.
            double randStdNormal = Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Sin(2.0 * Math.PI * u2);

            // Scale and shift to N(mean, stdDev^2).
            return mean + stdDev * randStdNormal;
        }
    }
}
{ "pile_set_name": "Github" }
// Package gen generates Aptomi policy objects (clusters) as YAML.
package gen

import (
	"fmt"
	"github.com/Aptomi/aptomi/pkg/config"
	"github.com/Aptomi/aptomi/pkg/lang"
	"github.com/Aptomi/aptomi/pkg/plugin/k8s"
	log "github.com/sirupsen/logrus"
	"github.com/spf13/cobra"
	"gopkg.in/yaml.v2"
	"k8s.io/client-go/tools/clientcmd"
	"k8s.io/client-go/tools/clientcmd/api"
)

// newClusterCommand builds the "gen cluster" cobra command. It constructs a
// lang.Cluster object (either for a local kubernetes or from a kubeconfig
// context), marshals it to YAML and prints it to stdout.
func newClusterCommand(_ *config.Client) *cobra.Command {
	var sourceContext, clusterName, defaultNamespace string
	var local bool
	cmd := &cobra.Command{
		Use:   "cluster",
		Short: "gen cluster",
		Long:  "gen cluster long",
		Run: func(cmd *cobra.Command, args []string) {
			// Exactly one of --local / --context must be given.
			if !local && len(sourceContext) == 0 {
				log.Fatalf("context should be specified")
			}
			if local && len(sourceContext) > 0 {
				log.Fatalf("one of local or context could be specified")
			}
			var clusterConfig *k8s.ClusterConfig
			var err error
			if local {
				// Local cluster: no kubeconfig needed; name defaults to "local".
				if len(clusterName) == 0 {
					clusterName = "local"
				}
				clusterConfig = &k8s.ClusterConfig{Local: true, DefaultNamespace: "default"}
			} else {
				// Kubeconfig-based cluster: name defaults to the context name.
				if len(clusterName) == 0 {
					clusterName = sourceContext
				}
				clusterConfig, err = handleKubeConfigCluster(sourceContext)
			}
			if err != nil {
				panic(err)
			}
			// --default-namespace, if set, overrides the config's default.
			if len(defaultNamespace) > 0 {
				clusterConfig.DefaultNamespace = defaultNamespace
			}
			cluster := lang.Cluster{
				TypeKind: lang.TypeCluster.GetTypeKind(),
				Metadata: lang.Metadata{
					Name:      clusterName,
					Namespace: "system",
				},
				Type:   "kubernetes",
				Config: clusterConfig,
			}
			log.Infof("Generating cluster: %s", clusterName)
			data, err := yaml.Marshal(cluster)
			if err != nil {
				panic(fmt.Sprintf("error while marshaling generated cluster: %s", err))
			}
			fmt.Println(string(data))
		},
	}
	cmd.Flags().BoolVarP(&local, "local", "l", false, "Build Aptomi cluster with local kubernetes")
	cmd.Flags().StringVarP(&sourceContext, "context", "c", "", "Context in kubeconfig to be used for Aptomi cluster creation (run 'kubectl config get-contexts' to get list of available contexts and clusters")
	cmd.Flags().StringVarP(&defaultNamespace, "default-namespace", "N", "", "Set default k8s namespace for all deployments into this cluster")
	cmd.Flags().StringVarP(&clusterName, "name", "n", "", "Name of the Aptomi cluster to create")
	return cmd
}

// handleKubeConfigCluster builds a ClusterConfig that embeds a minimal
// kubeconfig restricted to the requested context.
func handleKubeConfigCluster(sourceContext string) (*k8s.ClusterConfig, error) {
	kubeConfig, err := buildTempKubeConfigWith(sourceContext)
	if err != nil {
		return nil, fmt.Errorf("error while building temp kube config with context %s: %s", sourceContext, err)
	}
	clusterConfig := &k8s.ClusterConfig{
		KubeConfig: kubeConfig,
	}
	// err is nil here; returned for symmetry with the error path above.
	return clusterConfig, err
}

// buildTempKubeConfigWith extracts the given context (plus its cluster and
// auth info) from the user's kubeconfig into a fresh api.Config, serializes
// it, and returns it re-parsed as a generic YAML value (*interface{}).
func buildTempKubeConfigWith(sourceContext string) (*interface{}, error) {
	rawConf, err := getKubeConfig()
	if err != nil {
		return nil, err
	}
	newConfig := api.NewConfig()
	newConfig.CurrentContext = sourceContext
	context, exist := rawConf.Contexts[sourceContext]
	if !exist {
		return nil, fmt.Errorf("requested context not found: %s", sourceContext)
	}
	newConfig.Contexts[sourceContext] = context
	// Copy over the cluster and auth info the context points at; both must exist.
	if newConfig.Clusters[context.Cluster], exist = rawConf.Clusters[context.Cluster]; !exist {
		return nil, fmt.Errorf("requested cluster (from specified context) not found: %s", context.Cluster)
	}
	if newConfig.AuthInfos[context.AuthInfo], exist = rawConf.AuthInfos[context.AuthInfo]; !exist {
		return nil, fmt.Errorf("requested auth info (user from specified context) not found: %s", context.AuthInfo)
	}
	// Round-trip through kubeconfig YAML to obtain an untyped representation.
	content, err := clientcmd.Write(*newConfig)
	if err != nil {
		return nil, fmt.Errorf("error while marshaling temp kubeconfig: %s", err)
	}
	kubeConfig := new(interface{})
	err = yaml.Unmarshal(content, kubeConfig)
	if err != nil {
		return nil, fmt.Errorf("error while unmarshaling temp kubeconfig: %s", err)
	}
	// err is nil here.
	return kubeConfig, err
}

// getKubeConfig loads the user's raw kubeconfig via the default client-go
// loading rules (KUBECONFIG env var, ~/.kube/config, ...).
func getKubeConfig() (*api.Config, error) {
	rules := clientcmd.NewDefaultClientConfigLoadingRules()
	overrides := &clientcmd.ConfigOverrides{}
	conf := clientcmd.NewNonInteractiveDeferredLoadingClientConfig(rules, overrides)
	rawConf, err := conf.RawConfig()
	if err != nil {
		return nil, fmt.Errorf("error while getting raw kube config: %s", err)
	}
	return &rawConf, err
}
{ "pile_set_name": "Github" }
package healthcheck

import (
	"bytes"
	"testing"

	"github.com/mitchellh/cli"
	"github.com/spiffe/go-spiffe/v2/proto/spiffe/workload"
	common_cli "github.com/spiffe/spire/pkg/common/cli"
	"github.com/spiffe/spire/test/fakes/fakeworkloadapi"
	"github.com/stretchr/testify/suite"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// TestHealthCheck is the testify entry point for HealthCheckSuite.
func TestHealthCheck(t *testing.T) {
	suite.Run(t, new(HealthCheckSuite))
}

// HealthCheckSuite exercises the agent "health" CLI command against captured
// stdin/stdout/stderr buffers and a fake Workload API.
type HealthCheckSuite struct {
	suite.Suite

	stdin  *bytes.Buffer
	stdout *bytes.Buffer
	stderr *bytes.Buffer

	cmd cli.Command
}

// SetupTest creates a fresh command with fresh I/O buffers before each test.
func (s *HealthCheckSuite) SetupTest() {
	s.stdin = new(bytes.Buffer)
	s.stdout = new(bytes.Buffer)
	s.stderr = new(bytes.Buffer)
	s.cmd = newHealthCheckCommand(&common_cli.Env{
		Stdin:  s.stdin,
		Stdout: s.stdout,
		Stderr: s.stderr,
	})
}

func (s *HealthCheckSuite) TestSynopsis() {
	s.Equal("Determines agent health status", s.cmd.Synopsis())
}

// TestHelp checks that Help() returns empty and prints usage to stderr.
// NOTE: the raw-string expected output below is preserved byte-for-byte.
func (s *HealthCheckSuite) TestHelp() {
	s.Equal("", s.cmd.Help())
	s.Equal(`Usage of health: -shallow Perform a less stringent health check -socketPath string Path to Workload API socket (default "/tmp/agent.sock") -verbose Print verbose information `, s.stderr.String(), "stderr")
}

// TestBadFlags verifies an unknown flag yields a non-zero exit and usage text.
func (s *HealthCheckSuite) TestBadFlags() {
	code := s.cmd.Run([]string{"-badflag"})
	s.NotEqual(0, code, "exit code")
	s.Equal("", s.stdout.String(), "stdout")
	s.Equal(`flag provided but not defined: -badflag Usage of health: -shallow Perform a less stringent health check -socketPath string Path to Workload API socket (default "/tmp/agent.sock") -verbose Print verbose information `, s.stderr.String(), "stderr")
}

// TestFailsOnUnavailable: a missing socket reports the agent as unavailable.
func (s *HealthCheckSuite) TestFailsOnUnavailable() {
	code := s.cmd.Run([]string{"--socketPath", "doesnotexist.sock"})
	s.NotEqual(0, code, "exit code")
	s.Equal("", s.stdout.String(), "stdout")
	s.Equal("Agent is unavailable.\n", s.stderr.String(), "stderr")
}

// TestFailsOnUnavailableVerbose: same as above, but --verbose also prints the
// underlying gRPC dial error to stdout.
func (s *HealthCheckSuite) TestFailsOnUnavailableVerbose() {
	code := s.cmd.Run([]string{"--socketPath", "doesnotexist.sock", "--verbose"})
	s.NotEqual(0, code, "exit code")
	s.Equal(`Contacting Workload API... Workload API returned rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing dial unix doesnotexist.sock: connect: no such file or directory" `, s.stdout.String(), "stdout")
	s.Equal("Agent is unavailable.\n", s.stderr.String(), "stderr")
}

// TestSucceedsOnPermissionDenied: PermissionDenied from the Workload API still
// counts as healthy (the agent answered).
func (s *HealthCheckSuite) TestSucceedsOnPermissionDenied() {
	w := s.makeFailedWorkloadAPI(status.Error(codes.PermissionDenied, "permission denied"))
	code := s.cmd.Run([]string{"--socketPath", w.Addr().Name})
	s.Equal(0, code, "exit code")
	s.Equal("Agent is healthy.\n", s.stdout.String(), "stdout")
	s.Equal("", s.stderr.String(), "stderr")
}

// TestSucceedsOnUnknown: an Unknown error code also counts as healthy.
func (s *HealthCheckSuite) TestSucceedsOnUnknown() {
	w := s.makeFailedWorkloadAPI(status.Error(codes.Unknown, "unknown"))
	code := s.cmd.Run([]string{"--socketPath", w.Addr().Name})
	s.Equal(0, code, "exit code")
	s.Equal("Agent is healthy.\n", s.stdout.String(), "stdout")
	s.Equal("", s.stderr.String(), "stderr")
}

func (s *HealthCheckSuite) TestSucceedsOnGoodResponse() {
	w := s.makeGoodWorkloadAPI()
	code := s.cmd.Run([]string{"--socketPath", w.Addr().Name})
	s.Equal(0, code, "exit code")
	s.Equal("Agent is healthy.\n", s.stdout.String(), "stdout")
	s.Equal("", s.stderr.String(), "stderr")
}

func (s *HealthCheckSuite) TestSucceedsOnGoodResponseVerbose() {
	w := s.makeGoodWorkloadAPI()
	code := s.cmd.Run([]string{"--socketPath", w.Addr().Name, "--verbose"})
	s.Equal(0, code, "exit code")
	s.Equal(`Contacting Workload API... SVID received over Workload API. Agent is healthy. `, s.stdout.String(), "stdout")
	s.Equal("", s.stderr.String(), "stderr")
}

// makeFailedWorkloadAPI builds a fake Workload API whose first FetchX509SVID
// call fails with the given error.
func (s *HealthCheckSuite) makeFailedWorkloadAPI(err error) *fakeworkloadapi.WorkloadAPI {
	return fakeworkloadapi.New(s.T(), fakeworkloadapi.FetchX509SVIDErrorOnce(err))
}

// makeGoodWorkloadAPI builds a fake Workload API that returns an (empty) SVID
// response successfully.
func (s *HealthCheckSuite) makeGoodWorkloadAPI() *fakeworkloadapi.WorkloadAPI {
	return fakeworkloadapi.New(s.T(), fakeworkloadapi.FetchX509SVIDResponses(&workload.X509SVIDResponse{}))
}
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?>
<!-- Application configuration: runs on CLR v4.0, targeting .NET Framework 4.5.2. -->
<configuration>
  <startup>
    <supportedRuntime version="v4.0" sku=".NETFramework,Version=v4.5.2" />
  </startup>
</configuration>
{ "pile_set_name": "Github" }
//======== (C) Copyright 2002 Charles G. Cleveland All rights reserved. ========= // // The copyright to the contents herein is the property of Charles G. Cleveland. // The contents may be used and/or copied only with the written permission of // Charles G. Cleveland, or in accordance with the terms and conditions stipulated in // the agreement/contract under which the contents have been supplied. // // Purpose: // // $Workfile: hldm.h $ // $Date: 2002/08/02 21:42:34 $ // //------------------------------------------------------------------------------- // $Log: hldm.h,v $ // Revision 1.4 2002/08/02 21:42:34 Flayra // - Allow ability to control how often a ricochet sound plays // // Revision 1.3 2002/07/08 16:17:46 Flayra // - Reworked bullet firing to add random spread (bug #236) // //=============================================================================== #ifndef HLDM_H #define HLDM_H #include "common/pmtrace.h" void EV_HLDM_GunshotDecalTrace( pmtrace_t *pTrace, char *decalName, int inChanceOfSound = 1); void EV_HLDM_DecalGunshot( pmtrace_t *pTrace, int iBulletType, int inChanceOfSound = 1); int EV_HLDM_CheckTracer( int idx, float *vecSrc, float *end, float *forward, float *right, int iBulletType, int iTracerFreq, int *tracerCount ); void EV_HLDM_FireBullets( int idx, float *forward, float *right, float *up, int cShots, float *vecSrc, float *vecDirShooting, float flDistance, int iBulletType, int iTracerFreq, int *tracerCount, float flSpreadX, float flSpreadY ); void EV_HLDM_FireBulletsPlayer( int idx, float *forward, float *right, float *up, int cShots, float *vecSrc, float *vecDirShooting, float flDistance, int iBulletType, int iTracerFreq, int *tracerCount, Vector& inSpread, int inRandomSeed); #endif
{ "pile_set_name": "Github" }
--- title: StorSimple 8000 シリーズ デバイスへの Update 5.1 のインストール | Microsoft Docs description: StorSimple 8000 シリーズ デバイスに StorSimple 8000 シリーズの Update 5.1 をインストールする方法について説明します。 services: storsimple documentationcenter: NA author: twooley ms.assetid: '' ms.service: storsimple ms.devlang: NA ms.topic: how-to ms.tgt_pltfrm: NA ms.workload: TBD ms.date: 03/05/2020 ms.author: twooley ms.openlocfilehash: f9cc5181d6cc29ee4b3c2373dbbc91d6290fbe6e ms.sourcegitcommit: 9c3cfbe2bee467d0e6966c2bfdeddbe039cad029 ms.translationtype: HT ms.contentlocale: ja-JP ms.lasthandoff: 08/24/2020 ms.locfileid: "88782772" --- # <a name="install-update-51-on-your-storsimple-device"></a>StorSimple デバイスへの Update 5.1 のインストール ## <a name="overview"></a>概要 このチュートリアルでは、Update 5.1 より前のソフトウェア バージョンを実行している StorSimple デバイスに、Azure portal 経由で Update 5.1 をインストールする方法について説明します。 <!--The hotfix method is used when you are trying to install Update 5.1 on a device running pre-Update 3 versions. The hotfix method is also used when a gateway is configured on a network interface other than DATA 0 of the StorSimple device and you are trying to update from a pre-Update 1 software version.--> Update 5.1 には、中断なしのセキュリティ更新プログラムが含まれています。 中断なしまたは通常の更新プログラムは、Azure portal を使用して適用できます <!--or by the hotfix method-->。 > [!IMPORTANT] > > * Update 5.1 は必須の更新プログラムであり、すぐにインストールする必要があります。 詳細については、[Update 5.1 リリース ノート](storsimple-update51-release-notes.md)に関する記事を参照してください。 > * インストールの前に、ハードウェアの状態とネットワーク接続の点からデバイスの正常性を判断するための手動と自動の一連の事前チェックが行われます。 これらの事前チェックは、Azure portal から更新プログラムを適用する場合にのみ実行されます。 > * 修正プログラムによる方法を使用してインストールする場合は、[Microsoft サポート](mailto:support@microsoft.com)にお問い合わせください。 <!-- > * We strongly recommend that when updating a device running versions prior to Update 3, you install the updates using hotfix method. If you encounter any issues, [log a support ticket](storsimple-8000-contact-microsoft-support.md). > * We recommend that you install the software and other regular updates via the Azure portal. 
You should only go to the Windows PowerShell interface of the device (to install updates) if the pre-update gateway check fails in the portal. Depending upon the version you are updating from, the updates may take 4 hours (or greater) to install. The maintenance mode updates must be installed through the Windows PowerShell interface of the device. As maintenance mode updates are disruptive updates, these result in a down time for your device. > * If running the optional StorSimple Snapshot Manager, ensure that you have upgraded your Snapshot Manager version to Update 5.1 prior to updating the device. --> [!INCLUDE [storsimple-preparing-for-update](../../includes/storsimple-preparing-for-updates.md)] ## <a name="install-update-51-through-the-azure-portal"></a>Azure portal を使用して Update 5.1 をインストールする デバイスを [Update 5.1](storsimple-update51-release-notes.md) に更新するには、次の手順を実行します。 > [!NOTE] > Microsoft はデバイスから追加の診断情報を取得します。 その結果、Microsoft の運用チームが問題のあるデバイスを識別したときに、デバイスから情報を収集して問題を診断する能力が向上します。 #### <a name="to-install-an-update-from-the-azure-portal"></a>Azure ポータルから 更新プログラムをインストールするには 1. StorSimple サービス ページでデバイスを選択します。 ![デバイスの削除](./media/storsimple-8000-install-update-51/update1.png) 2. **[デバイスの設定]** > **[デバイスの更新プログラム]** の順に移動します。 ![[デバイスの更新プログラム] をクリック](./media/storsimple-8000-install-update-51/update2.png) 3. 新しい更新プログラムが利用できる場合は、通知が表示されます。 または、 **[デバイスの更新プログラム]** ブレードで **[更新プログラムのスキャン]** をクリックします。 利用可能な更新プログラムをスキャンするジョブが作成されます。 ジョブが正常に完了すると、その旨が通知されます。 ![[デバイスの更新プログラム] をクリック](./media/storsimple-8000-install-update-51/update3.png) 4. 更新プログラムをデバイスに適用する前に、リリース ノートを確認することをお勧めします。 **[更新プログラムのインストール]** をクリックすると、更新プログラムが適用されます。 **[定期更新プログラムの確認]** ブレードで、更新プログラムを適用する前に完了する必要のある前提条件を確認します。 デバイスを更新する準備ができたことを示すチェック ボックスをオンにし、 **[インストール]** をクリックします。 ![[デバイスの更新プログラム] をクリック](./media/storsimple-8000-install-update-51/update4.png) 5. 
一連の前提条件のチェックが開始されます。 これらのチェックは次のとおりです。 * **コントローラーの正常性チェック** では、両方のデバイス コントローラーが正常であり、オンラインであることを確認します。 * **ハードウェア コンポーネントの正常性チェック** では、StorSimple デバイスのすべてのハードウェア コンポーネントが正常であることを確認します。 * **DATA 0 チェック** では、デバイスで DATA 0 が有効であることを確認します。 このインターフェイスが有効でない場合は、有効にしてから再試
{ "pile_set_name": "Github" }
// RUN: %clang_cc1 %s -fcxx-exceptions -fexceptions -fsyntax-only -verify -fblocks -std=c++11 -Wunreachable-code-aggressive -Wno-unused-value -Wno-tautological-compare int &halt() __attribute__((noreturn)); int &live(); int dead(); int liveti() throw(int); int (*livetip)() throw(int); int test1() { try { live(); } catch (int i) { live(); } return 1; } void test2() { try { live(); } catch (int i) { live(); } try { liveti(); } catch (int i) { live(); } try { livetip(); } catch (int i) { live(); } throw 1; dead(); // expected-warning {{will never be executed}} } void test3() { halt() --; // expected-warning {{will never be executed}} // FIXME: The unreachable part is just the '?', but really all of this // code is unreachable and shouldn't be separately reported. halt() // expected-warning {{will never be executed}} ? dead() : dead(); live(), float (halt()); // expected-warning {{will never be executed}} } void test4() { struct S { int mem; } s; S &foor(); halt(), foor()// expected-warning {{will never be executed}} .mem; } void test5() { struct S { int mem; } s; S &foonr() __attribute__((noreturn)); foonr() .mem; // expected-warning {{will never be executed}} } void test6() { struct S { ~S() { } S(int i) { } }; live(), S (halt()); // expected-warning {{will never be executed}} } // Don't warn about unreachable code in template instantiations, as // they may only be unreachable in that specific instantiation. void isUnreachable(); template <typename T> void test_unreachable_templates() { T::foo(); isUnreachable(); // no-warning } struct TestUnreachableA { static void foo() __attribute__((noreturn)); }; struct TestUnreachableB { static void foo(); }; void test_unreachable_templates_harness() { test_unreachable_templates<TestUnreachableA>(); test_unreachable_templates<TestUnreachableB>(); } // Do warn about explict template specializations, as they represent // actual concrete functions that somebody wrote. 
template <typename T> void funcToSpecialize() {}
template <> void funcToSpecialize<int>() {
  halt();
  dead(); // expected-warning {{will never be executed}}
}

// Handle 'try' code dominating a dead return.
enum PR19040_test_return_t {
  PR19040_TEST_FAILURE
};

namespace PR19040_libtest {
  class A {
  public:
    ~A ();
  };
}

PR19040_test_return_t PR19040_fn1 () {
  try {
    throw PR19040_libtest::A ();
  } catch (...) {
    return PR19040_TEST_FAILURE;
  }
  return PR19040_TEST_FAILURE; // expected-warning {{will never be executed}}
}

__attribute__((noreturn)) void raze();

namespace std {
  template<typename T> struct basic_string {
    basic_string(const T* x) {}
    ~basic_string() {};
  };
  typedef basic_string<char> string;
}

std::string testStr() {
  raze();
  return ""; // expected-warning {{'return' will never be executed}}
}

std::string testStrWarn(const char *s) {
  raze();
  return s; // expected-warning {{will never be executed}}
}

bool testBool() {
  raze();
  return true; // expected-warning {{'return' will never be executed}}
}

// A global const used as a "configuration value" should not trigger the
// warning on the branch it disables.
static const bool ConditionVar = 1;
int test_global_as_conditionVariable() {
  if (ConditionVar)
    return 1;
  return 0; // no-warning
}

// Handle unreachable temporary destructors.
class A {
public:
  A();
  ~A();
};

__attribute__((noreturn)) void raze(const A& x);

void test_with_unreachable_tmp_dtors(int x) {
  raze(x ? A() : A()); // no-warning
}

// Test sizeof - sizeof in enum declaration.
// An enum constant computed from sizeof arithmetic is treated as a
// configuration value and suppresses the warning; plain integer arithmetic
// does not.
enum { BrownCow = sizeof(long) - sizeof(char) };
enum { CowBrown = 8 - 1 };

int test_enum_sizeof_arithmetic() {
  if (BrownCow)
    return 1;
  return 2;
}

int test_enum_arithmetic() {
  if (CowBrown)
    return 1;
  return 2; // expected-warning {{never be executed}}
}

int test_arithmetic() {
  if (8 -1)
    return 1;
  return 2; // expected-warning {{never be executed}}
}

int test_treat_const_bool_local_as_config_value() {
  const bool controlValue = false;
  if (!controlValue)
    return 1;
  test_treat_const_bool_local_as_config_value(); // no-warning
  return 0;
}

int test_treat_non_const_bool_local_as_non_config_value() {
  bool controlValue = false;
  if (!controlValue)
    return 1;
  // There is no warning here because 'controlValue' isn't really
  // a control value at all.  The CFG will not treat this
  // branch as unreachable.
  test_treat_non_const_bool_local_as_non_config_value(); // no-warning
  return 0;
}

void test_do_while(int x) {
  // Handle trivial expressions with
  // implicit casts to bool.
  do {
    break;
  } while (0); // no-warning
}

class Frobozz {
public:
  Frobozz(int x);
  ~Frobozz();
};

Frobozz test_return_object(int flag) {
  return Frobozz(flag);
  return Frobozz(42); // expected-warning {{'return' will never be executed}}
}

Frobozz test_return_object_control_flow(int flag) {
  return Frobozz(flag);
  return Frobozz(flag ?
                 42 : 24); // expected-warning {{code will never be executed}}
}

void somethingToCall();

// A constexpr function used as a condition is treated like a configuration
// value: the dominated-out branch is not reported.
static constexpr bool isConstExprConfigValue() { return true; }

int test_const_expr_config_value() {
  if (isConstExprConfigValue()) {
    somethingToCall();
    return 0;
  }
  somethingToCall(); // no-warning
  return 1;
}

int test_const_expr_config_value_2() {
  if (!isConstExprConfigValue()) {
    somethingToCall(); // no-warning
    return 0;
  }
  somethingToCall();
  return 1;
}

class Frodo {
public:
  static const bool aHobbit = true;
};

void test_static_class_var() {
  if (Frodo::aHobbit)
    somethingToCall();
  else
    somethingToCall(); // no-warning
}

void test_static_class_var(Frodo &F) {
  if (F.aHobbit)
    somethingToCall();
  else
    somethingToCall(); // no-warning
}

void test_unreachable_for_null_increment() {
  for (unsigned i = 0; i < 10 ; ) // no-warning
    break;
}

void test_unreachable_forrange_increment() {
{ "pile_set_name": "Github" }
//------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All rights reserved. //------------------------------------------------------------ #pragma warning disable 1634, 1691 namespace System.Workflow.Activities { using System; using System.Collections; using System.Collections.Generic; using System.ComponentModel; using System.Globalization; using System.Reflection; using System.ServiceModel; internal sealed class ContractType : Type, ICloneable { private static readonly char[] elementDecorators = new char[] { '[', '*', '&' }; private static readonly char[] nameSeparators = new char[] { '.', '+' }; private Attribute[] attributes = null; private ConstructorInfo[] constructors = null; private EventInfo[] events = null; private FieldInfo[] fields = null; private string fullName; private Guid guid = Guid.Empty; private MethodInfo[] methods = null; private string name; private Type[] nestedTypes = Type.EmptyTypes; private PropertyInfo[] properties = null; private TypeAttributes typeAttributes; internal ContractType(string name) { if (string.IsNullOrEmpty(name)) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgument( "name", SR2.GetString(SR2.Error_ArgumentValueNullOrEmptyString)); } this.fullName = name; this.name = this.fullName; // detect first bracket, any name seperators after it are part of a generic parameter... 
int idx = name.IndexOf('['); // Get the name after the last dot if (idx != -1) { idx = this.name.Substring(0, idx).LastIndexOfAny(nameSeparators); } else { idx = this.name.LastIndexOfAny(nameSeparators); } if (idx != -1) { this.name = this.fullName.Substring(idx + 1); } this.typeAttributes = TypeAttributes.Interface | TypeAttributes.Sealed | TypeAttributes.Public | TypeAttributes.Abstract; this.attributes = new Attribute[] { new ServiceContractAttribute() }; this.methods = new MethodInfo[0]; } public override Assembly Assembly { get { return null; } } public override string AssemblyQualifiedName { get { return this.FullName; } } public override Type BaseType { get { return null; } } public override Type DeclaringType { get { return null; } } public override string FullName { get { return this.fullName; } } public override Guid GUID { get { if (this.guid == Guid.Empty) { this.guid = Guid.NewGuid(); } return this.guid; } } public override Module Module { get { return null; } } public override string Name { get { return this.name; } } public override string Namespace { get { if (this.fullName == Name) { return string.Empty; } return this.fullName.Substring(0, this.fullName.Length - Name.Length - 1); } } public override RuntimeTypeHandle TypeHandle { get { #pragma warning suppress 56503 throw DiagnosticUtility.ExceptionUtility.ThrowHelperError( new NotImplementedException(SR2.GetString(SR2.Error_RuntimeNotSupported))); } } public override Type UnderlyingSystemType { get { return this; } } public object Clone() { return this; } public override bool Equals(object obj) { if (obj == null) { return false; } ContractType contract = obj as ContractType; if (contract == null) { return false; } if (string.Compare(this.AssemblyQualifiedName, contract.AssemblyQualifiedName, StringComparison.Ordinal) != 0 || this.methods.Length != contract.methods.Length) { return false; } foreach (MethodInfo methodInfo in this.methods) { if (this.GetMemberHelper<MethodInfo>(BindingFlags.Public | 
BindingFlags.Instance, new MemberSignature(methodInfo), ref contract.methods) == null) { return false; } } return true; } public override int GetArrayRank() { throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgument( SR2.GetString(SR2.Error_CurrentTypeNotAnArray)); } public override ConstructorInfo[] GetConstructors(BindingFlags bindingAttr) { return GetMembersHelper<ConstructorInfo>(bindingAttr, ref this.constructors, false); } public override object[] GetCustomAttributes(bool inherit) { return GetCustomAttributes(typeof(object), inherit); } public override object[] GetCustomAttributes(Type attributeType, bool inherit) { if (attributeType == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("attributeType"); } return ServiceOperationHelpers.GetCustomAttributes(attributeType, this.attributes); } public override MemberInfo[] GetDefaultMembers() { // Get all of the custom attributes DefaultMemberAttribute attr = null; for (Type t = this; t != null; t = t.BaseType) { object[] attrs = GetCustomAttributes(typeof(DefaultMemberAttribute), false); if (attrs != null && attrs.Length > 0) { attr = attrs[0] as DefaultMemberAttribute; } if (attr != null) { break; } } if (attr == null) { return new MemberInfo[0]; } String defaultMember = attr.MemberName; MemberInfo[] members = GetMember(defaultMember); if (members == null) { members = new MemberInfo[0]; } return members; } public override Type GetElementType() { return null; } public override EventInfo GetEvent(string name, BindingFlags bindingAttr) { return GetMemberHelper<EventInfo>(bindingAttr, new MemberSignature(name, null, null), ref this.events); } public override EventInfo[] GetEvents(BindingFlags bindingAttr) { return GetMembersHelper<EventInfo>(bindingAttr, ref this.events, true); } public override FieldInfo GetField(string name, BindingFlags bindingAttr) { return GetMemberHelper<FieldInfo>(bindingAttr, new MemberSignature(name, null, null), ref this.fields); } public override FieldInfo[] 
GetFields(BindingFlags bindingAttr) { return GetMembersHelper<FieldInfo>(bindingAttr, ref this.fields, true); } public override int GetHashCode() { return this.name.GetHashCode(); } public override Type GetInterface(string name, bool ignoreCase) { if (string.IsNullOrEmpty(name)) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgument( "name", SR2.GetString(SR2.Error_ArgumentValueNullOrEmptyString)); } if (string.Compare(this.name, name, StringComparison.Ordinal) == 0) { return this; } return null; } public override Type[] GetInterfaces() { return Type.EmptyTypes; } public override MemberInfo[] GetMember(string name, MemberTypes type, BindingFlags bindingAttr) { List<MemberInfo> members = new List<MemberInfo>(); // Methods if ((type & MemberTypes.Method) != 0) { members.AddRange(
{ "pile_set_name": "Github" }
using System;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Windows.Forms;
using Microsoft.Win32;
using Nexus.Client.Settings;
using Nexus.Client.UI;
using Nexus.Client.Util;
using Nexus.Client.Games.Steam;
using System.Xml;

namespace Nexus.Client.Games.NoMansSky
{
	/// <summary>
	/// The base game mode factory that provides the common functionality for
	/// factories that build game modes for NoMansSky based games.
	/// </summary>
	public class NoMansSkyGameModeFactory : IGameModeFactory
	{
		private readonly IGameModeDescriptor m_gmdGameModeDescriptor = null;

		#region Properties

		/// <summary>
		/// Gets the application's environment info.
		/// </summary>
		/// <value>The application's environment info.</value>
		protected IEnvironmentInfo EnvironmentInfo { get; private set; }

		/// <summary>
		/// Gets the descriptor of the game mode that this factory builds.
		/// </summary>
		/// <value>The descriptor of the game mode that this factory builds.</value>
		public IGameModeDescriptor GameModeDescriptor
		{
			get
			{
				return m_gmdGameModeDescriptor;
			}
		}

		#endregion

		#region Constructors

		/// <summary>
		/// A simple constructor that initializes the object with the given values.
		/// </summary>
		/// <param name="p_eifEnvironmentInfo">The application's environment info.</param>
		public NoMansSkyGameModeFactory(IEnvironmentInfo p_eifEnvironmentInfo)
		{
			EnvironmentInfo = p_eifEnvironmentInfo;
			m_gmdGameModeDescriptor = new NoMansSkyGameModeDescriptor(p_eifEnvironmentInfo);
		}

		#endregion

		/// <summary>
		/// Gets the path where mod files should be installed.
		/// </summary>
		/// <remarks>
		/// Detects the game via its Steam app id ("275850") and the NMS.exe executable.
		/// </remarks>
		/// <returns>The path where mod files should be installed, or
		/// <c>null</c> if the path could not be determined.</returns>
		public string GetInstallationPath()
		{
			string strValue = SteamInstallationPathDetector.Instance.GetSteamInstallationPath("275850", "No Man's Sky", "NMS.exe");
			return strValue;
		}

		/// <summary>
		/// Gets the path where mod files should be installed.
		/// </summary>
		/// <remarks>
		/// This method uses the given path to the installed game
		/// to determine the installation path for mods.
		/// </remarks>
		/// <returns>The path where mod files should be installed, or
		/// <c>null</c> if the path could not be determined.</returns>
		public string GetInstallationPath(string p_strGameInstallPath)
		{
			return p_strGameInstallPath;
		}

		/// <summary>
		/// Gets the path to the game executable.
		/// </summary>
		/// <returns>The path to the game executable, or
		/// <c>null</c> if the path could not be determined.</returns>
		public string GetExecutablePath(string p_strPath)
		{
			return p_strPath;
		}

		/// <summary>
		/// Builds the game mode.
		/// </summary>
		/// <param name="p_futFileUtility">The file utility class to be used by the game mode.</param>
		/// <param name="p_imsWarning">The resultant warning resultant from the creation of the game mode.
		/// <c>null</c> if there are no warnings.</param>
		/// <returns>The game mode.</returns>
		public IGameMode BuildGameMode(FileUtil p_futFileUtility, out ViewMessage p_imsWarning)
		{
			// Make sure per-game-mode settings exist and are seeded with defaults
			// the first time this game mode is built.
			if (EnvironmentInfo.Settings.CustomGameModeSettings[GameModeDescriptor.ModeId] == null)
				EnvironmentInfo.Settings.CustomGameModeSettings[GameModeDescriptor.ModeId] = new PerGameModeSettings<object>();
			if (!EnvironmentInfo.Settings.CustomGameModeSettings[GameModeDescriptor.ModeId].ContainsKey("AskAboutReadOnlySettingsFiles"))
			{
				EnvironmentInfo.Settings.CustomGameModeSettings[GameModeDescriptor.ModeId]["AskAboutReadOnlySettingsFiles"] = true;
				EnvironmentInfo.Settings.CustomGameModeSettings[GameModeDescriptor.ModeId]["UnReadOnlySettingsFiles"] = true;
				EnvironmentInfo.Settings.Save();
			}

			NoMansSkyGameMode gmdGameMode = InstantiateGameMode(p_futFileUtility);
			p_imsWarning = null;

			return gmdGameMode;
		}

		/// <summary>
		/// Instantiates the game mode.
		/// </summary>
		/// <param name="p_futFileUtility">The file utility class to be used by the game mode.</param>
		/// <returns>The game mode for which this is a factory.</returns>
		protected NoMansSkyGameMode InstantiateGameMode(FileUtil p_futFileUtility)
		{
			return new NoMansSkyGameMode(EnvironmentInfo, p_futFileUtility);
		}

		/// <summary>
		/// Performs the initial setup for the game mode being created.
		/// </summary>
		/// <param name="p_dlgShowView">The delegate to use to display a view.</param>
		/// <param name="p_dlgShowMessage">The delegate to use to display a message.</param>
		/// <returns><c>true</c> if the setup completed successfully;
		/// <c>false</c> otherwise.</returns>
		public bool PerformInitialSetup(ShowViewDelegate p_dlgShowView, ShowMessageDelegate p_dlgShowMessage)
		{
			if (EnvironmentInfo.Settings.CustomGameModeSettings[GameModeDescriptor.ModeId] == null)
				EnvironmentInfo.Settings.CustomGameModeSettings[GameModeDescriptor.ModeId] = new PerGameModeSettings<object>();

			// Show the setup dialog; a Cancel result aborts game mode creation.
			NoMansSkySetupVM vmlSetup = new NoMansSkySetupVM(EnvironmentInfo, GameModeDescriptor);
			SetupForm frmSetup = new SetupForm(vmlSetup);
			if (((DialogResult)p_dlgShowView(frmSetup, true)) == DialogResult.Cancel)
				return false;
			return vmlSetup.Save();
		}

		/// <summary>
		/// Performs the initialization for the game mode being created.
		/// </summary>
		/// <param name="p_dlgShowView">The delegate to use to display a view.</param>
		/// <param name="p_dlgShowMessage">The delegate to use to display a message.</param>
		/// <returns><c>true</c> if the setup completed successfully;
		/// <c>false</c> otherwise.</returns>
		public bool PerformInitialization(ShowViewDelegate p_dlgShowView, ShowMessageDelegate p_dlgShowMessage)
		{
			return true;
		}
	}
}
{ "pile_set_name": "Github" }
&FORCE_EVAL METHOD Quickstep &DFT &QS METHOD PM6 &SE &END &END QS &SCF MAX_SCF 0 &END SCF &END DFT &SUBSYS &CELL #Mn_2 O_3 & Ia(-3)-T_h^7 #206 (bde) & D5_3 & cI80 & Dachs, Zf Kristall. 107, 37 A -4.70000000 4.70000000 4.70000000 B 4.70000000 -4.70000000 4.70000000 C 4.70000000 4.70000000 -4.70000000 &END CELL &COORD SCALED Fe 0.50000000 0.50000000 0.50000000 2.35000000 2.35000000 2.35000000 Fe 0.50000000 0.00000000 0.00000000 -2.35000000 2.35000000 2.35000000 Fe 0.00000000 0.50000000 0.00000000 2.35000000 -2.35000000 2.35000000 Fe 0.00000000 0.00000000 0.50000000 2.35000000 2.35000000 -2.35000000 Mn 0.25000000 0.21560000 -0.03440000 -0.32336000 0.00000000 2.35000000 Mn -0.25000000 0.28440000 -0.46560000 0.32336000 -4.70000000 2.35000000 Mn -0.03440000 0.25000000 0.21560000 2.35000000 -0.32336000 0.00000000 Mn -0.46560000 -0.25000000 0.28440000 2.35000000 0.32336000 -4.70000000 Mn 0.21560000 -0.03440000 0.25000000 0.00000000 2.35000000 -0.32336000 Mn 0.28440000 -0.46560000 -0.25000000 -4.70000000 2.35000000 0.32336000 Mn -0.25000000 -0.21560000 0.03440000 0.32336000 0.00000000 -2.35000000 Mn 0.25000000 -0.28440000 0.46560000 -0.32336000 4.70000000 -2.35000000 Mn 0.03440000 -0.25000000 -0.21560000 -2.35000000 0.32336000 0.00000000 Mn 0.46560000 0.25000000 -0.28440000 -2.35000000 -0.32336000 4.70000000 Mn -0.21560000 0.03440000 -0.25000000 0.00000000 -2.35000000 0.32336000 Mn -0.28440000 0.46560000 0.25000000 4.70000000 -2.35000000 -0.32336000 O 0.22500000 0.46300000 0.43800000 3.17720000 0.94000000 1.17500000 O -0.47500000 -0.21300000 0.06200000 1.52280000 -0.94000000 -3.52500000 O -0.02500000 0.03700000 0.26200000 1.52280000 0.94000000 -1.17500000 O 0.27500000 -0.28700000 0.23800000 -1.52280000 3.76000000 -1.17500000 O 0.43800000 0.22500000 0.46300000 1.17500000 3.17720000 0.94000000 O 0.06200000 -0.47500000 -0.21300000 -3.52500000 1.52280000 -0.94000000 O 0.26200000 -0.02500000 0.03700000 -1.17500000 1.52280000 0.94000000 O 0.23800000 0.27500000 -0.28700000 
-1.17500000 -1.52280000 3.76000000 O 0.46300000 0.43800000 0.22500000 0.94000000 1.17500000 3.17720000 O -0.21300000 0.06200000 -0.47500000 -0.94000000 -3.52500000 1.52280000 O 0.03700000 0.26200000 -0.02500000 0.94000000 -1.17500000 1.52280000 O -0.28700000 0.23800000 0.27500000 3.76000000 -1.17500000 -1.52280000 O -0.22500000 -0.46300000 -0.43800000 -3.17720000 -0.94000000 -1.17500000 O 0.47500000 0.21300000 -0.06200000 -1.52280000 0.94000000 3.52500000 O 0.02500000 -0.03700000 -0.26200000 -1.52280000 -0.94000000 1.17500000 O -0.27500000 0.28700000 -0.23800000 1.52280000 -3.76000000 1.17500000 O -0.43800000 -0.22500000 -0.46300000 -1.17500000 -3.17720000 -0.94000000 O -0.06200000 0.47500000 0.21300000 3.52500000 -1.52280000 0.94000000 O -0.26200000 0.02500000 -0.03700000 1.17500000 -1.52280000 -0.94000000 O -0.23800000 -0.27500000 0.28700000 1.17500000 1.52280000 -3.76000000 O -0.46300000 -0.43800000 -0.22500000 -0.94000000 -1.17500000 -3.17720000 O 0.21300000 -0.06200000 0.47500000 0.94000000 3.52500000 -1.52280000 O -0.03700000 -0.26200000 0.02500000 -0.94000000 1.17500000 -1.52280000 O 0.28700000 -0.23800000 -0.27500000 -3.76000000 1.17500000 1.52280000 &END COORD &PRINT &SYMMETRY CHECK_SYMMETRY m<3> &END &END &TOPOLOGY CONNECTIVITY OFF &END &END SUBSYS &END FORCE_EVAL &GLOBAL PROJECT c_29_bixbyite RUN_TYPE ENERGY &END GLOBAL
{ "pile_set_name": "Github" }
import * as ts from "typescript"; import * as Lint from "tslint"; import * as utils from "tsutils/typeguard/2.8"; import * as Ignore from "./shared/ignore"; import { createInvalidNode, CheckNodeResult, createCheckNodeRule } from "./shared/check-node"; type Options = Ignore.IgnoreLocalOption & Ignore.IgnoreOption; // tslint:disable-next-line:variable-name export const Rule = createCheckNodeRule( Ignore.checkNodeWithIgnore(checkNode), "Unexpected let, use const instead." ); function checkNode( node: ts.Node, ctx: Lint.WalkContext<Options> ): CheckNodeResult { const results = [ checkVariableStatement(node, ctx), checkForStatements(node, ctx) ]; return { invalidNodes: results.reduce( (merged, result) => [...merged, ...result.invalidNodes], [] ), skipChildren: results.some(result => result.skipChildren === true) }; } function checkVariableStatement( node: ts.Node, ctx: Lint.WalkContext<Options> ): CheckNodeResult { if (utils.isVariableStatement(node)) { return checkDeclarationList(node.declarationList, ctx); } return { invalidNodes: [] }; } function checkForStatements( node: ts.Node, ctx: Lint.WalkContext<Options> ): CheckNodeResult { if ( (utils.isForStatement(node) || utils.isForInStatement(node) || utils.isForOfStatement(node)) && node.initializer && utils.isVariableDeclarationList(node.initializer) && Lint.isNodeFlagSet(node.initializer, ts.NodeFlags.Let) ) { return checkDeclarationList(node.initializer, ctx); } return { invalidNodes: [] }; } function checkDeclarationList( declarationList: ts.VariableDeclarationList, ctx: Lint.WalkContext<Options> ): CheckNodeResult { if (Lint.isNodeFlagSet(declarationList, ts.NodeFlags.Let)) { // It is a let declaration, now check each variable that is declared const invalidVariableDeclarationNodes = []; // If the declaration list contains multiple variables, eg. let x = 0, y = 1, mutableZ = 3; then // we should only provide one fix for the list even if two variables are invalid. 
// NOTE: When we have a mix of allowed and disallowed variables in the same DeclarationList // there is no sure way to know if we should do a fix or not, eg. if ignore-prefix=mutable // and the list is "let x, mutableZ", then "x" is invalid but "mutableZ" is valid, should we change // "let" to "const" or not? For now we change to const if at least one variable is invalid. let addFix = true; for (const variableDeclarationNode of declarationList.declarations) { if ( !Ignore.shouldIgnore( variableDeclarationNode, ctx.options, ctx.sourceFile ) ) { invalidVariableDeclarationNodes.push( createInvalidNode( variableDeclarationNode, addFix ? [ new Lint.Replacement( declarationList.getStart(ctx.sourceFile), "let".length, "const" ) ] : [] ) ); addFix = false; } } return { invalidNodes: invalidVariableDeclarationNodes }; } return { invalidNodes: [] }; }
{ "pile_set_name": "Github" }
require(['GlobalShortcutsHelp'], function (gs) { 'use strict'; function init() { $('.global-shortcuts-help-entry-point').on('click', gs.open); } $(init); });
{ "pile_set_name": "Github" }
using System; using System.Reflection; using FluentAssertions; using Newtonsoft.Json.Linq; using Xunit; namespace Serilog.Sinks.Elasticsearch.Tests.Templating { public class SendsTemplateTests : ElasticsearchSinkTestsBase { private readonly Tuple<Uri, string> _templatePut; public SendsTemplateTests() { _options.AutoRegisterTemplate = true; var loggerConfig = new LoggerConfiguration() .MinimumLevel.Debug() .Enrich.WithMachineName() .WriteTo.ColoredConsole() .WriteTo.Elasticsearch(_options); var logger = loggerConfig.CreateLogger(); using (logger as IDisposable) { logger.Error("Test exception. Should not contain an embedded exception object."); } this._seenHttpPosts.Should().NotBeNullOrEmpty().And.HaveCount(1); this._seenHttpPuts.Should().NotBeNullOrEmpty().And.HaveCount(1); _templatePut = this._seenHttpPuts[0]; } [Fact] public void ShouldRegisterTheCorrectTemplateOnRegistration() { var method = typeof(SendsTemplateTests).GetMethod(nameof(ShouldRegisterTheCorrectTemplateOnRegistration)); JsonEquals(_templatePut.Item2, method, "template"); } [Fact] public void TemplatePutToCorrectUrl() { var uri = _templatePut.Item1; uri.AbsolutePath.Should().Be("/_template/serilog-events-template"); } protected void JsonEquals(string json, MethodBase method, string fileName = null) { #if DOTNETCORE var assembly = typeof(SendsTemplateTests).GetTypeInfo().Assembly; #else var assembly = Assembly.GetExecutingAssembly(); #endif var expected = TestDataHelper.ReadEmbeddedResource(assembly, "template.json"); var nJson = JObject.Parse(json); var nOtherJson = JObject.Parse(expected); var equals = JToken.DeepEquals(nJson, nOtherJson); if (equals) return; expected.Should().BeEquivalentTo(json); } } }
{ "pile_set_name": "Github" }
// Sandstorm - Personal Cloud Sandbox // Copyright (c) 2016 Sandstorm Development Group, Inc. and contributors // All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. "use strict"; var crypto = require("crypto"); var utils = require("../utils"), short_wait = utils.short_wait, medium_wait = utils.medium_wait, long_wait = utils.long_wait; var COLLECTIONS_APP_ID = "s3u2xgmqwznz2n3apf30sm3gw1d85y029enw5pymx734cnk5n78h"; var COLLECTIONS_PACKAGE_ID = "e9408a7c077f7a9baeb9c02f0437ae40"; var COLLECTIONS_PACKAGE_URL = "https://sandstorm.io/apps/david/collections3.spk"; module.exports = {}; function setGrainTitle(browser, collectionTitle) { return browser .waitForElementVisible("#grainTitle", short_wait) .click("#grainTitle") .setAlertText(collectionTitle) .acceptAlert() .grainFrame() .waitForElementVisible("button[title='add description']", short_wait) .click("button[title='add description']") .waitForElementVisible("form.description-row>input[type=text]", short_wait) .setValue("form.description-row>input[type=text]", "This is " + collectionTitle) .click("form.description-row>button") .frame(null) } function powerboxCardSelector(grainId) { return ".popup.request .candidate-cards .powerbox-card button[data-card-id=grain-" + grainId + "]"; } module.exports["Test Collections"] = function (browser) { // Prepend 'A' so that the default handle is valid. 
var devNameAlice = "A" + crypto.randomBytes(10).toString("hex"); var devNameBob = "A" + crypto.randomBytes(10).toString("hex"); var devNameCarol = "A" + crypto.randomBytes(10).toString("hex"); browser .loginDevAccount(devNameBob) .executeAsync(function (done) { done(Meteor.userId()); }, [], function (result) { var bobAccountId = result.value; browser.execute("window.Meteor.logout()") .loginDevAccount(devNameCarol) .executeAsync(function (done) { done(Meteor.userId()); }, [], function (result) { var carolAccountId = result.value; browser.execute("window.Meteor.logout()") .init() .loginDevAccount(devNameAlice) .installApp(COLLECTIONS_PACKAGE_URL, COLLECTIONS_PACKAGE_ID, COLLECTIONS_APP_ID); browser = setGrainTitle(browser, "Collection A"); browser.executeAsync(function (bobAccountId, done) { // Share Collection A to Bob. var grainId = Grains.findOne()._id; Meteor.call("newApiToken", { accountId: Meteor.userId() }, grainId, "petname", { allAccess: null }, { user: { accountId: bobAccountId, title: "Collection A", } }, function(error, result) { done({ error: error, grainId: grainId, }); }); }, [bobAccountId], function (result) { var grainIdA = result.value.grainId; browser.assert.equal(!result.value.error, true); browser.newGrain(COLLECTIONS_APP_ID, function (grainIdB) { browser = setGrainTitle(browser, "Collection B"); browser.newGrain(COLLECTIONS_APP_ID, function (grainIdC) { browser = setGrainTitle(browser, "Collection C"); browser = browser .url(browser.launch_url + "/grain/" + grainIdA) .grainFrame() .waitForElementVisible("table.grain-list-table>tbody>tr.add-grain>td>button", medium_wait) .click("table.grain-list-table>tbody>tr.add-grain>td>button") .frame(null) .waitForElementVisible(powerboxCardSelector(grainIdB), short_wait) .click(powerboxCardSelector(grainIdB)) // Add with 'editor' permissions. 
.waitForElementVisible(".popup.request .selected-card>form input[value='0']", short_wait) .click(".popup.request .selected-card>form input[value='0']") .click(".popup.request .selected-card>form button.connect-button") .grainFrame() .waitForElementVisible("table.grain-list-table>tbody>tr.add-grain>td>button", short_wait) .click("table.grain-list-table>tbody>tr.add-grain>td>button") .frame(null) .waitForElementVisible(powerboxCardSelector(grainIdC), short_wait) .click(powerboxCardSelector(grainIdC)) // Add with 'viewer' permissions. .waitForElementVisible(".popup.request .selected-card>form input[value='1']", short_wait) .click(".popup.request .selected-card>form input[value='1']") .click(".popup.request .selected-card>form button.connect-button") .grainFrame() .waitForElementVisible("table.grain-list-table>tbody tr:nth-child(3).grain", short_wait) .click("table.grain-list-table>tbody tr:nth-child(3).grain .click-to-go") .frame(null) .grainFrame(grainIdC) .waitForElementVisible(".description-row p", short_wait) .assert.containsText(".description-row p", "This is Collection C") .waitForElementVisible(".description-row button.description-button", short_wait) .frame(null) .execute("window.Meteor.logout()") // Log in as Bob .loginDevAccount(devNameBob) .url(browser.launch_url + "/grain/" + grainIdA) .grainFrame() .waitForElementVisible(".description-row p", short_wait) .assert.containsText(".description-row p", "This is Collection A") .waitForElementVisible(".description-row button.description-button", short_wait) .waitForElementVisible("table.grain-list-table>tbody>tr.add-grain>td>button", short_wait) .waitForElementVisible("table.grain-list-table>tbody tr:nth-child(2).grain", short_wait) .assert.containsText("table.grain-list-table>tbody tr:nth-child(2).grain td>button", "Collection B") .click("table.grain-list-table>tbody tr:nth-child(2).grain .click-to-go") .frame(null) .grainFrame(grainIdB) .waitForElementVisible(".description-row p", short_wait) 
.assert.containsText(".description-row p", "This is Collection B") .waitForElementVisible(".description-row button.description-button", short_wait) // As Bob, add collection A to collection B, creating a cycle of references. .waitForElementVisible("table.grain
{ "pile_set_name": "Github" }
// Copyright (c) 2012-2015 Ugorji Nwoke. All rights reserved. // Use of this source code is governed by a MIT license found in the LICENSE file. /* MSGPACK Msgpack-c implementation powers the c, c++, python, ruby, etc libraries. We need to maintain compatibility with it and how it encodes integer values without caring about the type. For compatibility with behaviour of msgpack-c reference implementation: - Go intX (>0) and uintX IS ENCODED AS msgpack +ve fixnum, unsigned - Go intX (<0) IS ENCODED AS msgpack -ve fixnum, signed */ package codec import ( "fmt" "io" "math" "net/rpc" "reflect" ) const ( mpPosFixNumMin byte = 0x00 mpPosFixNumMax = 0x7f mpFixMapMin = 0x80 mpFixMapMax = 0x8f mpFixArrayMin = 0x90 mpFixArrayMax = 0x9f mpFixStrMin = 0xa0 mpFixStrMax = 0xbf mpNil = 0xc0 _ = 0xc1 mpFalse = 0xc2 mpTrue = 0xc3 mpFloat = 0xca mpDouble = 0xcb mpUint8 = 0xcc mpUint16 = 0xcd mpUint32 = 0xce mpUint64 = 0xcf mpInt8 = 0xd0 mpInt16 = 0xd1 mpInt32 = 0xd2 mpInt64 = 0xd3 // extensions below mpBin8 = 0xc4 mpBin16 = 0xc5 mpBin32 = 0xc6 mpExt8 = 0xc7 mpExt16 = 0xc8 mpExt32 = 0xc9 mpFixExt1 = 0xd4 mpFixExt2 = 0xd5 mpFixExt4 = 0xd6 mpFixExt8 = 0xd7 mpFixExt16 = 0xd8 mpStr8 = 0xd9 // new mpStr16 = 0xda mpStr32 = 0xdb mpArray16 = 0xdc mpArray32 = 0xdd mpMap16 = 0xde mpMap32 = 0xdf mpNegFixNumMin = 0xe0 mpNegFixNumMax = 0xff ) // MsgpackSpecRpcMultiArgs is a special type which signifies to the MsgpackSpecRpcCodec // that the backend RPC service takes multiple arguments, which have been arranged // in sequence in the slice. // // The Codec then passes it AS-IS to the rpc service (without wrapping it in an // array of 1 element). type MsgpackSpecRpcMultiArgs []interface{} // A MsgpackContainer type specifies the different types of msgpackContainers. 
type msgpackContainerType struct { fixCutoff int bFixMin, b8, b16, b32 byte hasFixMin, has8, has8Always bool } var ( msgpackContainerStr = msgpackContainerType{32, mpFixStrMin, mpStr8, mpStr16, mpStr32, true, true, false} msgpackContainerBin = msgpackContainerType{0, 0, mpBin8, mpBin16, mpBin32, false, true, true} msgpackContainerList = msgpackContainerType{16, mpFixArrayMin, 0, mpArray16, mpArray32, true, false, false} msgpackContainerMap = msgpackContainerType{16, mpFixMapMin, 0, mpMap16, mpMap32, true, false, false} ) //--------------------------------------------- type msgpackEncDriver struct { noBuiltInTypes encNoSeparator e *Encoder w encWriter h *MsgpackHandle x [8]byte } func (e *msgpackEncDriver) EncodeNil() { e.w.writen1(mpNil) } func (e *msgpackEncDriver) EncodeInt(i int64) { if i >= 0 { e.EncodeUint(uint64(i)) } else if i >= -32 { e.w.writen1(byte(i)) } else if i >= math.MinInt8 { e.w.writen2(mpInt8, byte(i)) } else if i >= math.MinInt16 { e.w.writen1(mpInt16) bigenHelper{e.x[:2], e.w}.writeUint16(uint16(i)) } else if i >= math.MinInt32 { e.w.writen1(mpInt32) bigenHelper{e.x[:4], e.w}.writeUint32(uint32(i)) } else { e.w.writen1(mpInt64) bigenHelper{e.x[:8], e.w}.writeUint64(uint64(i)) } } func (e *msgpackEncDriver) EncodeUint(i uint64) { if i <= math.MaxInt8 { e.w.writen1(byte(i)) } else if i <= math.MaxUint8 { e.w.writen2(mpUint8, byte(i)) } else if i <= math.MaxUint16 { e.w.writen1(mpUint16) bigenHelper{e.x[:2], e.w}.writeUint16(uint16(i)) } else if i <= math.MaxUint32 { e.w.writen1(mpUint32) bigenHelper{e.x[:4], e.w}.writeUint32(uint32(i)) } else { e.w.writen1(mpUint64) bigenHelper{e.x[:8], e.w}.writeUint64(uint64(i)) } } func (e *msgpackEncDriver) EncodeBool(b bool) { if b { e.w.writen1(mpTrue) } else { e.w.writen1(mpFalse) } } func (e *msgpackEncDriver) EncodeFloat32(f float32) { e.w.writen1(mpFloat) bigenHelper{e.x[:4], e.w}.writeUint32(math.Float32bits(f)) } func (e *msgpackEncDriver) EncodeFloat64(f float64) { e.w.writen1(mpDouble) 
bigenHelper{e.x[:8], e.w}.writeUint64(math.Float64bits(f)) } func (e *msgpackEncDriver) EncodeExt(v interface{}, xtag uint64, ext Ext, _ *Encoder) { bs := ext.WriteExt(v) if bs == nil { e.EncodeNil() return } if e.h.WriteExt { e.encodeExtPreamble(uint8(xtag), len(bs)) e.w.writeb(bs) } else { e.EncodeStringBytes(c_RAW, bs) } } func (e *msgpackEncDriver) EncodeRawExt(re *RawExt, _ *Encoder) { e.encodeExtPreamble(uint8(re.Tag), len(re.Data)) e.w.writeb(re.Data) } func (e *msgpackEncDriver) encodeExtPreamble(xtag byte, l int) { if l == 1 { e.w.writen2(mpFixExt1, xtag) } else if l == 2 { e.w.writen2(mpFixExt2, xtag) } else if l == 4 { e.w.writen2(mpFixExt4, xtag) } else if l == 8 { e.w.writen2(mpFixExt8, xtag) } else if l == 16 { e.w.writen2(mpFixExt16, xtag) }
{ "pile_set_name": "Github" }
define( [ "./core", "./core/toType", "./var/rcheckableType", "./var/isFunction", "./core/init", "./traversing", // filter "./attributes/prop" ], function( jQuery, toType, rcheckableType, isFunction ) { "use strict"; var rbracket = /\[\]$/, rCRLF = /\r?\n/g, rsubmitterTypes = /^(?:submit|button|image|reset|file)$/i, rsubmittable = /^(?:input|select|textarea|keygen)/i; function buildParams( prefix, obj, traditional, add ) { var name; if ( Array.isArray( obj ) ) { // Serialize array item. jQuery.each( obj, function( i, v ) { if ( traditional || rbracket.test( prefix ) ) { // Treat each array item as a scalar. add( prefix, v ); } else { // Item is non-scalar (array or object), encode its numeric index. buildParams( prefix + "[" + ( typeof v === "object" && v != null ? i : "" ) + "]", v, traditional, add ); } } ); } else if ( !traditional && toType( obj ) === "object" ) { // Serialize object item. for ( name in obj ) { buildParams( prefix + "[" + name + "]", obj[ name ], traditional, add ); } } else { // Serialize scalar item. add( prefix, obj ); } } // Serialize an array of form elements or a set of // key/values into a query string jQuery.param = function( a, traditional ) { var prefix, s = [], add = function( key, valueOrFunction ) { // If value is a function, invoke it and use its return value var value = isFunction( valueOrFunction ) ? valueOrFunction() : valueOrFunction; s[ s.length ] = encodeURIComponent( key ) + "=" + encodeURIComponent( value == null ? "" : value ); }; if ( a == null ) { return ""; } // If an array was passed in, assume that it is an array of form elements. if ( Array.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) { // Serialize the form elements jQuery.each( a, function() { add( this.name, this.value ); } ); } else { // If traditional, encode the "old" way (the way 1.3.2 or older // did it), otherwise encode params recursively. 
for ( prefix in a ) { buildParams( prefix, a[ prefix ], traditional, add ); } } // Return the resulting serialization return s.join( "&" ); }; jQuery.fn.extend( { serialize: function() { return jQuery.param( this.serializeArray() ); }, serializeArray: function() { return this.map( function() { // Can add propHook for "elements" to filter or add form elements var elements = jQuery.prop( this, "elements" ); return elements ? jQuery.makeArray( elements ) : this; } ) .filter( function() { var type = this.type; // Use .is( ":disabled" ) so that fieldset[disabled] works return this.name && !jQuery( this ).is( ":disabled" ) && rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) && ( this.checked || !rcheckableType.test( type ) ); } ) .map( function( i, elem ) { var val = jQuery( this ).val(); if ( val == null ) { return null; } if ( Array.isArray( val ) ) { return jQuery.map( val, function( val ) { return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; } ); } return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; } ).get(); } } ); return jQuery; } );
{ "pile_set_name": "Github" }
#pragma once #include <util/generic/vector.h> #include <util/generic/stroka.h> #include "synchain.h" #include "factfields.h" #include "factgroup.h" #include "tomitaitemsholder.h" #include "normalization.h" typedef ymap< Stroka, yvector<CFactFields> > CFactSubsets; class CCommonGrammarInterpretation: public CTomitaItemsHolder { public: CCommonGrammarInterpretation(const CWordVector& Words, const CWorkGrammar& gram, yvector<CWordSequence*>& foundFacts, const yvector<SWordHomonymNum>& clauseWords, CReferenceSearcher* RefSearcher, size_t factCountConstraint = 128); ~CCommonGrammarInterpretation(); virtual void Run(bool allowAmbiguity=false); bool AddFactField(SReduceConstraints* rReduceConstraints) const; void AddTextFactField(CFactSynGroup* pItem, const fact_field_reference_t& rFieldRef) const; void AddTextFactFieldValue(CTextWS& ws, const fact_field_descr_t fieldDescr) const; void AddFioFactField(CFactSynGroup* pItem, const fact_field_reference_t& rFieldRef) const; void AddDateFactField(CFactSynGroup* pItem, const fact_field_reference_t& rFieldRef) const; void AddBoolFactField(CFactSynGroup* pItem, const fact_field_reference_t& rFieldRef) const; void GetFioWSForFactField(CFactSynGroup* pItem, const fact_field_reference_t& rFieldRef, CFioWS& newFioWS) const; void AddCommonFactField(CFactSynGroup* pItem, const fact_field_reference_t& rFieldRef, CWordsPair& rValueWS) const; //ф-ция берет заданное поле из уже построенного другой грамматикой факта bool TryCutOutFactField(CFactSynGroup* pItem, const fact_field_reference_t& rFieldRef) const; //ф-ция разбивает все можество фактов на подмножества по имени факта void DivideFactsSetIntoSubsets(CFactSubsets& rFactSubsets, const yvector<CFactFields>& vec_facts) const; void AddWordsInfo(const CFactSynGroup* rItem, CWordSequence& fact_group, bool bArtificial = false) const; void AddFioFromFactToSequence(CFactSynGroup* pNewGroup); void AddWordsInfoToCompanyPostSequence(EWordSequenceType e_WSType, int iWStart, int iWEnd, 
CWordSequence& fdo_froup); virtual bool MergeFactGroups(yvector<CFactFields>& vec_facts1, const yvector<CFactFields>& vec_facts2, const TGrammarBunch& child_grammems, CWordSequenceWithAntecedent::EEllipses eEllipseType); bool MergeEqualNameFacts(yvector<CFactFields>& vec_facts1, yvector<CFactFields>& vec_facts2) const; using CTomitaItemsHolder::CreateNewItem; // to avoid hiding of CTomitaItemsHolder::CreateNewItem(size_t, size_t) virtual CInputItem* CreateNewItem(size_t SymbolNo, const CRuleAgreement& agreements, size_t iFirstItemNo, size_t iLastItemNo, size_t iSynMainItemNo, yvector<CInputItem*>& children); void CheckTextFieldAlgorithms(CFactsWS* pFacts); void MergeTwoFacts(CFactFields& rFact1, CFactFields& rFact2) const; void MiddleInterpretation(yvector<CFactFields>& facts, const yvector<CInputItem*>& children) const; void MiddleInterpretationExchangeMerge(yvector<CFactFields>& facts, const Stroka& rKeyFactName) const; bool InterpretationExchangeMerge(CFactFields& fact1, CFactFields& fact2) const; CWordSequenceWithAntecedent::EEllipses HasGroupEllipse(CFactSynGroup* pNewGroup) const; //ограничение на кол-во фактов в предложении void CheckFactCountConstraintInSent(); //(если несклоняемая или ненайденная в морфологии фамилия - одиночная или содержит только инициалы) и (должность стоит в косвенном падеже), //то не строить факт bool CheckConstraintForLonelyFIO(yvector<CFactFields>& facts); void SetGenFlag(CFactFields& occur); protected: void AdjustChildHomonyms(CFactSynGroup* pFactGroup); void RefillFactFields(CFactSynGroup* pFactGroup); bool RefillFioFactField(CFactSynGroup* pFactGroup, Stroka factName, Stroka factField, CFioWS& value); bool RefillTextFactField(CFactSynGroup* pFactGroup, Stroka factName, Stroka factField, CTextWS& value); bool IsFactIntersection(yvector<CFactFields>& vec_facts1, yvector<CFactFields>& vec_facts2) const; void SimpleFactsMerge(yvector<CFactFields>& vec_facts1, const yvector<CFactFields>& vec_facts2); bool 
IsEqualCoordinationMembers(const yvector<CFactFields>& vec_facts1, const yvector<CFactFields>& vec_facts2) const; bool IsOneToMany(const yvector<CFactFields>& vec_facts1, const yvector<CFactFields>& vec_facts2) const; void BuildFactWS(const COccurrence& occur, CWordSequence* pFactSequence); void LogRule(size_t SymbolNo, size_t iFirstItemNo, size_t iLastItemNo, size_t iSynMainItemNo, const yvector<CInputItem*>& children, const Stroka& msg = Stroka()) const; Stroka GetAgreementStr(const SRuleExternalInformationAndAgreements& agr, ECharset encoding) const; bool FillFactFieldFromFactWS(const fact_field_reference_t& fact_field, const CFactsWS* factWS, yvector<CFactFields>& newFacts) const; bool HasAllNecessaryFields(CFactFields& fact, const CWordsPair& artificialPair) const; bool CheckNecessaryFields(yvector<CFactFields>& newFacts, const CWordsPair& artificialPair) const; bool TrimTree(CFactsWS* pFactWordSequence, CFactSynGroup* pGroup, yset<int>& excludedWords, CWordsPair& newPair) const; bool IsTrimChild(const yvector<CInputItem*>& rChildren) const; void ComparePair(int& iL, int& iR, const CWordsPair* rSPair) const; void FieldsConcatenation(CFactFields& rFact1, CFactFields& rFact2, const Stroka& sFieldName, EFactFieldType eFieldType) const; bool CheckUnwantedStatusAlg(const CFactFields& rStatusFact) const; bool CheckShortNameAlg(const CFactFields& rShortNameFact) const; void CheckDelPronounAlg(CFactFields& rPronounFact); void CheckQuotedOrgNormAlg(CFactFields& rOrgFact); void CheckNotNorm(CFactFields& rFact); void CheckCapitalized(CFactFields& rFact); void CheckCommonAlg(CFactFields& rFact, EFactAlgorithm eAlg, yvector<CTextWS*>& rTWS); void NormalizeSequence(CTextWS* sequence, const CFactSynGroup* parent, const TGramBitSet& grammems) const; void ReplaceLemmaAlways(CTextWS* sequence, const CFactSynGroup* parent) const; void NormalizeFacts(CFactsWS* pFactWordSequence, const CF
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <!-- Foundation --> <window type="window" id="1119"> <defaultcontrol always="true">9501</defaultcontrol> <controls> <include content="DefHubWindow"> <param name="list" value="skinshortcuts-group-x1119" /> </include> </controls> </window>
{ "pile_set_name": "Github" }
<?php class CoverageNamespacedFunctionTest extends PHPUnit_Framework_TestCase { /** * @covers foo\func() */ public function testFunc() { foo\func(); } }
{ "pile_set_name": "Github" }
#第二次课总结: ##编程中为什么会有丰富化的数据格式? + 编程操作的对象是多种多样的,如名字、数值、属性、关系等。 + 数据格式有数值、字符串、布尔、null和undefined、对象、数组、函数等。 + 我们编程的目的是处理数据,得到结果。这个过程需要时间,而多样的数据格式可供选择与使用,可以缩短时间,节省大量内存,提高运算的效率。 + 丰富的数据格式因为拆分的细,所以使用很灵活。即使碰到复杂的数据格式,也可以用基础类型通过搭积木的方式组合出来。 + 多样的数据格式方便理解应用。多人协同工作的时候有标准可依、互相之间底层共识相同,大大提高协同工作效率。 ###个人静态网页 https://sam-tiao.github.io/diaozhicong/
{ "pile_set_name": "Github" }
<Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>netcoreapp3.0</TargetFramework> <RuntimeFrameworkVersion>3.0.3</RuntimeFrameworkVersion> <LangVersion>8.0</LangVersion> <AssemblyName>AgileObjects.AgileMapper.UnitTests.NetCore3</AssemblyName> <RootNamespace>AgileObjects.AgileMapper.UnitTests.NetCore3</RootNamespace> <TreatWarningsAsErrors>true</TreatWarningsAsErrors> <WarningsAsErrors></WarningsAsErrors> <NoWarn>0649;1701;1702</NoWarn> <GenerateRuntimeConfigurationFiles>true</GenerateRuntimeConfigurationFiles> <IsPackable>false</IsPackable> </PropertyGroup> <PropertyGroup> <DefineConstants>$(DefineConstants);NET_STANDARD;NET_STANDARD_2;TRACE;FEATURE_SERIALIZATION;FEATURE_DYNAMIC;FEATURE_DYNAMIC_ROOT_SOURCE;FEATURE_ISET;FEATURE_STRINGSPLIT_OPTIONS</DefineConstants> </PropertyGroup> <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'"> <DefineConstants>$(DefineConstants);DEBUG</DefineConstants> </PropertyGroup> <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'"> <DefineConstants>$(DefineConstants);RELEASE</DefineConstants> </PropertyGroup> <ItemGroup> <Compile Include="..\AgileMapper.UnitTests\**\*.cs" Exclude="..\AgileMapper.UnitTests\obj\**\*.cs;..\AgileMapper.UnitTests\Properties\*.cs;..\AgileMapper.UnitTests\WhenUsingPartialTrust.cs;"> <Link>%(RecursiveDir)%(Filename)%(Extension)</Link> </Compile> </ItemGroup> <ItemGroup> <PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="3.0.3" /> <PackageReference Include="Microsoft.Extensions.Primitives" Version="3.0.3" /> <PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.5.0" /> <PackageReference Include="System.Data.Common" Version="4.3.0" /> <PackageReference Include="xunit" Version="2.4.1" /> <PackageReference Include="xunit.runner.visualstudio" Version="2.4.3"> <PrivateAssets>all</PrivateAssets> <IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets> </PackageReference> </ItemGroup> <ItemGroup> 
<ProjectReference Include="..\AgileMapper.UnitTests.Common\AgileMapper.UnitTests.Common.csproj" /> <ProjectReference Include="..\AgileMapper.UnitTests.MoreTestClasses\AgileMapper.UnitTests.MoreTestClasses.csproj" /> </ItemGroup> </Project>
{ "pile_set_name": "Github" }
function UnityProgress (dom, args) { this.progress = 0.0; this.message = ""; this.dom = dom; var parent = dom.parentNode; var background = document.createElement("div"); if (args.backgroundcolor) background.style.background = "#"+args.backgroundcolor; else background.style.background = "#ffffff"; background.style.position = "absolute"; parent.appendChild(background); this.background = background; var logoImage = document.createElement("img"); if (args.logoimage) logoImage.src = args.logoimage; else logoImage.src = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHoAAAAoCAYAAAAxH+4YAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAADIxJREFUeNrsXAtUlHUWv/MChhnQ4aUDIggC8vI1KZKuYam5KVumuJ2go6cS02x7mFYm5qNt4WRKbZtp57TVHsvHOZ21UmKBFRFkBUkNeSMLgoDyhhlmeMyw9/7ho48ENGRgZOee84fv/fr97+/+7v3+3wi6urqAZxbYgrFNxeaCTQhmu5/MgO0GtmJs6djauRUCHtDu2B7HpjA/rzFhDdhOYivlA+2L7Y+meLU6nU5o0OsF1jKZ3ozdkOwYtjwx/pFiW2lS4Gq1wvPnzzuXlZV5NDY1uctlsivrIyOvmDEbkhG2peKemGwxmleiVqtFP1+5Yl9QUOBy69Ytt6bmJhdc5qBuUUOrVgsuSmUbbmYGemjGdBcB7T0aZ8/Pz7e9evXqxPLycncE1+fmzZuK1tZWQVtbG3R0dACFFJFICAKBEFrUarecnBwbf3//FjNuQzJvAtpxJM/4988/n5afXzC/pvaWo0atsSSPbW/vFocCgYD9Fwm7xb5eb0DA9Qh0i/RiZqbSDPSQbTwBLRqps2HMlX733XfLNRqNXK9HbYW4dhm6QG/QM1AtJBIYP348eTB0Yy5g/5HGIS8vbxIuKDRjNiSzEo/k2Q59+qmquaVF3qbTgcFgAJFYDCi0YMKECeDm7g4B/gGQ/p90yMjIAIlYAgb0ZpFIjNt2QUVFxaTOzk6BWCzuMuP2281oQNfX10vSz593pWmJRGK4ceOGdWpa2gKKuy4uLuDj4wPe3t6sEchOTk5QXFwMH370EesE7R1tMG4cendzM+iRymtqayYUFhbK/fz8zPRtKkBXVt6wemfnO6uqqqumEiWTwJqAQD737LMM2MmTJ4O9gwNYWVn9UtJBcA8cOADNzU0sRgcEBEBoaCjs27ePebXeYLBOTEiYjEDnmMKDq6ystEpNTXXm5tesWVPyfwV0XV2dZMfbO1YXFRd76pCiCWQHB3uIjIyERQ8/POB+8fHxkJycDNZSKWhxv9VhqyEkZBF8cvAggt+MOYIFoCCj0qxJAE0gR0dHP8MDercpAy0cbpDf2LZtNaZOnlqmpttALpfDjh1Rg4Lc2NgIu3btgqqqKigtK4OZM2fCsmW/R+oex2herdEAxmeoqq5WIpWL4T4x8vrLly8rxpRHYx5s+eabb65BdexBntzY1Ag2chvYvXs3PDwIyGQxMTFMgLHUSiSCjRs3gqWlJZufNWsWfPPNNyBD0WZjY2OflZWlCFm0qMaUAUb2WnHx4kUVTS9fvjwRO27amACaQN6yZcsqvDmP2ro6aEIPdXR0hD/HvgtLliwZdN/S0lI4/NlnIESADZhyPYtxfMGCBb3rFy9eDBYWFizFwiZOTEycZApAo9ZoI
BD7W1dbW2tyL4buGWi8KYv1658PO5Oc7EklSzIC+VOMrX94/PE77h8VFQX12DnIk11dXWH79u191k+bNo0JuOzsbBBjynXhwgV3XHxptB8cemmDKXjqiADd1NQkjggPX5OQlORJ3kiVLWdnZ/jqq6/uSNdk6enpcOz4cZBaWwPF9E1I2e7u7n0zfVTmDzzwAAPawtKiC70ojxS6UDiwvHj11VcXl5WVKWk6ODg4d+vWrVncuoSEBOXBgwcXc/PffvvtP/jLnnrqqTRklMojR474Y3z1wDRRgYylxFy/6oknnkjdsGFD7kDHef/991V4T36oNZx5ok315JNPetC0m5tbVU/hqN9r4yw8PDwMn4cVdz3DoejF90LX69auXZV05oynEAEWI71SfZpAwQcEGEvZPJW2BPwduTInevDJkycZYFQl+x3S9aYXX+z3XETfX375JcybOzf36aefvuNN04PEkMAerr+/f5/tGxoapNy6/pYheApU04p+7lf5xRdfhAYFBVWRN/d3HMwOrH69DJ1BQY0DGsWp9lfb9AGaOhDqHD9uHjvdiVHz6JqaGouIiIjVqWlpUwk2qnCRNxNop+PiGIC/AAvsxUQ3xj2g94BN3iqTWbPy55bXXgNbW9t+zxcSEgIBgYFq7P3n7OztOwbz5ns1DpT+DFNFKwTCA4HO6m89Xr8OGamEPJq2pWWYOTQoFAoaBEBsR3RfeerUKU6feJAyx+U67hgpKSm9ncDX1zeXv27Egda2toradLp2omuxRALk0dADtLhnxApN84EV9DTOqwXdFTPcTsQ6wnjFwPqFFLhELBbR/iNRAiVw0JOyVq1alevk5KRFKo3kOgB57UD79dBwFlL1M5zX0nH27t3bJ5a/9957DdzxKETw6RsFrT9PBwxbEWZIQCvs7Nr/9skn8ai0O8+np0/nQCWqDnnoIXB3c2Mxl/Nc7gUFz8lZB7mI9I40C3rMkakqtnDhwl86A8/OpqRAXkGBdP/+/Y88umzZ18YG+syZMx/1uV/0yME8fQhCLvfs2bPzabqoqEjJLaecm0IEL1bnjCrQmM/qPTw8NDHR0f9
{ "pile_set_name": "Github" }
#!/usr/bin/env bash set -euo pipefail IFS=$'\n\t' cd "$(dirname "$0")/.." source scripts/utils.sh if [[ -x "$(command -v clang-format-9)" ]]; then CF=clang-format-9 else CF=clang-format clang-format --version | grep " 9." > /dev/null || ( print_warning "WARNING: MediaElch requires clang-format version 9") fi print_important "Format all source files using ${CF}" find src -type f \( -name "*.cpp" -o -name "*.h" -o -name "*.hpp" \) -exec ${CF} -i -style=file {} \+ print_important "Format all test files using ${CF}" find test -type f \( -name "*.cpp" -o -name "*.h" -o -name "*.hpp" \) -exec ${CF} -i -style=file {} \+ print_success "Done"
{ "pile_set_name": "Github" }
/// <reference path="modernizr-2.6.2.js" /> /// <reference path="jquery-1.9.1.js" /> /// <reference path="knockout-2.1.0.debug.js" /> /// <reference path="q.js" /> /// <reference path="toastr.js" /> /// <reference path="require.js" /> /// <reference path="breeze.debug.js" />
{ "pile_set_name": "Github" }
/*
Copyright The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Code generated by client-gen. DO NOT EDIT.

package fake

import (
	"context"

	v1beta1 "k8s.io/api/authentication/v1beta1"
	v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	schema "k8s.io/apimachinery/pkg/runtime/schema"
	testing "k8s.io/client-go/testing"
)

// FakeTokenReviews implements TokenReviewInterface
type FakeTokenReviews struct {
	Fake *FakeAuthenticationV1beta1
}

// tokenreviewsResource is the GroupVersionResource used to key recorded
// actions for TokenReview objects in the fake clientset.
var tokenreviewsResource = schema.GroupVersionResource{Group: "authentication.k8s.io", Version: "v1beta1", Resource: "tokenreviews"}

// tokenreviewsKind is the GroupVersionKind for TokenReview objects.
var tokenreviewsKind = schema.GroupVersionKind{Group: "authentication.k8s.io", Version: "v1beta1", Kind: "TokenReview"}

// Create takes the representation of a tokenReview and creates it. Returns the server's representation of the tokenReview, and an error, if there is any.
func (c *FakeTokenReviews) Create(ctx context.Context, tokenReview *v1beta1.TokenReview, opts v1.CreateOptions) (result *v1beta1.TokenReview, err error) {
	// Record a root-scoped (non-namespaced) create action with the fake
	// clientset and take whatever object its tracker produced.
	obj, err := c.Fake.
		Invokes(testing.NewRootCreateAction(tokenreviewsResource, tokenReview), &v1beta1.TokenReview{})
	// No object back from the tracker: return the (possibly nil) error only.
	if obj == nil {
		return nil, err
	}
	return obj.(*v1beta1.TokenReview), err
}
{ "pile_set_name": "Github" }
[Icon Data] DisplayName=New DisplayName[af]=Nuut DisplayName[am]=አዲስ DisplayName[ar]=جديد DisplayName[az]=Yeni DisplayName[be]=Новы DisplayName[be@latin]=Novy DisplayName[bg]=Ново DisplayName[bn]=নতুন DisplayName[bn_IN]=নতুন DisplayName[bs]=Novo DisplayName[ca]=Nou DisplayName[cs]=Nové DisplayName[cy]=Newydd DisplayName[da]=Ny DisplayName[de]=Neu DisplayName[dz]=གསརཔ། DisplayName[el]=Νέο DisplayName[en_CA]=New DisplayName[en_GB]=New DisplayName[es]=Nuevo DisplayName[et]=Uus DisplayName[eu]=Berria DisplayName[fa]=جدید DisplayName[fi]=Uusi DisplayName[fr]=Nouveau DisplayName[fur]=Gnûf DisplayName[ga]=Nua DisplayName[gl]=Novo DisplayName[gu]=નવું DisplayName[he]=חדש DisplayName[hi]=नया DisplayName[hr]=Novi DisplayName[hu]=Új DisplayName[id]=Baru DisplayName[is]=Nýtt DisplayName[it]=Nuovo DisplayName[ja]=新規 DisplayName[ka]=ახალი DisplayName[kn]=ಹೊಸ DisplayName[ko]=새 것 DisplayName[li]=Nuuj DisplayName[lt]=Nauja DisplayName[lv]=Jauns DisplayName[mg]=Vaovao DisplayName[mk]=Ново DisplayName[ml]=പുതിയ DisplayName[mn]=Шинэ DisplayName[mr]=नविन DisplayName[ms]=Baru DisplayName[nb]=Ny DisplayName[ne]=नयाँ DisplayName[nl]=Nieuw DisplayName[nn]=Ny DisplayName[oc]=Novèl DisplayName[or]=ନୂତନ DisplayName[pa]=ਨਵਾਂ DisplayName[pl]=Nowy DisplayName[pt]=Novo DisplayName[pt_BR]=Novo DisplayName[ro]=Nou DisplayName[ru]=Новый DisplayName[rw]=Gishya DisplayName[si]=නව DisplayName[sk]=Nový DisplayName[sl]=Novo DisplayName[sq]=E re DisplayName[sr]=Ново DisplayName[sr@Latn]=Novo DisplayName[sv]=Ny DisplayName[ta]=புதிய DisplayName[te]=కొత్త DisplayName[th]=ใหม่ DisplayName[tk]=Täze DisplayName[tr]=Yeni DisplayName[uk]=Нове DisplayName[vi]=Mới DisplayName[wa]=Novea DisplayName[xh]=Entsha DisplayName[zh_CN]=新 DisplayName[zh_HK]=嶄新 DisplayName[zh_TW]=嶄新
{ "pile_set_name": "Github" }
.\" Automatically generated by Pod::Man 2.27 (Pod::Simple 3.28) .\" .\" Standard preamble: .\" ======================================================================== .de Sp \" Vertical space (when we can't use .PP) .if t .sp .5v .if n .sp .. .de Vb \" Begin verbatim text .ft CW .nf .ne \\$1 .. .de Ve \" End verbatim text .ft R .fi .. .\" Set up some character translations and predefined strings. \*(-- will .\" give an unbreakable dash, \*(PI will give pi, \*(L" will give a left .\" double quote, and \*(R" will give a right double quote. \*(C+ will .\" give a nicer C++. Capital omega is used to do unbreakable dashes and .\" therefore won't be available. \*(C` and \*(C' expand to `' in nroff, .\" nothing in troff, for use with C<>. .tr \(*W- .ds C+ C\v'-.1v'\h'-1p'\s-2+\h'-1p'+\s0\v'.1v'\h'-1p' .ie n \{\ . ds -- \(*W- . ds PI pi . if (\n(.H=4u)&(1m=24u) .ds -- \(*W\h'-12u'\(*W\h'-12u'-\" diablo 10 pitch . if (\n(.H=4u)&(1m=20u) .ds -- \(*W\h'-12u'\(*W\h'-8u'-\" diablo 12 pitch . ds L" "" . ds R" "" . ds C` "" . ds C' "" 'br\} .el\{\ . ds -- \|\(em\| . ds PI \(*p . ds L" `` . ds R" '' . ds C` . ds C' 'br\} .\" .\" Escape single quotes in literal strings from groff's Unicode transform. .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" .\" If the F register is turned on, we'll generate index entries on stderr for .\" titles (.TH), headers (.SH), subsections (.SS), items (.Ip), and index .\" entries marked with X<> in POD. Of course, you'll have to process the .\" output yourself in some meaningful fashion. .\" .\" Avoid warning from groff about undefined register 'F'. .de IX .. .nr rF 0 .if \n(.g .if rF .nr rF 1 .if (\n(rF:(\n(.g==0)) \{ . if \nF \{ . de IX . tm Index:\\$1\t\\n%\t"\\$2" .. . if !\nF==2 \{ . nr % 0 . nr F 2 . \} . \} .\} .rr rF .\" .\" Accent mark definitions (@(#)ms.acc 1.5 88/02/08 SMI; from UCB 4.2). .\" Fear. Run. Save yourself. No user-serviceable parts. . \" fudge factors for nroff and troff .if n \{\ . ds #H 0 . ds #V .8m . ds #F .3m . ds #[ \f1 . 
ds #] \fP .\} .if t \{\ . ds #H ((1u-(\\\\n(.fu%2u))*.13m) . ds #V .6m . ds #F 0 . ds #[ \& . ds #] \& .\} . \" simple accents for nroff and troff .if n \{\ . ds ' \& . ds ` \& . ds ^ \& . ds , \& . ds ~ ~ . ds / .\} .if t \{\ . ds ' \\k:\h'-(\\n(.wu*8/10-\*(#H)'\'\h"|\\n:u" . ds ` \\k:\h'-(\\n(.wu*8/10-\*(#H)'\`\h'|\\n:u' . ds ^ \\k:\h'-(\\n(.wu*10/11-\*(#H)'^\h'|\\n:u' . ds , \\k:\h'-(\\n(.wu*8/10)',\h'|\\n:u' . ds ~ \\k:\h'-(\\n(.wu-\*(#H-.1m)'~\h'|\\n:u' . ds / \\k:\h'-(\\n(.wu*8/10-\*(#H)'\z\(sl\h'|\\n:u' .\} . \" troff and (daisy-wheel) nroff accents .ds : \\k:\h'-(\\n(.wu*8/10-\*(#H+.1m+\*(#F)'\v'-\*(#V'\z.\h'.2m+\*(#F'.\h'|\\n:u'\v'\*(#V' .ds 8 \h'\*(#H'\(*b\h'-\*(#H' .ds o \\k:\h'-(\\n(.wu+\w'\(de'u-\*(#H)/2u'\v'-.3n'\*(#[\z\(de\v'.3n'\h'|\\n:u'\*(#] .ds d- \h'\*(#H'\(pd\h'-\w'~'u'\v'-.25m'\f2\(hy\fP\v'.25m'\h'-\*(#H' .ds D- D\\k:\h'-\w'D'u'\v'-.11m'\z\(hy\v'.11m'\h'|\\n:u' .ds th \*(#[\v'.3m'\s+1I\s-1\v'-.3m'\h'-(\w'I'u*2/3)'\s-1o\s+1\*(#] .ds Th \*(#[\s+2I\s-2\h'-\w'I'u*3/5'\v'-.3m'o\v'.3m'\*(#] .ds ae a\h'-(\w'a'u*4/10)'e .ds Ae A\h'-(\w'A'u*4/10)'E . \" corrections for vroff .if v .ds ~ \\k:\h'-(\\n(.wu*9/10-\*(#H)'\s-2\u~\d\s+2\h'|\\n:u' .if v .ds ^ \\k:\h'-(\\n(.wu*10/11-\*(#H)'\v'-.4m'^\v'.4m'\h'|\\n:u' . \" for low resolution devices (crt and lpr) .if \n(.H>23 .if \n(.V>19 \ \{\ . ds : e . ds 8 ss . ds o a . ds d- d\h'-1'\(ga . ds D- D\h'-1'\(hy . ds th \o'bp' . ds Th \o'LP' . ds ae ae . ds Ae AE .\} .rm #[ #] #H #V #F C .\" ======================================================================== .\" .IX Title "SSL_session_reused 3" .TH SSL_session_reused 3 "2017-12-07" "0.9.8y" "OpenSSL" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. 
.if n .ad l .nh .SH "NAME" SSL_session_reused \- query whether a reused session was negotiated during handshake .SH "SYNOPSIS" .IX Header "SYNOPSIS" .Vb 1 \& #include <openssl/ssl.h> \& \& int SSL_session_reused(SSL *ssl); .Ve .SH "DESCRIPTION" .IX Header "DESCRIPTION" Query, whether a reused session was negotiated during the handshake. .SH "NOTES" .IX Header "NOTES" During the negotiation, a client can propose to reuse a session. The server then looks up the session in its cache. If both client and server agree on the session, it
{ "pile_set_name": "Github" }
## \file ## \ingroup tutorial_pyroot_legacy ## \notebook ## This macro displays the Tree data structures ## ## \macro_image ## \macro_code ## ## \author Wim Lavrijsen from ROOT import TCanvas, TPaveLabel, TPaveText, TPavesText, TText from ROOT import TArrow, TLine from ROOT import gROOT, gBenchmark #gROOT.Reset() c1 = TCanvas('c1','Tree Data Structure',200,10,750,940) c1.Range(0,-0.1,1,1.15) gBenchmark.Start('tree') branchcolor = 26 leafcolor = 30 basketcolor = 42 offsetcolor = 43 #title = TPaveLabel(.3,1.05,.8,1.13,c1.GetTitle()) title = TPaveLabel(.3,1.05,.8,1.13,'Tree Data Structure') title.SetFillColor(16) title.Draw() tree = TPaveText(.01,.75,.15,1.00) tree.SetFillColor(18) tree.SetTextAlign(12) tnt = tree.AddText('Tree') tnt.SetTextAlign(22) tnt.SetTextSize(0.030) tree.AddText('fScanField') tree.AddText('fMaxEventLoop') tree.AddText('fMaxVirtualSize') tree.AddText('fEntries') tree.AddText('fDimension') tree.AddText('fSelectedRows') tree.Draw() farm = TPavesText(.01,1.02,.15,1.1,9,'tr') tfarm = farm.AddText('CHAIN') tfarm.SetTextSize(0.024) farm.AddText('Collection') farm.AddText('of Trees') farm.Draw() link = TLine(.15,.92,.80,.92) link.SetLineWidth(2) link.SetLineColor(1) link.Draw() link.DrawLine(.21,.87,.21,.275) link.DrawLine(.23,.87,.23,.375) link.DrawLine(.25,.87,.25,.775) link.DrawLine(.41,.25,.41,-.025) link.DrawLine(.43,.25,.43,.075) link.DrawLine(.45,.25,.45,.175) branch0 = TPaveLabel(.20,.87,.35,.97,'Branch 0') branch0.SetTextSize(0.35) branch0.SetFillColor(branchcolor) branch0.Draw() branch1 = TPaveLabel(.40,.87,.55,.97,'Branch 1') branch1.SetTextSize(0.35) branch1.SetFillColor(branchcolor) branch1.Draw() branch2 = TPaveLabel(.60,.87,.75,.97,'Branch 2') branch2.SetTextSize(0.35) branch2.SetFillColor(branchcolor) branch2.Draw() branch3 = TPaveLabel(.80,.87,.95,.97,'Branch 3') branch3.SetTextSize(0.35) branch3.SetFillColor(branchcolor) branch3.Draw() leaf0 = TPaveLabel(.4,.75,.5,.8,'Leaf 0') leaf0.SetFillColor(leafcolor) leaf0.Draw() leaf1 = 
TPaveLabel(.6,.75,.7,.8,'Leaf 1') leaf1.SetFillColor(leafcolor) leaf1.Draw() leaf2 = TPaveLabel(.8,.75,.9,.8,'Leaf 2') leaf2.SetFillColor(leafcolor) leaf2.Draw() firstevent = TPaveText(.4,.35,.9,.4) firstevent.AddText('First event of each basket') firstevent.AddText('Array of fMaxBaskets Integers') firstevent.SetFillColor(basketcolor) firstevent.Draw() basket0 = TPaveLabel(.4,.25,.5,.3,'Basket 0') basket0.SetFillColor(basketcolor) basket0.Draw() basket1 = TPaveLabel(.6,.25,.7,.3,'Basket 1') basket1.SetFillColor(basketcolor) basket1.Draw() basket2 = TPaveLabel(.8,.25,.9,.3,'Basket 2') basket2.SetFillColor(basketcolor) basket2.Draw() offset = TPaveText(.55,.15,.9,.2) offset.AddText('Offset of events in fBuffer') offset.AddText('Array of fEventOffsetLen Integers') offset.AddText('(if variable length structure)') offset.SetFillColor(offsetcolor) offset.Draw() buffer = TPaveText(.55,.05,.9,.1) buffer.AddText('Basket buffer') buffer.AddText('Array of fBasketSize chars') buffer.SetFillColor(offsetcolor) buffer.Draw() zipbuffer = TPaveText(.55,-.05,.75,.0) zipbuffer.AddText('Basket compressed buffer') zipbuffer.AddText('(if compression)') zipbuffer.SetFillColor(offsetcolor) zipbuffer.Draw() ar1 = TArrow() ar1.SetLineWidth(2) ar1.SetLineColor(1) ar1.SetFillStyle(1001) ar1.SetFillColor(1) ar1.DrawArrow(.21,.275,.39,.275,0.015,'|>') ar1.DrawArrow(.23,.375,.39,.375,0.015,'|>') ar1.DrawArrow(.25,.775,.39,.775,0.015,'|>') ar1.DrawArrow(.50,.775,.59,.775,0.015,'|>') ar1.DrawArrow(.70,.775,.79,.775,0.015,'|>') ar1.DrawArrow(.50,.275,.59,.275,0.015,'|>') ar1.DrawArrow(.70,.275,.79,.275,0.015,'|>') ar1.DrawArrow(.45,.175,.54,.175,0.015,'|>') ar1.DrawArrow(.43,.075,.54,.075,0.015,'|>') ar1.DrawArrow(.41,-.025,.54,-.025,0.015,'|>') ldot = TLine(.95,.92,.99,.92) ldot.SetLineStyle(3) ldot.Draw() ldot.DrawLine(.9,.775,.99,.775) ldot.DrawLine(.9,.275,.99,.275) ldot.DrawLine(.55,.05,.55,0) ldot.DrawLine(.9,.05,.75,0) pname = TText(.46,.21,'fEventOffset') pname.SetTextFont(72) 
pname.SetTextSize(0.018) pname.Draw() pname.DrawText(.44,.11,'fBuffer') pname.DrawText(.42,.01,'fZipBuffer') pname.DrawText(.26,.81,'fLeaves = TObjArray of TLeaf') pname.DrawText(.24,.40,'fBasketEvent') pname.DrawText(.22,.31,'fBaskets = TObjArray of TBasket') pname.DrawText(.20,1.0,'fBranches = TObjArray of TBranch') ntleaf = TPaveText(0.30,.42,.62,.7) ntleaf.SetTextSize(0.014) ntleaf.SetFillColor(leafcolor) ntleaf.SetTextAlign(12) ntleaf.AddText('fLen: number of fixed elements') ntleaf.AddText('fLenType: number of bytes of data type') ntleaf.AddText('fOffset: relative to Leaf0-fAddress') ntleaf.AddText('fNbytesIO: number of bytes used for I/O') ntleaf.AddText('fIsPointer: True if pointer') ntleaf.AddText('fIsRange: True if leaf has a range') ntleaf.AddText('fIsUnsigned: True if unsigned') ntleaf.AddText('*fLeafCount: points to Leaf counter') ntleaf.AddText(' ') ntleaf.AddLine(0,0,0,0) ntleaf.AddText('fName = Leaf name') ntleaf.AddText('fTitle = Leaf type (see Type codes)') ntleaf.Draw() type = TPaveText(.65,.42,.95,.7) type.SetTextAlign(12
{ "pile_set_name": "Github" }
// Copyright 2013-2015 Bowery, Inc.

package prompt

import (
	"os"
	"syscall"
	"unsafe"
)

// Flags to control the terminals mode.
// These correspond to the Win32 ENABLE_* console-mode bits accepted by
// SetConsoleMode; clearing them switches the console to raw-like input.
const (
	echoInputFlag      = 0x0004
	insertModeFlag     = 0x0020
	lineInputFlag      = 0x0002
	mouseInputFlag     = 0x0010
	processedInputFlag = 0x0001
	windowInputFlag    = 0x0008
)

// Error number returned for an invalid handle.
// Matches Win32 ERROR_INVALID_HANDLE; seen when stdin is not a console.
const errnoInvalidHandle = 0x6

// Lazily loaded kernel32 procedures used for console control.
var (
	kernel                     = syscall.NewLazyDLL("kernel32.dll")
	getConsoleScreenBufferInfo = kernel.NewProc("GetConsoleScreenBufferInfo")
	setConsoleMode             = kernel.NewProc("SetConsoleMode")
)

// consoleScreenBufferInfo contains various fields for the terminal.
// Field layout mirrors the Win32 CONSOLE_SCREEN_BUFFER_INFO struct so it can
// be filled in directly by GetConsoleScreenBufferInfo.
type consoleScreenBufferInfo struct {
	size              coord
	cursorPosition    coord
	attributes        uint16
	window            smallRect
	maximumWindowSize coord
}

// coord contains coords for positioning.
type coord struct {
	x int16
	y int16
}

// smallRect contains positions for the window edges.
type smallRect struct {
	left   int16
	top    int16
	right  int16
	bottom int16
}

// terminalSize retrieves the cols/rows for the terminal connected to out.
func terminalSize(out *os.File) (int, int, error) {
	csbi := new(consoleScreenBufferInfo)

	// Win32 convention: a zero return value signals failure, and err then
	// carries the GetLastError value.
	ret, _, err := getConsoleScreenBufferInfo.Call(out.Fd(), uintptr(unsafe.Pointer(csbi)))
	if ret == 0 {
		return 0, 0, err
	}

	// Results are always off by one.
	cols := csbi.window.right - csbi.window.left + 1
	rows := csbi.window.bottom - csbi.window.top + 1

	return int(cols), int(rows), nil
}

// isNotTerminal checks if an error is related to the input not being a terminal.
func isNotTerminal(err error) bool {
	errno, ok := err.(syscall.Errno)
	return ok && errno == errnoInvalidHandle
}

// terminal contains the private fields for a Windows terminal.
type terminal struct {
	supportsEditing bool    // true once raw mode has been enabled on fd
	fd              uintptr // console input handle
	origMode        uint32  // console mode captured before raw mode; restored by Close
}

// newTerminal creates a terminal and sets it to raw input mode.
func newTerminal(in *os.File) (*terminal, error) {
	term := &terminal{fd: in.Fd()}

	// If the console mode cannot be read, `in` is not an interactive
	// console; degrade gracefully instead of failing — supportsEditing
	// stays false and Close becomes a no-op.
	err := syscall.GetConsoleMode(syscall.Handle(term.fd), &term.origMode)
	if err != nil {
		return term, nil
	}
	mode := term.origMode
	term.supportsEditing = true

	// Set new mode flags.
	mode &^= (echoInputFlag | insertModeFlag | lineInputFlag | mouseInputFlag |
		processedInputFlag | windowInputFlag)

	// Zero return means SetConsoleMode failed; report its error.
	ret, _, err := setConsoleMode.Call(term.fd, uintptr(mode))
	if ret == 0 {
		return nil, err
	}

	return term, nil
}

// Close disables the terminals raw input.
func (term *terminal) Close() error {
	if term.supportsEditing {
		// Restore the console mode captured before raw mode was enabled.
		ret, _, err := setConsoleMode.Call(term.fd, uintptr(term.origMode))
		if ret == 0 {
			return err
		}
	}

	return nil
}
{ "pile_set_name": "Github" }
Android Accordion View ====================== Example ------- * git pull * import to eclipse * properties->android uncheck is library * run as android application See [main.xml](https://github.com/hamsterready/android-accordion-view/blob/master/res/layout/main.xml). Screenshot ---------- ![Screenshot](https://raw.github.com/hamsterready/android-accordion-view/master/screenshot.png) License ------- The MIT License Copyright (c) 2011 Sentaca Poland sp. z o.o. / http://sentaca.com/ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
{ "pile_set_name": "Github" }
# coding=utf-8
"""
The Batch Webhooks API Endpoint

Documentation: https://developer.mailchimp.com/documentation/mailchimp/reference/batch-webhooks/
"""
from __future__ import unicode_literals

from mailchimp3.baseapi import BaseApi


class BatchWebhooks(BaseApi):
    """
    Manage webhooks for batch operations.
    """
    def __init__(self, *args, **kwargs):
        """
        Set up the endpoint path and clear the cached webhook id.
        """
        super(BatchWebhooks, self).__init__(*args, **kwargs)
        self.endpoint = 'batch-webhooks'
        self.batch_webhook_id = None

    def create(self, data):
        """
        Configure a webhook that fires whenever any batch request finishes
        processing.

        :param data: The request body parameters
        :type data: :py:class:`dict`
        data = {
            "url": string*
        }
        """
        if 'url' not in data:
            raise KeyError('The batch webhook must have a valid url')
        response = self._mc_client._post(url=self._build_path(), data=data)
        # Cache the id of the newly created webhook when the API returned one.
        self.batch_webhook_id = response['id'] if response is not None else None
        return response

    def all(self, get_all=False, **queryparams):
        """
        List every webhook that has been configured for batches.

        :param get_all: Should the query get all results
        :type get_all: :py:class:`bool`
        :param queryparams: The query string parameters
        queryparams['fields'] = []
        queryparams['exclude_fields'] = []
        queryparams['count'] = integer
        queryparams['offset'] = integer
        """
        self.batch_webhook_id = None
        # Either walk all pages or issue a single GET, with identical arguments.
        fetch = self._iterate if get_all else self._mc_client._get
        return fetch(url=self._build_path(), **queryparams)

    def get(self, batch_webhook_id, **queryparams):
        """
        Fetch a single batch webhook by its id.

        :param batch_webhook_id: The unique id for the batch webhook.
        :type batch_webhook_id: :py:class:`str`
        :param queryparams: The query string parameters
        queryparams['fields'] = []
        queryparams['exclude_fields'] = []
        """
        self.batch_webhook_id = batch_webhook_id
        return self._mc_client._get(url=self._build_path(batch_webhook_id), **queryparams)

    def update(self, batch_webhook_id, data):
        """
        Change an existing webhook that fires whenever any batch request
        finishes processing.

        :param batch_webhook_id: The unique id for the batch webhook.
        :type batch_webhook_id: :py:class:`str`
        :param data: The request body parameters
        :type data: :py:class:`dict`
        data = {
            "url": string*
        }
        """
        self.batch_webhook_id = batch_webhook_id
        if 'url' not in data:
            raise KeyError('The batch webhook must have a valid url')
        return self._mc_client._patch(url=self._build_path(batch_webhook_id), data=data)

    def delete(self, batch_webhook_id):
        """
        Remove a batch webhook; events will no longer be sent to its URL.

        :param batch_webhook_id: The unique id for the batch webhook.
        :type batch_webhook_id: :py:class:`str`
        """
        self.batch_webhook_id = batch_webhook_id
        return self._mc_client._delete(url=self._build_path(batch_webhook_id))
{ "pile_set_name": "Github" }
package com.itcast.ay05

/**
 * Demonstrates collection sorting: natural order, reversed order,
 * and sorting data-class elements by a property.
 */
fun main(args: Array<String>) {
    val names = listOf("z", "b", "d")

    // Natural (ascending) order: b d z
    println(names.sorted())

    // Reversed (descending) order
    println(names.sortedDescending())

    // Sorting objects by a single property.
    val people = listOf(Person("林青霞", 50), Person("张曼玉", 30), Person("柳岩", 70))
    // people.sorted() would not compile: Person has no natural ordering.
    val byAgeAscending = people.sortedBy { it.age }
    // println(byAgeAscending)
    val byAgeDescending = people.sortedByDescending { it.age }
    println(byAgeDescending)
}

/** Simple value holder used by the sorting demo. */
data class Person(var name: String, var age: Int)
{ "pile_set_name": "Github" }
#include <iostream> #include <sophus/test_macros.hpp> #include <sophus/formatstring.hpp> namespace Sophus { namespace { bool testFormatString() { bool passed = true; SOPHUS_TEST_EQUAL(passed, details::FormatString(), std::string()); std::string test_str = "Hello World!"; SOPHUS_TEST_EQUAL(passed, details::FormatString(test_str.c_str()), test_str); SOPHUS_TEST_EQUAL(passed, details::FormatString("Number: %", 5), std::string("Number: 5")); SOPHUS_TEST_EQUAL(passed, details::FormatString("Real: % msg %", 1.5, test_str), std::string("Real: 1.5 msg Hello World!")); SOPHUS_TEST_EQUAL(passed, details::FormatString( "vec: %", Eigen::Vector3f(0.f, 1.f, 1.5f).transpose()), std::string("vec: 0 1 1.5")); SOPHUS_TEST_EQUAL( passed, details::FormatString("Number: %", 1, 2), std::string("Number: 1\nFormat-Warning: There are 1 args unused.")); return passed; } bool testSmokeDetails() { bool passed = true; std::cout << details::pretty(4.2) << std::endl; std::cout << details::pretty(Vector2f(1, 2)) << std::endl; bool dummy = true; details::testFailed(dummy, "dummyFunc", "dummyFile", 99, "This is just a pratice alarm!"); SOPHUS_TEST_EQUAL(passed, dummy, false); double val = transpose(42.0); SOPHUS_TEST_EQUAL(passed, val, 42.0); Matrix<float, 1, 2> row = transpose(Vector2f(1, 7)); Matrix<float, 1, 2> expected_row(1, 7); SOPHUS_TEST_EQUAL(passed, row, expected_row); optional<int> opt(nullopt); SOPHUS_TEST(passed, !opt); return passed; } void runAll() { std::cerr << "Common tests:" << std::endl; bool passed = testFormatString(); passed &= testSmokeDetails(); processTestResult(passed); } } // namespace } // namespace Sophus int main() { Sophus::runAll(); }
{ "pile_set_name": "Github" }
/** ****************************************************************************** * @file stm32f10x_tim.h * @author MCD Application Team * @version V3.5.0 * @date 11-March-2011 * @brief This file contains all the functions prototypes for the TIM firmware * library. ****************************************************************************** * @attention * * THE PRESENT FIRMWARE WHICH IS FOR GUIDANCE ONLY AIMS AT PROVIDING CUSTOMERS * WITH CODING INFORMATION REGARDING THEIR PRODUCTS IN ORDER FOR THEM TO SAVE * TIME. AS A RESULT, STMICROELECTRONICS SHALL NOT BE HELD LIABLE FOR ANY * DIRECT, INDIRECT OR CONSEQUENTIAL DAMAGES WITH RESPECT TO ANY CLAIMS ARISING * FROM THE CONTENT OF SUCH FIRMWARE AND/OR THE USE MADE BY CUSTOMERS OF THE * CODING INFORMATION CONTAINED HEREIN IN CONNECTION WITH THEIR PRODUCTS. * * <h2><center>&copy; COPYRIGHT 2011 STMicroelectronics</center></h2> ****************************************************************************** */ /* Define to prevent recursive inclusion -------------------------------------*/ #ifndef __STM32F10x_TIM_H #define __STM32F10x_TIM_H #ifdef __cplusplus extern "C" { #endif /* Includes ------------------------------------------------------------------*/ #include "stm32f10x.h" /** @addtogroup STM32F10x_StdPeriph_Driver * @{ */ /** @addtogroup TIM * @{ */ /** @defgroup TIM_Exported_Types * @{ */ /** * @brief TIM Time Base Init structure definition * @note This structure is used with all TIMx except for TIM6 and TIM7. */ typedef struct { uint16_t TIM_Prescaler; /*!< Specifies the prescaler value used to divide the TIM clock. This parameter can be a number between 0x0000 and 0xFFFF */ uint16_t TIM_CounterMode; /*!< Specifies the counter mode. This parameter can be a value of @ref TIM_Counter_Mode */ uint16_t TIM_Period; /*!< Specifies the period value to be loaded into the active Auto-Reload Register at the next update event. This parameter must be a number between 0x0000 and 0xFFFF. 
*/ uint16_t TIM_ClockDivision; /*!< Specifies the clock division. This parameter can be a value of @ref TIM_Clock_Division_CKD */ uint8_t TIM_RepetitionCounter; /*!< Specifies the repetition counter value. Each time the RCR downcounter reaches zero, an update event is generated and counting restarts from the RCR value (N). This means in PWM mode that (N+1) corresponds to: - the number of PWM periods in edge-aligned mode - the number of half PWM period in center-aligned mode This parameter must be a number between 0x00 and 0xFF. @note This parameter is valid only for TIM1 and TIM8. */ } TIM_TimeBaseInitTypeDef; /** * @brief TIM Output Compare Init structure definition */ typedef struct { uint16_t TIM_OCMode; /*!< Specifies the TIM mode. This parameter can be a value of @ref TIM_Output_Compare_and_PWM_modes */ uint16_t TIM_OutputState; /*!< Specifies the TIM Output Compare state. This parameter can be a value of @ref TIM_Output_Compare_state */ uint16_t TIM_OutputNState; /*!< Specifies the TIM complementary Output Compare state. This parameter can be a value of @ref TIM_Output_Compare_N_state @note This parameter is valid only for TIM1 and TIM8. */ uint16_t TIM_Pulse; /*!< Specifies the pulse value to be loaded into the Capture Compare Register. This parameter can be a number between 0x0000 and 0xFFFF */ uint16_t TIM_OCPolarity; /*!< Specifies the output polarity. This parameter can be a value of @ref TIM_Output_Compare_Polarity */ uint16_t TIM_OCNPolarity; /*!< Specifies the complementary output polarity. This parameter can be a value of @ref TIM_Output_Compare_N_Polarity @note This parameter is valid only for TIM1 and TIM8. */ uint16_t TIM_OCIdleState; /*!< Specifies the TIM Output Compare pin state during Idle state. This parameter can be a value of @ref TIM_Output_Compare_Idle_State @note This parameter is valid only for TIM1 and TIM8. */ uint16_t TIM_OCNIdleState; /*!< Specifies the TIM Output Compare pin state during Idle state. 
This parameter can be a value of @ref TIM_Output_Compare_N_Idle_State @note This parameter is valid only for TIM1 and TIM8. */ } TIM_OCInitTypeDef; /** * @brief TIM Input Capture Init structure definition */ typedef struct { uint16_t TIM_Channel; /*!< Specifies the TIM channel. This parameter can be a value of @ref TIM_Channel */ uint16_t TIM_ICPolarity; /*!< Specifies the active edge of the input signal. This parameter can be a value of @ref TIM_Input_Capture_Polarity */ uint16_t TIM_ICSelection; /*!< Specifies the input. This parameter can be a value of @ref TIM_Input_Capture_Selection */ uint16_t TIM_ICPrescaler; /*!< Specifies the Input Capture Prescaler. This parameter can be a value of @ref TIM_Input_Capture_Prescaler */ uint16_t TIM_ICFilter; /*!< Specifies the input capture filter. This parameter can be a number between 0x0 and 0xF */ } TIM_ICInitTypeDef; /** * @brief BDTR structure definition * @note This structure is used only with TIM1 and TIM8. */ typedef struct { uint16_t TIM_OSSRState; /*!< Specifies the Off-State selection used in Run mode. This parameter can be a value of @ref OSSR_Off_State_Selection_for_Run_mode_state */ uint16_t TIM_OSSIState; /*!< Specifies the Off-State used in Idle state. This parameter can be a value of @ref OSSI_Off_State_Selection_for_Idle_mode_state */ uint16_t TIM_LOCKLevel; /*!< Specifies the LOCK level parameters. This parameter can be a value of @ref Lock_level */ uint16_t TIM_DeadTime; /*!< Specifies the delay time between the switching-off and the switching-on of the outputs. This parameter can be a number between 0x00 and 0xFF */ uint16_t TIM_Break; /*!< Specifies whether the TIM Break input is enabled or not. This parameter can be a value of @ref Break_Input_enable_disable */ uint16_t TIM_BreakPolarity; /*!< Specifies the TIM Break Input pin polarity. This parameter can be a value of @ref Break_Polarity */ uint16_t TIM_AutomaticOutput; /*!< Specifies whether the TIM Automatic Output feature is enabled or not. 
This parameter can be a value of @ref TIM_AOE_Bit_Set_Reset */ } TIM_BDTRInitTypeDef; /** @defgroup TIM_Exported_constants * @{ */ #define IS_TIM_ALL_PERIPH(PERIPH) (((PERIPH) == TIM1) || \ ((PERIPH) == TIM2) || \ ((PERIPH) == TIM3) || \ ((PERIPH) == TIM4) || \ ((PERIPH) == TIM5) || \ ((PERIPH) == TIM6) ||
{ "pile_set_name": "Github" }
// Copyright 2006-2008 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following // disclaimer in the documentation and/or other materials provided // with the distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived // from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #ifndef GAY_SHORTEST_H_ #define GAY_SHORTEST_H_ namespace v8 { namespace internal { struct PrecomputedShortest { double v; const char* representation; int decimal_point; }; Vector<const PrecomputedShortest> PrecomputedShortestRepresentations(); } } // namespace v8::internal #endif // GAY_SHORTEST_H_
{ "pile_set_name": "Github" }
// go run linux/mksysnum.go -Wall -Werror -static -I/tmp/include /tmp/include/asm/unistd.h // Code generated by the command above; see README.md. DO NOT EDIT. // +build ppc64le,linux package unix const ( SYS_RESTART_SYSCALL = 0 SYS_EXIT = 1 SYS_FORK = 2 SYS_READ = 3 SYS_WRITE = 4 SYS_OPEN = 5 SYS_CLOSE = 6 SYS_WAITPID = 7 SYS_CREAT = 8 SYS_LINK = 9 SYS_UNLINK = 10 SYS_EXECVE = 11 SYS_CHDIR = 12 SYS_TIME = 13 SYS_MKNOD = 14 SYS_CHMOD = 15 SYS_LCHOWN = 16 SYS_BREAK = 17 SYS_OLDSTAT = 18 SYS_LSEEK = 19 SYS_GETPID = 20 SYS_MOUNT = 21 SYS_UMOUNT = 22 SYS_SETUID = 23 SYS_GETUID = 24 SYS_STIME = 25 SYS_PTRACE = 26 SYS_ALARM = 27 SYS_OLDFSTAT = 28 SYS_PAUSE = 29 SYS_UTIME = 30 SYS_STTY = 31 SYS_GTTY = 32 SYS_ACCESS = 33 SYS_NICE = 34 SYS_FTIME = 35 SYS_SYNC = 36 SYS_KILL = 37 SYS_RENAME = 38 SYS_MKDIR = 39 SYS_RMDIR = 40 SYS_DUP = 41 SYS_PIPE = 42 SYS_TIMES = 43 SYS_PROF = 44 SYS_BRK = 45 SYS_SETGID = 46 SYS_GETGID = 47 SYS_SIGNAL = 48 SYS_GETEUID = 49 SYS_GETEGID = 50 SYS_ACCT = 51 SYS_UMOUNT2 = 52 SYS_LOCK = 53 SYS_IOCTL = 54 SYS_FCNTL = 55 SYS_MPX = 56 SYS_SETPGID = 57 SYS_ULIMIT = 58 SYS_OLDOLDUNAME = 59 SYS_UMASK = 60 SYS_CHROOT = 61 SYS_USTAT = 62 SYS_DUP2 = 63 SYS_GETPPID = 64 SYS_GETPGRP = 65 SYS_SETSID = 66 SYS_SIGACTION = 67 SYS_SGETMASK = 68 SYS_SSETMASK = 69 SYS_SETREUID = 70 SYS_SETREGID = 71 SYS_SIGSUSPEND = 72 SYS_SIGPENDING = 73 SYS_SETHOSTNAME = 74 SYS_SETRLIMIT = 75 SYS_GETRLIMIT = 76 SYS_GETRUSAGE = 77 SYS_GETTIMEOFDAY = 78 SYS_SETTIMEOFDAY = 79 SYS_GETGROUPS = 80 SYS_SETGROUPS = 81 SYS_SELECT = 82 SYS_SYMLINK = 83 SYS_OLDLSTAT = 84 SYS_READLINK = 85 SYS_USELIB = 86 SYS_SWAPON = 87 SYS_REBOOT = 88 SYS_READDIR = 89 SYS_MMAP = 90 SYS_MUNMAP = 91 SYS_TRUNCATE = 92 SYS_FTRUNCATE = 93 SYS_FCHMOD = 94 SYS_FCHOWN = 95 SYS_GETPRIORITY = 96 SYS_SETPRIORITY = 97 SYS_PROFIL = 98 SYS_STATFS = 99 SYS_FSTATFS = 100 SYS_IOPERM = 101 SYS_SOCKETCALL = 102 SYS_SYSLOG = 103 SYS_SETITIMER = 104 SYS_GETITIMER = 105 SYS_STAT = 106 SYS_LSTAT = 107 SYS_FSTAT = 108 SYS_OLDUNAME 
= 109 SYS_IOPL = 110 SYS_VHANGUP = 111 SYS_IDLE = 112 SYS_VM86 = 113 SYS_WAIT4 = 114 SYS_SWAPOFF = 115 SYS_SYSINFO = 116 SYS_IPC = 117 SYS_FSYNC = 118 SYS_SIGRETURN = 119 SYS_CLONE = 120 SYS_SETDOMAINNAME = 121 SYS_UNAME = 122 SYS_MODIFY_LDT = 123 SYS_ADJTIMEX = 124 SYS_MPROTECT = 125 SYS_SIGPROCMASK = 126 SYS_CREATE_MODULE = 127 SYS_INIT_MODULE = 128 SYS_DELETE_MODULE = 129 SYS_GET_KERNEL_SYMS = 130 SYS_QUOTACTL = 131 SYS_GETPGID = 132 SYS_FCHDIR = 133 SYS_BDFLUSH = 134 SYS_SYSFS = 135 SYS_PERSONALITY = 136 SYS_AFS_SYSCALL = 137 SYS_SETFSUID = 138 SYS_SETFSGID = 139 SYS__LLSEEK = 140 SYS_GETDENTS = 141 SYS__NEWSELECT = 142 SYS_FLOCK = 143 SYS_MSYNC = 144 SYS_READV = 145 SYS_WRITEV = 146 SYS_GETSID = 147 SYS_FDATASYNC = 148 SYS__SYSCTL = 149 SYS_MLOCK = 150 SYS_MUNLOCK = 151 SYS_MLOCKALL = 152 SYS_MUNLOCKALL = 153 SYS_SCHED_SETPARAM = 154 SYS_SCHED_GETPARAM = 155 SYS_SCHED_SETSCHEDULER = 156 SYS_SCHED_GETSCHEDULER = 157 SYS_SCHED_YIELD = 158 SYS_SCHED_GET_PRIORITY_MAX = 159 SYS_SCHED_GET_PRIORITY_MIN = 160 SYS_SCHED_RR_GET_INTERVAL = 161 SYS_NANOSLEEP = 162 SYS_MREMAP = 163 SYS_SETRESUID = 164 SYS_GETRESUID = 165 SYS_QUERY_MODULE = 166 SYS_POLL = 167 SYS_NFSSERVCTL = 168 SYS_SETRESGID = 169 SYS_GETRESGID = 170 SYS_PRCTL = 171 SYS_RT_SIGRETURN = 172 SYS_RT_SIGACTION = 173 SYS_RT_SIGPROCMASK = 174 SYS_RT_SIGPENDING = 175 SYS_RT_SIGTIMEDWAIT = 176 SYS_RT_SIGQUEUEINFO = 177 SYS_RT_SIGSUSPEND = 178 SYS_PREAD64 = 179 SYS_PWRITE64 = 180 SYS_CHOWN = 181 SYS_GETCWD = 182 SYS_CAPGET = 183 SYS_CAPSET = 184 SYS_SIGALTSTACK = 185 SYS_SENDFILE = 186 SYS_GETPMSG = 187 SYS_PUTPMSG = 188 SYS_VFORK = 189 SYS_UGETRLIMIT = 190 SYS_READAHEAD = 191 SYS_PCICONFIG_READ = 198 SYS_PCICONFIG_WRITE = 199 SYS_PCICONFIG_IOBASE = 200 SYS_MULTIPLEX
{ "pile_set_name": "Github" }
# Release-automation makefile for an npm package: version bumping, committing,
# and tagging, guarded by workspace/CHANGELOG sanity checks.

# Since we rely on paths relative to the makefile location, abort if make isn't being run from there.
$(if $(findstring /,$(MAKEFILE_LIST)),$(error Please only invoke this makefile from the directory it resides in))

# The files that need updating when incrementing the version number.
VERSIONED_FILES := *.js *.json README*

# Add the local npm packages' bin folder to the PATH, so that `make` can find them, when invoked directly.
# Note that rather than using `$(npm bin)` the 'node_modules/.bin' path component is hard-coded, so that invocation works even from an environment
# where npm is (temporarily) unavailable due to having deactivated an nvm instance loaded into the calling shell in order to avoid interference with tests.
export PATH := $(shell printf '%s' "$$PWD/node_modules/.bin:$$PATH")

# External command-line utilities required by the release target.
UTILS := semver
# Make sure that all required utilities can be located.
UTIL_CHECK := $(or $(shell PATH="$(PATH)" which $(UTILS) >/dev/null && echo 'ok'),$(error Did you forget to run `npm install` after cloning the repo? At least one of the required supporting utilities not found: $(UTILS)))

# Default target (by virtue of being the first non '.'-prefixed in the file).
.PHONY: _no-target-specified
_no-target-specified:
	$(error Please specify the target to make - `make list` shows targets. Alternatively, use `npm test` to run the default tests; `npm run` shows all tests)

# Lists all targets defined in this makefile.
.PHONY: list
list:
	@$(MAKE) -pRrn : -f $(MAKEFILE_LIST) 2>/dev/null | awk -v RS= -F: '/^# File/,/^# Finished Make data base/ {if ($$1 !~ "^[#.]") {print $$1}}' | command grep -v -e '^[^[:alnum:]]' -e '^$@$$command ' | sort

# All-tests target: invokes the specified test suites for ALL shells defined in $(SHELLS).
.PHONY: test
test:
	@npm test

# Guard: require TAG=<version-or-increment> to be passed on the command line.
.PHONY: _ensure-tag
_ensure-tag:
ifndef TAG
	$(error Please invoke with `make TAG=<new-version> release`, where <new-version> is either an increment specifier (patch, minor, major, prepatch, preminor, premajor, prerelease), or an explicit major.minor.patch version number)
endif

# NOTE(review): CHANGELOG_ERROR appears unused below — the recipe echoes its
# own message instead. Verify whether it can be removed or should be used.
CHANGELOG_ERROR = $(error No CHANGELOG specified)
# Guard: require CHANGELOG.md to have pending (staged or unstaged) changes,
# i.e. the release notes have actually been written.
.PHONY: _ensure-changelog
_ensure-changelog:
	@ (git status -sb --porcelain | command grep -E '^( M|[MA] ) CHANGELOG.md' > /dev/null) || (echo no CHANGELOG.md specified && exit 2)

# Ensures that the git workspace is clean.
# (CHANGELOG.md is exempt — it is expected to be modified at this point.)
.PHONY: _ensure-clean
_ensure-clean:
	@[ -z "$$((git status --porcelain --untracked-files=no || echo err) | command grep -v 'CHANGELOG.md')" ] || { echo "Workspace is not clean; please commit changes first." >&2; exit 2; }

# Makes a release; invoke with `make TAG=<versionOrIncrementSpec> release`.
# Steps: determine current version from the latest v* tag, validate or compute
# the new version with semver, ask for interactive confirmation, rewrite the
# version in $(VERSIONED_FILES), then commit and annotate-tag as v<new-version>.
.PHONY: release
release: _ensure-tag _ensure-changelog _ensure-clean
	@old_ver=`git describe --abbrev=0 --tags --match 'v[0-9]*.[0-9]*.[0-9]*'` || { echo "Failed to determine current version." >&2; exit 1; }; old_ver=$${old_ver#v}; \
	new_ver=`echo "$(TAG)" | sed 's/^v//'`; new_ver=$${new_ver:-patch}; \
	if printf "$$new_ver" | command grep -q '^[0-9]'; then \
	  semver "$$new_ver" >/dev/null || { echo 'Invalid version number specified: $(TAG) - must be major.minor.patch' >&2; exit 2; }; \
	  semver -r "> $$old_ver" "$$new_ver" >/dev/null || { echo 'Invalid version number specified: $(TAG) - must be HIGHER than current one.' >&2; exit 2; } \
	else \
	  new_ver=`semver -i "$$new_ver" "$$old_ver"` || { echo 'Invalid version-increment specifier: $(TAG)' >&2; exit 2; } \
	fi; \
	printf "=== Bumping version **$$old_ver** to **$$new_ver** before committing and tagging:\n=== TYPE 'proceed' TO PROCEED, anything else to abort: " && read response && [ "$$response" = 'proceed' ] || { echo 'Aborted.' >&2; exit 2; }; \
	replace "$$old_ver" "$$new_ver" -- $(VERSIONED_FILES) && \
	git commit -m "v$$new_ver" $(VERSIONED_FILES) CHANGELOG.md && \
	git tag -a -m "v$$new_ver" "v$$new_ver"
{ "pile_set_name": "Github" }
// Copyright (c) 2016, 2018, 2019, Oracle and/or its affiliates. All rights reserved.
// Code generated. DO NOT EDIT.

// Key Management Service API
//
// API for managing and performing operations with keys and vaults.
//

package keymanagement

import (
	"github.com/oracle/oci-go-sdk/common"
)

// EncryptDataDetails The representation of EncryptDataDetails
type EncryptDataDetails struct {

	// The OCID of the key to encrypt with.
	KeyId *string `mandatory:"true" json:"keyId"`

	// The plaintext data to encrypt.
	Plaintext *string `mandatory:"true" json:"plaintext"`

	// Information that can be used to provide an encryption context for the
	// encrypted data. The length of the string representation of the associatedData
	// must be fewer than 4096 characters.
	AssociatedData map[string]string `mandatory:"false" json:"associatedData"`

	// Information that can be used to provide context for audit logging. It is a map that contains any additional
	// data the users may have and will be added to the audit logs (if audit logging is enabled)
	LoggingContext map[string]string `mandatory:"false" json:"loggingContext"`
}

// String returns a human-readable rendering of the struct, dereferencing
// pointer fields via the SDK's common.PointerString helper.
func (m EncryptDataDetails) String() string {
	return common.PointerString(m)
}
{ "pile_set_name": "Github" }
/** @file EFI Shell protocol as defined in the UEFI Shell 2.0 specification including errata. (C) Copyright 2014 Hewlett-Packard Development Company, L.P.<BR> Copyright (c) 2006 - 2015, Intel Corporation. All rights reserved.<BR> This program and the accompanying materials are licensed and made available under the terms and conditions of the BSD License which accompanies this distribution. The full text of the license may be found at http://opensource.org/licenses/bsd-license.php THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. **/ #ifndef __EFI_SHELL_PROTOCOL__ #define __EFI_SHELL_PROTOCOL__ #include <ShellBase.h> #include <Guid/FileInfo.h> #define EFI_SHELL_PROTOCOL_GUID \ { \ 0x6302d008, 0x7f9b, 0x4f30, { 0x87, 0xac, 0x60, 0xc9, 0xfe, 0xf5, 0xda, 0x4e } \ } // replaced EFI_LIST_ENTRY with LIST_ENTRY for simplicity. // they are identical outside of the name. typedef struct { LIST_ENTRY Link; ///< Linked list members. EFI_STATUS Status; ///< Status of opening the file. Valid only if Handle != NULL. CONST CHAR16 *FullName; ///< Fully qualified filename. CONST CHAR16 *FileName; ///< name of this file. SHELL_FILE_HANDLE Handle; ///< Handle for interacting with the opened file or NULL if closed. EFI_FILE_INFO *Info; ///< Pointer to the FileInfo struct for this file or NULL. } EFI_SHELL_FILE_INFO; /** Returns whether any script files are currently being processed. @retval TRUE There is at least one script file active. @retval FALSE No script files are active now. **/ typedef BOOLEAN (EFIAPI *EFI_SHELL_BATCH_IS_ACTIVE) ( VOID ); /** Closes the file handle. This function closes a specified file handle. All 'dirty' cached file data is flushed to the device, and the file is closed. In all cases, the handle is closed. @param[in] FileHandle The file handle to be closed. @retval EFI_SUCCESS The file closed sucessfully. 
**/ typedef EFI_STATUS (EFIAPI *EFI_SHELL_CLOSE_FILE)( IN SHELL_FILE_HANDLE FileHandle ); /** Creates a file or directory by name. This function creates an empty new file or directory with the specified attributes and returns the new file's handle. If the file already exists and is read-only, then EFI_INVALID_PARAMETER will be returned. If the file already existed, it is truncated and its attributes updated. If the file is created successfully, the FileHandle is the file's handle, else, the FileHandle is NULL. If the file name begins with >v, then the file handle which is returned refers to the shell environment variable with the specified name. If the shell environment variable already exists and is non-volatile then EFI_INVALID_PARAMETER is returned. @param[in] FileName Pointer to NULL-terminated file path. @param[in] FileAttribs The new file's attrbiutes. The different attributes are described in EFI_FILE_PROTOCOL.Open(). @param[out] FileHandle On return, points to the created file handle or directory's handle. @retval EFI_SUCCESS The file was opened. FileHandle points to the new file's handle. @retval EFI_INVALID_PARAMETER One of the parameters has an invalid value. @retval EFI_UNSUPPORTED The file path could not be opened. @retval EFI_NOT_FOUND The specified file could not be found on the device, or could not file the file system on the device. @retval EFI_NO_MEDIA The device has no medium. @retval EFI_MEDIA_CHANGED The device has a different medium in it or the medium is no longer supported. @retval EFI_DEVICE_ERROR The device reported an error or can't get the file path according the DirName. @retval EFI_VOLUME_CORRUPTED The file system structures are corrupted. @retval EFI_WRITE_PROTECTED An attempt was made to create a file, or open a file for write when the media is write-protected. @retval EFI_ACCESS_DENIED The service denied access to the file. @retval EFI_OUT_OF_RESOURCES Not enough resources were available to open the file. 
@retval EFI_VOLUME_FULL The volume is full. **/ typedef EFI_STATUS (EFIAPI *EFI_SHELL_CREATE_FILE)( IN CONST CHAR16 *FileName, IN UINT64 FileAttribs, OUT SHELL_FILE_HANDLE *FileHandle ); /** Deletes the file specified by the file handle. This function closes and deletes a file. In all cases, the file handle is closed. If the file cannot be deleted, the warning code EFI_WARN_DELETE_FAILURE is returned, but the handle is still closed. @param[in] FileHandle The file handle to delete. @retval EFI_SUCCESS The file was closed and deleted and the handle was closed. @retval EFI_WARN_DELETE_FAILURE The handle was closed but the file was not deleted. **/ typedef EFI_STATUS (EFIAPI *EFI_SHELL_DELETE_FILE)( IN SHELL_FILE_HANDLE FileHandle ); /** Deletes the file specified by the file name. This function deletes a file. @param[in] FileName Points to the NULL-terminated file name. @retval EFI_SUCCESS The file was deleted. @retval EFI_WARN_DELETE_FAILURE The handle was closed but the file was not deleted. **/ typedef EFI_STATUS (EFIAPI *EFI_SHELL_DELETE_FILE_BY_NAME)( IN CONST CHAR16 *FileName ); /** Disables the page break output mode. **/ typedef VOID (EFIAPI *EFI_SHELL_DISABLE_PAGE_BREAK) ( VOID ); /** Enables the page break output mode. **/ typedef VOID (EFIAPI *EFI_SHELL_ENABLE_PAGE_BREAK) ( VOID ); /** Execute the command line. This function creates a nested instance of the shell and executes the specified command (CommandLine) with the specified environment (Environment). Upon return, the status code returned by the specified command is placed in StatusCode. If Environment is NULL, then the current environment is used and all changes made by the commands executed will be reflected in the current environment. If the Environment is non-NULL, then the changes made will be discarded. The CommandLine is executed from the current working directory on the current device. @param[in] ParentImageHandle A handle of the image that is executing the specified command line. 
@param[in] CommandLine Points to the NULL-terminated UCS-2 encoded string containing the command line. If NULL then the command- line will be empty. @param[in] Environment Points to a NULL-terminated array of environment variables with the format 'x=y', where x is the environment variable name and y is the value. If this is NULL, then the current shell environment is used. @param[out] ErrorCode Points to the status code returned by the command. @retval EFI_SUCCESS The command executed successfully. The status code returned by the command is pointed to
{ "pile_set_name": "Github" }
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NLog;
using YiSha.Util.Extension;

namespace YiSha.Util
{
    /// <summary>
    /// Static logging facade over NLog. Every level method accepts an optional
    /// exception; when one is supplied, its message chain and stack trace are
    /// appended to the logged text.
    /// </summary>
    public class LogHelper
    {
        // Shared logger instance; string.Empty resolves to the unnamed logger.
        private static readonly Logger log = LogManager.GetLogger(string.Empty);

        public static void Trace(object msg, Exception ex = null)
        {
            log.Trace(BuildMessage(msg, ex));
        }

        public static void Debug(object msg, Exception ex = null)
        {
            log.Debug(BuildMessage(msg, ex));
        }

        public static void Info(object msg, Exception ex = null)
        {
            log.Info(BuildMessage(msg, ex));
        }

        public static void Warn(object msg, Exception ex = null)
        {
            log.Warn(BuildMessage(msg, ex));
        }

        public static void Error(object msg, Exception ex = null)
        {
            log.Error(BuildMessage(msg, ex));
        }

        public static void Error(Exception ex)
        {
            if (ex != null)
            {
                log.Error(GetExceptionMessage(ex));
            }
        }

        public static void Fatal(object msg, Exception ex = null)
        {
            log.Fatal(BuildMessage(msg, ex));
        }

        public static void Fatal(Exception ex)
        {
            if (ex != null)
            {
                log.Fatal(GetExceptionMessage(ex));
            }
        }

        /// <summary>
        /// Builds the final log text. With no exception the message alone is used
        /// (via the ParseToString extension — presumably a null-safe ToString,
        /// TODO confirm in YiSha.Util.Extension); otherwise the exception details
        /// are appended. Consolidates the branch previously duplicated in every
        /// level method.
        /// </summary>
        private static string BuildMessage(object msg, Exception ex)
        {
            if (ex == null)
            {
                return msg.ParseToString();
            }
            // String concatenation calls msg.ToString() here, exactly as the
            // original per-method bodies did.
            return msg + GetExceptionMessage(ex);
        }

        /// <summary>
        /// Formats an exception as: its message, the original (inner) exception's
        /// message when it differs, then the stack trace — each followed by a
        /// newline. Uses StringBuilder instead of repeated string concatenation.
        /// </summary>
        private static string GetExceptionMessage(Exception ex)
        {
            if (ex == null)
            {
                return string.Empty;
            }
            StringBuilder message = new StringBuilder();
            message.Append(ex.Message).Append(Environment.NewLine);
            // GetOriginalException is assumed to walk to the innermost
            // exception — TODO confirm against YiSha.Util.Extension.
            Exception originalException = ex.GetOriginalException();
            if (originalException != null && originalException.Message != ex.Message)
            {
                message.Append(originalException.Message).Append(Environment.NewLine);
            }
            message.Append(ex.StackTrace).Append(Environment.NewLine);
            return message.ToString();
        }
    }
}
{ "pile_set_name": "Github" }
{ "$schema": "../../../../testbot.schema", "$kind": "Microsoft.AdaptiveDialog", "recognizer": { "$kind": "Microsoft.RegexRecognizer", "intents": [ { "intent": "CreateMeetingIntent", "pattern": "(?i)create meeting" }, { "intent": "HelpIntent", "pattern": "(?i)help" }, { "intent": "ShowNextPageIntent", "pattern": "(?i)next page meeting" }, { "intent": "UpdateMeetingIntent", "pattern": "(?i)update meeting" }, { "intent": "AcceptMeetingIntent", "pattern": "(?i)accept meeting" }, { "intent": "DeclineMeetingIntent", "pattern": "(?i)decline meeting" } ] }, "triggers": [ { "$kind": "Microsoft.OnBeginDialog", "actions": [ "GetDisplayMeetings", { "$kind": "Microsoft.SendActivity", "activity": "${ShowMeetingSummaryResponse(user)}" }, { "$kind": "Microsoft.IfCondition", "condition": "count(user.meetings) > 0", "actions": [ { "$kind": "Microsoft.SendActivity", "activity": "${ShowMeetingList(user)}" }, { "$kind": "Microsoft.TextInput", "prompt": "${ChooseReadMeetingList(user)}", "property": "dialog.choice" }, { "$kind": "Microsoft.IfCondition", "condition": "dialog.choice == '1'", "actions": [ { "$kind": "Microsoft.SetProperty", "property": "user.focusedMeeting", "value": "=user.meetings[0]" } ] }, { "$kind": "Microsoft.IfCondition", "condition": "dialog.choice == '2'", "actions": [ { "$kind": "Microsoft.SetProperty", "property": "user.focusedMeeting", "value": "=user.meetings[1]" } ] }, { "$kind": "Microsoft.IfCondition", "condition": "dialog.choice == '3'", "actions": [ { "$kind": "Microsoft.SetProperty", "property": "user.focusedMeeting", "value": "=user.meetings[2]" } ] }, { "$kind": "Microsoft.SendActivity", "activity": "${ReadFocusedMeeting(user)}" }, { "$kind": "Microsoft.SendActivity", "activity": "${ShowMeetingCard(user.focusedMeeting)}" }, { "$kind": "Microsoft.ConfirmInput", "property": "dialog.confirmed", "prompt": "You can say 'update meeting' or 'decline meeting' or 'accept meeting' to do action on this meeting.", "alwaysPrompt": true } ] } ] }, { "$kind": 
"Microsoft.OnIntent", "intent": "UpdateMeetingIntent", "actions": [ "UpdateMeeting" ] }, { "$kind": "Microsoft.OnIntent", "intent": "AcceptMeetingIntent", "actions": [ "AcceptMeeting" ] }, { "$kind": "Microsoft.OnIntent", "intent": "DeclineMeetingIntent", "actions": [ "DeclineMeeting" ] } ] }
{ "pile_set_name": "Github" }
# OE has been triggered. # there should be no %pass shunts on either side and an active tunnel ipsec trafficstatus ipsec shuntstatus ../../pluto/bin/ipsec-look.sh
{ "pile_set_name": "Github" }
// Emacs style mode select -*- C++ -*- //----------------------------------------------------------------------------- // // Copyright (C) 2013 James Haley et al. // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program. If not, see http://www.gnu.org/licenses/ // //-------------------------------------------------------------------------- // // DESCRIPTION: // Created by the sound utility written by Dave Taylor. // Kept as a sample, DOOM2 sounds. Frozen. // // haleyjd: erm... ::lights a fire to warm it up:: // //----------------------------------------------------------------------------- #ifndef __SOUNDS__ #define __SOUNDS__ #include "doomtype.h" #include "m_dllist.h" // // SoundFX struct. 
// // haleyjd 06/12/08: origin subchannels typedef enum { CHAN_ALL = -1, // to S_StopSound, this means stop everything CHAN_AUTO, CHAN_WEAPON, CHAN_VOICE, CHAN_ITEM, CHAN_BODY, CHAN_SLOT5, CHAN_SLOT6, CHAN_SLOT7, NUMSCHANNELS } schannel_e; // haleyjd 03/27/11: sound flags enum { SFXF_PREFIX = 0x00000001, // lumpname should be prefixed with "DS" SFXF_NOPCSOUND = 0x00000002, // sound won't play under PC speaker emulation SFXF_EDF = 0x00000004, // sfxinfo was born via EDF SFXF_SNDINFO = 0x00000008, // sfxinfo was born via SNDINFO lump SFXF_WAD = 0x00000010 // sfxinfo was born as an implicit wad sound }; struct sfxinfo_t { // Sfx singularity (only one at a time) enum { sg_none, sg_itemup, sg_wpnup, sg_oof, sg_getpow }; // haleyjd 09/23/06: pitch variance types enum { pitch_none, // none: always at normal pitch. pitch_doom, // normal variance for DOOM v1.1 pitch_doomsaw, // variance for DOOM chainsaw pitch_heretic, // normal variance for Heretic pitch_hticamb, // variance for Heretic ambient sounds }; char name[9]; // haleyjd: up to 8-character lump name char pcslump[9]; // haleyjd: explicitly provided PC speaker effect lump int singularity; // killough 12/98: implement separate classes of singularity int priority; // Sfx priority int pitch; // pitch if a link int volume; // volume if a link int pitch_type; // haleyjd 09/23/06: pitch variance type int skinsound; // sf: skin sound number to use in place int subchannel; // haleyjd 06/12/08: origin subchannels - default = CHAN_AUTO. unsigned int flags; // haleyjd 03/27/11: sound effect flags // haleyjd 07/13/05: sound attenuation properties now customizable // on a per-sound basis to allow differing behaviors. 
int clipping_dist; // distance when sound is clipped entirely int close_dist; // distance when sound is at maximum volume // links to other sound definitions sfxinfo_t *link; // referenced sound if a link sfxinfo_t *alias; // haleyjd 09/24/06: referenced sound if an alias sfxinfo_t **randomsounds; // haleyjd 05/12/09: random sounds int numrandomsounds; // haleyjd 04/23/08: additional caching data void *data; // sound data int length; // lump length unsigned int alen; // length of converted sound pointed to by data // this is checked every second to see if sound // can be thrown out (if 0, then decrement, if -1, // then throw out, if > 0, then it is in use) int usefulness; // haleyjd: EDF mnemonic char mnemonic[129]; char *lfn; // alison: long file name char *pcslfn; // alison: long file name for PC speaker sound // haleyjd 09/03/03: revised for dynamic EDF sounds DLListItem<sfxinfo_t> numlinks; // haleyjd 04/13/08: numeric hash links sfxinfo_t *next; // next in mnemonic hash chain int dehackednum; // dehacked number }; // // MusicInfo struct. // struct musicinfo_t { // up to 6-character name const char *name; // haleyjd 04/10/11: whether to apply prefix or not bool prefix; // lump number of music // int lumpnum; // music data void *data; // music handle once registered int handle; // sf: for hashing musicinfo_t *next; }; // the complete set of sound effects // haleyjd 11/05/03: made dynamic with EDF extern int NUMSFX; // the complete set of music extern musicinfo_t S_music[]; // heretic music extern musicinfo_t H_music[]; // haleyjd: clever indirection for heretic maps extern int H_Mus_Matrix[6][9]; // // Identifiers for all music in game. 
// typedef enum { mus_None, mus_e1m1, mus_e1m2, mus_e1m3, mus_e1m4, mus_e1m5, mus_e1m6, mus_e1m7, mus_e1m8, mus_e1m9, mus_e2m1, mus_e2m2, mus_e2m3, mus_e2m4, mus_e2m5, mus_e2m6, mus_e2m7, mus_e2m8, mus_e2m9, mus_e3m1, mus_e3m2, mus_e3m3, mus_e3m4, mus_e3m5, mus_e3m6, mus_e3m7, mus_e3m8, mus_e3m9, mus_inter, mus_intro, mus_bunny, mus_victor, mus_introa, mus_runnin, mus_stalks, mus_countd, mus_betwee, mus_doom, mus_the_da, mus_shawn, mus_ddtblu, mus_in_cit, mus_dead, mus_stlks2, mus_theda2, mus_doom2, mus_ddtbl2, mus_runni2, mus_dead2, mus_stlks3, mus_romero, mus_shawn2, mus_messag, mus_count2, mus_ddtbl3, mus_ampie, mus_theda3, mus_adrian, mus_messg2, mus_romer2, mus_tense, mus_sh
{ "pile_set_name": "Github" }
{ "title": "Predefined Symbols (Marker)", "callback": "initMap", "libraries": [], "version": "weekly", "tag": "marker_symbol_predefined", "name": "marker-symbol-predefined" }
{ "pile_set_name": "Github" }
/* drbd_req.c This file is part of DRBD by Philipp Reisner and Lars Ellenberg. Copyright (C) 2001-2008, LINBIT Information Technologies GmbH. Copyright (C) 1999-2008, Philipp Reisner <philipp.reisner@linbit.com>. Copyright (C) 2002-2008, Lars Ellenberg <lars.ellenberg@linbit.com>. drbd is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. drbd is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with drbd; see the file COPYING. If not, write to the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */ #include <linux/module.h> #include <linux/slab.h> #include <linux/drbd.h> #include "drbd_int.h" #include "drbd_req.h" /* Update disk stats at start of I/O request */ static void _drbd_start_io_acct(struct drbd_conf *mdev, struct drbd_request *req, struct bio *bio) { const int rw = bio_data_dir(bio); int cpu; cpu = part_stat_lock(); part_stat_inc(cpu, &mdev->vdisk->part0, ios[rw]); part_stat_add(cpu, &mdev->vdisk->part0, sectors[rw], bio_sectors(bio)); part_inc_in_flight(&mdev->vdisk->part0, rw); part_stat_unlock(); } /* Update disk stats when completing request upwards */ static void _drbd_end_io_acct(struct drbd_conf *mdev, struct drbd_request *req) { int rw = bio_data_dir(req->master_bio); unsigned long duration = jiffies - req->start_time; int cpu; cpu = part_stat_lock(); part_stat_add(cpu, &mdev->vdisk->part0, ticks[rw], duration); part_round_stats(cpu, &mdev->vdisk->part0); part_dec_in_flight(&mdev->vdisk->part0, rw); part_stat_unlock(); } static void _req_is_done(struct drbd_conf *mdev, struct drbd_request *req, const int rw) { const unsigned long s = 
req->rq_state; /* if it was a write, we may have to set the corresponding * bit(s) out-of-sync first. If it had a local part, we need to * release the reference to the activity log. */ if (rw == WRITE) { /* remove it from the transfer log. * well, only if it had been there in the first * place... if it had not (local only or conflicting * and never sent), it should still be "empty" as * initialized in drbd_req_new(), so we can list_del() it * here unconditionally */ list_del(&req->tl_requests); /* Set out-of-sync unless both OK flags are set * (local only or remote failed). * Other places where we set out-of-sync: * READ with local io-error */ if (!(s & RQ_NET_OK) || !(s & RQ_LOCAL_OK)) drbd_set_out_of_sync(mdev, req->sector, req->size); if ((s & RQ_NET_OK) && (s & RQ_LOCAL_OK) && (s & RQ_NET_SIS)) drbd_set_in_sync(mdev, req->sector, req->size); /* one might be tempted to move the drbd_al_complete_io * to the local io completion callback drbd_endio_pri. * but, if this was a mirror write, we may only * drbd_al_complete_io after this is RQ_NET_DONE, * otherwise the extent could be dropped from the al * before it has actually been written on the peer. * if we crash before our peer knows about the request, * but after the extent has been dropped from the al, * we would forget to resync the corresponding extent. */ if (s & RQ_LOCAL_MASK) { if (get_ldev_if_state(mdev, D_FAILED)) { drbd_al_complete_io(mdev, req->sector); put_ldev(mdev); } else if (__ratelimit(&drbd_ratelimit_state)) { dev_warn(DEV, "Should have called drbd_al_complete_io(, %llu), " "but my Disk seems to have failed :(\n", (unsigned long long) req->sector); } } } /* if it was a local io error, we want to notify our * peer about that, and see if we need to * detach the disk and stuff. * to avoid allocating some special work * struct, reuse the request. */ /* THINK * why do we do this not when we detect the error, * but delay it until it is "done", i.e. possibly * until the next barrier ack? 
*/ if (rw == WRITE && ((s & RQ_LOCAL_MASK) && !(s & RQ_LOCAL_OK))) { if (!(req->w.list.next == LIST_POISON1 || list_empty(&req->w.list))) { /* DEBUG ASSERT only; if this triggers, we * probably corrupt the worker list here */ dev_err(DEV, "req->w.list.next = %p\n", req->w.list.next); dev_err(DEV, "req->w.list.prev = %p\n", req->w.list.prev); } req->w.cb = w_io_error; drbd_queue_work(&mdev->data.work, &req->w); /* drbd_req_free() is done in w_io_error */ } else { drbd_req_free(req); } } static void queue_barrier(struct drbd_conf *mdev) { struct drbd_tl_epoch *b; /* We are within the req_lock. Once we queued the barrier for sending, * we set the CREATE_BARRIER bit. It is cleared as soon as a new * barrier/epoch object is added. This is the only place this bit is * set. It indicates that the barrier for this epoch is already queued, * and no new epoch has been created yet. */ if (test_bit(CREATE_BARRIER, &mdev->flags)) return; b = mdev->newest_tle; b->w.cb = w_send_barrier; /* inc_ap_pending done here, so we won't * get imbalanced on connection loss. * dec_ap_pending will be done in got_BarrierAck * or (on connection loss) in tl_clear. */ inc_ap_pending(mdev); drbd_queue_work(&mdev->data.work, &b->w); set_bit(CREATE_BARRIER, &mdev->flags); } static void _about_to_complete_local_write(struct drbd_conf *mdev, struct drbd_request *req) { const unsigned long s = req->rq_state; struct drbd_request *i; struct drbd_epoch_entry *e; struct hlist_node *n; struct hlist_head *slot; /* before we can signal completion to the upper layers, * we may need to close the current epoch */ if (mdev->state.conn >= C_CONNECTED && req->epoch == mdev->newest_tle->br_number) queue_barrier(mdev);
{ "pile_set_name": "Github" }
<test-metadata> <benchmark-version>1.2</benchmark-version> <category>sqli</category> <test-number>01877</test-number> <vulnerability>false</vulnerability> <cwe>89</cwe> </test-metadata>
{ "pile_set_name": "Github" }
/* Copyright 2015 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Package streaming implements encoder and decoder for streams // of runtime.Objects over io.Writer/Readers. package streaming import ( "bytes" "fmt" "io" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/runtime/schema" ) // Encoder is a runtime.Encoder on a stream. type Encoder interface { // Encode will write the provided object to the stream or return an error. It obeys the same // contract as runtime.VersionedEncoder. Encode(obj runtime.Object) error } // Decoder is a runtime.Decoder from a stream. type Decoder interface { // Decode will return io.EOF when no more objects are available. Decode(defaults *schema.GroupVersionKind, into runtime.Object) (runtime.Object, *schema.GroupVersionKind, error) // Close closes the underlying stream. Close() error } // Serializer is a factory for creating encoders and decoders that work over streams. type Serializer interface { NewEncoder(w io.Writer) Encoder NewDecoder(r io.ReadCloser) Decoder } type decoder struct { reader io.ReadCloser decoder runtime.Decoder buf []byte maxBytes int resetRead bool } // NewDecoder creates a streaming decoder that reads object chunks from r and decodes them with d. // The reader is expected to return ErrShortRead if the provided buffer is not large enough to read // an entire object. 
// NewDecoder creates a streaming decoder reading frames from r and decoding
// them with d. The initial buffer is 1 KiB and may grow up to 16 MiB.
func NewDecoder(r io.ReadCloser, d runtime.Decoder) Decoder {
	return &decoder{
		reader:   r,
		decoder:  d,
		buf:      make([]byte, 1024),
		maxBytes: 16 * 1024 * 1024,
	}
}

// ErrObjectTooLarge is returned by Decode when a single frame exceeds the
// decoder's maxBytes limit.
var ErrObjectTooLarge = fmt.Errorf("object to decode was longer than maximum allowed size")

// Decode reads the next object from the stream and decodes it.
//
// The underlying reader signals io.ErrShortBuffer when the supplied buffer
// could not hold the whole frame; Decode responds by doubling buf (up to
// maxBytes) and reading the remainder. A frame larger than maxBytes is
// reported as ErrObjectTooLarge, and its leftover bytes are drained on the
// following calls via the resetRead flag.
func (d *decoder) Decode(defaults *schema.GroupVersionKind, into runtime.Object) (runtime.Object, *schema.GroupVersionKind, error) {
	base := 0 // number of valid frame bytes accumulated in d.buf
	for {
		n, err := d.reader.Read(d.buf[base:])
		if err == io.ErrShortBuffer {
			if n == 0 {
				// Short buffer with zero bytes read should be impossible; surface it loudly.
				return nil, nil, fmt.Errorf("got short buffer with n=0, base=%d, cap=%d", base, cap(d.buf))
			}
			if d.resetRead {
				// Draining an oversized frame: discard what was just read.
				continue
			}
			// double the buffer size up to maxBytes
			if len(d.buf) < d.maxBytes {
				base += n
				d.buf = append(d.buf, make([]byte, len(d.buf))...)
				continue
			}
			// must read the rest of the frame (until we stop getting ErrShortBuffer)
			d.resetRead = true
			base = 0
			return nil, nil, ErrObjectTooLarge
		}
		if err != nil {
			return nil, nil, err
		}
		if d.resetRead {
			// now that we have drained the large read, continue
			d.resetRead = false
			continue
		}
		base += n
		break
	}
	return d.decoder.Decode(d.buf[:base], defaults, into)
}

// Close closes the underlying stream.
func (d *decoder) Close() error {
	return d.reader.Close()
}

// encoder adapts a runtime.Encoder to the streaming Encoder interface,
// buffering each object before writing it out as a single frame.
type encoder struct {
	writer  io.Writer
	encoder runtime.Encoder
	buf     *bytes.Buffer // scratch buffer reused across Encode calls
}

// NewEncoder returns a new streaming encoder.
func NewEncoder(w io.Writer, e runtime.Encoder) Encoder {
	return &encoder{
		writer:  w,
		encoder: e,
		buf:     &bytes.Buffer{},
	}
}

// Encode writes the provided object to the nested writer.
func (e *encoder) Encode(obj runtime.Object) error {
	if err := e.encoder.Encode(obj, e.buf); err != nil {
		return err
	}
	_, err := e.writer.Write(e.buf.Bytes())
	// Reset keeps the backing storage for the next object.
	e.buf.Reset()
	return err
}
{ "pile_set_name": "Github" }
# # Author:: Daniel DeLeo (<dan@chef.io>) # Copyright:: Copyright (c) Chef Software Inc. # License:: Apache License, Version 2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. class Chef class RunList class RunListItem QUALIFIED_RECIPE = /^recipe\[([^\]@]+)(@([0-9]+(\.[0-9]+){1,2}))?\]$/.freeze QUALIFIED_ROLE = /^role\[([^\]]+)\]$/.freeze VERSIONED_UNQUALIFIED_RECIPE = /^([^@]+)(@([0-9]+(\.[0-9]+){1,2}))$/.freeze FALSE_FRIEND = /[\[\]]/.freeze attr_reader :name, :type, :version def initialize(item) @version = nil case item when Hash assert_hash_is_valid_run_list_item!(item) @type = (item["type"] || item[:type]).to_sym @name = item["name"] || item[:name] if item.key?("version") || item.key?(:version) @version = item["version"] || item[:version] end when String if match = QUALIFIED_RECIPE.match(item) # recipe[recipe_name] # recipe[recipe_name@1.0.0] @type = :recipe @name = match[1] @version = match[3] if match[3] elsif match = QUALIFIED_ROLE.match(item) # role[role_name] @type = :role @name = match[1] elsif match = VERSIONED_UNQUALIFIED_RECIPE.match(item) # recipe_name@1.0.0 @type = :recipe @name = match[1] @version = match[3] if match[3] elsif match = FALSE_FRIEND.match(item) # Recipe[recipe_name] # roles[role_name] name = match[1] raise ArgumentError, "Unable to create #{self.class} from #{item.class}:#{item.inspect}: must be recipe[#{name}] or role[#{name}]" else # recipe_name @type = :recipe @name = item end else raise ArgumentError, "Unable to create #{self.class} from 
#{item.class}:#{item.inspect}: must be a Hash or String" end end def to_s "#{@type}[#{@name}#{@version ? "@#{@version}" : ""}]" end def role? @type == :role end def recipe? @type == :recipe end def ==(other) if other.is_a?(String) to_s == other.to_s else other.respond_to?(:type) && other.respond_to?(:name) && other.respond_to?(:version) && other.type == @type && other.name == @name && other.version == @version end end def assert_hash_is_valid_run_list_item!(item) unless (item.key?("type") || item.key?(:type)) && (item.key?("name") || item.key?(:name)) raise ArgumentError, "Initializing a #{self.class} from a hash requires that it have a 'type' and 'name' key" end end end end end
{ "pile_set_name": "Github" }
<!DOCTYPE html>
<html lang="en">

<!-- Page scripts: jQuery, Chart.js, then the app's own script.js/histogram.js. -->
<script src="http://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
<!-- d3 is used by histogram.js
<script src="http://d3js.org/d3.v3.min.js" charset="utf-8"></script>
-->
<!-- NOTE(review): raw.githubusercontent.com serves files as text/plain and
     this pins an unversioned "master" branch; confirm Chart.js actually
     loads and executes in the target browsers. -->
<script src="https://raw.githubusercontent.com/nnnick/Chart.js/master/Chart.js"></script>
<script src="script.js"></script>
<script src="histogram.js"></script>

<head>
    <meta content="text/html; charset=utf-8" http-equiv="Content-Type">
    <meta charset="utf-8">
    <meta content="width=device-width" name="viewport">
    <link href="/style.css" rel="stylesheet">
    <title>((( - PRODUCTION -))) Guestbook</title>
</head>

<body>
<TABLE>
    <TR>
        <TD>
            <!-- Placeholder; replaced by script.js once the backend responds. -->
            <div id="k8petstore-entries">
                <p>Waiting for database connection...This will get overwritten...</p>
            </div>
        </TD>
        <TD>
            <div id="header">
                <h1>k8-bps.</h1>
            </div><br>
            <div>
                <p><h2 id="k8petstore-host-address"></h2></p>
                <p><a href="/env">/env</a> <a href="/info">/info</a></p>
            </div>
        </TD>
    </TR>
    <TR >
        <TD colspan="2">
            <!-- Chart canvas; drawn by histogram.js using Chart.js. -->
            <canvas id="myChart" width="2000" height="600"></canvas>
        </TD>
    </TR>
</TABLE>
</body>
</html>
{ "pile_set_name": "Github" }
#include <QtScript/QScriptEngine> #include <QtScript/QScriptContext> #include <QtScript/QScriptValue> #include <QtCore/QStringList> #include <QtCore/QDebug> #include <qmetaobject.h> #include <qhttp.h> #include <QVariant> #include <qauthenticator.h> #include <qbytearray.h> #include <qcoreevent.h> #include <qhttp.h> #include <qiodevice.h> #include <qlist.h> #include <qnetworkproxy.h> #include <qobject.h> #include <qsslerror.h> #include <qtcpsocket.h> #include "qtscriptshell_QHttp.h" static const char * const qtscript_QHttp_function_names[] = { "QHttp" // static // prototype , "bytesAvailable" , "clearPendingRequests" , "close" , "currentDestinationDevice" , "currentId" , "currentRequest" , "currentSourceDevice" , "error" , "errorString" , "get" , "hasPendingRequests" , "head" , "lastResponse" , "post" , "read" , "readAll" , "request" , "setHost" , "setProxy" , "setSocket" , "setUser" , "state" , "toString" }; static const char * const qtscript_QHttp_function_signatures[] = { "QObject parent\nString hostname, ConnectionMode mode, unsigned short port, QObject parent\nString hostname, unsigned short port, QObject parent" // static // prototype , "" , "" , "" , "" , "" , "" , "" , "" , "" , "String path, QIODevice to" , "" , "String path" , "" , "String path, QIODevice data, QIODevice to\nString path, QByteArray data, QIODevice to" , "char data, qint64 maxlen" , "" , "QHttpRequestHeader header, QIODevice device, QIODevice to\nQHttpRequestHeader header, QByteArray data, QIODevice to" , "String hostname, ConnectionMode mode, unsigned short port\nString hostname, unsigned short port" , "QNetworkProxy proxy\nString host, int port, String username, String password" , "QTcpSocket socket" , "String username, String password" , "" "" }; static const int qtscript_QHttp_function_lengths[] = { 4 // static // prototype , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 2 , 0 , 1 , 0 , 3 , 2 , 0 , 3 , 3 , 4 , 1 , 2 , 0 , 0 }; static QScriptValue qtscript_QHttp_throw_ambiguity_error_helper( 
QScriptContext *context, const char *functionName, const char *signatures) { QStringList lines = QString::fromLatin1(signatures).split(QLatin1Char('\n')); QStringList fullSignatures; for (int i = 0; i < lines.size(); ++i) fullSignatures.append(QString::fromLatin1("%0(%1)").arg(functionName).arg(lines.at(i))); return context->throwError(QString::fromLatin1("QHttp::%0(): could not find a function match; candidates are:\n%1") .arg(functionName).arg(fullSignatures.join(QLatin1String("\n")))); } Q_DECLARE_METATYPE(QHttp*) Q_DECLARE_METATYPE(QtScriptShell_QHttp*) Q_DECLARE_METATYPE(QHttp::Error) Q_DECLARE_METATYPE(QHttp::ConnectionMode) Q_DECLARE_METATYPE(QHttp::State) Q_DECLARE_METATYPE(QIODevice*) Q_DECLARE_METATYPE(QHttpRequestHeader) Q_DECLARE_METATYPE(QHttpResponseHeader) Q_DECLARE_METATYPE(char*) Q_DECLARE_METATYPE(QNetworkProxy) Q_DECLARE_METATYPE(QTcpSocket*) static QScriptValue qtscript_create_enum_class_helper( QScriptEngine *engine, QScriptEngine::FunctionSignature construct, QScriptEngine::FunctionSignature valueOf, QScriptEngine::FunctionSignature toString) { QScriptValue proto = engine->newObject(); proto.setProperty(QString::fromLatin1("valueOf"), engine->newFunction(valueOf), QScriptValue::SkipInEnumeration); proto.setProperty(QString::fromLatin1("toString"), engine->newFunction(toString), QScriptValue::SkipInEnumeration); return engine->newFunction(construct, proto, 1); } // // QHttp::Error // static const QHttp::Error qtscript_QHttp_Error_values[] = { QHttp::NoError , QHttp::UnknownError , QHttp::HostNotFound , QHttp::ConnectionRefused , QHttp::UnexpectedClose , QHttp::InvalidResponseHeader , QHttp::WrongContentLength , QHttp::Aborted , QHttp::AuthenticationRequiredError , QHttp::ProxyAuthenticationRequiredError }; static const char * const qtscript_QHttp_Error_keys[] = { "NoError" , "UnknownError" , "HostNotFound" , "ConnectionRefused" , "UnexpectedClose" , "InvalidResponseHeader" , "WrongContentLength" , "Aborted" , "AuthenticationRequiredError" , 
"ProxyAuthenticationRequiredError" }; static QString qtscript_QHttp_Error_toStringHelper(QHttp::Error value) { if ((value >= QHttp::NoError) && (value <= QHttp::ProxyAuthenticationRequiredError)) return qtscript_QHttp_Error_keys[static_cast<int>(value)-static_cast<int>(QHttp::NoError)]; return QString(); } static QScriptValue qtscript_QHttp_Error_toScriptValue(QScriptEngine *engine, const QHttp::Error &value) { QScriptValue clazz = engine->globalObject().property(QString::fromLatin1("QHttp")); return clazz.property(qtscript_QHttp_Error_toStringHelper(value)); } static void qtscript_QHttp_Error_fromScriptValue(const QScriptValue &value, QHttp::Error &out) { out = qvariant_cast<QHttp::Error>(value.toVariant()); } static QScriptValue qtscript_construct_QHttp_Error(QScriptContext *context, QScriptEngine *engine) { int arg = context->argument(0).toInt32(); if ((arg >= QHttp::NoError) && (arg <= QHttp::ProxyAuthenticationRequiredError)) return qScriptValueFromValue(engine, static_cast<QHttp::Error>(arg)); return context->throwError(QString::fromLatin1("Error(): invalid enum value (%0)").arg(arg)); } static QScriptValue qtscript_QHttp_Error_valueOf(QScriptContext *context, QScriptEngine *engine) { QHttp::Error value = qscriptvalue_cast<QHttp::Error>(context->thisObject()); return QScriptValue(engine, static_cast<int>(value)); } static QScriptValue qtscript_QHttp_Error_toString(QScriptContext *context, QScriptEngine *engine) { QHttp::Error value = qscriptvalue_cast<QHttp::Error>(context->thisObject()); return QScriptValue(engine, qtscript_QHttp_Error_toStringHelper(value)); } static QScriptValue qtscript_create_Q
{ "pile_set_name": "Github" }
// hat_yaw00_lev2_p43.mh:
//
// Linear HAT-descriptor fitness model for landmark 43 at pyramid level 2,
// frontal (yaw00) pose. NOTE(review): the "tasm -V1" line below indicates
// this file is machine-generated by the tasm training tool -- regenerate
// rather than hand-editing the coefficients.

#ifndef stasm_hat_yaw00_lev2_p43_mh
#define stasm_hat_yaw00_lev2_p43_mh

namespace stasm {

// Training configuration recorded by the generator:
// tasm -V1 /b/stasm/train/conf/tasm_muct77.conf
// static const int EYEMOUTH_DIST = 100;
// static const int FACESCALE = 1
// static const int PYRSCALE = 1
// static const double SIGMOIDSCALE = 0
// static const double PYR_RATIO = 2;
// static const int NEGTRAIN_SEED = 2013;
// static const int HAT_PATCH_WIDTH = 19;
// static const int HAT_PATCH_WIDTH_ADJ = -6;
// static const int GRIDHEIGHT = 4;
// static const int GRIDWIDTH = 5;
// static const int BINS_PER_HIST = 8;
// static const double WINDOW_SCALE = 0.5;

// Evaluates intercept + dot(coef, d). The 160 inputs correspond to the
// GRIDHEIGHT x GRIDWIDTH x BINS_PER_HIST (4 * 5 * 8 = 160) histogram bins
// recorded in the configuration above.
static double hatfit_yaw00_lev2_p43(const double* const d) // d has 160 elements
{
    const double intercept = 0.00648366;
    const double coef[160] = {
        -0.0152163, -0.00938193, -0.0190419, -0.0687831, -0.0468449, -0.0880119,
        0.0812563, -0.00542163, -0.0303208, 0.00806224, 0.0623734, 0.0413201,
        -0.00380485, -0.0133008, -0.00241608, -0.0323745, -0.0738325, -0.0242139,
        0.0190181, 0.0157716, 0.0080979, 0.00166531, -0.0480585, -0.0434921,
        -0.0302065, -0.0234743, -0.0184186, 0.0244355, 0.0155824, 0.0468844,
        0.0080336, 0.00117218, -0.0331888, -0.0584217, -0.0338365, -0.026523,
        -0.0119799, 0.0596711, 0.0115523, -0.0122433, -0.0458657, -0.0397486,
        0.00973897, 0.0146185, 0.069101, -0.0167322, 0.0669437, 0.0339211,
        0.00604615, 0.00201924, -0.0142197, -0.0430294, -0.00720905, -0.0326489,
        -0.0359383, 0.0285836, 0.0545388, 0.0379828, 0.0598856, 0.0112929,
        -0.0060677, -0.00368719, 0.0259153, 0.0502069, 0.0226217, 0.0012846,
        0.0148117, -0.0016945, -0.000385082, 0.00444569, 0.0713966, 0.090965,
        0.0507963, -0.00174389, -0.0463333, 0.00418675, 0.0053341, -0.0537939,
        -0.0550999, 0.0945129, 0.113893, -0.0139617, 0.0316123, 0.0529792,
        0.0495666, 0.0340494, -0.039953, 0.0260375, 0.00641793, 0.0377733,
        -0.0625308, -0.0868844, -0.048856, 0.0186374, -0.0411507, -0.0667005,
        0.0029778, 0.0232979, 0.0265188, 0.0512988, 0.0191031, 0.00196293,
        -0.0345508, -0.0318195, 0.00777352,
        0.0336489, 0.0340635, 0.0523148, 0.03348, 0.0224716, 0.0471509,
        -0.0111932, 0.0163388, 0.0360007, -0.0550825, -0.0319889, 0.00674877,
        0.0246315, 0.0166669, -0.00832142, -0.0104868, -0.0427722, 0.0921329,
        0.0220887, -0.0355666, -0.0185576, -0.00916089, -0.0092353, -0.0317364,
        0.0305874, 0.00675156, -0.0249873, 0.01326, 0.0109879, -0.016378,
        0.00128751, -0.0210845, -0.0114771, -0.00718837, -0.00888316, -0.0110957,
        0.0127997, 0.0179059, 0.0174434, -0.0127198, -0.0120589, -0.0196548,
        -0.0179925, -0.071387, 0.00189062, -0.0285034, 0.0140863, -0.0971454,
        0.0467506, -0.00717021, -0.0347361, -0.0281356, -0.0441394, -0.0500878,
        -0.0092695
    };
    return linmod(d, intercept, coef, 160);
}

// Descriptor-model wrapper registered for this landmark/level.
static const HatDescMod hat_yaw00_lev2_p43(hatfit_yaw00_lev2_p43);

} // namespace stasm
#endif // stasm_hat_yaw00_lev2_p43_mh
{ "pile_set_name": "Github" }
### WinUI 3 To build Fluent experiences on Windows using WinUI controls, please see our [WinUI controls documentation](https://docs.microsoft.com/en-us/windows/uwp/design/controls-and-patterns/). ### Fluent UI React Native To build Fluent experiences on Windows using Fluent UI React Native, please see our [Cross-platform Controls page](#/controls/crossplatform).
{ "pile_set_name": "Github" }
/** * Copyright 2013-2020 the original author or authors from the JHipster project. * * This file is part of the JHipster project, see http://www.jhipster.tech/ * for more information. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ const Options = { DTO: 'dto', SERVICE: 'service', PAGINATION: 'pagination', MICROSERVICE: 'microservice', SEARCH: 'search', ANGULAR_SUFFIX: 'angularSuffix', CLIENT_ROOT_FOLDER: 'clientRootFolder' }; const optionNames = Object.values(Options); const Values = { [Options.DTO]: { MAPSTRUCT: 'mapstruct' }, [Options.SERVICE]: { SERVICE_CLASS: 'serviceClass', SERVICE_IMPL: 'serviceImpl' }, [Options.PAGINATION]: { PAGINATION: 'pagination', 'INFINITE-SCROLL': 'infinite-scroll' }, [Options.SEARCH]: { ELASTIC_SEARCH: 'elasticsearch', COUCHBASE: 'couchbase' } }; function forEach(passedFunction) { if (!passedFunction) { throw new Error('A function has to be passed to loop over the binary options.'); } optionNames.forEach(passedFunction); } function exists(passedOption, passedValue) { return ( !Object.values(Options).includes(passedOption) || Object.values(Options).some( option => passedOption === option && (passedOption === Options.MICROSERVICE || passedOption === Options.ANGULAR_SUFFIX || passedOption === Options.CLIENT_ROOT_FOLDER || Object.values(Values[option]).includes(passedValue)) ) ); } module.exports = { Options, Values, exists, forEach };
{ "pile_set_name": "Github" }
-- Exercises decimal columns with differing precision/scale:
--   * UNION ALL across decimal(10,5) and decimal(17,4) key columns
--   * decimal arithmetic (key + 5.5) inside a CREATE TABLE AS SELECT

-- Start from a clean slate.
DROP TABLE IF EXISTS DECIMAL_6_1;
DROP TABLE IF EXISTS DECIMAL_6_2;
DROP TABLE IF EXISTS DECIMAL_6_3;

CREATE TABLE DECIMAL_6_1(key decimal(10,5), value int)
ROW FORMAT DELIMITED
   FIELDS TERMINATED BY ' '
STORED AS TEXTFILE;

CREATE TABLE DECIMAL_6_2(key decimal(17,4), value int)
ROW FORMAT DELIMITED
   FIELDS TERMINATED BY ' '
STORED AS TEXTFILE;

-- Load the same fixture file into both tables.
LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_1;
LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_2;

-- The union forces a common decimal type for the two differently-typed keys.
SELECT T.key from (
  SELECT key, value from DECIMAL_6_1
  UNION ALL
  SELECT key, value from DECIMAL_6_2
) T order by T.key;

-- CTAS with decimal arithmetic; the result column types are inferred
-- and then shown by the DESC below.
CREATE TABLE DECIMAL_6_3 AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1 ORDER BY v;

desc DECIMAL_6_3;
{ "pile_set_name": "Github" }
package com.xiaolyuh.service.impl; import com.github.pagehelper.Page; import com.github.pagehelper.PageHelper; import com.xiaolyuh.domain.mapper.PersonMapper; import com.xiaolyuh.domain.model.Person; import com.xiaolyuh.service.PersonService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; /** * Created by yuhao.wang on 2017/6/19. */ @Service public class PersonServiceImpl implements PersonService { @Autowired private PersonMapper personMapper; public static AtomicInteger atomicInteger = new AtomicInteger(); @Override public List<Person> findAll() { return personMapper.findAll(); } @Override public Page<Person> findByPage(int pageNo, int pageSize) { PageHelper.startPage(pageNo, pageSize); return personMapper.findByPage(); } @Override @Transactional(rollbackFor = Exception.class) public void insert(Person person) { personMapper.insert(person); } @Override @Transactional(rollbackFor = Exception.class) public int updateAge(long id) { int result = personMapper.updateAge(id); int i = atomicInteger.getAndIncrement(); if (i > 990) { System.out.println(i); } try { Thread.sleep(1000); } catch (InterruptedException e) { } return result; } }
{ "pile_set_name": "Github" }
<refentry xmlns="http://docbook.org/ns/docbook"
          xmlns:xlink="http://www.w3.org/1999/xlink"
          xmlns:xi="http://www.w3.org/2001/XInclude"
          xmlns:src="http://nwalsh.com/xmlns/litprog/fragment"
          xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
          version="5.0" xml:id="ulink.footnotes">
<refmeta>
<refentrytitle>ulink.footnotes</refentrytitle>
<refmiscinfo class="other" otherclass="datatype">boolean</refmiscinfo>
</refmeta>
<refnamediv>
<refname>ulink.footnotes</refname>
<refpurpose>Generate footnotes for <tag>ulink</tag>s?</refpurpose>
</refnamediv>
<refsynopsisdiv>
<src:fragment xml:id="ulink.footnotes.frag">
<xsl:param name="ulink.footnotes" select="0"/>
</src:fragment>
</refsynopsisdiv>
<refsection><info><title>Description</title></info>
<para>If non-zero, and if <parameter>ulink.show</parameter> also is non-zero,
the URL of each <tag>ulink</tag> will appear as a footnote.</para>
<note><para>DocBook 5 does not have an <tag>ulink</tag> element. When
processing DocBook 5 documents, <parameter>ulink.footnotes</parameter>
applies to all inline elements that are marked up with
<tag class="attribute">xlink:href</tag> attributes that point to
external resources.</para>
</note>
</refsection>
</refentry>
{ "pile_set_name": "Github" }
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build !darwin,!freebsd,!linux,!solaris

package ipv4

import (
	"net"

	"golang.org/x/net/internal/socket"
)

// setGroupReq is a stub: multicast group join/leave socket options are not
// implemented on this platform (see the build constraint above — everything
// except darwin/freebsd/linux/solaris).
func (so *sockOpt) setGroupReq(c *socket.Conn, ifi *net.Interface, grp net.IP) error {
	return errNotImplemented
}

// setGroupSourceReq is a stub: source-specific multicast group socket options
// are not implemented on this platform.
func (so *sockOpt) setGroupSourceReq(c *socket.Conn, ifi *net.Interface, grp, src net.IP) error {
	return errNotImplemented
}
{ "pile_set_name": "Github" }
#define _GNU_SOURCE #include <errno.h> #include <sched.h> #include "syscall.h" #include "atomic.h" #ifdef VDSO_GETCPU_SYM static void *volatile vdso_func; typedef long (*getcpu_f)(unsigned *, unsigned *, void *); static long getcpu_init(unsigned *cpu, unsigned *node, void *unused) { void *p = __vdsosym(VDSO_GETCPU_VER, VDSO_GETCPU_SYM); getcpu_f f = (getcpu_f)p; a_cas_p(&vdso_func, (void *)getcpu_init, p); return f ? f(cpu, node, unused) : -ENOSYS; } static void *volatile vdso_func = (void *)getcpu_init; #endif int sched_getcpu(void) { int r; unsigned cpu; #ifdef VDSO_GETCPU_SYM getcpu_f f = (getcpu_f)vdso_func; if (f) { r = f(&cpu, 0, 0); if (!r) return cpu; if (r != -ENOSYS) return __syscall_ret(r); } #endif r = __syscall(SYS_getcpu, &cpu, 0, 0); if (!r) return cpu; return __syscall_ret(r); }
{ "pile_set_name": "Github" }
//Copyright (c) ServiceStack, Inc. All Rights Reserved.
//License: https://raw.github.com/ServiceStack/ServiceStack/master/license.txt

using System;
using System.Collections;
using System.Collections.Generic;

namespace ServiceStack.Data
{
    /// <summary>
    /// Generic persistence contract for simple CRUD entity stores.
    /// Implementations own an underlying connection/session, hence <see cref="IDisposable"/>.
    /// </summary>
    public interface IEntityStore : IDisposable
    {
        /// <summary>Returns the entity of type <typeparamref name="T"/> with the given id.</summary>
        T GetById<T>(object id);

        /// <summary>Returns the entities matching the given ids.</summary>
        IList<T> GetByIds<T>(ICollection ids);

        /// <summary>Persists the entity and returns it.</summary>
        T Store<T>(T entity);

        /// <summary>Persists all of the given entities.</summary>
        void StoreAll<TEntity>(IEnumerable<TEntity> entities);

        /// <summary>Deletes the given entity.</summary>
        void Delete<T>(T entity);

        /// <summary>Deletes the entity with the given id.</summary>
        void DeleteById<T>(object id);

        /// <summary>Deletes the entities with the given ids.</summary>
        void DeleteByIds<T>(ICollection ids);

        /// <summary>Deletes every entity of the given type.</summary>
        void DeleteAll<TEntity>();
    }
}
{ "pile_set_name": "Github" }
test_noargs
  The example program is called without arguments, so the default range [0..9] is used.

test_2_to_2000
  The example program is called with "2" and "2000" as arguments.

test_overflow
  The example program is called with "0" and "100000" as arguments. The resulting sum is too large to be stored in an int variable.
{ "pile_set_name": "Github" }
---
layout       : blocks/working-session
title        : Top 10 2017 - Call for Data and Weightings Discussion
type         : workshop
track        : Owasp Top 10 2017
technology   :
related-to   :
status       : done
when-day     : Mon
when-time    : PM-1
location     : Room-2
organizers   : Dave Wichers, Andrew Van Der Stock
participants : Amani Altarawneh, Chris Cooper, Christian DeHoyos, Daniel Miessler, Erez Yalon, Jason Li, Jonas vanalderweireldt, Kevin Greene, Nuno Loureiro, Sandor Lenart, Tiago Mendo, Tiffany Long, Torsten Gigler
outcomes     : mapped
---

## Why

## What

What sort of data does the Top 10 need, where/who do we ask for it, and how do we weight the various types of responses (see Brian Glas' blog for "tools augmented with humans" vs "humans with augmented tooling")? This weighting of data will help define the approach in OWASP Top 10 2017 RC2, and will also be used in the 2020 and 2023 OWASP Top 10. I want the community to drive this discussion.

## Outcomes

### Synopsis and Takeaways

We talked about how data was collected and the process by which it was analysed. For Top 10 2017, there was an open call for data, but it wasn't widely reported nor sufficiently pushed once open. This possibly resulted in fewer responses than in a perfect world.

There was a lot of discussion around the process, such as "if we use data scientists, can we use the existing data?" and "should we re-open the data collection?" It was an incredibly valuable discussion, and it struck a good pragmatic balance. We want to drive a release this year, but RC2 will not come out this week, so we will not be running editing / creating sessions this week, and will instead work on collecting some more data.

The outcomes from this session were:

- A data collection process and timeline will be published to the wiki to make sure everyone knows how data is collected and analysed, and when the next data call will be held. Some of this will appear in the text, probably an appendix, to make sure that our process is transparent and open.
- Andrew van der Stock will work on a process with Foundation staff to ensure that we can maximise publicity for the next data call round in 2019. There was a suggestion to keep it open all the time, but this was felt to be unworkable without a data scientist volunteering. For smaller consultancies, obtaining this data is already difficult, and we don't want people to be overly burdened by the data call.
- A data call extension will be pushed out for interested parties. Andrew will take care of this on Tuesday 12 June, 2017. As long as data is roughly in the same Excel format as the existing data and provided by the end of July, it ought to be possible to use it.
- Dave Wichers will reach out to Brian Glas for feedback for tomorrow morning's data weighting session.
- For 2020, we will try to find data scientists to help us improve our data methodology and analysis, so we can at least ensure that data drives inclusion for the non-forward-looking data.
- Ordering will never be strictly data order; to provide continuity, there is a decision (which will now be documented) that if A1 ... A3 in 2010 are the same in 2017 but in a slightly different order, those will retain their previous order. This helps folks map results year on year and prevents massive upheaval between years.
- Feedback obtained from the OWASP Top 10 mail list will end up in GitHub tomorrow as issues. For feedback sent privately to Dave, Andrew will reach out to these individuals to ask permission to create issues at GitHub. This will help with project transparency.

From now on, if you have feedback, please provide it at GitHub: https://github.com/OWASP/Top10/issues

## Who

The target audience for this Working Session is:

- OWASP Top 10 2017 Track participants

## References

---

## Working materials
{ "pile_set_name": "Github" }
# Copyright 2018/2019 The RLgraph authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

from __future__ import absolute_import, division, print_function

from rlgraph import get_backend
from rlgraph.components.component import Component
from rlgraph.utils.decorators import rlgraph_api
from rlgraph.utils.ops import DataOpDict, DataOpTuple, FLATTEN_SCOPE_PREFIX


class ContainerMerger(Component):
    """
    Merges incoming items into one FlattenedDataOp (either a DataOpDict keyed
    by the given input names, or a DataOpTuple).
    """
    def __init__(self, *input_names_or_num_items, **kwargs):
        """
        Args:
            *input_names_or_num_items (Union[str,int]): List of the names of the different inputs in the
                order they will be passed into the `merge` API-method in the returned merged Dict.
                Or the number of items in the Tuple to be merged.
                Example:
                input_names_or_num_items = ["A", "B"]
                - merge(Dict(c=1, d=2), Tuple(3, 4))
                - returned value: Dict(A=Dict(c=1, d=2), B=Tuple(3, 4))
                input_names_or_num_items = 3: 3 items will be merged into a Tuple.

        Keyword Args:
            merge_tuples_into_one (bool): Whether to merge incoming DataOpTuples into one single DataOpTuple.
                If True: tupleA + tupleB -> (tupleA[0] + tupleA[1] + tupleA[...] + tupleB[0] + tupleB[1] ...).
                If False: tupleA + tupleB -> (tupleA + tupleB).
            is_tuple (bool): Whether we should merge a tuple.
        """
        # Whether incoming DataOpTuples are flattened into one resulting tuple.
        self.merge_tuples_into_one = kwargs.pop("merge_tuples_into_one", False)
        # Tuple-flattening implies tuple output; otherwise honor `is_tuple`.
        self.is_tuple = kwargs.pop("is_tuple", self.merge_tuples_into_one)

        super(ContainerMerger, self).__init__(scope=kwargs.pop("scope", "container-merger"), **kwargs)

        # Flat dict keys for the resulting DataOpDict (None when merging into a tuple).
        self.dict_keys = None

        # A single int argument means: merge that many items into a DataOpTuple.
        if len(input_names_or_num_items) == 1 and isinstance(input_names_or_num_items[0], int):
            self.is_tuple = True
        else:
            # and not re.search(r'/', i)
            # or some of them have '/' characters in them, which are not allowed
            assert all(isinstance(i, str) for i in input_names_or_num_items), \
                "ERROR: Not all input names of DictMerger Component '{}' are strings.".format(self.global_scope)
            self.dict_keys = input_names_or_num_items

    def check_input_spaces(self, input_spaces, action_space=None):
        """
        Validates that the number of incoming Spaces matches the number of
        `dict_keys` (dict-merging mode only; a tuple merge accepts any number).
        """
        # Collect the Spaces of all "inputs[i]" call parameters in order.
        spaces = []
        idx = 0
        while True:
            key = "inputs[{}]".format(idx)
            if key not in input_spaces:
                break
            spaces.append(input_spaces[key])
            idx += 1

        # If Tuple -> Incoming inputs could be of any number.
        if self.dict_keys:
            len_ = len(self.dict_keys)
            assert len(spaces) == len_,\
                "ERROR: Number of incoming Spaces ({}) does not match number of given `dict_keys` ({}) in" \
                "ContainerMerger Component '{}'!".format(len(spaces), len_, self.global_scope)

    @rlgraph_api
    def _graph_fn_merge(self, *inputs):
        """
        Merges the inputs into a single DataOpDict OR DataOpTuple with the flat keys given in `self.dict_keys`.

        Args:
            *inputs (FlattenedDataOp): The input items to be merged into a ContainerDataOp.

        Returns:
            ContainerDataOp: The DataOpDict or DataOpTuple as a merger of all *inputs.
        """
        if self.is_tuple is True:
            ret = []
            for op in inputs:
                # Merge single items inside a DataOpTuple into resulting tuple.
                if self.merge_tuples_into_one and isinstance(op, DataOpTuple):
                    ret.extend(list(op))
                # Strict by-input merging.
                else:
                    ret.append(op)
            return DataOpTuple(ret)
        else:
            ret = DataOpDict()
            for i, op in enumerate(inputs):
                # PyTorch define-by-run mode stores flat keys with the scope
                # prefix; static-graph backends use the bare key.
                if get_backend() == "pytorch" and self.execution_mode == "define_by_run":
                    ret[FLATTEN_SCOPE_PREFIX + self.dict_keys[i]] = op
                else:
                    ret[self.dict_keys[i]] = op
            return ret
{ "pile_set_name": "Github" }
// RUN: %clang_cc1 -fsyntax-only -verify %s #include "Inputs/cuda.h" // expected-no-diagnostics // Check that we can handle gnu_inline functions when compiling in CUDA mode. void foo(); inline __attribute__((gnu_inline)) void bar() { foo(); }
{ "pile_set_name": "Github" }
// Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

package software.aws.toolkits.jetbrains.services.lambda.nodejs

import com.intellij.openapi.roots.ProjectFileIndex
import com.intellij.openapi.vfs.VfsUtilCore
import com.intellij.testFramework.runInEdtAndWait
import org.assertj.core.api.Assertions.assertThat
import org.junit.Rule
import org.junit.Test
import software.aws.toolkits.jetbrains.utils.rules.NodeJsCodeInsightTestFixtureRule
import software.aws.toolkits.jetbrains.utils.rules.addLambdaHandler
import software.aws.toolkits.jetbrains.utils.rules.addPackageJsonFile

/**
 * Tests for `inferSourceRoot`, covering how the source root of a NodeJS Lambda
 * handler file is resolved relative to a `package.json`:
 *  - no package.json anywhere   -> the module content root is the source root;
 *  - package.json in a sub-dir  -> that sub-directory is the source root;
 *  - package.json at the root   -> the content root is the source root.
 */
class NodeJsHelperTest {
    // Fresh NodeJS-aware test project fixture for every test method.
    @Rule
    @JvmField
    val projectRule = NodeJsCodeInsightTestFixtureRule()

    @Test
    fun inferSourceRoot_noPackageJsonReturnsContentRoot() {
        // Handler at <contentRoot>/foo/bar/app.js; no package.json is created.
        val element = projectRule.fixture.addLambdaHandler(
            subPath = "foo/bar",
            fileName = "app.js",
            handlerName = "someHandler"
        )

        // VFS/index queries must run on the EDT in tests.
        runInEdtAndWait {
            val contentRoot = ProjectFileIndex.getInstance(projectRule.project).getContentRootForFile(element.containingFile.virtualFile)
            val sourceRoot = inferSourceRoot(projectRule.project, element.containingFile.virtualFile)
            assertThat(contentRoot).isEqualTo(sourceRoot)
        }
    }

    @Test
    fun inferSourceRoot_packageJsonInSubFolder() {
        val element = projectRule.fixture.addLambdaHandler(
            subPath = "foo/bar",
            fileName = "app.js",
            handlerName = "someHandler"
        )

        // package.json lives at <contentRoot>/foo, an ancestor of the handler.
        projectRule.fixture.addPackageJsonFile(
            subPath = "foo"
        )

        runInEdtAndWait {
            val contentRoot = ProjectFileIndex.getInstance(projectRule.project).getContentRootForFile(element.containingFile.virtualFile)
            val sourceRoot = inferSourceRoot(projectRule.project, element.containingFile.virtualFile)
            // The directory holding package.json is expected to win over the content root.
            assertThat(VfsUtilCore.findRelativeFile("foo", contentRoot)).isEqualTo(sourceRoot)
        }
    }

    @Test
    fun inferSourceRoot_packageJsonInRootFolder() {
        val element = projectRule.fixture.addLambdaHandler(
            subPath = "foo/bar",
            fileName = "app.js",
            handlerName = "someHandler"
        )

        // package.json placed directly at the content root.
        projectRule.fixture.addPackageJsonFile(
            subPath = "."
        )

        runInEdtAndWait {
            val contentRoot = ProjectFileIndex.getInstance(projectRule.project).getContentRootForFile(element.containingFile.virtualFile)
            val sourceRoot = inferSourceRoot(projectRule.project, element.containingFile.virtualFile)
            assertThat(contentRoot).isEqualTo(sourceRoot)
        }
    }
}
{ "pile_set_name": "Github" }
/*
 * This file is part of mpv.
 *
 * mpv is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * mpv is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with mpv. If not, see <http://www.gnu.org/licenses/>.
 */

#include "config.h"

#include "ao.h"
#include "internal.h"
#include "audio/format.h"
#include "osdep/timer.h"
#include "options/m_option.h"
#include "common/msg.h"
#include "ao_coreaudio_utils.h"
#include "ao_coreaudio_chmap.h"

#import <AudioUnit/AudioUnit.h>
#import <CoreAudio/CoreAudioTypes.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AVFoundation/AVFoundation.h>
#import <mach/mach_time.h>

// Private state of the iOS AudioUnit audio output driver.
struct priv {
    AudioUnit audio_unit;   // RemoteIO output unit that performs playback
    double device_latency;  // outputLatency + IOBufferDuration, in seconds
};

// Render callback: invoked by the AudioUnit whenever it needs more LPCM
// samples. Pulls audio from the AO core via ao_read_data(), passing the
// estimated wall-clock time (in microseconds) at which the end of this
// buffer will be audible, so A/V sync can account for output latency.
static OSStatus render_cb_lpcm(void *ctx, AudioUnitRenderActionFlags *aflags,
                               const AudioTimeStamp *ts,
                               UInt32 bus, UInt32 frames,
                               AudioBufferList *buffer_list)
{
    struct ao *ao = ctx;
    struct priv *p = ao->priv;
    void *planes[MP_NUM_CHANNELS] = {0};

    // One AudioBuffer per plane (planar or interleaved depending on format).
    for (int n = 0; n < ao->num_planes; n++)
        planes[n] = buffer_list->mBuffers[n].mData;

    // Presentation deadline = now + device latency + CoreAudio-reported
    // latency for this timestamp + the duration of the requested frames.
    int64_t end = mp_time_us();
    end += p->device_latency * 1e6;
    end += ca_get_latency(ts) + ca_frames_to_us(ao, frames);
    ao_read_data(ao, planes, frames, end);
    return noErr;
}

// Configures the AVAudioSession and creates/initializes a RemoteIO output
// AudioUnit, negotiating the channel layout with the current output route.
// Returns false (after unwinding any partial setup) on failure.
static bool init_audiounit(struct ao *ao)
{
    AudioStreamBasicDescription asbd;
    OSStatus err;
    uint32_t size;
    struct priv *p = ao->priv;

    AVAudioSession *instance = AVAudioSession.sharedInstance;
    AVAudioSessionPortDescription *port = nil;
    NSInteger maxChannels = instance.maximumOutputNumberOfChannels;
    NSInteger prefChannels = MIN(maxChannels, ao->channels.num);

    // Session setup must happen before the unit is created; errors are
    // deliberately ignored (best effort — playback may still work).
    [instance setCategory:AVAudioSessionCategoryPlayback error:nil];
    [instance setMode:AVAudioSessionModeMoviePlayback error:nil];
    [instance setActive:YES error:nil];
    [instance setPreferredOutputNumberOfChannels:prefChannels error:nil];

    AudioComponentDescription desc = (AudioComponentDescription) {
        .componentType = kAudioUnitType_Output,
        .componentSubType = kAudioUnitSubType_RemoteIO,
        .componentManufacturer = kAudioUnitManufacturer_Apple,
        .componentFlags = 0,
        .componentFlagsMask = 0,
    };

    AudioComponent comp = AudioComponentFindNext(NULL, &desc);
    if (comp == NULL) {
        MP_ERR(ao, "unable to find audio component\n");
        goto coreaudio_error;
    }

    err = AudioComponentInstanceNew(comp, &(p->audio_unit));
    CHECK_CA_ERROR("unable to open audio component");

    err = AudioUnitInitialize(p->audio_unit);
    CHECK_CA_ERROR_L(coreaudio_error_component,
                     "unable to initialize audio unit");

    // Channel layout negotiation: spdif or <=2 output channels -> plain
    // stereo; otherwise map the current route's channel descriptions.
    if (af_fmt_is_spdif(ao->format) ||
        instance.outputNumberOfChannels <= 2) {
        ao->channels = (struct mp_chmap)MP_CHMAP_INIT_STEREO;
    } else {
        port = instance.currentRoute.outputs.firstObject;
        if (port.channels.count == 2 &&
            port.channels[0].channelLabel == kAudioChannelLabel_Unknown) {
            // Special case when using an HDMI adapter. The iOS device will
            // perform SPDIF conversion for us, so send all available channels
            // using the AC3 mapping.
            ao->channels = (struct mp_chmap)MP_CHMAP6(FL, FC, FR, SL, SR, LFE);
        } else {
            // channelNumber is 1-based, hence the -1 when indexing speakers.
            ao->channels.num = (uint8_t)port.channels.count;
            for (AVAudioSessionChannelDescription *ch in port.channels) {
                ao->channels.speaker[ch.channelNumber - 1] =
                    ca_label_to_mp_speaker_id(ch.channelLabel);
            }
        }
    }

    // Tell the unit what sample format we will feed it on its input scope.
    ca_fill_asbd(ao, &asbd);
    size = sizeof(AudioStreamBasicDescription);
    err = AudioUnitSetProperty(p->audio_unit,
                               kAudioUnitProperty_StreamFormat,
                               kAudioUnitScope_Input, 0, &asbd, size);
    CHECK_CA_ERROR_L(coreaudio_error_audiounit,
                     "unable to set the input format on the audio unit");

    AURenderCallbackStruct render_cb = (AURenderCallbackStruct) {
        .inputProc = render_cb_lpcm,
        .inputProcRefCon = ao,
    };

    err = AudioUnitSetProperty(p->audio_unit,
                               kAudioUnitProperty_SetRenderCallback,
                               kAudioUnitScope_Input, 0, &render_cb,
                               sizeof(AURenderCallbackStruct));
    CHECK_CA_ERROR_L(coreaudio_error_audiounit,
                     "unable to set render callback on audio unit");

    return true;

// Unwind in reverse order of construction; each label disposes everything
// created before the corresponding failure point.
coreaudio_error_audiounit:
    AudioUnitUninitialize(p->audio_unit);
coreaudio_error_component:
    AudioComponentInstanceDispose(p->audio_unit);
coreaudio_error:
    return false;
}

// AO "reset" hook: pause output by stopping the unit (state is kept).
static void stop(struct ao *ao)
{
    struct priv *p = ao->priv;
    OSStatus err = AudioOutputUnitStop(p->audio_unit);
    CHECK_CA_WARN("can't stop audio unit");
}

// AO "start" hook: refresh the cached device latency (the route may have
// changed while stopped) and start rendering.
static void start(struct ao *ao)
{
    struct priv *p = ao->priv;
    AVAudioSession *instance = AVAudioSession.sharedInstance;
    p->device_latency = [instance outputLatency] + [instance IOBufferDuration];
    OSStatus err = AudioOutputUnitStart(p->audio_unit);
    CHECK_CA_WARN("can't start audio unit");
}

// Tears down the AudioUnit and deactivates the audio session, letting other
// apps resume audio (NotifyOthersOnDeactivation).
static void uninit(struct ao *ao)
{
    struct priv *p = ao->priv;
    AudioOutputUnitStop(p->audio_unit);
    AudioUnitUninitialize(p->audio_unit);
    AudioComponentInstanceDispose(p->audio_unit);

    [AVAudioSession.sharedInstance
        setActive:NO
        withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation
        error:nil];
}

// Driver entry point; all real work happens in init_audiounit().
static int init(struct ao *ao)
{
    if (!init_audiounit(ao))
        goto coreaudio_error;

    return CONTROL_OK;

coreaudio_error:
    return CONTROL_ERROR;
}

#define OPT_BASE_STRUCT struct priv

// Driver descriptor registered with mpv's AO framework.
const struct ao_driver audio_out_audiounit = {
    .description = "AudioUnit (iOS)",
    .name = "audiounit",
    .uninit = uninit,
    .init = init,
    .reset = stop,
    .start = start,
    .priv_size = sizeof(struct priv),
};
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android" xmlns:app="http://schemas.android.com/apk/res-auto" xmlns:tools="http://schemas.android.com/tools" android:id="@+id/net_cellular" android:layout_width="match_parent" android:layout_height="match_parent" tools:context="com.pencilbox.netknight.view.NetCellular"> <LinearLayout android:id="@+id/topchart" android:layout_width="match_parent" android:layout_height="match_parent"> <com.github.mikephil.charting.charts.LineChart android:id="@+id/celluar_chart" android:layout_width="match_parent" android:layout_height="match_parent"> </com.github.mikephil.charting.charts.LineChart> </LinearLayout> </RelativeLayout>
{ "pile_set_name": "Github" }
(:name howm :website "http://howm.sourceforge.jp/" :description "Write fragmentarily and read collectively." :type http-tar :options ("xzf") :url "http://howm.sourceforge.jp/a/howm-1.4.4.tar.gz" :build `(("./configure" ,(concat "--with-emacs=" el-get-emacs)) ("make")))
{ "pile_set_name": "Github" }
0.7.1 / 2015-04-20
==================

 * prevent extraordinarily long inputs (@evilpacket)
 * Fixed broken readme link

0.7.0 / 2014-11-24
==================

 * add time abbreviations, updated tests and readme for the new units
 * fix example in the readme.
 * add LICENSE file

0.6.2 / 2013-12-05
==================

 * Adding repository section to package.json to suppress warning from NPM.

0.6.1 / 2013-05-10
==================

 * fix singularization [visionmedia]

0.6.0 / 2013-03-15
==================

 * fix minutes

0.5.1 / 2013-02-24
==================

 * add component namespace

0.5.0 / 2012-11-09
==================

 * add short formatting as default and .long option
 * add .license property to component.json
 * add version to component.json

0.4.0 / 2012-10-22
==================

 * add rounding to fix crazy decimals

0.3.0 / 2012-09-07
==================

 * fix `ms(<String>)` [visionmedia]

0.2.0 / 2012-09-03
==================

 * add component.json [visionmedia]
 * add days support [visionmedia]
 * add hours support [visionmedia]
 * add minutes support [visionmedia]
 * add seconds support [visionmedia]
 * add ms string support [visionmedia]
 * refactor tests to facilitate ms(number) [visionmedia]

0.1.0 / 2012-03-07
==================

 * Initial release
{ "pile_set_name": "Github" }
/** * SyntaxHighlighter * http://alexgorbatchev.com/ * * SyntaxHighlighter is donationware. If you are using it, please donate. * http://alexgorbatchev.com/wiki/SyntaxHighlighter:Donate * * @version * 2.0.320 (May 03 2009) * * @copyright * Copyright (C) 2004-2009 Alex Gorbatchev. * * @license * This file is part of SyntaxHighlighter. * * SyntaxHighlighter is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * SyntaxHighlighter is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with SyntaxHighlighter. If not, see <http://www.gnu.org/copyleft/lesser.html>. 
 */
// Perl brush: defines the token patterns used to highlight Perl source.
SyntaxHighlighter.brushes.Perl = function()
{
	// Contributed by David Simmons-Duffin and Marty Kube

	// Built-in Perl functions (highlighted with the 'functions' style).
	var funcs =
		'abs accept alarm atan2 bind binmode chdir chmod chomp chop chown chr ' +
		'chroot close closedir connect cos crypt defined delete each endgrent ' +
		'endhostent endnetent endprotoent endpwent endservent eof exec exists ' +
		'exp fcntl fileno flock fork format formline getc getgrent getgrgid ' +
		'getgrnam gethostbyaddr gethostbyname gethostent getlogin getnetbyaddr ' +
		'getnetbyname getnetent getpeername getpgrp getppid getpriority ' +
		'getprotobyname getprotobynumber getprotoent getpwent getpwnam getpwuid ' +
		'getservbyname getservbyport getservent getsockname getsockopt glob ' +
		'gmtime grep hex index int ioctl join keys kill lc lcfirst length link ' +
		'listen localtime lock log lstat map mkdir msgctl msgget msgrcv msgsnd ' +
		'oct open opendir ord pack pipe pop pos print printf prototype push ' +
		'quotemeta rand read readdir readline readlink readpipe recv rename ' +
		'reset reverse rewinddir rindex rmdir scalar seek seekdir select semctl ' +
		'semget semop send setgrent sethostent setnetent setpgrp setpriority ' +
		'setprotoent setpwent setservent setsockopt shift shmctl shmget shmread ' +
		'shmwrite shutdown sin sleep socket socketpair sort splice split sprintf ' +
		'sqrt srand stat study substr symlink syscall sysopen sysread sysseek ' +
		'system syswrite tell telldir time times tr truncate uc ucfirst umask ' +
		'undef unlink unpack unshift utime values vec wait waitpid warn write';

	// Perl language keywords (highlighted with the 'keyword' style).
	var keywords =
		'bless caller continue dbmclose dbmopen die do dump else elsif eval exit ' +
		'for foreach goto if import last local my next no our package redo ref ' +
		'require return sub tie tied unless untie until use wantarray while';

	// Order matters: earlier entries take precedence when patterns overlap
	// (e.g. comments are matched before variables).
	this.regexList = [
		{ regex: new RegExp('#[^!].*$', 'gm'), css: 'comments' },          // '#' comments, but not '#!' lines
		{ regex: new RegExp('^\\s*#!.*$', 'gm'), css: 'preprocessor' },    // shebang
		{ regex: SyntaxHighlighter.regexLib.doubleQuotedString, css: 'string' },
		{ regex: SyntaxHighlighter.regexLib.singleQuotedString, css: 'string' },
		{ regex: new RegExp('(\\$|@|%)\\w+', 'g'), css: 'variable' },      // $scalar, @array, %hash
		{ regex: new RegExp(this.getKeywords(funcs), 'gmi'), css: 'functions' },
		{ regex: new RegExp(this.getKeywords(keywords), 'gm'), css: 'keyword' }
		];

	// Allow highlighting Perl embedded in <?...?> script tags.
	this.forHtmlScript(SyntaxHighlighter.regexLib.phpScriptTags);
}

SyntaxHighlighter.brushes.Perl.prototype = new SyntaxHighlighter.Highlighter();
SyntaxHighlighter.brushes.Perl.aliases = ['perl', 'Perl', 'pl'];
{ "pile_set_name": "Github" }
// JPMS module descriptor for the monad-result module.
module com.networknt.monadresult {
    // The module's single public package (the monadic Result API).
    exports com.networknt.monad;

    requires com.networknt.status;
    requires com.networknt.config;
    requires com.networknt.utility;
    requires java.management;
    requires org.slf4j;
    // jdk.unsupported: needed for internal JDK APIs (e.g. sun.misc) used by a
    // dependency — TODO confirm which one.
    requires jdk.unsupported;
}
{ "pile_set_name": "Github" }
# Use the first host of the 'chrony' inventory group as the time source.
server {{ groups['chrony'][0] }} iburst

# Give no additional selection weight to sources based on their stratum.
stratumweight 0

# Record the rate at which the system clock gains/loses time.
driftfile /var/lib/chrony/drift

# Allow the system clock to be stepped in the first three updates
# if its offset is larger than 1 second.
makestep 1.0 3

# Enable kernel synchronization of the real-time clock (RTC).
rtcsync

# Enable hardware timestamping on all interfaces that support it.
#hwtimestamp *

# Increase the minimum number of selectable sources required to adjust
# the system clock.
#minsources 2

# Allow NTP client access from local network.
allow {{ local_network }}

# bindcmdaddress 127.0.0.1
bindcmdaddress ::1

# Serve time even if not synchronized to a time source.
#local stratum 10

# Specify file containing keys for NTP authentication.
keyfile /etc/chrony.keys

# Specify directory for log files.
logdir /var/log/chrony

# Select which information is logged.
#log measurements statistics tracking

# logchange 1
{ "pile_set_name": "Github" }
import { Convert } from "../ExtensionMethods"; import { DateTime } from "../DateTime"; import { EwsServiceJsonReader } from "../Core/EwsServiceJsonReader"; import { ExchangeService } from "../Core/ExchangeService"; import { ExtendedPropertyCollection } from "../ComplexProperties/ExtendedPropertyCollection"; import { FailedSearchMailbox } from "./FailedSearchMailbox"; import { Importance } from "../Enumerations/Importance"; import { ItemId } from "../ComplexProperties/ItemId"; import { KeywordStatisticsSearchResult } from "./KeywordStatisticsSearchResult"; import { MailboxQuery } from "./MailboxQuery"; import { MailboxSearchLocation } from "../Enumerations/MailboxSearchLocation"; import { MailboxSearchScope } from "./MailboxSearchScope"; import { MailboxStatisticsItem } from "./MailboxStatisticsItem"; import { PreviewItemMailbox } from "./PreviewItemMailbox"; import { SearchPreviewItem } from "./SearchPreviewItem"; import { SearchRefinerItem } from "./SearchRefinerItem"; import { SearchResultType } from "../Enumerations/SearchResultType"; import { XmlElementNames } from "../Core/XmlElementNames"; /** * Represents search mailbox result. 
* * @sealed */ export class SearchMailboxesResult { /** * Search queries */ SearchQueries: MailboxQuery[] = null; /** * Result type */ ResultType: SearchResultType = SearchResultType.StatisticsOnly; /** * Item count */ ItemCount: number = 0; /** * Total size * [CLSCompliant(false)] */ Size: number = 0; /** * Page item count */ PageItemCount: number = 0; /** * Total page item size * [CLSCompliant(false)] */ PageItemSize: number = 0; /** * Keyword statistics search result */ KeywordStats: KeywordStatisticsSearchResult[] = null; /** * Search preview items */ PreviewItems: SearchPreviewItem[] = null; /** * Failed mailboxes */ FailedMailboxes: FailedSearchMailbox[] = null; /** * Refiners */ Refiners: SearchRefinerItem[] = null; /** * Mailbox statistics */ MailboxStats: MailboxStatisticsItem[] = null; /** * Get collection of recipients * * @param {any} jsObject Json Object converted from XML. * @return {string[]} Array of recipients */ private static GetRecipients(jsObject: any): string[] { let recipients: string[] = EwsServiceJsonReader.ReadAsArray(jsObject, XmlElementNames.SmtpAddress) return recipients.length === 0 ? null : recipients; } /** * Load extended properties from XML. * * @param {any} jsObject Json Object converted from XML. * @param {ExchangeService} service The service. * @return {ExtendedPropertyCollection} Extended properties collection */ private static LoadExtendedPropertiesXmlJsObject(jsObject: any, service: ExchangeService): ExtendedPropertyCollection { let extendedProperties: ExtendedPropertyCollection = new ExtendedPropertyCollection(); for (let extendedProperty of EwsServiceJsonReader.ReadAsArray(jsObject, XmlElementNames.ExtendedProperty)) { extendedProperties.LoadFromXmlJsObject(extendedProperty, service); } return extendedProperties.Count === 0 ? null : extendedProperties; } /** * Loads service object from XML. * * @param {any} jsObject Json Object converted from XML. * @param {ExchangeService} service The service. 
* @return {SearchMailboxesResult} Search result object */ static LoadFromXmlJsObject(jsObject: any, service: ExchangeService): SearchMailboxesResult { let searchResult: SearchMailboxesResult = new SearchMailboxesResult(); if (jsObject[XmlElementNames.SearchQueries]) { searchResult.SearchQueries = []; for (let searchQuery of EwsServiceJsonReader.ReadAsArray(jsObject[XmlElementNames.SearchQueries], XmlElementNames.SearchQuery)) { let query: string = searchQuery[XmlElementNames.Query]; let mailboxSearchScopes: MailboxSearchScope[] = []; if (searchQuery[XmlElementNames.MailboxSearchScopes]) { for (let mailboxSearchScope of EwsServiceJsonReader.ReadAsArray(searchQuery[XmlElementNames.MailboxSearchScopes], XmlElementNames.MailboxSearchScope)) { let mailbox: string = mailboxSearchScope[XmlElementNames.Mailbox]; let searchScope: MailboxSearchLocation = MailboxSearchLocation[<string>mailboxSearchScope[XmlElementNames.SearchScope]]; mailboxSearchScopes.push(new MailboxSearchScope(mailbox, searchScope)); } } searchResult.SearchQueries.push(new MailboxQuery(query, mailboxSearchScopes)); } } if (jsObject[XmlElementNames.ResultType]) { searchResult.ResultType = SearchResultType[<string>jsObject[XmlElementNames.ResultType]] } if (jsObject[XmlElementNames.ItemCount]) { searchResult.ItemCount = Convert.toNumber(jsObject[XmlElementNames.ItemCount]); } if (jsObject[XmlElementNames.Size]) { searchResult.Size = Convert.toNumber(jsObject[XmlElementNames.Size]); } if (jsObject[XmlElementNames.PageItemCount]) { searchResult.PageItemCount = Convert.toNumber(jsObject[XmlElementNames.PageItemCount]); } if (jsObject[XmlElementNames.PageItemSize]) { searchResult.PageItemSize = Convert.toNumber(jsObject[XmlElementNames.PageItemSize]); } if (jsObject[XmlElementNames.KeywordStats]) { searchResult.KeywordStats = this.LoadKeywordStatsXmlJsObject(jsObject[XmlElementNames.KeywordStats]); } if (jsObject[XmlElementNames.Items]) { searchResult.PreviewItems = 
this.LoadPreviewItemsXmlJsObject(jsObject[XmlElementNames.Items], service); } if (jsObject[XmlElementNames.FailedMailboxes]) { searchResult.FailedMailboxes = FailedSearchMailbox.LoadFromXmlJsObject(jsObject[XmlElementNames.FailedMailboxes], service); } if (jsObject[XmlElementNames.Refiners]) { let refiners: SearchRefinerItem[] = []; for (let refiner of EwsServiceJsonReader.ReadAsArray(jsObject[XmlElementNames.Refiners], XmlElementNames.Refiner)) { refiners.push(SearchRefinerItem.LoadFromXmlJsObject(refiner, service)); } if (refiners.length > 0) { searchResult.Refiners = refiners; } } if (jsObject[XmlElementNames.MailboxStats]) { let mailboxStats: MailboxStatisticsItem[] = []; for (let mailboxStat of EwsServiceJsonReader.ReadAsArray(jsObject[XmlElementNames.MailboxStats], XmlElementNames.MailboxStat)) { mailboxStats.push(MailboxStatisticsItem.LoadFromXmlJsObject(mailboxStat, service)); } if (mailboxStats.length > 0) { searchResult.MailboxStats = mailboxStats; } } return searchResult; } /** * Load keyword stats from XML. * * @param {any} jsObject Json Object converted from XML. * @return {KeywordStatisticsSearchResult[]} Array of keyword statistics */ private static LoadKeywordStatsXmlJsObject(jsObject: any): KeywordStatisticsSearchResult[] { let keywordStats: KeywordStatisticsSearchResult[] = []; for (let keywordStatObj of EwsServiceJsonReader.ReadAsArray(jsObject, XmlElementNames.KeywordStat)) { let keywordStat: KeywordStatisticsSearchResult = new KeywordStatisticsSearchResult(); keywordStat.Keyword = jsObject[XmlElementNames.Keyword]; keywordStat.ItemHits = Convert.toNumber(jsObject[XmlElementNames.ItemHits]);
{ "pile_set_name": "Github" }
/*
Copyright (C) 2012 Sebastian Herbord. All rights reserved.

This file is part of Mod Organizer.

Mod Organizer is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

Mod Organizer is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with Mod Organizer. If not, see <http://www.gnu.org/licenses/>.
*/

#ifndef OVERWRITEINFODIALOG_H
#define OVERWRITEINFODIALOG_H

#include "modinfo.h"
#include <QDialog>
#include <QFileSystemModel>

namespace Ui {
    class OverwriteInfoDialog;
}

// QFileSystemModel variant that appends an extra "Overwrites" column after the
// regular file-system columns. The extra column's cell data is a placeholder
// ("not implemented").
// NOTE(review): columnCount() returns only the regular column count, while
// headerData()/data() handle sections at/after m_RegularColumnCount — confirm
// whether columnCount() was meant to report the extra column as well.
class OverwriteFileSystemModel : public QFileSystemModel {

  Q_OBJECT;

public:

  OverwriteFileSystemModel(QObject *parent)
    : QFileSystemModel(parent), m_RegularColumnCount(0) {}

  // Caches the base class's column count (for use by headerData/data) and
  // returns it unchanged.
  virtual int columnCount(const QModelIndex &parent) const {
    m_RegularColumnCount = QFileSystemModel::columnCount(parent);
    return m_RegularColumnCount;
  }

  // Supplies the "Overwrites" header for sections beyond the regular columns;
  // delegates everything else to QFileSystemModel.
  virtual QVariant headerData(int section, Qt::Orientation orientation, int role) const {
    if ((orientation == Qt::Horizontal) && (section >= m_RegularColumnCount)) {
      if (role == Qt::DisplayRole) {
        return tr("Overwrites");
      } else {
        return QVariant();
      }
    } else {
      return QFileSystemModel::headerData(section, orientation, role);
    }
  }

  // Placeholder cell content for the extra column; regular columns are
  // delegated to QFileSystemModel.
  virtual QVariant data(const QModelIndex &index, int role) const {
    if (index.column() == m_RegularColumnCount + 0) {
      if (role == Qt::DisplayRole) {
        return tr("not implemented");
      } else {
        return QVariant();
      }
    } else {
      return QFileSystemModel::data(index, role);
    }
  }

private:

  // Cached in the const columnCount() override, hence mutable.
  mutable int m_RegularColumnCount;

};

// Dialog showing the contents of the "overwrite" pseudo-mod, with a file view
// supporting open/rename/delete/new-folder operations via a context menu.
class OverwriteInfoDialog : public QDialog
{
  Q_OBJECT

public:
  explicit OverwriteInfoDialog(ModInfo::Ptr modInfo, QWidget *parent = 0);
  ~OverwriteInfoDialog();

  // The mod currently displayed by the dialog.
  ModInfo::Ptr modInfo() const { return m_ModInfo; }

  // saves geometry
  //
  void done(int r) override;

  // Points the dialog (and its file model) at a different mod.
  void setModInfo(ModInfo::Ptr modInfo);

protected:
  // restores geometry
  //
  void showEvent(QShowEvent* e) override;

private:
  void openFile(const QModelIndex &index);
  bool recursiveDelete(const QModelIndex &index);
  void deleteFile(const QModelIndex &index);

private slots:
  void delete_activated();

  void deleteTriggered();
  void renameTriggered();
  void openTriggered();
  void createDirectoryTriggered();
  void on_explorerButton_clicked();
  void on_filesView_customContextMenuRequested(const QPoint &pos);

private:
  Ui::OverwriteInfoDialog *ui;
  QFileSystemModel *m_FileSystemModel;
  QModelIndexList m_FileSelection;   // selection captured when the context menu opens
  QAction *m_DeleteAction;
  QAction *m_RenameAction;
  QAction *m_OpenAction;
  QAction *m_NewFolderAction;
  ModInfo::Ptr m_ModInfo;
};

#endif // OVERWRITEINFODIALOG_H
{ "pile_set_name": "Github" }
package org.robolectric.shadows;

import static org.robolectric.shadow.api.Shadow.directlyOn;

import android.os.Message;
import android.os.Messenger;
import android.os.RemoteException;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
import org.robolectric.annotation.RealObject;
import org.robolectric.annotation.Resetter;

/**
 * Robolectric shadow for {@link Messenger} that records the last {@link Message} passed to
 * {@link Messenger#send(Message)} so tests can inspect it, while still delegating the send to the
 * real implementation.
 */
@Implements(Messenger.class)
public class ShadowMessenger {
  private static Message lastMessageSent = null;

  /** Returns the last {@link Message} sent, or {@code null} if there isn't any message sent. */
  public static Message getLastMessageSent() {
    return lastMessageSent;
  }

  @RealObject private Messenger messenger;

  @Implementation
  protected void send(Message message) throws RemoteException {
    // Record a copy via Message.obtain so the captured message is independent of
    // whatever happens to the original after dispatch, then call the real send().
    lastMessageSent = Message.obtain(message);
    directlyOn(messenger, Messenger.class).send(message);
  }

  /** Clears the recorded message; invoked by Robolectric between tests. */
  @Resetter
  public static void reset() {
    lastMessageSent = null;
  }
}
{ "pile_set_name": "Github" }
.. currentmodule:: PyQt5.QtWidgets QKeyEventTransition ------------------- .. class:: QKeyEventTransition `C++ documentation <https://doc.qt.io/qt-5/qkeyeventtransition.html>`_
{ "pile_set_name": "Github" }
encode_krb5_ldap_seqof_key_data: [Sequence/Sequence Of] . [0] [Integer] 1 . [1] [Integer] 1 . [2] [Integer] 42 . [3] [Integer] 14 . [4] [Sequence/Sequence Of] . . [Sequence/Sequence Of] . . . [0] [Sequence/Sequence Of] . . . . [0] [Integer] 0 . . . . [1] [Octet String] "salt0" . . . [1] [Sequence/Sequence Of] . . . . [0] [Integer] 2 . . . . [1] [Octet String] "key0" . . [Sequence/Sequence Of] . . . [0] [Sequence/Sequence Of] . . . . [0] [Integer] 1 . . . . [1] [Octet String] "salt1" . . . [1] [Sequence/Sequence Of] . . . . [0] [Integer] 2 . . . . [1] [Octet String] "key1" . . [Sequence/Sequence Of] . . . [0] [Sequence/Sequence Of] . . . . [0] [Integer] 2 . . . . [1] [Octet String] "salt2" . . . [1] [Sequence/Sequence Of] . . . . [0] [Integer] 2 . . . . [1] [Octet String] "key2"
{ "pile_set_name": "Github" }
package au.com.dius.pact.provider.junit

import au.com.dius.pact.core.model.Pact
import au.com.dius.pact.core.model.ProviderState
import au.com.dius.pact.core.model.Request
import au.com.dius.pact.core.model.RequestResponseInteraction
import au.com.dius.pact.core.model.RequestResponsePact
import au.com.dius.pact.core.model.Response
import au.com.dius.pact.provider.junitsupport.IgnoreNoPactsToVerify
import au.com.dius.pact.provider.junitsupport.Provider
import au.com.dius.pact.provider.junitsupport.loader.PactFilter
import au.com.dius.pact.provider.junitsupport.loader.PactFolder
import au.com.dius.pact.provider.junitsupport.target.Target
import au.com.dius.pact.provider.junitsupport.target.TestTarget
import org.junit.runner.notification.RunNotifier
import org.junit.runners.model.InitializationError
import spock.lang.Specification

/**
 * Spec for PactRunner's @PactFilter handling: interactions are filtered by
 * provider-state name (exact values or regexes), empty filters are no-ops,
 * and filtering away every pact is an initialisation error unless
 * @IgnoreNoPactsToVerify is present.
 */
@SuppressWarnings('UnnecessaryGetter')
class FilteredPactRunnerSpec extends Specification {

  private List<Pact> pacts
  private au.com.dius.pact.core.model.Consumer consumer, consumer2
  private au.com.dius.pact.core.model.Provider provider
  private List<RequestResponseInteraction> interactions, interactions2

  // Fixture test classes below exist only to carry different annotation
  // combinations into PactRunner; they are never executed as tests themselves.

  // Single exact-state filter.
  @Provider('myAwesomeService')
  @PactFolder('pacts')
  @PactFilter('State 1')
  @IgnoreNoPactsToVerify
  class TestClass {
    @TestTarget
    Target target
  }

  // Empty filter value — should leave the pact list untouched.
  @Provider('myAwesomeService')
  @PactFolder('pacts')
  @PactFilter('')
  class TestClassEmptyFilter {
    @TestTarget
    Target target
  }

  // Multiple empty filter values — also a no-op.
  @Provider('myAwesomeService')
  @PactFolder('pacts')
  @PactFilter(['', '', ''])
  class TestClassEmptyFilters {
    @TestTarget
    Target target
  }

  @Provider('myAwesomeService')
  @PactFolder('pacts')
  @PactFilter('')
  class TestClassNoFilterAnnotations {
    @TestTarget
    Target target
  }

  // Two exact state names.
  @Provider('myAwesomeService')
  @PactFolder('pacts')
  @PactFilter(['State 1', 'State 3'])
  @IgnoreNoPactsToVerify
  class TestMultipleStatesClass {
    @TestTarget
    Target target
  }

  // Regex filter matching 'State <digits>'.
  @Provider('myAwesomeService')
  @PactFolder('pacts')
  @PactFilter('State \\d+')
  @IgnoreNoPactsToVerify
  class TestRegexClass {
    @TestTarget
    Target target
  }

  // Filter that matches no interaction at all.
  @Provider('myAwesomeService')
  @PactFolder('pacts')
  @PactFilter(['State 6'])
  class TestFilterOutAllPactsClass {
    @TestTarget
    Target target
  }

  // Same as above but with @IgnoreNoPactsToVerify, so no error is expected.
  @Provider('myAwesomeService')
  @PactFolder('pacts')
  @PactFilter(['State 6'])
  @IgnoreNoPactsToVerify
  class TestFilterOutAllPactsIgnoreNoPactsToVerifyClass {
    @TestTarget
    Target target
  }

  def setup() {
    // Two pacts for the same provider: Consumer 1 has states 1/2,
    // Consumer 2 has states 3/X.
    consumer = new au.com.dius.pact.core.model.Consumer('Consumer 1')
    consumer2 = new au.com.dius.pact.core.model.Consumer('Consumer 2')
    provider = new au.com.dius.pact.core.model.Provider('myAwesomeService')
    interactions = [
      new RequestResponseInteraction('Req 1', [
        new ProviderState('State 1') ], new Request(), new Response()),
      new RequestResponseInteraction('Req 2', [
        new ProviderState('State 1'), new ProviderState('State 2') ], new Request(), new Response())
    ]
    interactions2 = [
      new RequestResponseInteraction('Req 3', [
        new ProviderState('State 3') ], new Request(), new Response()),
      new RequestResponseInteraction('Req 4', [
        new ProviderState('State X') ], new Request(), new Response())
    ]
    pacts = [
      new RequestResponsePact(provider, consumer, interactions),
      new RequestResponsePact(provider, consumer2, interactions2)
    ]
  }

  def 'handles a test class with no filter annotations'() {
    given:
    PactRunner pactRunner = new PactRunner(TestClassNoFilterAnnotations)

    when:
    def result = pactRunner.filterPacts(pacts)

    then:
    // Same list instance: no filtering performed.
    result.is pacts
  }

  def 'handles a test class with an empty filter annotation'() {
    given:
    PactRunner pactRunner = new PactRunner(TestClassEmptyFilter)
    PactRunner pactRunner2 = new PactRunner(TestClassEmptyFilters)

    when:
    def result = pactRunner.filterPacts(pacts)
    def result2 = pactRunner2.filterPacts(pacts)

    then:
    result.is pacts
    result2.is pacts
  }

  def 'filters the interactions by provider state'() {
    given:
    PactRunner pactRunner = new PactRunner(TestClass)

    when:
    def result = pactRunner.filterPacts(pacts)

    then:
    // Only Consumer 1's pact survives ('State 1' appears in both its interactions).
    result.size() == 1
    result*.interactions*.description.flatten() == ['Req 1', 'Req 2']
    result[0].isNotFiltered()
    !result[0].isFiltered()
  }

  def 'filters the interactions correctly when given multiple provider states'() {
    given:
    PactRunner pactRunner = new PactRunner(TestMultipleStatesClass)

    when:
    def result = pactRunner.filterPacts(pacts)

    then:
    // Consumer 2's pact is partially filtered: 'Req 4' (State X) is dropped.
    result.size() == 2
    result*.interactions*.description.flatten() == ['Req 1', 'Req 2', 'Req 3']
    result[0].isNotFiltered()
    !result[0].isFiltered()
    !result[1].isNotFiltered()
    result[1].isFiltered()
  }

  def 'filters the interactions correctly when given a regex'() {
    given:
    PactRunner pactRunner = new PactRunner(TestRegexClass)

    when:
    def result = pactRunner.filterPacts(pacts)

    then:
    // 'State \d+' matches states 1, 2 and 3 but not 'State X'.
    result.size() == 2
    result*.interactions*.description.flatten() == ['Req 1', 'Req 2', 'Req 3']
    result[0].isNotFiltered()
    !result[0].isFiltered()
    !result[1].isNotFiltered()
    result[1].isFiltered()
  }

  @SuppressWarnings('UnusedObject')
  def 'Throws an initialisation error if all pacts are filtered out'() {
    when:
    new PactRunner(TestFilterOutAllPactsClass).run(new RunNotifier())

    then:
    thrown(InitializationError)
  }

  @SuppressWarnings('UnusedObject')
  def 'Does not throw an initialisation error if all pacts are filtered out but @IgnoreNoPactsToVerify is present'() {
    when:
    new PactRunner(TestFilterOutAllPactsIgnoreNoPactsToVerifyClass)

    then:
    notThrown(InitializationError)
  }
}
{ "pile_set_name": "Github" }
/* * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright (c) 1997-2011 Oracle and/or its affiliates. All rights reserved. * * The contents of this file are subject to the terms of either the GNU * General Public License Version 2 only ("GPL") or the Common Development * and Distribution License("CDDL") (collectively, the "License"). You * may not use this file except in compliance with the License. You can * obtain a copy of the License at * https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html * or packager/legal/LICENSE.txt. See the License for the specific * language governing permissions and limitations under the License. * * When distributing the software, include this License Header Notice in each * file and include the License file at packager/legal/LICENSE.txt. * * GPL Classpath Exception: * Oracle designates this particular file as subject to the "Classpath" * exception as provided by Oracle in the GPL Version 2 section of the License * file that accompanied this code. * * Modifications: * If applicable, add the following below the License Header, with the fields * enclosed by brackets [] replaced by your own identifying information: * "Portions Copyright [year] [name of copyright owner]" * * Contributor(s): * If you wish your version of this file to be governed by only the CDDL or * only the GPL Version 2, indicate your decision by adding "[Contributor] * elects to include this software in this distribution under the [CDDL or GPL * Version 2] license." If you don't indicate a single choice of license, a * recipient has the option to distribute your version of this file under * either the CDDL, the GPL Version 2 or to extend the choice of license to * its licensees as provided above. However, if you add GPL Version 2 code * and therefore, elected the GPL Version 2 license, then the option applies * only if the new code is made subject to such option by the copyright * holder. 
*/ // Portions Copyright [2018] [Payara Foundation and/or its affiliates] package com.sun.enterprise.security.jauth; import java.util.*; import java.lang.reflect.Method; import java.lang.reflect.InvocationTargetException; import java.util.logging.Level; import java.util.logging.Logger; import javax.security.auth.login.AppConfigurationEntry; /** * Shared logic from Client and ServerAuthContext reside here. */ final class AuthContext { static final String INIT = "initialize"; static final String DISPOSE_SUBJECT = "disposeSubject"; static final String SECURE_REQUEST = "secureRequest"; static final String VALIDATE_RESPONSE = "validateResponse"; static final String VALIDATE_REQUEST = "validateRequest"; static final String SECURE_RESPONSE = "secureResponse"; // managesSessions method is implemented by looking for // corresponding option value in module configuration static final String MANAGES_SESSIONS = "managesSessions"; static final String MANAGES_SESSIONS_OPTION = "managessessions"; private ConfigFile.Entry[] entries; private Logger logger; AuthContext(ConfigFile.Entry[] entries, Logger logger) throws AuthException { this.entries = entries; this.logger = logger; } /** * Invoke modules according to configuration */ Object[] invoke(final String methodName, final Object[] args) throws AuthException { // invoke modules in a doPrivileged final Object rValues[] = new Object[entries.length]; try { java.security.AccessController.doPrivileged(new java.security.PrivilegedExceptionAction() { @Override public Object run() throws AuthException { invokePriv(methodName, args, rValues); return null; } }); } catch (java.security.PrivilegedActionException pae) { if (pae.getException() instanceof AuthException) { throw (AuthException) pae.getException(); } else { AuthException ae = new AuthException(); ae.initCause(pae.getException()); throw ae; } } return rValues; } void invokePriv(String methodName, Object[] args, Object[] rValues) throws AuthException { // special treatment for 
managesSessions until the module // interface can be extended. if (methodName.equals(AuthContext.MANAGES_SESSIONS)) { for (int i = 0; i < entries.length; i++) { Map options = entries[i].getOptions(); String mS = (String) options.get(AuthContext.MANAGES_SESSIONS_OPTION); rValues[i] = Boolean.valueOf(mS); } return; } boolean success = false; AuthException firstRequiredError = null; AuthException firstError = null; // XXX no way to reverse module invocation for (int i = 0; i < entries.length; i++) { // get initialized module instance Object module = entries[i].module; // invoke the module try { Method[] mArray = module.getClass().getMethods(); for (int j = 0; j < mArray.length; j++) { if (mArray[j].getName().equals(methodName)) { // invoke module rValues[i] = mArray[j].invoke(module, args); // success - // return if SUFFICIENT and no previous REQUIRED errors if (firstRequiredError == null && entries[i].getControlFlag() == AppConfigurationEntry.LoginModuleControlFlag.SUFFICIENT) { if (logger != null && logger.isLoggable(Level.FINE)) { logger.fine(entries[i].getLoginModuleName() + "." + methodName + " SUFFICIENT success"); } return; } if (logger != null && logger.isLoggable(Level.FINE)) { logger.fine(entries[i].getLoginModuleName() + "." + methodName + " success"); } success = true; break; } } if (!success) { // PLEASE NOTE: // this exception will be thrown if any module // in the context does not support the method. 
NoSuchMethodException nsme = new NoSuchMethodException("module " + module.getClass().getName() + " does not implement " + methodName); AuthException ae = new AuthException(); ae.initCause(nsme); throw ae; } } catch (IllegalAccessException iae) { AuthException ae = new AuthException(); ae.initCause(iae); throw ae; } catch (InvocationTargetException ite) { // failure cases AuthException ae; if (ite.getCause() instanceof AuthException) { ae = (AuthException) ite.getCause(); } else { ae = new AuthException(); ae.initCause(ite.getCause()); } if (entries[i].getControlFlag() == AppConfigurationEntry.LoginModuleControlFlag.REQUISITE) { if (logger != null && logger.isLoggable(Level.FINE)) { logger.fine(entries[i].getLoginModuleName() + "." + methodName + " REQUISITE failure"); } // immediately throw exception if (firstRequiredError != null) { throw firstRequiredError; } else { throw ae; } } else if (entries[i].getControlFlag() == AppConfigurationEntry.LoginModuleControlFlag.REQUIRED) { if (logger != null && logger.isLoggable(Level.FINE)) { logger.fine(entries[i].getLoginModuleName() + "." + methodName + " REQUIRED failure"); } // save exception and continue if (firstRequiredError == null) { firstRequiredError = ae; } } else { if (logger != null && logger.isLoggable(Level
{ "pile_set_name": "Github" }
import {
  ChangeDetectionStrategy,
  Component,
  Input,
  OnDestroy,
  OnInit,
  Output,
} from '@angular/core';
import { RxState } from '@rx-angular/state';
import { distinctUntilKeyChanged, map, startWith, switchMap, tap } from 'rxjs/operators';
import {
  ListServerItem,
  ListService,
} from '../../../data-access/list-resource';
import { interval, Subject, Subscription } from 'rxjs';

// Minimal view-model item shape derived from ListServerItem.
export interface DemoBasicsItem {
  id: string;
  name: string;
}

// State managed by RxState for this component.
interface ComponentState {
  refreshInterval: number;
  list: DemoBasicsItem[];
  listExpanded: boolean;
}

// Initial component state; refreshInterval is in milliseconds.
const initComponentState = {
  refreshInterval: 10000,
  listExpanded: false,
  list: [],
};

@Component({
  selector: 'output-bindings-solution',
  template: `
    <h3>
      Output Bindings
    </h3>
    <mat-expansion-panel
      *ngIf="model$ | async as vm"
      (expandedChange)="listExpandedChanges.next($event)"
      [expanded]="vm.listExpanded"
    >
      <mat-expansion-panel-header class="list">
        <mat-progress-bar *ngIf="false" [mode]="'query'"></mat-progress-bar>
        <mat-panel-title> List </mat-panel-title>
        <mat-panel-description>
          <span
            >{{ (storeList$ | async)?.length }} Repositories Updated every:
            {{ vm.refreshInterval }} ms
          </span>
        </mat-panel-description>
      </mat-expansion-panel-header>
      <button
        mat-raised-button
        color="primary"
        (click)="onRefreshClicks($event)"
      >
        Refresh List
      </button>
      <ng-container *ngIf="storeList$ | async as list">
        <div *ngIf="list?.length; else noList">
          <mat-list>
            <mat-list-item *ngFor="let item of list">
              {{ item.name }}
            </mat-list-item>
          </mat-list>
        </div>
      </ng-container>
      <ng-template #noList>
        <mat-card>No list given!</mat-card>
      </ng-template>
    </mat-expansion-panel>
  `,
  changeDetection: ChangeDetectionStrategy.OnPush,
})
// Demo component showing how to derive an @Output from RxState-managed
// state. It periodically refetches a list via ListService and exposes the
// panel's expanded/collapsed state to the parent.
export class OutputBindingsSolution extends RxState<ComponentState>
  implements OnInit, OnDestroy {
  // Full state as an observable view model for the template.
  model$ = this.select();
  // Subscription driving the periodic refetch; replaced on every
  // resetRefreshTick() call and torn down in ngOnDestroy.
  intervalSubscription = new Subscription();
  // Raw expandedChange events from the template, connected into state.
  listExpandedChanges = new Subject<boolean>();

  // Server list mapped into the local item shape; starts with the empty
  // initial list so subscribers get a value immediately.
  storeList$ = this.listService.list$.pipe(
    map(this.parseListItems),
    startWith(initComponentState.list)
  );

  // Accepts a new refresh interval and restarts the tick.
  // NOTE(review): values of 4000 ms or below are silently ignored —
  // presumably a guard against hammering the server; confirm the intended
  // minimum.
  @Input()
  set refreshInterval(refreshInterval: number) {
    if (refreshInterval > 4000) {
      this.set({refreshInterval});
      this.resetRefreshTick();
    }
  }

  // NOTE(review): this field shadows the state-managed value and does not
  // appear to be read anywhere in this block — possibly leftover.
  listExpanded: boolean = initComponentState.listExpanded;

  // Emits only distinct expanded/collapsed changes to the parent.
  @Output()
  listExpandedChange = this.$.pipe(distinctUntilKeyChanged('listExpanded'), map(s => s.listExpanded));

  constructor(private listService: ListService) {
    super();
    this.set(initComponentState);
    // Feed template toggle events into the 'listExpanded' state slice.
    this.connect('listExpanded', this.listExpandedChanges)
  }

  ngOnDestroy(): void {
    this.intervalSubscription.unsubscribe();
  }

  ngOnInit(): void {
    this.resetRefreshTick();
  }

  // Cancels any running tick and starts a new one at the current
  // refreshInterval; each tick triggers a list refetch.
  resetRefreshTick() {
    this.intervalSubscription.unsubscribe();
    this.intervalSubscription = interval(this.get('refreshInterval'))
      .pipe(tap((_) => this.listService.refetchList()))
      .subscribe();
  }

  // Manual refresh from the template button.
  onRefreshClicks(event) {
    this.listService.refetchList();
  }

  // Projects server items to the { id, name } view-model shape.
  parseListItems(l: ListServerItem[]): DemoBasicsItem[] {
    return l.map(({ id, name }) => ({ id, name }));
  }
}
{ "pile_set_name": "Github" }
#include <stdio.h>

/*
 * Filter stdin to stdout, replacing every ASCII letter with the digit on
 * the corresponding telephone keypad key (abc -> 2, def -> 3, ...,
 * wxyz -> 9). All other bytes pass through unchanged.
 */
int main()
{
	/* Keypad digit for 'a'..'z', in alphabetical order. */
	static const char digits[] = "22233344455566677778889999";
	int table[256];
	int ch;

	/* Identity mapping by default so non-letters are copied verbatim. */
	for (ch = 0; ch < 256; ch++)
		table[ch] = ch;

	/* Overlay the keypad digit for both cases of each letter. */
	for (ch = 0; ch < 26; ch++) {
		table['a' + ch] = digits[ch];
		table['A' + ch] = digits[ch];
	}

	while ((ch = getchar()) != EOF)
		putchar(table[ch]);

	return 0;
}
{ "pile_set_name": "Github" }
Nevada is a state in the Western, Mountain West, and Southwestern regions of the United States.
{ "pile_set_name": "Github" }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Telerik.Windows.Controls;

namespace CustomizingScatterPoints
{
    /// <summary>
    /// View model exposing the scatter-point series consumed by the chart.
    /// </summary>
    public class ViewModel : ViewModelBase
    {
        private List<ChartData> data;

        public ViewModel()
        {
            this.Data = this.GetData();
        }

        /// <summary>
        /// Gets or sets the chart data points, raising a change
        /// notification when the list instance is replaced.
        /// </summary>
        public List<ChartData> Data
        {
            get
            {
                return this.data;
            }
            set
            {
                if (this.data != value)
                {
                    this.data = value;
                    this.OnPropertyChanged("Data");
                }
            }
        }

        /// <summary>
        /// Builds the fixed sample data set shown by the demo.
        /// </summary>
        private List<ChartData> GetData()
        {
            return new List<ChartData>
            {
                new ChartData(0.1, 100),
                new ChartData(0.1, 101),
                new ChartData(11, 106),
                new ChartData(101, 104),
                new ChartData(101, 108),
            };
        }
    }
}
{ "pile_set_name": "Github" }
function arrowFunctionBodyToCase( j, test, body ) { if ( body.type === 'BlockStatement' ) { return j.switchCase( test, [ body ] ); } return j.switchCase( test, [ j.returnStatement( body ) ] ); } function getCases( j, handlerMap ) { let hasPersistence = false; const cases = handlerMap.properties.map( ( actionNode ) => { const test = actionNode.computed ? actionNode.key : j.literal( actionNode.key.name || String( actionNode.key.value ) ); const fn = actionNode.value; if ( test.type === 'Identifier' && ( test.name === 'SERIALIZE' || test.name === 'DESERIALIZE' ) ) { hasPersistence = true; } if ( test.type === 'Literal' && ( test.value === 'SERIALIZE' || test.value === 'DESERIALIZE' ) ) { hasPersistence = true; } // If it's an arrow function without parameters, just return the body. if ( fn.type === 'ArrowFunctionExpression' && fn.params.length === 0 ) { return arrowFunctionBodyToCase( j, test, fn.body ); } // If it's an arrow function with the right parameter names, just return the body. if ( fn.type === 'ArrowFunctionExpression' && fn.params[ 0 ].name === 'state' && ( fn.params.length === 1 || ( fn.params.length === 2 && fn.params[ 1 ].name === 'action' ) ) ) { return arrowFunctionBodyToCase( j, test, fn.body ); } // If it's an arrow function with a deconstructed action, do magic. if ( fn.type === 'ArrowFunctionExpression' && fn.params[ 0 ].name === 'state' && fn.params.length === 2 && fn.params[ 1 ].type === 'ObjectPattern' ) { const declaration = j.variableDeclaration( 'const', [ j.variableDeclarator( fn.params[ 1 ], j.identifier( 'action' ) ), ] ); const prevBody = fn.body.type === 'BlockStatement' ? 
fn.body.body : [ j.returnStatement( fn.body ) ]; const body = j.blockStatement( [ declaration, ...prevBody ] ); return arrowFunctionBodyToCase( j, test, body ); } return j.switchCase( test, [ j.returnStatement( j.callExpression( actionNode.value, [ j.identifier( 'state' ), j.identifier( 'action' ) ] ) ), ] ); } ); return { cases, hasPersistence }; } function handlePersistence( j, createReducerPath, newNode ) { const parent = createReducerPath.parentPath; const grandParentValue = parent && parent.parentPath.value && parent.parentPath.value.length === 1 && parent.parentPath.value[ 0 ]; const greatGrandParent = grandParentValue && parent && parent.parentPath && parent.parentPath.parentPath; if ( parent && grandParentValue && greatGrandParent && parent.value.type === 'VariableDeclarator' && grandParentValue.type === 'VariableDeclarator' && greatGrandParent.value.type === 'VariableDeclaration' ) { const varName = parent.value.id.name; const persistenceNode = j.expressionStatement( j.assignmentExpression( '=', j.memberExpression( j.identifier( varName ), j.identifier( 'hasCustomPersistence' ), false ), j.literal( true ) ) ); if ( greatGrandParent.parentPath.value.type === 'ExportNamedDeclaration' ) { // Handle `export const reducer = ...` case. greatGrandParent.parentPath.insertAfter( persistenceNode ); } else { // Handle `const reducer = ...` case. 
greatGrandParent.insertAfter( persistenceNode ); } } else if ( parent && parent.value.type === 'AssignmentExpression' ) { const persistenceNode = j.expressionStatement( j.assignmentExpression( '=', j.memberExpression( parent.value.left, j.identifier( 'hasCustomPersistence' ), false ), j.literal( true ) ) ); parent.parentPath.insertAfter( persistenceNode ); } else { newNode.comments = newNode.comments || []; newNode.comments.push( j.commentLine( ' TODO: HANDLE PERSISTENCE', true, false ) ); } return newNode; } export default function transformer( file, api ) { const j = api.jscodeshift; const root = j( file.source ); let usedWithoutPersistence = false; // Handle createReducer root .find( j.CallExpression, ( node ) => node.callee.type === 'Identifier' && node.callee.name === 'createReducer' ) .forEach( ( createReducerPath ) => { if ( createReducerPath.value.arguments.length !== 2 ) { throw new Error( 'Unable to translate createReducer' ); } const [ defaultState, handlerMap ] = createReducerPath.value.arguments; const { cases, hasPersistence } = getCases( j, handlerMap ); let newNode = j.arrowFunctionExpression( [ j.assignmentPattern( j.identifier( 'state' ), defaultState ), j.identifier( 'action' ) ], j.blockStatement( [ j.switchStatement( j.memberExpression( j.identifier( 'action' ), j.identifier( 'type' ) ), cases ), j.returnStatement( j.identifier( 'state' ) ), ] ) ); if ( hasPersistence ) { newNode = handlePersistence( j, createReducerPath, newNode ); } else { usedWithoutPersistence = true; newNode = j.callExpression( j.identifier( 'withoutPersistence' ), [ newNode ] ); } createReducerPath.replace( newNode ); } ); // Handle createReducerWithValidation root .find( j.CallExpression, ( node ) => node.callee.type === 'Identifier' && node.callee.name === 'createReducerWithValidation' ) .forEach( ( createReducerPath ) => { if ( createReducerPath.value.arguments.length !== 3 ) { throw new Error( 'Unable to translate createReducerWithValidation' ); } const [ 
defaultState, handlerMap, schema ] = createReducerPath.value.arguments; const { cases } = getCases( j, handlerMap ); const newNode = j.callExpression( j.identifier( 'withSchemaValidation' ), [ schema, j.arrowFunctionExpression( [ j.assignmentPattern( j.identifier( 'state' ), defaultState ), j.identifier( 'action' ), ], j.blockStatement( [ j.switchStatement( j.memberExpression( j.identifier( 'action' ), j.identifier( 'type' ) ), cases ), j.returnStatement( j.identifier( 'state' ) ), ] ) ), ] ); createReducerPath.replace( newNode ); } ); // Handle imports. root .find( j.ImportDeclaration, ( node ) => node.specifiers && node.specifiers.some( ( s ) => s && s.imported && ( s.imported.name === 'createReducer' || s.imported.name === 'createReducerWithValidation' ) ) ) .forEach( ( nodePath ) => { const filtered = nodePath.value.specifiers.filter( ( s ) => s.imported.name !== 'createReducer' && s.imported.name !== 'createReducerWithValidation' );
{ "pile_set_name": "Github" }
/*
 * Copyright 2002-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.phoenixnap.oss.ramlplugin.raml2code.rules.spring;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestHeader;
import org.springframework.web.bind.annotation.RequestParam;

import com.phoenixnap.oss.ramlplugin.raml2code.data.ApiActionMetadata;
import com.phoenixnap.oss.ramlplugin.raml2code.data.ApiParameterMetadata;
import com.phoenixnap.oss.ramlplugin.raml2code.helpers.CodeModelHelper;
import com.sun.codemodel.JAnnotationUse;
import com.sun.codemodel.JClass;
import com.sun.codemodel.JVar;

/**
 * Overrides method parameters set by {@link SpringMethodParamsRule} so that
 * generated Feign clients carry an explicit parameter name on their Spring
 * binding annotations.
 *
 * @author Aleksandar Stojsavljevic (aleksandars@ccbill.com)
 * @since 2.0.4
 */
public class SpringFeignClientMethodParamsRule extends SpringMethodParamsRule {

	/** Fully-qualified names of the annotations whose "name" must be set explicitly. */
	private static final List<String> ANNOTATIONS_TO_OVERRIDE = new ArrayList<String>();

	static {
		ANNOTATIONS_TO_OVERRIDE.add(RequestParam.class.getName());
		ANNOTATIONS_TO_OVERRIDE.add(RequestHeader.class.getName());
		ANNOTATIONS_TO_OVERRIDE.add(PathVariable.class.getName());
	}

	@Override
	protected JVar paramQueryForm(ApiParameterMetadata paramMetaData, CodeModelHelper.JExtMethod generatableType,
			ApiActionMetadata endpointMetadata) {
		JVar generatedParam = super.paramQueryForm(paramMetaData, generatableType, endpointMetadata);

		// Feign clients need the request/header/path parameter name set even
		// when it matches the Java method parameter name. An already-present
		// name is not overridden by this call.
		Collection<JAnnotationUse> paramAnnotations = generatedParam.annotations();
		for (JAnnotationUse paramAnnotation : paramAnnotations) {
			JClass annotationType = paramAnnotation.getAnnotationClass();
			if (!ANNOTATIONS_TO_OVERRIDE.contains(annotationType.fullName())) {
				continue;
			}
			paramAnnotation.param("name", paramMetaData.getName());
		}
		return generatedParam;
	}
}
{ "pile_set_name": "Github" }
# Minimal recipe fixture exercising conda-build's handling of free-form
# keys under the `extra:` section (arbitrary user metadata is allowed there).
package:
  name: conda-build-test-extra-metadata
  version: 1.0

test:
  requires:
    - pyyaml

extra:
  custom: metadata
  however: {we: want}
{ "pile_set_name": "Github" }
// mksyscall_aix_ppc64.pl -aix -tags aix,ppc64 syscall_aix.go syscall_aix_ppc64.go // Code generated by the command above; see README.md. DO NOT EDIT. // +build aix,ppc64 // +build gccgo package unix /* #include <stdint.h> int utimes(uintptr_t, uintptr_t); int utimensat(int, uintptr_t, uintptr_t, int); int getcwd(uintptr_t, size_t); int accept(int, uintptr_t, uintptr_t); int getdirent(int, uintptr_t, size_t); int wait4(int, uintptr_t, int, uintptr_t); int ioctl(int, int, uintptr_t); int fcntl(uintptr_t, int, uintptr_t); int acct(uintptr_t); int chdir(uintptr_t); int chroot(uintptr_t); int close(int); int dup(int); void exit(int); int faccessat(int, uintptr_t, unsigned int, int); int fchdir(int); int fchmod(int, unsigned int); int fchmodat(int, uintptr_t, unsigned int, int); int fchownat(int, uintptr_t, int, int, int); int fdatasync(int); int fsync(int); int getpgid(int); int getpgrp(); int getpid(); int getppid(); int getpriority(int, int); int getrusage(int, uintptr_t); int getsid(int); int kill(int, int); int syslog(int, uintptr_t, size_t); int mkdir(int, uintptr_t, unsigned int); int mkdirat(int, uintptr_t, unsigned int); int mkfifo(uintptr_t, unsigned int); int mknod(uintptr_t, unsigned int, int); int mknodat(int, uintptr_t, unsigned int, int); int nanosleep(uintptr_t, uintptr_t); int open64(uintptr_t, int, unsigned int); int openat(int, uintptr_t, int, unsigned int); int read(int, uintptr_t, size_t); int readlink(uintptr_t, uintptr_t, size_t); int renameat(int, uintptr_t, int, uintptr_t); int setdomainname(uintptr_t, size_t); int sethostname(uintptr_t, size_t); int setpgid(int, int); int setsid(); int settimeofday(uintptr_t); int setuid(int); int setgid(int); int setpriority(int, int, int); int statx(int, uintptr_t, int, int, uintptr_t); int sync(); uintptr_t times(uintptr_t); int umask(int); int uname(uintptr_t); int unlink(uintptr_t); int unlinkat(int, uintptr_t, int); int ustat(int, uintptr_t); int write(int, uintptr_t, size_t); int dup2(int, int); int 
posix_fadvise64(int, long long, long long, int); int fchown(int, int, int); int fstat(int, uintptr_t); int fstatat(int, uintptr_t, uintptr_t, int); int fstatfs(int, uintptr_t); int ftruncate(int, long long); int getegid(); int geteuid(); int getgid(); int getuid(); int lchown(uintptr_t, int, int); int listen(int, int); int lstat(uintptr_t, uintptr_t); int pause(); int pread64(int, uintptr_t, size_t, long long); int pwrite64(int, uintptr_t, size_t, long long); int pselect(int, uintptr_t, uintptr_t, uintptr_t, uintptr_t, uintptr_t); int setregid(int, int); int setreuid(int, int); int shutdown(int, int); long long splice(int, uintptr_t, int, uintptr_t, int, int); int stat(uintptr_t, uintptr_t); int statfs(uintptr_t, uintptr_t); int truncate(uintptr_t, long long); int bind(int, uintptr_t, uintptr_t); int connect(int, uintptr_t, uintptr_t); int getgroups(int, uintptr_t); int setgroups(int, uintptr_t); int getsockopt(int, int, int, uintptr_t, uintptr_t); int setsockopt(int, int, int, uintptr_t, uintptr_t); int socket(int, int, int); int socketpair(int, int, int, uintptr_t); int getpeername(int, uintptr_t, uintptr_t); int getsockname(int, uintptr_t, uintptr_t); int recvfrom(int, uintptr_t, size_t, int, uintptr_t, uintptr_t); int sendto(int, uintptr_t, size_t, int, uintptr_t, uintptr_t); int recvmsg(int, uintptr_t, int); int sendmsg(int, uintptr_t, int); int munmap(uintptr_t, uintptr_t); int madvise(uintptr_t, size_t, int); int mprotect(uintptr_t, size_t, int); int mlock(uintptr_t, size_t); int mlockall(int); int msync(uintptr_t, size_t, int); int munlock(uintptr_t, size_t); int munlockall(); int pipe(uintptr_t); int poll(uintptr_t, int, int); int gettimeofday(uintptr_t, uintptr_t); int time(uintptr_t); int utime(uintptr_t, uintptr_t); int getrlimit(int, uintptr_t); int setrlimit(int, uintptr_t); long long lseek(int, long long, int); uintptr_t mmap64(uintptr_t, uintptr_t, int, int, int, long long); */ import "C" import ( "syscall" ) // THIS FILE IS GENERATED BY THE COMMAND 
AT THE TOP; DO NOT EDIT func callutimes(_p0 uintptr, times uintptr) (r1 uintptr, e1 Errno) { r1 = uintptr(C.utimes(C.uintptr_t(_p0), C.uintptr_t(times))) e1 = syscall.GetErrno() return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func callutimensat(dirfd int, _p0 uintptr, times uintptr, flag int) (r1 uintptr, e1 Errno) { r1 = uintptr(C.utimensat(C.int(dirfd), C.uintptr_t(_p0), C.uintptr_t(times), C.int(flag))) e1 = syscall.GetErrno() return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func callgetcwd(_p0 uintptr, _lenp0 int) (r1 uintptr, e1 Errno) { r1 = uintptr(C.getcwd(C.uintptr_t(_p0), C.size_t(_lenp0))) e1 = syscall.GetErrno() return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func callaccept(s int, rsa uintptr, addrlen uintptr) (r1 uintptr, e1 Errno) { r1 = uintptr(C.accept(C.int(s), C.uintptr_t(rsa), C.uintptr_t(addrlen))) e1 = syscall.GetErrno() return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func callgetdirent(fd int, _p0 uintptr, _lenp0 int) (r1 uintptr, e1 Errno) { r1 = uintptr(C.getdirent(C.int(fd), C.uintptr_t(_p0), C.size_t(_
{ "pile_set_name": "Github" }
#!{{pkgPathFor "core/bash"}}/bin/bash

# Habitat run hook for the license-control-service package.
# NOTE(review): the {{...}} expressions are Handlebars templates expanded by
# Habitat when the hook is rendered; they are not shell syntax.

# Abort on the first failing command.
set -e
# Merge stderr into stdout so the supervisor captures one log stream.
exec 2>&1

# Call the script to block until user accepts the MLSA via the package's config
{{pkgPathFor "chef/mlsa"}}/bin/accept {{cfg.mlsa.accept}}

# Ensure the service's Postgres database exists before the server starts.
pg-helper ensure-service-database chef_license_control_service

# Run the License Control gRPC server
# (exec replaces this shell so the supervisor manages the server process directly)
exec license-control-service serve --config {{pkg.svc_config_path}}/config.toml
{ "pile_set_name": "Github" }
#include <linux/bio.h>

/*
 * References bio->bi_status and bio_endio() against the kernel headers.
 * NOTE(review): this looks like a configure-time compile probe for kernels
 * whose block layer exposes the bi_status field (rather than code meant to
 * be executed) -- confirm against the surrounding build system.
 */
void dummy(struct bio *bio)
{
	bio->bi_status = BLK_STS_IOERR;
	bio_endio(bio);
}
{ "pile_set_name": "Github" }
package io.buoyant.namer

import com.twitter.finagle.Namer

/**
 * Abstract base for [[com.twitter.finagle.Namer]] implementations written in
 * Java: extending an abstract Scala class is more convenient from Java than
 * mixing in a trait.
 */
abstract class JNamer extends Namer
{ "pile_set_name": "Github" }
#pragma once

// Lifecycle states reported for the platform location/heading services.
enum LocationServiceStatus
{
    kLocationServiceStopped,
    kLocationServiceInitializing,
    kLocationServiceRunning,
    kLocationServiceFailed
};

// Static facade over the platform location and compass-heading services.
// Declarations only; the implementation lives in platform-specific sources.
// NOTE(review): the UIKit types below suggest an iOS/tvOS backend
// (CLLocationManager-style API) -- confirm in the implementation file.
class LocationService
{
public:
    // Desired accuracy of location fixes.
    // NOTE(review): presumably meters, mirroring CLLocationManager.desiredAccuracy -- confirm.
    static void SetDesiredAccuracy(float val);
    static float GetDesiredAccuracy();

    // Minimum movement before a new location update is delivered.
    // NOTE(review): presumably meters, mirroring CLLocationManager.distanceFilter -- confirm.
    static void SetDistanceFilter(float val);
    static float GetDistanceFilter();

    // Whether the user has enabled location services for this application.
    static bool IsServiceEnabledByUser();

    // Start/stop delivery of location updates.
    static void StartUpdatingLocation();
    static void StopUpdatingLocation();

    // Enable/disable delivery of compass-heading updates.
    static void SetHeadingUpdatesEnabled(bool enabled);
    static bool IsHeadingUpdatesEnabled();

    // Current status of location and heading delivery (see LocationServiceStatus).
    static LocationServiceStatus GetLocationStatus();
    static LocationServiceStatus GetHeadingStatus();

    // Whether heading (compass) hardware is available on this device.
    static bool IsHeadingAvailable();
};

#if UNITY_TVOS_SIMULATOR_FAKE_REMOTE
// Test hooks: inject simulated Apple TV remote button presses and touches
// when building for the tvOS simulator with fake-remote support enabled.
void ReportSimulatedRemoteButtonPress(UIPressType type);
void ReportSimulatedRemoteButtonRelease(UIPressType type);
void ReportSimulatedRemoteTouchesBegan(UIView* view, NSSet* touches);
void ReportSimulatedRemoteTouchesMoved(UIView* view, NSSet* touches);
void ReportSimulatedRemoteTouchesEnded(UIView* view, NSSet* touches);
#endif
{ "pile_set_name": "Github" }
# Translation of Odoo Server. # This file contains the translation of the following modules: # * partner_identification # # Translators: # OCA Transbot <transbot@odoo-community.org>, 2017 # Giuliano Lotta <giuliano.lotta@gmail.com>, 2017 msgid "" msgstr "" "Project-Id-Version: Odoo Server 10.0\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2017-11-21 01:49+0000\n" "PO-Revision-Date: 2018-12-24 17:58+0000\n" "Last-Translator: Sergio Zanchetta <primes2h@gmail.com>\n" "Language-Team: Italian (https://www.transifex.com/oca/teams/23907/it/)\n" "Language: it\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: \n" "Plural-Forms: nplurals=2; plural=n != 1;\n" "X-Generator: Weblate 3.3\n" #. module: partner_identification #: code:addons/partner_identification/models/res_partner_id_category.py:0 #, python-format msgid "" "\n" "# Python code. Use failed = True to specify that the id number is not " "valid.\n" "# You can use the following variables :\n" "# - self: browse_record of the current ID Category browse_record\n" "# - id_number: browse_record of ID number to validate" msgstr "" "\n" "# Codice Python. Usare failed = True per specificare che il numero " "documento non è valido.\n" "# È possibile utilizzare le seguenti variabili :\n" "# - self: browse_record della categoria documento corrente browse_record\n" "# - id_number: browse_record del numero documento da validare" #. module: partner_identification #: code:addons/partner_identification/models/res_partner_id_category.py:0 #, python-format msgid "%s is not a valid %s identifier" msgstr "%s non è un identificatore %s valido" #. module: partner_identification #: model:ir.model.fields,help:partner_identification.field_res_partner_id_category__code msgid "Abbreviation or acronym of this ID type. For example, 'driver_license'" msgstr "" "Abbreviazione o acronimo per questo tipo di documento. Per esempio, " "\"driver_license\"" #. 
module: partner_identification #: model:ir.model.fields,field_description:partner_identification.field_res_partner_id_category__active #: model:ir.model.fields,field_description:partner_identification.field_res_partner_id_number__active msgid "Active" msgstr "Attivo" #. module: partner_identification #: model:ir.model.fields,help:partner_identification.field_res_partner_id_number__partner_issued_id msgid "" "Another partner, who issued this ID. For example, Traffic National " "Institution" msgstr "" "Un altro partner, che ha emesso questo documento. Per esempio, " "Motorizzazione Civile" #. module: partner_identification #: model:ir.model.fields,field_description:partner_identification.field_res_partner_id_number__category_id msgid "Category" msgstr "Categoria" #. module: partner_identification #: model:ir.model.fields,field_description:partner_identification.field_res_partner_id_category__code msgid "Code" msgstr "Codice" #. module: partner_identification #: model:ir.model,name:partner_identification.model_res_partner msgid "Contact" msgstr "Contatto" #. module: partner_identification #: model:ir.model.fields,field_description:partner_identification.field_res_partner_id_category__create_uid #: model:ir.model.fields,field_description:partner_identification.field_res_partner_id_number__create_uid msgid "Created by" msgstr "Creato da" #. module: partner_identification #: model:ir.model.fields,field_description:partner_identification.field_res_partner_id_category__create_date #: model:ir.model.fields,field_description:partner_identification.field_res_partner_id_number__create_date msgid "Created on" msgstr "Creato il" #. module: partner_identification #: model:ir.model.fields,field_description:partner_identification.field_res_partner_id_category__display_name #: model:ir.model.fields,field_description:partner_identification.field_res_partner_id_number__display_name msgid "Display Name" msgstr "Nome visualizzato" #. 
module: partner_identification #: code:addons/partner_identification/models/res_partner_id_category.py:0 #, python-format msgid "" "Error when evaluating the id_category validation code::\n" " %s \n" "(%s)" msgstr "" "Errore durante l'esame del codice di convalida id_category.:\n" " %s \n" "(%s)" #. module: partner_identification #: model:ir.model.fields,help:partner_identification.field_res_partner_id_number__valid_until msgid "" "Expiration date. For example, date when person needs to renew his driver " "license, 21/10/2019" msgstr "" "Data di scadenza. Per esempio, data in cui la persona deve rinnovare la sua " "patente di guida, 21/10/2019" #. module: partner_identification #: model:ir.model.fields.selection,name:partner_identification.selection__res_partner_id_number__status__close msgid "Expired" msgstr "Scaduto" #. module: partner_identification #: model:ir.model.fields,field_description:partner_identification.field_res_partner_id_category__id #: model:ir.model.fields,field_description:partner_identification.field_res_partner_id_number__id msgid "ID" msgstr "Documento" #. module: partner_identification #: model:ir.model.fields,field_description:partner_identification.field_res_partner_id_number__name msgid "ID Number" msgstr "Numero documento" #. module: partner_identification #: model_terms:ir.ui.view,arch_db:partner_identification.view_partner_form msgid "ID Numbers" msgstr "Numeri documenti" #. module: partner_identification #: model:ir.model.fields,field_description:partner_identification.field_res_partner_id_category__name msgid "ID name" msgstr "Nome documento" #. module: partner_identification #: model:ir.model.fields,help:partner_identification.field_res_partner_id_number__category_id msgid "ID type defined in configuration. For example, Driver License" msgstr "" "Tipo di documento definito nella configurazione. Per esempio, patente di " "guida" #. 
module: partner_identification #: model:ir.model.fields,field_description:partner_identification.field_res_partner__id_numbers #: model:ir.model.fields,field_description:partner_identification.field_res_users__id_numbers msgid "Identification Numbers" msgstr "Numeri di identificazione" #. module: partner_identification #: model:ir.model.fields,field_description:partner_identification.field_res_partner_id_number__partner_issued_id msgid "Issued by"
{ "pile_set_name": "Github" }
--- layout: base title: 'Statistics of obl in UD_Moksha-JR' udver: '2' --- ## Treebank Statistics: UD_Moksha-JR: Relations: `obl` This relation is universal. There are 3 language-specific subtypes of `obl`: <tt><a href="mdf_jr-dep-obl-agent.html">obl:agent</a></tt>, <tt><a href="mdf_jr-dep-obl-lmod.html">obl:lmod</a></tt>, <tt><a href="mdf_jr-dep-obl-tmod.html">obl:tmod</a></tt>. 46 nodes (4%) are attached to their parents as `obl`. 24 instances of `obl` (52%) are left-to-right (parent precedes child). Average distance between parent and child is 1.71739130434783. The following 6 pairs of parts of speech are connected with `obl`: <tt><a href="mdf_jr-pos-VERB.html">VERB</a></tt>-<tt><a href="mdf_jr-pos-NOUN.html">NOUN</a></tt> (30; 65% instances), <tt><a href="mdf_jr-pos-ADJ.html">ADJ</a></tt>-<tt><a href="mdf_jr-pos-NOUN.html">NOUN</a></tt> (6; 13% instances), <tt><a href="mdf_jr-pos-VERB.html">VERB</a></tt>-<tt><a href="mdf_jr-pos-PRON.html">PRON</a></tt> (5; 11% instances), <tt><a href="mdf_jr-pos-NOUN.html">NOUN</a></tt>-<tt><a href="mdf_jr-pos-NOUN.html">NOUN</a></tt> (2; 4% instances), <tt><a href="mdf_jr-pos-VERB.html">VERB</a></tt>-<tt><a href="mdf_jr-pos-ADP.html">ADP</a></tt> (2; 4% instances), <tt><a href="mdf_jr-pos-VERB.html">VERB</a></tt>-<tt><a href="mdf_jr-pos-PROPN.html">PROPN</a></tt> (1; 2% instances). 
~~~ conllu # visual-style 5 bgColor:blue # visual-style 5 fgColor:white # visual-style 6 bgColor:blue # visual-style 6 fgColor:white # visual-style 6 5 obl color:blue 1 Лётчикне лётчик NOUN N Case=Nom|Definite=Def|Number=Plur 6 nsubj _ SpaceAfter=No 2 , , PUNCT CLB _ 3 punct _ _ 3 улема улема PART Pcle _ 6 advmod:mmod _ SpaceAfter=No 4 , , PUNCT CLB _ 3 punct _ _ 5 кядьса кядь NOUN N Case=Ine|Definite=Ind|Number=Plur,Sing 6 obl _ _ 6 токсесазь токсемс VERB V Mood=Ind|Number[obj]=Plur|Number[subj]=Plur|Person[obj]=3|Person[subj]=3|Tense=Pres|Valency=2 0 root _ _ 7 коволнятнень ковол NOUN N Case=Gen|Definite=Def|Derivation=Dimin|Number=Plur 6 obj _ SpaceAfter=No 8 . . PUNCT CLB _ 6 punct _ _ ~~~ ~~~ conllu # visual-style 4 bgColor:blue # visual-style 4 fgColor:white # visual-style 6 bgColor:blue # visual-style 6 fgColor:white # visual-style 6 4 obl color:blue 1 Ну ну INTJ Interj _ 6 discourse _ SpaceAfter=No 2 , , PUNCT CLB _ 6 punct _ _ 3 кли кли ADV Adv _ 4 advmod _ _ 4 онцтон он NOUN N Case=Ela|Number=Plur,Sing|Number[psor]=Sing|Person[psor]=1 6 obl _ SpaceAfter=No 5 , , PUNCT CLB _ 4 punct _ _ 6 пара пара ADJ A Case=Nom|Definite=Ind|Number=Sing 0 root _ SpaceAfter=No 7 : : PUNCT CLB _ 11 punct _ _ 8 гулянять гуляня NOUN N Case=Gen|Definite=Def|Number=Sing 9 nmod _ _ 9 пацяняц паця NOUN N Case=Nom|Derivation=Dimin|Number=Sing|Number[psor]=Sing|Person[psor]=3 11 obj _ _ 10 апак апак AUX Aux Polarity=Neg 11 aux:neg _ _ 11 синтть синдемс VERB V Connegative=Yes|Valency=2 6 csubj _ SpaceAfter=No 12 . . 
PUNCT CLB _ 6 punct _ _ ~~~ ~~~ conllu # visual-style 4 bgColor:blue # visual-style 4 fgColor:white # visual-style 3 bgColor:blue # visual-style 3 fgColor:white # visual-style 3 4 obl color:blue 1 ― ― PUNCT PUNCT _ 3 punct _ _ 2 Терешкова Терешкова PROPN N Case=Nom|Definite=Ind|NameType=Sur|Number=Sing 3 nsubj _ _ 3 панчсь панжемс VERB V Mood=Ind|Number[subj]=Sing|Person[subj]=3|Tense=Past|Valency=2 0 root _ _ 4 теень мон PRON Pron Case=Dat|Number=Sing|Person=1|PronType=Prs|Variant=Short 3 obl _ _ 5 ки ки NOUN N Case=Nom|Definite=Ind|Number=Sing 3 obj _ SpaceAfter=No 6 . . PUNCT CLB _ 3 punct _ _ ~~~
{ "pile_set_name": "Github" }
# Sourcegraph development documentation This documentation is for developers contributing to Sourcegraph itself. Sourcegraph development is open source at [github.com/sourcegraph/sourcegraph](https://github.com/sourcegraph/sourcegraph). ### Project links - [Repository](https://github.com/sourcegraph/sourcegraph) - [Issue tracker](https://github.com/sourcegraph/sourcegraph/issues) ### Technical - [Quickstart](local_development.md) - [Documentation guidelines](documentation.md) - [Tech stack](tech_stack.md) - [Architecture](architecture/index.md) - [Developing the web clients](web/index.md) - [Developing the web app](web/web_app.md) - [Developing the code host integrations](code_host_integrations.md) - [Developing the GraphQL API](graphql_api.md) - [Developing indexed search](zoekt.md) - [Developing campaigns](campaigns_development.md) - [Developing code intelligence](codeintel/index.md) - [Using PostgreSQL](postgresql.md) - [Testing](testing.md) - [Go style guide](https://about.sourcegraph.com/handbook/engineering/languages/go) - [TypeScript style guide](https://about.sourcegraph.com/handbook/engineering/languages/typescript) - [Code reviews](https://about.sourcegraph.com/handbook/engineering/code_reviews) - [Renovate updates](renovate.md) - [Telemetry](telemetry.md) - [Observability](observability.md) - [Phabricator/Gitolite documentation](phabricator_gitolite.md) ### Other - [Open source FAQ](https://about.sourcegraph.com/community/faq) - [Code of conduct](https://about.sourcegraph.com/community/code_of_conduct)
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <examples> <example> <code>Add-AWSLoggingListener -Name MyAWSLogs -LogFilePath c:\logs\aws.txt</code> <description>Attaches a listener for the source 'Amazon', matching responses from all services for the current script or shell. Log output will be written to the specified file (the folder path must exist). Multiple listeners for different namespaces can be active at a time. By default only error responses are logged.</description> </example> <example> <code>Add-AWSLoggingListener -Name MyS3Logs -LogFilePath c:\logs\s3.txt -Source Amazon.S3</code> <description>Attaches a listener for the source 'Amazon.S3'. Responses matching only this namespace will be logged to the specified file (the folder path must exist). Multiple listeners for different namespaces can be active at a time. By default only error responses are logged.</description> </example> </examples>
{ "pile_set_name": "Github" }
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head> <meta http-equiv="content-type" content="text/html; charset=utf-8" /> <meta name="generator" content="JsDoc Toolkit" /> <title>JsDoc Reference - Guacamole.ChainedTunnel</title> <style type="text/css"> /* default.css */ body { font: 12px "Lucida Grande", Tahoma, Arial, Helvetica, sans-serif; width: 800px; } .header { clear: both; background-color: #ccc; padding: 8px; } h1 { font-size: 150%; font-weight: bold; padding: 0; margin: 1em 0 0 .3em; } hr { border: none 0; border-top: 1px solid #7F8FB1; height: 1px; } pre.code { display: block; padding: 8px; border: 1px dashed #ccc; } #index { margin-top: 24px; float: left; width: 160px; position: absolute; left: 8px; background-color: #F3F3F3; padding: 8px; } #content { margin-left: 190px; width: 600px; } .classList { list-style-type: none; padding: 0; margin: 0 0 0 8px; font-family: arial, sans-serif; font-size: 1em; overflow: auto; } .classList li { padding: 0; margin: 0 0 8px 0; } .summaryTable { width: 100%; } h1.classTitle { font-size:170%; line-height:130%; } h2 { font-size: 110%; } caption, div.sectionTitle { background-color: #7F8FB1; color: #fff; font-size:130%; text-align: left; padding: 2px 6px 2px 6px; border: 1px #7F8FB1 solid; } div.sectionTitle { margin-bottom: 8px; } .summaryTable thead { display: none; } .summaryTable td { vertical-align: top; padding: 4px; border-bottom: 1px #7F8FB1 solid; border-right: 1px #7F8FB1 solid; } /*col#summaryAttributes {}*/ .summaryTable td.attributes { border-left: 1px #7F8FB1 solid; width: 140px; text-align: right; } td.attributes, .fixedFont { line-height: 15px; color: #002EBE; font-family: "Courier New",Courier,monospace; font-size: 13px; } .summaryTable td.nameDescription { text-align: left; font-size: 13px; line-height: 15px; } .summaryTable td.nameDescription, .description { 
line-height: 15px; padding: 4px; padding-left: 4px; } .summaryTable { margin-bottom: 8px; } ul.inheritsList { list-style: square; margin-left: 20px; padding-left: 0; } .detailList { margin-left: 20px; line-height: 15px; } .detailList dt { margin-left: 20px; } .detailList .heading { font-weight: bold; padding-bottom: 6px; margin-left: 0; } .light, td.attributes, .light a:link, .light a:visited { color: #777; font-style: italic; } .fineprint { text-align: right; font-size: 10px; } </style> </head> <body> <!-- ============================== header ================================= --> <!-- begin static/header.html --> <div id="header"> </div> <!-- end static/header.html --> <!-- ============================== classes index ============================ --> <div id="index"> <!-- begin publish.classesIndex --> <div align="center"><a href="../index.html">Class Index</a> | <a href="../files.html">File Index</a></div> <hr /> <h2>Classes</h2> <ul class="classList"> <li><i><a href="../symbols/_global_.html">_global_</a></i></li> <li><a href="../symbols/Guacamole.ArrayBufferReader.html">Guacamole.ArrayBufferReader</a></li> <li><a href="../symbols/Guacamole.ArrayBufferWriter.html">Guacamole.ArrayBufferWriter</a></li> <li><a href="../symbols/Guacamole.AudioChannel.html">Guacamole.AudioChannel</a></li> <li><a href="../symbols/Guacamole.AudioChannel.Packet.html">Guacamole.AudioChannel.Packet</a></li> <li><a href="../symbols/Guacamole.BlobReader.html">Guacamole.BlobReader</a></li> <li><a href="../symbols/Guacamole.ChainedTunnel.html">Guacamole.ChainedTunnel</a></li> <li><a href="../symbols/Guacamole.Client.html">Guacamole.Client</a></li> <li><a href="../symbols/Guacamole.Display.html">Guacamole.Display</a></li> <li><a href="../symbols/Guacamole.Display.VisibleLayer.html">Guacamole.Display.VisibleLayer</a></li> <li><a href="../symbols/Guacamole.HTTPTunnel.html">Guacamole.HTTPTunnel</a></li> <li><a href="../symbols/Guacamole.InputStream.html">Guacamole.InputStream</a></li> <li><a 
href="../symbols/Guacamole.IntegerPool.html">Guacamole.IntegerPool</a></li> <li><a href="../symbols/Guacamole.Keyboard.html">Guacamole.Keyboard</a></li> <li><a href="../symbols/Guacamole.Keyboard.ModifierState.html">Guacamole.Keyboard.ModifierState</a></li> <li><a href="../symbols/Guacamole.Layer.html">Guacamole.Layer</a></li> <li><a href="../symbols/Guacamole.Layer.Pixel.html">Guacamole.Layer.Pixel</a></li> <li><a href="../symbols/Guacamole.Mouse.html">Guacamole.Mouse</a></li> <li><a href="../symbols/Guacamole.Mouse.State.html">Guacamole.Mouse.State</a></li> <li><a href="../symbols/Guacamole.Mouse.Touchpad.html">Guacamole.Mouse.Touchpad</a></li> <li><a href="../symbols/Guacamole.Mouse.Touchscreen.html">Guacamole.Mouse.Touchscreen</a></li> <li><a href="../symbols/Guacamole.OnScreenKeyboard.html">Guacamole.OnScreenKeyboard</a></li> <li><a href="../symbols/Guacamole.OnScreenKeyboard.Cap.html">Guacamole.OnScreenKeyboard.Cap</a></li> <li><
{ "pile_set_name": "Github" }
// #include <ogg/ogg.h> (FIXME) int main() { }
{ "pile_set_name": "Github" }
#!/usr/bin/env bash

# CI test runner: bring up the docker-compose test stack and propagate the
# application container's exit status as this script's exit status.
#
#   --build                    rebuild images before starting containers
#   --abort-on-container-exit  stop every container as soon as any one exits

docker-compose -f 'docker-compose.test.yml' -p ci up --build --abort-on-container-exit

# `docker wait` prints the stopped container's exit code; exit with it so the
# CI job fails exactly when the test container failed.
test_status="$(docker wait ci_express-mongoose-es6-rest-api_1)"
exit "$test_status"
{ "pile_set_name": "Github" }
%YAML 1.1 %TAG !u! tag:unity3d.com,2011: --- !u!159 &1 EditorSettings: m_ObjectHideFlags: 0 serializedVersion: 7 m_ExternalVersionControlSupport: Visible Meta Files m_SerializationMode: 2 m_LineEndingsForNewScripts: 1 m_DefaultBehaviorMode: 0 m_SpritePackerMode: 0 m_SpritePackerPaddingPower: 1 m_EtcTextureCompressorBehavior: 1 m_EtcTextureFastCompressor: 1 m_EtcTextureNormalCompressor: 2 m_EtcTextureBestCompressor: 4 m_ProjectGenerationIncludedExtensions: txt;xml;fnt;cd m_ProjectGenerationRootNamespace: m_UserGeneratedProjectSuffix: m_CollabEditorSettings: inProgressEnabled: 1 m_EnableTextureStreamingInPlayMode: 1
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: 5930d7d2e705ea74ca30ee6652eb4571 timeCreated: 1454362609 licenseType: Store AudioImporter: serializedVersion: 6 defaultSettings: loadType: 0 sampleRateSetting: 0 sampleRateOverride: 44100 compressionFormat: 0 quality: 1 conversionMode: 0 platformSettingOverrides: {} forceToMono: 0 normalize: 1 preloadAudioData: 1 loadInBackground: 0 3D: 1 userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.workflow;

import java.io.IOException;
import java.sql.SQLException;
import java.util.List;

import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.WorkspaceItem;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.xmlworkflow.WorkflowConfigurationException;

/**
 * Service interface class for the WorkflowService framework.
 * All workflow service implementations should implement this interface, since it offers the basic
 * methods which all workflows are required to have.
 *
 * @param <T> some implementation of workflow item.
 * @author kevinvandevelde at atmire.com
 */
public interface WorkflowService<T extends WorkflowItem> {

    /**
     * start() begins a workflow - in a single transaction do away with
     * the PersonalWorkspace entry and turn it into a WorkflowItem.
     *
     * @param context The relevant DSpace Context.
     * @param wsi     The WorkspaceItem to convert to a workflow item
     * @return The resulting workflow item
     * @throws SQLException       An exception that provides information on a database access error or other errors.
     * @throws AuthorizeException Exception indicating the current user of the context does not have permission
     *                            to perform a particular action.
     * @throws IOException        A general class of exceptions produced by failed or interrupted I/O operations.
     * @throws WorkflowException  if workflow error
     */
    public T start(Context context, WorkspaceItem wsi)
        throws SQLException, AuthorizeException, IOException, WorkflowException;

    /**
     * startWithoutNotify() starts the workflow normally, but disables
     * notifications (useful for large imports) for the first workflow step -
     * subsequent notifications happen normally.
     *
     * @param c   The relevant DSpace Context.
     * @param wsi workspace item
     * @return the resulting workflow item.
     * @throws SQLException       An exception that provides information on a database access error or other errors.
     * @throws AuthorizeException Exception indicating the current user of the context does not have permission
     *                            to perform a particular action.
     * @throws IOException        A general class of exceptions produced by failed or interrupted I/O operations.
     * @throws WorkflowException  if workflow error
     */
    public T startWithoutNotify(Context c, WorkspaceItem wsi)
        throws SQLException, AuthorizeException, IOException, WorkflowException;

    /**
     * abort() aborts a workflow, completely deleting it (administrators do this)
     * (it will basically do a reject from any state - the item ends up back in
     * the user's PersonalWorkspace).
     *
     * @param c  The relevant DSpace Context.
     * @param wi WorkflowItem to operate on
     * @param e  EPerson doing the operation
     * @return workspace item returned to workspace
     * @throws SQLException       An exception that provides information on a database access error or other errors.
     * @throws AuthorizeException Exception indicating the current user of the context does not have permission
     *                            to perform a particular action.
     * @throws IOException        A general class of exceptions produced by failed or interrupted I/O operations.
     */
    public WorkspaceItem abort(Context c, T wi, EPerson e) throws SQLException, AuthorizeException, IOException;

    /**
     * Return a workflow item to the submitter's workspace, recording why it was sent back.
     *
     * @param c                 The relevant DSpace Context.
     * @param workflowItem      WorkflowItem to operate on
     * @param e                 EPerson doing the operation
     * @param provenance        provenance note recorded for this action
     * @param rejection_message message explaining why the item was sent back
     *                          (NOTE(review): presumably surfaced to the submitter -- confirm in implementations)
     * @return the workspace item the submission was returned to
     * @throws SQLException       An exception that provides information on a database access error or other errors.
     * @throws AuthorizeException Exception indicating the current user of the context does not have permission
     *                            to perform a particular action.
     * @throws IOException        A general class of exceptions produced by failed or interrupted I/O operations.
     */
    public WorkspaceItem sendWorkflowItemBackSubmission(Context c, T workflowItem, EPerson e, String provenance,
                                                        String rejection_message)
        throws SQLException, AuthorizeException, IOException;

    /**
     * Get the link to the user's "My DSpace" page for this workflow implementation.
     *
     * @return the My DSpace link as a String
     */
    public String getMyDSpaceLink();

    /**
     * Perform the workflow-side cleanup required when a collection is deleted.
     * NOTE(review): exact scope (role groups, pending tasks, ...) depends on the
     * implementation -- confirm before relying on specifics.
     *
     * @param context    The relevant DSpace Context.
     * @param collection the collection being deleted
     * @throws SQLException       An exception that provides information on a database access error or other errors.
     * @throws IOException        A general class of exceptions produced by failed or interrupted I/O operations.
     * @throws AuthorizeException Exception indicating the current user of the context does not have permission
     *                            to perform a particular action.
     */
    public void deleteCollection(Context context, Collection collection)
        throws SQLException, IOException, AuthorizeException;

    /**
     * List the workflow-related constraints that currently prevent deleting the given EPerson.
     *
     * @param context The relevant DSpace Context.
     * @param ePerson the EPerson whose deletion is being checked
     * @return a list of constraint descriptions; NOTE(review): presumably empty when
     *         deletion is allowed -- confirm against implementations.
     * @throws SQLException An exception that provides information on a database access error or other errors.
     */
    public List<String> getEPersonDeleteConstraints(Context context, EPerson ePerson) throws SQLException;

    /**
     * Get the group associated with the given workflow role for a collection.
     *
     * @param context    The relevant DSpace Context.
     * @param collection the collection the role applies to
     * @param roleName   the role name
     * @param roleGroup  a group to start from (NOTE(review): semantics when non-null are
     *                   implementation-specific -- confirm)
     * @return the group for the workflow role
     * @throws SQLException                   An exception that provides information on a database access error
     *                                        or other errors.
     * @throws IOException                    A general class of exceptions produced by failed or interrupted
     *                                        I/O operations.
     * @throws WorkflowConfigurationException If the workflow configuration is invalid
     * @throws AuthorizeException             Exception indicating the current user of the context does not have
     *                                        permission to perform a particular action.
     * @throws WorkflowException              if workflow error
     */
    public Group getWorkflowRoleGroup(Context context, Collection collection, String roleName, Group roleGroup)
        throws SQLException, IOException, WorkflowConfigurationException, AuthorizeException, WorkflowException;

    /**
     * This method will create the workflowRoleGroup for a collection and the given rolename.
     *
     * @param context    The relevant DSpace context
     * @param collection The collection
     * @param roleName   The rolename
     * @return The created Group
     * @throws AuthorizeException             If something goes wrong
     * @throws SQLException                   If something goes wrong
     * @throws IOException                    If something goes wrong
     * @throws WorkflowConfigurationException If something goes wrong
     */
    public Group createWorkflowRoleGroup(Context context, Collection collection, String roleName)
        throws AuthorizeException, SQLException, IOException, WorkflowConfigurationException;

    /**
     * Get the classpath/filesystem locations of the Flyway database migrations
     * required by this workflow implementation.
     *
     * @return a list of Flyway migration location strings
     */
    public List<String> getFlywayMigrationLocations();
}
{ "pile_set_name": "Github" }