text
stringlengths
1
1.05M
// Package baker_test exercises the Service and Services types of the
// baker package: add/remove bookkeeping, random container selection,
// and path-based lookup.
package baker_test

import (
	"testing"

	"github.com/alinz/baker.go"
)

// DummyContainer is a minimal stub satisfying baker's container interface.
type DummyContainer struct {
	id   string
	addr string
}

// ID returns the stub's identifier.
func (d DummyContainer) ID() string { return d.id }

// Addr returns the stub's address.
func (d DummyContainer) Addr() string { return d.addr }

// maxRandomTries bounds the random-selection loops so a misbehaving
// Random/Get implementation fails the test instead of hanging the suite.
const maxRandomTries = 10000

// TestService covers add/remove/size bookkeeping and random selection on
// a single Service. Subtests share one Service and are order-dependent.
func TestService(t *testing.T) {
	dummyContainer1 := DummyContainer{"1", "1.1.1.1"}
	dummyContainer2 := DummyContainer{"2", "1.1.1.2"}
	dummyContainer3 := DummyContainer{"3", "1.1.1.3"}

	dummyEndpoint := &baker.Endpoint{
		Domain: "example.com",
		Path:   "/",
		Ready:  true,
	}

	service := baker.NewService()

	t.Run("it should add 3 containers to service", func(t *testing.T) {
		service.Add(dummyEndpoint, dummyContainer1)
		service.Add(dummyEndpoint, dummyContainer2)
		service.Add(dummyEndpoint, dummyContainer3)

		if service.Size() != 3 {
			t.Errorf("expect 3 but got %d", service.Size())
		}
	})

	t.Run("it should add 3 containers to service, duplicates should be ignored", func(t *testing.T) {
		service.Add(dummyEndpoint, dummyContainer1)
		service.Add(dummyEndpoint, dummyContainer2)
		service.Add(dummyEndpoint, dummyContainer3)
		service.Add(dummyEndpoint, dummyContainer1)
		service.Add(dummyEndpoint, dummyContainer2)
		service.Add(dummyEndpoint, dummyContainer3)

		if service.Size() != 3 {
			t.Errorf("expect 3 but got %d", service.Size())
		}
	})

	t.Run("it should remove one container", func(t *testing.T) {
		service.Remove(dummyContainer1)

		if service.Size() != 2 {
			t.Errorf("expect 2 but got %d", service.Size())
		}
	})

	t.Run("it should randomize getting access to container", func(t *testing.T) {
		read := make(map[string]bool)
		// Previously an unbounded for{} loop that also dereferenced the
		// container before any nil check; bound it and fail fast instead.
		for i := 0; i < maxRandomTries && len(read) < 2; i++ {
			container, _ := service.Random()
			if container == nil {
				t.Fatal("expect a container but got none")
			}
			read[container.ID()] = true
		}
		if len(read) != 2 {
			t.Errorf("expect to observe 2 distinct containers but got %d", len(read))
		}
	})

	t.Run("it should return nil if no container available", func(t *testing.T) {
		service.Remove(dummyContainer2)
		service.Remove(dummyContainer3)

		if service.Size() != 0 {
			t.Errorf("expect 0 containers but got %d", service.Size())
		}

		container, _ := service.Random()
		if container != nil {
			t.Errorf("expect no container but got one")
		}
	})
}

// TestServices covers path-based container lookup across multiple endpoints.
// Subtests share one Services value and are order-dependent.
func TestServices(t *testing.T) {
	dummyContainer1 := DummyContainer{"1", "1.1.1.1"}
	dummyContainer2 := DummyContainer{"2", "1.1.1.2"}
	dummyContainer3 := DummyContainer{"3", "1.1.1.3"}

	dummyEndpoint1 := &baker.Endpoint{
		Domain: "example.com",
		Path:   "/a/b/c",
		Ready:  true,
	}
	dummyEndpoint2 := &baker.Endpoint{
		Domain: "example.com",
		Path:   "/d/a/v",
		Ready:  true,
	}

	services := baker.NewServices()

	t.Run("it should add 2 paths to services", func(t *testing.T) {
		services.Add(dummyEndpoint1, dummyContainer1)
		services.Add(dummyEndpoint2, dummyContainer2)
		services.Add(dummyEndpoint2, dummyContainer3)

		container, _ := services.Get(dummyEndpoint1.Path)
		if container != dummyContainer1 {
			t.Errorf("expect to get container %s but got %s", dummyContainer1.ID(), container.ID())
		}
	})

	t.Run("it should not return any container because there is no path match", func(t *testing.T) {
		services.Remove(dummyEndpoint1, dummyContainer1)

		container, _ := services.Get(dummyEndpoint1.Path)
		if container != nil {
			t.Errorf("expect to get no containers but got one")
		}
	})

	t.Run("it should get random 2 containers for given path", func(t *testing.T) {
		// No-op: the container was already removed in the previous subtest;
		// kept to preserve the original test's independence from ordering.
		services.Remove(dummyEndpoint1, dummyContainer1)

		read := make(map[string]bool)
		// Bounded for the same reason as in TestService: never hang the suite.
		for i := 0; i < maxRandomTries && len(read) < 2; i++ {
			container, _ := services.Get(dummyEndpoint2.Path)
			if container == nil {
				t.Errorf("expect to get a container but got none")
				return
			}
			read[container.ID()] = true
		}
		if len(read) != 2 {
			t.Errorf("expect to observe 2 distinct containers but got %d", len(read))
		}
	})
}
package com.rpc;

import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;

/**
 * Minimal RPC client demo: connects to a local server on port 9998, sends a
 * Req carrying two operands via Java serialization, and prints the Resp
 * result returned by the server.
 */
public class RPCClient {

    public static void main(String[] args) throws Exception {
        // try-with-resources guarantees the socket and streams are closed
        // even when connect/serialization throws; the original leaked all
        // three resources on any failure before the explicit close() calls.
        try (Socket s = new Socket()) {
            s.connect(new InetSocketAddress("127.0.0.1", 9998));
            OutputStream out = s.getOutputStream();
            InputStream in = s.getInputStream();

            // Build the request; operand semantics are defined by the server.
            Req req = new Req();
            req.setNum1(5);
            req.setNum2(6);

            try (ObjectOutputStream oout = new ObjectOutputStream(out)) {
                // Send the request, and flush so it is not stuck in the
                // stream buffer while we wait for the reply.
                oout.writeObject(req);
                oout.flush();

                // The ObjectInputStream is created only after the request is
                // written (as in the original) to avoid a header handshake
                // deadlock with servers that mirror this ordering.
                try (ObjectInputStream oin = new ObjectInputStream(in)) {
                    Resp resp = (Resp) oin.readObject();
                    System.out.println("返回结果" + resp.getResult());
                }
            }
        }
    }
}
#!/bin/bash -x

# Deploy the carts database (MongoDB) through keptn.
keptn send event new-artifact --project=sockshop --service=carts-db --image=docker.io/mongo --tag=4.2.2

# Deploy the carts application service through keptn.
keptn send event new-artifact --project=sockshop --service=carts --image=docker.io/keptnexamples/carts --tag=0.10.1
# Package the json_2.1.1 directory into one archive per file format.
# Produces json_2.1.1_tw-pddl.zip, json_2.1.1_pddl.zip, json_2.1.1_json.zip,
# each containing only the files with the matching extension.
for ext in tw-pddl pddl json; do
    zip -r "json_2.1.1_${ext}.zip" json_2.1.1/ -i "*.${ext}"
done
<filename>src/main/java/net/andreaskluth/elefantenstark/setup/InitializerException.java package net.andreaskluth.elefantenstark.setup; import net.andreaskluth.elefantenstark.common.ElefantenStarkException; public class InitializerException extends ElefantenStarkException { private static final long serialVersionUID = 4940823217792570933L; public InitializerException(Throwable cause) { super(cause); } }
#pragma once #include <php.h> /* {{{ proto UVLock uv_rwlock_init(void) */ PHP_FUNCTION(uv_rwlock_init); /* }}} */ /* {{{ proto null|false uv_rwlock_rdlock(UVLock $handle) */ PHP_FUNCTION(uv_rwlock_rdlock); /* }}} */ /* {{{ proto bool uv_rwlock_tryrdlock(UVLock $handle) */ PHP_FUNCTION(uv_rwlock_tryrdlock); /* }}} */ /* {{{ proto void uv_rwlock_rdunlock(UVLock $handle) */ PHP_FUNCTION(uv_rwlock_rdunlock); /* }}} */ /* {{{ proto null|false uv_rwlock_wrlock(UVLock $handle) */ PHP_FUNCTION(uv_rwlock_wrlock); /* }}} */ /* {{{ proto bool uv_rwlock_trywrlock(UVLock $handle) */ PHP_FUNCTION(uv_rwlock_trywrlock); /* }}} */ /* {{{ proto void uv_rwlock_wrunlock(UVLock $handle) */ PHP_FUNCTION(uv_rwlock_wrunlock); /* }}} */
/// <summary>A mutable 2D point with integer coordinates.</summary>
public class Point
{
    public int X { get; set; }
    public int Y { get; set; }
}

/// <summary>Computes distances between <see cref="Point"/> instances.</summary>
public class DistanceCalculator
{
    /// <summary>
    /// Returns the Euclidean distance between two points.
    /// </summary>
    /// <param name="x">First point; must not be null.</param>
    /// <param name="y">Second point; must not be null.</param>
    /// <exception cref="System.ArgumentNullException">If either point is null.</exception>
    public double CalculateDistance(Point x, Point y)
    {
        // Explicit null checks instead of an opaque NullReferenceException.
        if (x == null) throw new System.ArgumentNullException(nameof(x));
        if (y == null) throw new System.ArgumentNullException(nameof(y));

        // System.Math is fully qualified because no using directive is
        // visible in this file; dx*dx is exact (and cheaper) compared to
        // Math.Pow(dx, 2) for integer-valued deltas.
        double dx = x.X - y.X;
        double dy = x.Y - y.Y;
        return System.Math.Sqrt(dx * dx + dy * dy);
    }
}
import numpy as np
from scipy.special import gamma

from prml.rv.rv import RandomVariable

np.seterr(all="ignore")


class Gamma(RandomVariable):
    """
    Gamma distribution
    p(x|a, b) = b^a x^(a-1) exp(-bx) / gamma(a)
    """

    def __init__(self, a, b):
        """
        Construct a Gamma distribution.

        Parameters
        ----------
        a : int, float, or np.ndarray
            shape parameter
        b : int, float, or np.ndarray
            rate parameter
        """
        super().__init__()
        a = np.asarray(a)
        b = np.asarray(b)
        assert a.shape == b.shape
        self.a = a
        self.b = b

    @staticmethod
    def _validated(value, name):
        # Shared setter validation: positive scalars become 0-d arrays,
        # positive arrays pass through, None passes through, anything else
        # raises TypeError. Error messages match the public contract.
        if isinstance(value, (int, float, np.number)):
            if value <= 0:
                raise ValueError(f"{name} must be positive")
            return np.asarray(value)
        if isinstance(value, np.ndarray):
            if (value <= 0).any():
                raise ValueError(f"{name} must be positive")
            return value
        if value is not None:
            raise TypeError(f"{type(value)} is not supported for {name}")
        return None

    @property
    def a(self):
        # Shape parameter.
        return self.parameter["a"]

    @a.setter
    def a(self, a):
        self.parameter["a"] = self._validated(a, "a")

    @property
    def b(self):
        # Rate parameter.
        return self.parameter["b"]

    @b.setter
    def b(self, b):
        self.parameter["b"] = self._validated(b, "b")

    @property
    def ndim(self):
        return self.a.ndim

    @property
    def shape(self):
        return self.a.shape

    @property
    def size(self):
        return self.a.size

    def _pdf(self, X):
        # Density: b^a x^(a-1) e^(-bx) / Gamma(a), evaluated elementwise.
        return (
            self.b ** self.a
            * X ** (self.a - 1)
            * np.exp(-self.b * X)
            / gamma(self.a)
        )

    def _draw(self, sample_size=1):
        # numpy parameterizes by scale, the inverse of our rate b.
        return np.random.gamma(
            shape=self.a,
            scale=1 / self.b,
            size=(sample_size,) + self.shape,
        )
package git

import "strings"

// peeledSuffix marks ref names that point at the peeled (dereferenced) object.
const peeledSuffix = "^{}"

// Refs maps ref names to their Ref entries.
type Refs map[string]Ref

// NewRefs returns an empty, ready-to-use ref list.
func NewRefs() Refs {
	return Refs{}
}

// AddRef records a ref called name pointing at the object with id oid.
func (ref Refs) AddRef(name, oid string) {
	ref[name] = Ref{
		Name:   name,
		Object: Object{ID: oid},
		Child:  Object{},
	}
}

// Ref is a human-readable name for a commit.
type Ref struct {
	Name   string
	Object        // the object this ref points at (believed to be a commit)
	Child  Object // auxiliary object info for this ref
}

// IsPeeled reports whether the ref name carries the peeled suffix.
func (ref *Ref) IsPeeled() bool {
	return strings.HasSuffix(ref.Name, peeledSuffix)
}
import math


class SpherePoint:
    """A point constrained to the surface of a sphere of fixed radius."""

    def __init__(self, radius):
        # Start at the "north pole": (0, 0, radius).
        self.radius = radius
        self.x = 0
        self.y = 0
        self.z = radius

    def move_to(self, theta, phi):
        """Reposition the point from spherical angles.

        theta is the azimuthal angle, phi the polar angle (measured from
        the +z axis), following the standard physics convention the
        formulas below implement.
        """
        sin_phi = math.sin(phi)
        self.x = self.radius * sin_phi * math.cos(theta)
        self.y = self.radius * sin_phi * math.sin(theta)
        self.z = self.radius * math.cos(phi)

    # Visualize the movement using manim
    def wander_on_inner_sphere(self, duration):
        # Placeholder: the manim animation is not implemented yet.
        pass

    def poke_outside_outer_box(self):
        # Placeholder: the manim animation is not implemented yet.
        pass


# Example usage
sphere_point = SpherePoint(3)
sphere_point.move_to(math.pi / 4, math.pi / 3)
sphere_point.wander_on_inner_sphere(5)
sphere_point.poke_outside_outer_box()
// +build dfsecrets package dockerfile2llb import ( "path" "github.com/moby/buildkit/client/llb" "github.com/moby/buildkit/frontend/dockerfile/instructions" "github.com/pkg/errors" ) func dispatchSecret(m *instructions.Mount) (llb.RunOption, error) { id := m.CacheID if m.Source != "" { id = m.Source } if id == "" { if m.Target == "" { return nil, errors.Errorf("one of source, target required") } id = path.Base(m.Target) } target := m.Target if target == "" { target = "/run/secrets/" + path.Base(id) } opts := []llb.SecretOption{llb.SecretID(id)} if !m.Required { opts = append(opts, llb.SecretOptional) } if m.UID != nil || m.GID != nil || m.Mode != nil { var uid, gid, mode int if m.UID != nil { uid = int(*m.UID) } if m.GID != nil { gid = int(*m.GID) } if m.Mode != nil { mode = int(*m.Mode) } else { mode = 0400 } opts = append(opts, llb.SecretFileOpt(uid, gid, mode)) } return llb.AddSecret(target, opts...), nil }
#!/bin/bash
# Build and install tmux $TMUX_VERSION from source.
#
# Exit on the first failure (set -e) so a failed clone/configure/build never
# reaches `make install` or silently wipes the work directory; the original
# script plowed on through every error.
set -euo pipefail

TMUX_VERSION=3.2a

apt update
# NOTE(review): build dependencies (compiler, libevent/ncurses headers,
# autotools) are assumed to be installed already -- confirm; this script
# only runs `apt update` and never installs anything.

mkdir -p /tmp/tmux
cd /tmp/tmux
git clone https://github.com/tmux/tmux.git
cd tmux
git checkout "$TMUX_VERSION"
sh autogen.sh
./configure && make
make install
cd -
rm -rf /tmp/tmux
<!-- Registration form: all fields are mandatory and POSTed to /register. -->
<form action="/register" method="POST">
  <input type="text" name="firstName" required />
  <input type="text" name="lastName" required />
  <input type="email" name="email" required />
  <input type="password" name="password" required />
  <input type="password" name="confirmPassword" required />
  <input type="submit" value="Register" />
</form>
#!/bin/sh
#####################################################################
# usage:
#   sh stop.sh        -- stop application @dev
#   sh stop.sh ${env} -- stop application @${env}
# examples:
#   sh stop.sh prod   -- use conf/nginx-prod.conf to stop Nginx
#   sh stop.sh        -- use conf/nginx-dev.conf to stop Nginx
#####################################################################

# NOTE(review): the usage text above describes per-environment conf files,
# but this script always uses conf/nginx.conf and ignores $1 -- confirm
# which is intended.

# Bug fix: this was `mkdir -p logs & mkdir -p tmp`, where the single `&`
# backgrounds the first mkdir instead of sequencing the two commands.
mkdir -p logs && mkdir -p tmp

# Stop Nginx using the current directory as its prefix.
nginx -s stop -p `pwd`/ -c conf/nginx.conf
#!/bin/bash
# Convert one month of daily CEF logs (2016) to gzipped JSON with a
# map-only Hadoop streaming job per day.
#
# usage: ./script [MONTH]
#   MONTH: two-digit month, defaults to "01" (previously hard-coded).
MONTH="${1:-01}"

# NOTE(review): days 1..31 are attempted for every month; jobs for
# non-existent dates presumably fail on missing input -- confirm that is
# acceptable.
for i in {1..31}; do
  # Zero-pad the day number (replaces the manual if/else padding).
  DAY=$(printf "%02d" "$i")

  echo "--> Starting Day $DAY"
  date
  hadoop jar /opt/cloudera/parcels/CDH/jars/hadoop-streaming-2.6.0-cdh5.7.0-SNAPSHOT.jar \
    -D mapred.reduce.tasks=0 -D mapred.map.tasks=600 -D mapred.output.compress=true \
    -D mapred.output.compression.codec=org.apache.hadoop.io.compress.GzipCodec \
    -D mapred.job.name="CEF->JSON [2016-$MONTH-$DAY]" -input /data/cef/2016${MONTH}${DAY}.cef.bz2 \
    -output /parsed/jsoncef/2016/${MONTH}/${DAY} -mapper "lognormalizer -r /tmp/cef.rf -e json -p </dev/stdin"
  date
done
/**
 * Copyright 2017, ScaleFT Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#ifndef _xjwt_validator_h_
#define _xjwt_validator_h_

#include <time.h>
/* NOTE(review): uint64_t is used below but <stdint.h> is not included here;
 * presumably it arrives via another header -- confirm. */

#include <jansson.h>

#include "xjwt_error.h"
#include "xjwt_keyset.h"
#include "xjwt_visibility.h"

#ifdef __cplusplus
extern "C" {
#endif /* __cplusplus */

/**
 * JWT Validation Functions
 */

typedef struct xjwt_validator_t xjwt_validator_t;

/* Clock callback: returns the current time; invoked with the options baton. */
typedef uint64_t(xjwt_time_cb)(void* baton);

/* Options controlling verification: key material, clock source, and the
 * expected issuer/subject/audience claims. */
typedef struct xjwt_verify_options_t {
  /* Baton passed into all callbacks */
  void* baton;
  xjwt_time_cb* now;
  xjwt_keyset_t* keyset;
  const char* expected_issuer;
  const char* expected_subject;
  const char* expected_audience;
} xjwt_verify_options_t;

/* On success, carries the decoded JWT payload as parsed JSON. */
typedef struct xjwt_verify_success_t {
  json_t* payload;
} xjwt_verify_success_t;

typedef enum XJWT_VERIFY_FAILURES {
  XJWT_VERIFY_UNKNOWN = 0,
  XJWT_VERIFY_NOT_PRESENT = 1,
  XJWT_VERIFY_EXPIRED = 2,
  XJWT_VERIFY_INVALID_SIGNATURE = 3,
  XJWT_VERIFY_NO_VALIDATORS = 4,
  XJWT_VERIFY_MALFORMED = 5,
  XJWT_VERIFY_EXPECT_MISMATCH = 6
} XJWT_VERIFY_FAILURES;

/**
 * Contains an enum of possible reasons validation failed for a JWT.
 *
 * *err may be empty, but reason will always contain a reason.
 *
 **/
typedef struct xjwt_verify_failure_t {
  XJWT_VERIFY_FAILURES reason;
  xjwt_error_t* err;
} xjwt_verify_failure_t;

/**
 * Verifies a JWT according to a strict sub-set of the JWT standards meant to
 * intersect with real world use cases.
 *
 * On Success, *outsuccess is set to non-NULL.
 * On Failure, *outfailure is set to non-NULL and explains why verification
 * failed.
 */
XJWT_API(void)
xjwt_verify(xjwt_verify_options_t* opts, const char* data, size_t len,
            xjwt_verify_success_t** outsuccess,
            xjwt_verify_failure_t** outfailure);

/* Destructors for the result objects returned by xjwt_verify. */
XJWT_API(void)
xjwt_verify_success_destroy(xjwt_verify_success_t* success);

XJWT_API(void)
xjwt_verify_failure_destroy(xjwt_verify_failure_t* fail);

#ifdef __cplusplus
}
#endif /* __cplusplus */

#endif /* _xjwt_validator_h_ */
const base64Url = require('base64-url'); const header = { alg: 'HS256', typ: 'JWT', }; const payload = { username: '<EMAIL>', name: '<NAME>', exp: new Date().getTime(), }; const key = '<KEY>'; const headerEncoded = base64Url.encode(JSON.stringify(header)); const payloadEncoded = base64Url.encode(JSON.stringify(payload)); const crypt = require('crypto'); const signature = crypt .createHmac('sha256', key) .update(`${headerEncoded}.${payloadEncoded}`) .digest('base64-url'); console.log( `${headerEncoded}.${payloadEncoded}.${base64Url.encode(signature)}`, );
//#####################################################################
// Copyright 2010, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
// Quadratic (limited) interpolation on uniform grids. Only the 1D overloads
// are implemented; 2D/3D overloads are PHYSBAM_NOT_IMPLEMENTED stubs.
#include <PhysBAM_Tools/Grids_Uniform_Arrays/ARRAYS_ND.h>
#include <PhysBAM_Tools/Grids_Uniform_Interpolation/QUADRATIC_INTERPOLATION_UNIFORM.h>
#include <PhysBAM_Tools/Vectors/VECTOR_3D.h>
using namespace PhysBAM;
//#####################################################################
// Constructor
//#####################################################################
template<class T_GRID,class T2,class T_FACE_LOOKUP> QUADRATIC_INTERPOLATION_UNIFORM<T_GRID,T2,T_FACE_LOOKUP>::
QUADRATIC_INTERPOLATION_UNIFORM()
    :a_scheme(1) // default to scheme 1; see From_Base_Node_Weights for the two schemes
{
}
//#####################################################################
// Destructor
//#####################################################################
template<class T_GRID,class T2,class T_FACE_LOOKUP> QUADRATIC_INTERPOLATION_UNIFORM<T_GRID,T2,T_FACE_LOOKUP>::
~QUADRATIC_INTERPOLATION_UNIFORM()
{
}
//#####################################################################
// Function Clamped_To_Array
//#####################################################################
// Clamps X to a valid base index and delegates to From_Base_Node.
template<class T_GRID,class T2,class T_FACE_LOOKUP> T2 QUADRATIC_INTERPOLATION_UNIFORM<T_GRID,T2,T_FACE_LOOKUP>::
Clamped_To_Array(const T_GRID& grid,const T_ARRAYS_T2& u,const TV& X) const
{
    return From_Base_Node(grid,u,X,INTERPOLATION_UNIFORM<T_GRID,T2,T_FACE_LOOKUP>::Clamped_Index_Interior_End_Minus_One(grid,u,X));
}
//#####################################################################
// Function Clamped_To_Array_Weights
//#####################################################################
template<class T_GRID,class T2,class T_FACE_LOOKUP> ARRAY<PAIR<typename T_GRID::VECTOR_INT,typename T_GRID::VECTOR_T::SCALAR> >
QUADRATIC_INTERPOLATION_UNIFORM<T_GRID,T2,T_FACE_LOOKUP>::
Clamped_To_Array_Weights(const T_GRID& grid,const T_ARRAYS_T2& u,const TV& X) const
{
    return From_Base_Node_Weights(grid,u,X,INTERPOLATION_UNIFORM<T_GRID,T2,T_FACE_LOOKUP>::Clamped_Index_Interior_End_Minus_One(grid,u,X));
}
//#####################################################################
// Function From_Base_Node_Helper
//#####################################################################
// 1D limited quadratic: linear interpolation between u(index) and u(index+1)
// minus a quadratic correction using whichever second difference (left or
// right) is smaller in magnitude.
template<class T_GRID,class T2,class T_FACE_LOOKUP> T2 QUADRATIC_INTERPOLATION_UNIFORM<T_GRID,T2,T_FACE_LOOKUP>::
From_Base_Node_Helper(const GRID<TV>& grid,const ARRAYS_ND_BASE<VECTOR<T2,1> >& u,const VECTOR<T,1>& X,const VECTOR<int,1>& index) const
{
    T w=(X.x-grid.X(index.x).x)*grid.one_over_dX.x;T one_over_dX2=1;//grid.one_over_dX.x*grid.one_over_dX.x;
    T leftDxx=(u(index.x+1)-2*u(index.x)+u(index.x-1)),rightDxx=(u(index.x+2)-2*u(index.x+1)+u(index.x));
    if(abs(leftDxx)<abs(rightDxx)) return u(index.x)*(1-w)+u(index.x+1)*w-leftDxx*one_over_dX2*w*(1-w)/2.;
    else return u(index.x)*(1-w)+u(index.x+1)*w-rightDxx*one_over_dX2*w*(1-w)/2.;
}
//#####################################################################
// Function From_Base_Node
//#####################################################################
// 1D interpolation via the weight form: sum of u at each stencil index times
// its weight.
template<class T_GRID,class T2,class T_FACE_LOOKUP> T2 QUADRATIC_INTERPOLATION_UNIFORM<T_GRID,T2,T_FACE_LOOKUP>::
From_Base_Node(const GRID<TV>& grid,const ARRAYS_ND_BASE<VECTOR<T2,1> >& u,const VECTOR<T,1>& X,const VECTOR<int,1>& index) const
{
    ARRAY<PAIR<TV_INT,T> > weights=From_Base_Node_Weights(grid,u,X,index);
    T2 sum=T2();
    for(int i=1;i<=weights.m;i++) sum+=u(weights(i).x)*weights(i).y;
    return sum;
}
//#####################################################################
// Function From_Base_Node_Weights
//#####################################################################
// Builds the two-point weights for the nodes index and index+1. The 1e-5
// guards avoid dividing by (near-)zero samples and fall back to plain linear
// weights when both neighbors are (near) zero. Scheme 1 folds the quadratic
// correction into ratio form; scheme 2 chooses the smaller of two second
// differences (a1 right, a2 left) before folding.
template<class T_GRID,class T2,class T_FACE_LOOKUP> ARRAY<PAIR<typename T_GRID::VECTOR_INT,typename T_GRID::VECTOR_T::SCALAR> >
QUADRATIC_INTERPOLATION_UNIFORM<T_GRID,T2,T_FACE_LOOKUP>::
From_Base_Node_Weights(const GRID<TV>& grid,const ARRAYS_ND_BASE<VECTOR<T2,1> >& u,const VECTOR<T,1>& X,const VECTOR<int,1>& index) const
{
    ARRAY<PAIR<TV_INT,T> > weights;
    T w=(X.x-grid.X(index.x).x)*grid.one_over_dX.x;
    if(a_scheme==1){
        if(abs(u(index.x))>1e-5 && abs(u(index.x+1))>1e-5){weights.Append(PAIR<TV_INT,T>(TV_INT(index.x),(T)1-w-w*(w-(T)1)/(T)4*((T)1-u(index.x-1)/u(index.x))));weights.Append(PAIR<TV_INT,T>(TV_INT(index.x+1),w-w*(w-(T)1)/(T)4*((T)1-u(index.x+2)/u(index.x+1))));}
        else if(abs(u(index.x))>1e-5){weights.Append(PAIR<TV_INT,T>(TV_INT(index.x),(T)1-w-w*(w-(T)1)/(T)4*((T)1-(u(index.x-1)-u(index.x+2))/u(index.x))));weights.Append(PAIR<TV_INT,T>(TV_INT(index.x+1),w-w*(w-(T)1)/(T)4));}
        else if(abs(u(index.x+1))>1e-5){weights.Append(PAIR<TV_INT,T>(TV_INT(index.x),(T)1-w-w*(w-(T)1)/(T)4));weights.Append(PAIR<TV_INT,T>(TV_INT(index.x+1),w-w*(w-(T)1)/(T)4*((T)1-(u(index.x-1)-u(index.x+2))/u(index.x+1))));}
        else{weights.Append(PAIR<TV_INT,T>(TV_INT(index.x),(T)1-w));weights.Append(PAIR<TV_INT,T>(TV_INT(index.x+1),w));}}
    else if(a_scheme==2){
        T a1=(u(index.x+2)+u(index.x)-(T)2*u(index.x+1))/(T)2;T a2=(u(index.x+1)+u(index.x-1)-(T)2*u(index.x))/(T)2;
        if(abs(a1)<abs(a2)){
            if(abs(u(index.x+1))>1e-5){weights.Append(PAIR<TV_INT,T>(TV_INT(index.x),(T)1-w+w*(w-(T)1)/(T)2));weights.Append(PAIR<TV_INT,T>(TV_INT(index.x+1),w-w*(w-(T)1)*((T)1-u(index.x+2)/((T)2*u(index.x+1)))));}
            else if(abs(u(index.x))>1e-5){weights.Append(PAIR<TV_INT,T>(TV_INT(index.x),(T)1-w+w*(w-(T)1)/(T)2*((T)1.+u(index.x+2)/u(index.x))));weights.Append(PAIR<TV_INT,T>(TV_INT(index.x+1),w-w*(w-(T)1)));}
            // NOTE(review): PAIR<TV_INT,T2> below looks like a typo for PAIR<TV_INT,T>;
            // it only compiles because every instantiation in this file has T2 == T.
            else{weights.Append(PAIR<TV_INT,T>(TV_INT(index.x),(T)1-w));weights.Append(PAIR<TV_INT,T2>(TV_INT(index.x+1),w));}}
        else{
            if(abs(u(index.x))>1e-5){weights.Append(PAIR<TV_INT,T>(TV_INT(index.x),(T)1-w-w*(w-(T)1)*((T)1-u(index.x-1)/((T)2*u(index.x)))));weights.Append(PAIR<TV_INT,T>(TV_INT(index.x+1),w+w*(w-(T)1)/(T)2));}
            else if(abs(u(index.x+1))>1e-5){weights.Append(PAIR<TV_INT,T>(TV_INT(index.x),(T)1-w-w*(w-(T)1)));weights.Append(PAIR<TV_INT,T>(TV_INT(index.x+1),w+w*(w-(T)1)/(T)2*((T)1+u(index.x-1)/(u(index.x+1)))));}
            // NOTE(review): same PAIR<TV_INT,T2> likely-typo as above.
            else{weights.Append(PAIR<TV_INT,T>(TV_INT(index.x),(T)1-w));weights.Append(PAIR<TV_INT,T2>(TV_INT(index.x+1),w));}}}
    return weights;
}
//#####################################################################
// Function From_Base_Node
//#####################################################################
template<class T_GRID,class T2,class T_FACE_LOOKUP> T2 QUADRATIC_INTERPOLATION_UNIFORM<T_GRID,T2,T_FACE_LOOKUP>::
From_Base_Node(const GRID<TV>& grid,const ARRAYS_ND_BASE<VECTOR<T2,2> >& u,const VECTOR<T,2>& X,const VECTOR<int,2>& index) const
{
    PHYSBAM_NOT_IMPLEMENTED();
    return T2();
}
//#####################################################################
// Function From_Base_Node_Weights
//#####################################################################
template<class T_GRID,class T2,class T_FACE_LOOKUP> ARRAY<PAIR<typename T_GRID::VECTOR_INT,typename T_GRID::VECTOR_T::SCALAR> > QUADRATIC_INTERPOLATION_UNIFORM<T_GRID,T2,T_FACE_LOOKUP>::
From_Base_Node_Weights(const GRID<TV>& grid,const ARRAYS_ND_BASE<VECTOR<T2,2> >& u,const VECTOR<T,2>& X,const VECTOR<int,2>& index) const
{
    PHYSBAM_NOT_IMPLEMENTED();
    return ARRAY<PAIR<TV_INT,T> >();
}
//#####################################################################
// Function From_Base_Node
//#####################################################################
template<class T_GRID,class T2,class T_FACE_LOOKUP> T2 QUADRATIC_INTERPOLATION_UNIFORM<T_GRID,T2,T_FACE_LOOKUP>::
From_Base_Node(const GRID<TV>& grid,const ARRAYS_ND_BASE<VECTOR<T2,3> >& u,const VECTOR<T,3>& X,const VECTOR<int,3>& index) const
{
    PHYSBAM_NOT_IMPLEMENTED();
    return T2();
}
//#####################################################################
// Function From_Base_Node_Weights
//#####################################################################
template<class T_GRID,class T2,class T_FACE_LOOKUP> ARRAY<PAIR<typename T_GRID::VECTOR_INT,typename T_GRID::VECTOR_T::SCALAR> > QUADRATIC_INTERPOLATION_UNIFORM<T_GRID,T2,T_FACE_LOOKUP>::
From_Base_Node_Weights(const GRID<TV>& grid,const ARRAYS_ND_BASE<VECTOR<T2,3> >& u,const VECTOR<T,3>& X,const VECTOR<int,3>& index) const
{
    PHYSBAM_NOT_IMPLEMENTED();
    return ARRAY<PAIR<TV_INT,T> >();
}
// Explicit instantiations (float).
template QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<float,1> >,float,FACE_LOOKUP_UNIFORM<GRID<VECTOR<float,1> > > >::QUADRATIC_INTERPOLATION_UNIFORM();
template QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<float,1> >,float,FACE_LOOKUP_UNIFORM<GRID<VECTOR<float,1> > > >::~QUADRATIC_INTERPOLATION_UNIFORM();
template QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<float,2> >,float,FACE_LOOKUP_UNIFORM<GRID<VECTOR<float,2> > > >::QUADRATIC_INTERPOLATION_UNIFORM();
template QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<float,2> >,float,FACE_LOOKUP_UNIFORM<GRID<VECTOR<float,2> > > >::~QUADRATIC_INTERPOLATION_UNIFORM();
template QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<float,3> >,float,FACE_LOOKUP_UNIFORM<GRID<VECTOR<float,3> > > >::QUADRATIC_INTERPOLATION_UNIFORM();
template QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<float,3> >,float,FACE_LOOKUP_UNIFORM<GRID<VECTOR<float,3> > > >::~QUADRATIC_INTERPOLATION_UNIFORM();
template float QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<float,1> >,float,FACE_LOOKUP_UNIFORM<GRID<VECTOR<float,1> > > >::Clamped_To_Array(GRID<VECTOR<float,1> > const&,ARRAYS_ND_BASE<VECTOR<float,1> > const&,VECTOR<float,1> const&) const;
template float QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<float,2> >,float,FACE_LOOKUP_UNIFORM<GRID<VECTOR<float,2> > > >::Clamped_To_Array(GRID<VECTOR<float,2> > const&,ARRAYS_ND_BASE<VECTOR<float,2> > const&,VECTOR<float,2> const&) const;
template float QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<float,3> >,float,FACE_LOOKUP_UNIFORM<GRID<VECTOR<float,3> > > >::Clamped_To_Array(GRID<VECTOR<float,3> > const&,ARRAYS_ND_BASE<VECTOR<float,3> > const&,VECTOR<float,3> const&) const;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
// Explicit instantiations (double).
// NOTE(review): unlike the float list, the double list omits the
// 1D destructor and the 1D Clamped_To_Array instantiation -- possibly
// deliberate, possibly an oversight; confirm against the build.
template QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<double,1> >,double,FACE_LOOKUP_UNIFORM<GRID<VECTOR<double,1> > > >::QUADRATIC_INTERPOLATION_UNIFORM();
template QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<double,2> >,double,FACE_LOOKUP_UNIFORM<GRID<VECTOR<double,2> > > >::QUADRATIC_INTERPOLATION_UNIFORM();
template QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<double,2> >,double,FACE_LOOKUP_UNIFORM<GRID<VECTOR<double,2> > > >::~QUADRATIC_INTERPOLATION_UNIFORM();
template QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<double,3> >,double,FACE_LOOKUP_UNIFORM<GRID<VECTOR<double,3> > > >::QUADRATIC_INTERPOLATION_UNIFORM();
template QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<double,3> >,double,FACE_LOOKUP_UNIFORM<GRID<VECTOR<double,3> > > >::~QUADRATIC_INTERPOLATION_UNIFORM();
template double QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<double,2> >,double,FACE_LOOKUP_UNIFORM<GRID<VECTOR<double,2> > > >::Clamped_To_Array(GRID<VECTOR<double,2> > const&,ARRAYS_ND_BASE<VECTOR<double,2> > const&,VECTOR<double,2> const&) const;
template double QUADRATIC_INTERPOLATION_UNIFORM<GRID<VECTOR<double,3> >,double,FACE_LOOKUP_UNIFORM<GRID<VECTOR<double,3> > > >::Clamped_To_Array(GRID<VECTOR<double,3> > const&,ARRAYS_ND_BASE<VECTOR<double,3> > const&,VECTOR<double,3> const&) const;
#endif
from torch import nn

from .base_model import BaseModel
from fqf_iqn_qrdqn.network import DQNBase, NoisyLinear


class QRDQN(BaseModel):
    """Quantile-Regression DQN head on top of the shared DQN feature trunk.

    Produces N quantile estimates per action; Q-values are the means over
    the quantile dimension.
    """

    def __init__(self, num_channels, num_actions, N=200, embedding_dim=64,
                 dueling_net=False, noisy_net=False):
        super(QRDQN, self).__init__()

        # Swap in noisy layers when exploration-by-noise is requested.
        linear = NoisyLinear if noisy_net else nn.Linear

        # Shared feature extractor of DQN.
        self.dqn_net = DQNBase(num_channels=num_channels)

        if not dueling_net:
            # Plain head: N quantiles for every action.
            self.q_net = nn.Sequential(
                linear(embedding_dim, 512),
                nn.ReLU(),
                linear(512, num_actions * N),
            )
        else:
            # Dueling head: per-action advantages plus an action-independent
            # baseline, combined in forward().
            self.advantage_net = nn.Sequential(
                linear(embedding_dim, 512),
                nn.ReLU(),
                linear(512, num_actions * N),
            )
            self.baseline_net = nn.Sequential(
                linear(embedding_dim, 512),
                nn.ReLU(),
                linear(512, N),
            )

        self.N = N
        self.num_channels = num_channels
        self.num_actions = num_actions
        self.embedding_dim = embedding_dim
        self.dueling_net = dueling_net
        self.noisy_net = noisy_net

    def forward(self, states=None, state_embeddings=None):
        """Return quantile values shaped (batch_size, N, num_actions).

        Exactly one of `states` / `state_embeddings` may be omitted; when
        embeddings are absent they are computed from `states`.
        """
        assert states is not None or state_embeddings is not None

        if states is not None:
            batch_size = states.shape[0]
        else:
            batch_size = state_embeddings.shape[0]

        if state_embeddings is None:
            state_embeddings = self.dqn_net(states)

        if self.dueling_net:
            advantages = self.advantage_net(
                state_embeddings).view(batch_size, self.N, self.num_actions)
            baselines = self.baseline_net(
                state_embeddings).view(batch_size, self.N, 1)
            # Mean-subtracting advantages keeps the decomposition identifiable.
            quantiles = baselines + advantages \
                - advantages.mean(dim=2, keepdim=True)
        else:
            quantiles = self.q_net(
                state_embeddings).view(batch_size, self.N, self.num_actions)

        assert quantiles.shape == (batch_size, self.N, self.num_actions)
        return quantiles

    def calculate_q(self, states=None, state_embeddings=None):
        """Expected Q-values: mean over the quantile dimension, per action."""
        assert states is not None or state_embeddings is not None

        if states is not None:
            batch_size = states.shape[0]
        else:
            batch_size = state_embeddings.shape[0]

        quantiles = self(states=states, state_embeddings=state_embeddings)
        q = quantiles.mean(dim=1)
        assert q.shape == (batch_size, self.num_actions)
        return q
<reponame>bodymovin/skia-buildbot /** * @module modules/commands-sk * @description A list view of draw commands for a single frame, and a tool for * filtering them. contains the logic for processing the parsed json object from * wasm, and extracting things like layer info and command counts which drive * other modules. * * Contains play-sk as a submodule, which playes over the filtered list of * commands. * * Data flows along this path in one direction depending on which end triggers a * change. * filter text box <=> this._includedSet <=> histogram-sk * * @evt histogram-update: An event containing the list of histogram entries. * Emitted every time the histogram is recomputed. * * @evt move-command-position: When the play-sk module or user selects a different * command, this event is emitted, and it's detail contains the command index in * the unfiltered command list for this frame. * */ import { define } from 'elements-sk/define'; import { html, TemplateResult } from 'lit-html'; import { ElementDocSk } from '../element-doc-sk/element-doc-sk'; import { PlaySk, PlaySkMoveToEventDetail } from '../play-sk/play-sk'; import { HistogramSkToggleEventDetail } from '../histogram-sk/histogram-sk' import { DefaultMap } from '../default-map'; import 'elements-sk/icon/save-icon-sk'; import 'elements-sk/icon/content-copy-icon-sk'; import 'elements-sk/icon/image-icon-sk'; import { SkpJsonCommandList, SkpJsonCommand, SkpJsonAuditTrail, SkpJsonGpuOp } from '../debugger'; import '../play-sk'; export interface CommandsSkMovePositionEventDetail { // the index of a command in the frame to which the wasm view should move. 
position: number, // true if we're currently paused paused: boolean, } export type CommandRange = [number, number]; // Represents one of the icons that can appear on a command export interface PrefixItem { icon: string, color: string, count: number, }; /** A processed command object, created from a SkpJsonCommand */ export interface Command { // Index of the command in the unfiltered list. index: number, // the save/restore depth before this command is executed. depth: number, // the parsed json representation of the command. exact type depends on the command. details: SkpJsonCommand, name: string, // if this command is one of an indenting pair, the command index range that the pair enclose // (save, restore) range?: CommandRange, prefixes: PrefixItem[], // Whether the command will be executed during playback visible: boolean, // index of any image referenced by this command imageIndex?: number, }; /** An entry of the command histogram * obtained by totalling up occurances in the range filtered command list */ export interface HistogramEntry { // name of a command (original CamelCase) name: string, // number of occurances in the current frame (or the whole file for a single-frame SKP) countInFrame: number, // number of occurances in the current range filter countInRange: number, } /** An event detail containing a new histogram * or new filter set to be displayed by the histogram-sk module. * The event may update one or both of the two fields. */ export interface CommandsSkHistogramEventDetail { /** A newly computed histogram that needs to be displayed by histogram-sk */ hist?: HistogramEntry[]; /** whether the command is include by the filter */ included?: Set<string>; } // Information about layers collected by processCommands. // TODO(nifong): This could be collected in the C++ and returned from // getLayerSummaries and then commands-sk wouldn't have to be involved // with layer things at all. 
export interface LayerInfo { // A Map from layer ids to command indices where they were drawn // with a DrawImageRectLayer command. Includes only layer used this frame uses: DefaultMap<number, number[]>; // A map from layer ids to names that were provided in the render node annotations. // This should be sufficient for it to always contain what we attempt to look up. // Only valid for the duration of this frame. names: Map<number, string>; } // Jumpting to a command by it's unfiltered index can be done by emitting // 'jump-command' with this event detail export interface CommandsSkJumpEventDetail { unfilteredIndex: number; } // event issued when the user clicks 'Image' to jump to this image with this id. export interface CommandsSkSelectImageEventDetail { id: number; } // Colors to use for gpu op ids const COLORS = [ "#1B9E77", "#D95F02", "#7570B3", "#E7298A", "#66A61E", "#E6AB02", "#A6761D", "#666666", "#09c5d2", "#064f77", "#3a4ce4", "#d256f0", "#feb4c7", "#fa3029", "#ff6821", "#a8ff21", "#a5cf80", "#36d511", "#95f19c", ]; // Commands that increase save/restore depth const INDENTERS: {[key: string]: PrefixItem} = { 'Save': { icon: 'save-icon-sk', color: '#B2DF8A', count: 1 }, 'SaveLayer': { icon: 'content-copy-icon-sk', color: '#FDBF6F', count: 1 }, 'BeginDrawPicture': { icon: 'image-icon-sk', color: '#A6CEE3', count: 1 }, }; // commands that decrease save/restore depth const OUTDENTERS: string[] = ['Restore', 'EndDrawPicture']; export class CommandsSk extends ElementDocSk { private static template = (ele: CommandsSk) => html` <div> ${CommandsSk.filterTemplate(ele)} <div class="horizontal-flex"> <button @click=${ele._opIdFilter} class="short">Show By Op-Id</button> <play-sk .visual=${'full'}></play-sk> </div> <div class="list"> ${ ele._filtered.map((i: number, filtPos: number) => CommandsSk.opTemplate(ele, filtPos, ele._cmd[i])) } </div> </div>`; private static opTemplate = (ele: CommandsSk, filtpos: number, op: Command) => html`<div class="op" 
id="op-${op.index}" @click=${ (e: MouseEvent) => {ele._clickItem(e, filtpos)}}> <details> <summary class="command-summary ${ ele.position == op.index ? 'selected' : ''}"> <div class="command-icons-group"> <span class="index">${op.index}</span> ${ op.prefixes.map((pre: PrefixItem) => CommandsSk.prefixItemTemplate(ele, pre)) } </div> <div class="command-title">${ op.name }</div> <code class="short-desc">${ op.details.shortDesc }</code> ${ op.range ? html`<button @click=${() => {ele.range = op.range!}} title="Range-filter the command list to this save/restore pair">Zoom</button>` : '' } ${ op.imageIndex ? html`<button @click=${()=>{ele._jumpToImage(op.imageIndex!)}} title="Show the image referenced by this command in the resource viewer" >Image</button>` : '' } <div class="gpu-ops-group"> ${ (op.details.auditTrail && op.details.auditTrail.Ops) ? op.details.auditTrail.Ops.map((gpuOp: SkpJsonGpuOp) => CommandsSk.gpuOpIdTemplate(ele, gpuOp) ) : '' } </div> </summary> <div> <checkbox-sk title="Toggle command visibility" checked=${ op.visible } @change=${ele._toggleVisible(op.index)}></checkbox-sk> <strong>Index: </strong> <span class=index>${op.index}</span> </div> ${ele._renderRullOpRepresentation(ele, op)} </details> </div> <hr>`; private static prefixItemTemplate = (ele: CommandsSk, item: PrefixItem) => html`${ ele._icon(item) } ${ item.count > 1 ? html`<div title="depth of indenting operation" class=count>${ item.count }</div>` : '' }`; private static gpuOpIdTemplate = (ele: CommandsSk, gpuOp: SkpJsonGpuOp) => html`<span title="GPU Op ID - group of commands this was executed with on the GPU" class="gpu-op-id" style="background: ${ ele._gpuOpColor(gpuOp.OpsTaskID) }" >${ gpuOp.OpsTaskID }</span>`; private static filterTemplate = (ele: CommandsSk) => html` <div class="horizontal-flex"> <label title="Filter command names (Single leading ! negates entire filter). 
Command types can also be filted by clicking on their names in the histogram" >Filter</label> <input @change=${ele._textFilter} value="!DrawAnnotation" id="text-filter"></input>&nbsp; <label>Range</label> <input @change=${ele._rangeInputHandler} class=range-input value="${ ele._range[0] }" id="rangelo"></input> <b>:</b> <input @change=${ele._rangeInputHandler} class=range-input value="${ ele._range[1] }" id="rangehi"></input> <button @click=${ele.clearFilter} id="clear-filter-button">Clear</button> </div>`; // processed command list (no filtering applied). change with processCommands private _cmd: Command[] = []; // list of indices of commands that passed the range and name filters. private _filtered: number[] = []; // position in filtered (visible) command list private _item: number = 0; // range filter private _range: CommandRange = [0, 100]; // counts of command occurances private _histogram: HistogramEntry[] = []; // known command names (set by processCommands) names are lowercased. 
private _available = new Set<string>(); // subset of command names that should pass the command filter // (names are lowercased) private _includedSet = new Set<string>(); // Play bar submodule private _playSk: PlaySk | null = null; // information about layers collected from commands private _layerInfo: LayerInfo = { uses: new DefaultMap<number, number[]>(() => []), names: new Map<number, string>(), }; // the command count with no filtering get count() { return this._cmd.length; } // the command count with all filters applied get countFiltered() { return this._filtered.length; } get layerInfo(): LayerInfo { return this._layerInfo; } // set the current playback position in the list // (index in filtered list) set item(i: number) { this._item = i; this.querySelector<HTMLDivElement>('#op-' + this._filtered[this._item] )?.scrollIntoView({block: 'nearest'}); this._render(); // notify debugger-page-sk that it needs to draw this.position this.dispatchEvent( new CustomEvent<CommandsSkMovePositionEventDetail>( 'move-command-position', { detail: {position: this.position, paused: this._playSk!.mode === 'pause'}, bubbles: true, })); this._playSk!.movedTo(this._item); } // get the playback index in _cmd after filtering is applied. get position() { return this._filtered[this._item]; } set range(range: CommandRange) { this._range = range; this._applyRangeFilter(); } set textFilter(q: string) { this.querySelector<HTMLInputElement>('#text-filter')!.value = q; if (!this.count) { return; } this._textFilter(); // does render } // Return a list of op indices that pass the current filters. 
get filtered(): number[] { return this._filtered; } constructor() { super(CommandsSk.template); } connectedCallback() { super.connectedCallback(); this._render(); this._playSk = this.querySelector<PlaySk>('play-sk')!; this._playSk.addEventListener('moveto', (e) => { this.item = (e as CustomEvent<PlaySkMoveToEventDetail>).detail.item; }); this.addDocumentEventListener('toggle-command-inclusion', (e) => { this._toggleName((e as CustomEvent<HistogramSkToggleEventDetail>).detail.name); }); // Jump to a command by it's unfiltered index. this.addDocumentEventListener('jump-command', (e) => { const i = (e as CustomEvent<CommandsSkJumpEventDetail>).detail.unfilteredIndex; const filteredIndex = this._filtered.findIndex(e => e==i); if (filteredIndex !== undefined) { this.item = filteredIndex; } }); } // _processCommands iterates over the commands to extract several things. // * A depth at every command based on Save/Restore pairs. // * A histogram showing how many times each type of command is used. // * A map from layer node ids to the index of any layer use events in the command list. 
// * The full set of command names that occur processCommands(cmd: SkpJsonCommandList) { const commands: Command[] = []; let depth = 0; const prefixes: PrefixItem[] = []; // A stack of indenting commands // Match up saves and restores, a stack of indices const matchup: number[] = []; // All command types that occur in this frame this._available = new Set<string>(); interface tally { count_in_frame: number, count_in_range_filter: number, } this._layerInfo.uses = new DefaultMap<number, number[]>(() => []); this._layerInfo.names = new Map<number, string>(); // Finds things like "RenderNode(id=10, name='DecorView')" const renderNodeRe = /^RenderNode\(id=([0-9]+), name='([A-Za-z0-9_]+)'\)/; cmd.commands.forEach((com, i) => { const name = com.command; this._available.add(name.toLowerCase()); const out: Command = { index: i, depth: depth, details: com, // unaltered object from json name: name, prefixes: [], visible: true, }; // DrawCommand.cpp will write this field if the command references an image if (com.imageIndex) { out.imageIndex = com.imageIndex; } if (name in INDENTERS) { depth++; matchup.push(i); // If this is the same type of indenting op we've already seen // then just increment the count, otherwise add as a new // op in prefixes. if (depth > 1 && prefixes[prefixes.length-1].icon == INDENTERS[name].icon) { prefixes[prefixes.length-1].count++; } else { prefixes.push(this._copyPrefix(INDENTERS[name])); } } else if (OUTDENTERS.indexOf(name) !== -1) { depth--; // Now that we can match an OUTDENTER with an INDENTER we can set // the _zoom property for both commands. const begin: number = matchup.pop()!; const range = [begin, i] as CommandRange; out.range = range; commands[begin].range = range; // Only pop the op from prefixes if its count has reached 1. 
if (prefixes[prefixes.length-1].count > 1) { prefixes[prefixes.length-1].count--; } else { prefixes.pop(); } out.depth = depth; } else if (name === 'DrawImageRectLayer') { // A command indicating that a render node with an offscreen buffer (android only) // was drawn as an image. const node = com.layerNodeId!; this._layerInfo.uses.get(node).push(i); } else if (name === 'DrawAnnotation') { // DrawAnnotation is a bit of metadata added by the android view system. // All render nodes have names, but not all of them are drawn with offscreen buffers const annotationKey = com.key; const found = com.key!.match(renderNodeRe); if (found) { // group 1 is the render node id // group 2 is the name of the rendernode. this._layerInfo.names.set(parseInt(found[1]), found[2]); } } // deep copy prefixes because we want a snapshot of the current list and counts out.prefixes = prefixes.map((p: PrefixItem) => this._copyPrefix(p)); commands.push(out); }); this._cmd = commands; this.range = [0, this._cmd.length-1]; // this assignment also triggers render } // User clicked the clear filter button, clear both filters clearFilter() { this.querySelector<HTMLInputElement>('#text-filter')!.value = ''; if (!this.count) { return; } this.range = [0, this._cmd.length-1]; // setter triggers _applyRangeFilter, follow that } // Stop playback and move by a given offset in the filtered list. 
keyMove(offset: number) { this._playSk!.mode = 'pause'; this.item = Math.max(0, Math.min(this._item + offset, this.countFiltered)); } end() { this.item = this._filtered.length - 1; } private _clickItem(e: MouseEvent, filtIndex: number) { if (this._item !== filtIndex) { // Don't open the dropdown unless you click the already selected item again e.preventDefault(); } this.item = filtIndex; } // filter change coming from histogram private _toggleName(name: string) { const lowerName = name.toLowerCase(); if (!this._available.has(lowerName)) { return; } if (this._includedSet.has(lowerName)) { this._includedSet.delete(lowerName); } else { this._includedSet.add(lowerName); } // represent _includedSet as a negative text filter and put it in the box const diff = new Set(this._available); for (let c of this._includedSet) { diff.delete(c) } let filter = ''; if (diff.size > 0) { filter = '!'+Array.from(diff).join(' '); } this.querySelector<HTMLInputElement>('#text-filter')!.value = filter; // don't trigger _textFilter() since that would send an event back to histogram and // start an infinite loop. this._includedSet is correct, apply it and render. this._applyCommandFilter(); } // Returns a JSON string representation of the command, augmentend with visually rich // or interactive elements for certain types. private _renderRullOpRepresentation(ele: CommandsSk, op: Command) { // Use json.stringify's replacer feature to replace certain objects. // we would like to replace them directly with html templates, but json.stringify // toStrings them, so instead replace them with a magic string and add the template // to a list, then replace those magic strings with items from the list afterwards. // An unlikely string meaning 'insert html template here' const magic = '546rftvyghbjjkjiuytre'; // a list of templates to be used to replaces occurrences of magic. 
const inserts: TemplateResult[] = []; const replacer = function(name: string, value: any) { if (name === 'imageIndex') { // Show a clickable button that takes the user to the image resource viewer. inserts.push(html`<b>${value}</b> <button @click=${()=>{ele._jumpToImage(value)}} title="Show the image referenced by this command in the resource viewer" >Image</button>`); return magic; } return value; } const strung = JSON.stringify(op.details, replacer, 2); // JSON.stringify adds some quotes around the magic word. // including these in our delimeter removes them. const jsonparts = strung.split('"'+magic+'"'); let result = [html`${jsonparts[0]}`]; for (let i = 1; i < jsonparts.length; i++) { result.push(inserts[i-1]); result.push(html`${jsonparts[i]}`) } return html`<pre>${result}</pre>`; } private _jumpToImage(index: number){ this.dispatchEvent(new CustomEvent<CommandsSkSelectImageEventDetail>( 'select-image', { detail: { id: index }, bubbles: true, })); } // (index is in the unfiltered list) private _toggleVisible(index: number){ this._cmd[index].visible = !this._cmd[index].visible; } // lit-html does not appear to support setting a tag's name with a ${} so here's // a crummy workaround private _icon(item: PrefixItem) { if (item.icon === 'save-icon-sk') { return html`<save-icon-sk style="fill: ${ item.color };" class=icon> </save-icon-sk>`; } else if (item.icon === 'content-copy-icon-sk') { return html`<content-copy-icon-sk style="fill: ${ item.color };" class=icon> </content-copy-icon-sk>`; } else if (item.icon === 'image-icon-sk') { return html`<image-icon-sk style="fill: ${ item.color };" class=icon> </image-icon-sk>`; } } // Any deterministic mapping between integers and colors will do private _gpuOpColor(index: number) { return COLORS[index % COLORS.length]; } // deep copy private _copyPrefix(p: PrefixItem): PrefixItem { return {icon: p.icon, color: p.color, count: p.count}; } private _rangeInputHandler(e: Event) { const lo = 
parseInt(this.querySelector<HTMLInputElement>('#rangelo')!.value); const hi = parseInt(this.querySelector<HTMLInputElement>('#rangehi')!.value); this.range = [lo, hi]; } // parse the text filter input, and if it is possible to represent it purely as // a command filter, store it in this._includedSet private _textFilter() { let rawFilter = this.querySelector<HTMLInputElement>('#text-filter' )!.value.trim().toLowerCase(); const negative = (rawFilter[0] == '!'); // make sure to copy it so we don't alter this._available this._includedSet = new Set<string>(this._available); if (rawFilter !== '') { if (negative) { rawFilter = rawFilter.slice(1).trim(); const tokens = rawFilter.split(/\s+/); // negative filters can always be represented with histogram selections for (const token of tokens) { this._includedSet.delete(token); } } else { // for positive filters, the text could either be a set of command names, // or a free text search. const tokens = rawFilter.split(/\s+/); this._includedSet = new Set<string>(); for (const token of tokens) { if (this._available.has(token)) { this._includedSet.add(token); } else { // not a command name, bail out, reset this, do a free text search this._includedSet = new Set<string>(this._available); // since we just altered this._includedSet we have to let histogram know. this.dispatchEvent(new CustomEvent<CommandsSkHistogramEventDetail>( 'histogram-update', { detail: { included: new Set<string>(this._includedSet) }, bubbles: true, })); this._freeTextSearch(tokens); // TODO(nifong): need some visual feedback to let the user know console.log(`Query interpreted as free text search because ${token}\ doesn't appear to be a command name`); return; } } } } this.dispatchEvent(new CustomEvent<CommandsSkHistogramEventDetail>( 'histogram-update', { detail: { included: new Set<string>(this._includedSet) }, bubbles: true, })); this._applyCommandFilter(); // note we still do this for emtpy filters. 
} private _freeTextSearch(tokens: string[]) { // Free text search every command's json representation and include its index in // this._filtered if any token is found const matches = function(s: string) { for (const token of tokens) { if (s.indexOf(token) >= 0) { return true; } } return false; } this._filtered = []; for (let i = this._range[0]; i <= this._range[1]; i++) { const commandText = JSON.stringify(this._cmd[i].details).toLowerCase(); if (matches(commandText)) { this._filtered.push(i); } } this._render(); if (this._filtered.length > 0) { this.item = this._filtered.length - 1; // after render because it causes a scroll } } // Applies range filter and recalculates command name histogram. // The range filter is the first filter applied. The histogram shows any command // that passes // the range filter, and shows a nonzero count for any command that passes the command // filter. private _applyRangeFilter() { // Despite the name, there's not much to "apply" but // the histogram needs to change when the range filter changes which is // why this function is seperate from _applyCommandFilter // Calculate data for histogram // Each command type gets two different counts interface tally { count_in_frame: number, count_in_range_filter: number, } const counts = new DefaultMap<string, tally>(() => ({ count_in_frame: 0, count_in_range_filter: 0 })); for (let i = 0; i < this._cmd.length; i++) { let c = this._cmd[i]; counts.get(c.name).count_in_frame += 1; // always increment first count if (i >= this._range[0] && i <= this._range[1]) { counts.get(c.name)!.count_in_range_filter += 1; // optionally increment filtered count. } } // Now format the histogram as a sorted array suitable for use in the template. // First convert the counts map into an Array of HistogramEntry. 
this._histogram = []; counts.forEach((value, key) => { this._histogram.push({ name: key, countInFrame: value.count_in_frame, countInRange: value.count_in_range_filter, }) }); // Now sort the array, descending on the rangeCount, ascending // on the op name. // sort by rangeCount so entries don't move on enable/disable this._histogram.sort(function(a,b) { if (a.countInRange == b.countInRange) { if (a.name < b.name) { return -1; } if (a.name > b.name) { return 1; } return 0; } else { return b.countInRange - a.countInRange; } }); // the user's selections are present in the text filter. Apply them now // triggers render this._textFilter(); // that populated this._includedSet, which we also need to notify histogram of. // send this to the histogram element this.dispatchEvent( new CustomEvent<CommandsSkHistogramEventDetail>( 'histogram-update', { detail: { hist: this._histogram, // Make a copy so listener can't accidently write to it. included: new Set<string>(this._includedSet), }, bubbles: true, })); } // Apply a filter specified by this._includedSet and set the filtered list to be visible. 
private _applyCommandFilter() { // Try to retain the user's playback position in the unfiltered list when doing this // (it is not always possible) const oldPos = this._filtered[this._item]; let newPos: number | null = null; this._filtered = []; for (let i = this._range[0]; i <= this._range[1]; i++) { if (this._includedSet.has(this._cmd[i].name.toLowerCase())) { this._filtered.push(i); if (i === oldPos) { newPos = this._filtered.length - 1; } } } this._playSk!.size = this._filtered.length; this._render(); // gotta render before you can scroll if (newPos !== null) { this.item = newPos; // setter triggers scroll } else { this.item = this._filtered.length - 1; } } // Filters out all but the last command of each gpu op group // Experimental, probably breaks assumptions elsewhere private _opIdFilter() { this._filtered = []; const commandsOfEachOp = new DefaultMap<number, number[]>(() => []); this._cmd.forEach((command, index) => { if (command.details.auditTrail && command.details.auditTrail.Ops) { const opid = command.details.auditTrail.Ops[0].OpsTaskID; commandsOfEachOp.get(opid).push(index); } }); const sortedKeys: number[] = Array.from(commandsOfEachOp.keys()); sortedKeys.sort((a, b) => a - b); // force it to sort as a number, not a string sortedKeys.forEach((k) => { commandsOfEachOp.get(k)!.forEach((i) => { this._filtered.push(i); }); }); this._playSk!.size = this._filtered.length; this.item = this._filtered.length - 1; } } define('commands-sk', CommandsSk);
<gh_stars>1-10 /** Copyright 2016 IPCO 2012 Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* * This configuration file is used to define and override variables and functions for the Pay By Bank App Branded or Custom Web Merchant Button. * * As a default implementation, this file contains the Branded Web Merchant Button implementation. * * If you want to implement the custom web merchant button, then copy the contents of pbbacustomconfig_custom.template into this file. * * If you want to implement the branded web merchant button, then copy the contents of pbbacustomconfig_branded.template into this file. * */ /* Define the PBBA variables */ var zappVersion = "2.0.0"; // Current web merchant button library version. var cookieManagementUrl = "https://www.paybybankapp.co.uk/" // Cookie management URL for PayConnect. var imageKey = 1; // Default imageKey is 1 for the standard Pay By Bank App Web Merchant Button. var merchantPollInterval = 5000; // Default merchant poll interval of 5 seconds to poll the merchant server for payment notification. var hoverOverPopupEnabled = true; // Flag to enable or disable the hover over popup /* Initialize PayConnect. */ window.onload = function() { setupPayConnect(cookieManagementUrl, document); } /* Override the pay() and notify() functions. */ zapp.load(zappVersion, { pay : function(data, callback) { /* * The only data that is required to be posted to the merchant server from the PBBA Web Merchant Button is the pcid. 
* merchantRequestToPayPostData is the merchant's request to pay object that is posted to the merchant's server. * */ /* * The below merchantRequestToPayPostData is a JSON object defined by merchant to hold the checkout information * and other things, in addition to all of merchants data element, they have to include the payConnectID element * to this object, a sample declaration is given below. * * var merchantRequestToPayPostData = { * payConnectID: null; * }; */ if (typeof data.pcid !== "undefined") merchantRequestToPayPostData.payConnectID = data.pcid; //Merchant specific JSON object merchantRequestToPayPostData.payConnectID /* * 1. Post the data to the merchant server. * * 2. SUCCESSFUL RESPONSE - Upon receipt of a successful response from the merchant server: * * A. Create a response object by populating the following mandatory PBBA attributes: * * NOTE: merchantRequestToPayResponseObject is assumed here to be the merchant's variable name of the JSON response object for the request to pay from the merchant server */ var response = new zapppopup.response.payment({ success : true, // Leave it as is secureToken : merchantRequestToPayResponseObject.secureToken, brn : merchantRequestToPayResponseObject.pbbaCode, retrievalExpiryInterval : merchantRequestToPayResponseObject.retrievalTimeOutPeriod, confirmationExpiryInterval : merchantRequestToPayResponseObject.confirmationTimeoutPeriod, notificationSent: merchantRequestToPayResponseObject.cookieSentStatus, pcid: null, // Leave it as is cfiShortName: merchantRequestToPayResponseObject.bankName }); /* * B. Make a callback passing in the response object created in Step A above: */ callback(response); /* * 3. ERROR - Upon receipt of an error from the merchant server: * * A. 
Create a new response object by populating the following fields: * */ var response = new zapppopup.response.payment({ success : false, // Leave it as is data : MerchantErrorJSONObject // MerchantErrorJSONObject is assumed to be merchant naming for their error object }); /* * B. Make a callback passing in the response object created in Step A above: */ callback(response); }, notify : function(secureToken, callback) { /* NOTE: If jQuery.ajax is used for polling the merchant server and the method is GET then Zapp suggests doing the following to prevent caching: * * Step 1: Add the following property to AJAX call: * cache: false * * Step 2: Add a cache busting parameter to the polling URL. This parameter can be any random number (for example, date timestamp) * appended to the polling URL. For example, if the polling URL is "/responseForPayment.aspx?secureToken=<PASSWORD>&orderId=12345" then * the URL with a cache busting parameter called time would be: * "/responseForPayment.aspx?secureToken=<PASSWORD>&orderId=12345&time="+Date.now() * */ /* 1. This method polls the merchant server for a response every X seconds. * X is the value for merchantPollInterval. * * 2. secureToken must be passed to the merchant server to enable polling the zapp server for a * payment notification. * * 3. SUCCESSFUL RESPONSE - Upon receipt of a successful response from the merchant server: * * A. Create a new response object by populating the following fields: * */ var response = new zapppopup.response.notify({ success : true }); /* * B. Make a callback passing in the response object created in Step A above: * */ callback(response); /* C. Check if the pcid is present in the response from the merchant server. 
If present then * set the pcid cookie by calling the setCookie function: * * NOTE: merchantGetPaymentStatusObject is the payment notification object returned from the merchant server */ setCookie("pcid", merchantGetPaymentStatusObject.payConnectID, merchantGetPaymentStatusObject.cookieExpiryDays, cookieManagementUrl); /* * D. Continue further order processing. * * 4. IN PROGRESS - Upon receipt of an IN PROGRESS status from the distributor server: * * A. Create a new response object by populating the following fields: * */ var response = new zapppopup.response.notify({ success : false }); /* * B. Make a callback passing in the response object created in Step A above: * */ callback(response); /* * 5. ERROR - Upon receipt of an error from the merchant server: * * A. Create a new response object by populating the following fields: * */ var response = new zapppopup.response.notify({ success : false }); /* B. Make a callback passing in the response object created in Step A above: * */ callback(response); /* * C. Merchant implements their own Error Handling process * */ /* Example of a jQuery AJAX polling mechanism using method GET with caching set to false and a cache buster (time) in the URL. * */ jQuery.ajax({ url : merchantPollingUrl, // Merchant URL to poll for the payment notification. Modify appropriately. dataType : "json", // If merchant expects a JSON object to be returned from the polled server. Modify appropriately. crossDomain : true, // If merchant requires cross domain polling. Modify appropriately. cache: false, // Disables caching in IE type : "GET", // In case the polling method is GET. Modify appropriately. contentType : "application/json; charset=UTF-8", // The content type to be posted to the polling server. Modify appropriately. success : function(merchantGetPaymentStatusObject) { // merchantGetPaymentStatusObject is the merchant's response object from the polled server var response = null; // Check for the response status from the polled server. 
If the status is in progress the continue polling using the following: response = new zapppopup.response.notify({success: false}); // Continue polling // Check for the response status from the polled server. If the status is success (indicating an authorised or a declined transaction) then do the following: response = new zapppopup.response.notify({success: true}); // Stop polling // If the PayConnect cookie is present in the response from the merchant server, then call the setCookie() function to // setup the PayConnect option. if (typeof merchantGetPaymentStatusObject.payConnectID != "undefined" ) { setCookie("pcid", merchantGetPaymentStatusObject.payConnectID, merchantGetPaymentStatusObject.cookieExpiryDays, cookieManagementUrl); // Set up PayConnect cookie, // merchantGetPaymentStatusObject.payConnectID being the payConnectId within the Merchant Response Object } callback(response); // Leave it as is // Continue further merchant specific processing. Example - showing the order success or cancel page. }, error : function(merchantGetPaymentStatusObject) { // Error handling var response = new zapppopup.response.notify({success : false}); // Stop polling callback(response); } }); /* * */ }, error : function(errors) { /* Place any other error handling logic here */ }, cookieManagementUrl: cookieManagementUrl, imageKey: imageKey, merchantPollInterval: merchantPollInterval, hoverOverPopupEnabled: hoverOverPopupEnabled });
///////////////////////////////////////////////////////////////////////////////
//                                                                           //
// hcttodo.js                                                                //
// Copyright (C) Microsoft Corporation. All rights reserved.                 //
// This file is distributed under the University of Illinois Open Source    //
// License. See LICENSE.TXT for details.                                    //
//                                                                           //
///////////////////////////////////////////////////////////////////////////////

// Verifies that all TODO comments have some explanation.

// Pull in the shared helper library (StringSplit, ArrayIndexOf, ReadAllTextFile, ...).
eval(new ActiveXObject("Scripting.FileSystemObject").OpenTextFile(new ActiveXObject("WScript.Shell").ExpandEnvironmentStrings("%HLSL_SRC_DIR%\\utils\\hct\\hctjs.js"), 1).ReadAll());

// It would be nice to tag with issue numbers.
// A well-formed TODO looks like: "TODO: HLSL #123 - description".
// (The duplicate declaration of this regex has been removed.)
var goodToDoLine = /TODO: HLSL #[0-9]+ - [0-9a-zA-Z]+/;
// File extensions that are scanned for TODO comments.
var fileExts = [ ".c", ".cpp", ".h", ".td", ".cs", ".hlsl", ".fx" ];

// CountBadTodoLines echoes every TODO found in the given files and returns the
// number of badly formatted ones. NOTE: the strict-format check is currently
// commented out, so every TODO is echoed and the returned count is always 0.
function CountBadTodoLines(files) {
    var badTodoCount = 0;
    for (var i = 0; i < files.length; i++) {
        var path = files[i];
        var extension = PathGetExtension(path).toLowerCase();
        if (ArrayIndexOf(fileExts, extension) >= 0) {
            var content = ReadAllTextFile(path);
            var lines = StringSplit(content, "\n");
            for (var j = 0; j < lines.length; j++) {
                var line = lines[j];
                var todoIndex = line.indexOf("TODO");
                if (todoIndex !== -1) {
                    // if (!goodToDoLine.exec(line)) {
                    WScript.Echo(path + "(" + (j+1) + ":" + (todoIndex+1) + "): " + lines[j]);
                    // badTodoCount += 1;
                    // }
                }
            }
        }
    }
    return badTodoCount;
}

// ExpandPath expands %VAR% environment references in a path.
function ExpandPath(path) {
    return new ActiveXObject("WScript.Shell").ExpandEnvironmentStrings(path);
}

// Directories (mostly upstream LLVM targets) that are excluded from the scan.
var ignorePaths = [
    "%HLSL_SRC_DIR%\\lib\\target\\hexagon",
    "%HLSL_SRC_DIR%\\lib\\executionengine",
    "%HLSL_SRC_DIR%\\lib\\target\\arm",
    "%HLSL_SRC_DIR%\\lib\\target\\mips",
    "%HLSL_SRC_DIR%\\lib\\target\\nvptx",
    "%HLSL_SRC_DIR%\\lib\\target\\r600",
    "%HLSL_SRC_DIR%\\lib\\target\\powerpc",
    "%HLSL_SRC_DIR%\\lib\\target\\x86",
    "%HLSL_SRC_DIR%\\lib\\target\\xcore"
];
// Normalize the ignore list the same way scanned paths are normalized below.
ignorePaths = ArraySelect(ignorePaths, function(path) { return StringToLower(ExpandPath(path)); });

var files = GetFilesRecursive(ExpandPath("%HLSL_SRC_DIR%"));
// Drop any file that lives under an ignored directory.
files = ArraySelectMany(files, function (path) {
    path = StringToLower(path);
    if (ArrayAny(ignorePaths, function (ignore) { return path.indexOf(ignore) === 0; })) {
        return [];
    } else {
        return [path];
    }
});

var counts = CountBadTodoLines(files);

// Fail on any bad TODO lines.
if (counts !== 0) {
    WScript.Echo("Badly formatted TODO comments found in " + files.length + " files: " + counts);
    WScript.Echo("\nFormat should be something like this, with the right number and description (spaces and symbols enforced for consistency).");
    WScript.Echo("  // TODO: HLSL #123 - description");
    WScript.Quit(1);
} else {
    WScript.Echo("No badly formatted TODO comments found in " + files.length + " files.");
    WScript.Quit(0);
}
// Copyright 2018 Sogou Inc. All rights reserved. // Use of this source code is governed by the Apache 2.0 // license that can be found in the LICENSE file. package com.sogou.sogocommon.net; /** * created by zhouwei 2018/6/15 */ public enum Error { NET_ERROR(-1,"net error"); public int errorCode; public String errorMsg; Error(int errorCode, String errorMsg) { this.errorCode = errorCode; this.errorMsg = errorMsg; } }
<gh_stars>10-100 import DruxtView from './DruxtView.vue' export default { title: 'Druxt/Views/DruxtView', component: DruxtView } const Template = (args, { argTypes }) => ({ components: { DruxtView }, props: Object.keys(argTypes), template: `<DruxtView v-bind="$props" />` }) export const Default = Template.bind({}) Default.storyName = 'DruxtView'
"""This module contains functions for filesystem actions. All functions work almost the same way as normal python3 os.path module functions do.""" import os DIR_TYPE = const(0x4000) FILE_TYPE = const(0x8000) def normcase(s: str) -> str: """Normalize path case. Just for compatibility with normal python3.""" return s def normpath(s: str) -> str: """Normalize path. Just for compatibility with normal python3.""" return s def abspath(s: str) -> str: """Get absolute filepath.""" return "/".join((os.getcwd(), s)) if s[0] != "/" else s def join(*args): """Concatenation of paths. Just for compatibility with normal python3.""" return "/".join(args) def split(path: str): """Split path into (head, tail) tuple. Tail is the last path component. Head is all the rest.""" if path == "": return "", "" r = path.rsplit("/", 1) if len(r) == 1: return "", path head = "/" if not r[0] else r[0] return head, r[1] def basename(path: str) -> str: """Return the base name of pathname path.""" return split(path)[1] def dirname(path: str) -> str: """Get directory name from filepath.""" slash_i = path.rfind("/") if slash_i == -1: return "" return path[:slash_i] if slash_i != 0 else "/" def getsize(path: str): """Get size of file in bytes. Dirs are not supported.""" filename = path[path.rfind("/") + 1:] for f in os.ilistdir(dirname(path)): if f[0] == filename and f[1] == FILE_TYPE: return f[3] raise FileNotFoundError("No such file or directory: '%s'." % filename) def isdir(path: str): """Check if a directory exists, or if the path given is a directory. Original isdir does follow symlinks, this implementation does not.""" cwd = os.getcwd() slash_i = path.find("/") if slash_i == -1: fir_part, sec_part = path, "" else: if slash_i == 0: slash_i = path.find("/", slash_i+1) fir_part, sec_part = (path, "") if slash_i == -1 else (path[:slash_i], path[slash_i+1:]) else: fir_part, sec_part = path[:slash_i], path[slash_i+1:] # The code above is written to handle state when user tries to create dir in /. 
# It is impossible to do this. But there will be no exception if you try to os.chdir("/") and then os.chdir("anyD") # But if if you run os.chdir("/anyD") the exception will occur. try: os.chdir(fir_part) except OSError: os.chdir(cwd) return False res = isdir(sec_part) if sec_part != "" else True os.chdir(cwd) return res def isabs(path: str): """Check if path is an absolute pathname.""" return path.find("/") == 0 def isfile(path: str): """Check if both a file exists and if it is a file. Original isfile does follow symlinks, this implementation does not.""" if isdir(dirname(path)): filename = path[path.rfind("/")+1:] for f in os.ilistdir(dirname(path)): if f[0] == filename and f[1] == FILE_TYPE: return True return False
# A function to calculate the nth fibonacci number def fibonacci(n): memo = {1: 0, 2: 1} # Check if the value has been calculated if n in memo: return memo[n] # Calculate and cache the value else: memo[n] = fibonacci(n-1) + fibonacci(n-2) return memo[n] print(fibonacci(10))
#!/usr/bin/env bats
# Integration tests for CRI-O cgroup-related container settings.
# Relies on helpers from the CRI-O test suite (start_crio, crictl, setup_test,
# cleanup_test, is_cgroup_v2, set_swap_fields_given_cgroup_version, ...).

load helpers

function setup() {
	newconfig="$TESTDIR/config.json"
	setup_test
}

function teardown() {
	cleanup_test
}

# Verifies CONTAINER_PIDS_LIMIT is applied to the container's pids cgroup.
@test "pids limit" {
	if ! grep -qEw ^pids /proc/cgroups; then
		skip "pids cgroup controller is not available"
	fi
	CONTAINER_PIDS_LIMIT=1234 start_crio
	jq '	.command'='["/bin/sleep", "600"]' \
		"$TESTDATA"/container_config.json > "$TESTDIR"/container_pids_limit.json

	ctr_id=$(crictl run "$TESTDIR"/container_pids_limit.json "$TESTDATA"/sandbox_config.json)
	# cgroup v1 path first, fall back to the v2 unified-hierarchy path.
	output=$(crictl exec --sync "$ctr_id" sh -c 'cat /sys/fs/cgroup/pids/pids.max 2>/dev/null || cat /sys/fs/cgroup/pids.max')
	[[ "$output" == "1234" ]]
}

# Verifies the conmon process is placed in the configured systemd slice.
@test "conmon custom cgroup" {
	CONTAINER_CGROUP_MANAGER="systemd" CONTAINER_DROP_INFRA_CTR=false CONTAINER_MANAGE_NS_LIFECYCLE=false CONTAINER_CONMON_CGROUP="customcrioconmon.slice" start_crio
	jq '	.linux.cgroup_parent = "Burstablecriotest123.slice"' \
		"$TESTDATA"/sandbox_config.json > "$TESTDIR"/sandbox_config_slice.json

	pod_id=$(crictl runp "$TESTDIR"/sandbox_config_slice.json)

	output=$(systemctl status "crio-conmon-$pod_id.scope")
	[[ "$output" == *"customcrioconmon.slice"* ]]
}

# Verifies a memory+swap limit is written through to the container's cgroup.
@test "ctr with swap should be configured" {
	if ! grep -v Filename < /proc/swaps; then
		skip "swap not enabled"
	fi
	start_crio
	# memsw should be greater than or equal to memory limit
	# 210763776 = 1024*1024*200
	jq '	.linux.resources.memory_swap_limit_in_bytes = 210763776
		| .linux.resources.memory_limit_in_bytes = 209715200' \
		"$TESTDATA"/container_sleep.json > "$newconfig"
	ctr_id=$(crictl run "$newconfig" "$TESTDATA"/sandbox_config.json)

	# Helper picks the v1 or v2 swap filename into CGROUP_MEM_SWAP_FILE.
	set_swap_fields_given_cgroup_version
	if test -r "$CGROUP_MEM_SWAP_FILE"; then
		output=$(crictl exec --sync "$ctr_id" sh -c "cat $CGROUP_MEM_SWAP_FILE")
		[[ "$output" == "210763776" ]]
	fi
}

# A swap limit below the memory limit is invalid and must fail to run.
@test "ctr with swap should fail when swap is lower" {
	if ! grep -v Filename < /proc/swaps; then
		skip "swap not enabled"
	fi
	start_crio
	# memsw should be greater than or equal to memory limit
	# 210763776 = 1024*1024*200
	jq '	.linux.resources.memory_swap_limit_in_bytes = 209715200
		| .linux.resources.memory_limit_in_bytes = 210763776' \
		"$TESTDATA"/container_sleep.json > "$newconfig"
	! crictl run "$newconfig" "$TESTDATA"/sandbox_config.json
}

# Verifies arbitrary cgroup v2 "unified" keys are applied verbatim.
@test "cgroupv2 unified support" {
	if ! is_cgroup_v2; then
		skip "node must be configured with cgroupv2 for this test"
	fi
	start_crio

	jq '	.linux.resources.unified = {"memory.min": "209715200", "memory.high": "210763776"}' \
		"$TESTDATA"/container_sleep.json > "$newconfig"
	ctr_id=$(crictl run "$newconfig" "$TESTDATA"/sandbox_config.json)

	output=$(crictl exec --sync "$ctr_id" sh -c "cat /sys/fs/cgroup/memory.min")
	[[ "$output" == *"209715200"* ]]
	output=$(crictl exec --sync "$ctr_id" sh -c "cat /sys/fs/cgroup/memory.high")
	[[ "$output" == *"210763776"* ]]
}
package org.fluentlenium.core.filter.matcher;

import java.util.regex.Pattern;

/**
 * Static class that is in charge of analyzing the filter and matcher.
 *
 * <p>Each method follows the same contract: a {@code null} current value never
 * matches; when {@code patternValue} is {@code null} the plain-string
 * {@code referenceValue} comparison is used; otherwise the regex pattern wins
 * and {@code referenceValue} is ignored.</p>
 */
public final class CalculateService {
    private CalculateService() {
        // Utility class - no instances.
    }

    /**
     * check if the current value contains the patternValue or the referenceValue
     *
     * @param patternValue   pattern
     * @param referenceValue reference value
     * @param currentValue   current value
     * @return boolean value for contains check
     */
    public static boolean contains(Pattern patternValue, String referenceValue, String currentValue) {
        if (currentValue == null) {
            return false;
        }
        if (patternValue == null) {
            return currentValue.contains(referenceValue);
        }
        return patternValue.matcher(currentValue).find();
    }

    /**
     * check if the current value is equal the patternValue or the referenceValue
     *
     * @param patternValue   pattern
     * @param referenceValue reference value
     * @param currentValue   current value
     * @return boolean value for equal check
     */
    public static boolean equal(Pattern patternValue, String referenceValue, String currentValue) {
        if (currentValue == null) {
            return false;
        }
        if (patternValue == null) {
            return currentValue.equals(referenceValue);
        }
        // matches() requires the pattern to cover the entire input.
        return patternValue.matcher(currentValue).matches();
    }

    /**
     * check if the current value starts with the patternValue or the referenceValue
     *
     * @param patternValue   pattern
     * @param referenceValue reference value
     * @param currentValue   current value
     * @return boolean value for startsWith check
     */
    public static boolean startsWith(Pattern patternValue, String referenceValue, String currentValue) {
        if (currentValue == null) {
            return false;
        }
        if (patternValue == null) {
            return currentValue.startsWith(referenceValue);
        }
        // The leftmost regex match must begin at index 0.
        java.util.regex.Matcher m2 = patternValue.matcher(currentValue);
        return m2.find() && 0 == m2.start();
    }

    /**
     * check if the current value ends with the patternValue or the referenceValue
     *
     * @param patternValue   pattern
     * @param referenceValue reference value
     * @param currentValue   current value
     * @return boolean value for endsWith check
     */
    public static boolean endsWith(Pattern patternValue, String referenceValue, String currentValue) {
        if (currentValue == null) {
            return false;
        }
        if (patternValue == null) {
            return currentValue.endsWith(referenceValue);
        }
        // Walk all matches; the last one must end exactly at the string's end.
        java.util.regex.Matcher m2 = patternValue.matcher(currentValue);
        int end = 0;
        while (m2.find()) {
            end = m2.end();
        }
        return currentValue.length() == end;
    }
}
<reponame>ShibaPipi/sarair // util hooks export * from './lib/useAsync' export * from './lib/useAntdTable' export * from './lib/useDebounce' export * from './lib/useDebounceFn' export * from './lib/useDocumentTitle' export * from './lib/useEventListener' export * from './lib/useGetState' export * from './lib/useInterval' export * from './lib/useLockFn' export * from './lib/useMemoizedFn' export * from './lib/useMount' export * from './lib/useObjectState' export * from './lib/useSetState' export * from './lib/useThrottle' export * from './lib/useThrottleFn' export * from './lib/useUndo' export * from './lib/useUnmountedRef' export * from './lib/useUrlState' export * from './lib/useWhyDidYouUpdate' // react-query hooks export * from './lib/query' export * from './lib/query-optimistic-options' // request hooks export * from './lib/request'
if [ -z "$1" ] then echo "usage ./signin.sh <You ID>" exit 1 fi git checkout -b $1
package com.project.elisabet.appPaciente; import java.net.Socket; public class SocketHandler { private static Socket socket; public static synchronized Socket getSocket(){ return socket; } public static synchronized void setSocket(Socket socket){ SocketHandler.socket = socket; } }
#include <stdio.h> int sumOfPositiveIntegers(int arr[], int size) { int sum = 0; for (int i = 0; i < size; i++) { if (arr[i] > 0) { sum += arr[i]; } } return sum; }
"""TensorFlow 1.x single-view 3D reconstruction pipeline.

Builds an image encoder, a voxel decoder, a 2D mask generator, and a
projection-guided refinement stage, then runs inference over PNG images and
writes before/after voxel grids as .obj files.

NOTE(review): this module was recovered from whitespace-mangled source;
indentation (in particular which statements sit inside each
``tf.variable_scope`` block) was reconstructed from variable-scope/reuse
semantics - verify against the original repository. Helpers such as ``lrelu``,
``residual_block``, ``refine_encoder``, ``refine_decoder``, ``cast``,
``get_rotation_matrix_*``, ``scale_trans_*`` and ``rotate_mask_voc`` come from
``Refinement_utils`` (star-imported) and are not visible here.
"""
import sys
import numpy as np
import tensorflow as tf
import os
import voxel
from PIL import Image
import time
from Refinement_utils import *

voxel_size = 32          # output voxel grid resolution (32^3)
img_h = 128              # input image height in pixels
img_w = 128              # input image width in pixels
vector_channel = 1024    # latent feature vector length
threshold = 0.4          # occupancy cutoff applied to the softmax output
# ShapeNet synset IDs used by the project.
cates = ["04256520", "02691156", "03636649", "04401088", "04530566", "03691459",
         "03001627", "02933112", "04379243", "03211117", "02958343", "02828884",
         "04090263"]  # The object categories
# Synset ID -> human-readable category name.
dic = {"04256520": "sofa", "02691156": "airplane", "03636649": "lamp",
       "04401088": "telephone", "04530566": "vessel", "03691459": "loudspeaker",
       "03001627": "chair", "02933112": "cabinet", "04379243": "table",
       "03211117": "display", "02958343": "car", "02828884": "bench",
       "04090263": "rifle"}


def encoder_residual_block(input, layer_id, num_layers=2, channels=None):
    """2D residual block: a 1x1 projection shortcut plus `num_layers` 3x3
    convs (leaky-ReLU), summed at the end. Variable names embed `layer_id`."""
    input_shape = input.get_shape()
    last_channel = int(input_shape[-1])
    last_layer = input
    batch_size = int(input_shape[0])  # NOTE(review): computed but unused
    # 1x1 conv shortcut to match the target channel count.
    wd_res = tf.get_variable("wres%d" % layer_id, shape=[1, 1, last_channel, channels],
                             initializer=tf.contrib.layers.xavier_initializer())
    wb_res = tf.get_variable("bres%d" % layer_id, shape=[channels],
                             initializer=tf.zeros_initializer())
    res = tf.nn.conv2d(input, wd_res, strides=[1, 1, 1, 1], padding='SAME')
    res = tf.nn.bias_add(res, wb_res)
    res = lrelu(res)
    # Main path: num_layers 3x3 convolutions.
    for i in range(num_layers):
        wd_conv = tf.get_variable("wd%d_%d" % (layer_id, i), shape=[3, 3, last_channel, channels],
                                  initializer=tf.contrib.layers.xavier_initializer())
        wb_conv = tf.get_variable("wb%d_%d" % (layer_id, i), shape=[channels],
                                  initializer=tf.zeros_initializer())
        last_layer = tf.nn.conv2d(last_layer, wd_conv, strides=[1, 1, 1, 1], padding='SAME')
        last_layer = tf.nn.bias_add(last_layer, wb_conv)
        last_layer = lrelu(last_layer)
        last_channel = channels
    output = res + last_layer
    return output


def encoder(input, reuse=False):
    """Image encoder with pose heads.

    Returns (feature 1024-d, final conv feature map, predicted euler angles,
    predicted scale/translation, list of 5 pre-pool feature maps used as
    skip connections by `generator`).
    """
    # print(input.get_shape()[0])
    batch_size = int(input.get_shape()[0])
    input = tf.reshape(input, shape=[batch_size, img_h, img_w, 3])
    layer_id = 1
    shortcuts = []
    with tf.variable_scope("encoder", reuse=reuse):
        # Stem: 7x7 then 3x3 conv, followed by 2x2 max-pool.
        wd00 = tf.get_variable("wd00", shape=[7, 7, 3, 96],
                               initializer=tf.contrib.layers.xavier_initializer())
        bd00 = tf.get_variable("bd00", shape=[96], initializer=tf.zeros_initializer())
        conv0a = tf.nn.conv2d(input, wd00, strides=[1, 1, 1, 1], padding='SAME')
        conv0a = tf.nn.bias_add(conv0a, bd00)
        wd01 = tf.get_variable("wd01", shape=[3, 3, 96, 96],
                               initializer=tf.contrib.layers.xavier_initializer())
        bd01 = tf.get_variable("bd01", shape=[96], initializer=tf.zeros_initializer())
        conv0b = tf.nn.conv2d(conv0a, wd01, strides=[1, 1, 1, 1], padding='SAME')
        conv0b = tf.nn.bias_add(conv0b, bd01)
        pool1 = tf.nn.max_pool(conv0b, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
        shortcuts.append(pool1)
        # Alternating residual blocks and pooling; each pool halves H and W.
        conv1 = encoder_residual_block(pool1, layer_id, 2, 128)
        pool2 = tf.nn.max_pool(conv1, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
        shortcuts.append(pool2)
        layer_id += 1
        conv2 = encoder_residual_block(pool2, layer_id, 2, 256)
        pool3 = tf.nn.max_pool(conv2, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
        shortcuts.append(pool3)
        layer_id += 1
        # Plain (non-residual) double conv stage.
        wd30 = tf.get_variable("wd30", shape=[3, 3, 256, 256],
                               initializer=tf.contrib.layers.xavier_initializer())
        bd30 = tf.get_variable("bd30", shape=[256], initializer=tf.zeros_initializer())
        conv3a = tf.nn.conv2d(pool3, wd30, strides=[1, 1, 1, 1], padding='SAME')
        conv3a = tf.nn.bias_add(conv3a, bd30)
        wd31 = tf.get_variable("wd31", shape=[3, 3, 256, 256],
                               initializer=tf.contrib.layers.xavier_initializer())
        bd31 = tf.get_variable("bd31", shape=[256], initializer=tf.zeros_initializer())
        conv3b = tf.nn.conv2d(conv3a, wd31, strides=[1, 1, 1, 1], padding='SAME')
        conv3b = tf.nn.bias_add(conv3b, bd31)
        pool4 = tf.nn.max_pool(conv3b, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
        shortcuts.append(pool4)
        layer_id += 1
        conv4 = encoder_residual_block(pool4, layer_id, 2, 256)
        pool5 = tf.nn.max_pool(conv4, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
        shortcuts.append(pool5)
        layer_id += 1
        conv5 = encoder_residual_block(pool5, layer_id, 2, 256)
        pool6 = tf.nn.max_pool(conv5, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
        feature_map = pool6
        # Global average pool over spatial dims, then FC to the 1024-d feature.
        pool6 = tf.reduce_mean(pool6, [1, 2])
        wfc = tf.get_variable("wfc", shape=[256, 1024],
                              initializer=tf.contrib.layers.xavier_initializer())
        feature = tf.matmul(pool6, wfc)
        # Pose heads: 3 Euler angles and a 3-vector scale/translation.
        w_e = tf.get_variable("w_euler", shape=[1024, 3],
                              initializer=tf.contrib.layers.xavier_initializer())
        euler_angle = tf.matmul(feature, w_e)
        w_st = tf.get_variable('w_ft', shape=[1024, 3],
                               initializer=tf.contrib.layers.xavier_initializer())
        st = tf.matmul(feature, w_st)
        print('pool1', pool1)
        print('pool2', pool2)
        print('pool3', pool3)
        print('pool4', pool4)
        print('pool5', pool5)
        print('pool6', pool6)
        print('feature', feature)
        print('feature_map', feature_map)
        return feature, feature_map, euler_angle, st, shortcuts


def encoder_angle(input, reuse=False):
    """Same backbone as `encoder` but without the pose heads; returns only
    (feature, feature_map, shortcuts). Used for the per-category pose heads
    declared later under the "angles_trans" scope."""
    batch_size = int(input.get_shape()[0])
    input = tf.reshape(input, shape=[batch_size, img_h, img_w, 3])
    layer_id = 1
    shortcuts = []
    eulers_cates = {}  # NOTE(review): declared but unused
    st_cates = {}      # NOTE(review): declared but unused
    with tf.variable_scope("encoder", reuse=reuse):
        wd00 = tf.get_variable("wd00", shape=[7, 7, 3, 96],
                               initializer=tf.contrib.layers.xavier_initializer())
        bd00 = tf.get_variable("bd00", shape=[96], initializer=tf.zeros_initializer())
        conv0a = tf.nn.conv2d(input, wd00, strides=[1, 1, 1, 1], padding='SAME')
        conv0a = tf.nn.bias_add(conv0a, bd00)
        wd01 = tf.get_variable("wd01", shape=[3, 3, 96, 96],
                               initializer=tf.contrib.layers.xavier_initializer())
        bd01 = tf.get_variable("bd01", shape=[96], initializer=tf.zeros_initializer())
        conv0b = tf.nn.conv2d(conv0a, wd01, strides=[1, 1, 1, 1], padding='SAME')
        conv0b = tf.nn.bias_add(conv0b, bd01)
        pool1 = tf.nn.max_pool(conv0b, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
        shortcuts.append(pool1)
        conv1 = encoder_residual_block(pool1, layer_id, 2, 128)
        pool2 = tf.nn.max_pool(conv1, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
        shortcuts.append(pool2)
        layer_id += 1
        conv2 = encoder_residual_block(pool2, layer_id, 2, 256)
        pool3 = tf.nn.max_pool(conv2, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
        shortcuts.append(pool3)
        layer_id += 1
        wd30 = tf.get_variable("wd30", shape=[3, 3, 256, 256],
                               initializer=tf.contrib.layers.xavier_initializer())
        bd30 = tf.get_variable("bd30", shape=[256], initializer=tf.zeros_initializer())
        conv3a = tf.nn.conv2d(pool3, wd30, strides=[1, 1, 1, 1], padding='SAME')
        conv3a = tf.nn.bias_add(conv3a, bd30)
        wd31 = tf.get_variable("wd31", shape=[3, 3, 256, 256],
                               initializer=tf.contrib.layers.xavier_initializer())
        bd31 = tf.get_variable("bd31", shape=[256], initializer=tf.zeros_initializer())
        conv3b = tf.nn.conv2d(conv3a, wd31, strides=[1, 1, 1, 1], padding='SAME')
        conv3b = tf.nn.bias_add(conv3b, bd31)
        pool4 = tf.nn.max_pool(conv3b, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
        shortcuts.append(pool4)
        layer_id += 1
        conv4 = encoder_residual_block(pool4, layer_id, 2, 256)
        pool5 = tf.nn.max_pool(conv4, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
        shortcuts.append(pool5)
        layer_id += 1
        conv5 = encoder_residual_block(pool5, layer_id, 2, 256)
        pool6 = tf.nn.max_pool(conv5, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
        feature_map = pool6
        pool6 = tf.reduce_mean(pool6, [1, 2])
        wfc = tf.get_variable("wfc", shape=[256, 1024],
                              initializer=tf.contrib.layers.xavier_initializer())
        feature = tf.matmul(pool6, wfc)
        print('pool1', pool1)
        print('pool2', pool2)
        print('pool3', pool3)
        print('pool4', pool4)
        print('pool5', pool5)
        print('pool6', pool6)
        print('feature', feature)
        print('feature_map', feature_map)
        return feature, feature_map, shortcuts


def generator(input, shortcuts, reuse=False):
    """2D mask decoder: six transposed convs upsample the 2x2 encoder feature
    map back to img_h x img_w, adding the encoder `shortcuts` (deepest first)
    at each resolution. Returns (logits, 2-channel softmax mask)."""
    batch_size = int(input.shape[0])
    strides = [[1, 2, 2, 1],   # 4
               [1, 2, 2, 1],   # 8
               [1, 2, 2, 1],   # 16
               [1, 2, 2, 1],   # 32
               [1, 2, 2, 1],   # 64
               [1, 2, 2, 1]]   # 128 (final upsample to img_h x img_w)
    print(input)
    with tf.variable_scope("ge", reuse=reuse):
        wg1 = tf.get_variable('wg1', shape=[3, 3, 256, 256],
                              initializer=tf.contrib.layers.xavier_initializer())
        bg1 = tf.get_variable('bg1', shape=[256], initializer=tf.zeros_initializer())
        g_1 = tf.nn.conv2d_transpose(input, wg1, [batch_size, 4, 4, 256],
                                     strides=strides[0], padding='SAME')
        g_1 = tf.nn.bias_add(g_1, bg1)
        g_1 = lrelu(g_1)
        g_1 = tf.add(g_1, shortcuts[4])
        wg2 = tf.get_variable('wg2', shape=[3, 3, 256, 256],
                              initializer=tf.contrib.layers.xavier_initializer())
        bg2 = tf.get_variable('bg2', shape=[256], initializer=tf.zeros_initializer())
        g_2 = tf.nn.conv2d_transpose(g_1, wg2, [batch_size, 8, 8, 256],
                                     strides=strides[1], padding='SAME')
        g_2 = tf.nn.bias_add(g_2, bg2)
        g_2 = lrelu(g_2)
        g_2 = tf.add(g_2, shortcuts[3])
        wg3 = tf.get_variable('wg3', shape=[3, 3, 256, 256],
                              initializer=tf.contrib.layers.xavier_initializer())
        bg3 = tf.get_variable('bg3', shape=[256], initializer=tf.zeros_initializer())
        g_3 = tf.nn.conv2d_transpose(g_2, wg3, [batch_size, 16, 16, 256],
                                     strides=strides[2], padding='SAME')
        g_3 = tf.nn.bias_add(g_3, bg3)
        g_3 = lrelu(g_3)
        g_3 = tf.add(g_3, shortcuts[2])
        wg4 = tf.get_variable('wg4', shape=[3, 3, 128, 256],
                              initializer=tf.contrib.layers.xavier_initializer())
        bg4 = tf.get_variable('bg4', shape=[128], initializer=tf.zeros_initializer())
        g_4 = tf.nn.conv2d_transpose(g_3, wg4, [batch_size, 32, 32, 128],
                                     strides=strides[3], padding='SAME')
        g_4 = tf.nn.bias_add(g_4, bg4)
        g_4 = lrelu(g_4)
        g_4 = tf.add(g_4, shortcuts[1])
        wg5 = tf.get_variable('wg5', shape=[4, 4, 96, 128],
                              initializer=tf.contrib.layers.xavier_initializer())
        bg5 = tf.get_variable('bg5', shape=[96], initializer=tf.zeros_initializer())
        g_5 = tf.nn.conv2d_transpose(g_4, wg5, [batch_size, 64, 64, 96],
                                     strides=strides[4], padding='SAME')
        g_5 = tf.nn.bias_add(g_5, bg5)
        g_5 = lrelu(g_5)
        g_5 = tf.add(g_5, shortcuts[0])
        # Final layer outputs 2 channels (mask / background), softmaxed.
        wg6 = tf.get_variable('wg6', shape=[4, 4, 2, 96],
                              initializer=tf.contrib.layers.xavier_initializer())
        g_6 = tf.nn.conv2d_transpose(g_5, wg6, [batch_size, img_h, img_w, 2],
                                     strides=strides[5], padding='SAME')
        mask_softmax = tf.nn.softmax(g_6)
        return g_6, mask_softmax


def decoder(input, reuse=False):
    """3D voxel decoder: maps the 1024-d feature to a 32^3 x 2 occupancy grid
    via a 4x4x4 transposed conv and four `residual_block`s (from
    Refinement_utils). Returns (logits, softmax)."""
    batch_size = int(input.get_shape()[0])
    strides = [1, 2, 2, 2, 1]  # NOTE(review): declared but unused below
    layer_id = 2
    print(input)
    with tf.variable_scope("decoder", reuse=reuse):
        input = tf.reshape(input, (batch_size, 1, 1, 1, 1024))
        print(input)
        wd = tf.get_variable("wd1", shape=[4, 4, 4, 256, 1024],
                             initializer=tf.contrib.layers.xavier_initializer())
        bd = tf.get_variable("bd1", shape=[256], initializer=tf.zeros_initializer())
        d_1 = tf.nn.conv3d_transpose(input, wd, (batch_size, 4, 4, 4, 256),
                                     strides=[1, 1, 1, 1, 1], padding='VALID')
        d_1 = tf.nn.bias_add(d_1, bd)
        d_1 = tf.nn.relu(d_1)
        d_2 = residual_block(d_1, layer_id)
        layer_id += 1
        d_3 = residual_block(d_2, layer_id)
        layer_id += 1
        d_4 = residual_block(d_3, layer_id)
        layer_id += 1
        # Last block keeps the resolution (unpool disabled).
        d_5 = residual_block(d_4, layer_id, 3, unpool=False)
        layer_id += 1
        last_channel = int(d_5.shape[-1])
        print('d1', d_1)
        print('d2', d_2)
        print('d3', d_3)
        print('d4', d_4)
        print('d5', d_5)
        wd = tf.get_variable("wd6", shape=[3, 3, 3, 2, last_channel],
                             initializer=tf.contrib.layers.xavier_initializer())
        res = tf.nn.conv3d_transpose(d_5, wd, (batch_size, 32, 32, 32, 2),
                                     strides=[1, 1, 1, 1, 1], padding='SAME')
        res_softmax = tf.nn.softmax(res)
        print('d6', res)
        return res, res_softmax


def syn_model():
    """Builds the inference graph for the synthetic-data model (R2N2-style
    projection). Returns (coarse voxels, refined voxels, image placeholder)."""
    cates = ['03001627']  # The chair category
    y_vectors = tf.placeholder(shape=[1, img_w, img_h, 3], dtype=tf.float32, name='all_Images')
    with tf.variable_scope('voxel'):
        feature, feature_map, euler, st, shortcuts = encoder(y_vectors, reuse=False)
        voxels, voxels_softmax_before = decoder(feature, reuse=False)
    with tf.variable_scope("mask"):
        feature, feature_map, _, _, shortcuts = encoder(y_vectors, reuse=False)
        mask, mask_softmax = generator(feature_map, shortcuts, reuse=False)
    feature, feature_map, shortcuts = encoder_angle(y_vectors, reuse=False)
    voxel_after_dic = {}
    for i, cate in enumerate(cates):
        # Refinement weights are created on the first category, reused after.
        if i == 0:
            reuse = False
        else:
            reuse = True
        with tf.variable_scope("angles_trans", reuse=False):
            # Per-category pose heads: 1024 -> 512 -> 3 Euler angles,
            # and 1024 -> 3 scale/translation.
            w_e_1 = tf.get_variable("w_euler_0_%s" % cate, shape=[1024, 512],
                                    initializer=tf.contrib.layers.xavier_initializer())
            e_1 = lrelu(tf.matmul(feature, w_e_1))
            w_e_2 = tf.get_variable("w_euler_1_%s" % cate, shape=[512, 3],
                                    initializer=tf.contrib.layers.xavier_initializer())
            euler = tf.matmul(e_1, w_e_2)
            w_st = tf.get_variable('w_ft_%s' % cate, shape=[1024, 3],
                                   initializer=tf.contrib.layers.xavier_initializer())
            st = tf.matmul(feature, w_st)
        # Project the predicted 2D mask into the voxel grid using the
        # predicted pose (R2N2 camera convention helpers).
        rotation_matrices = get_rotation_matrix_r2n2(euler)
        mask_indexs = scale_trans_r2n2(st)
        projection = cast(mask_softmax[..., 0], mask_indexs, rotation_matrices=rotation_matrices)
        # Combine coarse voxels (c1) and projection (c2) plus their set
        # differences as a 4-channel input to the refinement network.
        c1 = voxels_softmax_before[..., 0]
        c2 = projection
        c3 = c1 - c1 * c2
        c4 = c2 - c1 * c2
        feedin = tf.stack([c1, c2, c3, c4], axis=4)
        feature_vector, shortcuts = refine_encoder(feedin, reuse=reuse)
        voxels, voxels_softmax_after = refine_decoder(feature_vector, shortcuts, reuse=reuse)
        voxel_after_dic[cate] = voxels_softmax_after[..., 0]
    return voxels_softmax_before[..., 0], voxel_after_dic['03001627'], y_vectors


def real_model():
    """Same graph as `syn_model` but using the PASCAL VOC 3D+ camera
    convention helpers (scale x10 on the first st component, mask rotation)."""
    cates = ['03001627']  # The chair category
    y_vectors = tf.placeholder(shape=[1, img_w, img_h, 3], dtype=tf.float32, name='all_Images')
    with tf.variable_scope('voxel'):
        feature, feature_map, euler, st, shortcuts = encoder(y_vectors, reuse=False)
        voxels, voxels_softmax_before = decoder(feature, reuse=False)
    with tf.variable_scope("mask"):
        feature, feature_map, _, _, shortcuts = encoder(y_vectors, reuse=False)
        mask, mask_softmax = generator(feature_map, shortcuts, reuse=False)
    feature, feature_map, shortcuts = encoder_angle(y_vectors, reuse=False)
    voxel_after_dic = {}
    for i, cate in enumerate(cates):
        if i == 0:
            reuse = False
        else:
            reuse = True
        with tf.variable_scope("angles_trans", reuse=False):
            w_e_1 = tf.get_variable("w_euler_0_%s" % cate, shape=[1024, 512],
                                    initializer=tf.contrib.layers.xavier_initializer())
            e_1 = lrelu(tf.matmul(feature, w_e_1))
            w_e_2 = tf.get_variable("w_euler_1_%s" % cate, shape=[512, 3],
                                    initializer=tf.contrib.layers.xavier_initializer())
            euler = tf.matmul(e_1, w_e_2)
            w_st = tf.get_variable('w_ft_%s' % cate, shape=[1024, 3],
                                   initializer=tf.contrib.layers.xavier_initializer())
            st = tf.matmul(feature, w_st)
        # VOC convention: rescale the first st component before projection.
        st = tf.stack([st[..., 0] * 10, st[..., 1], st[..., 2]], axis=-1)
        rotation_matrices = get_rotation_matrix_voc(euler)
        mask_indexs = scale_trans_voc(st)
        masks = rotate_mask_voc(mask_softmax[..., 0], euler)
        projection = cast(masks, mask_indexs, rotation_matrices=rotation_matrices)
        c1 = voxels_softmax_before[..., 0]
        c2 = projection
        c3 = c1 - c1 * c2
        c4 = c2 - c1 * c2
        feedin = tf.stack([c1, c2, c3, c4], axis=4)
        feature_vector, shortcuts = refine_encoder(feedin, reuse=reuse)
        voxels, voxels_softmax_after = refine_decoder(feature_vector, shortcuts, reuse=reuse)
        voxel_after_dic[cate] = voxels_softmax_after[..., 0]
    return voxels_softmax_before[..., 0], voxel_after_dic['03001627'], y_vectors


'''
choose the model
'syn' is the model trained on synthetic data
'real' is the model finetuned on the PASCAL VOC 3D+ (real images) dataset
'''
def examples(model='syn'):
    """Runs inference on every PNG under <model>_data/chair and writes
    thresholded before/after voxel grids to res_<model>_data as .obj files.

    Parameters:
        model: 'syn' (synthetic-data weights) or 'real' (VOC-finetuned).
    """
    from voxel import voxel2obj
    cates = ['chair']
    if not model in ['syn', 'real']:
        print("The model '%s' is not in 'syn' or 'real'" % model)
        return
    weight_path = os.path.join('models', '%s_model' % model, 'model.cptk')
    source_path = '%s_data' % model
    dest_path = 'res_%s_data' % model
    if model == 'syn':
        before, after, img_input = syn_model()
    else:
        before, after, img_input = real_model()
    params = tf.trainable_variables()
    saver = tf.train.Saver(var_list=params)
    if not os.path.exists(dest_path):
        os.makedirs(dest_path)
    for cate in cates:
        files = [file for file in os.listdir(os.path.join(source_path, cate)) if file[-3:] == 'png']
        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            saver.restore(sess, weight_path)
            for file in files:
                filepath = os.path.join(source_path, 'chair', file)
                # Normalize the RGB image to [0, 1] and add a batch dimension.
                img = Image.open(filepath).resize((img_h, img_w))
                img = np.array(img).astype(np.float32)/255.
                img = img.reshape([1, img_h, img_w, 3])
                v_before, v_after = sess.run([before, after], feed_dict={img_input: img})
                # Binarize occupancies and export both stages as meshes.
                v_before = v_before.squeeze() > threshold
                v_after = v_after.squeeze() > threshold
                voxel2obj('%s/%s_before.obj' % (dest_path, file[:-4]), v_before)
                voxel2obj('%s/%s_after.obj' % (dest_path, file[:-4]), v_after)


if __name__ == '__main__':
    examples('real')
<gh_stars>1-10 from flask.ext.cache import Cache cache = Cache()
package com.example.sanja.mymessenger;

import java.io.Serializable;
import java.util.ArrayList;

/**
 * Serializable user profile for the messenger app, including the user's
 * friend list and pending friend requests.
 *
 * <p>NOTE(review): {@code status} and {@code isSelected} are deliberately (?)
 * excluded from both {@link #equals(Object)} and {@link #hashCode()} — they
 * look like transient UI/presence state; confirm before relying on equality.</p>
 *
 * Created by sanja on 6/21/2017.
 */
public class User implements Serializable{

    // Profile fields (package-private; accessed via getters/setters below).
    String fname;
    String lname;
    String gender;
    String uname;
    String email;
    String password;
    String uid;              // backend user id
    String imageUrl;         // avatar URL
    boolean isSelected;      // transient UI selection flag
    String status;           // presence/status text
    ArrayList<User> friendReq =new ArrayList<>();   // incoming friend requests
    ArrayList<User> myFriends = new ArrayList<>();  // accepted friends

    public String getFname() { return fname; }
    public void setFname(String fname) { this.fname = fname; }

    public String getLname() { return lname; }
    public void setLname(String lname) { this.lname = lname; }

    public String getGender() { return gender; }
    public void setGender(String gender) { this.gender = gender; }

    public String getUname() { return uname; }
    public void setUname(String uname) { this.uname = uname; }

    public String getEmail() { return email; }
    public void setEmail(String email) { this.email = email; }

    public String getPassword() { return password; }
    public void setPassword(String password) { this.password = password; }

    public String getUid() { return uid; }
    public void setUid(String uid) { this.uid = uid; }

    public String getImageUrl() { return imageUrl; }
    public void setImageUrl(String imageUrl) { this.imageUrl = imageUrl; }

    public ArrayList<User> getFriendReq() { return friendReq; }
    public void setFriendReq(ArrayList<User> friendReq) { this.friendReq = friendReq; }

    public ArrayList<User> getMyFriends() { return myFriends; }
    public void setMyFriends(ArrayList<User> myFriends) { this.myFriends = myFriends; }

    public String getStatus() { return status; }
    public void setStatus(String status) { this.status = status; }

    public boolean isSelected() { return isSelected; }
    public void setSelected(boolean selected) { isSelected = selected; }

    @Override
    public String toString() {
        return "User{" +
                "fname='" + fname + '\'' +
                ", lname='" + lname + '\'' +
                ", gender='" + gender + '\'' +
                ", uname='" + uname + '\'' +
                ", email='" + email + '\'' +
                ", password='" + password + '\'' +
                ", uid='" + uid + '\'' +
                ", imageUrl='" + imageUrl + '\'' +
                ", status='" + status + '\'' +
                ", friendReq=" + friendReq +
                ", myFriends=" + myFriends +
                '}';
    }

    // Field-by-field equality over all profile fields and both lists;
    // status and isSelected are intentionally not compared (see class doc).
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof User)) return false;

        User user = (User) o;

        if (getFname() != null ? !getFname().equals(user.getFname()) : user.getFname() != null) return false;
        if (getLname() != null ? !getLname().equals(user.getLname()) : user.getLname() != null) return false;
        if (getGender() != null ? !getGender().equals(user.getGender()) : user.getGender() != null) return false;
        if (getUname() != null ? !getUname().equals(user.getUname()) : user.getUname() != null) return false;
        if (getEmail() != null ? !getEmail().equals(user.getEmail()) : user.getEmail() != null) return false;
        if (getPassword() != null ? !getPassword().equals(user.getPassword()) : user.getPassword() != null)
            return false;
        if (getUid() != null ? !getUid().equals(user.getUid()) : user.getUid() != null) return false;
        if (getImageUrl() != null ? !getImageUrl().equals(user.getImageUrl()) : user.getImageUrl() != null)
            return false;
        if (getFriendReq() != null ? !getFriendReq().equals(user.getFriendReq()) : user.getFriendReq() != null)
            return false;
        return getMyFriends() != null ? getMyFriends().equals(user.getMyFriends()) : user.getMyFriends() == null;
    }

    // Standard 31-multiplier hash over the same fields equals() compares,
    // keeping the equals/hashCode contract consistent.
    @Override
    public int hashCode() {
        int result = getFname() != null ? getFname().hashCode() : 0;
        result = 31 * result + (getLname() != null ? getLname().hashCode() : 0);
        result = 31 * result + (getGender() != null ? getGender().hashCode() : 0);
        result = 31 * result + (getUname() != null ? getUname().hashCode() : 0);
        result = 31 * result + (getEmail() != null ? getEmail().hashCode() : 0);
        result = 31 * result + (getPassword() != null ? getPassword().hashCode() : 0);
        result = 31 * result + (getUid() != null ? getUid().hashCode() : 0);
        result = 31 * result + (getImageUrl() != null ? getImageUrl().hashCode() : 0);
        result = 31 * result + (getFriendReq() != null ? getFriendReq().hashCode() : 0);
        result = 31 * result + (getMyFriends() != null ? getMyFriends().hashCode() : 0);
        return result;
    }
}
////////////////////////////////////////////////////////////////////////// // // pgAdmin III - PostgreSQL Tools // // Copyright (C) 2002 - 2016, The pgAdmin Development Team // This software is released under the PostgreSQL Licence // // gqbGraphsimple.cpp - A simple Implementation of the Graphic Interface for GQB // ////////////////////////////////////////////////////////////////////////// // App headers #include "pgAdmin3.h" // wxWindows headers #include <wx/wx.h> #include <wx/dcbuffer.h> // App headers #include "utils/sysSettings.h" #include "gqb/gqbGraphSimple.h" #include "gqb/gqbQueryObjs.h" // GQB Images #include "images/gqbColNotSel.pngc" #include "images/gqbColSel.pngc" gqbGraphSimple::gqbGraphSimple() { normalFont = settings->GetSystemFont(); TableTitleFont = settings->GetSystemFont(); TableTitleFont.SetWeight(wxFONTWEIGHT_BOLD); //ABDUL: 4 Sep 2021:BEGIN #if wxCHECK_VERSION(3, 1, 0) BackgroundLayer1 = wxBrush(wxColour(112, 112, 112), wxBRUSHSTYLE_SOLID); BackgroundLayer2 = wxBrush (wxColour(208, 208, 208), wxBRUSHSTYLE_SOLID); BackgroundTitle = wxBrush (wxColour(245, 245, 245), wxBRUSHSTYLE_SOLID); #else BackgroundLayer1 = wxBrush(wxColour(112, 112, 112), wxSOLID); BackgroundLayer2 = wxBrush (wxColour(208, 208, 208), wxSOLID); BackgroundTitle = wxBrush (wxColour(245, 245, 245), wxSOLID); #endif minTableWidth = 80; minTableHeight = 54; rowHeight = 0; //By default but this it's replaced by font metrics value rowLeftMargin = 14; rowRightMargin = 5; rowTopMargin = 1; lineClickThreshold = 7; //ABDUL: 4 Sep 2021:BEGIN #if wxCHECK_VERSION(3, 1, 0) selectedPen = wxPen(wxColour(0, 146, 195), 2, wxPENSTYLE_SOLID); selectedBrush = wxBrush(wxColour(0, 146, 195), wxBRUSHSTYLE_SOLID); #else selectedPen = wxPen(wxColour(0, 146, 195), 2, wxSOLID); selectedBrush = wxBrush(wxColour(0, 146, 195), wxSOLID); #endif imgSelBoxEmpty = *gqbColNotSel_png_bmp; imgSelBoxSelected = *gqbColSel_png_bmp; } // NOTES:(1) store values of width & height at queryTable. 
// (2) A font must be set on the device context before querying font metrics with GetTextExtent.

// Measures and draws one table box (title bar + one row per column, with a
// selection checkbox) at *origin, and stores the computed width/height back
// into queryTable so other routines (hit testing, anchors) can reuse them.
void gqbGraphSimple::drawTable(wxMemoryDC &bdc, wxPoint *origin, gqbQueryObject *queryTable)
{
#if wxCHECK_VERSION(2, 9, 0)
    wxCoord w = 0, h = 0, height = 0, width = 0, margin = 5;
#else
    long w = 0, h = 0, height = 0, width = 0, margin = 5;
#endif

    // Lazily measure the row height once, from the bold title font.
    if(!rowHeight)
    {
        bdc.SetFont(TableTitleFont);
        bdc.GetTextExtent(wxT("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxtz"), &w, &h);
        rowHeight = h;
    }

    // Title metrics.
    bdc.SetFont(TableTitleFont);
    height += rowHeight + rowTopMargin;

    // Width of the table title, with or without an alias suffix " (alias)".
    if(queryTable->getAlias().length() > 0)
        bdc.GetTextExtent(queryTable->getName() + wxT(" (") + queryTable->getAlias() + wxT(")"), &w, &h);
    else
        bdc.GetTextExtent(queryTable->getName(), &w, &h);
    width = rowLeftMargin + w + rowRightMargin;

    // Column metrics: fixed row height for consistent vertical spacing,
    // width grows to fit the widest column name.
    bdc.SetFont(normalFont);
    // Don't use the h value from font metrics, to keep a consistent vertical separation between columns.
    height += rowHeight * queryTable->parent->countCols() + rowTopMargin * queryTable->parent->countCols();
    gqbIteratorBase *iterator = queryTable->parent->createColumnsIterator();
    while(iterator->HasNext())
    {
        gqbColumn *tmp = (gqbColumn *)iterator->Next();
        bdc.GetTextExtent(tmp->getName(), &w, &h);
        if((rowLeftMargin + w + rowRightMargin) > width)
            width = rowLeftMargin + w + rowRightMargin;
    }
    // Don't delete the iterator here; it is reused below to draw the columns.

    // Store the table size in the object model (temporary values for the object
    // representation; the view updates the model directly here because the
    // controller relies on these sizes for its internal operations).
    if( (height + 2) < minTableHeight)  // +2 accounts for the background layers added below
    {
        queryTable->setHeight(minTableHeight);
        height = minTableHeight;
    }
    else
        queryTable->setHeight(height + 2);

    if( (width + 2) < minTableWidth)
    {
        queryTable->setWidth(minTableWidth);
        width = minTableWidth;
    }
    else
        queryTable->setWidth(width + 2);

    // Decorate the table: two offset background layers give a drop-shadow look.
    bdc.SetPen(*wxTRANSPARENT_PEN);

    // Second (outer, light grey) layer.
    bdc.SetBrush(BackgroundLayer2);
    bdc.DrawRectangle(wxRect(wxPoint(origin->x, origin->y), wxSize(width + 2, height + 2)));

    // Third (dark grey) layer.
    bdc.SetBrush(BackgroundLayer1);
    bdc.DrawRectangle(wxRect(wxPoint(origin->x, origin->y), wxSize(width + 1, height + 1)));

    // Real frame layer: white body, highlighted border when the table is selected.
    bdc.SetBrush(*wxWHITE_BRUSH);
    if(queryTable->getSelected())
    {
        bdc.SetPen(selectedPen);
    }
    else
    {
        bdc.SetPen(*wxBLACK_PEN);
    }
    bdc.DrawRectangle(wxRect(wxPoint(origin->x, origin->y), wxSize(width, height)));

    // Title bar: tinted background, bold "name (alias)" text.
    bdc.SetBrush(BackgroundTitle);
    bdc.DrawRectangle(wxRect(wxPoint(origin->x, origin->y), wxSize(width, rowHeight + rowTopMargin)));
    bdc.SetFont(TableTitleFont);
    if(queryTable->getAlias().length() > 0)
        bdc.DrawText(queryTable->getName() + wxT(" (") + queryTable->getAlias() + wxT(")"), origin->x + margin, origin->y + rowTopMargin);
    else
        bdc.DrawText(queryTable->getName(), origin->x + margin, origin->y + rowTopMargin);
    bdc.SetFont(normalFont);

    // GQB-TODO: in the future reuse the iterator a little more by creating it inside
    // the Query or Table object and only deleting it when the query object is deleted.

    // Draw the column rows: checkbox bitmap (selected/empty) plus the column
    // name, red when the column is part of the query selection.
    height = rowHeight + rowTopMargin;
    iterator->ResetIterator();
    while(iterator->HasNext())
    {
        gqbColumn *tmp = (gqbColumn *)iterator->Next();
        if(queryTable->existsColumn(tmp))
        {
            bdc.SetTextForeground(* wxRED);
            bdc.SetFont(normalFont);
            bdc.DrawBitmap(imgSelBoxSelected, origin->x + 3, origin->y + height, true);
        }
        else
        {
            bdc.SetFont(normalFont);
            bdc.DrawBitmap(imgSelBoxEmpty, origin->x + 3, origin->y + height, true);
        }
        bdc.DrawText(tmp->getName(), origin->x + rowLeftMargin, origin->y + height);
        bdc.SetTextForeground( *wxBLACK);
        height += rowHeight + rowTopMargin;
    }
    delete iterator;    // now it can be deleted because it's not needed anymore
}

// Returns the column whose checkbox area (x in (0, sensibility) relative to the
// table's left edge) was clicked, or NULL. Row 0 is the title, so column rows
// start at index 1 and are mapped back to column index colPos - 1.
gqbColumn *gqbGraphSimple::getColumnAtPosition(wxPoint *clickPoint, gqbQueryObject *queryTable, int sensibility)
{
    int countCols = queryTable->parent->countCols(), colPos = -1;
    if(countCols > 0)   // at least one column exists
    {
        colPos = (clickPoint->y - queryTable->position.y) / (rowHeight + rowTopMargin);
    }
    int x = clickPoint->x - queryTable->position.x;
    if( (x > 0 && x < sensibility) && colPos > 0)   // because row 0 is the title
        return queryTable->parent->getColumnAtIndex(colPos - 1);
    else
        return NULL;
}

// Draws the temporary (rubber-band) join line while the user is dragging:
// picks anchor sides from the relative x positions and delegates to drawJoin,
// always rendered as selected with an equality join symbol.
void gqbGraphSimple::drawTempJoinLine(wxMemoryDC &bdc, wxPoint &origin, wxPoint &end)
{
    wxPoint anchorsUsed = wxPoint(0, 0);
    if(origin.x < end.x)
    {
        anchorsUsed.x = 1;
        anchorsUsed.y = -1;
    }
    else
    {
        anchorsUsed.x = -1;
        anchorsUsed.y = 1;
    }
    drawJoin(bdc, origin, end, anchorsUsed, true, _equally);
}

// Draws a join between two anchor points: an 8x8 square at each anchor, a short
// horizontal stub (20 px) out of each table on the side given by anchorUsed
// ([-1 == left] [1 == right]; x is the origin side, y the destination side),
// a connecting line between the stub ends, and the join-kind symbol at the
// middle of that line.
void gqbGraphSimple::drawJoin(wxMemoryDC &bdc, wxPoint &origin, wxPoint &dest, wxPoint &anchorUsed, bool selected = false, type_Join joinKind = _equally)
{
    wxPoint origin2 = origin;
    wxPoint dest2 = dest;

    if(selected)
    {
        bdc.SetPen(selectedPen);
        bdc.SetBrush(selectedBrush);
    }
    else
    {
        bdc.SetPen(*wxBLACK_PEN);
        bdc.SetBrush(*wxBLACK_BRUSH);
    }

    // GQB-TODO: optimize this if possible; both branches are near-mirrors.
    if(anchorUsed.x == 1)
    {
        bdc.DrawRectangle(origin.x, origin.y - 4, 8, 8);
        origin2.x += 20;
    }
    else
    {
        bdc.DrawRectangle(origin.x - 8, origin.y - 4, 8, 8);
        origin2.x -= 20;
    }

    if(anchorUsed.y == 1)
    {
        bdc.DrawRectangle(dest.x, dest.y - 4, 8, 8);
        dest2.x += 20;
    }
    else
    {
        bdc.DrawRectangle(dest.x - 8, dest.y - 4, 8, 8);
        dest2.x -= 20;
    }

    bdc.DrawLine(origin, origin2);
    bdc.DrawLine(dest, dest2);
    bdc.DrawLine(origin2, dest2);

    // Draw the symbol for the type of join at the middle of the connecting line.
    switch(joinKind)
    {
        case _equally:
            bdc.DrawText(wxT("="), findLineMiddle(origin2, dest2));
            break;
        case _lesser:
            bdc.DrawText(wxT("<"), findLineMiddle(origin2, dest2));
            break;
        case _greater:
            bdc.DrawText(wxT(">"), findLineMiddle(origin2, dest2));
            break;
        case _equlesser:
            bdc.DrawText(wxT("<="), findLineMiddle(origin2, dest2));
            break;
        case _equgreater:
            bdc.DrawText(wxT(">="), findLineMiddle(origin2, dest2));
            break;
    };
}

// Returns true if pt lies within lineClickThreshold of any of the three
// segments that make up a drawn join (origin stub, destination stub, or the
// connecting line between the stub ends); false otherwise.
bool gqbGraphSimple::clickOnJoin(gqbQueryJoin *join, wxPoint &pt, wxPoint &origin, wxPoint &dest)
{
    wxPoint origin2 = origin;
    wxPoint dest2 = dest;

    // Recreate the stub endpoints exactly as drawJoin computes them.
    if(join->getAnchorsUsed().x == 1)
    {
        origin2.x += 20;
    }
    else
    {
        origin2.x -= 20;
    }

    if(join->getAnchorsUsed().y == 1)
    {
        dest2.x += 20;
    }
    else
    {
        dest2.x -= 20;
    }

    // Check origin anchor stub.
    bool value1 = insideLine(pt, origin, origin2, lineClickThreshold);
    // Check destination anchor stub.
    bool value2 = insideLine(pt, dest, dest2, lineClickThreshold);
    // Check the line between both tables.
    bool value3 = insideLine(pt, origin2, dest2, lineClickThreshold);

    if(value1 || value2 || value3)
        return true;
    else
        return false;
}

// Returns true when pt is closer than threshold (pixels) to the segment p1-p2.
bool gqbGraphSimple::insideLine(wxPoint &pt, wxPoint &p1, wxPoint &p2, int threshold = 7)
{
    bool value = false;
    if(distanceToLine(pt, p1, p2) < threshold)
    {
        value = true;
    }
    return value;
}

// Returns the midpoint of the segment p1-p2 (used to place the join symbol).
wxPoint gqbGraphSimple::findLineMiddle(wxPoint p1, wxPoint p2)
{
    int middleX = -1, middleY = -1;

    int dx = p2.x - p1.x;
    if(dx > 0)          // p1 at left
    {
        middleX = dx / 2 + p1.x;
    }
    else                // p1 at right
    {
        middleX = p1.x + dx / 2;
    }

    int dy = p2.y - p1.y;
    if(dy > 0)          // p1 is above
    {
        middleY = dy / 2 + p1.y;
    }
    else                // p1 is below
    {
        middleY = p1.y + dy / 2;
    }

    if(dy == 0)
        middleY = p1.y;
    if(dx == 0)
        middleX = p1.x;

    return wxPoint(middleX, middleY);
}

// Distance from point pt to the segment p1-p2, computed by projecting pt onto
// the segment (dot products clamp the projection to the segment's ends).
double gqbGraphSimple::distanceToLine(wxPoint pt, wxPoint p1, wxPoint p2)
{
    // Translate so p1 is the origin.
    p2.x -= p1.x;
    p2.y -= p1.y;
    pt.x -= p1.x;
    pt.y -= p1.y;

    double dprod = pt.x * p2.x + pt.y * p2.y;
    double pLenSq;      // squared length of the projection of pt onto p2
    if(dprod <= 0.0)
    {
        pLenSq = 0.0;   // pt falls before p1; nearest point is p1 itself
    }
    else
    {
        // Mirror pt around p2 and project again to clamp at the far end.
        pt.x = p2.x - pt.x;
        pt.y = p2.y - pt.y;
        dprod = pt.x * p2.x + pt.y * p2.y;
        if(dprod <= 0.0)
        {
            pLenSq = 0.0;   // pt falls past p2; nearest point is p2 itself
        }
        else
        {
            pLenSq = dprod * dprod / (p2.x * p2.x + p2.y * p2.y);
        }
    }

    double lengthSq = pt.x * pt.x + pt.y * pt.y - pLenSq;
    if(lengthSq < 0)
    {
        lengthSq = 0;   // guard against tiny negative values from rounding
    }
    double length = sqrt(lengthSq);
    return length;
}

// Computes and stores the anchor points [source, destination] for a join.
// Each anchor sits on the left or right edge of its table (whichever faces the
// other table) at the vertical center of the joined column's row; `use`
// records the side chosen: [-1 == left] [1 == right], x->origin y->destination.
void gqbGraphSimple::calcAnchorPoint(gqbQueryJoin *join)
{
    int index, x, y;
    wxPoint use;

    int sx = join->getSourceQTable()->position.x;
    int sy = join->getSourceQTable()->position.y;
    int dx = join->getDestQTable()->position.x;
    int dy = join->getDestQTable()->position.y;

    // Source anchor (+1 skips the title row).
    index = join->getSourceQTable()->getColumnIndex(join->getSCol()) + 1;
    if(sx < dx)
    {
        x = sx + join->getSourceQTable()->getWidth();
        use.x = 1;
    }
    else
    {
        x = sx;
        use.x = -1;
    }
    y = sy + index * (rowHeight + rowTopMargin) + ((rowHeight + rowTopMargin) / 2);
    join->setSourceAnchor(wxPoint(x, y));

    // Destination anchor.
    index = join->getDestQTable()->getColumnIndex(join->getDCol()) + 1;
    if(dx < sx)
    {
        x = dx + join->getDestQTable()->getWidth();
        use.y = 1;
    }
    else
    {
        x = dx;
        use.y = -1;
    }
    y = dy + index * (rowHeight + rowTopMargin) + ((rowHeight + rowTopMargin) / 2);
    join->setDestAnchor(wxPoint(x, y));
    join->setAnchorsUsed(use);
}

// Updates the position of a table after a move and recomputes the anchor
// points of every join attached to it (both the joins it owns and the joins
// registered from other tables).
void gqbGraphSimple::UpdatePosObject(gqbQueryObject *queryTable, int x, int y, int cursorAdjustment)
{
    x -= cursorAdjustment;  // shift so the table doesn't jump under the cursor
    y -= rowHeight / 2;

    // Update the table's position; do not allow it to move past the (0, 0) corner.
    queryTable->position.x = x > 0 ? x : 0;
    queryTable->position.y = y > 0 ? y : 0;

    // Recompute anchor points of joins that originate from this table.
    if(queryTable->getHaveJoins())
    {
        gqbIteratorBase *j = queryTable->createJoinsIterator();
        while(j->HasNext())
        {
            gqbQueryJoin *tmp = (gqbQueryJoin *)j->Next();
            calcAnchorPoint(tmp);
        }
        delete j;
    }

    // Recompute anchor points of joins that come from other tables.
    if(queryTable->getHaveRegJoins())
    {
        gqbIteratorBase *r = queryTable->createRegJoinsIterator();
        while(r->HasNext())
        {
            gqbQueryJoin *tmp = (gqbQueryJoin *)r->Next();
            calcAnchorPoint(tmp);
        }
        delete r;
    }
}

// Returns the measured row height (also the height of the title row);
// 0 until the first drawTable call measures the font.
int gqbGraphSimple::getTitleRowHeight()
{
    return rowHeight;
}
/*
 * Copyright 2014-2016 CyberVision, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kaaproject.kaa.server.admin.client.mvp.view.header;

import org.kaaproject.kaa.server.admin.client.KaaAdminResources.KaaAdminStyle;
import org.kaaproject.kaa.server.admin.client.mvp.view.HeaderView;
import org.kaaproject.kaa.server.admin.client.mvp.view.widget.ActionsLabel;
import org.kaaproject.kaa.server.admin.client.util.Utils;

import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.Widget;

/**
 * GWT UiBinder implementation of the admin-console header bar: shows the
 * current username, a settings menu, a sign-out label, and a title that
 * navigates home when clicked. Presenter callbacks are wired through
 * {@link #setPresenter(Presenter)}.
 */
public class HeaderViewImpl extends Composite implements HeaderView {

    interface HeaderViewImplUiBinder extends UiBinder<Widget, HeaderViewImpl> { }
    private static HeaderViewImplUiBinder uiBinder = GWT.create(HeaderViewImplUiBinder.class);

    // Presenter that handles navigation; set after construction via setPresenter().
    private Presenter presenter;

    @UiField Label usernameLabel;
    @UiField Label signoutLabel;
    // provided=true fields must be instantiated before createAndBindUi() runs.
    @UiField(provided=true) final ActionsLabel settingsLabel;
    @UiField Label title;
    @UiField(provided=true) final KaaAdminStyle kaaAdminStyle;

    /**
     * Builds the provided widgets/styles first (UiBinder requirement), binds
     * the UI, then attaches the localized sign-out text and the click handler
     * that sends the user home when the title is clicked.
     */
    public HeaderViewImpl() {
        settingsLabel = new ActionsLabel(Utils.constants.settings());
        kaaAdminStyle = Utils.kaaAdminStyle;
        settingsLabel.setStyleName(kaaAdminStyle.bAppHeaderMenu());
        initWidget(uiBinder.createAndBindUi(this));
        signoutLabel.setText(Utils.constants.signOut());
        title.addClickHandler(new ClickHandler() {
            @Override
            public void onClick(ClickEvent event) {
                // NOTE(review): presenter may still be null if the title is
                // clicked before setPresenter() — confirm the MVP lifecycle
                // guarantees setPresenter() runs first.
                presenter.goToHome();
            }
        });
    }

    @Override
    public void setPresenter(Presenter presenter) {
        this.presenter = presenter;
    }

    @Override
    public Label getUsernameLabel() {
        return usernameLabel;
    }

    @Override
    public Label getSignoutLabel() {
        return signoutLabel;
    }

    @Override
    public ActionsLabel getSettingsLabel() {
        return settingsLabel;
    }

}
<filename>TelegramApi/src/main/java/org/telegram/telegrambots/api/methods/AnswerInlineQuery.java<gh_stars>0
package org.telegram.telegrambots.api.methods;

import java.util.List;

import javax.ws.rs.Path;

import org.telegram.telegrambots.api.methods.send.ApiMethod;
import org.telegram.telegrambots.api.objects.inlinequery.result.InlineQueryResult;

import com.fasterxml.jackson.annotation.JsonProperty;

import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.ToString;

/**
 *
 * @author <NAME>
 * @author <NAME>
 * @brief Use this method to send answers to an inline query. On success, True is returned.
 * @date 2016-11-21
 */
@Path("answerInlineQuery")
@Data
@ToString
@EqualsAndHashCode(callSuper=false)
public class AnswerInlineQuery extends ApiMethod{
    @JsonProperty("inline_query_id")
    String inlineQueryId; ///< Unique identifier for the answered query
    List<InlineQueryResult> results; ///< A JSON-serialized array of results for the inline query
    @JsonProperty("cache_time")
    Integer cacheTime; ///< Optional. The maximum amount of time (seconds, per Bot API convention — confirm) the result of the inline query may be cached on the server
    @JsonProperty("is_personal")
    Boolean isPersonal; ///< Optional. Pass True if results may be cached on the server side only for the user that sent the query. By default, results may be returned to any user who sends the same query
    @JsonProperty("next_offset")
    String nextOffset; ///< Optional. Pass the offset that a client should send in the next query with the same text to receive more results. Pass an empty string if there are no more results or if you don't support pagination. Offset length can't exceed 64 bytes
    @JsonProperty("switch_pm_text")
    String switchPmText; ///< Optional. If passed, clients will display a button with the specified text that switches the user to a chat with the bot and sends the bot a start message with the parameter switch_pm_parameter
    @JsonProperty("switch_pm_parameter")
    String switchPmParameter; ///< Optional. Parameter for the start message sent to the bot when the user presses the switch button
}
// Sum the elements of a fixed integer array and print the total.
int[] values = {2, 5, 7, 9};
int total = 0;
for (int value : values) {
    total += value;
}
System.out.println("Sum = " + total);
#!/usr/bin/bash # Script to convert Password Agent's file (exported to XML) # to format of KeePass XML PA_FILE="$1" if test -z "$PA_FILE" then PA_FILE="-" fi xmllint --format "$PA_FILE" --output - | \ iconv --from-code Windows-1251 --to-code utf-8 | \ sed -r -e '1d' \ -e '2i<!DOCTYPE KEEPASSX_DATABASE>' \ -e 's#^(</?)data.*>#\1database>#' \ -e 's#(</?)name>#\1title>#g' \ -e 's#(</?)account>#\1username>#g' \ -e 's#(</?)link(/?>)#\1url\2#g' \ -e 's#(</?)note(/?>)#\1comment\2#g' \ -e 's#(<date_.+?>)([0-9]+)\.([0-9]+)\.([0-9]+)#\1\4-\3-\2#' \ -e 's#(</?)date_added>#\1creation>#g' \ -e 's#(</?)date_modified>#\1last_mod>#g' \ -e 's#<date_expire/>#<expire>Never</expire>#' \ -e 's#(</?)date_expire(/?>)#\1expire\2#g' \
// Minimal documents REST API built on Express and Mongoose.

// Create the Express server
const express = require('express');
const app = express();

// Parse JSON request bodies. BUG FIX: without this middleware, req.body is
// undefined in the POST /documents handler, so every create request failed.
app.use(express.json());

// Connect to the MongoDB (replace the placeholder with a real connection string)
const mongoose = require('mongoose');
mongoose.connect('YOUR_MONGO_URI');

// Define the document schema
const documentSchema = mongoose.Schema({
  title: { type: String, required: true },     // display title
  content: { type: String, required: true },   // document body text
  owner_id: String,                            // id of the creating user
  collaborators: [String]                      // ids of users with shared access
});

// Create the Mongoose model
const Document = mongoose.model('Document', documentSchema);

// GET /documents — return every stored document as JSON.
app.get('/documents', (req, res) => {
  Document.find((err, docs) => {
    if (err) return res.status(400).end(err.message);
    res.json(docs);
  });
});

// POST /documents — create a document from the JSON request body.
app.post('/documents', (req, res) => {
  const doc = new Document(req.body);
  doc.save((err, doc) => {
    if (err) return res.status(400).end(err.message);
    res.json(doc);
  });
});

// Start the server
const port = process.env.PORT || 3000;
app.listen(port, () => console.log(`Listening on port ${port}...`));
#!/bin/bash
# Install the package with setuptools:
#   --single-version-externally-managed  bypasses egg-style installation so the
#                                        package manager stays in control
#   --record=record.txt                  writes every installed file path to
#                                        record.txt so the install can be undone
# $PYTHON is expected to be provided by the build environment — TODO confirm
# (this flag combination is typical of conda-build recipes).
$PYTHON setup.py install --single-version-externally-managed --record=record.txt # Python command to install the script.
#!/bin/bash
#This is a one liner to display the date of acquisition from the header of a dicom file
# Usage: <script> <dicom-file>
# Queries DICOM tag group 8, element 20 — presumably (0008,0020), Study Date;
# confirm against the mri_probedicom documentation.
mri_probedicom --i $1 --t 8 20
#!/bin/bash
# Build the local Postgres image and run it detached with automatic restarts,
# publishing port 5432, loading environment from ./env.list, and attaching it
# to the swarm-net network.
docker build -t swarmpostgres .

# BUG FIX: the original had no space before the line-continuation after
# "--network swarm-net", so backslash-newline joined the network name and the
# image name into one word ("swarm-netswarmpostgres:latest") and docker run failed.
docker run -d --name swarmpostgres --restart always \
    -p 5432:5432 \
    --env-file ./env.list \
    --network swarm-net \
    swarmpostgres:latest
<reponame>duncangrant/incubator-brooklyn
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package brooklyn.util.guava;

import org.testng.Assert;
import org.testng.annotations.Test;

import brooklyn.util.guava.IfFunctions.IfFunctionBuilder;

import com.google.common.base.Function;
import com.google.common.base.Predicates;
import com.google.common.base.Suppliers;

/**
 * Tests for {@link IfFunctions}: building Boolean-to-String functions from
 * if/else-style clauses. Most cases are verified through {@link #checkTF},
 * which asserts the mapping true->"T", false->"F", null->defaultValue.
 */
public class IfFunctionsTest {

    @Test
    public void testCommonUsage() {
        checkTF(IfFunctions.ifEquals(false).value("F").ifEquals(true).value("T").defaultValue("?").build(), "?");
    }

    // Builders are usable as Functions directly, without an explicit build().
    @Test
    public void testNoBuilder() {
        checkTF(IfFunctions.ifEquals(false).value("F").ifEquals(true).value("T").defaultValue("?"), "?");
    }

    @Test
    public void testPredicateAndSupplier() {
        // we cannot use checkTF here as an IntelliJ issue causes the project to fail to launch, as IntelliJ does not
        // recognize the return value of IfFunctions.ifPredicate as Function<Boolean, String>
        Function function = IfFunctions.ifPredicate(Predicates.equalTo(false)).get(Suppliers.ofInstance("F"))
                .ifEquals(true).value("T").defaultGet(Suppliers.ofInstance("?")).build();
        Assert.assertEquals(function.apply(true), "T");
        Assert.assertEquals(function.apply(false), "F");
        Assert.assertEquals(function.apply(null), "?");
    }

    // Without a default clause, an unmatched input maps to null.
    @Test
    public void testNoDefault() {
        checkTF(IfFunctions.ifEquals(false).value("F").ifEquals(true).value("T").build(), null);
    }

    @Test
    public void testNotEqual() {
        checkTF(IfFunctions.ifNotEquals(false).value("T").defaultValue("F").build(), "T");
    }

    // A clause result can be computed by a Function applied to the input.
    @Test
    public void testFunction() {
        checkTF(IfFunctions.ifNotEquals((Boolean)null).apply(new Function<Boolean, String>() {
            @Override
            public String apply(Boolean input) {
                return input.toString().toUpperCase().substring(0, 1);
            }
        }).defaultValue("?"), "?");
    }

    @Test
    public void testWithCast() {
        Function<Boolean, String> f = IfFunctions.ifEquals(false).value("F").ifEquals(true).value("T").defaultValue("?").build();
        checkTF(f, "?");
    }

    @Test
    public void testWithoutCast() {
        Function<Boolean, String> f = IfFunctions.newInstance(Boolean.class, String.class).ifEquals(false).value("F").ifEquals(true).value("T").defaultValue("?").build();
        checkTF(f, "?");
    }

    // A later clause for the same key replaces the earlier one.
    @Test
    public void testSupportsReplace() {
        checkTF(IfFunctions.ifEquals(false).value("false").ifEquals(false).value("F").ifEquals(true).value("T").defaultValue("?").build(), "?");
    }

    // Adding clauses to a builder yields a new builder; the original is unchanged.
    @Test
    public void testIsImmutableAndSupportsReplace() {
        IfFunctionBuilder<Boolean, String> f = IfFunctions.ifEquals(false).value("F").ifEquals(true).value("T").defaultValue("?");
        IfFunctionBuilder<Boolean, String> f2 = f.ifEquals(false).value("false").defaultValue("X");
        IfFunctionBuilder<Boolean, String> f3 = f2.ifEquals(false).value("F");
        checkTF(f, "?");
        checkTF(f3, "X");
        Assert.assertEquals(f2.apply(false), "false");
    }

    // Shared assertion: f maps true->"T", false->"F", and null->defaultValue.
    static void checkTF(Function<Boolean, String> f, Object defaultValue) {
        Assert.assertEquals(f.apply(true), "T");
        Assert.assertEquals(f.apply(false), "F");
        Assert.assertEquals(f.apply(null), defaultValue);
    }

}
# Launch a continual self-supervised training run (MoCo v2+ on ImageNet-100,
# split by class into 5 tasks, training task index 1) with a contrastive
# distiller. Expects $DATA_DIR and $PRETRAINED_PATH to be set by the caller;
# uses 2 GPUs with DDP, DALI data loading, fp16, and logs to Weights & Biases
# (project ever-learn). Validation is effectively disabled
# (--check_val_every_n_epoch 9999); checkpoints are saved.
python3 main_continual.py \
    --dataset imagenet100 \
    --encoder resnet18 \
    --data_dir $DATA_DIR \
    --train_dir imagenet-100/train \
    --val_dir imagenet-100/val \
    --split_strategy class \
    --max_epochs 400 \
    --num_tasks 5 \
    --task_idx 1 \
    --gpus 0,1 \
    --accelerator ddp \
    --sync_batchnorm \
    --num_workers 5 \
    --precision 16 \
    --optimizer sgd \
    --scheduler cosine \
    --lr 0.4 \
    --classifier_lr 0.3 \
    --weight_decay 1e-4 \
    --batch_size 128 \
    --brightness 0.4 \
    --contrast 0.4 \
    --saturation 0.4 \
    --hue 0.1 \
    --dali \
    --check_val_every_n_epoch 9999 \
    --name mocov2plus-imagenet100-5T-contrastive \
    --project ever-learn \
    --entity unitn-mhug \
    --wandb \
    --save_checkpoint \
    --method mocov2plus \
    --proj_hidden_dim 2048 \
    --queue_size 65536 \
    --temperature 0.2 \
    --base_tau_momentum 0.99 \
    --final_tau_momentum 0.999 \
    --momentum_classifier \
    --distiller contrastive \
    --pretrained_model $PRETRAINED_PATH
# frozen_string_literal: true module Qernel module NodeApi # Attributes relating to employment. module Employment extend ActiveSupport::Concern ATTRIBUTES = %i[ hours_prep_nl hours_prep_abroad hours_prod_nl hours_prod_abroad hours_place_nl hours_place_abroad hours_maint_nl hours_maint_abroad hours_remov_nl hours_remov_abroad ].freeze included do dataset_accessors(*ATTRIBUTES) end end end end
package cyclops.stream.spliterator;

import java.util.Spliterator;
import java.util.function.Function;

/**
 * A {@link Spliterator} of T that exposes an underlying source
 * {@link Spliterator} of S together with a mapping function from S to T.
 *
 * Created by johnmcclean on 30/12/2016.
 *
 * @param <S> element type of the source spliterator
 * @param <T> element type of this spliterator
 */
public interface FunctionSpliterator<S, T> extends Spliterator<T> {

    /** @return the underlying source spliterator */
    Spliterator<S> source();

    /** @return the function associated with this spliterator, mapping S to T */
    Function<? super S, ? extends T> function();
}
<filename>src/utils/query.utils.ts export const whereId = (id: number) => { return { where: { id: id, }, }; }; export const whereInstanceId = (instanceId: number) => { return { where: { instanceId: instanceId, }, }; }; export const whereName = (name: string) => { return { where: { name: name, }, }; }; export const whereSysName = (sysName: string) => { return { where: { sysName: sysName, }, }; }; export const insertNameAndOwnerId = (name: string, ownerId: string) => { return { name: name, ownerId: ownerId, }; }; export default { whereId, whereInstanceId, whereName, whereSysName, insertNameAndOwnerId, };
import java.io.*;
import java.net.*;
import java.math.BigInteger;

/**
 * Client that receives factoring work units from a server on localhost:4321.
 * Each unit is three lines: a semiprime, a lower bound, and an upper bound;
 * the client trial-divides the semiprime over [lower, upper] and prints any
 * factor pair found. The server ends the session by sending the line "Bye!".
 */
public class FactorPrimeMultipleClient {

    public static void main(String[] args) throws Exception {
        String hostName = "localhost";
        int portNumber = 4321;

        Socket echoSocket = new Socket(hostName, portNumber);
        BufferedReader in = new BufferedReader(new InputStreamReader(echoSocket.getInputStream()));

        BigInteger zero = BigInteger.ZERO;
        BigInteger one = BigInteger.ONE;

        // BUG FIX: the original do/while parsed value1 with new BigInteger(value1)
        // BEFORE comparing it to "Bye!", so the terminating message always threw
        // NumberFormatException. Check the sentinel (and EOF) before parsing.
        while (true) {
            String value1 = in.readLine();
            if (value1 == null || value1.equals("Bye!")) {
                break;
            }
            String value2 = in.readLine();
            String value3 = in.readLine();

            BigInteger receivedSemiPrime = new BigInteger(value1);
            BigInteger borderValue1 = new BigInteger(value2);
            BigInteger borderValue2 = new BigInteger(value3);

            // Trial division over the assigned range [borderValue1, borderValue2].
            for (BigInteger i = borderValue1; i.compareTo(borderValue2) <= 0; i = i.add(one)) {
                if (receivedSemiPrime.remainder(i).compareTo(zero) == 0) {
                    BigInteger anotherFactor = receivedSemiPrime.divide(i);
                    System.out.println("The prime factors are " + i + " and " + anotherFactor + ".");
                    break;
                }
            }
        }

        System.out.println("Client connection terminated.");
        echoSocket.close();
        in.close();
    }
}
<filename>client/src/routes/NewPublish/Authorship.tsx
import styles from './index.module.scss'
import Input from '../../components/atoms/MaterialForms/Input'
import Select from '../../components/atoms/MaterialForms/Select'
import React, { ChangeEvent } from 'react'
import Button from '../../components/atoms/Button'

// Form values collected by this step; all optional until the user fills them in.
export type AuthorshipFormValues = {
    author?: string
    coprHolder?: string
    license?: string
}

type Props = {
    // Shared change handler from the parent multi-step form.
    handleChange: (event: ChangeEvent<HTMLInputElement | HTMLTextAreaElement> | ChangeEvent<{ name?: string | undefined; value: unknown }>) => void
    // Moves the wizard to another step.
    setStep: (newStep: number) => void
    // Currently active wizard step; this component renders only when step === 2.
    step: number
    values: AuthorshipFormValues
}

// License choices offered in the dropdown (open-data and Creative Commons licenses).
const licenseOptions = [
    "Public Domain",
    "PDDL: Public Domain Dedication and License",
    "ODC-By: Attribution License",
    "ODC-ODbL: Open Database License",
    "CDLA-Sharing: Community Data License Agreement",
    "CDLA-Permissive: Community Data License Agreement",
    "CC0: Public Domain Dedication",
    "CC BY: Attribution 4.0 International",
    "CC BY-SA: Attribution-ShareAlike 4.0 International",
    "CC BY-ND: Attribution-NoDerivatives 4.0 International",
    "CC BY-NC: Attribution-NonCommercial 4.0 International",
    "CC BY-NC-SA: Attribution-NonCommercial-ShareAlike 4.0 International",
    "CC BY-NC-ND: Attribution-NonCommercial-NoDerivatives 4.0 International"
]

// Step 2 of the publish wizard: collects author, copyright holder and license.
// The "next" button stays disabled until all three fields are filled, then
// advances to step 3.
export const Authorship = ({ handleChange, setStep, step, values }: Props) => {
    // Render nothing unless this is the active step.
    if (step !== 2) return null

    return <>
        <h2>Authorship</h2>
        <p className={styles.subtitle}>
            Give proper attribution to your artwork.
        </p>
        <Input
            className={styles.formField}
            label="Author"
            placeholder="Your name, or artist pseudonym"
            value={values.author || ''}
            name="author"
            onChange={handleChange}
        />
        <Input
            className={styles.formField}
            label="Copyright Holder"
            placeholder="Myself"
            value={values.coprHolder || ''}
            name="coprHolder"
            onChange={handleChange}
        />
        <Select
            name="license"
            defaultValue="none"
            style={{
                color: values.license && values.license !== 'none' ? 'black' : '#9D9FA0',
                margin: '10px 0 48px'
            }}
            value={values.license}
            label="License"
            onChange={handleChange}
        >
            <option value="none" disabled>
                All Rights Reserved
            </option>
            {licenseOptions.map(opt => <option key={opt} value={opt}>{opt}</option>)}
        </Select>
        <Button
            disabled={!values.author || !values.coprHolder || !values.license}
            fullWidth
            onClick={() => setStep(3)}
            secondary
        >next</Button>
    </>
}
/*
 * Database query exercise.
 *
 * DB.registiConFilm(p: Film => Boolean): List[Regista] must return every
 * director (Regista) who directed at least one film satisfying predicate p,
 * resolved through the film/director link table (DirettoDa).
 *
 * Note: private helper methods and instance values may be added to DB
 * if useful.
 */

case class Film(id:Int, titolo:String, anno:Int)
case class Regista(id:Int, nome:String)
case class DirettoDa(idFilm:Int, idRegista:Int)

case class DB(film:List[Film], registi:List[Regista], regie:List[DirettoDa]) {
  // Directors who directed at least one film satisfying p.
  //
  // BUG FIX: the original body ended with a val definition (the final filter
  // over `registi` was commented out), so the method produced Unit and failed
  // to type-check against the declared List[Regista] result.
  def registiConFilm(p: Film => Boolean): List[Regista] = {
    // ids of the films satisfying p
    val filmConP = film.filter(p).map(_.id).toSet
    // ids of directors linked to at least one of those films
    val idRegisti = regie.filter(dd => filmConP.contains(dd.idFilm)).map(_.idRegista).toSet
    // directors (in their original order) whose id appears among idRegisti
    registi.filter(r => idRegisti.contains(r.id))
  }
}
import pulsar as psr def load_ref_system(): """ Returns circumcoronene as found in the IQMol fragment library. All credit to https://github.com/nutjunkie/IQmol """ return psr.make_system(""" C 1.22126 0.70509 0.00000 C 1.22126 -0.70509 0.00000 C 0.00000 1.41018 0.00000 C 2.44253 1.41020 0.00000 C -0.00000 -1.41018 -0.00000 C -1.22126 0.70509 -0.00000 C 2.44253 -1.41020 0.00000 C 0.00000 2.82039 0.00000 C 3.66390 0.70593 0.00000 C 2.44330 2.82006 0.00000 C 3.66390 -0.70593 0.00000 C -1.22126 -0.70509 -0.00000 C 1.22059 3.52600 0.00000 C -0.00000 -2.82039 -0.00000 C -2.44253 1.41020 -0.00000 C 2.44330 -2.82006 0.00000 C -1.22059 3.52600 -0.00000 C 4.88654 1.41226 0.00000 C 3.66632 3.52574 0.00000 C -2.44253 -1.41020 -0.00000 C 1.22059 -3.52600 0.00000 C -2.44330 2.82006 -0.00000 C 4.88654 -1.41226 0.00000 C 1.22022 4.93799 0.00000 C 4.88134 2.81824 0.00000 C -1.22059 -3.52600 -0.00000 C -3.66390 0.70593 -0.00000 C 3.66632 -3.52574 0.00000 C -1.22022 4.93799 0.00000 C 6.09679 0.70026 0.00000 C 3.65484 4.92984 0.00000 C -2.44330 -2.82006 -0.00000 C -3.66390 -0.70593 -0.00000 C 4.88134 -2.81824 0.00000 C 0.00000 5.63649 0.00000 C 6.09679 -0.70026 0.00000 C 2.44195 5.63011 0.00000 C 1.22022 -4.93799 -0.00000 C -3.66632 3.52574 -0.00000 C -1.22022 -4.93799 -0.00000 C -4.88654 1.41226 -0.00000 C 3.65484 -4.92984 0.00000 C -2.44195 5.63011 -0.00000 C -3.66632 -3.52574 -0.00000 C -4.88654 -1.41226 -0.00000 C -0.00000 -5.63649 -0.00000 C -4.88134 2.81824 -0.00000 C 2.44195 -5.63011 0.00000 C -3.65484 4.92984 -0.00000 C -4.88134 -2.81824 -0.00000 C -2.44195 -5.63011 -0.00000 C -6.09679 0.70026 -0.00000 C -3.65484 -4.92984 -0.00000 C -6.09679 -0.70026 -0.00000 H 5.82070 3.36059 0.00000 H 7.04271 1.22863 0.00000 H 4.58538 5.48485 0.00000 H 5.82070 -3.36059 0.00000 H 0.00000 6.72117 0.00000 H 7.04271 -1.22863 0.00000 H 2.45733 6.71348 0.00000 H 4.58538 -5.48485 0.00000 H -2.45733 6.71348 -0.00000 H -0.00000 -6.72117 -0.00000 H -5.82070 3.36059 -0.00000 H 2.45733 -6.71348 0.00000 
H -4.58538 5.48485 -0.00000 H -5.82070 -3.36059 -0.00000 H -2.45733 -6.71348 -0.00000 H -7.04271 1.22863 -0.00000 H -4.58538 -5.48485 -0.00000 H -7.04271 -1.22863 -0.00000 """)
#!/bin/bash
# Extract the second space-delimited field from each log file and diff them.
# Fix: removed the useless use of `cat` — cut reads the file directly.
cut -d ' ' -f2 old_log.txt > old_log
cut -d ' ' -f2 new_log.txt > new_log
diff old_log new_log > diff
<gh_stars>0
/*
 * $Id$
 *
 * Copyright 2006-2008 <NAME>. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.opoo.dao.hibernate3;

import java.sql.SQLException;
import java.util.List;

import org.hibernate.HibernateException;
import org.hibernate.Query;
import org.hibernate.Session;
import org.opoo.dao.support.PageableList;
import org.opoo.dao.support.QuerySupport;
import org.opoo.dao.support.ResultFilter;
import org.opoo.oqs.criterion.Criterion;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.HibernateTemplate;

/**
 * {@link QuerySupport} implementation backed by a Hibernate 3
 * {@link HibernateTemplate}. Every variant delegates query creation to
 * {@link HibernateQueryHelper} and executes inside a
 * {@link HibernateCallback}.
 *
 * Consistency fix: the original mixed {@code Integer.valueOf(...)} with the
 * deprecated {@code new Integer(...)} constructor and cast the callback
 * result to either {@code Number} or {@code Integer} depending on the
 * method; all update methods now box with {@code Integer.valueOf} and
 * unbox through a uniform {@code Number} cast.
 *
 * @author <NAME>(<EMAIL>)
 * @version 1.0
 */
public class HibernateQuerySupport implements QuerySupport {
    private HibernateTemplate template;

    public HibernateQuerySupport(HibernateTemplate template) {
        this.template = template;
    }

    /** Executes an update/delete restricted by the given criterion. */
    public int executeUpdate(final String baseSql, final Criterion c) {
        return ((Number) template.execute(new HibernateCallback() {
            public Object doInHibernate(Session session) throws SQLException, HibernateException {
                return Integer.valueOf(HibernateQueryHelper.createQuery(session, baseSql, c).executeUpdate());
            }
        })).intValue();
    }

    /** Finds rows matching the filter's criterion and order, paged when requested. */
    public List find(final String baseSql, final ResultFilter resultFilter) {
        return template.executeFind(new HibernateCallback() {
            public Object doInHibernate(Session session) throws SQLException, HibernateException {
                Query q = HibernateQueryHelper.createQuery(session, baseSql,
                        resultFilter.getCriterion(), resultFilter.getOrder());
                if (resultFilter.isPageable()) {
                    q.setFirstResult(resultFilter.getFirstResult());
                    q.setMaxResults(resultFilter.getMaxResults());
                }
                return q.list();
            }
        });
    }

    /** Runs the select and count queries and wraps one page in a PageableList. */
    public PageableList find(String baseSelectSql, String baseCountSql, ResultFilter resultFilter) {
        List l = find(baseSelectSql, resultFilter);
        int count = getInt(baseCountSql, resultFilter.getCriterion());
        return new PageableList(l, resultFilter.getFirstResult(),
                resultFilter.getMaxResults(), count);
    }

    /** Returns the single numeric result of the query as an int. */
    public int getInt(final String baseSql, final Criterion c) {
        return ((Number) template.execute(new HibernateCallback() {
            public Object doInHibernate(Session session) throws SQLException, HibernateException {
                return (Number) HibernateQueryHelper.createQuery(session, baseSql, c).uniqueResult();
            }
        })).intValue();
    }

    public List find(String queryString) {
        return template.find(queryString);
    }

    public List find(String queryString, Object value) {
        return template.find(queryString, value);
    }

    public List find(String queryString, Object[] values) {
        return template.find(queryString, values);
    }

    public int executeUpdate(final String queryString) {
        return ((Number) template.execute(new HibernateCallback() {
            public Object doInHibernate(Session s) throws HibernateException, SQLException {
                return Integer.valueOf(s.createQuery(queryString).executeUpdate());
            }
        })).intValue();
    }

    public int executeUpdate(final String queryString, final Object[] values) {
        return ((Number) template.execute(new HibernateCallback() {
            public Object doInHibernate(Session s) throws HibernateException, SQLException {
                return Integer.valueOf(HibernateQueryHelper.createQuery(s, queryString, values).executeUpdate());
            }
        })).intValue();
    }

    public int executeUpdate(final String queryString, final Object value) {
        return ((Number) template.execute(new HibernateCallback() {
            public Object doInHibernate(Session s) throws HibernateException, SQLException {
                return Integer.valueOf(HibernateQueryHelper.createQuery(s, queryString, value).executeUpdate());
            }
        })).intValue();
    }

    /** Finds with a single named parameter. */
    public List find(final String queryString, final String name, final Object value) {
        return template.executeFind(new HibernateCallback() {
            public Object doInHibernate(Session session) throws SQLException, HibernateException {
                return HibernateQueryHelper.createQuery(session, queryString, name, value).list();
            }
        });
    }

    /** Finds with a single named array/collection parameter. */
    public List find(final String queryString, final String name, final Object[] values) {
        return template.executeFind(new HibernateCallback() {
            public Object doInHibernate(Session session) throws SQLException, HibernateException {
                return HibernateQueryHelper.createQuery(session, queryString, name, values).list();
            }
        });
    }

    /** Finds with several named parameters (names[i] bound to values[i]). */
    public List find(final String queryString, final String[] names, final Object[] values) {
        return template.executeFind(new HibernateCallback() {
            public Object doInHibernate(Session session) throws SQLException, HibernateException {
                return HibernateQueryHelper.createQuery(session, queryString, names, values).list();
            }
        });
    }

    public int executeUpdate(final String queryString, final String[] names, final Object[] values) {
        return ((Number) template.execute(new HibernateCallback() {
            public Object doInHibernate(Session session) throws SQLException, HibernateException {
                return Integer.valueOf(HibernateQueryHelper.createQuery(session, queryString, names, values).executeUpdate());
            }
        })).intValue();
    }

    public int executeUpdate(final String queryString, final String name, final Object[] values) {
        return ((Number) template.execute(new HibernateCallback() {
            public Object doInHibernate(Session session) throws SQLException, HibernateException {
                return Integer.valueOf(HibernateQueryHelper.createQuery(session, queryString, name, values).executeUpdate());
            }
        })).intValue();
    }

    public int executeUpdate(final String queryString, final String name, final Object value) {
        return ((Number) template.execute(new HibernateCallback() {
            public Object doInHibernate(Session session) throws SQLException, HibernateException {
                return Integer.valueOf(HibernateQueryHelper.createQuery(session, queryString, name, value).executeUpdate());
            }
        })).intValue();
    }
}
<reponame>ntpdev/scripts.py #!/usr/bin/python3 from datetime import date, time, timedelta, datetime import numpy as np import pandas as pd import sys from tsutils import make_filename, load_files, day_index, rth_index, aggregate_daily_bars, calc_vwap, calc_atr, LineBreak def exportNinja(df, outfile): print(f'exporting in Ninja Trader format {outfile} {len(df)}') with open(outfile, 'w') as f: for i,r in df.iterrows(): s = '%s;%4.2f;%4.2f;%4.2f;%4.2f;%d\n' % (i.strftime('%Y%m%d %H%M%S'),r['Open'],r['High'],r['Low'],r['Close'],r['Volume']) f.write(s) def exportMinVol(df, outfile): df2 = aggregateMinVolume(df, 2500) print(f'exporting minVol file {outfile} {len(df2)}') df2.to_csv(outfile) # create a new DF which aggregates bars between inclusive indexes def aggregate_bars(df, idxs_start, idxs_end): rows = [] dts = [] for s,e in zip(idxs_start, idxs_end): dts.append(e.date()) r = {} r['Open'] = df.Open[s] r['High'] = df.High[s:e].max() r['Low'] = df.Low[s:e].min() r['Close'] = df.Close[e] r['Volume'] = df.Volume[s:e].sum() vwap = np.average( df.WAP[s:e], weights=df.Volume[s:e] ) r['VWAP'] = round(vwap, 2) rows.append(r) daily = pd.DataFrame(rows, index=dts) daily['Change'] = daily['Close'].sub(daily['Close'].shift()) daily['DayChg'] = daily['Close'].sub(daily['Open']) daily['Range'] = daily['High'].sub(daily['Low']) return daily def aggregrate_bars_between(df, tm_open, tm_close): rows = [] # find indexes of open & close bars ops = df.at_time( tm_open ).index cls = df.at_time( tm_close ).index for op,cl in zip(ops, cls): # slicing a dataframe by index uses an inclusive range acc = aggregate(df.loc[op:cl]) rows.append(acc) return pd.DataFrame(rows) # inclusive end def aggregate(df): acc = {} for i,r in df.iterrows(): acc = single(i,r,1) if len(acc) == 0 else combine(acc, i, r, 1) return acc def aggregateMinVolume(df, minvol): rows = [] acc = {} # selector = (df.index.minute == 0) & (df.index.to_series().diff() != timedelta(minutes=1)) selector = df.index.to_series().diff() 
!= timedelta(minutes=1) openbar = (df.index.minute == 0) & selector lastbar = selector.shift(-1, fill_value=True) eur_open = date(2021,1,1) rth_open = date(2021,1,1) for i,r in df.iterrows(): if openbar.loc[i]: eur_open = i + timedelta(hours=8, minutes=59) rth_open = i + timedelta(hours=15, minutes=29) acc = single(i,r,1) if len(acc) == 0 else combine(acc, i, r, 1) if acc['Volume'] >= minvol or lastbar.loc[i] or i == eur_open or i == rth_open: rows.append(acc) acc = {} if len(acc) > 0: rows.append(acc) return pd.DataFrame(rows) def single(dt_fst, fst, period): r = {} r['Date'] = dt_fst r['DateCl'] = dt_fst + timedelta(minutes=period) r['Open'] = fst['Open'] r['High'] = fst['High'] r['Low'] = fst['Low'] r['Close'] = fst['Close'] r['Volume'] = fst['Volume'] r['VWAP'] = fst['VWAP'] return r def combine(acc, dt_snd, snd, period): r = {} r['Date'] = acc['Date'] r['DateCl'] = dt_snd + timedelta(minutes=period) r['Open'] = acc['Open'] r['High'] = max(acc['High'], snd['High']) r['Low'] = min(acc['Low'], snd['Low']) r['Close'] = snd['Close'] r['Volume'] = acc['Volume'] + snd['Volume'] r['VWAP'] = snd['VWAP'] return r def islastbar(d): return (d.hour == 20 and (d.minute == 14 or d.minute == 59)) or (d.hour == 13 and d.minute == 29) def isfirstbar(d): return (d.hour == 22 and d.minute == 00) or (d.hour == 13 and d.minute == 30) def hilo(df, rev): xs = [] hw = df.High.iloc[0] hwp = 0 lw = df.Low.iloc[0] lwp = 0 hf = False lf = False c = 0 for i,r in df.iterrows(): if r.High > hw: hw = r.High hwp = i hf = True elif hw - r.High > rev and hf: print('High %.2f' % df.High.loc[hwp]) # hf = False lf = False lw = r.Low lwp = i if r.Low < lw: lw = r.Low lwp = i lf = True elif r.Low - lw > rev and lf: # print('Low %.2f' % df.Low.loc[lwp:i]) print(df.loc[lwp:i]) # lf = False hf = False hw = r.High hwp = i print(df.iloc[50:70]) c = c + 1 if c == 2: sys.exit(0) # https://firstratedata.com/i/futures/ES def fn1(): column_names = ["TimeStamp", "open", "high", "low", "close", "volume"] df = 
pd.read_csv("d:\esz19.txt", names=column_names, parse_dates=["TimeStamp"], index_col=["TimeStamp"] ) dfd = df.resample("1H").agg({'open': 'first', 'close': 'last', 'high' : 'max', 'low' : 'min', 'volume': 'sum'}) dfd = dfd[dfd.volume > 1000] print(dfd.tail(19)) def print_summary(df): di = day_index(df) dr = rth_index(di) print('--- Daily bars ---') df2 = aggregate_daily_bars(df, di) print(df2) print('--- RTH bars ---') df2 = aggregate_daily_bars(df, dr) print(df2) lb = LineBreak(3) for i,r in df2.iterrows(): lb.append(r['Close'], i) # for c in df2['Close']: # lb.append(c) df3 = lb.asDataFrame() print(df3) df = load_files(make_filename('esz1*.csv')) print_summary(df) df['VWAP'] = calc_vwap(df) exportNinja(df, make_filename('ES 12-21.Last.txt')) exportMinVol(df, make_filename('es-minvol.csv'))
<filename>src/list-props.js const listProps = (obj, list = [], parent = "") => { for (const [key, type] of Object.entries(obj)) { if (type !== false) { const propName = `${parent}.${key}` list.push(propName) if (typeof type === "object") { listProps(type, list, propName) } } } return list } module.exports = listProps
#!/bin/sh
# -*- coding: utf-8 -*-
# Compiles an asm.js module to an optimized .wasm with binaryen.
# Usage: sh compileWASM.sh hyphenEngine

SRCFILE="$1.asm.js"
WASTNAME="$1.wast"
WASMNAME="$1.wasm"
#DISNAME="$1.wat"

echo 'running asm2wasm...'
# -m / -mm: initial and maximum memory in bytes.
# Fix: quote all expansions so module names containing spaces work.
./third-party/binaryen/bin/asm2wasm "$SRCFILE" -O4 -m 2097152 -mm 16777216 > "$WASTNAME"

echo 'optimize > WASM...'
./third-party/binaryen/bin/wasm-opt "$WASTNAME" -Oz -o "$WASMNAME"
rm "$WASTNAME"

#echo 'disassemble WASM...'
#./third-party/wabt/bin/wasm2wat "$WASMNAME" > "$DISNAME"
<reponame>parti-coop/demosx
package seoul.democracy.opinion.repository;

import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.querydsl.QueryDslPredicateExecutor;

import seoul.democracy.opinion.domain.Opinion;

/**
 * Spring Data JPA repository for {@link Opinion} entities (Long primary key).
 *
 * Combines the standard CRUD operations from {@link JpaRepository}, QueryDSL
 * predicate queries from {@link QueryDslPredicateExecutor}, and the
 * hand-written queries declared in {@code OpinionRepositoryCustom}. No body
 * is needed: Spring Data generates the implementation at runtime.
 */
public interface OpinionRepository extends OpinionRepositoryCustom, JpaRepository<Opinion, Long>, QueryDslPredicateExecutor<Opinion> {
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package exp4_5prgs;

import java.util.Scanner;

/**
 * "Super reduced string": repeatedly deletes adjacent pairs of equal
 * characters until none remain, then prints the result, or
 * "Empty String" when nothing is left.
 *
 * @author prakash
 */
public class Prg3exp4 {

    /**
     * Fully reduces the string by cancelling adjacent equal pairs.
     *
     * Uses a stack held in a StringBuilder: each character is pushed,
     * except that a character equal to the current top pops it instead
     * (the pair cancels). This is O(n), replacing the original
     * restart-from-scratch scan which was O(n^2) and rebuilt the string
     * on every deletion; the final result is identical because pair
     * cancellation is confluent.
     *
     * @param str input string (non-null)
     * @return the fully reduced string, possibly empty
     */
    static String reduce(String str) {
        StringBuilder stack = new StringBuilder();
        for (int i = 0; i < str.length(); i++) {
            char c = str.charAt(i);
            int top = stack.length() - 1;
            if (top >= 0 && stack.charAt(top) == c) {
                stack.deleteCharAt(top); // adjacent pair cancels out
            } else {
                stack.append(c);
            }
        }
        return stack.toString();
    }

    public static void main(String[] args) {
        Scanner sc = new Scanner(System.in);
        String str = sc.next();
        String reduced = reduce(str);
        if (reduced.isEmpty()) {
            System.out.println("Empty String");
        } else {
            System.out.println(reduced);
        }
    }
}
/*
Sample Input 0

aaabccddd

Sample Output 0

abd
*/
<reponame>sethcoder/cc65 /*****************************************************************************/ /* */ /* loadexpr.c */ /* */ /* Load an expression into the primary register */ /* */ /* */ /* */ /* (C) 2004-2009, <NAME> */ /* Roemerstrasse 52 */ /* D-70794 Filderstadt */ /* EMail: <EMAIL> */ /* */ /* */ /* This software is provided 'as-is', without any expressed or implied */ /* warranty. In no event will the authors be held liable for any damages */ /* arising from the use of this software. */ /* */ /* Permission is granted to anyone to use this software for any purpose, */ /* including commercial applications, and to alter it and redistribute it */ /* freely, subject to the following restrictions: */ /* */ /* 1. The origin of this software must not be misrepresented; you must not */ /* claim that you wrote the original software. If you use this software */ /* in a product, an acknowledgment in the product documentation would be */ /* appreciated but is not required. */ /* 2. Altered source versions must be plainly marked as such, and must not */ /* be misrepresented as being the original software. */ /* 3. This notice may not be removed or altered from any source */ /* distribution. */ /* */ /*****************************************************************************/ /* cc65 */ #include "codegen.h" #include "error.h" #include "exprdesc.h" #include "global.h" #include "loadexpr.h" /*****************************************************************************/ /* Code */ /*****************************************************************************/ static void LoadConstant (unsigned Flags, ExprDesc* Expr) /* Load the primary register with some constant value. 
*/ { switch (ED_GetLoc (Expr)) { case E_LOC_ABS: /* Number constant */ g_getimmed (Flags | TypeOf (Expr->Type) | CF_CONST, Expr->IVal, 0); break; case E_LOC_GLOBAL: /* Global symbol, load address */ g_getimmed ((Flags | CF_EXTERNAL) & ~CF_CONST, Expr->Name, Expr->IVal); break; case E_LOC_STATIC: case E_LOC_LITERAL: /* Static symbol or literal, load address */ g_getimmed ((Flags | CF_STATIC) & ~CF_CONST, Expr->Name, Expr->IVal); break; case E_LOC_REGISTER: /* Register variable. Taking the address is usually not ** allowed. */ if (IS_Get (&AllowRegVarAddr) == 0) { Error ("Cannot take the address of a register variable"); } g_getimmed ((Flags | CF_REGVAR) & ~CF_CONST, Expr->Name, Expr->IVal); break; case E_LOC_STACK: g_leasp (Expr->IVal); break; default: Internal ("Unknown constant type: %04X", Expr->Flags); } } void LoadExpr (unsigned Flags, struct ExprDesc* Expr) /* Load an expression into the primary register if it is not already there. */ { if (ED_IsLVal (Expr)) { /* Dereferenced lvalue. If this is a bit field its type is unsigned. ** But if the field is completely contained in the lower byte, we will ** throw away the high byte anyway and may therefore load just the ** low byte. */ if (ED_IsBitField (Expr)) { Flags |= (Expr->BitOffs + Expr->BitWidth <= CHAR_BITS)? 
CF_CHAR : CF_INT; Flags |= CF_UNSIGNED; } else { Flags |= TypeOf (Expr->Type); } if (ED_NeedsTest (Expr)) { Flags |= CF_TEST; } switch (ED_GetLoc (Expr)) { case E_LOC_ABS: /* Absolute: numeric address or const */ g_getstatic (Flags | CF_ABSOLUTE, Expr->IVal, 0); break; case E_LOC_GLOBAL: /* Global variable */ g_getstatic (Flags | CF_EXTERNAL, Expr->Name, Expr->IVal); break; case E_LOC_STATIC: case E_LOC_LITERAL: /* Static variable or literal in the literal pool */ g_getstatic (Flags | CF_STATIC, Expr->Name, Expr->IVal); break; case E_LOC_REGISTER: /* Register variable */ g_getstatic (Flags | CF_REGVAR, Expr->Name, Expr->IVal); break; case E_LOC_STACK: /* Value on the stack */ g_getlocal (Flags, Expr->IVal); break; case E_LOC_PRIMARY: /* The primary register - just test if necessary */ if (Flags & CF_TEST) { g_test (Flags); } break; case E_LOC_EXPR: /* Reference to address in primary with offset in Expr */ g_getind (Flags, Expr->IVal); break; default: Internal ("Invalid location in LoadExpr: 0x%04X", ED_GetLoc (Expr)); } /* Handle bit fields. The actual type may have been casted or ** converted, so be sure to always use unsigned ints for the ** operations. */ if (ED_IsBitField (Expr)) { unsigned F = CF_INT | CF_UNSIGNED | CF_CONST | (Flags & CF_TEST); /* Shift right by the bit offset */ g_asr (F, Expr->BitOffs); /* And by the width if the field doesn't end on an int boundary */ if (Expr->BitOffs + Expr->BitWidth != CHAR_BITS && Expr->BitOffs + Expr->BitWidth != INT_BITS) { g_and (F, (0x0001U << Expr->BitWidth) - 1U); } } /* Expression was tested */ ED_TestDone (Expr); } else { /* An rvalue */ if (ED_IsLocExpr (Expr)) { if (Expr->IVal != 0) { /* We have an expression in the primary plus a constant ** offset. Adjust the value in the primary accordingly. */ Flags |= TypeOf (Expr->Type); g_inc (Flags | CF_CONST, Expr->IVal); } } else { /* Constant of some sort, load it into the primary */ LoadConstant (Flags, Expr); } /* Are we testing this value? 
*/ if (ED_NeedsTest (Expr)) { /* Yes, force a test */ Flags |= TypeOf (Expr->Type); g_test (Flags); ED_TestDone (Expr); } } }
#!/bin/bash
# Runs ./glc over every sample input and diffs its stderr output against
# the expected samples/<name>.out file.

TXT_RESET=$(tput sgr 0 2> /dev/null)
TXT_RED=$(tput setaf 1 2> /dev/null)
TXT_GREEN=$(tput setaf 2 2> /dev/null)

OUTFILES=outfiles

if [ ! -f "glc" ]; then
    # Fix: the message previously referred to a 'parser' executable,
    # but the script actually requires ./glc.
    echo "'glc' executable does not exist. Use 'make'"
    exit 1
fi

function usage() {
    echo -e "$0 [OPTIONS]...\n"
    echo -e "OPTIONS"
    echo -e "  --all  Compares all solution files"
    exit 1
}

function compare_all() {
    mkdir -p "${OUTFILES}"
    # Fix: iterate with a glob instead of parsing `ls` output, and use
    # basename instead of the fragile cut-on-'.'/'/' pipeline.
    for file in samples/*.java; do
        filename=$(basename "$file" .java)
        ./glc < "$file" 2> "${OUTFILES}/$filename.out"
        if diff -s "${OUTFILES}/$filename.out" "samples/$filename.out" > /dev/null; then
            echo "${TXT_GREEN}[PASSED]${TXT_RESET} $file"
        else
            echo "${TXT_RED}[FAILED]${TXT_RESET} $file"
        fi
    done
    rm -rf "${OUTFILES}"
}

while true; do
    case "$1" in
        --all ) compare_all; break ;;
        * ) usage ;;
    esac
done
'use strict'; /* * Create a `yell` function that takes a string * and return the same string in upper case * */ // Your code: const yell = (str) => str; //* Begin of tests const assert = require('assert'); assert.strictEqual(typeof yell, 'function'); assert.strictEqual(yell.length, 1); assert.strictEqual(yell('bobba fett'), 'bobba fett'); assert.strictEqual(yell('a'), 'a'); assert.strictEqual(yell('toto'), 'toto'); //assert.fail('You must write your own tests'); // End of tests */
package pkcs7 import ( "gitee.com/ddkwork/libraryGo/internal/padder/padderApi" "gitee.com/ddkwork/libraryGo/stream" ) type ( Pkcs7 interface { padderApi.ApiPadder } _pkcs7 struct { padder padderApi.ApiPadder } ) func (p *_pkcs7) Padding(src []byte) stream.Interface { return p.padder.Padding(src) } func (p *_pkcs7) UnPadding(dst []byte) stream.Interface { return p.padder.UnPadding(dst) } func (p *_pkcs7) Size() int { return p.padder.Size() } func (p *_pkcs7) SetSize(size int) { p.padder.SetSize(8 * 2) } func New() Pkcs7 { p := &_pkcs7{ padder: padderApi.New(), } p.padder.SetSize(8 * 2) return p }
<filename>src/test/java/com/jeecookbook/maven/plugins/solc/FakeCompilerBridge.java
/*-
 * -\-\-
 * solc-maven-plugin
 * --
 * Copyright (C) 2017 <NAME>
 * --
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * -/-/-
 */
package com.jeecookbook.maven.plugins.solc;

import com.jeecookbook.maven.plugins.solc.bridge.CompilerBridge;

import java.util.ArrayList;
import java.util.List;

/**
 * Test double for {@link CompilerBridge}: records every command passed to
 * {@link #executeCmd(String)} and always returns the canned
 * {@link CompilerBridge.CompilerResult} supplied at construction time.
 */
public class FakeCompilerBridge implements CompilerBridge {

    // Canned result returned from every executeCmd call.
    CompilerBridge.CompilerResult compilerResult;
    // Commands received, in call order; package-visible for test assertions.
    List<String> cmds = new ArrayList<String>();

    public FakeCompilerBridge(CompilerBridge.CompilerResult compilerResult){
        this.compilerResult = compilerResult;
    }

    /** Records the command and returns the preset result. */
    public CompilerBridge.CompilerResult executeCmd(String cmd) {
        this.cmds.add(cmd);
        return this.compilerResult;
    }

    /** Convenience factory for a result that carries a throwable. */
    static CompilerBridge.CompilerResult build(int status, boolean success, String output, Throwable throwable){
        CompilerBridge.CompilerResult result = new CompilerBridge.CompilerResult();
        result.setOutput(output);
        result.setThrowable(throwable);
        result.setSuccess(success);
        result.setStatus(status);
        return result;
    }

    /** Convenience factory for a result without a throwable. */
    static CompilerBridge.CompilerResult build(int status, boolean success, String output){
        CompilerBridge.CompilerResult result = new CompilerBridge.CompilerResult();
        result.setOutput(output);
        result.setSuccess(success);
        result.setStatus(status);
        return result;
    }
}
<filename>src/main.ts<gh_stars>1-10
import {
  CodeGeneratorRequest,
  CodeGeneratorResponse,
  CodeGeneratorResponse_Feature,
} from 'ts-proto-descriptors';
import { promisify } from 'util';
import { FlatWalker, FlatWalkerStrategy } from './walker/flat_walker_strategy.js';
import { WalkerASSingleFile, WalkerASMultiFile } from './walker_as/index.js';
import {
  NamedDescriptorIndex,
  DecoratedDescriptorIndex,
  NamedDescriptorIndexReducer,
} from './proto/index.js';
import { parseOptions } from './options.js';
import { readToBuffer } from './internal.js';

// protoc plugin entry point: decodes a CodeGeneratorRequest from stdin,
// walks the requested descriptors and writes the generated files back to
// stdout as an encoded CodeGeneratorResponse.
async function main() {
  const stdin = await readToBuffer(process.stdin);
  const request = CodeGeneratorRequest.decode(stdin);
  const options = parseOptions(request.parameter);
  // Only proto3 files are accepted; '' covers files with no syntax marker.
  request.protoFile.forEach((f) => {
    if ((f.syntax != 'proto3') && (f.syntax != '')) {
      throw new Error(
        `Only proto3 syntax is supported. ${f.name} has ${f.syntax} syntax!`,
      );
    }
  });
  // Index every named descriptor, then reduce to the set reachable from
  // the roots plus explicitly included names, minus excluded names.
  const types = new NamedDescriptorIndex(request);
  const roots: Set<string> = types.rootIDs() as Set<string>;
  options.include.forEach((n:string) => roots.add(n));
  const requiredIDs = new NamedDescriptorIndexReducer(
    types,
    roots,
    new Set<string>(options.exclude),
  );
  // Fail early if an excluded type is still referenced by a kept type.
  const brokenReferences = requiredIDs.brokenReferences();
  if (brokenReferences.length > 0) {
    throw new Error(
      `Broken references found: ${brokenReferences
        .map((value:[string, string]) => `${value[0]} references ${value[1]}`)
        .join(
          ', ',
        )}, please either exclude a type and all it's references`,
    );
  }
  const descriptors = new DecoratedDescriptorIndex(requiredIDs);
  const strategy = new FlatWalkerStrategy(descriptors);
  // 'multi' mode emits one output file per proto file; default is one file.
  let walker:FlatWalker = new WalkerASSingleFile(options);
  if (options.mode == 'multi') {
    walker = new WalkerASMultiFile(options);
  }
  strategy.walk(walker);
  const files = walker.files()
  const response = CodeGeneratorResponse.fromPartial({
    // There is an issue with type declaration in ts-proto-descriptors, ignoring it for now
    /* eslint-disable @typescript-eslint/ban-ts-comment */
    // @ts-ignore
    file: files,
    supportedFeatures: CodeGeneratorResponse_Feature.FEATURE_PROTO3_OPTIONAL,
  });
  const buffer = CodeGeneratorResponse.encode(response).finish();
  // process.stdout.write is callback-based; promisify so we can await the
  // flush before exiting.
  const write = promisify(
    process.stdout.write as (buffer: Buffer) => boolean,
  ).bind(process.stdout);
  await write(Buffer.from(buffer));
}

main()
  .then(() => {
    process.exit(0);
  })
  .catch((e) => {
    // Non-zero exit tells protoc the plugin failed.
    process.stderr.write('FAILED!');
    process.stderr.write(e.message);
    process.stderr.write(e.stack);
    process.exit(1);
  });
<reponame>ivanthescientist/JsonSurfer // Generated from JsonPath.g4 by ANTLR 4.9.3 package org.jsfr.json.compiler; import org.antlr.v4.runtime.tree.ParseTreeVisitor; /** * This interface defines a complete generic visitor for a parse tree produced * by {@link JsonPathParser}. * * @param <T> The return type of the visit operation. Use {@link Void} for * operations with no return type. */ public interface JsonPathVisitor<T> extends ParseTreeVisitor<T> { /** * Visit a parse tree produced by {@link JsonPathParser#path}. * @param ctx the parse tree * @return the visitor result */ T visitPath(JsonPathParser.PathContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#syntaxMode}. * @param ctx the parse tree * @return the visitor result */ T visitSyntaxMode(JsonPathParser.SyntaxModeContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#relativePath}. * @param ctx the parse tree * @return the visitor result */ T visitRelativePath(JsonPathParser.RelativePathContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#searchChild}. * @param ctx the parse tree * @return the visitor result */ T visitSearchChild(JsonPathParser.SearchChildContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#anyChild}. * @param ctx the parse tree * @return the visitor result */ T visitAnyChild(JsonPathParser.AnyChildContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#any}. * @param ctx the parse tree * @return the visitor result */ T visitAny(JsonPathParser.AnyContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#index}. * @param ctx the parse tree * @return the visitor result */ T visitIndex(JsonPathParser.IndexContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#indexes}. * @param ctx the parse tree * @return the visitor result */ T visitIndexes(JsonPathParser.IndexesContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filter}. 
* @param ctx the parse tree * @return the visitor result */ T visitFilter(JsonPathParser.FilterContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#childNode}. * @param ctx the parse tree * @return the visitor result */ T visitChildNode(JsonPathParser.ChildNodeContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#array}. * @param ctx the parse tree * @return the visitor result */ T visitArray(JsonPathParser.ArrayContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#childrenNode}. * @param ctx the parse tree * @return the visitor result */ T visitChildrenNode(JsonPathParser.ChildrenNodeContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filterExpr}. * @param ctx the parse tree * @return the visitor result */ T visitFilterExpr(JsonPathParser.FilterExprContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filterExist}. * @param ctx the parse tree * @return the visitor result */ T visitFilterExist(JsonPathParser.FilterExistContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filterGtNum}. * @param ctx the parse tree * @return the visitor result */ T visitFilterGtNum(JsonPathParser.FilterGtNumContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filterGeNum}. * @param ctx the parse tree * @return the visitor result */ T visitFilterGeNum(JsonPathParser.FilterGeNumContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filterLtNum}. * @param ctx the parse tree * @return the visitor result */ T visitFilterLtNum(JsonPathParser.FilterLtNumContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filterLeNum}. * @param ctx the parse tree * @return the visitor result */ T visitFilterLeNum(JsonPathParser.FilterLeNumContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filterEqualNum}. 
* @param ctx the parse tree * @return the visitor result */ T visitFilterEqualNum(JsonPathParser.FilterEqualNumContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filterNEqualNum}. * @param ctx the parse tree * @return the visitor result */ T visitFilterNEqualNum(JsonPathParser.FilterNEqualNumContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filterEqualBool}. * @param ctx the parse tree * @return the visitor result */ T visitFilterEqualBool(JsonPathParser.FilterEqualBoolContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filterNEqualBool}. * @param ctx the parse tree * @return the visitor result */ T visitFilterNEqualBool(JsonPathParser.FilterNEqualBoolContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filterEqualStr}. * @param ctx the parse tree * @return the visitor result */ T visitFilterEqualStr(JsonPathParser.FilterEqualStrContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filterNEqualStr}. * @param ctx the parse tree * @return the visitor result */ T visitFilterNEqualStr(JsonPathParser.FilterNEqualStrContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filterEqualNull}. * @param ctx the parse tree * @return the visitor result */ T visitFilterEqualNull(JsonPathParser.FilterEqualNullContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filterNEqualNull}. * @param ctx the parse tree * @return the visitor result */ T visitFilterNEqualNull(JsonPathParser.FilterNEqualNullContext ctx); /** * Visit a parse tree produced by {@link JsonPathParser#filterMatchRegex}. * @param ctx the parse tree * @return the visitor result */ T visitFilterMatchRegex(JsonPathParser.FilterMatchRegexContext ctx); }
#!/bin/bash
# Integration-test driver for DefectDojo.
# Waits for the web UI to come up, then runs either the single test file named
# by DD_INTEGRATION_TEST_FILENAME or the full suite.

echo "Testing DefectDojo Service"
echo "Waiting max 100s for services to start"

# Wait for services to become available: up to MAX_ATTEMPTS tries, 10s apart.
MAX_ATTEMPTS=10
COUNTER=0
while [ $COUNTER -lt $MAX_ATTEMPTS ]; do
    curl -s -o "/dev/null" $DD_BASE_URL -m 120
    CR=$(curl --insecure -s -m 10 -I "${DD_BASE_URL}login?next=/" | egrep "^HTTP" | cut -d' ' -f2)
    if [ "$CR" == 200 ]; then
        echo "Succesfully displayed login page, starting integration tests"
        break
    fi
    echo "Waiting: cannot display login screen; got HTTP code $CR"
    sleep 10
    let COUNTER=COUNTER+1
done

# BUG FIX: the loop leaves COUNTER equal to MAX_ATTEMPTS (never greater) when
# all attempts fail, so the old "-gt" comparison could never be true and a
# dead service went undetected. "-ge" catches exhaustion.
if [ $COUNTER -ge $MAX_ATTEMPTS ]; then
    echo "ERROR: cannot display login screen; got HTTP code $CR"
    exit 1
fi

export CHROMEDRIVER=$(find /opt/chrome-driver -name chromedriver)

# Run available unittests with a simple setup.
# All available integration-test scripts are activated below.
# On success a message is printed and the script continues; on any failure a
# message is printed and the script exits with status code 1.

# BUG FIX: the original used plain echo with "\n" inside the string, printing
# a literal backslash-n; -e interprets it as the intended blank line.
function fail() {
    echo -e "Error: $1 test failed\n"
    exit 1
}

function success() {
    echo -e "Success: $1 test passed\n"
}

# run_test <description> <script>: run one integration-test script and
# report/abort via success/fail. Factors out the 20+ identical blocks the
# original repeated verbatim.
function run_test() {
    test="$1"
    echo "Running: $test"
    if python3 "$2"; then
        success "$test"
    else
        fail "$test"
    fi
}

echo "IT FILENAME: $DD_INTEGRATION_TEST_FILENAME"

if [[ ! -z "$DD_INTEGRATION_TEST_FILENAME" ]]; then
    # A single test file was requested; run only that one.
    run_test "$DD_INTEGRATION_TEST_FILENAME" "$DD_INTEGRATION_TEST_FILENAME"
else
    run_test "Finding integration tests" tests/finding_test.py
    run_test "Report Builder tests" tests/report_builder_test.py
    run_test "Notes integration tests" tests/notes_test.py
    run_test "Regulation integration tests" tests/regulations_test.py
    run_test "Product type integration tests" tests/product_type_test.py
    run_test "Product integration tests" tests/product_test.py
    run_test "Endpoint integration tests" tests/endpoint_test.py
    run_test "Engagement integration tests" tests/engagement_test.py
    run_test "Environment integration tests" tests/environment_test.py
    run_test "Test integration tests" tests/test_test.py
    run_test "User integration tests" tests/user_test.py
    run_test "Group integration tests" tests/group_test.py
    run_test "Product Group integration tests" tests/product_group_test.py
    run_test "Product Type Group integration tests" tests/product_type_group_test.py
    run_test "Product member integration tests" tests/product_member_test.py
    run_test "Product type member integration tests" tests/product_type_member_test.py
    run_test "Ibm Appscan integration test" tests/ibm_appscan_test.py
    run_test "Search integration test" tests/search_test.py
    run_test "File Upload tests" tests/file_test.py
    run_test "Dedupe integration tests" tests/dedupe_test.py

    echo "Check Various Pages integration test"
    if python3 tests/check_various_pages.py ; then
        echo "Success: Check Various Pages tests passed"
    else
        echo "Error: Check Various Pages test failed"; exit 1
    fi

    # The below tests are commented out because they are still an unstable work in progress
    ## Once Ready they can be uncommented.

    # echo "Import Scanner integration test"
    # if python3 tests/import_scanner_test.py ; then
    #     echo "Success: Import Scanner integration tests passed"
    # else
    #     echo "Error: Import Scanner integration test failed"; exit 1
    # fi

    # echo "Zap integration test"
    # if python3 tests/zap.py ; then
    #     echo "Success: zap integration tests passed"
    # else
    #     echo "Error: Zap integration test failed"; exit 1
    # fi

    # BUG FIX: the original used "exec echo"; exec is pointless here since
    # this is the last command anyway, and it prevented any future code added
    # after it from running.
    echo "Done Running all configured integration tests."
fi
#!/bin/bash -e if [[ $EUID -ne 0 ]]; then echo "ERROR: This script must be run as root" exit 1 fi yum install fail2ban -y # ssh echo "[sshd]" > /etc/fail2ban/jail.d/ssh.local echo "enabled = true" >> /etc/fail2ban/jail.d/ssh.local service fail2ban restart > /dev/null service fail2ban status
import * as THREE from 'three' import { WEBGL } from 'three/examples/jsm/WebGL' const scene = new THREE.Scene() const camera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 1, 1000) const cameraR = 10, cameraRL = 10 * 2 * Math.PI camera.position.x = 10 camera.position.z = 10 camera.position.y = 10 camera.lookAt(new THREE.Vector3(0, 0, 0)) // const light = new THREE.AmbientLight(0xFF0000) // const light = new THREE.PointLight(0x00FFFF) const light = new THREE.SpotLight(0x00FFFF, 1,50,20) light.position.set(5, 5, 0) scene.add(light) const cube = new THREE.Mesh(new THREE.BoxGeometry(2, 2, 2), new THREE.MeshLambertMaterial({ color: 0xFFFFFF })) scene.add(cube) const renderer = new THREE.WebGL1Renderer({ antialias: true }) renderer.setSize(window.innerWidth, window.innerHeight) renderer.setClearColor(0xFFFFFF, 1.0) document.body.appendChild(renderer.domElement) renderer.render(scene, camera) // 改变相机在空间中的位置,观测点光源辐射实际情况 let originTouchX: number let rad: number, originRad = 0 document.body.addEventListener('touchstart', (e) => { console.log(e) originTouchX = e.touches[0].clientX }) document.body.addEventListener('touchmove', (e) => { // 拖动x、y轴变动,我们让其改变相机在球面的位置 // console.log(e) let offsetX = (e.touches[0].clientX - originTouchX) rad = offsetX / cameraRL * Math.PI camera.position.x = Math.sin(originRad + rad) * cameraR camera.position.z = Math.cos(originRad + rad) * cameraR camera.lookAt(new THREE.Vector3(0, 0, 0)) renderer.render(scene, camera) }) document.body.addEventListener('touchend', (e) => { console.log(e) originRad = rad })
package com.momo.service.cache;

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.momo.common.util.LevelUtil;
import com.momo.common.util.RedisUtil;
import com.momo.mapper.dataobject.AclDO;
import com.momo.mapper.req.sysmain.DynamicMenuAuthorReq;
import com.momo.mapper.req.sysmain.RedisUser;
import com.momo.mapper.res.authority.AclLevelRes;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.Comparator;
import java.util.List;

/**
 * Builds the permission-filtered dynamic menu tree for the admin UI.
 *
 * @author momo (original header anonymized)
 * @since 2019/9/10
 */
@Service
@Slf4j
public class AdminAuthorityServiceCache {

    @Autowired
    private AdminSysCoreServiceCache adminSysCoreServiceCache;

    @Autowired
    private RedisUtil redisUtil;

    /**
     * Returns the menu tree containing only the ACL entries the user may see.
     * Disabled and soft-deleted entries are filtered out before tree assembly.
     */
    public List<AclLevelRes> dynamicMenuTree(DynamicMenuAuthorReq loginAuthReq, RedisUser redisUser) {
        List<AclDO> userAclList = adminSysCoreServiceCache.getUserAclList(loginAuthReq, redisUser);
        List<AclLevelRes> aclDtoList = Lists.newArrayList();
        for (AclDO acl : userAclList) {
            // Keep only entries that are enabled (disabledFlag == 0) and not deleted (delFlag == 0).
            if (acl.getDisabledFlag().equals(0) && acl.getDelFlag().equals(0)) {
                AclLevelRes dto = AclLevelRes.adapt(acl);
                dto.setHasAcl(true);
                dto.setDisabled(false);
                dto.setChecked(true);
                aclDtoList.add(dto);
            }
        }
        return aclListToTree(aclDtoList);
    }

    /**
     * Converts a flat list of ACL nodes into a tree using their level paths.
     */
    private List<AclLevelRes> aclListToTree(List<AclLevelRes> aclDtoList) {
        if (CollectionUtils.isEmpty(aclDtoList)) {
            return Lists.newArrayList();
        }
        // level -> [node1, node2, ...]
        // BUG FIX: a stray "Map<String, List<Object>>" token sequence preceded
        // this declaration in the original, making the file uncompilable; the
        // multimap below is the intended (and only) declaration.
        Multimap<String, AclLevelRes> levelDeptMap = ArrayListMultimap.create();
        List<AclLevelRes> rootList = Lists.newArrayList();
        for (AclLevelRes dto : aclDtoList) {
            levelDeptMap.put(dto.getSysAclLevel(), dto);
            if (LevelUtil.ROOT.equals(dto.getSysAclLevel())) {
                rootList.add(dto);
            }
        }
        // Sort the root nodes by seq, descending.
        rootList.sort((o1, o2) -> o2.getSysAclSeq() - o1.getSysAclSeq());
        // Recursively attach children level by level.
        transformDeptTree(rootList, LevelUtil.ROOT, levelDeptMap);
        return rootList;
    }

    // Example: level "0" has children at "0.1", "0.2", each of which may have
    // children of its own; recursion descends one level per call.
    private void transformDeptTree(List<AclLevelRes> deptLevelList, String level, Multimap<String, AclLevelRes> levelDeptMap) {
        for (AclLevelRes deptLevelDto : deptLevelList) {
            // Compute the level path under which this node's children are stored.
            String nextLevel = LevelUtil.calculateLevel(level, deptLevelDto.getId());
            // ArrayListMultimap.get returns a List at runtime, so this cast is safe
            // as long as the multimap remains an ArrayListMultimap.
            List<AclLevelRes> tempDeptList = (List<AclLevelRes>) levelDeptMap.get(nextLevel);
            if (CollectionUtils.isNotEmpty(tempDeptList)) {
                // Order siblings, attach them, then recurse into them.
                tempDeptList.sort(deptSeqComparator);
                deptLevelDto.setChildren(tempDeptList);
                transformDeptTree(tempDeptList, nextLevel, levelDeptMap);
            }
        }
    }

    // Orders sibling nodes by seq, descending.
    private Comparator<AclLevelRes> deptSeqComparator = (o1, o2) -> o2.getSysAclSeq() - o1.getSysAclSeq();
}
package istu.bacs.background.combined; import istu.bacs.background.combined.db.SubmissionService; import istu.bacs.db.submission.Submission; import istu.bacs.db.submission.Verdict; import istu.bacs.externalapi.ExternalApi; import istu.bacs.rabbit.QueueName; import istu.bacs.rabbit.RabbitService; import lombok.extern.slf4j.Slf4j; import org.slf4j.Logger; import org.springframework.stereotype.Component; import java.util.List; import static istu.bacs.background.combined.SubmissionSubmitterProcessor.PROCESSOR_NAME; import static istu.bacs.db.submission.Verdict.SCHEDULED; import static istu.bacs.rabbit.QueueName.SCHEDULED_SUBMISSIONS; import static istu.bacs.rabbit.QueueName.SUBMITTED_SUBMISSIONS; @Slf4j @Component(PROCESSOR_NAME) public class SubmissionSubmitterProcessor extends SubmissionProcessor { static final String PROCESSOR_NAME = "SubmissionSubmitterProcessor"; private final ExternalApi externalApi; public SubmissionSubmitterProcessor(SubmissionService submissionService, RabbitService rabbitService, ExternalApi externalApi) { super(submissionService, rabbitService); this.externalApi = externalApi; } @Override protected boolean process(List<Submission> submissions) { return externalApi.submit(submissions); } @Override protected Verdict incomingVerdict() { return SCHEDULED; } @Override protected QueueName incomingQueueName() { return SCHEDULED_SUBMISSIONS; } @Override protected QueueName outcomingQueueName() { return SUBMITTED_SUBMISSIONS; } @Override protected String processorName() { return PROCESSOR_NAME; } @Override protected Logger log() { return log; } }
#!/bin/bash # 获取要监控的本地服务器IP地址 IP=`ifconfig | grep inet | grep -vE 'inet6|127.0.0.1' | awk '{print $2}'` echo "IP地址:"$IP # 获取cpu总核数 cpu_num=`grep -c "model name" /proc/cpuinfo` echo "cpu总核数:"$cpu_num # 1、获取CPU利用率 ################################################ #us 用户空间占用CPU百分比 #sy 内核空间占用CPU百分比 #ni 用户进程空间内改变过优先级的进程占用CPU百分比 #id 空闲CPU百分比 #wa 等待输入输出的CPU时间百分比 #hi 硬件中断 #si 软件中断 ################################################# # 获取用户空间占用CPU百分比 cpu_user=`top -b -n 1 | grep Cpu | awk '{print $2}' | cut -f 1 -d "%"` echo "用户空间占用CPU百分比:"$cpu_user # 获取内核空间占用CPU百分比 cpu_system=`top -b -n 1 | grep Cpu | awk '{print $4}' | cut -f 1 -d "%"` echo "内核空间占用CPU百分比:"$cpu_system # 获取空闲CPU百分比 cpu_idle=`top -b -n 1 | grep Cpu | awk '{print $8}' | cut -f 1 -d "%"` echo "空闲CPU百分比:"$cpu_idle # 获取等待输入输出占CPU百分比 cpu_iowait=`top -b -n 1 | grep Cpu | awk '{print $10}' | cut -f 1 -d "%"` echo "等待输入输出占CPU百分比:"$cpu_iowait #2、获取CPU上下文切换和中断次数 # 获取CPU中断次数 cpu_interrupt=`vmstat -n 1 1 | sed -n 3p | awk '{print $11}'` echo "CPU中断次数:"$cpu_interrupt # 获取CPU上下文切换次数 cpu_context_switch=`vmstat -n 1 1 | sed -n 3p | awk '{print $12}'` echo "CPU上下文切换次数:"$cpu_context_switch #3、获取CPU负载信息 # 获取CPU15分钟前到现在的负载平均值 cpu_load_15min=`uptime | awk '{print $11}' | cut -f 1 -d ','` echo "CPU 15分钟前到现在的负载平均值:"$cpu_load_15min # 获取CPU5分钟前到现在的负载平均值 cpu_load_5min=`uptime | awk '{print $10}' | cut -f 1 -d ','` echo "CPU 5分钟前到现在的负载平均值:"$cpu_load_5min # 获取CPU1分钟前到现在的负载平均值 cpu_load_1min=`uptime | awk '{print $9}' | cut -f 1 -d ','` echo "CPU 1分钟前到现在的负载平均值:"$cpu_load_1min # 获取任务队列(就绪状态等待的进程数) cpu_task_length=`vmstat -n 1 1 | sed -n 3p | awk '{print $1}'` echo "CPU任务队列长度:"$cpu_task_length #4、获取内存信息 # 获取物理内存总量 mem_total=`free | grep Mem | awk '{print $2}'` echo "物理内存总量:"$mem_total # 获取操作系统已使用内存总量 mem_sys_used=`free | grep Mem | awk '{print $3}'` echo "已使用内存总量(操作系统):"$mem_sys_used # 获取操作系统未使用内存总量 mem_sys_free=`free | grep Mem | awk '{print $4}'` echo "剩余内存总量(操作系统):"$mem_sys_free # 获取应用程序已使用的内存总量 mem_user_used=`free | sed -n 3p | awk '{print 
$3}'` echo "已使用内存总量(应用程序):"$mem_user_used # 获取应用程序未使用内存总量 mem_user_free=`free | sed -n 3p | awk '{print $4}'` echo "剩余内存总量(应用程序):"$mem_user_free # 获取交换分区总大小 mem_swap_total=`free | grep Swap | awk '{print $2}'` echo "交换分区总大小:"$mem_swap_total # 获取已使用交换分区大小 mem_swap_used=`free | grep Swap | awk '{print $3}'` echo "已使用交换分区大小:"$mem_swap_used # 获取剩余交换分区大小 mem_swap_free=`free | grep Swap | awk '{print $4}'` echo "剩余交换分区大小:"$mem_swap_free #5、获取磁盘I/O统计信息 echo "指定设备(/dev/sda)的统计信息" # 每秒向设备发起的读请求次数 disk_sda_rs=`iostat -kx | grep sda| awk '{print $4}'` echo "每秒向设备发起的读请求次数:"$disk_sda_rs # 每秒向设备发起的写请求次数 disk_sda_ws=`iostat -kx | grep sda| awk '{print $5}'` echo "每秒向设备发起的写请求次数:"$disk_sda_ws # 向设备发起的I/O请求队列长度平均值 disk_sda_avgqu_sz=`iostat -kx | grep sda| awk '{print $9}'` echo "向设备发起的I/O请求队列长度平均值"$disk_sda_avgqu_sz # 每次向设备发起的I/O请求平均时间 disk_sda_await=`iostat -kx | grep sda| awk '{print $10}'` echo "每次向设备发起的I/O请求平均时间:"$disk_sda_await # 向设备发起的I/O服务时间均值 disk_sda_svctm=`iostat -kx | grep sda| awk '{print $11}'` echo "向设备发起的I/O服务时间均值:"$disk_sda_svctm # 向设备发起I/O请求的CPU时间百分占比 disk_sda_util=`iostat -kx | grep sda| awk '{print $12}'` echo "向设备发起I/O请求的CPU时间百分占比:"$disk_sda_util
#!/bin/bash set -e export DOCKER_CLI_EXPERIMENTAL="enabled" DOCKER_BUILD_CONTEXT="${DOCKER_BUILD_CONTEXT:-.}" DOCKER_FILE="${DOCKER_FILE:-${DOCKER_BUILD_CONTEXT}/Dockerfile}" if [[ -z "$DOCKER_TAGS" ]]; then echo "Set the DOCKER_TAGS environment variable." exit 1 fi if [[ -z "$DOCKER_USERNAME" ]]; then echo "Set the DOCKER_USERNAME environment variable." exit 1 fi if [[ -z "$DOCKER_PASSWORD" ]]; then echo "Set the DOCKER_PASSWORD environment variable." exit 1 fi if [[ -z "$GHCR_TOKEN" ]]; then echo "Set the GHCR_TOKEN environment variable." exit 1 fi if [[ -z "$GITHUB_REPOSITORY" ]]; then echo "Set the GITHUB_REPOSITORY environment variable." exit 1 fi GITHUB_OWNER=$(echo "${GITHUB_REPOSITORY}" | cut -d'/' -f1) if [[ -z "$DOCKER_IMAGE_VERSION" ]]; then echo "Set the DOCKER_IMAGE_VERSION environment variable." exit 1 fi export SOURCE="https://github.com/${GITHUB_REPOSITORY}" platforms="linux/amd64" # Login to Docker Hub echo "${DOCKER_PASSWORD}" | docker login -u "${DOCKER_USERNAME}" --password-stdin "docker.io" # Login to GitHub Container registry echo "${GHCR_TOKEN}" | docker login -u "${GITHUB_OWNER}" --password-stdin "ghcr.io" docker_base_repo="docker.io/${DOCKER_USERNAME}/fd" ghcr_base_repo="ghcr.io/${GITHUB_OWNER}/fd" IFS=',' read -ra TAGS <<< "$DOCKER_TAGS" for tag in "${TAGS[@]}"; do tag_command="${tag_command} --tag ${docker_base_repo}:${tag} --tag ${ghcr_base_repo}:${tag}" done created_date=`date --utc --rfc-3339=seconds` docker buildx build --platform "${platforms}" \ --label "org.opencontainers.image.source=${SOURCE}" \ --label "org.opencontainers.image.created=${created_date}" \ --output "type=image,push=true" \ ${tag_command} \ --file "${DOCKER_FILE}" \ "${DOCKER_BUILD_CONTEXT}" docker logout
#!/bin/sh jagen_pkg_install() { pkg_run install -vm755 hdparm "$pkg_install_dir/sbin" }
const path = require('path'); const CleanWebpackPlugin = require('clean-webpack-plugin'); const HtmlWebpackPlugin = require('html-webpack-plugin'); module.exports = { entry : { example : [ "babel-polyfill" , path.resolve( __dirname, 'example/index.js' ) ] }, output : { path : path.resolve( __dirname, 'public' ), filename : 'js/[name].js', chunkFilename : 'js/[id].[chunkhash].js', }, module: { rules : [ { test : /\.(js|jsx)$/, include : [ path.resolve(__dirname, "example") ], exclude : [ path.resolve(__dirname, "node_modules") ], use : { loader : 'babel-loader' } } ] }, plugins : [ new CleanWebpackPlugin( [ 'public' ] ), new HtmlWebpackPlugin( { title : `Versión : ${process.env.VERSION_APP}`, filename : 'index.html', template : 'example/index.ejs' } ) ] };
#include "glibc_cxx_wrap/sys/cxx_inotify.h" #include "glibc_cxx_wrap/sys/cxx_mman.h" #include "glibc_cxx_wrap/sys/cxx_stat.h" #include "glibc_cxx_wrap/sys/cxx_epoll.h" #include "glibc_cxx_wrap/sys/cxx_timerfd.h" #include "glibc_cxx_wrap/macros.h" #include "glibc_cxx_wrap/cxx_time.h" #include "glibc_cxx_wrap/13_low_level_io.h" #include "glibc_cxx_wrap/14_file_system_interface.h" /* test_1.cc test_2.cc 测试有 2 个目的: * 1. 测试是否有语法问题. * 2. 是否忘记追加 inline 属性;若某个函数忘记追加 inline 属性,则最后在链接时会提示符号重定义. */
import time def optimize_power(Tempmin, Tempmax, Optimization, df, delta_1st_pass, number_rows): """ Function to optimize power consumption based on temperature ranges and specified optimization method. Parameters: - Tempmin: Minimum temperature range - Tempmax: Maximum temperature range - Optimization: Optimization method ("power" in this case) - df: Dataframe containing relevant data - delta_1st_pass: Delta value for first pass optimization - number_rows: Number of rows in the dataframe Returns: - Result of power optimization calculation """ if Tempmin == "" or Tempmax == "": print("Please provide a temperature range") exit() else: if Optimization == "power": print("*********Performing Power Optimization*********") time.sleep(5) return calculate_min_error_new(df, delta_1st_pass, number_rows)
git clone -b gh-pages "https://stellar-jenkins@github.com/stellar/js-stellar-base.git" jsdoc if [ ! -d "jsdoc" ]; then echo "Error cloning" exit 1 fi jsdoc -c .jsdoc.json --verbose cd jsdoc git add . git commit -m $TRAVIS_TAG git push origin gh-pages
<filename>src/part-strip.js import './part-strip.scss'; import React, { memo } from 'react'; import { SELECT_PART } from './actions'; import { getContrastColor } from './ui-helper'; export default memo(function PartStrip({ doc, partHeight }) { return ( <div className="PartStrip" data-role="part-strip" data-doc-id={doc.id} > <div className="PartStrip-parts" style={{ height: partHeight, gridTemplateColumns: doc.work.parts.map(part => `${part.length}fr`).join(' ') }} > {doc.work.parts.map((part, index) => ( <div className="PartStrip-partOuter" key={part.id} data-parent-doc-id={doc.id} data-part-id={part.id} data-possible-action={SELECT_PART} style={{ gridRow: 1, gridColumn: index + 1, backgroundColor: part.color, color: getContrastColor(part.color) }} > <div className="PartStrip-partInner"> <div className="PartStrip-partName">{part.name}</div> </div> </div> ))} </div> </div> ); });
import string import random def generate_password(length): chars = string.ascii_uppercase + string.ascii_lowercase + string.digits password = '' for i in range(length): password += random.choice(chars) return password
from django.db import models from django.conf import settings from django.utils.translation import get_language from django.utils.translation import ugettext as _ from django.utils.encoding import force_unicode, smart_str, smart_unicode from django.forms.fields import Field from django.forms import ValidationError from widgets import TransCharWidget, TransTextWidget def get_default_language_name(): ''' Get language from default language specified by LANGUAGE_CODE in settings Used in error messages ''' lang_name = '' for lang in settings.LANGUAGES: if lang[0] == settings.LANGUAGE_CODE: lang_name = lang[1] break return force_unicode(lang_name) class TransDbValue(unicode): ''' This class implements a unicode string, but with a hidden attribute raw_data. When used as a string it returns the translation of the current language raw_data attribute stores a dictionary with all translations Also implements a method "get_in_language(language)" that returns the translation on any available language ''' raw_data = {} def get_in_language(self, language): if self.raw_data and self.raw_data.has_key(language): return self.raw_data[language] else: return u'' def set_in_language(self, language, value): self.raw_data[language] = value class TransFormField(Field): ''' forms field, used when ModelForm (or deprecated form_for_model/form_form_instance) is called Also implements form validation in admin ''' def clean(self, value): if isinstance(value, dict) and self.required: filled_value = [ v for v in value.values() if bool(v) ] if not filled_value: raise ValidationError, _("This field is required.") return super(TransFormField, self).clean(value) class TransField(models.Field): ''' Model field to be subclassed Used for storing a string in many languages at database (with python's dictionary format) pickle module could be used, but wouldn't alow search on fields? 
''' def get_internal_type(self): return 'TextField' def to_python(self, value): if isinstance(value, TransDbValue): return value if isinstance(value, dict): # formfield method makes this function be called with value as a dict python_value = value else: try: python_value = eval(value) for k,v in python_value.items(): python_value[k] = smart_unicode(v) except Exception: python_value = None if isinstance(python_value, dict): if python_value.has_key(get_language()) and python_value[get_language()]: result = TransDbValue(python_value[get_language()]) elif python_value.has_key(settings.LANGUAGE_CODE) and python_value[settings.LANGUAGE_CODE]: result = TransDbValue(python_value[settings.LANGUAGE_CODE]) else: val = "bal" for item in python_value.items(): try: val = item[1] except: pass if val: break result = TransDbValue(python_value.items()[0][1]) result.raw_data = python_value else: result = TransDbValue(value) result.raw_data = {settings.LANGUAGE_CODE: value} return result def get_db_prep_save(self, value): if not isinstance(value, TransDbValue): return value value = [u"'%s': '''%s'''" % (k, v) for k, v in value.raw_data.items()] value = u'{%s}' % u','.join(value) return smart_str(value) def formfield(self, **kwargs): defaults = {'form_class': TransFormField} defaults.update(kwargs) return super(TransField, self).formfield(**defaults) def flatten_data(self, follow, obj=None): ''' for serializing objects ''' raw_data = self._get_val_from_obj(obj).raw_data.copy() for k,v in raw_data.items(): raw_data[k] = smart_str(v) return {self.attname: raw_data} class TransCharField(TransField): ''' TransField used with CharField widget ''' __metaclass__ = models.SubfieldBase def formfield(self, **kwargs): kwargs['widget'] = TransCharWidget return super(TransCharField, self).formfield(**kwargs) class TransTextField(TransField): ''' TransField used with CharField widget ''' __metaclass__ = models.SubfieldBase def formfield(self, **kwargs): kwargs['widget'] = TransTextWidget return 
super(TransTextField, self).formfield(**kwargs)
<reponame>xeon225/CyanMD<gh_stars>1-10 'use strict'; import merge from '../util/merge.js'; /** * 运行 Web Worker * @function registerWebWorker * @param {Object} [options={}] * @param {String} options.file * @return {Promise} 返回一个 Promise 对象,在 resolve 时传入生成的 Web Worker 实例 * */ function registerWebWorker(options={}){ let config = merge(options, registerWebWorker._CONFIG) ; return new Promise((resolve, reject)=>{ if( 'Worker' in self ){ if( !(config.file in registerWebWorker._WORKER_CACHE) ){ registerWebWorker._WORKER_CACHE[config.file] = new Worker( config.file ); } resolve( registerWebWorker._WORKER_CACHE[config.file] ); } else{ reject( new Error('您的浏览器不支持 Web Worker') ); } }); } registerWebWorker._WORKER_CACHE = {}; registerWebWorker._CONFIG = { file: 'ww.js' }; export default registerWebWorker;
#!/bin/bash ################################################################################ # Copyright (c) 2021 Arsene Temfack # # # # SPDX-License-Identifier: MIT # ################################################################################ echo -e "\n[-- Generate Token --] Generate and save cluster join command to /joincluster.sh" kubeadm token create --print-join-command > /home/vagrant/joincluster.sh chown $(id -u):$(id -g) /home/vagrant/joincluster.sh # Reference: # https://kubernetes.io/docs/setup/production-environment/tools/kubeadm/create-cluster-kubeadm/#join-nodes
#!/bin/bash FILE=${1?Error: what to put} TEST=${FILE:0:10} if [ $TEST == "/Fizilion/" ]; then MFILE=$FILE else MFILE="/Fizilion/" MFILE+="${FILE}" fi expect -c " spawn sftp $SFUSER@frs.sourceforge.net #expect \"yes/no\" #send \"yes\r\" expect \"Password:\" send \"$SFPASS\r\" expect \"sftp> \" send \"cd $SFDIR\r\" set timeout -1 send \"put $MFILE\r\" expect \"Uploading\" expect \"100%\" expect \"sftp>\" interact" rm -rf .ssh/known_hosts
import { CommandKeyword } from "../enum/commandKeyword"; import { StringNumberArray } from "../index"; export class Sample { private key: string; private value: number; private timestamp: string | number; constructor(key: string, value: number, timestamp?: number) { this.setKey(key); this.setValue(value); this.setTimestamp(timestamp); } public getKey(): string { return this.key; } public setKey(key: string): Sample { this.key = key; return this; } public getValue(): number { return this.value; } public setValue(value: number): Sample { this.value = value; return this; } public getTimestamp(): string | number { return this.timestamp; } public setTimestamp(timestamp?: number): Sample { this.timestamp = this.validateTimestamp(timestamp); return this; } public flatten(): StringNumberArray { return [this.getKey(), this.getTimestamp(), this.getValue()]; } protected validateTimestamp(timestamp?: number): string | number { if (timestamp == null) { return CommandKeyword.CURRENT_TIMESTAMP; } timestamp = Math.trunc(timestamp); if (this.isValidTimestamp(timestamp)) { return timestamp; } throw new Error(`wrong timestamp: ${timestamp}`); } protected isValidTimestamp(timestamp: number): boolean { return new Date(timestamp).getTime() >= 0; } }
// metafilter
// These special filters "narrow" existing filters.
// e.g. with "Visual Pinball X" filtered as system, a regular "3 stars" filter
// added afterwards would again show all 3-star games (incl. FX3, FP, ...).
// Using a meta filter instead keeps the "only Visual Pinball X" list and
// additionally removes everything below 3 stars.

// Filter IDs are saved in globals so the filters can be removed later.
var meta_3star_id;
var meta_4star_id;
var meta_notrated_id;

let RemoveMetaFilter = command.allocate("removeMeta"); // Remove all metafilters
let MetaFilterSub = command.allocate("MetaFilterSub"); // "Entrance" into the submenu

// Add our two entries to the main menu.
mainWindow.on("menuopen", ev => {
    if (ev.id == "main") {
        ev.addMenuItem({after: command.RateGame}, // add item after "Rate Game"
            { title: "Remove Metafilter", cmd: RemoveMetaFilter });
        ev.addMenuItem({ after: command.PlayGame }, // submenu trigger, right after "Play Game"
            { title: "Set MetaFilter", cmd: MetaFilterSub });
    }
});

// Commands for the three submenu choices.
let Meta_sub_unrated = command.allocate("unrated");
let Meta_sub_4starplus = command.allocate("4star");
let Meta_sub_3starplus = command.allocate("3star");

// Show the submenu when its entry is selected.
mainWindow.on("command", ev => {
    if (ev.id == MetaFilterSub) {
        mainWindow.showMenu("custom.ShowMetaFilter", [
            { title: "Only unrated tables", cmd: Meta_sub_unrated },
            { title: ">=3 stars", cmd: Meta_sub_3starplus },
            { title: ">=4 stars", cmd: Meta_sub_4starplus },
            { cmd: -1 }, // separator line
            { title: "Remove MetaFilter", cmd: RemoveMetaFilter },
            { cmd: -1 }, // separator line
            { title: "Cancel", cmd: command.MenuReturn } // leave submenu
        ]);
    }
});

// Apply or remove the selected meta filter.
mainWindow.on("command", ev => {
    if (ev.id == RemoveMetaFilter) {
        // NOTE(review): IDs of filters that were never created are undefined
        // here; this mirrors the original behaviour.
        gameList.removeMetaFilter(meta_3star_id);
        gameList.removeMetaFilter(meta_4star_id);
        gameList.removeMetaFilter(meta_notrated_id);
    }
    if (ev.id == Meta_sub_unrated) {
        meta_notrated_id = gameList.createMetaFilter({
            includeExcluded: false,
            priority: 100000, // high priority so we get the last say
            select: function(game, included) {
                // Keep only games that have no rating at all.
                return game.rating == -1;
            }
        });
    }
    if (ev.id == Meta_sub_3starplus) {
        // BUG FIX: this handler stored its filter ID in meta_4star_id, so
        // meta_3star_id stayed undefined and "Remove Metafilter" could never
        // remove the >=3-star filter (and a later >=4-star filter silently
        // overwrote the ID).
        meta_3star_id = gameList.createMetaFilter({
            includeExcluded: false,
            priority: 100000, // high priority so we get the last say
            select: function(game, included) {
                return game.rating >= 3;
            }
        });
    }
    if (ev.id == Meta_sub_4starplus) {
        meta_4star_id = gameList.createMetaFilter({
            includeExcluded: false,
            priority: 100000, // high priority so we get the last say
            select: function(game, included) {
                return game.rating >= 4;
            }
        });
    }
});
package template import ( "bytes" "encoding/json" "strings" "text/template" ) // basicFunctions are the set of initial // functions provided to every template. var basicFunctions = template.FuncMap{ //nolint:gochecknoglobals "json": func(v interface{}) string { buf := &bytes.Buffer{} enc := json.NewEncoder(buf) enc.SetEscapeHTML(false) _ = enc.Encode(v) // Remove the trailing new line added by the encoder return strings.TrimSpace(buf.String()) }, "split": strings.Split, "join": strings.Join, "title": strings.Title, "lower": strings.ToLower, "upper": strings.ToUpper, } // New returns a new named template.Text with the basic functions and parses text as template. func New(name, text string) (*template.Template, error) { return template.New(name).Funcs(basicFunctions).Parse(text) }
#!/bin/bash set -e # Remove the local sitespeed-result dir and node modules to start clean rm -fR sitespeed-result # Login early docker login # Super simple release script for sitespeed.io # Lets use it it for now and make it better over time :) # You need np for this to work # npm install --global np np $* --no-yarn --branch main # Update to latest version in the docs bin/sitespeed.js --version | tr -d '\n' > docs/_includes/version/sitespeed.io.txt # Generate the help for the docs bin/sitespeed.js --help > docs/documentation/sitespeed.io/configuration/config.md # Generate friendly names from code node release/friendlyNames.js > docs/documentation/sitespeed.io/configure-html/friendlynames.md node release/friendlyNamesBudget.js > docs/documentation/sitespeed.io/performance-budget/friendlynames.md # Generate the RSS feeds node release/feed.js
import random # Number of points in the circle N = 1000 # Number of points in quadrant inside_circle = 0 for x in range(N): rand_x = random.random() rand_y = random.random() # Distance formula between (0,0) and (rand_x,rand_y) dist = ((rand_x**2) + (rand_y**2))**0.5 # If within unit circle if dist < 1: inside_circle = inside_circle + 1 # Approximation of pi pi = (4 * (inside_circle/N)) print("Value of pi: %f" %pi)
addPkgConfigPath () { addToSearchPath PKG_CONFIG_PATH $1/lib/pkgconfig addToSearchPath PKG_CONFIG_PATH $1/share/pkgconfig } if test -n "$crossConfig"; then crossEnvHooks+=(addPkgConfigPath) else envHooks+=(addPkgConfigPath) fi
#!/bin/bash # ~/bin/build-polycode mkdir -p dependencies/Build/Debug dependencies/Build/Release cd dependencies/Build/Debug cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE=Debug ../.. make cd ../Release cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE=Release ../.. make
#!/bin/bash enable_kernel_module () { local s_dir="$1" local d_dir="$2" echo "Load vrouter.ko $s_dir for kernel $d_dir" mkdir -p /lib/modules/$d_dir/kernel/net/vrouter cp -f /opt/contrail/vrouter-kernel-modules/$s_dir/vrouter.ko /lib/modules/$d_dir/kernel/net/vrouter/ depmod -a $d_dir } get_vrouter_dirs () { local path=$1 find "$path" -type f -name "vrouter.ko" } get_kernel_dirs () { local path=$1 find "$path" -type d -name "*.x86_64" } get_lists_modules_versions () { local list_dirs=$1 echo "$list_dirs" | awk -F "/" '{print($(NF-1))}' | sed 's/\.el/ el/' | sort -V | sed 's/ /./1' } get_lists_kernels_versions () { local list_dirs=$1 echo "$list_dirs_kernels" | awk -F "/" '{print $NF}' } install_kernel_modules () { local modules=$1 local kernels=$2 local sorted_list local kernel_prefix_regex local d for d in $kernels ; do # Enable module if we have equal version if echo "$modules" | grep -q "$d" ; then enable_kernel_module "$d" "$d" continue fi kernel_prefix_regex="^$(echo $d | cut -d. -f1,2,3)" # Check if minor version modules exist. If not - use major version if ! echo "$modules" | grep $kernel_prefix_regex | grep -vq "$d" ; then kernel_prefix_regex="^$(echo $d | cut -d. -f1,2)" fi # Add OS kernel version to list of available and sort them sorted_list=$(echo -e "${modules}\n${d}" | grep $kernel_prefix_regex | sed 's/\.el/ el/' | sort -V | sed 's/ /./1') if ! echo "$sorted_list" | grep -B1 -A1 "$d" | grep -vq "$d" ; then # Enable first installed module if current kernel is upper all modules that we have enable_kernel_module $(echo "$modules" | grep $kernel_prefix_regex | head -1) "$d" else # Enable upper version kernel module if exists or lower version if not enable_kernel_module $(echo "$sorted_list" | grep -B1 -A1 "$d" | grep -v "$d" | head -1 ) "$d" fi done }
from typing import List


def countCommits(repoName: str, startTime: int, endTime: int, commitTimestamps: List[int]) -> int:
    """Count commits whose timestamp lies in the inclusive window [startTime, endTime].

    Args:
        repoName: Name of the repository (not used by the computation).
        startTime: Inclusive lower bound of the time window.
        endTime: Inclusive upper bound of the time window.
        commitTimestamps: Timestamps of the commits to examine.

    Returns:
        The number of timestamps t with startTime <= t <= endTime.
    """
    return sum(1 for ts in commitTimestamps if startTime <= ts <= endTime)
import React from "react"; import styled from "styled-components"; import Card from "./lib/Card"; import Spinner from "./lib/Spinner"; import Checkmark from "./lib/Checkmark"; const Wrapper = styled.div` width: 100%; display: flex; max-width: 780px; flex-direction: column; `; const LoadingItemWrapper = styled(Card)` font-size: 18px; padding: 2em; display: flex; width: calc(100% - 4em); color: #3d5873; justify-content: space-between; align-items: center; margin-top: 20px; `; const LoadingItem = ({ message, loading }) => ( <LoadingItemWrapper> <span>{message}</span> {loading ? <Spinner /> : <Checkmark />} </LoadingItemWrapper> ); const LoadingResults = ({ verifyLoading, verified, timestampLoading, timestamped, loadingChainVerify, chainVerified }) => { return ( <Wrapper> {verifyLoading || verified ? ( <LoadingItem message={verified ? "Digests verified" : "Verifying digests"} loading={verifyLoading} /> ) : null} {timestampLoading || timestamped ? ( <LoadingItem message={ timestamped ? "Uploaded new digests" : "Uploading new digests" } loading={timestampLoading} /> ) : null} </Wrapper> ); }; export default LoadingResults;
# Commit the site source, build with Hugo, archive, upload, and publish to IPFS.
git add .
git commit
git push

# makes getting the password easier
/d/portable\ applications/windows/keepass/keepass_classic/KeePass.exe &

Hugo --cleanDestinationDir --destination "public_html/"
rm -R ./public_html/*.ts # cut down on archive space
tar -zcvf 404.tgz public_html

# as security concern dont write the password here because it would be on git hub than anyone can access it.
scp 404.tgz jadonbel@jadonbelezos.com:/home/jadonbel/
ssh jadonbel@jadonbelezos.com "./deploy.sh"

# BUG FIX: the original read "HASH= $(...)" — the space after "=" assigns an
# empty string and then tries to EXECUTE the command substitution's output.
# Shell assignments must have no whitespace around "=".
HASH=$(ipfs.sh add -qr --only-hash /d/website_source/public_html/ | tail -n 1)
ipfs.sh name publish "$HASH"

echo "publishing to pinata";
# BUG FIX: the original was "read $LOGINKEY", which expands the (unset)
# variable and stores the typed key in REPLY, leaving $LOGINKEY empty below.
# `read` takes the variable NAME; -r keeps backslashes in the key literal.
read -r LOGINKEY
ipfs.sh pin remote add "$HASH" "$LOGINKEY"
# Load data.sql into the local wiki_test database via input redirection
# (same effect as piping through cat, without the extra process).
mysql --user=root --password=rootPass --host=localhost --database=wiki_test < ./data.sql
<reponame>oldmonkABA/optimal_histogram_bin_width ####################################################################################################################### #Author : Dr. <NAME> # #Based on : <NAME>. and <NAME>., A method for selecting the bin size of a time histogram Neural Computation (2007) # Vol. 19(6), 1503-1527 # #Data : The duration for eruptions of the Old Faithful geyser in Yellowstone National Park (in minutes) # or normal distribuition. # given at http://192.168.127.12/~hideaki/res/histogram.html # #Comments : Implements a faster version than using hist from matplotlib and histogram from numpy libraries # Also implements the shifts for the bin edges # ######################################################################################################################## import numpy as np from numpy.random import normal from scipy import linspace import array from matplotlib import rcParams from matplotlib.pyplot import figure, plot, xlabel, ylabel,\ title, show, savefig, hist data = normal(0, 1, 100000) #Data placeholder.. 
Use this to input your data #data = [4.37,3.87,4.00,4.03,3.50,4.08,2.25,4.70,1.73,4.93,1.73,4.62,\ #3.43,4.25,1.68,3.92,3.68,3.10,4.03,1.77,4.08,1.75,3.20,1.85,\ #4.62,1.97,4.50,3.92,4.35,2.33,3.83,1.88,4.60,1.80,4.73,1.77,\ #4.57,1.85,3.52,4.00,3.70,3.72,4.25,3.58,3.80,3.77,3.75,2.50,\ #4.50,4.10,3.70,3.80,3.43,4.00,2.27,4.40,4.05,4.25,3.33,2.00,\ #4.33,2.93,4.58,1.90,3.58,3.73,3.73,1.82,4.63,3.50,4.00,3.67,\ #1.67,4.60,1.67,4.00,1.80,4.42,1.90,4.63,2.93,3.50,1.97,4.28,\ #1.83,4.13,1.83,4.65,4.20,3.93,4.33,1.83,4.53,2.03,4.18,4.43,\ #4.07,4.13,3.95,4.10,2.27,4.58,1.90,4.50,1.95,4.83,4.12] data_max = max(data) #lower end of data data_min = min(data) #upper end of data n_min = 2 #Minimum number of bins Ideal value = 2 n_max = 200 #Maximum number of bins Ideal value =200 n_shift = 30 #number of shifts Ideal value = 30 N = np.array(range(n_min,n_max)) D = float(data_max-data_min)/N #Bin width vector Cs = np.zeros((len(D),n_shift)) #Cost function vector #Computation of the cost function for i in xrange(np.size(N)): shift = linspace(0,D[i],n_shift) for j in xrange(n_shift): edges = linspace(data_min+shift[j]-D[i]/2,data_max+shift[j]-D[i]/2,N[i]+1) # shift the Bin edges binindex = np.digitize(data,edges) #Find binindex of each data point ki=np.bincount(binindex)[1:N[i]+1] #Find number of points in each bin k = np.mean(ki) #Mean of event count v = sum((ki-k)**2)/N[i] #Variance of event count Cs[i,j]+= (2*k-v)/((D[i])**2) #The cost Function C=Cs.mean(1) #Optimal Bin Size Selection loc = np.argwhere(Cs==Cs.min())[0] cmin = C.min() idx = np.where(C==cmin) idx = idx[0][0] optD = D[idx] print 'Optimal Bin Number :',N[idx] print 'Optimal Bin Width :',optD #Plot edges = linspace(data_min+shift[loc[1]]-D[idx]/2,data_max+shift[loc[1]]-D[idx]/2,N[idx]+1) rcParams.update({'figure.autolayout': True}) fig = figure() ax = fig.add_subplot(111) ax.hist(data,edges) title(u"Histogram") ylabel(u"Frequency") xlabel(u"Value") savefig('Hist.png') fig = figure() plot(N,C,'.b',N[idx],cmin,'*r') 
xlabel('Number of bins') ylabel('Cobj') savefig('Fobj.png')
<filename>discarded/launcher-v1-executable/stop.go package main import ( "bufio" "fmt" "os" "os/exec" "path/filepath" "runtime" ) func main() { dir, _ := filepath.Abs(filepath.Dir(os.Args[0])) path := dir + string(os.PathSeparator) + "service-manager." + runtime.GOOS fmt.Println(path) cmd := exec.Command(path, "-stop") cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr err := cmd.Run() if err != nil { fmt.Println(err) } fmt.Println("Enter to exit...") bufio.NewReader(os.Stdin).ReadBytes('\n') }
#include <iostream>

// Placeholder collaborator for the callback under test; left empty until a
// real message-loop implementation is needed.
class MessageLoopHelper {
    // Define the MessageLoopHelper class if necessary
};

// Minimal callback object that reports completion of a file I/O operation.
class FileCallbackTest {
public:
    // id: identifier of this callback instance.
    // helper: message-loop collaborator (not owned).
    // max_id: pointer to an externally tracked maximum id (not owned).
    FileCallbackTest(int id, MessageLoopHelper* helper, int* max_id)
        : id_(id), helper_(helper), max_id_(max_id) {}

    // Called when a file I/O operation finishes; logs the byte count.
    virtual void OnFileIOComplete(int bytes_copied) {
        std::cout << "File I/O operation completed. Bytes copied: " << bytes_copied << std::endl;
    }

private:
    int id_;
    MessageLoopHelper* helper_;
    int* max_id_;
};

// Demonstrates constructing a FileCallbackTest and driving one completion.
int main() {
    int highest_id = 100;
    MessageLoopHelper loop_helper;
    FileCallbackTest callback(1, &loop_helper, &highest_id);

    // Simulate a file I/O completion of 1024 bytes.
    callback.OnFileIOComplete(1024);
    return 0;
}