text stringlengths 1 1.05M |
|---|
<gh_stars>0
package com.coltsoftware.liquidsledgehammer.cmd;
import static org.junit.Assert.*;
import java.io.PrintStream;
import org.junit.Before;
import org.junit.Test;
import com.coltsoftware.liquidsledgehammer.cmd.Arguments;
import static org.mockito.Mockito.*;
/**
 * Tests for Context: factories registered under a command-line switch name
 * must be invoked with the values parsed for that switch, and the usage
 * output must list every registered factory.
 */
public final class ContextTests extends ContextTestBase {

    // Arguments shared by most tests: one value for each registered switch.
    private Arguments args;
    // Mock factories registered under "-jsonin" and "-dbin" respectively.
    private SourceFactory mock1;
    private SourceFactory mock2;

    @Before
    public void setUp() {
        // createMockSourceFactory(n, usage) comes from ContextTestBase;
        // presumably n is the number of sources the mock yields and usage is
        // its help text -- TODO confirm against ContextTestBase.
        mock1 = createMockSourceFactory(1, "usage1");
        mock2 = createMockSourceFactory(2, "usage2");
        Context.registerSourceFactory("jsonin", mock1);
        Context.registerSourceFactory("dbin", mock2);
        args = Arguments.fromString("-jsonin thePath -dbin path2");
    }

    @Test
    public void can_create_with_json_input_source_specified() {
        Context context = Context.fromArgs(args);
        assertNotNull(context);
    }

    @Test(expected = ContextException.class)
    public void cant_create_without_json_input_source_specified() {
        // Switch names do not match any registered factory.
        Arguments invalidArgs = Arguments
                .fromString("-jsonin2 thePath -dbin2 path2");
        Context.fromArgs(invalidArgs);
    }

    @Test
    public void has_sources_from_single_factory() {
        Context context = Context.fromArgs(Arguments.fromString("-jsonin a"));
        assertEquals(1, context.getSources().size());
    }

    @Test
    public void has_sources_from_two_factories() {
        Context context = Context.fromArgs(Arguments
                .fromString("-jsonin a -dbin b"));
        // 3 = 1 source from mock1 + 2 sources from mock2.
        assertEquals(3, context.getSources().size());
    }

    @Test
    public void sources_are_same_each_call() {
        // getSources() must cache and return the same collection instance.
        Context context = Context.fromArgs(args);
        assertSame(context.getSources(), context.getSources());
    }

    @Test
    public void mocks_were_given_path() {
        Context.fromArgs(args);
        // Each factory receives only the values parsed for its own switch.
        verify(mock1).getSources(new String[] { "thePath" }, args);
        verify(mock2).getSources(new String[] { "path2" }, args);
    }

    @Test
    public void print_sources_for_usage() {
        PrintStream printStream = mock(PrintStream.class);
        Context.printSources(printStream);
        verify(printStream).println("  -jsonin usage1");
        verify(printStream).println("  -dbin usage2");
    }
}
|
<filename>pkg/framework/pod.go<gh_stars>1-10
//
// Provides builders and helper methods for preparing Pods and nested Containers
//
package framework
import (
"bytes"
"context"
"fmt"
"github.com/pkg/errors"
"github.com/rh-messaging/shipshape/pkg/framework/log"
"io"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"time"
"k8s.io/client-go/kubernetes/scheme"
"k8s.io/client-go/tools/remotecommand"
)
//
// PodBuilder
//
// PodBuilder incrementally assembles a v1.Pod; obtain the result via Build.
type PodBuilder struct {
	pod *v1.Pod // the Pod under construction
}
// NewPodBuilder creates a PodBuilder for a Pod with the given name and
// namespace; Spec and Status start out as their zero values.
func NewPodBuilder(name string, namespace string) *PodBuilder {
	builder := &PodBuilder{pod: new(v1.Pod)}
	builder.pod.Name = name
	builder.pod.Namespace = namespace
	builder.pod.Spec = v1.PodSpec{}
	builder.pod.Status = v1.PodStatus{}
	return builder
}
// NewContainerBuilder creates a ContainerBuilder for a container with the
// given name and image. Termination messages fall back to the container log
// tail when no message file is written.
func NewContainerBuilder(name string, image string) *ContainerBuilder {
	return &ContainerBuilder{
		c: v1.Container{
			Name:                     name,
			Image:                    image,
			TerminationMessagePolicy: v1.TerminationMessageFallbackToLogsOnError,
		},
	}
}
// AddLabel adds or replaces the given label key and value on the Pod.
func (p *PodBuilder) AddLabel(key, value string) *PodBuilder {
	labels := p.pod.Labels
	if labels == nil {
		labels = map[string]string{}
		p.pod.Labels = labels
	}
	labels[key] = value
	return p
}
// AddContainer adds a container to the Pod being prepared.
func (p *PodBuilder) AddContainer(c v1.Container) *PodBuilder {
	// append handles a nil slice, so the explicit empty-slice initialization
	// the previous version performed is unnecessary.
	p.pod.Spec.Containers = append(p.pod.Spec.Containers, c)
	return p
}
// AddConfigMapVolumeSource appends a Volume with a local reference to a
// ConfigMap into the Pod Spec.
func (p *PodBuilder) AddConfigMapVolumeSource(name string, configMapName string) *PodBuilder {
	v := v1.Volume{
		Name: name,
		VolumeSource: v1.VolumeSource{
			ConfigMap: &v1.ConfigMapVolumeSource{
				LocalObjectReference: v1.LocalObjectReference{
					Name: configMapName,
				},
			},
		},
	}
	// append handles a nil slice, so no explicit initialization is needed.
	p.pod.Spec.Volumes = append(p.pod.Spec.Volumes, v)
	return p
}
// RestartPolicy sets the Pod's RestartPolicy. Unrecognized values fall back
// to Never, which is also the default.
func (p *PodBuilder) RestartPolicy(policy string) *PodBuilder {
	chosen := v1.RestartPolicyNever
	switch v1.RestartPolicy(policy) {
	case v1.RestartPolicyAlways, v1.RestartPolicyNever, v1.RestartPolicyOnFailure:
		chosen = v1.RestartPolicy(policy)
	}
	p.pod.Spec.RestartPolicy = chosen
	return p
}
// Build returns the prepared Pod instance.
func (p *PodBuilder) Build() *v1.Pod {
	return p.pod
}
//
// ContainerBuilder
//
// ContainerBuilder incrementally assembles a v1.Container; obtain the result
// via Build.
type ContainerBuilder struct {
	c v1.Container // the Container under construction
}
// WithCommands sets the command list for the new container. The variadic
// arguments replace any previously set command (they are not appended).
func (cb *ContainerBuilder) WithCommands(commands ...string) *ContainerBuilder {
	cb.c.Command = commands
	return cb
}
// AddArgs appends the given arguments to the container's existing argument
// list.
func (cb *ContainerBuilder) AddArgs(args ...string) *ContainerBuilder {
	// append handles a nil slice, so the explicit empty-slice initialization
	// the previous version performed is unnecessary.
	cb.c.Args = append(cb.c.Args, args...)
	return cb
}
// EnvVar adds an environment variable with the given name and value to the
// container.
func (cb *ContainerBuilder) EnvVar(variable, value string) *ContainerBuilder {
	// append handles a nil slice, so no explicit initialization is needed.
	cb.c.Env = append(cb.c.Env, v1.EnvVar{
		Name:  variable,
		Value: value,
	})
	return cb
}
// ImagePullPolicy sets the ImagePullPolicy for the container. Unrecognized
// values fall back to PullAlways, which is also the default.
func (cb *ContainerBuilder) ImagePullPolicy(policy string) *ContainerBuilder {
	chosen := v1.PullAlways
	switch v1.PullPolicy(policy) {
	case v1.PullIfNotPresent, v1.PullNever, v1.PullAlways:
		chosen = v1.PullPolicy(policy)
	}
	cb.c.ImagePullPolicy = chosen
	return cb
}
// AddVolumeMountConfigMapData adds a VolumeMount entry to the container; the
// volumeName must match a Volume defined in the enclosing Pod Spec.
func (cb *ContainerBuilder) AddVolumeMountConfigMapData(volumeName string, mountPath string, readOnly bool) *ContainerBuilder {
	vm := v1.VolumeMount{
		Name:      volumeName,
		ReadOnly:  readOnly,
		MountPath: mountPath,
	}
	// append handles a nil slice, so no explicit initialization is needed.
	cb.c.VolumeMounts = append(cb.c.VolumeMounts, vm)
	return cb
}
// Build returns the prepared Container to be used within a Pod.
func (cb *ContainerBuilder) Build() v1.Container {
	return cb.c
}
// GetPodName resolves the single pod whose "name" label equals the given
// label value. Zero matches or more than one match is an error; for the
// too-many case every candidate name is logged first.
func (c *ContextData) GetPodName(label string) (string, error) {
	opts := metav1.ListOptions{LabelSelector: "name=" + label}
	podList, err := c.Clients.KubeClient.CoreV1().Pods(c.Namespace).List(opts)
	if err != nil {
		return "", err
	}
	switch len(podList.Items) {
	case 0:
		return "", fmt.Errorf("no pod with label %s found", label)
	case 1:
		return podList.Items[0].Name, nil
	default:
		for _, pod := range podList.Items {
			log.Logf("Contains %s", pod.Name)
		}
		return "", fmt.Errorf("too many pods with label %s found", label)
	}
}
// GetLogs returns the whole log of the named pod as one string.
func (c *ContextData) GetLogs(podName string) (string, error) {
	req := c.Clients.KubeClient.CoreV1().Pods(c.Namespace).GetLogs(podName, &v1.PodLogOptions{})
	stream, err := req.Stream()
	if err != nil {
		return "", err
	}
	defer stream.Close()
	var collected bytes.Buffer
	if _, err = io.Copy(&collected, stream); err != nil {
		return "", err
	}
	return collected.String(), nil
}
// WaitForPodStatus polls the named pod until it reaches the given phase or
// the timeout elapses, checking every interval via the package's
// RetryWithContext helper. It returns the last observed pod (nil if the pod
// never appeared) together with the retry error, if any.
func (c *ContextData) WaitForPodStatus(podName string, status v1.PodPhase, timeout time.Duration, interval time.Duration) (*v1.Pod, error) {
	var pod *v1.Pod
	var err error
	ctx, cancel := context.WithTimeout(context.TODO(), timeout)
	defer cancel()
	err = RetryWithContext(ctx, interval, func() (bool, error) {
		pod, err = c.Clients.KubeClient.CoreV1().Pods(c.Namespace).Get(podName, metav1.GetOptions{})
		if err != nil {
			// pod does not exist yet: keep polling instead of aborting
			return false, nil
		}
		return pod.Status.Phase == status, nil
	})
	return pod, err
}
// Execute runs the given command (with arguments) inside the named pod via a
// remote exec (SPDY) session and returns captured stdout, stderr and any
// error.
func Execute(ctx1 *ContextData, command string, arguments []string, podname string) (string, string, error) {
	pod, err := ctx1.Clients.KubeClient.CoreV1().Pods(ctx1.Namespace).Get(podname, metav1.GetOptions{})
	if err != nil {
		// Fix: this error was previously ignored, which would panic below on
		// a nil pod.
		return "", "", errors.Wrapf(err, "Failed retrieving pod %s", podname)
	}
	request := ctx1.Clients.KubeClient.CoreV1().RESTClient().
		Post().
		Namespace(pod.Namespace).
		Resource("pods").
		Name(pod.Name).
		SubResource("exec").
		VersionedParams(&v1.PodExecOptions{
			Command: append([]string{command}, arguments...),
			Stdin:   true,
			Stdout:  true,
			Stderr:  true,
			TTY:     true,
		}, scheme.ParameterCodec)
	// restConfig is a package-level rest config defined elsewhere in this
	// package.
	exec, err := remotecommand.NewSPDYExecutor(&restConfig, "POST", request.URL())
	if err != nil {
		// Fix: executor-creation errors were previously ignored, leading to a
		// nil-pointer panic on exec.Stream below.
		return "", "", errors.Wrapf(err, "Failed creating executor for %v/%v", pod.Namespace, pod.Name)
	}
	buf := &bytes.Buffer{}
	errBuf := &bytes.Buffer{}
	// NOTE(review): Stdin and TTY are requested but no Stdin reader is
	// supplied in StreamOptions -- confirm this is intended for the commands
	// being executed.
	err = exec.Stream(remotecommand.StreamOptions{
		Stdout: buf,
		Stderr: errBuf,
	})
	if err != nil {
		return "", "", errors.Wrapf(err, "Failed executing command %s on %v/%v", command, pod.Namespace, pod.Name)
	}
	return buf.String(), errBuf.String(), nil
}
|
import React, { Component } from 'react';
import DD35CharacterTable from "./DD35CharacterTable";
import DD35CharacterCreate from "./DD35CharacterCreate";
export default class DD35Characters extends Component {
constructor(props) {
super(props);
this.state = { selectedChar: undefined };
this.handleSelect = this.handleSelect.bind(this);
this.handleDeselect = this.handleDeselect.bind(this);
}
handleSelect(character) {
this.setState({ selectedChar: character });
}
handleDeselect() {
this.setState({ selectedChar: undefined });
}
render() {
const { selectedChar } = this.state;
const { gateway } = this.props;
return (
<div>
{
selectedChar &&
<DD35CharacterCreate
selectedChar={ selectedChar }
onClose={ this.handleDeselect }
/>
}
{
!selectedChar &&
<DD35CharacterTable
onSelect={ this.handleSelect }
gateway={ gateway }
/>
}
</div>
);
}
}
|
# Set the project of interest
gcloud config set project csye-final-project-189104

# Bigtable: create a 3-node instance/cluster in us-east1-b
gcloud beta bigtable instances create csye-final-project-bigtable --cluster=csye6225-final-project-cluster --cluster-zone=us-east1-b --description=test-final-project --cluster-num-nodes=3

# Cloud Pub/Sub: create a topic and a subscription on it, publish a test
# message, then pull (and auto-acknowledge) it
gcloud beta pubsub topics create myTopic
gcloud beta pubsub subscriptions create --topic myTopic mySubscription
gcloud beta pubsub topics publish myTopic "hello"
gcloud beta pubsub subscriptions pull --auto-ack mySubscription
/*
* Copyright 2016 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nz.co.testamation.hibernate.config;
import nz.co.testamation.hibernate.HibernateEntityTemplate;
import org.hibernate.Session;
import java.io.Serializable;
/**
 * Entity config step that ensures the given object exists in the database:
 * it saves the instance and returns the session-attached entity.
 */
public class HibernateObjectExistsConfig<T> extends AbstractHibernateEntityConfig<T> {

    // The (detached) entity instance to persist.
    private T hibernateObj;

    public HibernateObjectExistsConfig( T hibernateObj ) {
        this.hibernateObj = hibernateObj;
    }

    /**
     * Saves the entity inside a Hibernate session and loads it back by the
     * identifier returned from save(), yielding the attached instance.
     */
    @Override
    public T apply() {
        return doInHibernate( new HibernateEntityTemplate.Work<T>() {
            @Override
            public T execute( Session session ) {
                Serializable id = session.save( hibernateObj );
                // Unchecked cast: load() returns Object, but the id was
                // generated by saving an instance of T's runtime class.
                return (T) session.load( hibernateObj.getClass(), id );
            }
        } );
    }
}
|
import { module, test } from 'qunit';
import { setupRenderingTest } from 'ember-qunit';
import { render } from '@ember/test-helpers';
import hbs from 'htmlbars-inline-precompile';
// Rendering tests for the {{auto-focus}} element modifier: it focuses the
// element it is attached to (or a selected child) when inserted into the DOM.
module('auto-focus', function (hooks) {
  setupRenderingTest(hooks);

  test('it focuses the element', async function (assert) {
    assert.expect(3);
    this.show = true;
    await render(hbs`
      {{#if this.show}}
        <div class="foo" tabindex="0" {{auto-focus}}></div>
      {{/if}}
    `);
    assert.dom('.foo').isFocused('the element is focused on initial render');
    // Removing and re-inserting the element must re-trigger the focus.
    this.set('show', false);
    assert
      .dom('.foo')
      .doesNotExist('precondition, element is removed from the DOM');
    this.set('show', true);
    assert
      .dom('.foo')
      .isFocused('the element is focused on subsequent renders');
  });

  test('it can focus a specific child element', async function (assert) {
    assert.expect(1);
    // Optional positional argument: a selector resolved within the host.
    this.selector = '.inner > .foo';
    await render(hbs`
      <div class="outer" {{auto-focus this.selector}}>
        <div class="inner">
          <div class="foo" tabindex="0"></div>
        </div>
      </div>
    `);
    assert
      .dom(this.selector)
      .isFocused('the element specified by the selector is focused');
  });

  test('it does not focus an element outside of itself', async function (assert) {
    assert.expect(1);
    await render(hbs`
      <div class="focusable" tabindex="0"></div>
      <div {{auto-focus ".focusable"}}></div>
    `);
    assert
      .dom('.focusable')
      .isNotFocused('the selector is scoped to child elements only');
  });

  test('disabled argument (disabled)', async function (assert) {
    assert.expect(1);
    await render(hbs`
      <div class="foo" tabindex="0" {{auto-focus disabled=true}}>/</div>
    `);
    assert.dom('.foo').isNotFocused('does not focus the element');
  });

  test('disabled argument (enabled)', async function (assert) {
    assert.expect(1);
    await render(hbs`
      <div class="foo" tabindex="0" {{auto-focus disabled=false}}>/</div>
    `);
    assert.dom('.foo').isFocused('focus the element');
  });

  test('rendering', async function (assert) {
    assert.expect(2);
    // The focus event must bubble (focusin) so ancestors can observe it.
    this.focusInOuter = () => assert.step('focusin on parent node');
    await render(hbs`
      <div {{on "focusin" this.focusInOuter}}>
        <input {{auto-focus}} class="foo">
      </div>
    `);
    assert.verifySteps(['focusin on parent node']);
  });

  test('nesting', async function (assert) {
    assert.expect(1);
    await render(hbs`
      <div {{auto-focus}} tabindex="0" class="outer">
        <div {{auto-focus}} tabindex="0" class="inner">
        </div>
      </div>
    `);
    assert.dom('.inner').isFocused(
      `child modifiers run before parents, but this scenario behaves as expected
      (because the parent renders first, then the child)`
    );
  });

  test('programmatic focus', async function (assert) {
    assert.expect(2);
    // While the modifier focuses the element, it marks it with a data
    // attribute so handlers can distinguish programmatic from user focus.
    this.focused = (e) => {
      assert
        .dom('.foo')
        .hasAttribute(
          'data-programmatically-focused',
          'true',
          'property is true because this addon focused the element'
        );
    };
    await render(hbs`
      <div
        {{on "focus" this.focused}}
        {{auto-focus}}
        class="foo"
        tabindex="0"
      ></div>
    `);
    assert
      .dom('.foo')
      .doesNotHaveAttribute(
        'data-programmatically-focused',
        'property removed after focus'
      );
  });
});
|
// https://codeforces.com/contest/4/problem/A
#include <bits/stdc++.h>
using namespace std;
int main() {
    ios::sync_with_stdio(0);
    cin.tie(0);
    int w;
    cin >> w;
    // A split into two positive even parts exists iff w is even and w > 2.
    const bool splittable = (w % 2 == 0) && (w != 2);
    cout << (splittable ? "YES\n" : "NO\n");
}
|
#!/bin/bash
# Build the project in the directory containing this script, then run the
# produced binary from bin/.
SCRIPT=$(readlink -f "$0")
BASEDIR=$(dirname "$SCRIPT")
# Fix: quote the path (may contain spaces) and abort if the cd fails, so the
# build/binary never runs from the wrong directory.
cd "$BASEDIR" || exit 1
sh build.sh
cd bin || exit 1
./CodeCraft-2022
|
<reponame>Qihoo360/kafkabridge
#ifndef QBUS_QBUS_PRODUCER_IMP_H_
#define QBUS_QBUS_PRODUCER_IMP_H_
#include "qbus_producer.h"
namespace qbus {
// Abstract implementation interface (pimpl) backing QbusProducer.
class QbusProducer::Imp {
 public:
  virtual ~Imp() {}
  // Initializes the producer for the given cluster and topic; log_path and
  // config_path locate the log file and configuration to use. Returns true
  // on success.
  virtual bool init(const std::string& cluster, const std::string& log_path, const std::string& config_path,
                    const std::string& topic) = 0;
  // Releases resources acquired by init().
  virtual void uninit() = 0;
  // Sends data_len bytes with the given key; presumably the key drives
  // partition selection -- confirm with the concrete implementation.
  virtual bool produce(const char* data, size_t data_len, const std::string& key) = 0;
};
} // namespace qbus
#endif // QBUS_QBUS_PRODUCER_IMP_H_
|
/* Implementation of a MinHeap.
* (C) <NAME>, 2014
*
* This header implements a specialization of the
* abstract Heap, the MinHeap.
* This is just like the abstract one, but it follows
* the rule that a node is always smaller than its
* children. Because of that, the root of this Heap
* is always the global minimum of all elements and
* so we provide the special method "deleteMin" which
* is useful for many many algorithms.
*
* This code is licensed under the BSD3 license.
*/
#ifndef min_heap_h___
#define min_heap_h___
#include "heap.h"
/* MinHeap: a Heap specialization in which every node is smaller than its
 * children, so the root is the global minimum. Provides insert with
 * bubble-up and deleteMin with bubble-down.
 */
template <typename T> class MinHeap : public Heap<T>
{
private:
	/* Swaps the values of two nodes; the link structure is untouched.
	 *
	 * @param child  The first node to swap.
	 * @param parent The second node to swap.
	 */
	void swapNodes( Node<T> *child, Node<T> *parent )
	{
		T childValue = child->getValue();
		child->setValue( parent->getValue() );
		parent->setValue( childValue );
	}

	/* Restores the heap property downwards: while a node is larger than its
	 * smallest child, swap with that child and continue below.
	 *
	 * Fix: the previous version's two-children branch fell through into the
	 * separate single-child checks after already swapping, which could
	 * perform a second, unnecessary swap at the same level. Selecting the
	 * smallest of {node, left, right} and swapping at most once per level
	 * avoids that.
	 *
	 * @param node The node to "bubble down".
	 */
	void bubbleDown( Node<T> *node )
	{
		Node<T> *left = node->getLeft();
		Node<T> *right = node->getRight();
		if( !left && !right )
			return;
		Node<T> *smallest = node;
		if( left && left->getValue() < smallest->getValue() )
			smallest = left;
		if( right && right->getValue() < smallest->getValue() )
			smallest = right;
		if( smallest != node )
		{
			swapNodes( node, smallest );
			bubbleDown( smallest );
		}
	}

	/* Restores the heap property upwards: while a node's value is smaller
	 * than its parent's, the two values are swapped.
	 *
	 * @param node The node to "bubble up".
	 */
	void bubbleUp( Node<T> *node )
	{
		if( !node->getParent() )
			return;
		if( node->getValue() < node->getParent()->getValue() )
		{
			swapNodes( node, node->getParent() );
			bubbleUp( node->getParent() );
		}
	}

public:
	/* Constructs an empty MinHeap via the abstract Heap constructor. */
	MinHeap():Heap<T>(){}

	// destructor is inherited from the superclass and needs no override

	/* Inserts a new Node holding the given value, then bubbles it up to its
	 * correct position.
	 *
	 * @param value The value of the new Node to insert.
	 */
	void insert( T value )
	{
		Node<T> *node = new Node<T>( value );
		Heap<T>::insert( node );
		bubbleUp( node );
	}

	/* Deletes the minimum (the root) by copying the last-added node's value
	 * into the root, detaching that last node, and bubbling the new root
	 * value down.
	 *
	 * Fix: the previous version dereferenced lastOne->getParent() without a
	 * null check, crashing when the heap held a single node (the last node
	 * IS the root and has no parent).
	 */
	void deleteMin()
	{
		Node<T> *lastOne = Heap<T>::getLastNode();
		Heap<T>::getRoot()->setValue( lastOne->getValue() );
		Node<T> *parent = lastOne->getParent();
		if( parent )
		{
			if( parent->getLeft() == lastOne )
				parent->setLeft( NULL );
			else
				parent->setRight( NULL );
			lastOne->setParent( NULL );
			// NOTE(review): the detached node is never deleted here; whether
			// Heap<T> still tracks and frees it is not visible from this
			// header -- confirm ownership before adding a delete.
		}
		Heap<T>::decreaseSize();
		bubbleDown( Heap<T>::getRoot() );
	}

	/* Prints the MinHeap to stdout via the superclass. */
	void print()
	{
		Heap<T>::print();
	}
};
#endif // min_heap.h
|
#!/bin/bash
#
# Copyright (C) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Positional parameters with defaults: API-server address, this node's
# address, and the cluster DNS server/domain handed to the kubelet.
MASTER_ADDRESS=${1:-"8.8.8.18"}
NODE_ADDRESS=${2:-"8.8.8.20"}
DNS_SERVER_IP=${3:-"192.168.3.100"}
DNS_DOMAIN=${4:-"cluster.local"}
KUBECONFIG_DIR=${KUBECONFIG_DIR:-/opt/kubernetes/cfg}

# Generate a kubeconfig file
cat <<EOF > "${KUBECONFIG_DIR}/kubelet.kubeconfig"
apiVersion: v1
kind: Config
clusters:
  - cluster:
      server: http://${MASTER_ADDRESS}:8080/
    name: local
contexts:
  - context:
      cluster: local
    name: local
current-context: local
EOF

# Write the kubelet environment file consumed by the systemd unit below.
cat <<EOF >/opt/kubernetes/cfg/kubelet
# --logtostderr=true: log to standard error instead of files
KUBE_LOGTOSTDERR="--logtostderr=true"

# --v=0: log level for V logs
KUBE_LOG_LEVEL="--v=4"

# --address=0.0.0.0: The IP address for the Kubelet to serve on (set to 0.0.0.0 for all interfaces)
NODE_ADDRESS="--address=${NODE_ADDRESS}"

# --port=10250: The port for the Kubelet to serve on. Note that "kubectl logs" will not work if you set this flag.
NODE_PORT="--port=10250"

# --hostname-override="": If non-empty, will use this string as identification instead of the actual hostname.
NODE_HOSTNAME="--hostname-override=${NODE_ADDRESS}"

# Path to a kubeconfig file, specifying how to connect to the API server.
KUBELET_KUBECONFIG="--kubeconfig=${KUBECONFIG_DIR}/kubelet.kubeconfig"

# --allow-privileged=false: If true, allow containers to request privileged mode. [default=false]
KUBE_ALLOW_PRIV="--allow-privileged=false"

# DNS info
KUBELET__DNS_IP="--cluster-dns=${DNS_SERVER_IP}"
KUBELET_DNS_DOMAIN="--cluster-domain=${DNS_DOMAIN}"

# Add your own!
KUBELET_ARGS=""
EOF

# Escaped \${...} references are expanded by systemd from the environment
# file at service start, not by this script.
KUBELET_OPTS=" \${KUBE_LOGTOSTDERR} \\
 \${KUBE_LOG_LEVEL} \\
 \${NODE_ADDRESS} \\
 \${NODE_PORT} \\
 \${NODE_HOSTNAME} \\
 \${KUBELET_KUBECONFIG} \\
 \${KUBE_ALLOW_PRIV} \\
 \${KUBELET__DNS_IP} \\
 \${KUBELET_DNS_DOMAIN} \\
 \$KUBELET_ARGS"

# Install the systemd unit that runs the kubelet with the options above.
cat <<EOF >/usr/lib/systemd/system/kubelet.service
[Unit]
Description=Kubernetes Kubelet
After=docker.service
Requires=docker.service

[Service]
EnvironmentFile=-/opt/kubernetes/cfg/kubelet
ExecStart=/opt/kubernetes/bin/kubelet ${KUBELET_OPTS}
Restart=on-failure
KillMode=process

[Install]
WantedBy=multi-user.target
EOF

# Activate the service immediately and on every boot.
systemctl daemon-reload
systemctl enable kubelet
systemctl restart kubelet
|
# Install the meld visual diff/merge tool non-interactively.
sudo apt-get install -y meld
|
// Wire up the three demo buttons once the page has loaded.
window.onload = () => {
    const simpleObjectButton = document.getElementById('simple-object-action');
    const functionButton = document.getElementById('function-object-action');
    const customClassButton = document.getElementById('custom-class-action');

    simpleObjectButton.onclick = () => {
        // NOTE(review): this handler only looks up the display element and
        // does nothing with it -- appears unfinished; confirm intent.
        const simpleObjectDisplay = document.getElementById('simple-object-display');
    }

    functionButton.onclick = () => {
        // NOTE(review): same as above -- element fetched but never used.
        const functionObjectDisplay = document.getElementById('function-object-display');
    }

    customClassButton.onclick = function() {
        // Show the full name produced by the CustomClass demo (defined below;
        // class declarations are evaluated before this handler ever runs).
        const customClassDisplay = document.getElementById('customClassNameDisplay');
        const customClass = new CustomClass();
        const name = customClass.getName();
        customClassDisplay.textContent = name;
    }
}
//const myObject = new FunctionObject();
// console.log(functionObject.getFullName());
//}
//key value pairs
//comma separated list of key value pairs.
// Demo 1: an object literal with data properties and a method.
const myObject = {
    firstName: 'Jonas',
    lastName: 'Laid',
    getName: function () {
        return this.firstName + ' ' + this.lastName;
    }
};

// Properties and methods can be attached after creation as well.
myObject.middleName = 'Karth';
myObject.getFullName = function() {
    return this.firstName +
        " " + this.middleName +
        " " + this.lastName;
};

// Demo 2: a constructor function. The names are captured in the closure;
// the prototype method is (re)assigned on every construction.
function FunctionObject() {
    const firstName = "Jonas";
    const lastName = "Laid";
    FunctionObject.prototype.getFullName = () => {
        return firstName + " " + lastName;
    }
}
const functionObject = new FunctionObject();

// Demo 3: an ES2015 class with instance state and a method.
class CustomClass {
    constructor() {
        this.firstName = 'Jonas';
        this.lastName = 'Laid'
    }
    getName() {
        return this.firstName + ' ' + this.lastName;
    }
}

// Exercise the three demos on the console.
console.log (myObject.firstName);
console.log (myObject['firstName']);
console.log (myObject.getName());
console.log (myObject.middleName);
console.log (myObject.getFullName());
console.log (functionObject.getFullName());
|
<gh_stars>0
// Numeric status codes exposed by this module.
declare const status: {
  UNSET: 0;
  OK: 1;
  ERROR: 2;
};
// Fix: the module previously exported the undeclared name `kinds`, which
// does not compile; `status` is the only declaration in this file.
export = status;
|
#include <stdio.h>
#include <ctype.h>  /* isdigit, isspace */
#include <stdlib.h> /* malloc, free */
#include <string.h> /* strlen */
// A utility function to check whether a character is one of the four
// supported operator symbols. Returns 1 for + - * /, otherwise 0.
int isOperator(char c) {
	switch (c) {
	case '+':
	case '-':
	case '*':
	case '/':
		return 1;
	default:
		return 0;
	}
}
// Evaluates a postfix expression of single-digit operands and returns its
// value. Digits are pushed onto a stack; an operator pops two operands,
// applies itself, and pushes the result back.
//
// Fixes over the previous version:
//  - whitespace is skipped; previously a space fell into the operator branch
//    and corrupted the stack (the driver's "2 3 4 * +" input was broken),
//  - evaluate() is declared before use (it is defined later in this file),
//  - the stack is freed before returning (previously leaked).
// Malformed expressions (operator without two operands) remain undefined
// behavior, as before.
int evaluatePostfix(char* exp) {
	int evaluate(int op1, int op2, char op); /* defined below in this file */
	// Stack capacity is bounded by the expression length.
	int size = strlen(exp);
	int* stk = (int*)malloc(sizeof(int) * (size));
	int top = -1;
	for (int i = 0; exp[i]; ++i) {
		if (isspace((unsigned char)exp[i]))
			continue; // token separator, not an operator
		if (isdigit((unsigned char)exp[i])) {
			stk[++top] = exp[i] - '0';
		} else {
			// Operator: pop the two topmost operands (val2 was pushed first)
			// and replace them with the result in place.
			int val1 = stk[top--];
			int val2 = stk[top];
			stk[top] = evaluate(val2, val1, exp[i]);
		}
	}
	int result = stk[top];
	free(stk);
	return result;
}
// Performs the binary operation op on (op1, op2) and returns the result.
// Any character other than + - * is treated as division, matching the
// original fall-through behavior.
int evaluate(int op1, int op2, char op) {
	switch (op) {
	case '+': return op1 + op2;
	case '-': return op1 - op2;
	case '*': return op1 * op2;
	default:  return op1 / op2;
	}
}
// Driver program to test above functions
int main() {
	// NOTE(review): operands are space-separated single digits; verify that
	// evaluatePostfix handles the whitespace between tokens. Expected value
	// of "2 3 4 * +" is 2 + (3 * 4) = 14.
	char exp[] = "2 3 4 * +";
	printf("Value of %s is %d", exp, evaluatePostfix(exp));
	return 0;
}
# Usage: <script> <network-service-name>
# Ensures the macOS HTTP and HTTPS proxies of the given network service point
# at a local proxy on 127.0.0.1:9999, reapplying only when disabled or wrong.
networkserviceorder=$1

# HTTP proxy: dump current settings, reconfigure if not already ours.
networksetup -getwebproxy $networkserviceorder > proxyinfo.txt
if [ `(awk '/Enabled:/ {print $2;exit}' proxyinfo.txt)` == "No" ] || [ `(awk '/Server:/ {print $2}' proxyinfo.txt)` != "127.0.0.1" ] || [ `(awk '/Port:/ {print $2}' proxyinfo.txt)` != "9999" ]
then
networksetup -setwebproxy $networkserviceorder 127.0.0.1 9999
fi

# HTTPS proxy: same check-and-set dance.
networksetup -getsecurewebproxy $networkserviceorder > proxyinfossl.txt
if [ `(awk '/Enabled:/ {print $2;exit}' proxyinfossl.txt)` == "No" ] || [ `(awk '/Server:/ {print $2}' proxyinfossl.txt)` != "127.0.0.1" ] || [ `(awk '/Port:/ {print $2}' proxyinfossl.txt)` != "9999" ]
then
networksetup -setsecurewebproxy $networkserviceorder 127.0.0.1 9999
fi
|
import java.net.Socket;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.XMLConstants;
import javax.xml.transform.sax.SAXSource;
import javax.xml.transform.stream.StreamSource;
import org.xml.sax.InputSource;
/**
 * Exercises DocumentBuilderFactory configurations with respect to XXE
 * (XML External Entity) processing. Each parse call is annotated with
 * whether the preceding configuration makes it safe or unsafe.
 */
class DocumentBuilderTests {

	public void unconfiguredParse(Socket sock) throws Exception {
		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		DocumentBuilder builder = factory.newDocumentBuilder();
		builder.parse(sock.getInputStream()); //unsafe
	}

	public void disableDTD(Socket sock) throws Exception {
		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
		DocumentBuilder builder = factory.newDocumentBuilder();
		builder.parse(sock.getInputStream()); //safe
	}

	public void enableSecurityFeature(Socket sock) throws Exception {
		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
		DocumentBuilder builder = factory.newDocumentBuilder();
		builder.parse(sock.getInputStream()); //safe
	}

	public void enableSecurityFeature2(Socket sock) throws Exception {
		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		factory.setFeature("http://javax.xml.XMLConstants/feature/secure-processing", true);
		DocumentBuilder builder = factory.newDocumentBuilder();
		builder.parse(sock.getInputStream()); //safe
	}

	public void enableDTD(Socket sock) throws Exception {
		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", false);
		DocumentBuilder builder = factory.newDocumentBuilder();
		builder.parse(sock.getInputStream()); //unsafe
	}

	public void disableSecurityFeature(Socket sock) throws Exception {
		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		factory.setFeature("http://javax.xml.XMLConstants/feature/secure-processing", false);
		DocumentBuilder builder = factory.newDocumentBuilder();
		builder.parse(sock.getInputStream()); //unsafe
	}

	public void disableExternalEntities(Socket sock) throws Exception {
		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		// Both parameter AND general external entities must be disabled.
		factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
		factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
		DocumentBuilder builder = factory.newDocumentBuilder();
		builder.parse(sock.getInputStream()); //safe
	}

	public void partialDisableExternalEntities(Socket sock) throws Exception {
		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
		DocumentBuilder builder = factory.newDocumentBuilder();
		builder.parse(sock.getInputStream()); //unsafe
	}

	public void partialDisableExternalEntities2(Socket sock) throws Exception {
		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
		DocumentBuilder builder = factory.newDocumentBuilder();
		builder.parse(sock.getInputStream()); //unsafe
	}

	public void misConfigureExternalEntities1(Socket sock) throws Exception {
		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		factory.setFeature("http://xml.org/sax/features/external-parameter-entities", true);
		factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
		DocumentBuilder builder = factory.newDocumentBuilder();
		builder.parse(sock.getInputStream()); //unsafe
	}

	public void misConfigureExternalEntities2(Socket sock) throws Exception {
		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
		factory.setFeature("http://xml.org/sax/features/external-general-entities", true);
		DocumentBuilder builder = factory.newDocumentBuilder();
		builder.parse(sock.getInputStream()); //unsafe
	}

	public void taintedSAXInputSource1(Socket sock) throws Exception {
		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		DocumentBuilder builder = factory.newDocumentBuilder();
		SAXSource source = new SAXSource(new InputSource(sock.getInputStream()));
		builder.parse(source.getInputSource()); //unsafe
	}

	public void taintedSAXInputSource2(Socket sock) throws Exception {
		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		DocumentBuilder builder = factory.newDocumentBuilder();
		StreamSource source = new StreamSource(sock.getInputStream());
		builder.parse(SAXSource.sourceToInputSource(source)); //unsafe
		builder.parse(source.getInputStream()); //unsafe
	}

	/**
	 * Returns a factory with both classes of external entities disabled.
	 * Fix: setFeature declares the checked ParserConfigurationException, but
	 * this method has no throws clause, so the original body did not compile.
	 * Wrap and rethrow unchecked, matching the style of XML_DOCUMENT_BUILDER
	 * below.
	 */
	private static DocumentBuilderFactory getDocumentBuilderFactory() {
		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		try {
			factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
			factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
		} catch (Exception ex) {
			throw new RuntimeException(ex);
		}
		return factory;
	}

	// One safely-configured DocumentBuilder per thread (DocumentBuilder is
	// not thread-safe).
	private static final ThreadLocal<DocumentBuilder> XML_DOCUMENT_BUILDER = new ThreadLocal<DocumentBuilder>() {
		@Override
		protected DocumentBuilder initialValue() {
			DocumentBuilderFactory factory = getDocumentBuilderFactory();
			try {
				return factory.newDocumentBuilder();
			} catch (Exception ex) {
				throw new RuntimeException(ex);
			}
		}
	};

	public void disableExternalEntities2(Socket sock) throws Exception {
		DocumentBuilder builder = XML_DOCUMENT_BUILDER.get();
		builder.parse(sock.getInputStream()); //safe
	}
}
|
package com.github.messenger4j.internal.gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonPrimitive;
import com.google.gson.JsonSerializationContext;
import com.google.gson.JsonSerializer;
import java.lang.reflect.Type;
import java.time.Instant;
import java.util.Optional;
/**
* @author <NAME>
* @since 1.0.0
*/
public final class OptionalInstantToSecondsStringSerializer
    implements JsonSerializer<Optional<Instant>> {

  /**
   * Serializes a present instant as its epoch-seconds value rendered as a
   * JSON string primitive; an empty optional serializes as null.
   */
  @Override
  public JsonElement serialize(
      Optional<Instant> src, Type typeOfSrc, JsonSerializationContext context) {
    if (!src.isPresent()) {
      return null;
    }
    long epochSeconds = src.get().getEpochSecond();
    return new JsonPrimitive(String.valueOf(epochSeconds));
  }
}
|
#!/bin/bash
# Fix: this script uses bash arrays and ${#array[@]} expansions, which are not
# POSIX sh -- the shebang must be bash (was /bin/sh).
#
# Builds and signs an NVS truststore partition for each listed ESP32 device.
#deviceIds=("esp32-CAFEEC" "esp32-AF5FA4" "esp32-0C9D6C" "esp32-CB3DC4" "esp32-134248" "esp32-13994C")
deviceIds=() # ("esp32-13994C")
deviceIds[${#deviceIds[@]}]="$1"   # append the device id passed as $1
containerId="$2" # 1
domain=$3 # "local"
certDir=$4 # "../../certs"
signingKey=$5 #"../../certs/rootCA/Sign/SigningPrivateKey.pem"
verificationKey=$6 #"../../certs/rootCA/Sign/SigningPublicKey.pem"
rootCA_cert=$7 #"../../certs/rootCA/ACME_CA.cer"

for i in "${deviceIds[@]}"
do
	serverCert="${certDir}/${i}/${i}.${domain}.cer"
	#echo "${serverCert}"
	mkdir -p "$i"
	python3 makeKeystore.py "$i" -c "${containerId}" -v "${verificationKey}" -r "${rootCA_cert}" -s "${serverCert}"
	python3 $IDF_PATH/components/nvs_flash/nvs_partition_generator/nvs_partition_gen.py generate truststore.csv "$i"/truststore.bin 0x8000 --version 2
	echo "Signing truststore ..."
	./pk_sign "${signingKey}" data.tlv8.pre
	echo "OK"
	echo "Verifying signature ..."
	./pk_verify "${verificationKey}" data.tlv8.pre
	echo "OK"
	echo "Creating truststore.tlv8 ..."
	cat data.tlv8.pre > "$i"/truststore.tlv8
	size="$(wc -c data.tlv8.pre.sig | awk '{print $1}')"
	# Fix: quote expansions so odd wc output can never word-split.
	hex=$( printf "%x" "$size" )
	# Appears to emit a 0xFE marker byte followed by the signature length in
	# hex before the signature itself -- confirm against the TLV8 reader.
	#echo -n -e '\xFE\x46' | hexf
	output="\xFE\x$hex"
	printf "%b" "$output" >> "$i"/truststore.tlv8
	cat data.tlv8.pre.sig >> "$i"/truststore.tlv8
	echo "OK"
	echo "Cleaning ..."
	rm data.tlv8.pre.sig
	rm data.tlv8.pre
	rm truststore.csv
	echo "OK"
done
def parse_busco_file(busco):
    """Parse a BUSCO output file and return the list of duplicated genes.

    A line is considered a duplicate entry when it contains the marker
    'D:1%' (presumably the duplicated-BUSCO percentage field — confirm
    against the BUSCO short-summary format); the value after 'mode:' on
    that line is collected.
    """
    duplicates = []
    with open(busco) as fh:
        for raw_line in fh:
            if 'D:1%' not in raw_line:
                continue
            marker = raw_line.find('mode:')
            if marker == -1:
                # Marker line without a mode field; nothing to collect.
                continue
            duplicates.append(raw_line[marker + len('mode:'):].strip())
    return duplicates
def remove_outliers(prices):
    """Return ``prices`` with values more than 3 population standard
    deviations from the mean removed.

    BUG FIX: an empty input previously raised ZeroDivisionError; it now
    returns []. The original also built an ``outliers`` list and filtered
    with ``x not in outliers`` (O(n*m)); filtering by the thresholds
    directly is equivalent and linear.
    """
    if not prices:
        return []
    mean = sum(prices) / len(prices)
    std = (sum((x - mean) ** 2 for x in prices) / len(prices)) ** 0.5
    lower = mean - std * 3
    upper = mean + std * 3
    return [x for x in prices if lower <= x <= upper]
<reponame>amcircle/site
export const links = [
{
href: '/lectures',
text: 'lectures',
translation: {
en: 'Lectures',
ru: 'Лекции'
},
available: { en: true, ru: true },
dynContent: true
},
{
href: '/publications',
text: 'publications',
translation: {
en: 'Publications',
ru: 'Публикации'
},
available: { en: true, ru: true },
dynContent: true
},
{
href: '/guides',
text: 'guides',
translation: {
en: 'Guides',
ru: 'Пособия'
},
available: { en: true, ru: true },
dynContent: true
},
{
href: '/projects',
text: 'projects',
translation: {
en: 'Projects',
ru: 'Проекты'
},
available: { en: false, ru: true },
dynContent: true
},
{
href: '/blog',
text: 'blog',
translation: {
en: 'Blog',
ru: 'Блог'
},
available: { en: true, ru: true },
dynContent: true
},
{
href: '/about',
text: 'about',
translation: {
en: 'About',
ru: 'О нас'
},
available: { en: true, ru: true },
},
];
// Lookup table mapping each link's key (`text`) to its translations object.
export const linksTranslation = Object.fromEntries(
  links.map((link) => [link.text, link.translation])
);
|
<reponame>ctuning/ck-spack
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
from spack import *
from spack.environment import EnvironmentModifications
class Intel(IntelPackage):
"""Intel Compilers."""
homepage = "https://software.intel.com/en-us/intel-parallel-studio-xe"
version('18.0.1', '28cb807126d713350f4aa6f9f167448a',
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12381/parallel_studio_xe_2018_update1_composer_edition.tgz')
version('18.0.0', '31ba768fba6e7322957b03feaa3add28',
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12067/parallel_studio_xe_2018_composer_edition.tgz')
version('17.0.4', 'd03d351809e182c481dc65e07376d9a2',
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11541/parallel_studio_xe_2017_update4_composer_edition.tgz')
version('17.0.3', '52344df122c17ddff3687f84ceb21623',
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11464/parallel_studio_xe_2017_update3_composer_edition.tgz')
version('17.0.2', '2891ab1ece43eb61b6ab892f07c47f01',
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11302/parallel_studio_xe_2017_update2_composer_edition.tgz')
version('17.0.1', '1f31976931ed8ec424ac7c3ef56f5e85',
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/10978/parallel_studio_xe_2017_update1_composer_edition.tgz')
version('17.0.0', 'b67da0065a17a05f110ed1d15c3c6312',
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9656/parallel_studio_xe_2017_composer_edition.tgz')
version('16.0.4', '2bc9bfc9be9c1968a6e42efb4378f40e',
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9785/parallel_studio_xe_2016_composer_edition_update4.tgz')
version('16.0.3', '3208eeabee951fc27579177b593cefe9',
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9063/parallel_studio_xe_2016_composer_edition_update3.tgz')
version('16.0.2', '1133fb831312eb519f7da897fec223fa',
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/8680/parallel_studio_xe_2016_composer_edition_update2.tgz')
variant('rpath', default=True, description='Add rpath to .cfg files')
components = [
# Common files
'intel-comp-',
'intel-openmp',
# C/C++
'intel-icc',
# Fortran
'intel-ifort',
]
    @property
    def license_files(self):
        # The license may be placed either at the install root or inside the
        # per-architecture bin directory; Spack checks both candidates.
        return [
            'Licenses/license.lic',
            join_path('compilers_and_libraries', 'linux', 'bin',
                      'intel64', 'license.lic')
        ]
    @run_after('install')
    def rpath_configuration(self):
        # When the 'rpath' variant is enabled, drop a <compiler>.cfg next to
        # each compiler driver so every binary it builds carries an rpath to
        # the Intel runtime libraries (no LD_LIBRARY_PATH needed at run time).
        if '+rpath' in self.spec:
            bin_dir = join_path(self.prefix, 'compilers_and_libraries',
                                'linux', 'bin', 'intel64')
            lib_dir = join_path(self.prefix, 'compilers_and_libraries',
                                'linux', 'compiler', 'lib', 'intel64_lin')
            for compiler in ['icc', 'icpc', 'ifort']:
                cfgfilename = join_path(bin_dir, '{0}.cfg'.format(compiler))
                with open(cfgfilename, 'w') as f:
                    f.write('-Xlinker -rpath -Xlinker {0}\n'.format(lib_dir))
    def setup_environment(self, spack_env, run_env):
        """Adds environment variables to the generated module file.

        These environment variables come from running:

        .. code-block:: console

           $ source bin/compilervars.sh intel64
        """
        # NOTE: Spack runs setup_environment twice, once pre-build to set up
        # the build environment, and once post-installation to determine
        # the environment variables needed at run-time to add to the module
        # file. The script we need to source is only present post-installation,
        # so check for its existence before sourcing.
        # TODO: At some point we should split setup_environment into
        # setup_build_environment and setup_run_environment to get around
        # this problem.
        compilervars = os.path.join(self.prefix.bin, 'compilervars.sh')
        if os.path.isfile(compilervars):
            # Capture the environment changes compilervars.sh would make and
            # replay them in the module file.
            run_env.extend(EnvironmentModifications.from_sourcing_file(
                compilervars, 'intel64'))
|
<gh_stars>10-100
/**
* @author ooooo
* @date 2021/3/5 11:11
*/
#ifndef CPP_1648__SOLUTION1_H_
#define CPP_1648__SOLUTION1_H_
#include <algorithm>
#include <iostream>
#include <numeric>
#include <vector>
using namespace std;
class Solution {
public:
    static constexpr int MOD = 1000000007;

    // LeetCode 1648: sell `orders` balls, where a ball taken from a pile of
    // current size v is worth v; return the maximum total value mod 1e9+7.
    // Binary-search the smallest floor `pos` such that selling every pile
    // down to `pos` consumes at most `orders` balls, then sell the remaining
    // orders at price `pos`.
    //
    // BUG FIX: `total` was only reduced modulo MOD at the very end. Each
    // pile can contribute an arithmetic-series sum near 1e18, so summing
    // many piles overflowed long long. The sum is now reduced every
    // iteration (each per-pile term itself fits in long long).
    int maxProfit(vector<int> &inventory, int orders) {
        long long lo = 1, hi = *max_element(inventory.begin(), inventory.end());
        long long pos = 0;
        while (lo <= hi) {
            long long mid = lo + (hi - lo) / 2;
            if (check(mid, inventory, orders)) {
                pos = mid;
                hi = mid - 1;
            } else {
                lo = mid + 1;
            }
        }
        long long total = 0;
        for (size_t i = 0; i < inventory.size(); ++i) {
            long long v = inventory[i];
            if (v > pos) {
                // Sum of pos+1 .. v, reduced immediately to avoid overflow.
                total = (total + (pos + 1 + v) * (v - pos) / 2 % MOD) % MOD;
                orders -= static_cast<int>(v - pos);
            }
        }
        if (orders == 0) {
            return static_cast<int>(total);
        }
        // Leftover orders all sell at price `pos`; orders*pos <= ~1e18 fits.
        return static_cast<int>((total + orders * pos) % MOD);
    }

    // True when selling every pile down to height `mid` needs <= orders balls.
    bool check(long long mid, vector<int> &inventory, int orders) {
        long long needed = 0;
        for (size_t i = 0; i < inventory.size(); ++i) {
            long long v = inventory[i];
            if (v > mid) {
                needed += v - mid;
            }
        }
        return needed <= orders;
    }
};
#endif //CPP_1648__SOLUTION1_H_
|
import unittest
import logging
import numpy as np
import pandas as pd
import scipy.sparse
import anndata
from batchglm.api.models.glm_nb import Simulator
import diffxpy.api as de
class TestDataTypesSingle(unittest.TestCase):
def _test_wald(self, data, sample_description, gene_names=None):
test = de.test.wald(
data=data,
sample_description=sample_description,
gene_names=gene_names,
factor_loc_totest="condition",
formula_loc="~ 1 + condition",
noise_model="nb",
batch_size=5
)
_ = test.summary()
def _test_lrt(self, data, sample_description, gene_names=None):
test = de.test.lrt(
data=data,
sample_description=sample_description,
gene_names=gene_names,
full_formula_loc="~ 1 + condition",
reduced_formula_loc="~ 1",
noise_model="nb"
)
_ = test.summary()
def _test_t_test(self, data, sample_description, gene_names=None):
test = de.test.t_test(
data=data,
sample_description=sample_description,
gene_names=gene_names,
grouping="condition"
)
_ = test.summary()
def _test_rank(self, data, sample_description, gene_names=None):
test = de.test.rank_test(
data=data,
sample_description=sample_description,
gene_names=gene_names,
grouping="condition"
)
_ = test.summary()
    def simulate(self, n_cells: int = 200, n_genes: int = 2):
        """Simulate a small negative-binomial count matrix and a random
        binary "condition" label per cell.

        Returns:
            (data, sample_description): data is an (n_cells, n_genes) count
            matrix; sample_description is a DataFrame with a 0/1 "condition"
            column drawn uniformly at random.
        """
        sim = Simulator(num_observations=n_cells, num_features=n_genes)
        sim.generate_sample_description(num_batches=0, num_conditions=0)
        sim.generate()
        random_sample_description = pd.DataFrame({
            "condition": np.random.randint(2, size=sim.input_data.num_observations)
        })
        return sim.x, random_sample_description
    def _test_numpy(self, sparse):
        """Run all four differential-expression tests (wald, lrt, t-test,
        rank) on a plain numpy matrix, or a scipy CSR matrix if sparse."""
        data, sample_description = self.simulate()
        gene_names = ["gene" + str(i) for i in range(data.shape[1])]
        if sparse:
            data = scipy.sparse.csr_matrix(data)
        self._test_wald(data=data, sample_description=sample_description, gene_names=gene_names)
        self._test_lrt(data=data, sample_description=sample_description, gene_names=gene_names)
        self._test_t_test(data=data, sample_description=sample_description, gene_names=gene_names)
        self._test_rank(data=data, sample_description=sample_description, gene_names=gene_names)
def _test_anndata(self, sparse):
data, sample_description = self.simulate()
gene_names = ["gene" + str(i) for i in range(data.shape[1])]
if sparse:
data = scipy.sparse.csr_matrix(data)
data = anndata.AnnData(data)
data.var_names = gene_names
self._test_wald(data=data, sample_description=sample_description)
self._test_lrt(data=data, sample_description=sample_description)
self._test_t_test(data=data, sample_description=sample_description)
self._test_rank(data=data, sample_description=sample_description)
    def _test_anndata_raw(self, sparse):
        """Run the differential-expression tests against an AnnData object's
        `.raw` view."""
        data, sample_description = self.simulate()
        gene_names = ["gene" + str(i) for i in range(data.shape[1])]
        if sparse:
            data = scipy.sparse.csr_matrix(data)
        data = anndata.AnnData(data)
        data.var_names = gene_names
        data.raw = data
        self._test_wald(data=data.raw, sample_description=sample_description)
        self._test_lrt(data=data.raw, sample_description=sample_description)
        # NOTE(review): these two pass `data`, not `data.raw`, unlike the two
        # calls above — confirm whether t-test/rank should also exercise .raw.
        self._test_t_test(data=data, sample_description=sample_description)
        self._test_rank(data=data, sample_description=sample_description)
def test_numpy(self):
logging.getLogger("tensorflow").setLevel(logging.ERROR)
logging.getLogger("batchglm").setLevel(logging.WARNING)
logging.getLogger("diffxpy").setLevel(logging.WARNING)
self._test_numpy(sparse=False)
self._test_numpy(sparse=True)
return True
def test_anndata(self):
logging.getLogger("tensorflow").setLevel(logging.ERROR)
logging.getLogger("batchglm").setLevel(logging.WARNING)
logging.getLogger("diffxpy").setLevel(logging.WARNING)
self._test_anndata(sparse=False)
self._test_anndata(sparse=True)
self._test_anndata_raw(sparse=False)
self._test_anndata_raw(sparse=True)
return True
if __name__ == '__main__':
unittest.main()
|
import datetime

# Print today's date in day/month/year form, e.g. "25/12/2023".
today = datetime.datetime.now()
print(today.strftime("%d/%m/%Y"))
<reponame>FlandiaYingman/arkwaifu<filename>internal/pkg/arkres/arkavg/pic_assets.go
package arkavg
import (
"os"
"path/filepath"
"strings"
"github.com/pkg/errors"
"github.com/samber/lo"
)
// ScanForPicAssets collects every avg picture asset — images followed by
// backgrounds — found under resDir for the given resource prefix.
func ScanForPicAssets(resDir string, prefix string) ([]Asset, error) {
	var groups [][]Asset
	for _, kind := range []Kind{KindImage, KindBackground} {
		assets, err := ScanForPicAssetsByKind(resDir, prefix, kind)
		if err != nil {
			return nil, err
		}
		groups = append(groups, assets)
	}
	return lo.Flatten(groups), nil
}
// ScanForPicAssetsByKind lists the PNG picture assets of a single kind
// located in <resDir>/<prefix>/avg/<kind>.
func ScanForPicAssetsByKind(resDir string, prefix string, kind Kind) ([]Asset, error) {
	dir := filepath.Join(resDir, prefix, "avg", string(kind))
	entries, err := os.ReadDir(dir)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	assets := make([]Asset, 0)
	for _, entry := range entries {
		name := entry.Name()
		// The directory may also hold JSON metadata files; keep PNGs only.
		if !strings.HasSuffix(name, ".png") {
			continue
		}
		assets = append(assets, Asset{Name: name, Kind: kind})
	}
	return assets, nil
}
|
#!/bin/bash
# stage2b <date> <yyyy-mm-dd>
# Calculate the M9 metrics for one day of address counts.
# TODO: install latest ithitools at: cd /usr/local/ITHI/dev/ithitools/
# and update script location
#
# FIXES: abort if the cd fails (otherwise compute_m9x.py would run from the
# wrong directory), and quote all expansions against word splitting.
cd ~/ithitools/stats || exit 1

COUNT_FOLDER="/data/ITHI/count-addr/$1"
M9_FOLDER="/data/ITHI/addr-m9"
mkdir -p "$M9_FOLDER"
FREQUENT="../data/frequent-resolvers.csv"
M9_CSV="$M9_FOLDER/M9-$2.csv"

echo "Preparing $M9_CSV from $COUNT_FOLDER"
python3 ./compute_m9x.py "$2" "$M9_CSV" "$FREQUENT" "$COUNT_FOLDER"
|
<reponame>Kun-a-Kun/Algorithms-Fourth-Edition-Exercises
package Chapter1_1High;
//Exercise 1.1.29
public class EquivalentKey {

    public static void main(String[] args) {
        int[] N = {1, 2, 3, 4, 5, 6, 7, 8, 9, 89, 89, 100};
        System.out.println(rank(89, N) + "");
        System.out.println(count(89, N) + "");
    }

    /**
     * Standard binary search over a sorted array; returns the index of some
     * occurrence of target, or -1 if absent.
     *
     * BUG FIX: the loop condition was {@code lo < hi}, which never inspects
     * the final one-element range, so boundary targets were missed (e.g.
     * searching 3 in {1, 2, 3} returned -1). The correct condition with
     * inclusive bounds is {@code lo <= hi}.
     */
    public static int binarySearch(int target, int[] N) {
        int lo = 0;
        int hi = N.length - 1;
        while (lo <= hi) {
            int mid = lo + (hi - lo) / 2;
            if (target > N[mid])
                lo = mid + 1;
            else if (target < N[mid])
                hi = mid - 1;
            else return mid;
        }
        return -1;
    }

    /**
     * Returns the index of the FIRST occurrence of key in the sorted array,
     * or 0 if the key is absent.
     */
    public static int rank(int key, int[] N) {
        int searchResult = binarySearch(key, N);
        System.out.println("searchResult: " + searchResult);
        int resultNum = 0;
        if (searchResult != -1) { // the key exists in the array
            for (int i = 0; i < N.length; i++) {
                if (N[i] == N[searchResult]) {
                    resultNum = i;
                    break;
                }
            }
        }
        return resultNum;
    }

    /**
     * Returns the number of occurrences of key in N (0 if absent).
     */
    public static int count(int key, int[] N) {
        int searchResult = binarySearch(key, N);
        System.out.println("searchResult: " + searchResult);
        int resultNum = 0;
        if (searchResult != -1) {
            for (int i = 0; i < N.length; i++) {
                if (N[i] == N[searchResult]) {
                    resultNum++;
                }
            }
        }
        return resultNum;
    }
}
|
from __future__ import print_function
import numpy as np
import tensorflow as tf
import sklearn.metrics
# Import MNIST data
from tensorflow.examples.tutorials.mnist import input_data
def readData(filename):
    """Read a tab-separated data file: all columns but the last are float
    features, the last column is an integer label.

    Returns:
        (X, Y): X is an (n_samples, n_features) float array, Y an
        (n_samples,) int array.

    BUG FIX: the original used ``map(float, ...)`` inside a list, which
    under Python 3 stores lazy map objects in the array instead of floats;
    replaced with an explicit comprehension.
    """
    with open(filename, 'r') as f:
        rows = [line.strip().split('\t') for line in f.readlines()]
    X = [[float(value) for value in row[:-1]] for row in rows]
    Y = [int(row[-1]) for row in rows]
    return np.array(X), np.array(Y)
def one_hot(Y, length):
    """One-hot encode integer labels.

    Args:
        Y: iterable of class indices, each expected in [0, length).
        length: number of classes (row width).

    Returns:
        np.ndarray of shape (len(Y), length) with a single 1 per row.
    """
    encoded = []
    for label in Y:
        row = [0] * length
        row[label] = 1
        encoded.append(row)
    return np.array(encoded)
def init(X, Y):
    """Shuffle (X, Y) together and split them 70% train / 30% test.

    Args:
        X: (n_samples, n_features) array.
        Y: labels with the same leading dimension as X.

    Returns:
        (X_train, Y_train, X_test, Y_test).
    """
    assert X.shape[0] == Y.shape[0], 'shape not match'
    num_all = X.shape[0]
    num_train = int(0.7 * num_all)
    # Shuffle samples and labels with one shared permutation.
    order = np.random.permutation(num_all)
    X, Y = X[order], Y[order]
    X_train, X_test = X[:num_train], X[num_train:]
    Y_train, Y_test = Y[:num_train], Y[num_train:]
    print('All data shape: ', X.shape)
    print('Train data shape: ', X_train.shape)
    print('Train label shape: ', Y_train.shape)
    print('Test data shape: ', X_test.shape)
    print('Test label shape: ', Y_test.shape)
    return X_train, Y_train, X_test, Y_test
# --- 1-nearest-neighbour classifier (TensorFlow 1.x graph mode) ---
# Load features/labels, one-hot the labels into 79 classes, split 70/30.
X,Y=readData("../../data/finalData.txt")
Y=one_hot(Y,79)
X_train, Y_train, X_test, Y_test=init(X,Y)
# Placeholders: every training vector (453 features each) and one test vector.
xtr = tf.placeholder("float", [None, 453])
xte = tf.placeholder("float", [453])
# L1 (Manhattan) distance from the test vector to each training vector.
distance = tf.reduce_sum(tf.abs(tf.add(xtr, tf.negative(xte))), reduction_indices=1)
#distance = tf.reduce_sum(tf.sqrt(tf.square(tf.add(xtr, tf.negative(xte)))), reduction_indices=1)
# Index of the nearest training vector.
# NOTE(review): tf.arg_min is deprecated in favour of tf.argmin in TF 1.x.
pred = tf.arg_min(distance, 0)
accuracy = 0.
pred_class = []
# NOTE(review): this rebinds `init` (the split function above) to the TF
# initializer op — harmless here since init() was already called, but fragile.
init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    # Classify each test sample with the label of its nearest neighbour.
    for i in range(len(X_test)):
        nn_index = sess.run(pred, feed_dict={xtr: X_train, xte: X_test[i, :]})
        print("Test", i, "Prediction:", np.argmax(Y_train[nn_index]), \
            "True Class:", np.argmax(Y_test[i]))
        pred_class.append(Y_train[nn_index])
        if np.argmax(Y_train[nn_index]) == np.argmax(Y_test[i]):
            accuracy += 1./len(X_test)
    print("Done!")
    print("Accuracy:", accuracy)
    # Weighted F1 over the one-hot predictions vs. one-hot ground truth.
    print('F1 score: %f' % sklearn.metrics.f1_score(Y_test, np.array(pred_class), average='weighted'))
|
import { module, test } from 'qunit';
import { setupRenderingTest } from 'ember-qunit';
import { render, findAll } from '@ember/test-helpers';
import { hbs } from 'ember-cli-htmlbars';
module('Integration | Component | chart-container', function (hooks) {
setupRenderingTest(hooks);
test('cases over time', async function (assert) {
assert.expect(3);
await render(hbs`<ChartContainer @location={{hash attributes=(hash wikiId="Earth")}} />`);
assert
.dom('.chart-container .chart-container__graph--time-series')
.exists('A timeseries is rendered in the chart container');
assert
.dom('.chart-container__graph--time-series .timeseries__title')
.hasText('Cases Over Time', 'the timeseries chart renders with a title');
assert
.dom('.chart-container .chart-container__graph--time-series .ember-apex-chart')
.exists('A timeseries chart is also rendered in the chart container');
});
test('7-day moving average', async function (assert) {
assert.expect(3);
await render(hbs`<ChartContainer @location={{hash attributes=(hash wikiId="Earth")}} />`);
assert
.dom(findAll('.chart-container .chart-container__graph--time-series')[1])
.exists('A second timeseries is rendered in the chart container');
assert
.dom(findAll('.chart-container__graph--time-series .timeseries__title')[1])
.hasText('7-Day Moving Average', 'the second timeseries chart renders with a title');
assert
.dom(findAll('.chart-container .chart-container__graph--time-series .ember-apex-chart')[1])
.exists('A second timeseries chart is also rendered in the chart container');
});
test('daily change', async function (assert) {
assert.expect(3);
await render(hbs`<ChartContainer @location={{hash attributes=(hash wikiId="Earth")}} />`);
assert
.dom('.chart-container .chart-container__graph--stacked-bar')
.exists('A stacked bar is rendered in the chart container');
assert
.dom('.chart-container__graph--stacked-bar .stacked-bar__title')
.hasText('Daily Change', 'the stacked bar chart renders with a title');
assert
.dom('.chart-container .chart-container__graph--stacked-bar .ember-apex-chart')
.exists('A stacked bar chart is also rendered in the chart container');
});
});
|
<reponame>khalifagroup/vscode-docker
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See LICENSE.md in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { WorkspaceFolder } from 'vscode';
import { IActionContext } from '../../extension.bundle';
import { DockerDebugScaffoldContext } from '../debugging/DebugHelper';
import { dockerDebugScaffoldingProvider } from '../debugging/DockerDebugScaffoldingProvider';
import { PlatformOS } from '../utils/platform';
import { getComposePorts, getExposeStatements, IPlatformGeneratorInfo, PackageInfo } from './configure';
export let configureNode: IPlatformGeneratorInfo = {
genDockerFile,
genDockerCompose,
genDockerComposeDebug,
defaultPorts: [3000],
initializeForDebugging,
};
function genDockerFile(serviceNameAndRelativePath: string, platform: string, os: string | undefined, ports: number[], { cmd, author, version, artifactName }: Partial<PackageInfo>): string {
let exposeStatements = getExposeStatements(ports);
return `FROM node:10.13-alpine
ENV NODE_ENV production
WORKDIR /usr/src/app
COPY ["package.json", "package-lock.json*", "npm-shrinkwrap.json*", "./"]
RUN npm install --production --silent && mv node_modules ../
COPY . .
${exposeStatements}
CMD ${cmd}`;
}
function genDockerCompose(serviceNameAndRelativePath: string, platform: string, os: string | undefined, ports: number[]): string {
return `version: '2.1'
services:
${serviceNameAndRelativePath}:
image: ${serviceNameAndRelativePath}
build: .
environment:
NODE_ENV: production
${getComposePorts(ports)}`;
}
function genDockerComposeDebug(serviceNameAndRelativePath: string, platform: string, os: string | undefined, ports: number[], { fullCommand: cmd }: Partial<PackageInfo>): string {
const inspectConfig = '--inspect=0.0.0.0:9229';
const cmdArray: string[] = cmd.split(' ');
if (cmdArray[0].toLowerCase() === 'node') {
cmdArray.splice(1, 0, inspectConfig);
cmd = `command: ${cmdArray.join(' ')}`;
} else {
cmd = `## set your startup file here\n command: node ${inspectConfig} index.js`;
}
return `version: '2.1'
services:
${serviceNameAndRelativePath}:
image: ${serviceNameAndRelativePath}
build: .
environment:
NODE_ENV: development
${getComposePorts(ports, 9229)}
${cmd}`;
}
// Hooks the Node.js platform into the debug scaffolding: builds a scaffold
// context from the workspace folder and Dockerfile, then delegates to the
// provider to generate the Node debugging configuration.
async function initializeForDebugging(context: IActionContext, folder: WorkspaceFolder, platformOS: PlatformOS, dockerfile: string, packageInfo: PackageInfo): Promise<void> {
    const scaffoldContext: DockerDebugScaffoldContext = {
        folder: folder,
        platform: 'node',
        actionContext: context,
        dockerfile: dockerfile,
    }
    await dockerDebugScaffoldingProvider.initializeNodeForDebugging(scaffoldContext);
}
|
docker run -p 9090:9090 -v /mnt/user/Share/web:/dist/static webwork |
package org.softuni.exodia.domain.entities;
import lombok.Getter;
import lombok.NoArgsConstructor;
import org.softuni.exodia.annotations.validation.composite.ValidUserEmail;
import org.softuni.exodia.annotations.validation.composite.ValidUserHashedPassword;
import org.softuni.exodia.annotations.validation.composite.ValidUserUsername;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Table;
// Application user account persisted in the "users" table. Username and
// email are unique; the password column stores a hash (validated by
// @ValidUserHashedPassword, hence the 75-char length). Only the password
// exposes a Lombok getter.
@NoArgsConstructor
@Entity
@Table(name = "users")
public class User extends BaseUuidEntity {

    @ValidUserUsername
    @Column(unique = true, nullable = false, length = 32)
    private String username;

    @Getter
    @ValidUserHashedPassword
    @Column(nullable = false, length = 75)
    private String password;

    @ValidUserEmail
    @Column(unique = true, nullable = false, length = 64)
    private String email;
}
|
#!/usr/bin/env bash
# Test:
# Run an install, and let it set up a new template directory
TEST_DIR=$(cd "$(dirname "$0")" && pwd)
# shellcheck disable=SC1091
. "$TEST_DIR/general.sh"
acceptAllTrustPrompts || exit 1
if echo "$EXTRA_INSTALL_ARGS" | grep -q "use-core-hookspath"; then
echo "Using core.hooksPath"
exit 249
fi
# delete the built-in git template folder
rm -rf "$GH_TEST_GIT_CORE/templates" || exit 1
# run the install, and let it search for the templates
echo 'n
y
' | "$GH_TEST_BIN/cli" installer --stdin || exit 1
mkdir -p "$GH_TEST_TMP/test7" &&
cd "$GH_TEST_TMP/test7" &&
git init || exit 1
# verify that the hooks are installed and are working
if ! grep 'github.com/gabyx/githooks' "$GH_TEST_TMP/test7/.git/hooks/pre-commit"; then
echo "! Githooks were not installed into a new repo"
exit 1
fi
|
<reponame>anunn1417/Rubrix
import pytest
import rubrix
from rubrix import TokenClassificationRecord
from tests.server.test_helpers import client, mocking_client
def test_log_with_empty_text(monkeypatch):
    """Logging a token-classification record whose text is only whitespace
    must be rejected by rubrix.log."""
    mocking_client(monkeypatch, client)
    dataset = "test_log_with_empty_text"
    text = " "
    rubrix.delete(dataset)  # start from a clean dataset
    with pytest.raises(Exception, match="No text or empty text provided"):
        rubrix.log(
            TokenClassificationRecord(id=0, text=text, tokens=["a", "b", "c"]),
            name=dataset,
        )
def test_log_with_empty_tokens_list(monkeypatch):
    """Logging a record with an empty `tokens` list must be rejected."""
    mocking_client(monkeypatch, client)
    # BUG FIX: the dataset name was "test_log_with_empty_text", copy-pasted
    # from the test above; it now matches this test's name so the two tests
    # no longer share (and delete) each other's dataset.
    dataset = "test_log_with_empty_tokens_list"
    text = "The text"
    rubrix.delete(dataset)
    with pytest.raises(
        Exception,
        match="ensure this value has at least 1 items",
    ):
        rubrix.log(
            TokenClassificationRecord(id=0, text=text, tokens=[]),
            name=dataset,
        )
|
import os
def dofMysqlSetup1():
    """Create `database1` and load its schema from database1_schema.sql.

    NOTE(review): credentials here are hard-coded placeholders and are
    interpolated into a shell command via os.system — replace with a proper
    MySQL client library and externalized credentials before real use.
    """
    # Implement the logic for MySQL setup process 1
    # Execute necessary MySQL commands and configurations
    # Example:
    os.system("mysql -u username -p password -e 'CREATE DATABASE database1;'")
    os.system("mysql -u username -p password database1 < database1_schema.sql")
    print("MySQL setup process 1 completed successfully.")
def dofMysqlSetup2():
    """Create `database2` and load its schema from database2_schema.sql.

    NOTE(review): same caveat as dofMysqlSetup1 — placeholder credentials
    passed through os.system; not production-safe as written.
    """
    # Implement the logic for MySQL setup process 2
    # Execute necessary MySQL commands and configurations
    # Example:
    os.system("mysql -u username -p password -e 'CREATE DATABASE database2;'")
    os.system("mysql -u username -p password database2 < database2_schema.sql")
    print("MySQL setup process 2 completed successfully.")
def showMenu():
    """Print the numbered menu of available actions, one per line."""
    for option in ("1) Exit",
                   "2) ShowMenu",
                   "3) dofMysqlSetup1",
                   "4) dofMysqlSetup2"):
        print(option)
def main():
    """Run the interactive menu loop until the user chooses Exit (1)."""
    # Dispatch table for the non-exit choices.
    actions = {
        "2": showMenu,
        "3": dofMysqlSetup1,
        "4": dofMysqlSetup2,
    }
    while True:
        showMenu()
        choice = input("Enter your choice: ")
        if choice == "1":
            print("Exiting the CLI tool.")
            return
        action = actions.get(choice)
        if action is None:
            print("Invalid choice. Please select a valid option.")
        else:
            action()

if __name__ == "__main__":
    main()
#!/bin/bash
# Copyright 2014 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
MASTER_ADDRESS=${1:-"8.8.8.18"}
cat <<EOF >/opt/kubernetes/cfg/kube-scheduler
###
# kubernetes scheduler config
# --logtostderr=true: log to standard error instead of files
KUBE_LOGTOSTDERR="--logtostderr=true"
# --v=0: log level for V logs
KUBE_LOG_LEVEL="--v=4"
KUBE_MASTER="--master=${MASTER_ADDRESS}:8080"
# --leader-elect
KUBE_LEADER_ELECT="--leader-elect"
# Add your own!
KUBE_SCHEDULER_ARGS=""
EOF
KUBE_SCHEDULER_OPTS=" \${KUBE_LOGTOSTDERR} \\
\${KUBE_LOG_LEVEL} \\
\${KUBE_MASTER} \\
\${KUBE_LEADER_ELECT} \\
\${KUBE_SCHEDULER_ARGS}"
cat <<EOF >/usr/lib/systemd/system/kube-scheduler.service
[Unit]
Description=Kubernetes Scheduler
Documentation=https://github.com/kubernetes/kubernetes
[Service]
EnvironmentFile=-/opt/kubernetes/cfg/kube-scheduler
ExecStart=/opt/kubernetes/bin/kube-scheduler ${KUBE_SCHEDULER_OPTS}
Restart=on-failure
[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload
systemctl enable kube-scheduler
systemctl restart kube-scheduler
|
<gh_stars>1-10
/*
Navicat MySQL Data Transfer
Source Server : localhost
Source Server Version : 50553
Source Host : localhost:3306
Source Database : ailablecopy
Target Server Type : MYSQL
Target Server Version : 50553
File Encoding : 65001
Date: 2020-02-15 16:23:38
*/
SET FOREIGN_KEY_CHECKS=0;
-- ----------------------------
-- Table structure for cnpo_admin
-- ----------------------------
DROP TABLE IF EXISTS `cnpo_admin`;
CREATE TABLE `cnpo_admin` (
`id` int(5) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键',
`username` varchar(20) NOT NULL COMMENT '用户名',
`password` varchar(100) NOT NULL COMMENT '密码',
`auth_key` varchar(50) DEFAULT NULL COMMENT 'KEY',
`password_reset_token` varchar(50) DEFAULT NULL COMMENT '密码重置',
`thumb_key` varchar(50) DEFAULT NULL COMMENT '头像key',
`thumb_url` varchar(100) DEFAULT NULL COMMENT '头像url',
`nickname` varchar(20) DEFAULT NULL COMMENT '别名',
`role_id` int(5) NOT NULL DEFAULT '0' COMMENT '关联角色',
`status` tinyint(1) NOT NULL DEFAULT '1' COMMENT '状态',
`created_at` int(11) NOT NULL COMMENT '添加时间',
`updated_at` int(11) NOT NULL COMMENT '更新时间',
PRIMARY KEY (`id`),
UNIQUE KEY `username` (`username`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of cnpo_admin
-- ----------------------------
-- BUG FIX: the password hash was redacted to <PASSWORD> and the opening
-- quote was lost with it, leaving an unbalanced string literal; the quote
-- is restored so the dump parses. The real hash must be re-set manually.
INSERT INTO `cnpo_admin` VALUES ('2', 'admin', '<PASSWORD>', 'WUJWw4T2XiM8jsKJaRT7fUoXaQGNnA9C', null, '', null, '', '0', '1', '1526615081', '1575461792');
-- ----------------------------
-- Table structure for cnpo_permission
-- ----------------------------
DROP TABLE IF EXISTS `cnpo_permission`;
CREATE TABLE `cnpo_permission` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'ID',
`permission` varchar(50) NOT NULL COMMENT '权限规则',
`name` varchar(50) NOT NULL COMMENT '权限名称',
`permission_level` int(3) NOT NULL COMMENT '权限等级',
`parent_id` int(8) NOT NULL DEFAULT '0' COMMENT '上级权限',
`sort` int(5) DEFAULT NULL COMMENT '排序',
`icon` varchar(255) DEFAULT NULL COMMENT '图标',
`status` int(3) NOT NULL DEFAULT '1' COMMENT '状态 1有效 5无效',
`created_at` int(11) NOT NULL COMMENT '创建时间',
`updated_at` int(11) NOT NULL COMMENT '更新时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=52 DEFAULT CHARSET=utf8 COMMENT='权限表';
-- ----------------------------
-- Records of cnpo_permission
-- ----------------------------
INSERT INTO `cnpo_permission` VALUES ('6', 'admin', '管理员管理', '1', '0', '6', ' fa-hand-peace-o', '1', '1531639957', '1575426917');
-- Remaining seed rows for `cnpo_permission` (admin menu / permission tree entries).
-- Columns (from the CREATE TABLE earlier in the dump): id, route, title, level, parent, sort, icon, status, created_at, updated_at.
INSERT INTO `cnpo_permission` VALUES ('7', 'admin/admin/index', '管理员列表', '2', '6', '1', null, '1', '1531639993', '1531639993');
INSERT INTO `cnpo_permission` VALUES ('8', 'admin/permission/index', '权限管理', '2', '6', '2', null, '1', '1531640071', '1531640071');
INSERT INTO `cnpo_permission` VALUES ('9', 'admin/role/index', '角色管理', '2', '6', '3', ' fa-bars', '1', '1531640116', '1531719595');
INSERT INTO `cnpo_permission` VALUES ('28', 'system', '系统管理', '1', '0', '5', '', '1', '1531725690', '1531725690');
INSERT INTO `cnpo_permission` VALUES ('47', 'project', '项目管理', '1', '0', '1', '', '1', '1572266588', '1580889154');
INSERT INTO `cnpo_permission` VALUES ('48', 'project/project/index', '项目管理', '2', '47', '1', '', '1', '1572266646', '1580889184');
INSERT INTO `cnpo_permission` VALUES ('49', 'work', '我的工作区', '1', '0', '1', '', '1', '1580611455', '1581249749');
INSERT INTO `cnpo_permission` VALUES ('50', 'work/work/index', '数据标注', '2', '49', '1', '', '1', '1580611486', '1581249779');
INSERT INTO `cnpo_permission` VALUES ('51', 'image/image/index', '数据管理', '2', '47', '2', '', '1', '1581247542', '1581247542');
-- ----------------------------
-- Table structure for cnpo_permission_role
-- ----------------------------
-- Join table mapping roles to permissions (role-based access control).
DROP TABLE IF EXISTS `cnpo_permission_role`;
CREATE TABLE `cnpo_permission_role` (
  `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'ID',
  `role_id` int(6) NOT NULL COMMENT '角色ID',
  `permission_id` int(6) NOT NULL COMMENT '权限ID',
  `status` int(3) NOT NULL DEFAULT '1' COMMENT '状态 1有效 5无效',
  `created_at` int(11) NOT NULL COMMENT '创建时间',
  `updated_at` int(11) NOT NULL COMMENT '更新时间',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=25 DEFAULT CHARSET=utf8 COMMENT='角色权限表';
-- ----------------------------
-- Records of cnpo_permission_role
-- ----------------------------
-- (no seed rows in this dump)
-- ----------------------------
-- Table structure for cnpo_system_role
-- ----------------------------
-- Role definitions; `name` is unique.
DROP TABLE IF EXISTS `cnpo_system_role`;
CREATE TABLE `cnpo_system_role` (
  `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'ID',
  `name` varchar(50) NOT NULL COMMENT '角色名称',
  `remark` varchar(100) NOT NULL DEFAULT '' COMMENT '角色说明',
  `status` int(3) NOT NULL DEFAULT '1' COMMENT '状态 1有效 5无效',
  `created_at` int(11) NOT NULL COMMENT '创建时间',
  `updated_at` int(11) NOT NULL COMMENT '更新时间',
  PRIMARY KEY (`id`),
  UNIQUE KEY `name` (`name`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8 COMMENT='角色表';
-- ----------------------------
-- Records of cnpo_system_role
-- ----------------------------
-- (no seed rows in this dump)
-- ----------------------------
-- Table structure for project
-- ----------------------------
-- Annotation projects; timestamps stored as unix epoch ints throughout.
DROP TABLE IF EXISTS `project`;
CREATE TABLE `project` (
  `id` int(5) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键',
  `project_name` varchar(100) NOT NULL COMMENT '项目名称',
  `description` varchar(100) NOT NULL COMMENT '项目介绍',
  `operator_id` int(5) NOT NULL DEFAULT '0' COMMENT '操作人员',
  `status` tinyint(1) NOT NULL DEFAULT '1' COMMENT '状态',
  `created_at` int(11) NOT NULL COMMENT '添加时间',
  `updated_at` int(11) NOT NULL COMMENT '更新时间',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of project
-- ----------------------------
-- ----------------------------
-- Table structure for project_admin
-- ----------------------------
-- Join table: which admins belong to which project.
-- NOTE(review): `project_id`/`admin_id` are varchar(100) while the referenced
-- `project.id` is int(5) unsigned — confirm the type mismatch is intentional.
DROP TABLE IF EXISTS `project_admin`;
CREATE TABLE `project_admin` (
  `id` int(5) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键',
  `project_id` varchar(100) NOT NULL COMMENT '项目ID',
  `admin_id` varchar(100) NOT NULL COMMENT '项目成员ID',
  `operator_id` int(5) NOT NULL DEFAULT '0' COMMENT '操作人员',
  `status` tinyint(1) NOT NULL DEFAULT '1' COMMENT '状态',
  `created_at` int(11) NOT NULL COMMENT '添加时间',
  `updated_at` int(11) NOT NULL COMMENT '更新时间',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=12 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of project_admin
-- ----------------------------
INSERT INTO `project_admin` VALUES ('6', '4', '6', '2', '1', '1581169995', '1581169995');
INSERT INTO `project_admin` VALUES ('7', '5', '4', '2', '1', '1581573084', '1581573084');
INSERT INTO `project_admin` VALUES ('8', '5', '5', '2', '1', '1581573084', '1581573084');
INSERT INTO `project_admin` VALUES ('9', '5', '6', '2', '1', '1581573084', '1581573084');
INSERT INTO `project_admin` VALUES ('10', '4', '5', '2', '1', '1581592501', '1581592501');
INSERT INTO `project_admin` VALUES ('11', '4', '7', '2', '1', '1581592501', '1581592501');
-- ----------------------------
-- Table structure for project_files
-- ----------------------------
-- Files uploaded to a project.
-- NOTE(review): the COMMENT on `file_name` ('项目成员ID', "project member ID")
-- looks copy-pasted from project_admin — confirm and fix in the schema source.
DROP TABLE IF EXISTS `project_files`;
CREATE TABLE `project_files` (
  `id` int(5) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键',
  `project_id` varchar(100) NOT NULL COMMENT '项目ID',
  `file_name` varchar(100) NOT NULL COMMENT '项目成员ID',
  `operator_id` int(5) NOT NULL DEFAULT '0' COMMENT '操作人员',
  `status` tinyint(1) NOT NULL DEFAULT '1' COMMENT '状态',
  `created_at` int(11) NOT NULL COMMENT '添加时间',
  `updated_at` int(11) NOT NULL COMMENT '更新时间',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=13 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of project_files
-- ----------------------------
-- ----------------------------
-- Table structure for project_image
-- ----------------------------
-- Individual images within a project file; `status` tracks the labeling
-- workflow (per its COMMENT: 1 unlabeled, 5 labeled, 8 reviewed).
DROP TABLE IF EXISTS `project_image`;
CREATE TABLE `project_image` (
  `id` int(11) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键',
  `project_id` varchar(100) NOT NULL COMMENT '项目ID',
  `file_id` int(5) NOT NULL COMMENT '文件ID',
  `image_name` varchar(100) NOT NULL COMMENT '图片名称',
  `image_url` varchar(100) NOT NULL COMMENT '图片地址',
  `image_annotation` varchar(500) NOT NULL DEFAULT '' COMMENT '图片标注信息',
  `status` tinyint(1) NOT NULL DEFAULT '1' COMMENT '状态 1未标注 5标注 8 审核',
  `operator_id` int(5) NOT NULL DEFAULT '0' COMMENT '上传人员',
  `label_user` int(5) NOT NULL DEFAULT '0' COMMENT '标注人员',
  `created_at` int(11) NOT NULL COMMENT '添加时间',
  `updated_at` int(11) NOT NULL COMMENT '更新时间',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=18 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of project_image
-- ----------------------------
-- ----------------------------
-- Table structure for project_label
-- ----------------------------
-- Label (tag) definitions per project.
-- NOTE(review): `label_name` COMMENT also says '项目成员ID' — likely copy-paste.
DROP TABLE IF EXISTS `project_label`;
CREATE TABLE `project_label` (
  `id` int(5) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键',
  `project_id` varchar(100) NOT NULL COMMENT '项目ID',
  `label_name` varchar(100) NOT NULL COMMENT '项目成员ID',
  `operator_id` int(5) NOT NULL DEFAULT '0' COMMENT '操作人员',
  `status` tinyint(1) NOT NULL DEFAULT '1' COMMENT '状态',
  `created_at` int(11) NOT NULL COMMENT '添加时间',
  `updated_at` int(11) NOT NULL COMMENT '更新时间',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of project_label
-- ----------------------------
|
<gh_stars>1000+
/*
Copyright 2021 The Crossplane Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package composition
import (
"context"
"io"
"testing"
"github.com/google/go-cmp/cmp"
kerrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/types"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/log/zap"
"sigs.k8s.io/controller-runtime/pkg/manager"
"sigs.k8s.io/controller-runtime/pkg/reconcile"
xpv1 "github.com/crossplane/crossplane-runtime/apis/common/v1"
"github.com/crossplane/crossplane-runtime/pkg/errors"
"github.com/crossplane/crossplane-runtime/pkg/logging"
"github.com/crossplane/crossplane-runtime/pkg/resource/fake"
"github.com/crossplane/crossplane-runtime/pkg/test"
v1 "github.com/crossplane/crossplane/apis/apiextensions/v1"
"github.com/crossplane/crossplane/apis/apiextensions/v1alpha1"
)
// TestReconcile is a table-driven test of the CompositionRevision Reconciler.
// Each case wires a fake manager whose mock client scripts the Get/List/
// Create/StatusUpdate calls, then asserts on the reconcile.Result and error.
func TestReconcile(t *testing.T) {
	errBoom := errors.New("boom")
	testLog := logging.NewLogrLogger(zap.New(zap.UseDevMode(true), zap.WriteTo(io.Discard)).WithName("testlog"))

	// Used as the Controller flag on the owner references below.
	ctrl := true

	// The Composition under reconciliation.
	comp := &v1.Composition{
		ObjectMeta: metav1.ObjectMeta{
			Name: "cool-composition",
			UID:  types.UID("no-you-uid"),
		},
	}

	// Not owned by the above composition.
	rev1 := &v1alpha1.CompositionRevision{
		ObjectMeta: metav1.ObjectMeta{
			Name: comp.GetName() + "-1",
		},
		Spec: v1alpha1.CompositionRevisionSpec{Revision: 1},
	}

	// Owned by the above composition, but with an 'older' hash. The status
	// indicates it is the current version, and thus should be updated.
	rev2 := &v1alpha1.CompositionRevision{
		ObjectMeta: metav1.ObjectMeta{
			Name: comp.GetName() + "-2",
			OwnerReferences: []metav1.OwnerReference{{
				UID:        comp.GetUID(),
				Controller: &ctrl,
			}},
			Labels: map[string]string{
				v1alpha1.LabelCompositionSpecHash: "some-older-hash",
			},
		},
		Spec: v1alpha1.CompositionRevisionSpec{Revision: 2},
		Status: v1alpha1.CompositionRevisionStatus{
			ConditionedStatus: xpv1.ConditionedStatus{
				Conditions: []xpv1.Condition{v1alpha1.CompositionSpecMatches()},
			},
		},
	}

	// Owned by the above composition, with a current hash. The status
	// indicates it is not the current revision, and thus should be updated.
	rev3 := &v1alpha1.CompositionRevision{
		ObjectMeta: metav1.ObjectMeta{
			Name: comp.GetName() + "-3",
			OwnerReferences: []metav1.OwnerReference{{
				UID:        comp.GetUID(),
				Controller: &ctrl,
			}},
			Labels: map[string]string{
				v1alpha1.LabelCompositionSpecHash: comp.Spec.Hash(),
			},
		},
		Spec: v1alpha1.CompositionRevisionSpec{Revision: 3},
		Status: v1alpha1.CompositionRevisionStatus{
			ConditionedStatus: xpv1.ConditionedStatus{
				Conditions: []xpv1.Condition{v1alpha1.CompositionSpecDiffers()},
			},
		},
	}

	// Owned by the above composition, with a current hash. The status
	// indicates it is the current revision, and thus should not be updated.
	rev4 := &v1alpha1.CompositionRevision{
		ObjectMeta: metav1.ObjectMeta{
			Name: comp.GetName() + "-4",
			OwnerReferences: []metav1.OwnerReference{{
				UID:        comp.GetUID(),
				Controller: &ctrl,
			}},
			Labels: map[string]string{
				v1alpha1.LabelCompositionSpecHash: comp.Spec.Hash(),
			},
		},
		Spec: v1alpha1.CompositionRevisionSpec{Revision: 3},
		Status: v1alpha1.CompositionRevisionStatus{
			ConditionedStatus: xpv1.ConditionedStatus{
				Conditions: []xpv1.Condition{v1alpha1.CompositionSpecMatches()},
			},
		},
	}

	type args struct {
		mgr  manager.Manager
		opts []ReconcilerOption
	}
	type want struct {
		r   reconcile.Result
		err error
	}

	cases := map[string]struct {
		reason string
		args   args
		want   want
	}{
		"CompositionNotFound": {
			reason: "We should not return an error if the Composition was not found.",
			args: args{
				mgr: &fake.Manager{
					Client: &test.MockClient{
						MockGet: test.NewMockGetFn(kerrors.NewNotFound(schema.GroupResource{}, "")),
					},
				},
			},
			want: want{
				r: reconcile.Result{},
			},
		},
		"GetCompositionError": {
			reason: "We should return any other error encountered while getting a Composition.",
			args: args{
				mgr: &fake.Manager{
					Client: &test.MockClient{
						MockGet: test.NewMockGetFn(errBoom),
					},
				},
			},
			want: want{
				err: errors.Wrap(errBoom, errGet),
			},
		},
		"CompositionDeleted": {
			reason: "We should return without error if the Composition exists but is being deleted.",
			args: args{
				mgr: &fake.Manager{
					Client: &test.MockClient{
						MockGet: test.NewMockGetFn(nil, func(obj client.Object) error {
							now := metav1.Now()
							*obj.(*v1.Composition) = v1.Composition{ObjectMeta: metav1.ObjectMeta{DeletionTimestamp: &now}}
							return nil
						}),
					},
				},
			},
			want: want{
				r:   reconcile.Result{},
				err: nil,
			},
		},
		"ListCompositionRevisionsError": {
			reason: "We should return any error encountered while listing CompositionRevisions.",
			args: args{
				mgr: &fake.Manager{
					Client: &test.MockClient{
						MockGet:  test.NewMockGetFn(nil),
						MockList: test.NewMockListFn(errBoom),
					},
				},
			},
			want: want{
				err: errors.Wrap(errBoom, errListRevs),
			},
		},
		"SuccessfulNoOp": {
			reason: "We should not create a new CompositionRevision if one exists that matches the Composition's spec hash.",
			args: args{
				mgr: &fake.Manager{
					Client: &test.MockClient{
						MockGet: test.NewMockGetFn(nil, func(obj client.Object) error {
							*obj.(*v1.Composition) = *comp
							return nil
						}),
						MockList: test.NewMockListFn(nil, func(obj client.ObjectList) error {
							*obj.(*v1alpha1.CompositionRevisionList) = v1alpha1.CompositionRevisionList{
								Items: []v1alpha1.CompositionRevision{
									// Not controlled by the above composition.
									*rev1,
									// Controlled by the above composition with a current hash.
									// This indicates we don't need to create a new revision.
									// It does not need its status updated.
									*rev4,
								},
							}
							return nil
						}),
					},
				},
			},
			want: want{
				r:   reconcile.Result{},
				err: nil,
			},
		},
		"UpdateRevisionStatusError": {
			reason: "We should return any error encountered while updating a CompositionRevision's status.",
			args: args{
				mgr: &fake.Manager{
					Client: &test.MockClient{
						MockGet: test.NewMockGetFn(nil, func(obj client.Object) error {
							*obj.(*v1.Composition) = *comp
							return nil
						}),
						MockStatusUpdate: test.NewMockStatusUpdateFn(errBoom),
						MockList: test.NewMockListFn(nil, func(obj client.ObjectList) error {
							*obj.(*v1alpha1.CompositionRevisionList) = v1alpha1.CompositionRevisionList{
								Items: []v1alpha1.CompositionRevision{
									// Not controlled by the above composition.
									*rev1,
									// Controlled by the above composition with a current hash.
									// This indicates we don't need to create a new revision.
									// It does need its status updated to indicate that it's
									// the current revision, though.
									*rev3,
								},
							}
							return nil
						}),
					},
				},
			},
			want: want{
				r:   reconcile.Result{},
				err: errors.Wrap(errBoom, errUpdateRevStatus),
			},
		},
		"CreateCompositionRevisionError": {
			reason: "We should return any error encountered while creating a CompositionRevision.",
			args: args{
				mgr: &fake.Manager{
					Client: &test.MockClient{
						MockGet:    test.NewMockGetFn(nil),
						MockList:   test.NewMockListFn(nil),
						MockCreate: test.NewMockCreateFn(errBoom),
					},
				},
			},
			want: want{
				err: errors.Wrap(errBoom, errCreateRev),
			},
		},
		"SuccessfulCreation": {
			reason: "We should increase the revision number by one when creating a new CompositionRevision.",
			args: args{
				mgr: &fake.Manager{
					Client: &test.MockClient{
						MockGet: test.NewMockGetFn(nil, func(obj client.Object) error {
							*obj.(*v1.Composition) = *comp
							return nil
						}),
						MockStatusUpdate: test.NewMockStatusUpdateFn(nil, func(obj client.Object) error {
							want := rev2.DeepCopy()
							want.Status.SetConditions(v1alpha1.CompositionSpecDiffers())
							if diff := cmp.Diff(want, obj, test.EquateConditions()); diff != "" {
								t.Errorf("Status().Update(...): -want, +got:\n%s", diff)
							}
							return nil
						}),
						MockList: test.NewMockListFn(nil, func(obj client.ObjectList) error {
							*obj.(*v1alpha1.CompositionRevisionList) = v1alpha1.CompositionRevisionList{
								Items: []v1alpha1.CompositionRevision{
									// Not controlled by the above composition.
									*rev1,
									// Controlled by the above composition, but with an older hash.
									// This indicates we need to create a new revision. The status
									// also needs updating to indicate this is not the current revision.
									*rev2,
								},
							}
							return nil
						}),
						MockCreate: test.NewMockCreateFn(nil, func(got client.Object) error {
							want := NewCompositionRevision(comp, rev2.Spec.Revision+1, comp.Spec.Hash())
							if diff := cmp.Diff(want, got); diff != "" {
								t.Errorf("Create(): -want, +got:\n%s", diff)
							}
							return nil
						}),
					},
				},
			},
			want: want{
				r:   reconcile.Result{},
				err: nil,
			},
		},
	}

	for name, tc := range cases {
		t.Run(name, func(t *testing.T) {
			r := NewReconciler(tc.args.mgr, append(tc.args.opts, WithLogger(testLog))...)
			got, err := r.Reconcile(context.Background(), reconcile.Request{})

			if diff := cmp.Diff(tc.want.err, err, test.EquateErrors()); diff != "" {
				t.Errorf("\n%s\nr.Reconcile(...): -want error, +got error:\n%s", tc.reason, diff)
			}
			if diff := cmp.Diff(tc.want.r, got, test.EquateErrors()); diff != "" {
				t.Errorf("\n%s\nr.Reconcile(...): -want, +got:\n%s", tc.reason, diff)
			}
		})
	}
}
|
<filename>src/filesystem/NextFSDirectory.ts<gh_stars>0
import { NextFSObject } from './NextFSObject';
import { NextFSType } from "./NextFSType";
/**
 * Filesystem node representing a directory.
 *
 * Inherits all behavior from {@link NextFSObject} and only pins the node's
 * `type` discriminator to {@link NextFSType.Directory}.
 */
export class NextFSDirectory extends NextFSObject {
  public type = NextFSType.Directory;
}
|
<gh_stars>1-10
package com.iflytek.cyber.resolver.audioplayer.service;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.annotation.NonNull;
/**
 * Handler that periodically reports the playback position of a
 * {@link MediaPlayer} to a {@link HandlerCallback}, and schedules one-shot
 * (delayed) and recurring (interval) progress-report events scoped to a
 * specific audio item id.
 */
public class ProgressHandler extends Handler {

    // Message codes dispatched through handleMessage().
    private static final int MSG_UPDATE = 0x1001;                // fast position poll loop
    private static final int MSG_UPDATE_PROGRESS_EVENT = 0x1002; // delayed/interval progress report
    private static final int MSG_UPDATE_CALLBACK = 0x1003;       // once-per-second callback loop

    // Values carried in Message.arg1 to distinguish the two progress-event kinds.
    private static final int PROGRESS_DELAYED = 1;
    private static final int PROGRESS_INTERVAL = 2;

    private static final String INTERVAL = "interval"; // Bundle key for the repeat interval

    private static final long DELAY = 100;           // ms between MSG_UPDATE polls
    private static final long DELAY_CALLBACK = 1000; // ms between MSG_UPDATE_CALLBACK polls

    private MediaPlayer player;
    private String currentAudioItemId = null; // to mark the current audio item
    private HandlerCallback onPositionUpdateListener;

    ProgressHandler(@NonNull MediaPlayer mediaPlayer,
                    @NonNull HandlerCallback onPositionUpdateListener) {
        this.onPositionUpdateListener = onPositionUpdateListener;
        player = mediaPlayer;
    }

    /** Marks which audio item is current; stale progress events for other items are ignored. */
    void updateAudioItemId(String audioItemId) {
        currentAudioItemId = audioItemId;
    }

    /** Schedules a one-shot progress report for the given audio item after {@code delay} ms. */
    void setProgressReportDelayed(String audioItem, long delay) {
        Message message = Message.obtain();
        message.what = MSG_UPDATE_PROGRESS_EVENT;
        message.arg1 = PROGRESS_DELAYED;
        message.obj = audioItem;
        sendMessageDelayed(message, delay);
    }

    /**
     * Schedules a recurring progress report for the given audio item every
     * {@code interval} ms; the interval is carried in the message data so
     * handleMessage() can reschedule it.
     */
    void setProgressReportInterval(String audioItem, long interval) {
        Message message = Message.obtain();
        message.what = MSG_UPDATE_PROGRESS_EVENT;
        message.arg1 = PROGRESS_INTERVAL;
        Bundle bundle = new Bundle();
        bundle.putLong(INTERVAL, interval);
        message.setData(bundle);
        message.obj = audioItem;
        sendMessageDelayed(message, interval);
    }

    @Override
    public void handleMessage(Message msg) {
        if (player == null)
            return;
        // player.getCurrentPosition() can throw if the player is in a bad
        // state, hence the broad catch below.
        try {
            switch (msg.what) {
                case MSG_UPDATE:
                    onPositionUpdateListener.onPositionUpdated(player.getCurrentPosition());
                    startUpdating(DELAY); // keep the poll loop running
                    break;
                case MSG_UPDATE_PROGRESS_EVENT:
                    String audioItem = msg.obj.toString();
                    // Only fire if the event still targets the current item.
                    if (audioItem.equals(currentAudioItemId)) {
                        if (msg.arg1 == PROGRESS_INTERVAL) {
                            onPositionUpdateListener.onProgressReportInterval(player.getCurrentPosition());
                            long interval = msg.getData().getLong(INTERVAL);
                            sendMessageDelayed(msg, interval); // reschedule recurring report
                        } else if (msg.arg1 == PROGRESS_DELAYED) {
                            onPositionUpdateListener.onProgressReportDelay(player.getCurrentPosition());
                        }
                    }
                    break;
                case MSG_UPDATE_CALLBACK:
                    if (onPositionUpdateListener != null)
                        onPositionUpdateListener.onPositionUpdatedCallback(player.getCurrentPosition());
                    sendEmptyMessageDelayed(MSG_UPDATE_CALLBACK, DELAY_CALLBACK); // once per second
                    break;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Starts the once-per-second callback loop immediately. */
    void startUpdating() {
        sendEmptyMessage(MSG_UPDATE_CALLBACK);
    }

    // NOTE(review): when delay == 0 this both starts the callback loop AND
    // still posts a zero-delay MSG_UPDATE below — it looks like a `return`
    // after startUpdating() is missing. Confirm intent before changing.
    private void startUpdating(long delay) {
        if (delay == 0)
            startUpdating();
        Message message = Message.obtain();
        message.what = MSG_UPDATE;
        sendMessageDelayed(message, delay);
    }

    /** Public entry point to start the callback loop via the message queue. */
    public void startUpdatingCallback() {
        Message message = Message.obtain();
        message.what = MSG_UPDATE_CALLBACK;
        sendMessage(message);
    }

    public interface HandlerCallback {
        /**
         * update current position of MediaPlayer whatever player is not stopped
         *
         * @param position current position of MediaPlayer in milliseconds, if player is stopped position would be 0.
         */
        void onPositionUpdated(int position);

        /** Called from the once-per-second MSG_UPDATE_CALLBACK loop. */
        void onPositionUpdatedCallback(int position);

        /** Called once when a delayed progress report fires. */
        void onProgressReportDelay(int offsetInMilliseconds);

        /** Called on each tick of a recurring progress report. */
        void onProgressReportInterval(int offsetInMilliseconds);
    }
}
|
#pragma once
#include <vector>
#include <cmath>
#include "Point.h"
#include <iostream>
// Uniform-grid spatial hash over a 2D region, used to accelerate
// point-vs-point collision detection: points are bucketed into grid cells so
// collision checks only consider a cell and its neighbors.
class SpatialHash
{
public:
	// Builds a grid covering a boundaryX x boundaryY region split into the
	// given number of cells per row/column.
	SpatialHash(float boundaryX, float boundaryY, int numberOfCellsPerRow, int numberOfCellsPerColumn);
	~SpatialHash();

	// Hash function
	// Maps a point's position to the index of its grid cell.
	int GetCellIndex(Point& point);

	// Buckets all points into the table.
	void FillSpatialHashTable(std::vector<Point>& points, int numberOfPoints);
	// Re-buckets points after they have moved.
	void Redistribute();
	void AddPoint(Point* p);
	void CollisionDetection();
	// Checks `point` against points in the cells adjacent to `cellIndex`.
	// NOTE(review): "Adjecent" is a typo for "Adjacent" — renaming would
	// require touching all call sites, so it is only flagged here.
	void CheckAdjecentCells(Point& point, int cellIndex);
	// Emits debug/render geometry for the grid into `vertices`.
	void GenerateSpatialHashVertices(std::vector<Vertex>& vertices);
	void ClearPoints();

private:
	float m_BoundaryX, m_BoundaryY;
	// NOTE(review): inconsistent casing — "m_NumberofCellsPerRow" vs
	// "m_NumberOfCellsPerColumn".
	int m_NumberOfCellsPerColumn, m_NumberofCellsPerRow;
	// One bucket of Point pointers per grid cell, indexed by GetCellIndex().
	std::vector<std::vector<Point*>> m_HashedPoints;
};
<gh_stars>0
import java.io.*;
import java.math.*;
import java.security.*;
import java.text.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.function.*;
import java.util.regex.*;
import java.util.stream.*;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
public class Solution {

    /**
     * Counts triplets (i, j, k) with i &lt; j &lt; k such that
     * arr[i], arr[j], arr[k] form a geometric progression with common ratio r
     * (i.e. arr[j] == arr[i] * r and arr[k] == arr[j] * r).
     *
     * Single right-to-left pass, O(n) expected time, O(n) space:
     * - freqMap counts how many times each value occurs to the RIGHT of the cursor.
     * - pairsMap counts, for each (v, v*r) pair key, how many (middle, last)
     *   pairs exist to the right that a value v at the cursor could complete.
     *
     * Fixes over the previous version: removed a leftover debug
     * System.out.println inside the loop (it corrupted stdout, which is the
     * judged output channel) and a duplicated recomputation of r_x / r_r_x.
     *
     * @param arr input values
     * @param r   common ratio (may be 1; duplicates are handled via counts)
     * @return number of geometric-progression triplets
     */
    static long countTriplets(List<Long> arr, long r) {
        if (arr.size() < 3) return 0;

        long count = 0;
        HashMap<Long, Long> freqMap = new HashMap<>();
        HashMap<List<Long>, Long> pairsMap = new HashMap<>();

        for (int i = arr.size() - 1; i >= 0; i--) {
            long x = arr.get(i);
            long rX = x * r;
            long rrX = rX * r;

            // x as the FIRST element of (x, x*r, x*r*r): every recorded
            // (x*r, x*r*r) pair to the right completes a triplet.
            Long completions = pairsMap.get(Arrays.asList(rX, rrX));
            if (completions != null) {
                count += completions;
            }

            // x as the MIDDLE element (x/r, x, x*r): record the (x, x*r) pair
            // once for every x*r already seen to the right.
            List<Long> pair = Arrays.asList(x, rX);
            Long freqOfNext = freqMap.get(rX);
            if (pairsMap.containsKey(pair)) {
                // pair exists => freqOfNext was non-null when first recorded,
                // and freqMap only grows, so it is still non-null here.
                pairsMap.put(pair, pairsMap.get(pair) + freqOfNext);
            } else if (freqOfNext != null) {
                pairsMap.put(pair, freqOfNext);
            }

            // x as the LAST element: bump its frequency for elements further left.
            freqMap.merge(x, 1L, Long::sum);
        }
        return count;
    }

    /**
     * Reads "n r" then n values from stdin and writes the triplet count to
     * the file named by the OUTPUT_PATH environment variable (judge harness).
     */
    public static void main(String[] args) throws IOException {
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(System.in));
        BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(System.getenv("OUTPUT_PATH")));

        String[] nr = bufferedReader.readLine().replaceAll("\\s+$", "").split(" ");
        int n = Integer.parseInt(nr[0]);
        long r = Long.parseLong(nr[1]);

        List<Long> arr = Stream.of(bufferedReader.readLine().replaceAll("\\s+$", "").split(" "))
            .map(Long::parseLong)
            .collect(toList());

        long ans = countTriplets(arr, r);

        bufferedWriter.write(String.valueOf(ans));
        bufferedWriter.newLine();

        bufferedReader.close();
        bufferedWriter.close();
    }
}
|
#!/bin/bash -x
# Runs drop_public_tables.sql against the database described by the
# DB_HOST / DB_USER / DB_NAME / DB_PASS environment variables.
# Variables are quoted so values containing spaces or shell metacharacters
# are passed through intact; -a echoes the script as it executes.
PGPASSWORD="$DB_PASS" psql -h "$DB_HOST" -U "$DB_USER" -d "$DB_NAME" -a -f ./drop_public_tables.sql
|
<gh_stars>0
package com.huatuo.net.thread;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.content.Context;
import android.os.Handler;
import com.huatuo.dictionary.MsgId;
import com.huatuo.net.http.ActionResponse;
import com.huatuo.net.http.HttpAgent;
import com.huatuo.util.CommonUtil;
import com.huatuo.util.LogUtil;
/**
 * Background task that requests the available service-time list for an order
 * from the "order/getServiceTime" endpoint and reports the outcome back to
 * the UI thread via a Handler message.
 *
 * @author Wang Runze (Android engineer)
 */
public class GetServicesTimeListInfo implements Runnable {

    private Handler myHandler;   // receives DOWN_DATA_S / DOWN_DATA_F / NET_NOT_CONNECT
    private Context mContext;
    // private ArrayList<JSONObject> technicianList;
    // private ArrayList<String> levelList;
    private JSONObject inJson;               // optional request body, set via setInJsong()
    private HashMap<String, String> inJsonMap; // request parameters
    private JSONObject outJson;              // response body
    // NOTE(review): serviceTimeList is allocated but never populated or read —
    // confirm whether it is dead code.
    private ArrayList<JSONObject> serviceTimeList;
    private String outMsgJson = null;        // response message text
    private int outCode;                     // response status code

    /**
     * @param mContext calling context, passed through to HttpAgent
     * @param mHandler handler notified with the request outcome
     * @param inJson   request parameters (the original doc mentioned a
     *                 projectID entry — confirm the expected keys with callers)
     */
    public GetServicesTimeListInfo(Context mContext, Handler mHandler, HashMap<String, String> inJson) {
        this.mContext = mContext;
        this.myHandler = mHandler;
        serviceTimeList = new ArrayList<JSONObject>();
        this.inJsonMap = inJson;
        // levelList = new ArrayList<String>();
    }

    @Override
    public void run() {
        try {
            CommonUtil.log("请求服务时间列表参数------------inJsonMap:" + inJsonMap);
            // Fire the synchronous HTTP request (this runs on a worker thread).
            HttpAgent httpAgent = new HttpAgent("order/getServiceTime", inJsonMap,
                    mContext);
            ActionResponse response = httpAgent.sendRequest(null);
            int code = response.getCode();
            // Capture body/msg/code regardless of outcome so getters are valid.
            initRsultData(response);
            CommonUtil.log("请求服务时间列表参数------------code:" + code);
            if (code == 0) {
                // initRsultData(response);
                CommonUtil.log("请求服务时间列表参数------------myHandler:" + myHandler);
                myHandler.sendEmptyMessage(MsgId.DOWN_DATA_S);
            } else if (code == MsgId.NET_NOT_CONNECT) {
                myHandler.sendEmptyMessage(MsgId.NET_NOT_CONNECT);
            } else {
                myHandler.sendEmptyMessage(MsgId.DOWN_DATA_F);
            }
        } catch (Exception e) {
            // LogUtil.e(Tag, "LoginInvokeItem run" + e.getMessage());
            e.printStackTrace();
        }
    }

    /**
     * Sets an optional request body.
     * NOTE(review): method name "setInJsong" is a typo for "setInJson", and
     * the stored inJson is never used by run() — confirm before removing.
     */
    public void setInJsong(JSONObject ij) {
        this.inJson = ij;
    }

    /** Returns the parsed response body (valid after run() completes). */
    public JSONObject getOutJson() {
        return this.outJson;
    }

    /**
     * Initializes the result fields (body, message, code) from the response.
     */
    private void initRsultData(ActionResponse actionRespons) {
        JSONObject bodyJsonObject = actionRespons.getRsbody();
        outJson = bodyJsonObject;
        outMsgJson = actionRespons.getMsg();
        outCode = actionRespons.getCode();
    }

    /** Returns the response message text (valid after run() completes). */
    public String getOutMsg() {
        return outMsgJson;
    }

    /** Returns the response status code (valid after run() completes). */
    public int getOutCode() {
        return outCode;
    }
}
|
<gh_stars>0
import { prisma } from "../../../database/prisma-client";
/**
 * Use case: delete a single todo by its id.
 */
export class DeleteTodoByIdUseCase {
  /**
   * Deletes the todo with the given id.
   *
   * @param id primary key of the todo to delete
   * @returns the deleted todo record
   * @throws Error if no todo with that id exists
   */
  async execute(id: string) {
    // Look the record up first so a missing id fails with a domain error
    // rather than a raw Prisma "record not found" exception.
    const todoExists = await prisma.todos.findUnique({
      where: {
        id
      }
    });

    if (!todoExists) {
      // Fixed grammar of the error message ("does not exists" -> "does not exist").
      throw new Error("Todo does not exist");
    }

    const result = await prisma.todos.delete({
      where: {
        id
      }
    });

    return result;
  }
}
|
<reponame>avanov/Solo
import logging
from typing import NamedTuple, Awaitable
import routes
import aioredis
from solo.server.db import SQLEngine
from solo.types import IO
logger = logging.getLogger(__name__)
class App(NamedTuple):
    """Immutable application container bundling routing and storage handles.

    The ``IO[...]`` fields hold deferred (not-yet-run) initializations of the
    SQL engine and Redis connection rather than live connections.
    """
    route_map: routes.Mapper        # URL pattern -> handler mapping
    url_gen: routes.URLGenerator    # reverse URL generation for route_map
    dbengine: IO[SQLEngine]         # deferred SQL engine setup
    memstore: IO[aioredis.commands.Redis]  # deferred Redis client setup
|
package cyclops.reactive.collection.container.mutable;
import static java.util.Arrays.asList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import cyclops.container.control.Option;
import cyclops.container.immutable.tuple.Tuple;
import cyclops.container.immutable.tuple.Tuple2;
import cyclops.function.evaluation.Evaluation;
import cyclops.reactive.collection.container.AbstractCollectionXTest;
import cyclops.reactive.collection.container.ReactiveConvertableSequence;
import cyclops.reactive.collection.container.fluent.FluentCollectionX;
import cyclops.reactive.companion.Spouts;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.function.UnaryOperator;
import java.util.stream.Stream;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
/**
 * DequeX-specific cases for the shared {@link AbstractCollectionXTest} suite:
 * supplies DequeX factories for the inherited tests and adds checks for
 * span/splitBy/splitAt/partition, async lazy materialization, coflatMap and
 * onEmptySwitch.
 */
public class DequeXTest extends AbstractCollectionXTest {

    // Incremented by the lazily materialized deque in asyncTest(); reset per test.
    AtomicLong counter = new AtomicLong(0);

    @Override
    public <T> DequeX<T> of(T... values) {
        return DequeX.of(values);
    }

    @Before
    public void setup() {
        counter = new AtomicLong(0);
        super.setup();
    }

    // Inherited case disabled for DequeX.
    @Test
    @Ignore
    public void printNull() {
    }

    @Test
    public void span() {
        // span splits at the first element that fails the predicate.
        assertThat(of(1, 2, 3, 4, 1, 2, 3, 4).span(i -> i < 3),
                equalTo(Tuple.tuple(of(1, 2), of(3, 4, 1, 2, 3, 4))));
        assertThat(of(1, 2, 3).span(i -> i < 9),
                equalTo(Tuple.tuple(of(1, 2, 3), of())));
        assertThat(of(1, 2, 3).span(i -> i < 0),
                equalTo(Tuple.tuple(of(), of(1, 2, 3))));
    }

    @Test
    public void splitBy() {
        // splitBy splits at the first element that satisfies the predicate.
        assertThat(of(1, 2, 3, 4, 1, 2, 3, 4).splitBy(i -> i > 3),
                equalTo(Tuple.tuple(of(1, 2, 3), of(4, 1, 2, 3, 4))));
        assertThat(of(1, 2, 3).splitBy(i -> i < 9),
                equalTo(Tuple.tuple(of(), of(1, 2, 3))));
        assertThat(of(1, 2, 3).splitBy(i -> i < 0),
                equalTo(Tuple.tuple(of(1, 2, 3), of())));
    }

    @Test
    public void splitAtTest() {
        // Indexes past either end clamp rather than throw.
        assertThat(of(1, 2, 3).splitAt(4), equalTo(Tuple.tuple(of(1, 2, 3), of())));
        assertThat(of(1, 2, 3).splitAt(3), equalTo(Tuple.tuple(of(1, 2, 3), of())));
        assertThat(of(1, 2, 3).splitAt(2), equalTo(Tuple.tuple(of(1, 2), of(3))));
        assertThat(of(1, 2, 3).splitAt(1), equalTo(Tuple.tuple(of(1), of(2, 3))));
        assertThat(of(1, 2, 3).splitAt(0), equalTo(Tuple.tuple(of(), of(1, 2, 3))));
        assertThat(of(1, 2, 3).splitAt(-1), equalTo(Tuple.tuple(of(), of(1, 2, 3))));
    }

    @Test
    public void testPartition() {
        assertEquals(asList(1, 3, 5),
                of(1, 2, 3, 4, 5, 6).partition(i -> i % 2 != 0)._1().toList());
        assertEquals(asList(2, 4, 6),
                of(1, 2, 3, 4, 5, 6).partition(i -> i % 2 != 0)._2().toList());
        assertEquals(asList(2, 4, 6),
                of(1, 2, 3, 4, 5, 6).partition(i -> i % 2 == 0)._1().toList());
        assertEquals(asList(1, 3, 5),
                of(1, 2, 3, 4, 5, 6).partition(i -> i % 2 == 0)._2().toList());
        assertEquals(asList(1, 2, 3),
                of(1, 2, 3, 4, 5, 6).partition(i -> i <= 3)._1().toList());
        assertEquals(asList(4, 5, 6),
                of(1, 2, 3, 4, 5, 6).partition(i -> i <= 3)._2().toList());
        assertEquals(asList(1, 2, 3, 4, 5, 6),
                of(1, 2, 3, 4, 5, 6).partition(i -> true)._1().toList());
        assertEquals(asList(),
                of(1, 2, 3, 4, 5, 6).partition(i -> true)._2().toList());
        assertEquals(asList(),
                of(1, 2, 3, 4, 5, 6).partition(i -> false)._1().toList());
        // NOTE(review): this final case calls splitBy, not partition —
        // presumably checking the empty-split edge; confirm it is intentional.
        assertEquals(asList(1, 2, 3, 4, 5, 6),
                of(1, 2, 3, 4, 5, 6).splitBy(i -> false)._1().toList());
    }

    @Test
    public void asyncTest() throws InterruptedException {
        // Lazily materialize an async stream into a DequeX; elements should
        // keep arriving (and bump `counter`) after materialize() returns.
        Spouts.async(Stream.generate(() -> "next"), Executors.newFixedThreadPool(1))
                .onePer(1, TimeUnit.MILLISECONDS)
                .take(1000)
                .to(ReactiveConvertableSequence::converter)
                .dequeX(Evaluation.LAZY)
                .peek(i -> counter.incrementAndGet())
                .materialize();

        long current = counter.get();
        Thread.sleep(400);
        assertTrue(current < counter.get());
    }

    @Test
    public void coflatMap() {
        assertThat(DequeX.of(1, 2, 3)
                        .coflatMap(s -> s.sumInt(i -> i))
                        .singleOrElse(null),
                equalTo(6));
    }

    @Test
    public void onEmptySwitch() {
        assertThat(DequeX.empty()
                        .onEmptySwitch(() -> DequeX.of(1, 2, 3))
                        .toList(),
                equalTo(ListX.of(1, 2, 3)));
    }

    /* (non-Javadoc)
     * @see com.oath.cyclops.function.collections.extensions.AbstractCollectionXTest#zero()
     */
    @Override
    public <T> FluentCollectionX<T> empty() {
        return DequeX.empty();
    }

    @Override
    public FluentCollectionX<Integer> range(int start, int end) {
        return DequeX.range(start, end);
    }

    @Override
    public FluentCollectionX<Long> rangeLong(long start, long end) {
        return DequeX.rangeLong(start, end);
    }

    @Override
    public <T> FluentCollectionX<T> iterate(int times, T seed, UnaryOperator<T> fn) {
        return DequeX.iterate(times, seed, fn);
    }

    @Override
    public <T> FluentCollectionX<T> generate(int times, Supplier<T> fn) {
        return DequeX.generate(times, fn);
    }

    @Override
    public <U, T> FluentCollectionX<T> unfold(U seed, Function<? super U, Option<Tuple2<T, U>>> unfolder) {
        return DequeX.unfold(seed, unfolder);
    }

    @Test
    public void compareDifferentSizes() {
        assertThat(empty().size(), not(equalTo(of(1).size())));
        assertThat(of(1).size(), not(equalTo(empty().size())));
        assertThat(of(1).size(), not(equalTo(of(1, 2, 3).size())));
    }
}
|
def calculate_interest_rate(principal, rate, period):
    """Return the simple interest accrued over ``period`` periods.

    Computes ``principal * (rate / 100) * period``. Note that despite the
    name, this returns the interest *amount*, not a rate.

    :param principal: starting amount
    :param rate: interest rate per period, as a percentage (e.g. 4.5 for 4.5%)
    :param period: number of periods
    :return: interest accrued (same units as ``principal``)
    """
    return principal * (rate / 100) * period


# Example (return value is discarded). The original comment claimed the
# result was 900, but 2000 * 0.045 * 5 = 450.0.
calculate_interest_rate(2000, 4.5, 5)  # 450.0
class GreencheckStats:
    """Running totals of environmental metrics.

    Tracks three accumulators — trees planted, CO2 emissions reduced, and
    energy savings — each starting at zero and only ever increased via the
    corresponding ``update_*`` method.
    """

    def __init__(self):
        # All accumulators start from zero.
        self.trees_planted = 0
        self.co2_emissions_reduced = 0
        self.energy_savings = 0

    def update_trees_planted(self, num_trees):
        """Add ``num_trees`` to the trees-planted total."""
        self.trees_planted = self.trees_planted + num_trees

    def update_co2_emissions_reduced(self, co2_reduction):
        """Add ``co2_reduction`` to the CO2-reduction total."""
        self.co2_emissions_reduced = self.co2_emissions_reduced + co2_reduction

    def update_energy_savings(self, energy_saved):
        """Add ``energy_saved`` to the energy-savings total."""
        self.energy_savings = self.energy_savings + energy_saved

    def get_stats(self):
        """Return a snapshot of all three totals as a dict."""
        snapshot = {
            "trees_planted": self.trees_planted,
            "co2_emissions_reduced": self.co2_emissions_reduced,
            "energy_savings": self.energy_savings,
        }
        return snapshot
<reponame>kalebm1/react-d3-tree<filename>src/Tree/tests/mockData.js
/* eslint-disable camelcase */
// The naming of the mock trees describes their shape.
// E.g. `mockTree_D1N2_D2N2` -> _Depth1with2Nodes_Depth2with2Nodes_...
export const mockTree_D1N2_D2N2 = [
{
name: 'Top Level',
attributes: {
keyA: 'val A',
keyB: 'val B',
keyC: 'val C',
},
children: [
{
name: 'Level 2: A',
attributes: {
keyA: 'val A',
keyB: 'val B',
keyC: 'val C',
},
children: [
{
name: '3: Son of A',
attributes: {
keyA: 'val A',
keyB: 'val B',
keyC: 'val C',
},
},
{
name: '3: Daughter of A',
attributes: {
keyA: 'val A',
keyB: 'val B',
keyC: 'val C',
},
},
],
},
{
name: 'Level 2: B',
},
],
},
];
const mockData = [
{
name: 'Top Level',
attributes: {
keyA: 'val A',
keyB: 'val B',
keyC: 'val C',
},
children: [
{
name: '2: A',
attributes: {
keyA: 'val A',
keyB: 'val B',
keyC: 'val C',
},
children: [
{
name: '3: Son of A',
attributes: {
keyA: 'val A',
keyB: 'val B',
keyC: 'val C',
},
},
{
name: '3: Daughter of A',
attributes: {
keyA: 'val A',
keyB: 'val B',
keyC: 'val C',
},
},
],
},
{
name: '2: B',
},
],
},
];
const mockData2 = [
{
name: 'Top Level',
parent: 'null',
attributes: {
keyA: 'val A',
keyB: 'val B',
keyC: 'val C',
},
children: [
{
name: 'Level 2: A',
parent: 'Top Level',
attributes: {
keyA: 'val A',
keyB: 'val B',
keyC: 'val C',
},
},
],
},
];
const mockData4 = [
{
name: 'Top Level',
parent: 'null',
attributes: {
keyA: 'val A',
keyB: 'val B',
keyC: 'val C',
},
children: [
{
name: 'Level 2: A',
parent: 'Top Level',
attributes: {
keyA: 'val A',
keyB: 'val B',
keyC: 'val C',
},
children: [
{
name: 'Level 3: A',
parent: 'Level 2: B',
},
{
name: 'Level 3: B',
parent: 'Level 2: A',
},
],
},
{
name: 'Level 2: B',
parent: 'Level 2: A',
children: [
{
name: 'Level 3: B',
parent: 'Level 2: B',
},
],
},
],
},
];
export { mockData, mockData2, mockData4 };
|
<gh_stars>1-10
require 'active_support/core_ext/hash/keys'
module JokerAPI
  module Operations
    # Implements the Joker.com "query-contact-list" DMAPI operation.
    module QueryContactList
      DEFAULT_OPTIONS = {:from => nil, :to => nil,
                         :tld => nil, :extended => false}
      VALID_OPTIONS = DEFAULT_OPTIONS.keys

      # @param [String] pattern Pattern to match (against handle)
      # @param [Hash] options
      # @option options [String] :from Start from this item in list
      # @option options [String] :to End by this item in list
      # @option options [String] :tld Limits output to contact handles which may be used
      #                               with specified toplevel domain (tld), like "com".
      # @option options [Boolean] :extended (false) Provides additional information for every
      #                                             contact listed: name & organization.
      # @return [Array,Hash] When :extended is false an array of contact handles is returned,
      #                      when :extended is true a hash of contact information is returned
      #                      keyed by contact handle.
      #
      # @raise [ArgumentError] if any of the passed option keys are not valid
      def query_contact_list(pattern, options = {})
        options.assert_valid_keys(VALID_OPTIONS)
        params = DEFAULT_OPTIONS.merge(options) # options are essentially dup'd here

        # The API expects the extended flag as "1"/"0" under a hyphenated key.
        extended = params.delete(:extended)
        params['extended-format'] = extended ? "1" : "0"
        params[:pattern] = pattern

        response = perform_request("query-contact-list", params)
        # On API error return an empty result of the shape the caller expects.
        if response.error?
          return {} if extended
          return []
        end

        if extended
          # Extended output is a table: one contact per line, fields separated
          # per the Separator header, column names in the Columns header.
          # NOTE(review): both branches of this ternary yield "\t", so the
          # Separator header is effectively ignored — the original author was
          # unsure what other values exist; confirm against the DMAPI docs.
          separator = response.headers['Separator'] == 'TAB' ? "\t" : "\t" # Not sure what other separator options are available
          columns = response.headers['Columns'].split(',').map { |c| c.to_sym }
          result = {}
          response.body.each_line do |line|
            fields = line.strip.split(separator, columns.count)
            # First column is the handle; remaining columns become a sub-hash.
            result[fields[0]] = {}
            columns.each_with_index do |name, index|
              next if index == 0
              result[fields[0]][name] = fields[index]
            end
          end
          result
        else
          # Plain output: one handle per line.
          response.body.split("\n")
        end
      end
    end
  end
end
|
package org.sliner;
/**
 * Enumerates the supported value-wrapping modes, each carrying the
 * single-character token that represents it.
 *
 * Date: 13-6-17
 *
 * @author <NAME>
 */
public enum ValueWrapper {

    L("l"),
    R("r"),
    A("a");

    /** Single-character token for this wrapping mode. */
    private final String token;

    ValueWrapper(String token) {
        this.token = token;
    }

    /**
     * Returns the single-character token for this mode.
     */
    public String getWrapper() {
        return token;
    }
}
|
<filename>spec/requests/manager/default/new_spec.rb
# frozen_string_literal: true
require 'spec_helper'
# Request specs for the Sunrise manager "new record" page.
describe 'Sunrise Manager New' do
  subject { page }

  # One shared admin for the whole example group (created once, not per-example).
  before(:all) { @admin = FactoryGirl.create(:admin_user) }

  context 'admin' do
    before(:each) { login_as @admin }

    describe 'GET /manage/typo/new' do
      # An unknown model name must 404 rather than render an empty form.
      it 'should raise NotFound' do
        lambda {
          visit new_path(model_name: 'whatever')
        }.should raise_error ActionController::RoutingError
      end
    end

    describe 'GET /manage/structures/new' do
      before(:each) do
        visit new_path(model_name: 'structures')
      end

      it 'should show page title' do
        should have_content(I18n.t('manage.add'))
      end

      # Every configured form field must be rendered; association-like
      # fields render as <select>, everything else as <input>.
      it 'should generate field to edit' do
        SunriseStructure.config.form.fields.each do |f|
          if %w[structure_type_id parent_id position_type_id].include?(f.name)
            should have_selector "select[@name='structure[#{f.name}]']"
          else
            should have_selector "input[@name='structure[#{f.name}]']"
          end
        end
      end
    end

    describe 'GET /manage/structures/new with params' do
      before(:each) do
        @title = 'Default-title'
        # Attributes passed in the query string should pre-populate the form.
        visit new_path(model_name: 'structures', structure: { title: @title })
      end

      it 'should pre-fill attributes' do
        should have_selector "input[@value='#{@title}']"
      end
    end
  end

  describe 'anonymous user' do
    before(:each) do
      visit new_path(model_name: 'structures')
    end

    # Unauthenticated access is redirected to the sign-in page.
    it 'should redirect to login page' do
      should have_content('Sign in')
    end
  end
end
|
#!/bin/bash
# Copyright (c) 2016-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Abort on the first failing step (replaces the original && chain).
set -e

# Pull VERSION (and any other generated properties) into the environment.
source target/classes/resources/version.properties

# Name of the assembled rule-engine fat jar, consumed by the deployer.
export RULE_ENGINE_PACKAGE_NAME="gearpump-rule-engine-${VERSION}-jar-with-dependencies.jar"

# Hand off to the Python deployer.
cd deployer
python src/app.py
|
#!/usr/bin/env bash
# Publishes the kompose docs/ folder from master to the gh-pages branch,
# converting each markdown file to a Jekyll-friendly page.

DOCS_REPO_NAME="kompose"
DOCS_REPO_URL="git@github.com:kubernetes/kompose.git"
DOCS_KEY="script/deploy_key"
DOCS_USER="komposebot"
DOCS_EMAIL="cdrage+kompose@redhat.com"
DOCS_BRANCH="gh-pages"
DOCS_FOLDER="docs"

# decrypt the private key
openssl aes-256-cbc -K $encrypted_b1c51b116939_key -iv $encrypted_b1c51b116939_iv -in "$DOCS_KEY.enc" -out "$DOCS_KEY" -d
chmod 600 "$DOCS_KEY"
eval `ssh-agent -s`
ssh-add "$DOCS_KEY"

# clone the repo
git clone "$DOCS_REPO_URL" "$DOCS_REPO_NAME"

# change to that directory (to prevent accidental pushing to master, etc.)
cd "$DOCS_REPO_NAME"

# switch to gh-pages and grab the docs folder from master
git checkout gh-pages
git checkout master docs

# Remove README.md from docs folder as it isn't relevant
rm docs/README.md

# Use introduction.md instead as the main index page
mv docs/introduction.md index.md

# Check that index.md has the appropriate Jekyll front matter; prepend it if missing.
index="index.md"
# (fix) use head on the file directly instead of a useless `cat | head` pipe
if head -n 1 "$index" | grep "\-\-\-";
then
    echo "index.md already contains Jekyll format"
else
    # (fix) removed a dead `name=${index::-3}` assignment here — the value
    # was never used in this branch (the index front matter has no permalink)
    echo "Adding Jekyll file format to $index"
    jekyll="---
layout: default
---
"
    echo -e "$jekyll\n$(cat "$index")" > "$index"
fi

# clean-up the docs and convert to jekyll-friendly docs
cd docs
for filename in *.md; do
    if head -n 1 "$filename" | grep "\-\-\-";
    then
        echo "$filename already contains Jekyll format"
    else
        # Remove ".md" from the name; used for the permalink/redirect below
        name=${filename::-3}
        echo "Adding Jekyll file format to $filename"
        jekyll="---
layout: default
permalink: /$name/
redirect_from:
- /docs/$name.md/
---
"
        echo -e "$jekyll\n$(cat "$filename")" > "$filename"
    fi
done
cd ..

# add relevant user information
git config user.name "$DOCS_USER"
# email assigned to @komposebot
git config user.email "$DOCS_EMAIL"
git add --all

# Check if anything changed, and if it's the case, push to origin/master.
if git commit -m 'Update docs' -m "Commit: https://github.com/kubernetes/kompose/commit/$GITHUB_SHA" ; then
    git push
fi

# cd back to the original root folder
cd ..
|
#!/usr/bin/env bash
# Registers/updates the concourse-external-worker-tile-ci pipeline,
# pulling its credentials note from LastPass.
set -eu

root_dir=$(cd "$(dirname "$0")" && pwd)

# (fix) quote the config path — under `set -eu` an unquoted $root_dir
# containing spaces would split into multiple arguments.
fly -t ci set-pipeline -n \
    -p concourse-external-worker-tile-ci \
    -c "$root_dir/pipeline.yml" \
    -l <(lpass show --note "pcf:concourse-external-worker-tile-ci")
|
<gh_stars>0
function sortOrder(i, j) {
switch (true) {
case i < j:
return -1
case j < i:
return 1
default:
return 0
}
}
// Root Vue instance for the MediaTimeline viewer. Mirrors the server's
// tweet list via the /api REST endpoints and drives the modal/toast UI.
new Vue({
  computed: {
  },
  data: {
    tweets: [],       // timeline entries, kept sorted descending by id
    modalTweet: null, // tweet shown in the modal, or null when closed
  },
  el: '#app',
  methods: {
    // Open the detail modal for the given tweet.
    callModal(tweet) {
      this.modalTweet = tweet
    },
    // Delete a tweet on the server, then drop it from the local list.
    deleteTweet(tweet) {
      axios
        .delete(`/api/${tweet.id}`)
        .then(() => this.removeTweet(tweet.id))
        .catch(err => console.log(err))
    },
    // Mark a tweet as favourite on the server and merge the updated copy.
    // NOTE(review): method name uses UK spelling but the endpoint is
    // "/favorite" (US) — presumably matches the server route; confirm.
    favourite(tweet) {
      axios
        .put(`/api/${tweet.id}/favorite`)
        .then(res => {
          // Empty payload: server returned no updated tweet, re-fetch it.
          if (res.data.length === 0) {
            this.refetch(tweet)
            return
          }
          this.upsertTweets(res.data)
        })
        .catch(err => console.log(err))
    },
    // Show a Bootstrap-Vue toast with an optional custom title.
    notify(text, title = 'MediaTimeline Viewer') {
      this.$bvToast.toast(text, {
        title,
        autoHideDelay: 3000,
        appendToast: true,
      })
    },
    // Refresh a single tweet from the server; remove it locally when the
    // server reports it gone, merge when an updated copy is returned.
    refetch(tweet) {
      axios
        .put(`/api/${tweet.id}/refresh`)
        .then(res => {
          if (res.data.gone) {
            return this.removeTweet(tweet.id)
          }
          if (res.data.length === 0) {
            return
          }
          this.upsertTweets(res.data)
        })
        .catch(err => console.log(err))
    },
    // Load tweets: first page when the list is empty (or forceReload),
    // otherwise only tweets newer than the newest one we have.
    refresh(forceReload = false) {
      let apiURL = '/api/page' // By default query page 1
      let append = false
      if (this.tweets.length > 0 && !forceReload) {
        // tweets[0] is the newest entry (list is sorted descending by id).
        apiURL = `/api/since?id=${this.tweets[0].id}`
        append = true
      }
      axios
        .get(apiURL)
        .then(resp => {
          if (resp.data.length === 0) {
            return
          }
          this.upsertTweets(resp.data, append)
        })
        .catch(err => {
          console.log(err)
        })
    },
    // Remove the tweet with the given id from the local list (no-op when absent).
    removeTweet(id) {
      const tweets = []
      for (const i in this.tweets) {
        const t = this.tweets[i]
        if (t.id === id) {
          continue
        }
        tweets.push(t)
      }
      this.tweets = tweets
    },
    // Ask the server to re-fetch everything, then reload after a grace period.
    triggerForceFetch() {
      axios
        .put('/api/force-reload')
        .then(() => {
          this.notify('Force refresh triggered, reloading tweets in 10s')
          window.setTimeout(() => this.refresh(true), 10000)
        })
        .catch(err => console.log(err))
    },
    // Merge incoming tweets into the list: replace entries with matching
    // ids, append the rest, then re-sort. When append is false the incoming
    // data replaces the list entirely.
    upsertTweets(data, append = true) {
      let tweets = append ? this.tweets : []
      for (const idx in data) {
        const tweet = data[idx]
        let inserted = false
        for (let i = 0; i < tweets.length; i++) {
          if (tweets[i].id === tweet.id) {
            tweets[i] = tweet
            inserted = true
            break
          }
        }
        if (!inserted) {
          tweets = [...tweets, tweet]
        }
      }
      // Arguments are swapped in the comparator call, yielding DESCENDING
      // order by id (newest first).
      tweets.sort((j, i) => sortOrder(i.id, j.id))
      this.tweets = tweets
    },
  },
  mounted() {
    // Initial load, then poll for new tweets every 30 seconds.
    this.refresh()
    window.setInterval(this.refresh, 30000)
  },
})
|
# Push all sub-repositories first, then the main repository.
# Both helper scripts are expected to live next to this one.
sh $(dirname "$0")/PushSubRepositories.sh
sh $(dirname "$0")/PushMainRepository.sh
# Fetch a single PRESS.one contract (by its hash) from the beta API.
# NOTE(review): the Bearer token below is hard-coded in the script — if it
# is a live credential it should be rotated and supplied via an environment
# variable or secrets store instead of being committed.
curl -X GET https://beta.press.one/api/v2/contracts/6d21afd769ee7b6de4dea2835f5382580f38e9b068b317698f583478594e7d46 \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOjE1NTIzNTM4MTMsImp0aSI6IjA4OTIxNjZiLTU3OTYtNDk2Yi04NTU0LTAwZTMxOWNhNGU1OCIsImRhdGEiOnsiYXV0aEFkZHJlc3MiOiIyNGJiODViMmEyZTcyYWY4NDllOGE4M2U5ZjJmY2UxZDdmOWY2Njg1In0sInByb3ZpZGVyIjoiZGFwcCJ9.qa76GKlcOHq-4salLQduPs3EHB3xWmlq7JJymYSAMmo
<reponame>alterem/smartCityService<gh_stars>0
package com.zhcs.utils;
//*****************************************************************************
/**
 * <p>Title: ServiceException</p>
 * <p>Description: Exception type for the business/service layer.</p>
 * <p>Copyright: Copyright (c) 2017</p>
 * <p>Company: Shenzhen Smart City Manager Information Technology Co., Ltd.</p>
 * @author Liu Xiaodong - Alter
 * @version v1.0 2017-02-23
 */
//*****************************************************************************
@SuppressWarnings("serial")
public class ServiceException extends Exception {

    // True only when this exception was built from a caller-supplied message
    // via the single-String constructor.
    // NOTE(review): the (String, Throwable) constructor does NOT set this
    // flag although it also carries a custom message — confirm whether that
    // asymmetry is intentional before relying on isCustomMsg().
    private boolean customMsg = false;

    public boolean isCustomMsg() {
        return customMsg;
    }

    //*************************************************************************
    /**
     * ServiceException default constructor.
     */
    //*************************************************************************
    public ServiceException() {
        super();
    }

    //*************************************************************************
    /**
     * ServiceException constructor from a message.
     * (Original javadoc said "DaoException" — copy-paste artifact.)
     * @param argMessage log/exception message
     */
    //*************************************************************************
    public ServiceException(String argMessage) {
        super(argMessage);
        customMsg = true;
    }

    //*************************************************************************
    /**
     * ServiceException constructor from a message and a cause.
     * (Original javadoc said "DaoException" — copy-paste artifact.)
     * @param argMessage log/exception message
     * @param argThr underlying cause
     */
    //*************************************************************************
    public ServiceException(String argMessage, Throwable argThr) {
        super(argMessage,argThr);
    }

    //*************************************************************************
    /**
     * ServiceException constructor from a cause only.
     * (Original javadoc said "DaoException" — copy-paste artifact.)
     * @param argThr underlying cause
     */
    //*************************************************************************
    public ServiceException(Throwable argThr) {
        super(argThr);
    }
}
<reponame>wtlow003/leetcode-daily
"""
56. Merge Intervals
https://leetcode.com/problems/merge-intervals/
Given an array of intervals where intervals[i] = [starti, endi], merge all
overlapping intervals, and return an array of the non-overlapping intervals
that cover all the intervals in the input.
Example:
Input: intervals = [[1,3],[2,6],[8,10],[15,18]]
Output: [[1,6],[8,10],[15,18]]
Explanation: Since intervals [1,3] and [2,6] overlaps, merge them into [1,6].
"""
# Runtime: 124ms (11.62%)
class Solution:
    def merge(self, intervals: List[List[int]]) -> List[List[int]]:
        """Merge all overlapping intervals.

        Args:
            intervals: list of ``[start, end]`` pairs, in any order.

        Returns:
            Non-overlapping intervals covering the input, sorted by start.
        """
        # Sort by start value. Use sorted() instead of intervals.sort() so
        # the caller's list order is not mutated as a side effect; the
        # redundant reverse=False of the original is dropped.
        ordered = sorted(intervals, key=lambda iv: iv[0])

        # Accumulates the merged, non-overlapping intervals.
        output: List[List[int]] = []
        for interval in ordered:
            # Start a new group when output is empty or the last merged
            # interval ends before this one begins (no overlap).
            if not output or output[-1][1] < interval[0]:
                output.append(interval)
            else:
                # Overlap: extend the last merged interval to the larger end.
                output[-1][1] = max(output[-1][1], interval[1])
        return output
|
#!/usr/bin/env bash
set -Ceu
#---------------------------------------------------------------------------
# Use `extends` with Zola (static site generator).
# CreatedAt: 2021-08-02
#---------------------------------------------------------------------------
# Resolves this script's location, cds into it, and (currently) does nothing
# else — the zola init/serve/build commands are kept commented out below.
Run() {
	# Standard self-location boilerplate: THIS = absolute path to this file.
	THIS="$(realpath "${BASH_SOURCE:-0}")"; HERE="$(dirname "$THIS")"; PARENT="$(dirname "$HERE")"; THIS_NAME="$(basename "$THIS")"; APP_ROOT="$PARENT";
	cd "$HERE"
	# SITE_ROOT=ytyaru-zola
	# zola init $SITE_ROOT
	# cd $SITE_ROOT
	# zola serve
	# zola build
}
Run "$@"
|
#!/bin/bash
# End-to-end test for a metacontroller "cluster parent" controller:
# installs the controller, creates a ClusterRole parent, waits for the
# namespaced RoleBinding child, then verifies cascading deletion.

cleanup() {
	# Best-effort teardown: keep going even if individual deletes fail.
	set +e
	echo "Clean up..."
	kubectl delete -f my-clusterrole.yaml
	kubectl delete -f cluster-parent.yaml
	kubectl delete configmap cluster-parent-controller -n metacontroller
}
trap cleanup EXIT

set -ex

echo "Install controller..."
kubectl create configmap cluster-parent-controller -n metacontroller --from-file=sync.py
kubectl apply -f cluster-parent.yaml

echo "Create a ClusterRole..."
kubectl apply -f my-clusterrole.yaml

echo "Wait for Namespaced child..."
until [[ "$(kubectl get rolebinding -n default my-clusterrole -o 'jsonpath={.metadata.name}')" == "my-clusterrole" ]]; do sleep 1; done

# Test to make sure cascading deletion of cross namespaces resources works.
echo "Deleting ClusterRole..."
kubectl delete -f my-clusterrole.yaml

echo "Wait for Namespaced child cleanup..."
# NOTE(review): this polls for the CLUSTERROLE to disappear (with a -n flag,
# which kubectl ignores for cluster-scoped resources) although the message
# and the earlier wait refer to the RoleBinding child — confirm whether the
# intent was to poll the rolebinding here instead.
until [[ "$(kubectl get clusterrole.rbac.authorization.k8s.io -n default my-clusterrole 2>&1 )" == 'Error from server (NotFound): clusterroles.rbac.authorization.k8s.io "my-clusterrole" not found' ]]; do sleep 1; done
|
def get_text_between_tags(str):
    """Return the text enclosed between an opening tag and the next tag.

    Example: "<h1>This is a title</h1>" -> "This is a title".
    Returns "" when no enclosed text can be found.

    Note: the parameter name shadows the built-in ``str``; it is kept
    unchanged for backward compatibility with existing keyword callers.
    """
    # Bug fix: the original sliced between the first '<' and the first '>',
    # which returns the tag NAME (e.g. "h1"), not the enclosed text. The
    # content starts after the first '>' and ends at the following '<'.
    start = str.find('>')
    if start == -1:
        return ""
    end = str.find('<', start + 1)
    if end == -1:
        return ""
    return str[start + 1 : end]

# input string
str = "<h1>This is a title</h1>"

# get text between tags
text_between_tags = get_text_between_tags(str)

# print the result
print(text_between_tags)
package org.jaudiotagger.issues;
import org.jaudiotagger.AbstractTestCase;
import org.jaudiotagger.audio.AudioFile;
import org.jaudiotagger.audio.AudioFileIO;
import org.jaudiotagger.tag.FieldKey;
import org.jaudiotagger.tag.Tag;
import java.io.File;
/**
 * Test GetAll functionality for mp4
 */
public class Issue423Test extends AbstractTestCase
{
    /**
     * Asserts the tag holds exactly one TRACK field ("1") and one TITLE
     * field ("title"), retrievable via hasField, getFirst and getAll.
     * Extracted to remove the identical assertion block duplicated across
     * all five format tests; stray empty statements (";") were dropped.
     */
    private void assertSingleTrackAndTitle(Tag tag)
    {
        assertTrue(tag.hasField(FieldKey.TRACK));
        assertEquals("1", tag.getFirst(FieldKey.TRACK));
        assertEquals(1, tag.getAll(FieldKey.TRACK).size());
        assertTrue(tag.hasField(FieldKey.TITLE));
        assertEquals("title", tag.getFirst(FieldKey.TITLE));
        assertEquals(1, tag.getAll(FieldKey.TITLE).size());
        assertEquals("title", tag.getAll(FieldKey.TITLE).get(0));
    }

    /** Writes the expected TRACK/TITLE values, then verifies them. */
    private void setAndAssertTrackAndTitle(Tag tag) throws Exception
    {
        tag.setField(FieldKey.TRACK, "1");
        tag.setField(FieldKey.TITLE, "title");
        assertSingleTrackAndTitle(tag);
    }

    public void testGetAllMp4() throws Exception
    {
        File testFile = AbstractTestCase.copyAudioToTmp("test2.m4a");
        AudioFile f = AudioFileIO.read(testFile);
        // test2.m4a already carries the expected values; no setField needed.
        assertSingleTrackAndTitle(f.getTag());
    }

    public void testGetAllMp3() throws Exception
    {
        File testFile = AbstractTestCase.copyAudioToTmp("testV1.mp3", new File("testGetAllMp3.mp3"));
        AudioFile f = AudioFileIO.read(testFile);
        setAndAssertTrackAndTitle(f.getTagOrCreateDefault());
    }

    public void testGetAllOgg() throws Exception
    {
        File testFile = AbstractTestCase.copyAudioToTmp("test.ogg");
        AudioFile f = AudioFileIO.read(testFile);
        setAndAssertTrackAndTitle(f.getTagOrCreateDefault());
    }

    public void testGetAllFlac() throws Exception
    {
        File testFile = AbstractTestCase.copyAudioToTmp("test.flac");
        AudioFile f = AudioFileIO.read(testFile);
        setAndAssertTrackAndTitle(f.getTagOrCreateDefault());
    }

    public void testGetAllWma() throws Exception
    {
        File testFile = AbstractTestCase.copyAudioToTmp("test1.wma");
        AudioFile f = AudioFileIO.read(testFile);
        setAndAssertTrackAndTitle(f.getTagOrCreateDefault());
    }
}
|
package ddbt.tpcc.loadtest
import ddbt.lib.util.ThreadInfo
import java.io.File
import java.io.FileInputStream
import java.io.IOException
import java.sql.Connection
import java.sql.DriverManager
import java.sql.SQLException
import java.text.DecimalFormat
import java.util.Map
import java.util.Properties
import java.util.Set
import org.slf4j.LoggerFactory
import org.slf4j.Logger
import TpccThread._
import scala.collection.JavaConversions._
import ddbt.tpcc.itx._
object TpccThread {

  // Shared logger for all TpccThread instances.
  private val logger = LoggerFactory.getLogger(classOf[TpccThread])

  // Cached once so the per-run debug check is a cheap field read.
  private val DEBUG = logger.isDebugEnabled
}
/**
 * Worker thread that drives one TPC-C client: it wraps the five
 * transaction implementations in a TpccDriver and runs them until the
 * loop condition fails or the transaction budget is exhausted.
 *
 * @param loopConditionChecker by-name predicate re-evaluated each
 *        iteration; the run stops when it returns false
 * @param maximumNumberOfTransactionsToExecute 0 means unbounded
 */
class TpccThread(val tInfo: ThreadInfo,
    val port: Int,
    val is_local: Int,
    val db_user: String,
    val db_password: String,
    val num_ware: Int,
    val num_conn: Int,
    val driverClassName: String,
    val jdbcUrl: String,
    val fetchSize: Int,
    val TRANSACTION_COUNT: Int,
    var conn: Connection,
    val newOrder: INewOrder,
    val payment: IPayment,
    val orderStat: IOrderStatus,
    val slev: IStockLevel,
    val delivery: IDelivery,
    loopConditionChecker: => Boolean,
    val maximumNumberOfTransactionsToExecute:Int = 0) extends Thread /*with DatabaseConnector*/{

  /**
   * Dedicated JDBC connection for this thread.
   */
  // var conn: Connection = connectToDatabase

  // Driver that executes the actual TPC-C transaction mix on `conn`.
  var driver = new TpccDriver(conn, fetchSize, TRANSACTION_COUNT, newOrder, payment, orderStat, slev, delivery)

  override def run() {
    try {
      if (DEBUG) {
        logger.debug("Starting driver with: tInfo: " + tInfo + " num_ware: " +
          num_ware +
          " num_conn: " +
          num_conn)
      }
      driver.runAllTransactions(tInfo, num_ware, num_conn, loopConditionChecker, maximumNumberOfTransactionsToExecute)
    } catch {
      // Catch everything: an escaping exception would silently kill the
      // thread; log instead so the benchmark run reports the failure.
      case e: Throwable => logger.error("Unhandled exception", e)
    }
  }

  // private def connectToDatabase:Connection = connectToDB(driverClassName, jdbcUrl, db_user, db_password)
}
|
def process_operations(ops_list, self):
    # Annotates batch_norm / max-pool2d ops with their recorded output
    # scale ('out_threshold') from self._out_scale_dict.
    # NOTE(review): `self` arrives as the SECOND positional parameter —
    # presumably this was lifted out of a method; confirm against callers
    # before touching the signature.
    op_count = 0
    for op in ops_list:
        if op.type in ["batch_norm", "pool2d"]:
            # Only max-pooling carries an output threshold; skip other kinds.
            if op.type == "pool2d" and op.attr("pooling_type") != "max":
                continue
            op_count = self.op_match(op, ops_list, op_count)
            # Guard: no matching op found past the end of the list.
            if op_count >= len(ops_list):
                continue
            op._set_attr('out_threshold', self._out_scale_dict[ops_list[op_count]])
            op_count += 1
        else:
            # NOTE(review): the result is unused and `quantization_pass` is
            # not defined in this file — looks like dead or unfinished code.
            output_var_names = quantization_pass._get_op_output_var_names(op)
    return ops_list
<gh_stars>0
/* Based on the public domain implementation in
* crypto_hash/sha512/ref/ from http://bench.cr.yp.to/supercop.html
* by <NAME> */
#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <arm_neon.h>
#include "utils.h"
#include "sha256_neon.h"
/* Gather one 32-bit big-endian word from each of the four 32-byte lane
 * states into a single NEON vector: vector lane i is built from bytes
 * x[i*32 .. i*32+3] (the lanes are laid out back-to-back with stride 32). */
static uint32x4_t load_bigendian_state_32(const uint8_t *x) {
    uint32x4_t result;
    for(int i = 0; i < 4; i++){
        result[i] =
            (uint32_t)(x[3+i*32]) |
            (((uint32_t)(x[2+i*32])) << 8) |
            (((uint32_t)(x[1+i*32])) << 16) |
            (((uint32_t)(x[0+i*32])) << 24);
    }
    return result;
}
/* Deserialize the 8 bytes x[0..7] (big-endian) into one 64-bit value. */
static uint64_t load_bigendian_64(const uint8_t *x) {
    uint64_t value = 0;
    for (int i = 0; i < 8; i++) {
        value = (value << 8) | x[i];
    }
    return value;
}
/* Scatter one NEON vector of 32-bit words back into the four lane states
 * in big-endian byte order: vector lane i goes to x[i*32 .. i*32+3]
 * (inverse of load_bigendian_state_32). */
static void store_bigendian_32(uint8_t *x, const uint32x4_t c) {
    uint32x4_t u;
    u = vshrq_n_u32(c, 24);
    x[0] = (uint8_t) u[0];
    x[0+32] = (uint8_t) u[1];
    x[0+64] = (uint8_t) u[2];
    x[0+96] = (uint8_t) u[3];
    u = vshrq_n_u32(c, 16);
    x[1] = (uint8_t) u[0];
    x[1+32] = (uint8_t) u[1];
    x[1+64] = (uint8_t) u[2];
    x[1+96] = (uint8_t) u[3];
    u = vshrq_n_u32(c, 8);
    x[2] = (uint8_t) u[0];
    x[2+32] = (uint8_t) u[1];
    x[2+64] = (uint8_t) u[2];
    x[2+96] = (uint8_t) u[3];
    u = c;
    x[3] = (uint8_t) u[0];
    x[3+32] = (uint8_t) u[1];
    x[3+64] = (uint8_t) u[2];
    x[3+96] = (uint8_t) u[3];
}
/* Serialize a 64-bit value into x[0..7] in big-endian byte order. */
static void store_bigendian_64(uint8_t *x, uint64_t u) {
    for (int i = 7; i >= 0; i--) {
        x[i] = (uint8_t) u;
        u >>= 8;
    }
}
/* SHA-256 primitive operations vectorized over four independent lanes
 * (one 32-bit word per NEON lane). SHR = logical shift right; ROTR_32 =
 * rotate right; Ch/Maj and the Sigma/sigma functions are as specified in
 * FIPS 180-4 section 4.1.2. */
#define SHR(x, c) (vshrq_n_u32((x), (c)))
//#define ROTR_32(x, c) (veorq_u32(((vshrq_n_u32(x), (c))), (vshlq_n_u32(x), (32-(c)))))
#define ROTR_32(x, c) (veorq_u32(vshlq_n_u32((x), 32 - (c)), vshrq_n_u32((x), (c))))

#define Ch(x, y, z) (veorq_u32(vandq_u32((x), (y)),vandq_u32(veorq_u32(vdupq_n_u32(0xffffffff), (x)),(z))))
#define Maj(x, y, z) (veorq_u32(veorq_u32(vandq_u32((x),(y)),vandq_u32((x),(z))),vandq_u32((y),(z))))
#define Sigma0_32(x) (veorq_u32(ROTR_32(x, 2), veorq_u32(ROTR_32(x,13), ROTR_32(x,22))))
#define Sigma1_32(x) (veorq_u32(ROTR_32(x, 6), veorq_u32(ROTR_32(x,11), ROTR_32(x,25))))
#define sigma0_32(x) (veorq_u32(ROTR_32(x, 7), veorq_u32(ROTR_32(x,18), SHR(x, 3))))
#define sigma1_32(x) (veorq_u32(ROTR_32(x, 17), veorq_u32(ROTR_32(x,19), SHR(x, 10))))

/* Message-schedule update: w0 += sigma1(w14) + w9 + sigma0(w1). */
#define M_32(w0, w14, w9, w1) w0 = vaddq_u32(vaddq_u32((sigma1_32(w14)), (w9)), vaddq_u32((sigma0_32(w1)), (w0)));

/* Expand the 16-word message schedule in place for the next 16 rounds. */
#define EXPAND_32 \
    M_32(w0, w14, w9, w1) \
    M_32(w1, w15, w10, w2) \
    M_32(w2, w0, w11, w3) \
    M_32(w3, w1, w12, w4) \
    M_32(w4, w2, w13, w5) \
    M_32(w5, w3, w14, w6) \
    M_32(w6, w4, w15, w7) \
    M_32(w7, w5, w0, w8) \
    M_32(w8, w6, w1, w9) \
    M_32(w9, w7, w2, w10) \
    M_32(w10, w8, w3, w11) \
    M_32(w11, w9, w4, w12) \
    M_32(w12, w10, w5, w13) \
    M_32(w13, w11, w6, w14) \
    M_32(w14, w12, w7, w15) \
    M_32(w15, w13, w8, w0)

/* One SHA-256 round: consumes schedule word w and round constant k, then
 * rotates the eight working variables a..h (uses T1/T2 from caller scope). */
#define F_32(w, k) \
    T1 = vaddq_u32(h, vaddq_u32(Sigma1_32(e), vaddq_u32(Ch(e, f, g), vaddq_u32((k), (w))))); \
    T2 = vaddq_u32(Sigma0_32(a), Maj(a, b, c)); \
    h = g; \
    g = f; \
    f = e; \
    e = vaddq_u32(d, T1); \
    d = c; \
    c = b; \
    b = a; \
    a = vaddq_u32(T1, T2);
/* Compress full 64-byte blocks of four independent input streams
 * (in0..in3) into the 4-lane state held in `statebytes` (4 x 32-byte
 * chaining values, stride 32). Returns the number of unprocessed trailing
 * bytes (inlen mod 64). The `padded` flag is unused. Each 16-byte group of
 * loads is transposed with vtrn/u64-reinterprets so that schedule word wN
 * holds word N of all four streams; vrev32q_u8 performs the big-endian
 * byte swap. */
static size_t crypto_hashblocks_sha256(uint8_t *statebytes,
                                       const uint8_t *in0,
                                       const uint8_t *in1,
                                       const uint8_t *in2,
                                       const uint8_t *in3,
                                       size_t inlen,
                                       int padded) {
    size_t offset = 0;  /* progress through each input, in 32-bit words */
    uint32x4_t state[8];
    uint32x4_t a;
    uint32x4_t b;
    uint32x4_t c;
    uint32x4_t d;
    uint32x4_t e;
    uint32x4_t f;
    uint32x4_t g;
    uint32x4_t h;
    uint32x4_t T1;
    uint32x4_t T2;
    (void)padded;
    uint32x4_t w0;
    uint32x4_t w1;
    uint32x4_t w2;
    uint32x4_t w3;
    uint32x4_t w4;
    uint32x4_t w5;
    uint32x4_t w6;
    uint32x4_t w7;
    uint32x4_t w8;
    uint32x4_t w9;
    uint32x4_t w10;
    uint32x4_t w11;
    uint32x4_t w12;
    uint32x4_t w13;
    uint32x4_t w14;
    uint32x4_t w15;
    uint32x4_t i, j, k, l, t;
    uint64x2_t i2, j2, k2, l2;
    /* NOTE(review): these casts assume the inputs are 4-byte aligned —
     * confirm callers guarantee that (vld1q_u32 via uint32_t*). */
    uint32_t *in32_0 = (uint32_t *)in0;
    uint32_t *in32_1 = (uint32_t *)in1;
    uint32_t *in32_2 = (uint32_t *)in2;
    uint32_t *in32_3 = (uint32_t *)in3;

    /* Load the 8 chaining words across all four lanes. */
    a = load_bigendian_state_32(statebytes + 0);
    state[0] = a;
    b = load_bigendian_state_32(statebytes + 4);
    state[1] = b;
    c = load_bigendian_state_32(statebytes + 8);
    state[2] = c;
    d = load_bigendian_state_32(statebytes + 12);
    state[3] = d;
    e = load_bigendian_state_32(statebytes + 16);
    state[4] = e;
    f = load_bigendian_state_32(statebytes + 20);
    state[5] = f;
    g = load_bigendian_state_32(statebytes + 24);
    state[6] = g;
    h = load_bigendian_state_32(statebytes + 28);
    state[7] = h;

    while (inlen - offset*4 >= 64) {
        /* Words 0-3 of each stream -> w0..w3 (4x4 transpose + byteswap). */
        i = vld1q_u32(in32_0 + 0 + offset);
        j = vld1q_u32(in32_1 + 0 + offset);
        k = vld1q_u32(in32_2 + 0 + offset);
        l = vld1q_u32(in32_3 + 0 + offset);
        t = vtrn1q_u32(i, j);
        j = vtrn2q_u32(i, j);
        i = t;
        t = vtrn1q_u32(k, l);
        l = vtrn2q_u32(k, l);
        k = t;
        i2 = vreinterpretq_u64_u32(i);
        j2 = vreinterpretq_u64_u32(j);
        k2 = vreinterpretq_u64_u32(k);
        l2 = vreinterpretq_u64_u32(l);
        w0 = vrev32q_u8(vreinterpretq_u32_u64(vtrn1q_u64(i2, k2)));
        w1 = vrev32q_u8(vreinterpretq_u32_u64(vtrn1q_u64(j2, l2)));
        w2 = vrev32q_u8(vreinterpretq_u32_u64(vtrn2q_u64(i2, k2)));
        w3 = vrev32q_u8(vreinterpretq_u32_u64(vtrn2q_u64(j2, l2)));

        /* Words 4-7 -> w4..w7. */
        i = vld1q_u32(in32_0 + 4 + offset);
        j = vld1q_u32(in32_1 + 4 + offset);
        k = vld1q_u32(in32_2 + 4 + offset);
        l = vld1q_u32(in32_3 + 4 + offset);
        t = vtrn1q_u32(i, j);
        j = vtrn2q_u32(i, j);
        i = t;
        t = vtrn1q_u32(k, l);
        l = vtrn2q_u32(k, l);
        k = t;
        i2 = vreinterpretq_u64_u32(i);
        j2 = vreinterpretq_u64_u32(j);
        k2 = vreinterpretq_u64_u32(k);
        l2 = vreinterpretq_u64_u32(l);
        w4 = vrev32q_u8(vreinterpretq_u32_u64(vtrn1q_u64(i2, k2)));
        w5 = vrev32q_u8(vreinterpretq_u32_u64(vtrn1q_u64(j2, l2)));
        w6 = vrev32q_u8(vreinterpretq_u32_u64(vtrn2q_u64(i2, k2)));
        w7 = vrev32q_u8(vreinterpretq_u32_u64(vtrn2q_u64(j2, l2)));

        /* Words 8-11 -> w8..w11. */
        i = vld1q_u32(in32_0 + 8 + offset);
        j = vld1q_u32(in32_1 + 8 + offset);
        k = vld1q_u32(in32_2 + 8 + offset);
        l = vld1q_u32(in32_3 + 8 + offset);
        t = vtrn1q_u32(i, j);
        j = vtrn2q_u32(i, j);
        i = t;
        t = vtrn1q_u32(k, l);
        l = vtrn2q_u32(k, l);
        k = t;
        i2 = vreinterpretq_u64_u32(i);
        j2 = vreinterpretq_u64_u32(j);
        k2 = vreinterpretq_u64_u32(k);
        l2 = vreinterpretq_u64_u32(l);
        w8 = vrev32q_u8(vreinterpretq_u32_u64(vtrn1q_u64(i2, k2)));
        w9 = vrev32q_u8(vreinterpretq_u32_u64(vtrn1q_u64(j2, l2)));
        w10 = vrev32q_u8(vreinterpretq_u32_u64(vtrn2q_u64(i2, k2)));
        w11 = vrev32q_u8(vreinterpretq_u32_u64(vtrn2q_u64(j2, l2)));

        /* Words 12-15 -> w12..w15. */
        i = vld1q_u32(in32_0 + 12 + offset);
        j = vld1q_u32(in32_1 + 12 + offset);
        k = vld1q_u32(in32_2 + 12 + offset);
        l = vld1q_u32(in32_3 + 12 + offset);
        t = vtrn1q_u32(i, j);
        j = vtrn2q_u32(i, j);
        i = t;
        t = vtrn1q_u32(k, l);
        l = vtrn2q_u32(k, l);
        k = t;
        i2 = vreinterpretq_u64_u32(i);
        j2 = vreinterpretq_u64_u32(j);
        k2 = vreinterpretq_u64_u32(k);
        l2 = vreinterpretq_u64_u32(l);
        w12 = vrev32q_u8(vreinterpretq_u32_u64(vtrn1q_u64(i2, k2)));
        w13 = vrev32q_u8(vreinterpretq_u32_u64(vtrn1q_u64(j2, l2)));
        w14 = vrev32q_u8(vreinterpretq_u32_u64(vtrn2q_u64(i2, k2)));
        w15 = vrev32q_u8(vreinterpretq_u32_u64(vtrn2q_u64(j2, l2)));

        /* 64 rounds: 16 rounds with constants K0..K15, then expand, etc.
         * Round constants are the FIPS 180-4 K values. */
        F_32(w0, vdupq_n_u32(0x428a2f98))
        F_32(w1, vdupq_n_u32(0x71374491))
        F_32(w2, vdupq_n_u32(0xb5c0fbcf))
        F_32(w3, vdupq_n_u32(0xe9b5dba5))
        F_32(w4, vdupq_n_u32(0x3956c25b))
        F_32(w5, vdupq_n_u32(0x59f111f1))
        F_32(w6, vdupq_n_u32(0x923f82a4))
        F_32(w7, vdupq_n_u32(0xab1c5ed5))
        F_32(w8, vdupq_n_u32(0xd807aa98))
        F_32(w9, vdupq_n_u32(0x12835b01))
        F_32(w10, vdupq_n_u32(0x243185be))
        F_32(w11, vdupq_n_u32(0x550c7dc3))
        F_32(w12, vdupq_n_u32(0x72be5d74))
        F_32(w13, vdupq_n_u32(0x80deb1fe))
        F_32(w14, vdupq_n_u32(0x9bdc06a7))
        F_32(w15, vdupq_n_u32(0xc19bf174))

        EXPAND_32

        F_32(w0, vdupq_n_u32(0xe49b69c1))
        F_32(w1, vdupq_n_u32(0xefbe4786))
        F_32(w2, vdupq_n_u32(0x0fc19dc6))
        F_32(w3, vdupq_n_u32(0x240ca1cc))
        F_32(w4, vdupq_n_u32(0x2de92c6f))
        F_32(w5, vdupq_n_u32(0x4a7484aa))
        F_32(w6, vdupq_n_u32(0x5cb0a9dc))
        F_32(w7, vdupq_n_u32(0x76f988da))
        F_32(w8, vdupq_n_u32(0x983e5152))
        F_32(w9, vdupq_n_u32(0xa831c66d))
        F_32(w10, vdupq_n_u32(0xb00327c8))
        F_32(w11, vdupq_n_u32(0xbf597fc7))
        F_32(w12, vdupq_n_u32(0xc6e00bf3))
        F_32(w13, vdupq_n_u32(0xd5a79147))
        F_32(w14, vdupq_n_u32(0x06ca6351))
        F_32(w15, vdupq_n_u32(0x14292967))

        EXPAND_32

        F_32(w0, vdupq_n_u32(0x27b70a85))
        F_32(w1, vdupq_n_u32(0x2e1b2138))
        F_32(w2, vdupq_n_u32(0x4d2c6dfc))
        F_32(w3, vdupq_n_u32(0x53380d13))
        F_32(w4, vdupq_n_u32(0x650a7354))
        F_32(w5, vdupq_n_u32(0x766a0abb))
        F_32(w6, vdupq_n_u32(0x81c2c92e))
        F_32(w7, vdupq_n_u32(0x92722c85))
        F_32(w8, vdupq_n_u32(0xa2bfe8a1))
        F_32(w9, vdupq_n_u32(0xa81a664b))
        F_32(w10, vdupq_n_u32(0xc24b8b70))
        F_32(w11, vdupq_n_u32(0xc76c51a3))
        F_32(w12, vdupq_n_u32(0xd192e819))
        F_32(w13, vdupq_n_u32(0xd6990624))
        F_32(w14, vdupq_n_u32(0xf40e3585))
        F_32(w15, vdupq_n_u32(0x106aa070))

        EXPAND_32

        F_32(w0, vdupq_n_u32(0x19a4c116))
        F_32(w1, vdupq_n_u32(0x1e376c08))
        F_32(w2, vdupq_n_u32(0x2748774c))
        F_32(w3, vdupq_n_u32(0x34b0bcb5))
        F_32(w4, vdupq_n_u32(0x391c0cb3))
        F_32(w5, vdupq_n_u32(0x4ed8aa4a))
        F_32(w6, vdupq_n_u32(0x5b9cca4f))
        F_32(w7, vdupq_n_u32(0x682e6ff3))
        F_32(w8, vdupq_n_u32(0x748f82ee))
        F_32(w9, vdupq_n_u32(0x78a5636f))
        F_32(w10, vdupq_n_u32(0x84c87814))
        F_32(w11, vdupq_n_u32(0x8cc70208))
        F_32(w12, vdupq_n_u32(0x90befffa))
        F_32(w13, vdupq_n_u32(0xa4506ceb))
        F_32(w14, vdupq_n_u32(0xbef9a3f7))
        F_32(w15, vdupq_n_u32(0xc67178f2))

        /* Feed-forward: add the previous chaining value (GCC/Clang vector
         * extension arithmetic on uint32x4_t). */
        a += state[0];
        b += state[1];
        c += state[2];
        d += state[3];
        e += state[4];
        f += state[5];
        g += state[6];
        h += state[7];

        state[0] = a;
        state[1] = b;
        state[2] = c;
        state[3] = d;
        state[4] = e;
        state[5] = f;
        state[6] = g;
        state[7] = h;

        //in += 64;
        offset += 16; /* advance 16 32-bit words == 64 bytes per stream */
    }

    /* Write the chaining values back for all four lanes. */
    store_bigendian_32(statebytes + 0, state[0]);
    store_bigendian_32(statebytes + 4, state[1]);
    store_bigendian_32(statebytes + 8, state[2]);
    store_bigendian_32(statebytes + 12, state[3]);
    store_bigendian_32(statebytes + 16, state[4]);
    store_bigendian_32(statebytes + 20, state[5]);
    store_bigendian_32(statebytes + 24, state[6]);
    store_bigendian_32(statebytes + 28, state[7]);

    return inlen - (offset*4);
}
/* SHA-256 initial hash value H0..H7 (FIPS 180-4), stored big-endian. */
static const uint8_t iv_256[32] = {
    0x6a, 0x09, 0xe6, 0x67, 0xbb, 0x67, 0xae, 0x85,
    0x3c, 0x6e, 0xf3, 0x72, 0xa5, 0x4f, 0xf5, 0x3a,
    0x51, 0x0e, 0x52, 0x7f, 0x9b, 0x05, 0x68, 0x8c,
    0x1f, 0x83, 0xd9, 0xab, 0x5b, 0xe0, 0xcd, 0x19
};
/* Initialize a 4-lane incremental SHA-256 state: four copies of the
 * standard IV followed by a zeroed 8-byte counter.
 * Layout of state->ctx: iv | iv | iv | iv | length. */
void sha256x4_inc_init(sha256ctx2 *state) {
    for (size_t lane = 0; lane < 4; lane++) {
        memcpy(state->ctx + 32 * lane, iv_256, 32);
    }
    memset(state->ctx + 32 * 4, 0, 8);
}
/* Absorb `inblocks` full 64-byte blocks of `in` into all four lanes (the
 * same input is fed to every lane) and advance the running byte counter. */
void sha256x4_inc_blocks(sha256ctx2 *state, const uint8_t *in, size_t inblocks) {
    /* Fix: the byte counter lives AFTER the four 32-byte lane states, at
     * offset 4*32 — that is where sha256x4_inc_init() zeroes it and where
     * sha256x4_inc_finalize() reads it. The previous code read/wrote it at
     * offset 32, which both tracked the length wrongly and clobbered the
     * second lane's chaining value. */
    uint64_t bytes = load_bigendian_64(state->ctx + 4 * 32);
    crypto_hashblocks_sha256(state->ctx, in, in, in, in, 64 * inblocks, 0);
    bytes += 64 * inblocks;
    store_bigendian_64(state->ctx + 4 * 32, bytes);
}
/* Absorb the final (possibly partial) input of each of the four streams,
 * apply SHA-256 padding (0x80, zeros, 64-bit message length in BITS), and
 * write each lane's 32-byte digest to out0..out3. `state` points at the
 * raw ctx bytes (4 x 32-byte lane states followed by the byte counter).
 * The shifted stores below encode bits = bytes << 3: e.g. bytes >> 53
 * equals (bytes*8) >> 56, the most significant length byte. */
void sha256x4_inc_finalize(
        uint8_t *out0,
        uint8_t *out1,
        uint8_t *out2,
        uint8_t *out3,
        uint8_t *state,
        const uint8_t *in0,
        const uint8_t *in1,
        const uint8_t *in2,
        const uint8_t *in3,
        size_t inlen) {
    uint8_t padded0[128];
    uint8_t padded1[128];
    uint8_t padded2[128];
    uint8_t padded3[128];
    /* Total message length = previously absorbed bytes + this chunk. */
    uint64_t bytes = load_bigendian_64(state+ 4*32) + inlen; //bytes = length = 26
    //size_t inlen_copy = inlen;

    /* Consume all complete 64-byte blocks of the tail input ... */
    crypto_hashblocks_sha256(state, in0, in1, in2, in3, inlen, 0);
    /* ... then rewind each pointer to the start of the leftover partial
     * block (inlen & 63 bytes remain). */
    in0 += inlen;
    in1 += inlen;
    in2 += inlen;
    in3 += inlen;
    inlen &= 63;
    in0 -= inlen;
    in1 -= inlen;
    in2 -= inlen;
    in3 -= inlen;

    /* Copy the leftover bytes into the padding buffers. */
    for (size_t i = 0; i < inlen; ++i) {
        padded0[i] = in0[i];
        padded1[i] = in1[i];
        padded2[i] = in2[i];
        padded3[i] = in3[i];
    }
    /* Mandatory 0x80 terminator bit. */
    padded0[inlen] = 0x80;
    padded1[inlen] = 0x80;
    padded2[inlen] = 0x80;
    padded3[inlen] = 0x80;

    uint8_t placeholder;
    if (inlen < 56) {
        /* Length fits in this block: zero-fill up to byte 56, then write
         * the 8-byte bit length, and compress one final 64-byte block. */
        for (size_t i = inlen + 1; i < 56; ++i) {
            padded0[i] = 00;
            padded1[i] = 00;
            padded2[i] = 00;
            padded3[i] = 00;
        }
        placeholder = (uint8_t) (bytes >> 53);
        padded0[56] = placeholder;
        padded1[56] = placeholder;
        padded2[56] = placeholder;
        padded3[56] = placeholder;
        placeholder = (uint8_t) (bytes >> 45);
        padded0[57] = placeholder;
        padded1[57] = placeholder;
        padded2[57] = placeholder;
        padded3[57] = placeholder;
        placeholder = (uint8_t) (bytes >> 37);
        padded0[58] = placeholder;
        padded1[58] = placeholder;
        padded2[58] = placeholder;
        padded3[58] = placeholder;
        placeholder = (uint8_t) (bytes >> 29);
        padded0[59] = placeholder;
        padded1[59] = placeholder;
        padded2[59] = placeholder;
        padded3[59] = placeholder;
        placeholder = (uint8_t) (bytes >> 21);
        padded0[60] = placeholder;
        padded1[60] = placeholder;
        padded2[60] = placeholder;
        padded3[60] = placeholder;
        placeholder = (uint8_t) (bytes >> 13);
        padded0[61] = placeholder;
        padded1[61] = placeholder;
        padded2[61] = placeholder;
        padded3[61] = placeholder;
        placeholder = (uint8_t) (bytes >> 5);
        padded0[62] = placeholder;
        padded1[62] = placeholder;
        padded2[62] = placeholder;
        padded3[62] = placeholder;
        placeholder = (uint8_t) (bytes << 3);
        padded0[63] = placeholder;
        padded1[63] = placeholder;
        padded2[63] = placeholder;
        padded3[63] = placeholder;
        crypto_hashblocks_sha256(state, padded0, padded1, padded2, padded3, 64, 1);
    } else {
        /* Length does not fit: pad through byte 120 of a 128-byte double
         * block, write the bit length at 120..127, compress two blocks. */
        for (size_t i = inlen + 1; i < 120; ++i) {
            padded0[i] = 0x0;
            padded1[i] = 0x0;
            padded2[i] = 0x0;
            padded3[i] = 0x0;
        }
        placeholder = (uint8_t) (bytes >> 53);
        padded0[120] = placeholder;
        padded1[120] = placeholder;
        padded2[120] = placeholder;
        padded3[120] = placeholder;
        placeholder = (uint8_t) (bytes >> 45);
        padded0[121] = placeholder;
        padded1[121] = placeholder;
        padded2[121] = placeholder;
        padded3[121] = placeholder;
        placeholder = (uint8_t) (bytes >> 37);
        padded0[122] = placeholder;
        padded1[122] = placeholder;
        padded2[122] = placeholder;
        padded3[122] = placeholder;
        placeholder = (uint8_t) (bytes >> 29);
        padded0[123] = placeholder;
        padded1[123] = placeholder;
        padded2[123] = placeholder;
        padded3[123] = placeholder;
        placeholder = (uint8_t) (bytes >> 21);
        padded0[124] = placeholder;
        padded1[124] = placeholder;
        padded2[124] = placeholder;
        padded3[124] = placeholder;
        placeholder = (uint8_t) (bytes >> 13);
        padded0[125] = placeholder;
        padded1[125] = placeholder;
        padded2[125] = placeholder;
        padded3[125] = placeholder;
        placeholder = (uint8_t) (bytes >> 5);
        padded0[126] = placeholder;
        padded1[126] = placeholder;
        padded2[126] = placeholder;
        padded3[126] = placeholder;
        placeholder = (uint8_t) (bytes << 3);
        padded0[127] = placeholder;
        padded1[127] = placeholder;
        padded2[127] = placeholder;
        padded3[127] = placeholder;
        crypto_hashblocks_sha256(state, padded0, padded1, padded2, padded3, 128, 0);
    }

    /* Each lane's final chaining value IS its digest (32 bytes, stride 32). */
    for (size_t i = 0; i < 32; ++i) {
        out0[i] = state[i];
        out1[i] = state[i + 32];
        out2[i] = state[i + 64];
        out3[i] = state[i + 96];
    }
}
/*
 * mgf1x42 -- MGF1 mask generation (RFC 8017, B.2.1) over four independent
 * seeds at once, producing `outlen` bytes into each of out0..out3.
 *
 * For counter i = 0, 1, ... each output block is SHA-256(in_j || i), where
 * the counter is appended as a big-endian 32-bit value via u32_to_bytes and
 * all four lanes are hashed together by the 4-way sha256x42 below.
 *
 * NOTE(review): inbuf0..inbuf3 are C99 variable-length arrays of inlen + 4
 * bytes on the stack -- callers are presumably trusted to keep inlen small.
 */
void mgf1x42(
    unsigned char *out0,
    unsigned char *out1,
    unsigned char *out2,
    unsigned char *out3,
    unsigned long outlen,
    const unsigned char *in0,
    const unsigned char *in1,
    const unsigned char *in2,
    const unsigned char *in3,
    unsigned long inlen)
{
    /* Scratch: seed followed by 4 bytes of big-endian block counter. */
    unsigned char inbuf0[inlen + 4];
    unsigned char inbuf1[inlen + 4];
    unsigned char inbuf2[inlen + 4];
    unsigned char inbuf3[inlen + 4];
    /* Staging buffers for the final, possibly partial, output block. */
    unsigned char outbuf0[SPX_SHA256_OUTPUT_BYTES];
    unsigned char outbuf1[SPX_SHA256_OUTPUT_BYTES];
    unsigned char outbuf2[SPX_SHA256_OUTPUT_BYTES];
    unsigned char outbuf3[SPX_SHA256_OUTPUT_BYTES];
    unsigned long i;
    memcpy(inbuf0, in0, inlen);
    memcpy(inbuf1, in1, inlen);
    memcpy(inbuf2, in2, inlen);
    memcpy(inbuf3, in3, inlen);
    /* While we can fit in at least another full block of SHA256 output.. */
    for (i = 0; (i+1)*SPX_SHA256_OUTPUT_BYTES <= outlen; i++) {
        u32_to_bytes(inbuf0 + inlen, i);
        u32_to_bytes(inbuf1 + inlen, i);
        u32_to_bytes(inbuf2 + inlen, i);
        u32_to_bytes(inbuf3 + inlen, i);
        /* Full blocks are hashed straight into the output buffers. */
        sha256x42(out0, out1, out2, out3, inbuf0, inbuf1, inbuf2, inbuf3, inlen + 4);
        out0 += SPX_SHA256_OUTPUT_BYTES;
        out1 += SPX_SHA256_OUTPUT_BYTES;
        out2 += SPX_SHA256_OUTPUT_BYTES;
        out3 += SPX_SHA256_OUTPUT_BYTES;
    }
    /* Until we cannot anymore, and we fill the remainder. */
    /* Note: when outlen is an exact multiple of the digest size this still
     * computes one extra digest and copies zero bytes of it -- harmless. */
    u32_to_bytes(inbuf0 + inlen, i);
    u32_to_bytes(inbuf1 + inlen, i);
    u32_to_bytes(inbuf2 + inlen, i);
    u32_to_bytes(inbuf3 + inlen, i);
    sha256x42(
        outbuf0,
        outbuf1,
        outbuf2,
        outbuf3,
        inbuf0,
        inbuf1,
        inbuf2,
        inbuf3,
        inlen + 4);
    memcpy(out0, outbuf0, outlen - i*SPX_SHA256_OUTPUT_BYTES);
    memcpy(out1, outbuf1, outlen - i*SPX_SHA256_OUTPUT_BYTES);
    memcpy(out2, outbuf2, outlen - i*SPX_SHA256_OUTPUT_BYTES);
    memcpy(out3, outbuf3, outlen - i*SPX_SHA256_OUTPUT_BYTES);
}
/*
 * sha256x42 -- hash four equal-length messages with one 4-way SHA-256 pass.
 *
 * Writes the 32-byte digest of in_j into out_j.  Thin wrapper over the
 * incremental 4-way API: fresh state, then a single finalize call.
 * (Used by mgf1x42 above; a prototype is presumably declared in a header,
 * since the definition appears after its first use.)
 */
void sha256x42(
    uint8_t *out0,
    uint8_t *out1,
    uint8_t *out2,
    uint8_t *out3,
    const uint8_t *in0,
    const uint8_t *in1,
    const uint8_t *in2,
    const uint8_t *in3,
    size_t inlen) {
    sha256ctx2 state;
    sha256x4_inc_init(&state);
    sha256x4_inc_finalize(out0, out1, out2, out3, state.ctx, in0, in1, in2, in3, inlen);
}
<gh_stars>0
import React from "react";
import { BrowserRouter as Router, Route, Switch } from "react-router-dom";
//redux
import { Provider } from "react-redux";
import store from "./redux/store";
import { SET_AUTHENTICATED } from "./redux/types";
import { logoutAction, getUserData } from "./redux/actions/authActions";
//axios
import axios from "./util/axios";
//jwt-decode
import jwtDecode from "jwt-decode";
//material-ui
import { ThemeProvider as MuiThemeProvider } from "@material-ui/core/styles";
import createMuiTheme from "@material-ui/core/styles/createMuiTheme";
//theme
import themeFile from "./util/theme";
//components
import AppBar from "./components/AppBar";
//util
import ScrollToTop from "./util/scrollToTop";
//restrict routes
import { AuthRoute, SellerRoute, UserRoute } from "./util/route";
//pages
import home from "./pages/home";
import error404 from "./pages/404";
import signup from "./pages/sign-up";
import login from "./pages/login";
import addRestaurant from "./pages/addRestaurant";
const theme = createMuiTheme(themeFile);

// Restore authentication state from a previously stored JWT (if any)
// before the app renders, so protected routes see the right state.
const token = localStorage.jwt;
if (token) {
  const decodedToken = jwtDecode(token);
  if (decodedToken.exp * 1000 < Date.now()) {
    // Token has expired: clear auth state and force a fresh login.
    store.dispatch(logoutAction());
    window.location.href = "/login";
  } else {
    // Token still valid: mark the store authenticated, attach the token
    // to all axios requests, and hydrate the current user's data.
    // NOTE(review): the raw token is used as the Authorization header with
    // no "Bearer " prefix -- confirm this matches the backend's expectation.
    store.dispatch({ type: SET_AUTHENTICATED });
    axios.defaults.headers.common["Authorization"] = token;
    store.dispatch(getUserData());
  }
}

// Root component: theme + redux providers wrapping the router.
// AuthRoute presumably redirects already-authenticated users away from
// /login, /register and /addrestaurant (defined in ./util/route).
function App() {
  return (
    <MuiThemeProvider theme={theme}>
      <Provider store={store}>
        <Router>
          <AppBar />
          <ScrollToTop />
          <Switch>
            <Route exact path="/" component={home} />
            <AuthRoute exact path="/login" component={login} />
            <AuthRoute exact path="/register" component={signup} />
            <AuthRoute exact path="/addrestaurant" component={addRestaurant} />
            {/* Fallback: any unmatched path renders the 404 page. */}
            <Route component={error404} />
          </Switch>
        </Router>
      </Provider>
    </MuiThemeProvider>
  );
}

export default App;
|
package com.scale.invest.eureka.listener;
import lombok.extern.slf4j.Slf4j;
import org.springframework.cloud.netflix.eureka.server.event.EurekaInstanceRenewedEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.context.annotation.Configuration;
@Configuration
@Slf4j
public class InstanceRenewListener implements ApplicationListener<EurekaInstanceRenewedEvent> {

    /**
     * Logs every heartbeat (lease renewal) the Eureka server receives.
     *
     * @param event renewal event carrying the renewing instance's info
     */
    @Override
    public void onApplicationEvent(EurekaInstanceRenewedEvent event) {
        // Fixed the malformed original message
        // ("Detection the heartbeat service:{}" with a stray space before the comma).
        log.info("Detected heartbeat from service: {}", event.getInstanceInfo().getAppName());
    }
}
|
import UserDict
class FifoCache(object, UserDict.DictMixin):
    """A mapping that remembers the last `num_entries` items that were set.

    Once more than ``num_entries`` keys exist, the oldest key (first set)
    is evicted.  ``UserDict.DictMixin`` (Python 2) derives the rest of the
    mapping protocol from the primitives defined here.
    """

    def __init__(self, num_entries, dct=()):
        # Maximum number of entries retained before eviction kicks in.
        self.num_entries = num_entries
        # key -> value storage.
        self.dct = dict(dct)
        # Keys in insertion order; lst[0] is the next eviction candidate.
        # NOTE(review): a non-empty `dct` argument leaves `lst` empty, so
        # pre-seeded entries are never evicted -- confirm this is intended.
        self.lst = []

    def __repr__(self):
        return '%r(%r,%r)' % (
            self.__class__.__name__, self.num_entries, self.dct)

    def copy(self):
        # Bug fix: the original passed only the dict to the constructor,
        # so the copy lost the insertion-order list (`lst`) -- its keys()
        # returned [] and the copied entries could never be evicted.
        # Duplicate both underlying structures instead.
        result = self.__class__(self.num_entries)
        result.dct = self.dct.copy()
        result.lst = list(self.lst)
        return result

    def keys(self):
        # Return keys in insertion (FIFO) order.
        return list(self.lst)

    def __getitem__(self, key):
        return self.dct[key]

    def __setitem__(self, key, value):
        dct = self.dct
        lst = self.lst
        # Re-setting an existing key moves it to the back of the queue.
        if key in dct:
            lst.remove(key)
        dct[key] = value
        lst.append(key)
        # Over capacity: evict the oldest entry.
        if len(lst) > self.num_entries:
            del dct[lst.pop(0)]

    def __delitem__(self, key):
        self.dct.pop(key)
        self.lst.remove(key)

    # a method explicitly defined only as an optimization
    def __contains__(self, item):
        return item in self.dct

    has_key = __contains__
|
<filename>notes/bitbucket.py
import requests
import utils
CLIENT_ID = "dAhCaXVVvRdCjh7BcA"
CLIENT_SECRET = "<KEY>"
REPO_UUID = None
def get_auth_url():
    """Return the Bitbucket OAuth2 authorization URL for this client."""
    return "https://bitbucket.org/site/oauth2/authorize?client_id={0}&response_type=code".format(CLIENT_ID)
def is_logged_in(repo_uuid):
    """Return True if a token for `repo_uuid` exists in the DB, else False.

    NOTE(review): the bare except also swallows unexpected errors from
    utils.in_db, reporting them as "not logged in".
    """
    try:
        if repo_uuid is None:
            raise Exception
        return utils.in_db(repo_uuid)
    # with open("bitbucket_access_token") as f:
    #     ACCESS_TOKEN = f.read()
    #     return True
    except:
        return False
def get_auth_token(request):
    """Exchange the OAuth `code` from `request` for an access token.

    Stores the token in the DB keyed by the 'repo_uuid' cookie and returns
    the token-response JSON, or None on any failure (bare except).
    NOTE(review): if the 'repo_uuid' cookie is absent the token is not
    persisted; Python 2 only (`has_key`).
    """
    # headers = {"Content-type": "application/x-www-form-urlencoded"}
    code = request.GET["code"]
    data = {"code": code, "grant_type": "authorization_code"}
    auth = (CLIENT_ID, CLIENT_SECRET)
    try:
        r = requests.post("https://bitbucket.org/site/oauth2/access_token", data=data, auth=auth)
        if request.COOKIES.has_key('repo_uuid'):
            repo_uuid = request.COOKIES['repo_uuid']
            ACCESS_TOKEN = r.json()["access_token"]
            utils.add_bitbucket_token_to_db(repo_uuid, ACCESS_TOKEN)
        # with open('bitbucket_access_token', 'w') as f:
        #     f.write(ACCESS_TOKEN)
        # print ACCESS_TOKEN
        return r.json()
    except:
        return None
def verify(request):
    """Return True iff Bitbucket redirected back with an OAuth `code`."""
    # ensure we have a session state and the state value is the same as what Bitbucket returned
    if 'code' not in request.GET:
        return False
    else:
        return True
def get_request_headers(repo_uuid):
    """Build the Authorization header dict from the stored Bitbucket token."""
    # with open("bitbucket_access_token") as f:
    #     ACCESS_TOKEN = f.read()
    ACCESS_TOKEN = utils.get_token(repo_uuid, "Bitbucket")
    headers = {"Authorization": "Bearer " + ACCESS_TOKEN}
    return headers
def set_repo_uuid(repo_uuid):
    """Remember the current repo UUID in a module-global.

    NOTE(review): get_repo_uuid() below reads from a file instead of this
    global -- the two halves look out of sync; confirm which is intended.
    """
    global REPO_UUID  # use global variable
    REPO_UUID = repo_uuid
    # Also save to file. This should reset each time you visit a new repo
    # with open("repo_uuid", 'w') as f:
    #     f.write(REPO_UUID)
def get_repo_uuid():
    """Read the repo UUID from the 'repo_uuid' file in the working dir."""
    with open("repo_uuid") as f:
        UUID = f.read()
    return UUID
def add_to_wiki(pages_list, repo_uuid):
content = "\n".join(pages_list)
data = {"content": content}
print "Adding to Wiki"
try:
r = requests.post("https://bitbucket.org/api/1.0/repositories/{1}/{0}/wiki/notes".format(repo_uuid, "{}"),
data=data)
print r.reason
except:
print "Oops"
if r.ok:
print "Added to Wiki!!"
return True
else:
return False
|
package cmdline
import (
"flag"
"fmt"
"testing"
)
// TestCommandLineFlags registers a set of flags on the global FlagSet and
// asserts that each one reports its default value after Parse.
//
// NOTE(review): flag.Parse() on the global FlagSet inside a test also sees
// go-test's own command-line arguments; assertEq is defined elsewhere in
// this package.
func TestCommandLineFlags(t *testing.T) {
	// Pointer-returning registration style.
	wordPtr := flag.String("hello", "world", "a string")
	numbPtr := flag.Int("numb", 42, "an int")
	boolPtr := flag.Bool("fork", false, "a bool")
	// Var-binding registration style.
	var svar string
	flag.StringVar(&svar, "strVarName", "bar", "a string var")
	var ivar int
	flag.IntVar(&ivar, "intVarName", 200, "a int var")
	flag.Parse()
	// No flags were passed, so every value is its default and there are
	// no positional arguments.
	assertEq("world", *wordPtr)
	assertEq(42, *numbPtr)
	assertEq(false, *boolPtr)
	assertEq("bar", svar)
	assertEq(200, ivar)
	assertEq([]string{}, flag.Args())
}

// commandLineFlagsTest is a non-test demo of the same flag API that prints
// the parsed values. NOTE(review): it registers -numb and -fork a second
// time on the global FlagSet; calling it after TestCommandLineFlags in the
// same process would panic on flag redefinition.
func commandLineFlagsTest() {
	wordPtr := flag.String("word", "foo", "a string")
	numbPtr := flag.Int("numb", 42, "an int")
	boolPtr := flag.Bool("fork", false, "a bool")
	var svar string
	flag.StringVar(&svar, "svar", "bar", "a string var")
	flag.Parse()
	fmt.Println("word:", *wordPtr)
	fmt.Println("numb:", *numbPtr)
	fmt.Println("fork:", *boolPtr)
	fmt.Println("svar:", svar)
	fmt.Println("tail:", flag.Args())
}
|
#!/usr/bin/env bats
# vim: ft=bash
load test_helper
# file.exists-and-newer-than
# file.gsub
# file.install-with-backup
# file.last-modified-date
# file.last-modified-year
# file.list.filter-existing
# file.list.filter-non-empty
# file.size
# file.size.mb
# file.source-if-exists
# file.stat
# files.find
# files.map
# files.map.shell-scripts
source lib/file-helpers.sh
source lib/file.sh
source lib/time.sh
source lib/util.sh
source lib/bashmatic.sh
# Touch b.sh so it is strictly newer than a.sh, then verify the comparison.
@test 'file.first-is-newer-than-second()' {
  local old_file="test/fixtures/a.sh"
  local newer_file="test/fixtures/b.sh"
  touch "${newer_file}"
  set -e
  file.first-is-newer-than-second "${newer_file}" "${old_file}" && return 0
}
# Temp files should live in /tmp and carry a .bashmatic marker in the name.
@test "file.temp()" {
  set -e
  local f="$(file.temp)"
  [[ $(dirname $f) == "/tmp" && $f =~ ".bashmatic" ]]
}
# Sourcing an existing file must succeed; both fixtures exist here.
@test "file.source-if-exists()" {
  set -e
  file.source-if-exists test/fixtures/a.sh
  file.source-if-exists test/fixtures/b.sh
}
# files.map.shell-scripts emits an eval-able array assignment; the fixtures
# dir is expected to contain more than two shell scripts.
@test "file.map.shell-scripts()" {
  set -e
  declare -a files_array
  eval "$(files.map.shell-scripts test/fixtures files_array)"
  [[ ${#files_array[@]} -gt 2 ]]
}
# b.sh is a fixed 14-byte fixture.
@test "file.size()" {
  set -e
  [[ $(file.size test/fixtures/b.sh) -eq 14 ]]
}
@test "file.extension()" {
  set -e
  [[ "$(file.extension test/fixtures/b.sh)" == "sh" ]]
}
@test "file.strip.extension()" {
  set -e
  [[ "$(file.strip.extension test/fixtures/b.sh)" == "test/fixtures/b" ]]
}
@test "file.extension.replace() single file" {
  set -e
  local result="$(file.extension.replace .adoc test/fixtures/b.sh)"
  [[ "${result}" == "test/fixtures/b.adoc" ]]
}
# Replacing extensions over a list must be 1:1; spot-check a random element
# by mapping it back to its source name.
@test "file.extension.replace() list of files: result size comparison" {
  set -e
  local -a files=( $(find lib -type f -name '*.sh') )
  local -a result=( $(file.extension.replace .bash "${files[@]}") )
  # first check the sizes
  [[ ${#result[@]} -eq ${#files[@]} ]]
  # now we'll just check that the random element of the array
  # is as we expect it.
  local index=$(util.random-number ${#result[@]})
  # first check the actual arrays
  [[ "${result[${index}]/.bash/.sh}" == "${files[${index}]}" ]]
}
|
def find_median(arr):
    """Return the median of a non-empty sequence of numbers.

    For an even-length input the median is the mean of the two middle
    values.  Unlike the original implementation, the caller's list is NOT
    sorted in place.

    Raises:
        ValueError: if ``arr`` is empty.
    """
    if not arr:
        raise ValueError("find_median() arg is an empty sequence")
    # Sort a copy so the caller's list is left untouched.
    ordered = sorted(arr)
    n = len(ordered)
    mid = n // 2
    if n % 2 == 0:
        return (ordered[mid] + ordered[mid - 1]) / 2
    return ordered[mid]
<reponame>ktnyt/labcon<gh_stars>0
package usecases
import (
"github.com/ktnyt/labcon/cmd/labcon/app/repositories"
"github.com/ktnyt/labcon/cmd/labcon/lib"
"github.com/ktnyt/labcon/driver"
)
// DriverUsecaseImpl implements the DriverUsecase interface on top of a
// DriverRepository, using an injected token generator.
type DriverUsecaseImpl struct {
	repository repositories.DriverRepository
	// generate produces a fresh authorization token for Register.
	generate func() string
}

// NewDriverUsecase wires a repository and token generator into a usecase.
func NewDriverUsecase(repository repositories.DriverRepository, generate func() string) DriverUsecase {
	return DriverUsecaseImpl{
		repository: repository,
		generate:   generate,
	}
}

// List returns the names of all registered drivers.
func (usecase DriverUsecaseImpl) List() ([]string, error) {
	return usecase.repository.List()
}

// Register creates a driver with the given name and initial state and
// returns the generated token callers must present to Authorize.
func (usecase DriverUsecaseImpl) Register(name string, state interface{}) (string, error) {
	token := usecase.generate()
	err := usecase.repository.Create(name, token, state)
	return token, err
}

// Authorize checks that the presented token matches the driver's stored
// token, returning lib.ErrForbidden on mismatch.
func (usecase DriverUsecaseImpl) Authorize(name string, token string) error {
	model, err := usecase.repository.Fetch(name)
	if err != nil {
		return err
	}
	if model.Token != token {
		return lib.ErrForbidden
	}
	return nil
}

// GetState returns the driver's current state.
// NOTE(review): on fetch error this returns model.State from the zero-value
// model alongside the error.
func (usecase DriverUsecaseImpl) GetState(name string) (interface{}, error) {
	model, err := usecase.repository.Fetch(name)
	return model.State, err
}

// SetState replaces the driver's state (fetch-modify-update; no locking
// visible here).
func (usecase DriverUsecaseImpl) SetState(name string, state interface{}) error {
	model, err := usecase.repository.Fetch(name)
	if err != nil {
		return err
	}
	model.State = state
	return usecase.repository.Update(model)
}

// GetStatus returns the driver's status, or driver.Error on fetch failure.
func (usecase DriverUsecaseImpl) GetStatus(name string) (driver.Status, error) {
	model, err := usecase.repository.Fetch(name)
	if err != nil {
		return driver.Error, err
	}
	return model.Status, nil
}

// SetStatus sets the driver's status and clears any pending operation
// (setting a status marks the previous op as consumed).
func (usecase DriverUsecaseImpl) SetStatus(name string, status driver.Status) error {
	model, err := usecase.repository.Fetch(name)
	if err != nil {
		return err
	}
	model.Status = status
	model.Op = nil
	return usecase.repository.Update(model)
}

// GetOp returns the driver's pending operation, if any (nil when none).
func (usecase DriverUsecaseImpl) GetOp(name string) (*driver.Op, error) {
	model, err := usecase.repository.Fetch(name)
	if err != nil {
		return nil, err
	}
	return model.Op, nil
}

// SetOp queues an operation for an idle driver: rejects with
// lib.ErrAlreadyExists unless the driver is Idle with no pending op, then
// marks it Busy and stores the op.
func (usecase DriverUsecaseImpl) SetOp(name string, op driver.Op) error {
	model, err := usecase.repository.Fetch(name)
	if err != nil {
		return err
	}
	if model.Status != driver.Idle || model.Op != nil {
		return lib.ErrAlreadyExists
	}
	model.Status = driver.Busy
	model.Op = &op
	return usecase.repository.Update(model)
}

// Delete removes the driver from the repository.
func (usecase DriverUsecaseImpl) Delete(name string) error {
	return usecase.repository.Delete(name)
}
|
def append_list(list_to_append, list_to_add):
    """Concatenate two sequences with ``+`` and return the new result.

    Neither input is modified.
    """
    combined = list_to_append + list_to_add
    return combined
#include <iostream>
using namespace std;
int main() {
    // Read the triangle height from the user.
    int n;
    cout << "Enter the value of n: ";
    cin >> n;

    // Row r prints r stars ("* ") then space padding ("  "), so every row
    // is exactly 2*n characters wide, matching the original output.
    for (int row = 1; row <= n; ++row) {
        for (int col = 1; col <= n; ++col) {
            cout << (col <= row ? "* " : "  ");
        }
        cout << endl;
    }
    return 0;
}
/**
 * Identifies a GPU pipeline shader stage.
 *
 * Values are distinct powers of two, so they can presumably be combined
 * into a bitmask (e.g. VERTEX | FRAGMENT) -- confirm against callers.
 */
export enum ShaderStage {
    VERTEX = 1,
    FRAGMENT = 2,
    COMPUTE = 4
}
|
from datetime import datetime
from collections import namedtuple

# Bug fix: the original script never imported `datetime` and referenced an
# undefined `Range` type, so it raised NameError before printing anything.
Range = namedtuple('Range', ['start', 'end'])


def calculate_overlap_duration(range1, range2):
    """Return the overlap of two Range(start, end) datetimes, in seconds.

    The overlap is measured between the later start and the earlier end.
    One extra second is added to make the boundary instant inclusive
    (preserved from the original formula); 0 is returned when the ranges
    do not overlap.
    """
    latest_start = max(range1.start, range2.start)
    earliest_end = min(range1.end, range2.end)
    overlap = (earliest_end - latest_start).total_seconds() + 1 if earliest_end > latest_start else 0
    return int(overlap)


# Demo: ranges overlap from 2012-03-20 to 2012-05-10 = 51 days, so the
# result is 51*86400 + 1 = 4406401 (the original comment's 518400 was wrong).
r1 = Range(start=datetime(2012, 1, 15), end=datetime(2012, 5, 10))
r2 = Range(start=datetime(2012, 3, 20), end=datetime(2012, 9, 15))
print(calculate_overlap_duration(r1, r2))  # Output: 4406401
var SCom = {};
var commonVar = {};
GlobalVar = (window.parent && window.parent.GlobalVar) ? window.parent.GlobalVar : null;
if (GlobalVar)
NowUserId = GlobalVar.NowUserId;
commonVar.GetUrlDeep = function (URL) {
var page_Deep = 0,
index_1 = URL.indexOf('/Sys'),
leftStr = URL.substr(index_1 + '/Sys'.length, URL.length - (index_1 + '/Sys'.length));
for (var i = 0; i < leftStr.length; ++i) {
if (leftStr[i] == '/')
++page_Deep;
}
return page_Deep;
}
commonVar.urlDeep = commonVar.GetUrlDeep(document.URL);
commonVar.urlCDStr = Ext.String.repeat('../', commonVar.urlDeep);
urlCDStr = commonVar.urlCDStr;
commonVar.RenderInt = function (v, mete, rec) {
if (v == 0)
return '';
else
return Ext.util.Format.number(v, '0');
}
commonVar.ConvertBool = function (value) {
if (value == 'T' || value == true || value == 'true' || value == 'True')
return true;
else
return false;
}
Ext.define('SCom.cbSalmType', {
extend : 'Ext.form.ComboBox',
xtype : 'cbSalmType',
fieldLabel: '',
store: Ext.create('Ext.data.Store', {
fields: ['value', 'name'],
data : [
{ "value": "1", "name": "1.半成品" },
{ "value": "2", "name": "2.上肠拼身" },
{ "value": "3", "name": "3.专车" },
{ "value": "4", "name": "4.上AB面" },
{ "value": "5", "name": "5.杂工" }
]
}),
queryMode: 'local',
displayField: 'name',
valueField: 'value',
value : ''
});
Ext.define('SCom.cbUTType', {
extend: 'Ext.form.ComboBox',
xtype: 'cbUTType',
fieldLabel: '',
store: Ext.create('Ext.data.Store', {
fields: [{ name:'value', type: 'int'} , 'name'],
data: [
{ value: 1, "name": "对" },
{ value: 2, "name": "个" },
]
}),
queryMode: 'local',
displayField: 'name',
valueField: 'value',
value: 1
});
Ext.define('SCom.cbWQType', {
extend: 'Ext.form.ComboBox',
xtype: 'cbWQType',
fieldLabel: '',
store: Ext.create('Ext.data.Store', {
fields: [{ name: 'value', type: 'string' }, 'name'],
data: [
{ value: 'size_qty', "name": "单尺寸量" },
{ value: 'all_size_qty', "name": "总--尺寸量" }
]
}),
queryMode: 'local',
displayField: 'name',
valueField: 'value',
value: 'size_qty'
});
Ext.define('SCom.cbLayoutFinishQty', {
extend: 'Ext.form.ComboBox',
xtype: 'cbLayoutFinishQty',
fieldLabel: '',
store: Ext.create('Ext.data.Store', {
fields: [{ name: 'value', type: 'string' }, 'name'],
data: [
{ value: 'FINISH', "name": "1.完成量" },
{ value: 'PLAN', "name": "2.计划量" },
{ value: 'FINISH-PLAN', "name": "3.完成量->计划量" }
]
}),
queryMode: 'local',
displayField: 'name',
valueField: 'value',
value: 'FINISH'
});
// renderer 工资水平分类
SCom.rdSalmType = function (v, m, rec) {
switch (v) {
case '1':
return '1.半成品';
break;
case '2':
return '2.上肠拼身';
break;
case '3':
return '3.专车';
break;
case '4':
return '4.上AB面';
break;
case '5':
return '5.杂工';
break;
}
return '';
}
//工资类型
SCom.rdClcType = function (v, m, rec) {
switch (v) {
case '1':
return '车位剪线';
break;
case '2':
return '杂工车位';
break;
case '3':
return '';
break;
}
return '';
}
SCom.rdUTType = function (v, m, rec) {
switch (v) {
case 1:
case '1':
return '对';
break;
case 2:
case '2':
return '个';
break;
}
return '';
}
SCom.rdWQType = function (v, m, rec) {
switch (v) {
case '':
case 'size_qty':
return '单尺寸量';
break;
case 'all_size_qty':
return '总--尺寸量';
break;
}
return '';
}
//过滤特殊字符
SCom.stripString = function(s) {
var pattern = new RegExp("[`~!@#$^&*()=|{}':;',\\[\\].<>/?~!@#¥……&*()&;—|{}【】‘;:”“'。,、?]")
var rs = "";
for (var i = 0; i < s.length; i++) {
rs = rs + s.substr(i, 1).replace(pattern, '');
}
var pattern2 = new RegExp("[ ]", 'g');
rs = rs.replace(pattern2, function(m) {
return ' ';
});
return rs;
}
Ext.define('SCom.cbPrdtState', {
extend: 'Ext.form.ComboBox',
xtype: 'cbPrdtState',
fieldLabel: '',
store: Ext.create('Ext.data.Store', {
fields: ['value', 'name'],
data: [
{ "value": "0", "name": "正常" },
{ "value": "1", "name": "停用" }
]
}),
queryMode: 'local',
displayField: 'name',
valueField: 'value',
value: '0'
});
// renderer: map a product-state code to its display label.
// ('0' = active, '1' = disabled; anything else renders empty.)
SCom.rdPrdtState = function (v, m, rec) {
    switch (v) {
        case '0':
            return '正常';
        case '1':
            return '停用';
        default:
            return '';
    }
    // (removed the unreachable trailing `return '';` that followed a
    // switch in which every path already returns)
}
/// Collect all records under loopParentId, descending recursively
/// (original comment: 取节点下层Record -- "get the records below a node").
commonVar.GetSubHierarchicalRecord = function (lookUpStore, parentIdField, idField, loopParentId) {
    var arr = [];
    lookUpStore.findBy(function (qRec) {
        if (qRec.get(parentIdField) == loopParentId) {
            arr.push(qRec);
            // Probe whether this record has any children of its own.
            var hadSub = false;
            lookUpStore.findBy(function (q2Rec) {
                if (qRec.get(idField) == q2Rec.get(parentIdField)) {
                    hadSub = true;
                }
            });
            if (hadSub) {
                // Bug fix: the original called commonVar.GetHeriaRecord,
                // which is not defined anywhere in this file (ReferenceError
                // at runtime).  Recurse into this function with the child's
                // id as the new parent instead -- TODO confirm against any
                // external definition of GetHeriaRecord.
                var arr2 = commonVar.GetSubHierarchicalRecord(lookUpStore, parentIdField, idField, qRec.get(idField));
                for (var i = 0; i < arr2.length; i++) {
                    arr.push(arr2[i]);
                }
            }
        }
    });
    return arr;
}
//**货品名称
commonVar.RenderPrdtName = function (v, m, rec) {
v = String(v || '');
if (!v)
return '';
if (GlobalVar) {
rec2 = GlobalVar.GetRecord('PRDT', v, true);
if (rec2)
return rec2.get('name');
else
return v || '';
}
return v || '';
}
commonVar.RenderColorName = function (v, m, rec) {
v = String(v || '');
if (!v || v == '-1' || v == '0') {
return '';
}
if (GlobalVar) {
var rec2 = GlobalVar.GetRecord('COLOR', v.toString(), true);
//console.log({ rec2: rec2, v:v });
if (rec2) {
return rec2.get('color');
}
else {
return v || '';
}
}
return v || '';
}
// 复制
var TConfig = {};
TConfig.GetDeptWithPeopleStore = function() {
return Ext.create('Ext.data.TreeStore', {
model: 'Model_TreeDept',
folderSort: true,
proxy: {
type: 'ajax',
url: '../../Handler2/TCRM/ashx_CrmDept.ashx?action=fetch_data_with_people',
reader: {
type: 'json'
}
},
root: {
text: '根节点',
expanded: true
}
});
}
//// 递归展开所有下层节点,并全选、后全选
TConfig.ExpandAndSelectedSub = function(node, checked) {
node.expand();
node.checked = checked;
node.eachChild(function(child) {
child.set('checked', checked);
TConfig.ExpandAndSelectedSub(child, checked);
});
}
Ext.EventManager.on(window, 'keydown', function (e, t) {
if (t.tagName != 'TEXTAREA') {
if (e.getKey() == e.BACKSPACE && (!/^input$/i.test(t.tagName) || t.disabled || t.readOnly)) {
e.stopEvent();
}
}
});
commonVar.AjaxRequest = function (postUrl, postParams, fnSucessCallBack) {
Ext.Ajax.request({
url: postUrl,
params: postParams,
success: function (response) {
var json = Ext.decode(response.responseText);
if (json.result) {
if (Ext.isFunction(fnSucessCallBack)) {
fnSucessCallBack(json);
}
}
else {
alert(json.msg);
}
},
failure: function (response, opts) {
alert('提交失败状态 ' + response.status);
}
});
}
commonVar.AjaxGetData = function (postUrl, postParams, fnSucessCallBack) {
Ext.Ajax.request({
url: postUrl,
params: postParams,
success: function (response) {
var json = Ext.decode(response.responseText);
if (Ext.isArray(json) || Ext.typeOf(json.total) != 'undefined') {
if (Ext.isFunction(fnSucessCallBack)) {
fnSucessCallBack(json);
}
}
else {
alert(response.responseText);
}
},
failure: function (response, opts) {
alert('提交失败状态 ' + response.status);
}
});
}
//Ext.define('Ext.Toast', {
// extend: 'Ext.Component',
// alias: 'widget.toast',
// initComponent: function () {
// var me = this;
// var msgCt;
// function createBox(t, s) {
// return [
// '<div class="msg">',
// '<div class="x-box-tl"><div class="x-box-tr"><div class="x-box-tc"></div></div></div>',
// '<div class="x-box-ml"><div class="x-box-mr"><div class="x-box-mc" style="font-size:13px"><h3>',
// t,
// '</h3>',
// s,
// '</div></div></div>',
// '<div class="x-box-bl"><div class="x-box-br"><div class="x-box-bc"></div></div></div>',
// '</div>'].join('');
// }
// /**
// * 信息提示
// * @member Ext.ux.Toast
// * @param {String}
// * title 标题
// * @param {String}
// * format 内容
// * @param {autoHide}
// * autoHide 是否自动隐藏
// * @param {pauseTime}
// * pauseTime 信息停留时间
// */
// me.msg = function (title, message, autoHide, pauseTime) {
// if (!msgCt) {
// msgCt = Ext.DomHelper.insertFirst(document.body, {
// id: 'msg-div',
// style: 'position:absolute;top:10px;width:250px;margin:0 auto;z-index:20000;'
// }, true);
// }
// // //给消息框右下角增加一个关闭按钮
// // message+='<br><span style="text-align:right;font-size:12px;
// // width:100%;">' +
// // '<font color="blank"><a style="cursor:hand;"
// // onclick="Ext.example.hide(this);">关闭</a></font></span>'
// var s = Ext.String.format.apply(String, Array.prototype.slice.call(
// arguments, 1));
// var m = Ext.DomHelper.append(msgCt, {
// html: createBox(title, s)
// }, true);
// msgCt.alignTo(document, 't-t');
// m.slideIn('t');
// if (!Ext.isEmpty(autoHide) && autoHide == true) {
// if (Ext.isEmpty(pauseTime)) {
// pauseTime = 1000;
// }
// console.log('pauseTime==>' + pauseTime);
// // 在extjs4中m.pause(t)方法已经被标记为 <strong style="font-size: 0.7em; border-top-left-radius: 2px; border-top-right-radius: 2px; border-bottom-right-radius: 2px; border-bottom-left-radius: 2px; margin-left: 5px; padding: 0px 3px; color: white; background-color: #aa0000; font-family: HelveticaNeue, helvetica, arial, clean, sans-serif; line-height: 19px; white-space: normal;" class="deprecated signature">DEPRECATED</strong>
// m.ghost("t", {
// delay: pauseTime,
// remove: true
// });
// }
// }
// me.callParent();
// return me;
// },
// /**
// * 隐藏提示框
// * @param {} v
// */
// hide: function (v) {
// var msg = Ext
// .get(v.parentElement.parentElement.parentElement.parentElement.parentElement.parentElement);
// msg.ghost("t", {
// remove: true
// });
// }
//}, function () {
// Ext.Toast = new this();
//});
Ext.toast = function () {
var msgCt;
function createBox(t, s) {
return '<div class="msg"><h3>' + t + '</h3><p>' + s + '</p></div>';
}
return {
msg: function (title, format, hideDealy) {
if (!msgCt) {
msgCt = Ext.DomHelper.insertFirst(document.body, { id: 'msg-div' }, true);
}
var s = Ext.String.format.apply(String, Array.prototype.slice.call(arguments, 1));
var m = Ext.DomHelper.append(msgCt, createBox(title, s), true);
m.hide();
m.slideIn('t').ghost("t", { delay: hideDealy, remove: true });
},
init: function () {
if (!msgCt) {
msgCt = Ext.DomHelper.insertFirst(document.body, { id: 'msg-div' }, true);
}
}
};
}();
//提示框
commonVar.Alert = function (title, html, width, align, hideDealy) {
Ext.toast.msg(title, html, hideDealy);
console.log(title);
//Ext.create('Ext.Toast', {
//}).msg(title, html, true, hideDealy);
//Ext.toast({
// alwaysOnTop: true,
// html: html,
// title: title,
// width: width || 200,
// align: align || 'tr',
// autoCloseDelay: hideDealy || 6 * 1000
//});
} |
import pandas as pd

data = {'Income': [25000, 52000, 80000],
        'Education': ["Bachelor's", "Graduate", "PhD"]}
df = pd.DataFrame(data)

# Bug fix: 'Education' holds strings, and Series.corr() requires numeric
# data (the original call fails / yields NaN on modern pandas).  Map the
# education levels onto an ordinal scale first, then correlate.
education_order = {"Bachelor's": 0, "Graduate": 1, "PhD": 2}
education_rank = df['Education'].map(education_order)

corr = df['Income'].corr(education_rank)
print("Correlation: ", corr)
#! /usr/bin/env bash
# Extract frames from a video and run baseline inference on them.
# Usage: <script> [video_file] [output_dir] [GPU ID]

# Bug fix: the script consumes three positional arguments but the original
# guard checked for exactly 2 (and the message said "One argument").
if [ "$#" -ne 3 ]; then
    echo "Three arguments are required: [video_file] [output_dir] [GPU ID]"
    exit 1
fi
BASEPATH=`dirname $0`
FILE_VIDEO=${1}
DIR_OUTPUT=${2}
GPUID=${3}
# Define output dir
DIR_OUTPUT_IMAGES=${DIR_OUTPUT}/images
DIR_OUTPUT_IMAGES_RESIZE=${DIR_OUTPUT}/images_resize
DIR_OUTPUT_LABELS=${DIR_OUTPUT}/labels
TIMESTAMP=$(date +%s)
TMP_FILE_IMAGE_LIST=/tmp/image_list_${TIMESTAMP}.txt
NUM_CORE=$(nproc)
mkdir -p ${DIR_OUTPUT_IMAGES} ${DIR_OUTPUT_IMAGES_RESIZE} ${DIR_OUTPUT_LABELS}
set -x
sh ${BASEPATH}/scripts/extract_frames.sh ${FILE_VIDEO} ${DIR_OUTPUT}
# Use the named GPUID variable instead of raw ${3}.
# NOTE(review): inference reads ./vdata/${1}/... rather than ${DIR_OUTPUT};
# preserved as-is -- confirm whether these paths should use DIR_OUTPUT.
CUDA_VISIBLE_DEVICES=${GPUID} python ./baseline_inference.py --image_folder=./vdata/${1}/images/ --label_folder=./vdata/${1}/labels/ --time_file=./${1}_time.txt
|
package net.swordie.ms.world.shop;
import net.swordie.ms.connection.OutPacket;
import net.swordie.ms.constants.ItemConstants;
import net.swordie.ms.loaders.ItemData;
import net.swordie.ms.loaders.containerclasses.ItemInfo;
import java.util.ArrayList;
import java.util.List;
/**
 * Data for an NPC shop dialog: identity fields plus the list of items
 * offered, with packet encoding for the client.
 *
 * Created on 3/27/2018.
 */
public class NpcShopDlg {
    private int shopID;
    private int selectNpcItemID;
    private int npcTemplateID;
    private int starCoin;
    private int shopVerNo;
    private List<NpcShopItem> items = new ArrayList<>();

    /**
     * Adds one shop entry (unit price 1) for every rechargeable projectile
     * item, capped per slot at the item's slot maximum.
     */
    public void generateProjectiles() {
        for (int i : ItemConstants.getRechargeablesList()) {
            ItemInfo ii = ItemData.getItemInfoByID(i);
            if (ii == null) {
                // atm just 2070014 (2070014) that somewhy isn't found
                continue;
            }
            NpcShopItem nsi = new NpcShopItem();
            nsi.setItemID(i);
            nsi.setUnitPrice(1);
            nsi.setMaxPerSlot((short) ii.getSlotMax());
            addItem(nsi);
        }
    }

    /**
     * Writes this dialog to the packet: header fields, an (unused) quest
     * section, then the regular items followed by the buy-back items.
     *
     * @param outPacket packet to write into
     * @param buyBack   previously sold items offered back to the player
     */
    public void encode(OutPacket outPacket, List<NpcShopItem> buyBack) {
        outPacket.encodeInt(getSelectNpcItemID());
        outPacket.encodeInt(getNpcTemplateID());
        outPacket.encodeInt(getStarCoin());
        outPacket.encodeInt(getShopVerNo());
        outPacket.encodeInt(1);
        // start gms only
        boolean hasQuest = false;
        outPacket.encodeByte(hasQuest);
        if (hasQuest) {
            byte size = 0;
            outPacket.encodeByte(size);
            for (int i = 0; i < size; i++) {
                // just a guess that this is for quests
                outPacket.encodeInt(0); // questID?
                outPacket.encodeString(""); // questKey?
            }
        }
        // end gms only
        outPacket.encodeShort(getItems().size() + buyBack.size());
        getItems().forEach(item -> item.encode(outPacket));
        buyBack.forEach(item -> item.encode(outPacket));
    }

    public int getShopID() {
        return shopID;
    }

    public void setShopID(int shopID) {
        this.shopID = shopID;
    }

    public List<NpcShopItem> getItems() {
        return items;
    }

    public int getSelectNpcItemID() {
        return selectNpcItemID;
    }

    public void setSelectNpcItemID(int selectNpcItemID) {
        this.selectNpcItemID = selectNpcItemID;
    }

    public int getNpcTemplateID() {
        return npcTemplateID;
    }

    public void setNpcTemplateID(int npcTemplateID) {
        this.npcTemplateID = npcTemplateID;
    }

    public int getStarCoin() {
        return starCoin;
    }

    public void setStarCoin(int starCoin) {
        this.starCoin = starCoin;
    }

    public int getShopVerNo() {
        return shopVerNo;
    }

    public void setShopVerNo(int shopVerNo) {
        this.shopVerNo = shopVerNo;
    }

    public void addItem(NpcShopItem nsi) {
        getItems().add(nsi);
    }

    public void setItems(List<NpcShopItem> items) {
        this.items = items;
    }

    /**
     * Returns the item at {@code idx}, or {@code null} when out of range.
     */
    public NpcShopItem getItemByIndex(int idx) {
        // Bug fix: the original condition used `||`, which is true for
        // every idx >= 0 and threw IndexOutOfBoundsException for indices
        // past the end of the list.
        if (idx >= 0 && idx < getItems().size()) {
            return getItems().get(idx);
        }
        return null;
    }
}
|
#!/bin/bash -e
# shellcheck disable=SC2119
run_sub_stage()
{
log "Begin ${SUB_STAGE_DIR}"
pushd "${SUB_STAGE_DIR}" > /dev/null
for i in {00..99}; do
if [ -f "${i}-debconf" ]; then
log "Begin ${SUB_STAGE_DIR}/${i}-debconf"
on_chroot << EOF
debconf-set-selections <<SELEOF
$(cat "${i}-debconf")
SELEOF
EOF
log "End ${SUB_STAGE_DIR}/${i}-debconf"
fi
if [ -f "${i}-packages-nr" ]; then
log "Begin ${SUB_STAGE_DIR}/${i}-packages-nr"
PACKAGES="$(sed -f "${SCRIPT_DIR}/remove-comments.sed" < "${i}-packages-nr")"
if [ -n "$PACKAGES" ]; then
on_chroot << EOF
apt-get -o APT::Acquire::Retries=3 install --no-install-recommends -y $PACKAGES
EOF
if [ "${USE_QCOW2}" = "1" ]; then
on_chroot << EOF
apt-get clean
EOF
fi
fi
log "End ${SUB_STAGE_DIR}/${i}-packages-nr"
fi
if [ -f "${i}-packages" ]; then
log "Begin ${SUB_STAGE_DIR}/${i}-packages"
PACKAGES="$(sed -f "${SCRIPT_DIR}/remove-comments.sed" < "${i}-packages")"
if [ -n "$PACKAGES" ]; then
on_chroot << EOF
apt-get -o APT::Acquire::Retries=3 install -y $PACKAGES
EOF
if [ "${USE_QCOW2}" = "1" ]; then
on_chroot << EOF
apt-get clean
EOF
fi
fi
log "End ${SUB_STAGE_DIR}/${i}-packages"
fi
if [ -d "${i}-patches" ]; then
log "Begin ${SUB_STAGE_DIR}/${i}-patches"
pushd "${STAGE_WORK_DIR}" > /dev/null
if [ "${CLEAN}" = "1" ]; then
rm -rf .pc
rm -rf ./*-pc
fi
QUILT_PATCHES="${SUB_STAGE_DIR}/${i}-patches"
SUB_STAGE_QUILT_PATCH_DIR="$(basename "$SUB_STAGE_DIR")-pc"
mkdir -p "$SUB_STAGE_QUILT_PATCH_DIR"
ln -snf "$SUB_STAGE_QUILT_PATCH_DIR" .pc
quilt upgrade
if [ -e "${SUB_STAGE_DIR}/${i}-patches/EDIT" ]; then
echo "Dropping into bash to edit patches..."
bash
fi
RC=0
quilt push -a || RC=$?
case "$RC" in
0|2)
;;
*)
false
;;
esac
popd > /dev/null
log "End ${SUB_STAGE_DIR}/${i}-patches"
fi
if [ -x ${i}-run.sh ]; then
log "Begin ${SUB_STAGE_DIR}/${i}-run.sh"
./${i}-run.sh
log "End ${SUB_STAGE_DIR}/${i}-run.sh"
fi
if [ -f ${i}-run-chroot.sh ]; then
log "Begin ${SUB_STAGE_DIR}/${i}-run-chroot.sh"
on_chroot < ${i}-run-chroot.sh
log "End ${SUB_STAGE_DIR}/${i}-run-chroot.sh"
fi
done
popd > /dev/null
log "End ${SUB_STAGE_DIR}"
}
run_stage(){
log "Begin ${STAGE_DIR}"
STAGE="$(basename "${STAGE_DIR}")"
pushd "${STAGE_DIR}" > /dev/null
STAGE_WORK_DIR="${WORK_DIR}/${STAGE}"
ROOTFS_DIR="${STAGE_WORK_DIR}"/rootfs
if [ "${USE_QCOW2}" = "1" ]; then
if [ ! -f SKIP ]; then
load_qimage
fi
else
# make sure we are not umounting during export-image stage
if [ "${USE_QCOW2}" = "0" ] && [ "${NO_PRERUN_QCOW2}" = "0" ]; then
unmount "${WORK_DIR}/${STAGE}"
fi
fi
if [ ! -f SKIP_IMAGES ]; then
if [ -f "${STAGE_DIR}/EXPORT_IMAGE" ]; then
EXPORT_DIRS="${EXPORT_DIRS} ${STAGE_DIR}"
fi
fi
if [ ! -f SKIP ]; then
if [ "${CLEAN}" = "1" ] && [ "${USE_QCOW2}" = "0" ] ; then
if [ -d "${ROOTFS_DIR}" ]; then
rm -rf "${ROOTFS_DIR}"
fi
fi
if [ -x prerun.sh ]; then
log "Begin ${STAGE_DIR}/prerun.sh"
./prerun.sh
log "End ${STAGE_DIR}/prerun.sh"
fi
for SUB_STAGE_DIR in "${STAGE_DIR}"/*; do
if [ -d "${SUB_STAGE_DIR}" ] && [ ! -f "${SUB_STAGE_DIR}/SKIP" ]; then
run_sub_stage
fi
done
fi
if [ "${USE_QCOW2}" = "1" ]; then
unload_qimage
else
# make sure we are not umounting during export-image stage
if [ "${USE_QCOW2}" = "0" ] && [ "${NO_PRERUN_QCOW2}" = "0" ]; then
unmount "${WORK_DIR}/${STAGE}"
fi
fi
PREV_STAGE="${STAGE}"
PREV_STAGE_DIR="${STAGE_DIR}"
PREV_ROOTFS_DIR="${ROOTFS_DIR}"
popd > /dev/null
log "End ${STAGE_DIR}"
}
if [ "$(id -u)" != "0" ]; then
echo "Please run as root" 1>&2
exit 1
fi
BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if [[ $BASE_DIR = *" "* ]]; then
echo "There is a space in the base path of pi-gen"
echo "This is not a valid setup supported by debootstrap."
echo "Please remove the spaces, or move pi-gen directory to a base path without spaces" 1>&2
exit 1
fi
export BASE_DIR
if [ -f config ]; then
# shellcheck disable=SC1091
source config
fi
while getopts "c:" flag
do
case "$flag" in
c)
EXTRA_CONFIG="$OPTARG"
# shellcheck disable=SC1090
source "$EXTRA_CONFIG"
;;
*)
;;
esac
done
term() {
	# Exit/interrupt handler: when building inside a qcow2 image, make
	# sure it is cleanly unloaded before the process dies.
	[ "${USE_QCOW2}" != "1" ] && return
	log "Unloading image"
	unload_qimage
}
# Ensure any mounted qcow2 image is released however the build exits.
trap term EXIT INT TERM
export PI_GEN=${PI_GEN:-pi-gen}
export PI_GEN_REPO=${PI_GEN_REPO:-https://github.com/RPi-Distro/pi-gen}
# IMG_NAME is the only mandatory setting; everything else has a default.
if [ -z "${IMG_NAME}" ]; then
echo "IMG_NAME not set" 1>&2
exit 1
fi
export USE_QEMU="${USE_QEMU:-0}"
export IMG_DATE="${IMG_DATE:-"$(date +%Y-%m-%d)"}"
export IMG_FILENAME="${IMG_FILENAME:-"${IMG_DATE}-${IMG_NAME}"}"
export ZIP_FILENAME="${ZIP_FILENAME:-"image_${IMG_DATE}-${IMG_NAME}"}"
export SCRIPT_DIR="${BASE_DIR}/scripts"
export WORK_DIR="${WORK_DIR:-"${BASE_DIR}/work/${IMG_NAME}"}"
export DEPLOY_DIR=${DEPLOY_DIR:-"${BASE_DIR}/deploy"}
export DEPLOY_ZIP="${DEPLOY_ZIP:-1}"
export LOG_FILE="${WORK_DIR}/build.log"
export TARGET_HOSTNAME=${TARGET_HOSTNAME:-raspberrypi}
export FIRST_USER_NAME=${FIRST_USER_NAME:-pi}
export FIRST_USER_PASS=${FIRST_USER_PASS:-raspberry}
export RELEASE=${RELEASE:-bullseye}
export WPA_ESSID
export WPA_PASSWORD
# NOTE(review): WPA country / locale / keymap / timezone defaults below are
# customised for China, diverging from upstream pi-gen — presumably an
# intentional fork choice; confirm before rebasing on upstream.
export WPA_COUNTRY=CN
export ENABLE_SSH="${ENABLE_SSH:-0}"
export PUBKEY_ONLY_SSH="${PUBKEY_ONLY_SSH:-0}"
export LOCALE_DEFAULT="${LOCALE_DEFAULT:-zh_CN.UTF-8}"
export KEYBOARD_KEYMAP="${KEYBOARD_KEYMAP:-cn}"
export KEYBOARD_LAYOUT="${KEYBOARD_LAYOUT:-English (US)}"
export TIMEZONE_DEFAULT="${TIMEZONE_DEFAULT:-Asia/Shanghai}"
export GIT_HASH=${GIT_HASH:-"$(git rev-parse HEAD)"}
export PUBKEY_SSH_FIRST_USER
export CLEAN
export IMG_NAME
# NOTE(review): APT_PROXY is hard-coded (no override default) to the default
# docker bridge IP; builds outside that docker/apt-cacher setup will fail the
# reachability check further down unless this line is changed.
export APT_PROXY=http://172.17.0.1:3142
export STAGE
export STAGE_DIR
export STAGE_WORK_DIR
export PREV_STAGE
export PREV_STAGE_DIR
export ROOTFS_DIR
export PREV_ROOTFS_DIR
export IMG_SUFFIX
export NOOBS_NAME
export NOOBS_DESCRIPTION
export EXPORT_DIR
export EXPORT_ROOTFS_DIR
export QUILT_PATCHES
export QUILT_NO_DIFF_INDEX=1
export QUILT_NO_DIFF_TIMESTAMPS=1
export QUILT_REFRESH_ARGS="-p ab"
# shellcheck source=scripts/common
source "${SCRIPT_DIR}/common"
# shellcheck source=scripts/dependencies_check
source "${SCRIPT_DIR}/dependencies_check"
export NO_PRERUN_QCOW2="${NO_PRERUN_QCOW2:-1}"
export USE_QCOW2="${USE_QCOW2:-0}"
export BASE_QCOW2_SIZE=${BASE_QCOW2_SIZE:-12G}
source "${SCRIPT_DIR}/qcow2_handling"
# NOTE(review): this unconditionally overwrites NO_PRERUN_QCOW2, making both
# the ":-1" default above and the one below dead — the effective value is
# always derived from USE_QCOW2.
if [ "${USE_QCOW2}" = "1" ]; then
NO_PRERUN_QCOW2=1
else
NO_PRERUN_QCOW2=0
fi
export NO_PRERUN_QCOW2="${NO_PRERUN_QCOW2:-1}"
dependencies_check "${BASE_DIR}/depends"
#check username is valid
if [[ ! "$FIRST_USER_NAME" =~ ^[a-z][-a-z0-9_]*$ ]]; then
echo "Invalid FIRST_USER_NAME: $FIRST_USER_NAME"
exit 1
fi
# Fail fast if the configured apt proxy is unreachable.
if [[ -n "${APT_PROXY}" ]] && ! curl --silent "${APT_PROXY}" >/dev/null ; then
echo "Could not reach APT_PROXY server: ${APT_PROXY}"
exit 1
fi
# WPA-PSK passphrases must be 8..63 characters when one is supplied.
# NOTE(review): on the next echo line the closing quote ends after
# WPA_PASSWORD; the rest of the message is unquoted words (works, fragile).
if [[ -n "${WPA_PASSWORD}" && ${#WPA_PASSWORD} -lt 8 || ${#WPA_PASSWORD} -gt 63 ]] ; then
echo "WPA_PASSWORD" must be between 8 and 63 characters
exit 1
fi
if [[ "${PUBKEY_ONLY_SSH}" = "1" && -z "${PUBKEY_SSH_FIRST_USER}" ]]; then
echo "Must set 'PUBKEY_SSH_FIRST_USER' to a valid SSH public key if using PUBKEY_ONLY_SSH"
exit 1
fi
mkdir -p "${WORK_DIR}"
log "Begin ${BASE_DIR}"
# First pass: run every build stage (stage0, stage1, ...) in order.
STAGE_LIST=${STAGE_LIST:-${BASE_DIR}/stage*}
for STAGE_DIR in $STAGE_LIST; do
STAGE_DIR=$(realpath "${STAGE_DIR}")
run_stage
done
CLEAN=1
# Second pass: produce an image for every stage that dropped an EXPORT_IMAGE
# marker during the first pass.
for EXPORT_DIR in ${EXPORT_DIRS}; do
STAGE_DIR=${BASE_DIR}/export-image
# shellcheck source=/dev/null
source "${EXPORT_DIR}/EXPORT_IMAGE"
EXPORT_ROOTFS_DIR=${WORK_DIR}/$(basename "${EXPORT_DIR}")/rootfs
if [ "${USE_QCOW2}" = "1" ]; then
# qcow2 flow: flatten the per-stage overlay chain into one export image,
# mount it, run the export stage against it, then restore the chain so
# incremental re-runs keep working.
USE_QCOW2=0
EXPORT_NAME="${IMG_FILENAME}${IMG_SUFFIX}"
echo "------------------------------------------------------------------------"
echo "Running export stage for ${EXPORT_NAME}"
rm -f "${WORK_DIR}/export-image/${EXPORT_NAME}.img" || true
rm -f "${WORK_DIR}/export-image/${EXPORT_NAME}.qcow2" || true
rm -f "${WORK_DIR}/${EXPORT_NAME}.img" || true
rm -f "${WORK_DIR}/${EXPORT_NAME}.qcow2" || true
EXPORT_STAGE=$(basename "${EXPORT_DIR}")
# Build TMP_LIST: space-separated basenames of all stages.
# NOTE(review): TMP_LIST is never reset, so with more than one EXPORT_DIR
# it accumulates duplicate entries on later iterations — confirm whether
# multi-export qcow2 builds are expected to work.
for s in $STAGE_LIST; do
TMP_LIST=${TMP_LIST:+$TMP_LIST }$(basename "${s}")
done
FIRST_STAGE=${TMP_LIST%% *}
FIRST_IMAGE="image-${FIRST_STAGE}.qcow2"
pushd "${WORK_DIR}" > /dev/null
echo "Creating new base "${EXPORT_NAME}.qcow2" from ${FIRST_IMAGE}"
cp "./${FIRST_IMAGE}" "${EXPORT_NAME}.qcow2"
ARR=($TMP_LIST)
# rebase stage images to new export base
for CURR_STAGE in "${ARR[@]}"; do
if [ "${CURR_STAGE}" = "${FIRST_STAGE}" ]; then
PREV_IMG="${EXPORT_NAME}"
continue
fi
echo "Rebasing image-${CURR_STAGE}.qcow2 onto ${PREV_IMG}.qcow2"
qemu-img rebase -f qcow2 -u -b ${PREV_IMG}.qcow2 image-${CURR_STAGE}.qcow2
if [ "${CURR_STAGE}" = "${EXPORT_STAGE}" ]; then
break
fi
PREV_IMG="image-${CURR_STAGE}"
done
# commit current export stage into base export image
echo "Committing image-${EXPORT_STAGE}.qcow2 to ${EXPORT_NAME}.qcow2"
qemu-img commit -f qcow2 -p -b "${EXPORT_NAME}.qcow2" image-${EXPORT_STAGE}.qcow2
# rebase stage images back to original first stage for easy re-run
for CURR_STAGE in "${ARR[@]}"; do
if [ "${CURR_STAGE}" = "${FIRST_STAGE}" ]; then
PREV_IMG="image-${CURR_STAGE}"
continue
fi
echo "Rebasing back image-${CURR_STAGE}.qcow2 onto ${PREV_IMG}.qcow2"
qemu-img rebase -f qcow2 -u -b ${PREV_IMG}.qcow2 image-${CURR_STAGE}.qcow2
if [ "${CURR_STAGE}" = "${EXPORT_STAGE}" ]; then
break
fi
PREV_IMG="image-${CURR_STAGE}"
done
popd > /dev/null
mkdir -p "${WORK_DIR}/export-image/rootfs"
mv "${WORK_DIR}/${EXPORT_NAME}.qcow2" "${WORK_DIR}/export-image/"
echo "Mounting image ${WORK_DIR}/export-image/${EXPORT_NAME}.qcow2 to rootfs ${WORK_DIR}/export-image/rootfs"
mount_qimage "${WORK_DIR}/export-image/${EXPORT_NAME}.qcow2" "${WORK_DIR}/export-image/rootfs"
CLEAN=0
run_stage
CLEAN=1
USE_QCOW2=1
else
run_stage
fi
# Optionally also produce a NOOBS tree (skipped for qemu builds).
if [ "${USE_QEMU}" != "1" ]; then
if [ -e "${EXPORT_DIR}/EXPORT_NOOBS" ]; then
# shellcheck source=/dev/null
source "${EXPORT_DIR}/EXPORT_NOOBS"
STAGE_DIR="${BASE_DIR}/export-noobs"
if [ "${USE_QCOW2}" = "1" ]; then
USE_QCOW2=0
run_stage
USE_QCOW2=1
else
run_stage
fi
fi
fi
done
# Optional user hook executed after all stages completed.
if [ -x postrun.sh ]; then
log "Begin postrun.sh"
cd "${BASE_DIR}"
./postrun.sh
log "End postrun.sh"
fi
if [ "${USE_QCOW2}" = "1" ]; then
unload_qimage
fi
log "End ${BASE_DIR}"
|
<gh_stars>1-10
/**
 * Placeholder Solace message-queue adapter.
 *
 * Constructing or invoking this adapter always throws: the Solace backend
 * has not been implemented yet.
 *
 * @param {Object} configuration - Adapter configuration (currently unused).
 * @throws {Error} always ("Not Implemented!!!").
 */
function SolaceMQAdapter(configuration) {
    var notImplemented = new Error("Not Implemented!!!");
    throw notImplemented;
}

module.exports = SolaceMQAdapter;
#!/bin/sh
#info
# Script metadata consumed by the surrounding patch tooling.
# NOTE(review): "Peristence" is a typo for "Persistence", but the value is a
# component of BACKUPFOLDER below, so fixing it would relocate backups —
# left unchanged deliberately.
export TOPIC=Peristence
export DESCRIPTION="This script will patch Video in Motion."
# Runtime library search paths and config dir for the target unit.
export LD_LIBRARY_PATH=/mnt/app/root/lib-target:/eso/lib:/mnt/app/usr/lib:/mnt/app/armle/lib:/mnt/app/armle/lib/dll:/mnt/app/armle/usr/lib
export IPL_CONFIG_DIR=/etc/eso/production
echo $DESCRIPTION
# Helper scripts: presumably populate $VERSION/$FAZIT and mount the SD card
# ($VOLUME) — TODO confirm against the util_* sources.
. /eso/bin/PhoneCustomer/default/util_info.sh
. /eso/bin/PhoneCustomer/default/util_mountsd.sh
# Abort when no SD card was mounted; backups must land on removable media.
# NOTE(review): [[ ]] is a bash/ksh extension, not strict POSIX sh — works on
# the target's shell, but the shebang says /bin/sh.
if [[ -z "$VOLUME" ]]
then
echo "No SD-card found, quitting"
exit 0
fi
# Back up both persistence storage images before patching anything.
export BACKUPFOLDER=$VOLUME/Backup/$VERSION/$FAZIT/$TOPIC/
mkdir -p $BACKUPFOLDER
echo "Making backup of Storage1.raw"
cp /net/rcc/mnt/efs-persist/storage1.raw $BACKUPFOLDER
echo "Making backup of Storage2.raw"
cp /net/rcc/mnt/efs-persist/storage2.raw $BACKUPFOLDER
# Write the patched Video-in-Motion value via the "pc" tool on the mmx node.
# The payload is an opaque device-specific blob — do not edit by hand.
echo "Setting VIM to 200km/h"
on -f mmx /net/mmx/eso/bin/apps/pc b:0:3221422082 c7000602ff0000000014ff00ff00ff000000ff02ff02000000073031ac3f
# Flush filesystem buffers before forcing a reset.
sync
sync
sync
echo "Patching done, please reboot for activation!"
echo "Don't be an ass: don't watch movies while driving."
sleep 1
echo "Resetting unit now"
sleep 2
# Temporarily permit and trigger a system reset through the OOC device,
# then remove the permission marker again.
mount -uw /mnt/system
touch /etc/ooc.allow.reset
echo hmi-sys-reset > /dev/ooc/reset
sleep 2
rm /etc/ooc.allow.reset
echo "Reboot request sent"
|
<filename>db/migrate/20180602165337_create_notifications.notifications.rb
# This migration comes from notifications (originally 20160328045436)
class CreateNotifications < ActiveRecord::Migration[5.0]
  # Creates the polymorphic `notifications` table together with the lookup
  # indexes used when fetching a user's notifications (optionally filtered
  # by type).
  def change
    create_table :notifications do |t|
      t.integer :user_id, null: false
      t.integer :actor_id
      t.string :notify_type, null: false
      t.string :target_type
      t.integer :target_id
      t.string :second_target_type
      t.integer :second_target_id
      t.string :third_target_type
      t.integer :third_target_id
      t.datetime :read_at
      t.timestamps null: false
    end

    add_index :notifications, %i[user_id notify_type]
    add_index :notifications, [:user_id]
  end
end
|
from PIL import Image
from fastapi.responses import StreamingResponse
import io
def generate_thumbnail(input_image_path, size=(100, 100)):
    """Create a PNG thumbnail of an image and serve it as a streaming response.

    Args:
        input_image_path: Path or file-like object accepted by ``Image.open``.
        size: Maximum ``(width, height)`` of the thumbnail. The image is
            scaled down in place preserving aspect ratio; defaults to
            ``(100, 100)``, matching the previously hard-coded size.

    Returns:
        fastapi.responses.StreamingResponse serving the thumbnail with
        ``media_type='image/png'``.
    """
    # Open the input image using PIL
    with Image.open(input_image_path) as img:
        # Copy first so the caller's image object is left untouched;
        # thumbnail() resizes in place and never upscales.
        thumbnail = img.copy()
        thumbnail.thumbnail(size)
        # Serialize the thumbnail into an in-memory PNG buffer.
        thumbnail_bytes = io.BytesIO()
        thumbnail.save(thumbnail_bytes, format='PNG')
        thumbnail_bytes.seek(0)
        # Return the thumbnail image as a StreamingResponse
        return StreamingResponse(thumbnail_bytes, media_type='image/png')
module SupportInterface
# Builds a row-per-application export of candidate feedback for support
# staff (consumed by the export framework, typically serialised to CSV).
class CandidateFeedbackExport
# Returns an array of hashes, one per application form that left feedback.
def data_for_export
application_forms.find_each.map do |application_form|
{
'Name' => application_form.full_name,
'Recruitment cycle year' => application_form.recruitment_cycle_year,
# NOTE(review): this header uses an underscore ('Email_address') unlike
# the other space-separated headers — confirm consumers expect this.
'Email_address' => application_form.candidate.email_address,
'Phone number' => application_form.phone_number,
'Submitted at' => application_form.submitted_at&.iso8601,
'Satisfaction level' => application_form.feedback_satisfaction_level,
'CSAT score' => csat_score(application_form.feedback_satisfaction_level),
'Suggestions' => application_form.feedback_suggestions,
}
end
end
private
# Only forms that actually recorded a satisfaction level; the candidate
# association is preloaded to avoid an N+1 on email_address above.
def application_forms
@application_forms ||= ApplicationForm.where.not(
feedback_satisfaction_level: nil,
).includes(:candidate)
end
# Maps feedback satisfaction levels to a 1..5 customer-satisfaction score.
# Indifferent access lets lookups work with string column values.
CSAT_SCORES = {
very_satisfied: 5,
satisfied: 4,
neither_satisfied_or_dissatisfied: 3,
dissatisfied: 2,
very_dissatisfied: 1,
}.with_indifferent_access.freeze
# Returns the CSAT score for +satisfaction_level+, or nil when unmapped.
def csat_score(satisfaction_level)
CSAT_SCORES[satisfaction_level]
end
end
end
|
<reponame>zrwusa/expo-bunny
import React, {Component} from 'react';
import {StyleProp, StyleSheet, Text, TextStyle, View, ViewStyle} from 'react-native';
// TODO: support web
import {IMessage, PositionLeftOrRight} from './types';
import {AudioPlayer, AudioPlayerProps} from '../AudioPlayer';
import {SizeLabor, ThemeLabor} from '../../types';
import {withBunnyKit, WithBunnyKit} from '../../hooks/bunny-kit';
// Builds the StyleSheets for the audio message bubble, one set per bubble
// position ("left" vs "right"). Colors come from the active theme and the
// bubble height from the iPhone-X-based design scale (wp).
const makeStyles = (sizeLabor: SizeLabor, themeLabor: ThemeLabor) => {
const {wp} = sizeLabor.designsBasedOn.iphoneX;
const {theme: {colors}} = themeLabor;
return {
// Styles applied when the bubble renders on the left side.
left: StyleSheet.create({
container: {
height: wp(46)
},
audio: {},
playButton: {
backgroundColor: colors.backgroundAA,
},
playButtonIcon: {
color: colors.backgroundAAA
},
progress: {
backgroundColor: colors.backgroundAB
},
remainTime: {
color: colors.textAC
}
}),
// Styles applied when the bubble renders on the right side.
right: StyleSheet.create({
container: {
height: wp(46)
},
audio: {},
playButton: {
backgroundColor: colors.backgroundBA,
},
playButtonIcon: {
color: colors.backgroundBAA
},
progress: {
backgroundColor: colors.backgroundBB
},
remainTime: {
color: colors.textBC
}
}),
};
};
/**
 * Props for the audio chat-message bubble component.
 */
export interface MessageAudioProps<TMessage extends IMessage> {
// Side the bubble is rendered on; selects the matching style set.
position: PositionLeftOrRight;
// Message being rendered; its `audio` field holds the playable source URI.
currentMessage?: TMessage;
// Style override for the wrapper view around the player.
audioContainerStyle?: StyleProp<ViewStyle>;
// Style overrides forwarded to the embedded AudioPlayer's parts.
audioStyle?: StyleProp<ViewStyle>;
// Per-side progress-bar color.
// NOTE(review): in render() the theme color is checked first, so this prop
// only takes effect when the theme color is falsy — confirm intent.
audioProgressColor?: { left: string, right: string };
audioProgressStyle?: StyleProp<ViewStyle>;
audioPlayButtonStyle?: StyleProp<ViewStyle>;
audioRemainTimeStyle?: StyleProp<TextStyle>;
audioPlayButtonIconStyle?: StyleProp<TextStyle>;
// Extra AudioPlayer props; `source` is always derived from currentMessage.
audioProps?: Omit<AudioPlayerProps, 'source'>;
// When true, logs lifecycle events and renders debug placeholder text.
isDebug?: boolean;
// Load-lifecycle callbacks, forwarded from the underlying AudioPlayer.
onMessageLoad?(currentMessage: TMessage): void;
onMessageLoadStart?(currentMessage: TMessage): void;
onMessageLoadEnd?(currentMessage: TMessage): void;
onMessageLoadError?(e: Error, currentMessage: TMessage): void;
onMessageReadyForDisplay?(currentMessage: TMessage): void;
}
/**
 * Audio chat-message bubble. Renders the message's `audio` URI through an
 * AudioPlayer, styled for the bubble's side, and forwards the player's load
 * lifecycle to the `onMessage*` callbacks.
 */
class MessageAudio<TMessage extends IMessage> extends Component<MessageAudioProps<TMessage> & WithBunnyKit> {
static defaultProps = {
position: 'left' as PositionLeftOrRight,
currentMessage: undefined,
audioContainerStyle: {},
audioStyle: {},
audioProgressStyle: {},
audioProgressColor: undefined,
audioPlayButtonStyle: {},
audioPlayButtonIconStyle: {},
audioProps: {},
onMessageLoad: undefined,
onMessageLoadStart: undefined,
onMessageLoadEnd: undefined,
onMessageReadyForDisplay: undefined,
onMessageLoadError: undefined,
isDebug: false,
};
render() {
const {
audioContainerStyle,
audioProps,
audioStyle,
audioProgressStyle,
audioPlayButtonStyle,
currentMessage,
isDebug,
position,
audioProgressColor,
audioRemainTimeStyle,
audioPlayButtonIconStyle,
} = this.props;
isDebug && console.log('%c[ chat ]', 'background: #555; color: #bada55', '[level4]MessageAudio props', this.props);
const {bunnyKit: {sizeLabor, themeLabor, colors}} = this.props;
const styles = makeStyles(sizeLabor, themeLabor);
// Pick the left/right style sheet matching the bubble position.
const stylesEnsurePosition = styles[position];
return (
<View style={[stylesEnsurePosition.container, audioContainerStyle]}>
{
// Render the player only when a message with an audio URI exists;
// in debug mode missing data is surfaced as placeholder text.
currentMessage
? currentMessage.audio
? <AudioPlayer
style={[stylesEnsurePosition.audio, audioStyle]}
progressStyle={[stylesEnsurePosition.progress, audioProgressStyle]}
// NOTE(review): the theme color wins over the audioProgressColor
// prop here; if the prop is meant as an override, the || operands
// should be swapped. Confirm intended precedence.
progressColor={position === 'left' ? colors.backgroundABA || audioProgressColor?.left : colors.backgroundBBA || audioProgressColor?.right}
playButtonStyle={[stylesEnsurePosition.playButton, audioPlayButtonStyle]}
remainTimeStyle={[stylesEnsurePosition.remainTime, audioRemainTimeStyle]}
playButtonIconStyle={[stylesEnsurePosition.playButtonIcon, audioPlayButtonIconStyle]}
source={{uri: currentMessage.audio}}
onLoad={() => {
isDebug && console.log('%c[ chat ]', 'background: #555; color: #bada55', 'MessageAudio onLoad');
this.props.onMessageLoad?.(currentMessage);
isDebug && console.log('%c[ chat ]', 'background: #555; color: #bada55', 'MessageAudio onMessageReadyForDisplay');
this.props.onMessageReadyForDisplay?.(currentMessage);
}}
onLoadStart={() => {
isDebug && console.log('%c[ chat ]', 'background: #555; color: #bada55', 'MessageAudio onLoadStart');
this.props.onMessageLoadStart?.(currentMessage);
}}
onLoadEnd={() => {
isDebug && console.log('%c[ chat ]', 'background: #555; color: #bada55', 'MessageAudio onLoadEnd');
this.props.onMessageLoadEnd?.(currentMessage);
}}
onError={(e) => {
isDebug && console.log('%c[ chat ]', 'background: #555; color: #bada55', 'MessageAudio onError');
this.props.onMessageLoadError?.(e, currentMessage);
}}
// Spread last so callers can override any AudioPlayer prop above.
{...audioProps}
/>
: isDebug ? <Text>{'currentMessage.audio is undefined'}</Text> : null
: isDebug ? <Text>{'currentMessage is undefined'}</Text> : null
}
</View>
);
}
}
export default withBunnyKit(MessageAudio);
|
#!/bin/bash
# Runs ASR decoding/evaluation for a trained checkpoint with several
# decoding methods, one stage per method family.
#
# usage: <script> config_path ckpt_path_prefix
set -e

stage=0
stop_stage=50

. ${MAIN_ROOT}/utils/parse_options.sh || exit 1;

if [ $# != 2 ];then
    echo "usage: ${0} config_path ckpt_path_prefix"
    # "exit -1" is not portable (bash maps it to 255); use a plain failure code.
    exit 1
fi

# Number of visible GPUs = number of comma-separated entries.
ngpu=$(echo $CUDA_VISIBLE_DEVICES | awk -F "," '{print NF}')
echo "using $ngpu gpus..."

config_path=$1
ckpt_prefix=$2

# Streaming ("chunk") configs are detected from the config file name and
# force batch_size=1 for the non-beam-search methods below.
chunk_mode=false
if [[ ${config_path} =~ ^.*chunk_.*yaml$ ]];then
    chunk_mode=true
fi

# download language model
#bash local/download_lm_en.sh
#if [ $? -ne 0 ]; then
#   exit 1
#fi

# run_decode <decoding_method> <batch_size>
# Runs one decoding/evaluation pass, writing results to
# ${ckpt_prefix}.<method>.rsl. Exits the whole script on failure.
run_decode() {
    local type=$1
    local batch_size=$2
    echo "decoding ${type}"
    python3 -u ${BIN_DIR}/test.py \
    --ngpu ${ngpu} \
    --config ${config_path} \
    --result_file ${ckpt_prefix}.${type}.rsl \
    --checkpoint_path ${ckpt_prefix} \
    --opts decoding.decoding_method ${type} \
    --opts decoding.batch_size ${batch_size}
    if [ $? -ne 0 ]; then
        echo "Failed in evaluation!"
        exit 1
    fi
}

if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then
    for type in attention ctc_greedy_search; do
        if [ ${chunk_mode} == true ];then
            # stream decoding only support batchsize=1
            run_decode ${type} 1
        else
            run_decode ${type} 64
        fi
    done
fi

if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then
    # Beam-search style methods always run with batch_size=1.
    for type in ctc_prefix_beam_search; do
        run_decode ${type} 1
    done
fi

if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then
    for type in attention_rescoring; do
        run_decode ${type} 1
    done
fi

exit 0
|
package com.example.blockchainapp.Account;
import android.content.Context;
import android.os.Build;
import android.util.Log;
import androidx.annotation.RequiresApi;
import com.example.blockchainapp.Constants;
import com.example.blockchainapp.Transaction.Transaction;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.security.spec.X509EncodedKeySpec;
import java.util.Base64;
import java.security.KeyPairGenerator;
import java.security.KeyPair;
import java.security.SecureRandom;
import java.security.Signature;
import java.io.FileNotFoundException;
import java.security.Key;
import org.bouncycastle.util.encoders.Hex;
import org.json.JSONObject;
/**
 * Client-side RSA helper: key-pair generation, PEM persistence in the app's
 * private storage, and SHA256withRSA signing of payloads.
 */
public class RSAKey {

    /**
     * Generates a new 1024-bit RSA key pair.
     * NOTE(review): 1024-bit RSA is weak by current standards; move to 2048+
     * if the server side permits it.
     */
    public static KeyPair generateKeyPair() throws Exception {
        KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA");
        generator.initialize(1024, new SecureRandom());
        return generator.generateKeyPair();
    }

    /** Writes a single key to app storage as a PEM file via {@link PemFile}. */
    private static void writePemFile(Context context, Key key, String description, String filename)
            throws FileNotFoundException, IOException {
        PemFile pemFile = new PemFile(key, description);
        pemFile.write(context, filename);
    }

    /**
     * Persists both halves of {@code keyPair} as "&lt;name&gt;-public-key.pem" and
     * "&lt;name&gt;-private-key.pem". Best effort: failures are logged and
     * swallowed, matching the original contract.
     */
    public static void writePemFile(Context context, KeyPair keyPair, String name) {
        String publicPath = name + "-public-key.pem";
        String privatePath = name + "-private-key.pem";
        try {
            writePemFile(context, keyPair.getPrivate(), "PRIVATE KEY", privatePath);
            writePemFile(context, keyPair.getPublic(), "PUBLIC KEY", publicPath);
        } catch (Exception e) {
            System.out.println(e.toString());
        }
    }

    /**
     * Reads a key file from {@code Constants.RESOURCE_LOCATION} inside the
     * app's files dir and returns its full text with '\n' line endings.
     * Shared by the read/parse methods below (previously duplicated).
     */
    private static String readKeyFile(Context context, String fileName) throws IOException {
        File dir = new File(context.getFilesDir(), Constants.RESOURCE_LOCATION);
        BufferedReader bufferedReader = new BufferedReader(
                new InputStreamReader(new FileInputStream(dir.getAbsolutePath() + "/" + fileName)));
        try {
            StringBuilder sb = new StringBuilder();
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                sb.append(line);
                sb.append('\n');
            }
            return sb.toString();
        } finally {
            // Always release the underlying stream, even on read errors.
            bufferedReader.close();
        }
    }

    /** Strips PEM armor (BEGIN/END markers) and all line breaks from a key. */
    private static String stripPemArmor(String pem, String type) {
        return pem
                .replaceAll("\\n", "")
                .replaceAll("\r", "")
                .replace("-----BEGIN " + type + "-----", "")
                .replace("-----END " + type + "-----", "");
    }

    /** Parses a PKCS#8 private + X.509 public PEM pair from app storage. */
    @RequiresApi(api = Build.VERSION_CODES.O)
    private static KeyPair parseKey(Context context, String publicPath, String privatePath) throws Exception {
        String privateKeyContent = stripPemArmor(readKeyFile(context, privatePath), "PRIVATE KEY");
        String publicKeyContent = stripPemArmor(readKeyFile(context, publicPath), "PUBLIC KEY");
        KeyFactory kf = KeyFactory.getInstance("RSA");
        PrivateKey privKey = kf.generatePrivate(
                new PKCS8EncodedKeySpec(Base64.getDecoder().decode(privateKeyContent)));
        RSAPublicKey pubKey = (RSAPublicKey) kf.generatePublic(
                new X509EncodedKeySpec(Base64.getDecoder().decode(publicKeyContent)));
        return new KeyPair(pubKey, privKey);
    }

    /** Returns the raw PEM text of {@code publicPath} from app storage. */
    public static String readPublicKey(Context context, String publicPath) throws Exception {
        return readKeyFile(context, publicPath);
    }

    /** Returns the raw PEM text of {@code privatePath} from app storage. */
    public static String readPrivateKey(Context context, String privatePath) throws Exception {
        return readKeyFile(context, privatePath);
    }

    /**
     * Loads the key pair previously written by
     * {@link #writePemFile(Context, KeyPair, String)} for {@code name}.
     * Returns null (after logging) when the files are missing or invalid.
     */
    @RequiresApi(api = Build.VERSION_CODES.O)
    public static KeyPair parseKey(Context context, String name) {
        String publicPath = name + "-public-key.pem";
        String privatePath = name + "-private-key.pem";
        try {
            return parseKey(context, publicPath, privatePath);
        } catch (Exception e) {
            System.out.println(e.toString());
            return null;
        }
    }

    /** Test-only: loads a fixed key pair from classpath resources. */
    @RequiresApi(api = Build.VERSION_CODES.O)
    public static KeyPair parseKey() throws InvalidKeySpecException, NoSuchAlgorithmException, IOException, URISyntaxException {
        String privateKeyContent = new String(Files.readAllBytes(Paths.get(ClassLoader.getSystemResource("private_key_pkcs8.pem").toURI())));
        String publicKeyContent = new String(Files.readAllBytes(Paths.get(ClassLoader.getSystemResource("public_key.pem").toURI())));
        privateKeyContent = stripPemArmor(privateKeyContent, "PRIVATE KEY");
        publicKeyContent = stripPemArmor(publicKeyContent, "PUBLIC KEY");
        KeyFactory kf = KeyFactory.getInstance("RSA");
        PrivateKey privKey = kf.generatePrivate(
                new PKCS8EncodedKeySpec(Base64.getDecoder().decode(privateKeyContent)));
        RSAPublicKey pubKey = (RSAPublicKey) kf.generatePublic(
                new X509EncodedKeySpec(Base64.getDecoder().decode(publicKeyContent)));
        return new KeyPair(pubKey, privKey);
    }

    /**
     * Signs {@code text} with SHA256withRSA and returns the Base64-encoded
     * signature. The signed payload is the HEX encoding of the text's bytes
     * — presumably what the server expects; verify against the backend
     * before changing.
     */
    @RequiresApi(api = Build.VERSION_CODES.O)
    public static String sign(String text, PrivateKey privateKey) throws Exception {
        Signature privateSignature = Signature.getInstance("SHA256withRSA");
        privateSignature.initSign(privateKey);
        privateSignature.update(Hex.encode(text.getBytes()));
        byte[] signature = privateSignature.sign();
        return Base64.getEncoder().encodeToString(signature);
    }

    /**
     * Test-only counterpart of {@link #sign}. Fixed to hex-encode the
     * plaintext exactly as sign() does — previously it updated with the raw
     * bytes, so it could never validate a signature produced by sign().
     */
    @RequiresApi(api = Build.VERSION_CODES.O)
    private static boolean verify(String plainText, String signature, PublicKey publicKey) throws Exception {
        Signature publicSignature = Signature.getInstance("SHA256withRSA");
        publicSignature.initVerify(publicKey);
        publicSignature.update(Hex.encode(plainText.getBytes()));
        byte[] signatureBytes = Base64.getDecoder().decode(signature);
        return publicSignature.verify(signatureBytes);
    }

    // General signing helper kept for reference (unused):
    /*
    @RequiresApi(api = Build.VERSION_CODES.O)
    public static JSONObject sign(String plaintext, KeyPair keypair) throws Exception {
        JSONObject obj = new JSONObject();
        obj.put("msg", plaintext);
        obj.put("publicKey", keypair.getPublic().toString());
        obj.put("signature", sign(plaintext, keypair.getPrivate()));
        obj.put("algo", "SHA256withRSA");
        return obj;
    }
    */

    /** Manual smoke-test entry point; real flows run through the app. */
    public static void main(String[] args) {
        try {
            // KeyPair kp = generateKeyPair();
            // writePemFile(kp, "hung");
            // kp = parseKey("hung");
        } catch (Exception e) {
            System.out.println(e.toString());
        }
    }
}
|
<gh_stars>1-10
require 'versus'
# Core extension: adds a version-parsing convenience to every String.
class String
#
# Converts the String into a version number.
#
# @return [Version::Number] the version parsed by Version::Number.parse
#   (provided by the "versus" gem required above).
#
def to_version
Version::Number.parse(self)
end
end
|
import {fakeAsync, TestBed, tick} from '@angular/core/testing';
import {StorageWatcherService} from './storage-watcher.service';
import {EventBusService} from 'projects/event/src/lib/event-bus.service';
import {eventBusSpy} from 'projects/event/src/lib/event-bus.service.spec';
import {NodeCreatedEvent} from 'projects/storage/src/lib/events/node-created-event';
import {NodeDeletedEvent} from 'projects/storage/src/lib/events/node-deleted-event';
import {NotificationEvent} from 'projects/notification/src/lib/notification-event';
import {NodeModifiedEvent} from 'projects/storage/src/lib/events/node-modified-event';
import {testStorageDirectoryNode} from 'projects/storage/src/lib/entities/storage-node.spec';
import {QueryParamsToStringPipe} from 'projects/tools/src/lib/query-params-to-string.pipe';
import {StorageConfigurationService} from 'projects/storage/src/lib/storage-configuration.service';
import {storageConfigurationServiceSpy} from 'projects/storage/src/lib/storage-configuration.service.spec';
import {RetriesService} from 'projects/tools/src/lib/retries.service';
import {retriesServiceSpy} from 'projects/tools/src/lib/retries.service.spec';
import {DurationToStringPipe} from 'projects/date/src/lib/duration-to-string.pipe';
import {EventEmitter} from '@angular/core';
// Builds a jasmine test double for StorageWatcherService: a spied `watch`
// method plus a real `reconnected` emitter that tests can subscribe to.
export const storageWatcherServiceSpy = () => {
  const serviceSpy = jasmine.createSpyObj('StorageWatcherService', ['watch']);
  serviceSpy.reconnected = new EventEmitter<void>();
  return serviceSpy;
};
// Spec suite for StorageWatcherService: wires spy collaborators into the
// TestBed and checks that server node events are republished on the event
// bus and that the watcher reconnects after stream errors.
describe('StorageWatcherService', () => {
let service: StorageWatcherService;
let eventBus: EventBusService;
beforeEach(() => {
TestBed.configureTestingModule({
providers: [
{provide: StorageConfigurationService, useValue: storageConfigurationServiceSpy()},
{provide: EventBusService, useValue: eventBusSpy()},
{provide: RetriesService, useValue: retriesServiceSpy()},
StorageWatcherService,
QueryParamsToStringPipe,
DurationToStringPipe,
]
});
service = TestBed.get(StorageWatcherService);
eventBus = TestBed.get(EventBusService);
});
// Tear down the underlying subscription after each spec.
afterEach(() => {
service.ngOnDestroy();
});
it('should be created', () => {
expect(service).toBeTruthy();
});
// watch() should (re)create the subscription, unsubscribing a previous one.
it('should create an event source', () => {
service.watch();
expect(service._subscription).toBeTruthy();
const subscription = service._subscription = jasmine.createSpyObj('_subscription', ['unsubscribe']);
service.watch();
expect(subscription.unsubscribe).toHaveBeenCalled();
});
// Each server event type maps to its bus event; CREATE additionally resets
// the reconnection back-off.
it('should handle CREATE message', () => {
const node = testStorageDirectoryNode();
service.next({node, event: 'CREATE'});
expect(eventBus.publish).toHaveBeenCalledWith(new NodeCreatedEvent(node));
expect(service._retry.reset).toHaveBeenCalled();
});
it('should handle DELETE message', () => {
const node = testStorageDirectoryNode();
service.next({node, event: 'DELETE'});
expect(eventBus.publish).toHaveBeenCalledWith(new NodeDeletedEvent(node));
});
it('should handle MODIFY message', () => {
const node = testStorageDirectoryNode();
service.next({node, event: 'MODIFY'});
expect(eventBus.publish).toHaveBeenCalledWith(new NodeModifiedEvent(node));
});
// On stream error: publish a notification, then after the retry delay
// re-watch and emit `reconnected`.
it('should handle error', fakeAsync(() => {
const reconnected = spyOn(service.reconnected, 'emit');
const watch = spyOn(service, 'watch');
service.error(null);
expect(eventBus.publish).toHaveBeenCalledWith(jasmine.any(NotificationEvent));
tick(1000);
expect(watch).toHaveBeenCalled();
expect(reconnected).toHaveBeenCalled();
}));
// After destroy, completion still notifies but must not trigger reconnect.
it('should not handle error destroyed', fakeAsync(() => {
const reconnected = spyOn(service.reconnected, 'emit');
const watch = spyOn(service, 'watch');
service.ngOnDestroy();
service.complete();
expect(eventBus.publish).toHaveBeenCalledWith(jasmine.any(NotificationEvent));
tick(1000);
expect(watch).not.toHaveBeenCalled();
expect(reconnected).not.toHaveBeenCalled();
}));
});
|
#!/usr/bin/env -S bash ../.port_include.sh
# Port definition for GNU Bash 5.1.8, consumed by the ports framework that
# the shebang sources (.port_include.sh).
port=bash
version=5.1.8
useconfigure=true
# Build without native-language support and without bash's bundled malloc.
configopts=("--disable-nls" "--without-bash-malloc")
# Download spec: "<url> <local name> <checksum>"; verified per auth_type.
files="https://ftpmirror.gnu.org/gnu/bash/bash-${version}.tar.gz bash-${version}.tar.gz 0cfb5c9bb1a29f800a97bd242d19511c997a1013815b805e0fdd32214113d6be"
auth_type="sha256"
# Patch configure's feature detection before compiling: mark getcwd as not
# broken and getenv as not redefinable in config.h, then run make.
build() {
    local fixup
    for fixup in \
        "s/define GETCWD_BROKEN 1/undef GETCWD_BROKEN/" \
        "s/define CAN_REDEFINE_GETENV 1/undef CAN_REDEFINE_GETENV/"; do
        run_replace_in_file "$fixup" config.h
    done
    run make "${makeopts[@]}"
}
# Expose the installed bash at the conventional /bin/bash path inside the
# target root by symlinking to the real binary under /usr/local/bin.
post_install() {
    local bin_dir="${GELASSENHEIT_INSTALL_ROOT}/bin"
    mkdir -p "${bin_dir}"
    ln -sf /usr/local/bin/bash "${bin_dir}/bash"
}
|
# Program to calculate the mean of given numbers


def compute_mean(numbers):
    """Return the arithmetic mean of a non-empty sequence of numbers.

    Args:
        numbers: Sequence of ints/floats.

    Returns:
        The mean as a float.

    Raises:
        ValueError: if ``numbers`` is empty (the mean is undefined, and the
            previous version crashed with ZeroDivisionError here).
    """
    if not numbers:
        raise ValueError("cannot compute the mean of an empty sequence")
    return sum(numbers) / len(numbers)


def main():
    """Prompt for numbers on stdin and print their mean."""
    numbers = []
    # Ask the user how many numbers will be entered, then read each one.
    num = int(input("How many numbers you want to enter: "))
    for _ in range(num):
        numbers.append(float(input("Enter the number: ")))
    # Print the mean
    print("The mean of the numbers is", compute_mean(numbers))


if __name__ == "__main__":
    main()
#!/usr/bin/env bash
# Simulation launcher: links the prebuilt CXDT binary and compiled Xcelium
# library directory into the run area, then replays (-R) the elaborated
# snapshot under xrun.
# NOTE(review): assumes TEST_NAME is set in the environment (used only for
# the log file name) and that outputs/ already exists.
CXDT=`readlink -f inputs/CXDT.bin`
XCELIUMD=`readlink -f inputs/xcelium.d`
# Hard cycle limit forwarded to the testbench via plusarg.
MAX_CYCLE="20000000"
ln -s ${CXDT} CXDT.bin
ln -s ${XCELIUMD} xcelium.d
# Create an empty program image — presumably required to exist by the bench.
touch image.hex
# Batch commands for the simulator: run to completion, then exit.
echo "run" > commands.tcl
echo "exit" >> commands.tcl
xrun +MAX_CYCLE=${MAX_CYCLE} -R -input commands.tcl | tee outputs/xrun_run_${TEST_NAME}.log
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.