text stringlengths 1 1.05M |
|---|
<reponame>mengxiangru/jimu-mybatis-generator
/**
* Copyright 2006-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.generator.plugins.ext;
import freemarker.template.Configuration;
import freemarker.template.Template;
import freemarker.template.TemplateException;
import org.mybatis.generator.api.*;
import org.mybatis.generator.api.dom.DefaultJavaFormatter;
import org.mybatis.generator.api.dom.java.*;
import org.mybatis.generator.internal.PluginAggregator;
import org.springframework.core.io.DefaultResourceLoader;
import java.io.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Created by xiangru.meng on 2016/6/6.
*/
public class ControllerJavaFilesPlugin extends PluginAdapter {
private static final String KEY_OF_ROOT_CLASS = "rootClass";
private static final String KEY_OF_TARGET_PROJECT = "targetProject";
private static final String KEY_OF_TARGET_PACKAGE = "targetPackage";
String rootClass;
String targetProject;
String targetPackage;
@Override
public boolean validate(List<String> warnings) {
rootClass = properties.getProperty(KEY_OF_ROOT_CLASS);
targetProject = properties.getProperty(KEY_OF_TARGET_PROJECT);
targetPackage = properties.getProperty(KEY_OF_TARGET_PACKAGE);
return true;
}
@Override
public List<GeneratedJavaFile> contextGenerateAdditionalJavaFiles(IntrospectedTable introspectedTable) {
ArrayList<GeneratedJavaFile> retn = new ArrayList<GeneratedJavaFile>();
String daoBasePackage = this.getContext().getJavaClientGeneratorConfiguration().getTargetPackage();
String domainObjectName = introspectedTable.getTableConfiguration().getDomainObjectName();
String DomainObjectNameWithLower = new StringBuilder().append(Character.toLowerCase(domainObjectName.charAt(0))).append(domainObjectName.substring(1)).toString();
String serviceBasePackage = daoBasePackage;
String vmmodelBasePackage = daoBasePackage;
for (Plugin plugin : ((PluginAggregator) this.getContext().getPlugins()).getPlugins()) {
if (plugin instanceof ServiceJavaFilesPlugin) {
serviceBasePackage = ((ServiceJavaFilesPlugin) plugin).targetPackage;
}
if (plugin instanceof VmModelJavaFilesPlugin) {
vmmodelBasePackage = ((VmModelJavaFilesPlugin) plugin).targetPackage;
}
}
//controller
TopLevelClass controllerClass = getControllerClass(introspectedTable, targetPackage, serviceBasePackage, vmmodelBasePackage, domainObjectName, DomainObjectNameWithLower);
GeneratedJavaFile controllerClassJavaFile = new GeneratedJavaFile(controllerClass, targetProject, new DefaultJavaFormatter());
retn.add(controllerClassJavaFile);
return retn;
}
private TopLevelClass getControllerClass(IntrospectedTable introspectedTable, String targetPackage, String serviceBasePackage, String vmmodelBasePackage, String domainObjectName, String domainObjectNameWithLower) {
String domainBasePackage = this.getContext().getJavaModelGeneratorConfiguration().getTargetPackage();
//class
TopLevelClass cls = new TopLevelClass(targetPackage + "." + domainObjectName + "Controller");
cls.setVisibility(JavaVisibility.PUBLIC);
cls.addAnnotation("@Controller");
cls.addAnnotation(String.format("@RequestMapping(value=\"/%s\")", domainObjectNameWithLower));
cls.addImportedType("org.springframework.stereotype.Controller");
cls.addImportedType("javax.annotation.Resource");
cls.addImportedType("org.springframework.web.bind.annotation.RequestMapping");
cls.addImportedType("org.springframework.web.bind.annotation.RequestMethod");
cls.addImportedType("org.springframework.web.bind.annotation.RequestParam");
cls.addImportedType("org.springframework.web.bind.annotation.PathVariable");
cls.addImportedType("org.springframework.validation.BindingResult");
cls.addImportedType("org.springframework.ui.ModelMap");
cls.addImportedType("org.springframework.ui.Model");
cls.addImportedType("com.github.pagehelper.Page");
cls.addImportedType("com.github.pagehelper.PageHelper");
cls.addImportedType("com.google.common.base.Function");
cls.addImportedType("com.google.common.collect.FluentIterable");
cls.addImportedType("com.google.common.collect.ImmutableSet");
cls.addImportedType("java.util.HashMap");
cls.addImportedType("java.util.List");
cls.addImportedType("java.util.Map");
cls.addImportedType("javax.validation.Valid");
cls.addImportedType("com.jimubox.auth.shiro.ext.CurrentUser");
cls.addImportedType("com.jimubox.auth.shiro.model.InnerUser");
cls.addImportedType("com.jimubox.tools.view.NotificationTips");
cls.addImportedType("com.jimubox.tools.view.ViewModelNumberPager");
cls.addImportedType(String.format("%s.%sService", serviceBasePackage, domainObjectName));
cls.addImportedType(String.format("%s.Vm%s", vmmodelBasePackage, domainObjectName));
cls.addImportedType(String.format("%s.%s", domainBasePackage, domainObjectName));
if (rootClass != null) {
FullyQualifiedJavaType superClass = new FullyQualifiedJavaType(rootClass);
cls.setSuperClass(superClass);
cls.addImportedType(superClass);
}
//fields
Field serviceField = new Field();
serviceField.addAnnotation(String.format("@Resource(name=\"%sService\")", domainObjectNameWithLower));
serviceField.setVisibility(JavaVisibility.PRIVATE);
serviceField.setType(new FullyQualifiedJavaType(String.format("%sService", domainObjectName)));
serviceField.setName(String.format("%sService", domainObjectNameWithLower));
cls.addField(serviceField);
Configuration cfg = new Configuration();
Map root = new HashMap();
root.put("domainObjectName", domainObjectName);
root.put("domainObjectNameWithLower", domainObjectNameWithLower);
SpringTemplateLoader springTemplateLoader = new SpringTemplateLoader(new DefaultResourceLoader(), "classpath:/org/mybatis/generator/freemarker/controller/");
cfg.setTemplateLoader(springTemplateLoader);
//search method
Method search = new Method();
search.addAnnotation("@RequestMapping(value = \"/search\")");
search.setVisibility(JavaVisibility.PUBLIC);
search.setName(String.format("search%s", domainObjectName));
search.setReturnType(FullyQualifiedJavaType.getStringInstance());
search.addParameter(new Parameter(FullyQualifiedJavaType.getIntInstance(), "page", "@RequestParam(value = \"page\", defaultValue = \"1\")"));
search.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), "parameter", "@RequestParam(value = \"parameter\")"));
search.addParameter(new Parameter(new FullyQualifiedJavaType("ModelMap"), "model"));
search.addParameter(new Parameter(new FullyQualifiedJavaType("InnerUser"), "innerUser", "@CurrentUser"));
try {
ByteArrayOutputStream out = new ByteArrayOutputStream();
Writer writer = new OutputStreamWriter(out, "UTF-8");
Template template = cfg.getTemplate("Search.ftl");
template.process(root, writer);
search.addBodyLine(out.toString("UTF-8"));
} catch (TemplateException e) {
e.printStackTrace();
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
cls.addMethod(search);
//list method
Method list = new Method();
list.addAnnotation("@RequestMapping(value = \"/list\")");
list.setVisibility(JavaVisibility.PUBLIC);
list.setName(String.format("%sList", domainObjectNameWithLower));
list.setReturnType(FullyQualifiedJavaType.getStringInstance());
list.addParameter(new Parameter(FullyQualifiedJavaType.getIntInstance(), "page", "@RequestParam(value = \"page\", defaultValue = \"1\")"));
list.addParameter(new Parameter(new FullyQualifiedJavaType("ModelMap"), "model"));
list.addParameter(new Parameter(new FullyQualifiedJavaType("InnerUser"), "innerUser", "@CurrentUser"));
try {
ByteArrayOutputStream out = new ByteArrayOutputStream();
Writer writer = new OutputStreamWriter(out, "UTF-8");
Template template = cfg.getTemplate("List.ftl");
template.process(root, writer);
list.addBodyLine(out.toString("UTF-8"));
} catch (TemplateException e) {
e.printStackTrace();
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
cls.addMethod(list);
//addGet method
Method addGet = new Method();
addGet.addAnnotation(String.format("@RequestMapping(value = \"/add%s\", method = RequestMethod.GET)", domainObjectName));
addGet.setVisibility(JavaVisibility.PUBLIC);
addGet.setName(String.format("add%s", domainObjectName));
addGet.setReturnType(FullyQualifiedJavaType.getStringInstance());
addGet.addParameter(new Parameter(new FullyQualifiedJavaType("ModelMap"), "model"));
addGet.addParameter(new Parameter(new FullyQualifiedJavaType("InnerUser"), "innerUser", "@CurrentUser"));
try {
ByteArrayOutputStream out = new ByteArrayOutputStream();
Writer writer = new OutputStreamWriter(out, "UTF-8");
Template template = cfg.getTemplate("AddGet.ftl");
template.process(root, writer);
addGet.addBodyLine(out.toString("UTF-8"));
} catch (TemplateException e) {
e.printStackTrace();
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
cls.addMethod(addGet);
//addPost method
Method addPost = new Method();
addPost.addAnnotation(String.format("@RequestMapping(value = \"/add%s\", method = RequestMethod.POST)", domainObjectName));
addPost.setVisibility(JavaVisibility.PUBLIC);
addPost.setName(String.format("add%s", domainObjectName));
addPost.setReturnType(FullyQualifiedJavaType.getStringInstance());
addPost.addParameter(new Parameter(new FullyQualifiedJavaType(String.format("Vm%s", domainObjectName)), String.format("vm%s", domainObjectName), "@Valid"));
addPost.addParameter(new Parameter(new FullyQualifiedJavaType("BindingResult"), "result"));
addPost.addParameter(new Parameter(new FullyQualifiedJavaType("InnerUser"), "innerUser", "@CurrentUser"));
try {
ByteArrayOutputStream out = new ByteArrayOutputStream();
Writer writer = new OutputStreamWriter(out, "UTF-8");
Template template = cfg.getTemplate("AddPost.ftl");
template.process(root, writer);
addPost.addBodyLine(out.toString("UTF-8"));
} catch (TemplateException e) {
e.printStackTrace();
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
cls.addMethod(addPost);
//editGet method
Method editGet = new Method();
editGet.addAnnotation(String.format("@RequestMapping(value = \"/edit%s\", method = RequestMethod.GET)", domainObjectName));
editGet.setVisibility(JavaVisibility.PUBLIC);
editGet.setName(String.format("edit%s", domainObjectName));
editGet.setReturnType(FullyQualifiedJavaType.getStringInstance());
if (introspectedTable.hasPrimaryKeyColumns()) {
List<IntrospectedColumn> primaryKeyColumns = introspectedTable.getPrimaryKeyColumns();
if (primaryKeyColumns.size() > 1) {
editGet.addParameter(new Parameter(new FullyQualifiedJavaType(new FullyQualifiedJavaType(introspectedTable.getPrimaryKeyType()).getShortName()), "key"));
cls.addImportedType(introspectedTable.getPrimaryKeyType());
} else {
editGet.addParameter(new Parameter(new FullyQualifiedJavaType(primaryKeyColumns.get(0).getFullyQualifiedJavaType().getShortName()), "key", "@RequestParam(name = \"" + primaryKeyColumns.get(0).getJavaProperty() + "\")"));
}
}
editGet.addParameter(new Parameter(new FullyQualifiedJavaType("ModelMap"), "model"));
editGet.addParameter(new Parameter(new FullyQualifiedJavaType("InnerUser"), "innerUser", "@CurrentUser"));
try {
ByteArrayOutputStream out = new ByteArrayOutputStream();
Writer writer = new OutputStreamWriter(out, "UTF-8");
Template template = cfg.getTemplate("EditGet.ftl");
template.process(root, writer);
editGet.addBodyLine(out.toString("UTF-8"));
} catch (TemplateException e) {
e.printStackTrace();
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
cls.addMethod(editGet);
//editPost method
Method editPost = new Method();
editPost.addAnnotation(String.format("@RequestMapping(value = \"/edit%s\", method = RequestMethod.POST)", domainObjectName));
editPost.setVisibility(JavaVisibility.PUBLIC);
editPost.setName(String.format("edit%s", domainObjectName));
editPost.setReturnType(FullyQualifiedJavaType.getStringInstance());
editPost.addParameter(new Parameter(new FullyQualifiedJavaType(String.format("Vm%s", domainObjectName)), String.format("vm%s", domainObjectName), "@Valid"));
if (introspectedTable.hasPrimaryKeyColumns()) {
List<IntrospectedColumn> primaryKeyColumns = introspectedTable.getPrimaryKeyColumns();
if (primaryKeyColumns.size() > 1) {
editPost.addParameter(new Parameter(new FullyQualifiedJavaType(new FullyQualifiedJavaType(introspectedTable.getPrimaryKeyType()).getShortName()), "key"));
} else {
editPost.addParameter(new Parameter(new FullyQualifiedJavaType(primaryKeyColumns.get(0).getFullyQualifiedJavaType().getShortName()), "key", "@RequestParam(name = \"" + primaryKeyColumns.get(0).getJavaProperty() + "\")"));
}
}
editPost.addParameter(new Parameter(new FullyQualifiedJavaType("BindingResult"), "result"));
editPost.addParameter(new Parameter(new FullyQualifiedJavaType("InnerUser"), "innerUser", "@CurrentUser"));
try {
ByteArrayOutputStream out = new ByteArrayOutputStream();
Writer writer = new OutputStreamWriter(out, "UTF-8");
Template template = cfg.getTemplate("EditPost.ftl");
template.process(root, writer);
editPost.addBodyLine(out.toString("UTF-8"));
} catch (TemplateException e) {
e.printStackTrace();
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
cls.addMethod(editPost);
//delete method
Method delete = new Method();
delete.addAnnotation(String.format("@RequestMapping(value = \"/delete%s\", method = RequestMethod.GET)", domainObjectName));
delete.setVisibility(JavaVisibility.PUBLIC);
delete.setName(String.format("delete%s", domainObjectName));
delete.setReturnType(FullyQualifiedJavaType.getStringInstance());
if (introspectedTable.hasPrimaryKeyColumns()) {
List<IntrospectedColumn> primaryKeyColumns = introspectedTable.getPrimaryKeyColumns();
if (primaryKeyColumns.size() > 1) {
delete.addParameter(new Parameter(new FullyQualifiedJavaType(new FullyQualifiedJavaType(introspectedTable.getPrimaryKeyType()).getShortName()), "key"));
} else {
delete.addParameter(new Parameter(new FullyQualifiedJavaType(primaryKeyColumns.get(0).getFullyQualifiedJavaType().getShortName()), "key", "@RequestParam(name = \"" + primaryKeyColumns.get(0).getJavaProperty() + "\")"));
}
}
delete.addParameter(new Parameter(new FullyQualifiedJavaType("ModelMap"), "model"));
delete.addParameter(new Parameter(new FullyQualifiedJavaType("InnerUser"), "innerUser", "@CurrentUser"));
try {
ByteArrayOutputStream out = new ByteArrayOutputStream();
Writer writer = new OutputStreamWriter(out, "UTF-8");
Template template = cfg.getTemplate("Delete.ftl");
template.process(root, writer);
delete.addBodyLine(out.toString("UTF-8"));
} catch (TemplateException e) {
e.printStackTrace();
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
cls.addMethod(delete);
return cls;
}
} |
# Read the rectangle's dimensions from the user as floating-point numbers.
length = float(input("Enter the length of the rectangle: "))
width = float(input("Enter the width of the rectangle: "))

# The area of a rectangle is its length multiplied by its width.
area = length * width
print("The area of the rectangle is:", area)
<filename>src/components/utils/extract-language-from-filename.ts
import { Language } from 'prism-react-renderer';
/**
 * Derive the syntax-highlighting language from a filename.
 * The extension is whatever follows the final dot; a filename with no dot
 * is returned unchanged (same behaviour as the original implementation).
 */
export const extractLanguageFromFilename = (filename = ''): Language => {
  const segments = filename.split('.');
  return segments[segments.length - 1] as Language;
};
|
#!/bin/bash
#
# Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Abort on the first failing command.
set -e

# Shared helpers plus provider-specific configuration.
source hack/common.sh
source cluster/$KUBEVIRT_PROVIDER/provider.sh
source hack/config.sh
source ./cluster/gocli.sh

# Discover the registry port exposed by the local cluster provider
# (strip the trailing carriage return gocli may emit) and push the
# freshly built images to that registry.
registry_port=$($gocli --prefix $provider_prefix ports registry | tr -d '\r')
registry=localhost:$registry_port
REGISTRY=$registry make docker-build
REGISTRY=$registry make docker-push

# Select the manifest matching the provider flavour; any other provider
# prefix is unsupported.
if [[ $KUBEVIRT_PROVIDER == "k8s-"* ]]; then
    ovs_cni_manifest="./cluster/examples/kubernetes-ovs-cni.yml"
elif [[ $KUBEVIRT_PROVIDER == "os-"* ]]; then
    ovs_cni_manifest="./cluster/examples/openshift-ovs-cni.yml"
else
    exit 1
fi

# Tear down any previous deployment of the manifest and daemonset.
./cluster/kubectl.sh delete --ignore-not-found -f $ovs_cni_manifest
./cluster/kubectl.sh -n kube-system delete --ignore-not-found ds ovs-cni-plugin-amd64

# Wait until all objects are deleted
until [[ $(./cluster/kubectl.sh get --ignore-not-found -f $ovs_cni_manifest 2>&1 | wc -l) -eq 0 ]]; do sleep 1; done
until [[ $(./cluster/kubectl.sh get --ignore-not-found ds ovs-cni-plugin-amd64 2>&1 | wc -l) -eq 0 ]]; do sleep 1; done

# Deploy the freshly built OVS CNI.
./cluster/kubectl.sh create -f $ovs_cni_manifest
|
// Define the StackJob struct: an operation paired with the latch that
// signals its acceptance/completion to a waiting party.
struct StackJob<T> {
    // The wrapped operation (or its result, once executed).
    op: T,
    // Latch released when the job has been handed off.
    latch: LockLatch,
}
impl<T> StackJob<T> {
    // Create a new StackJob with the given operation and a new LockLatch
    fn new(op: T, latch: LockLatch) -> Self {
        StackJob { op, latch }
    }
    // Get a reference to the job (identity borrow; kept for API parity
    // with rayon-style job-reference conversions).
    fn as_job_ref(&self) -> &StackJob<T> {
        self
    }
    // Access the job's latch
    fn latch(&self) -> &LockLatch {
        &self.latch
    }
    // Retrieve the result of the job, consuming it.
    fn into_result(self) -> T {
        self.op
    }
}
// Define the LockLatch struct.
struct LockLatch {
    // Atomic flag representing the latch state (`true` = locked).
    // A plain `bool` here would be a data race: `wait` spins reading it
    // while another party calls `release`, and a non-atomic read/write pair
    // across threads is undefined behaviour. The atomic also lets `release`
    // take `&self`, so it can be called through the shared reference
    // returned by `StackJob::latch()`.
    locked: std::sync::atomic::AtomicBool,
}

impl LockLatch {
    // Create a new LockLatch with the initial state set to locked.
    fn new() -> Self {
        LockLatch {
            locked: std::sync::atomic::AtomicBool::new(true),
        }
    }

    // Spin until the latch is released. Acquire ordering pairs with the
    // Release store in `release`, so writes made before releasing are
    // visible to the waiter.
    fn wait(&self) {
        while self.locked.load(std::sync::atomic::Ordering::Acquire) {
            std::hint::spin_loop();
        }
    }

    // Release the latch. Takes `&self` (interior mutability) so holders of
    // a shared reference can release it.
    fn release(&self) {
        self.locked.store(false, std::sync::atomic::Ordering::Release);
    }
}
// Define the JobRegistry struct
struct JobRegistry {
// Use a vector to store the injected jobs
jobs: Vec<StackJob<()>>,
}
impl JobRegistry {
// Inject a job into the registry
fn inject(&mut self, job: &StackJob<()>) {
self.jobs.push(job.clone());
job.latch().release(); // Release the latch after injecting the job
}
} |
#!/bin/bash
# Convert every LaTeX source under ./src-tex_epub to HTML in ./src-html,
# then strip the <blockquote> wrappers pandoc emits.
for i in ./src-tex_epub/*.tex
do
    # Output path: same basename with ".html" appended.
    # Variables are quoted so filenames containing whitespace survive;
    # $(...) replaces the legacy backtick substitution.
    outfile="./src-html/$(basename "$i").html"
    echo "$i"
    pandoc --smart --normalize -f latex -t html -o "$outfile" "$i"
    # Remove both opening and closing blockquote tags in place.
    sed -i 's/<\/*blockquote>//g' "$outfile"
done
|
<reponame>CharlyEstudio/2fa<filename>src/totp/strategy/totp.strategy.ts<gh_stars>0
import { Injectable, UnauthorizedException } from '@nestjs/common';
import { Strategy } from "passport-local";
import { PassportStrategy } from '@nestjs/passport';
import { AuthService } from "../../auth/auth.service";
@Injectable()
export class TotpStrategy extends PassportStrategy(Strategy) {
  constructor(private authService: AuthService) {
    super();
  }

  /**
   * Validate the TOTP step for the given user identifier.
   * Throws UnauthorizedException (HTTP 401) when the auth service does not
   * recognise the user; otherwise resolves to the user record.
   */
  async validate(user: string) {
    const found = await this.authService.validateUserTotp(user);
    if (!found) {
      throw new UnauthorizedException();
    }
    return found;
  }
}
|
<reponame>gemini133/mango<gh_stars>1-10
/*
MANGO Multimedia Development Platform
Copyright (C) 2012-2020 Twilight Finland 3D Oy Ltd. All rights reserved.
*/
#pragma once
#include <mango/math/vector.hpp>
namespace mango
{
    // Vector specialization: sixteen signed 8-bit lanes held in a single
    // 128-bit SIMD value (simd::s8x16).
    template <>
    struct Vector<s8, 16>
    {
        using VectorType = simd::s8x16;
        using ScalarType = s8;
        enum { VectorSize = 16 };

        // The SIMD payload. It is the struct's only state, which is what
        // allows data() to reinterpret `this` as an array of lanes.
        VectorType m;

        // Mutable per-lane access (bounds checked in debug builds only).
        ScalarType& operator [] (size_t index)
        {
            assert(index < VectorSize);
            return data()[index];
        }

        // Read-only per-lane access (bounds checked in debug builds only).
        ScalarType operator [] (size_t index) const
        {
            assert(index < VectorSize);
            return data()[index];
        }

        ScalarType* data()
        {
            return reinterpret_cast<ScalarType *>(this);
        }

        const ScalarType* data() const
        {
            return reinterpret_cast<const ScalarType *>(this);
        }

        // Default constructor intentionally leaves lanes uninitialized.
        explicit Vector() {}

        // Broadcast a single scalar to all sixteen lanes.
        Vector(s8 s)
            : m(simd::s8x16_set(s))
        {
        }

        // Component-wise construction, lane 0 first.
        Vector(s8 v0, s8 v1, s8 v2, s8 v3, s8 v4, s8 v5, s8 v6, s8 v7, s8 v8, s8 v9, s8 v10, s8 v11, s8 v12, s8 v13, s8 v14, s8 v15)
            : m(simd::s8x16_set(v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15))
        {
        }

        // Wrap an existing SIMD value.
        Vector(simd::s8x16 v)
            : m(v)
        {
        }

        // Assign from a scalar accessor (swizzle element): convert to the
        // scalar type first, then reuse the scalar broadcast assignment.
        template <typename T, int I>
        Vector& operator = (const ScalarAccessor<ScalarType, T, I>& accessor)
        {
            *this = ScalarType(accessor);
            return *this;
        }

        Vector(const Vector& v) = default;

        Vector& operator = (const Vector& v)
        {
            m = v.m;
            return *this;
        }

        Vector& operator = (simd::s8x16 v)
        {
            m = v;
            return *this;
        }

        // Broadcast-assign a scalar to all lanes.
        Vector& operator = (s8 s)
        {
            m = simd::s8x16_set(s);
            return *this;
        }

        // Implicit conversion to the underlying SIMD type, so vectors can be
        // passed straight to the simd:: free functions below.
        operator simd::s8x16 () const
        {
            return m;
        }

#ifdef int128_is_hardware_vector
        // Expose the raw hardware vector when the target has one.
        operator simd::s8x16::vector () const
        {
            return m.data;
        }
#endif

        // Lanes 0, 1, 2, ... 15 in ascending order.
        static Vector ascend()
        {
            return Vector(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
        }
    };
    // ------------------------------------------------------------------
    // load / store
    // ------------------------------------------------------------------

    // Load the low half of the vector from memory (remaining lanes are
    // whatever simd::s8x16_load_low produces for them).
    template <>
    inline Vector<s8, 16> load_low<s8, 16>(const s8 *source)
    {
        return simd::s8x16_load_low(source);
    }

    // Store the low half of the vector to memory.
    static inline void store_low(s8 *dest, Vector<s8, 16> v)
    {
        simd::s8x16_store_low(dest, v);
    }

    // ------------------------------------------------------------------
    // arithmetic operators (thin forwarders to the simd:: backend)
    // ------------------------------------------------------------------

    // Unary plus: identity.
    static inline const Vector<s8, 16> operator + (Vector<s8, 16> v)
    {
        return v;
    }

    static inline Vector<s8, 16> operator - (Vector<s8, 16> v)
    {
        return simd::neg(v);
    }

    static inline Vector<s8, 16>& operator += (Vector<s8, 16>& a, Vector<s8, 16> b)
    {
        a = simd::add(a, b);
        return a;
    }

    static inline Vector<s8, 16>& operator -= (Vector<s8, 16>& a, Vector<s8, 16> b)
    {
        a = simd::sub(a, b);
        return a;
    }

    static inline Vector<s8, 16> operator + (Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::add(a, b);
    }

    static inline Vector<s8, 16> operator - (Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::sub(a, b);
    }

    // Interleave the low / high halves of a and b.
    static inline Vector<s8, 16> unpacklo(Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::unpacklo(a, b);
    }

    static inline Vector<s8, 16> unpackhi(Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::unpackhi(a, b);
    }

    // ------------------------------------------------------------------
    // masked / saturating operations
    // The (..., mask) overloads compute the operation only in lanes where
    // the mask is set; the (..., mask, value) overloads take unselected
    // lanes from `value` instead.
    // ------------------------------------------------------------------

    static inline Vector<s8, 16> abs(Vector<s8, 16> a)
    {
        return simd::abs(a);
    }

    static inline Vector<s8, 16> abs(Vector<s8, 16> a, mask8x16 mask)
    {
        return simd::abs(a, mask);
    }

    static inline Vector<s8, 16> abs(Vector<s8, 16> a, mask8x16 mask, Vector<s8, 16> value)
    {
        return simd::abs(a, mask, value);
    }

    static inline Vector<s8, 16> add(Vector<s8, 16> a, Vector<s8, 16> b, mask8x16 mask)
    {
        return simd::add(a, b, mask);
    }

    static inline Vector<s8, 16> add(Vector<s8, 16> a, Vector<s8, 16> b, mask8x16 mask, Vector<s8, 16> value)
    {
        return simd::add(a, b, mask, value);
    }

    static inline Vector<s8, 16> sub(Vector<s8, 16> a, Vector<s8, 16> b, mask8x16 mask)
    {
        return simd::sub(a, b, mask);
    }

    static inline Vector<s8, 16> sub(Vector<s8, 16> a, Vector<s8, 16> b, mask8x16 mask, Vector<s8, 16> value)
    {
        return simd::sub(a, b, mask, value);
    }

    // adds / subs: saturating add and subtract (clamped at the s8 range).
    static inline Vector<s8, 16> adds(Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::adds(a, b);
    }

    static inline Vector<s8, 16> adds(Vector<s8, 16> a, Vector<s8, 16> b, mask8x16 mask)
    {
        return simd::adds(a, b, mask);
    }

    static inline Vector<s8, 16> adds(Vector<s8, 16> a, Vector<s8, 16> b, mask8x16 mask, Vector<s8, 16> value)
    {
        return simd::adds(a, b, mask, value);
    }

    static inline Vector<s8, 16> subs(Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::subs(a, b);
    }

    static inline Vector<s8, 16> subs(Vector<s8, 16> a, Vector<s8, 16> b, mask8x16 mask)
    {
        return simd::subs(a, b, mask);
    }

    static inline Vector<s8, 16> subs(Vector<s8, 16> a, Vector<s8, 16> b, mask8x16 mask, Vector<s8, 16> value)
    {
        return simd::subs(a, b, mask, value);
    }

    // ------------------------------------------------------------------
    // min / max / clamp (lane-wise)
    // ------------------------------------------------------------------

    static inline Vector<s8, 16> min(Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::min(a, b);
    }

    static inline Vector<s8, 16> min(Vector<s8, 16> a, Vector<s8, 16> b, mask8x16 mask)
    {
        return simd::min(a, b, mask);
    }

    static inline Vector<s8, 16> min(Vector<s8, 16> a, Vector<s8, 16> b, mask8x16 mask, Vector<s8, 16> value)
    {
        return simd::min(a, b, mask, value);
    }

    static inline Vector<s8, 16> max(Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::max(a, b);
    }

    static inline Vector<s8, 16> max(Vector<s8, 16> a, Vector<s8, 16> b, mask8x16 mask)
    {
        return simd::max(a, b, mask);
    }

    static inline Vector<s8, 16> max(Vector<s8, 16> a, Vector<s8, 16> b, mask8x16 mask, Vector<s8, 16> value)
    {
        return simd::max(a, b, mask, value);
    }

    static inline Vector<s8, 16> clamp(Vector<s8, 16> a, Vector<s8, 16> low, Vector<s8, 16> high)
    {
        return simd::clamp(a, low, high);
    }

    // ------------------------------------------------------------------
    // bitwise operators
    // ------------------------------------------------------------------

    // nand(a, b) = ~a & b (the backend's bitwise_nand convention).
    static inline Vector<s8, 16> nand(Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::bitwise_nand(a, b);
    }

    static inline Vector<s8, 16> operator & (Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::bitwise_and(a, b);
    }

    static inline Vector<s8, 16> operator | (Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::bitwise_or(a, b);
    }

    static inline Vector<s8, 16> operator ^ (Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::bitwise_xor(a, b);
    }

    static inline Vector<s8, 16> operator ~ (Vector<s8, 16> a)
    {
        return simd::bitwise_not(a);
    }

    // ------------------------------------------------------------------
    // compare / select
    // Comparisons yield a lane mask; select picks from a where the mask is
    // set and from b elsewhere.
    // ------------------------------------------------------------------

    static inline mask8x16 operator > (Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::compare_gt(a, b);
    }

    static inline mask8x16 operator >= (Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::compare_ge(a, b);
    }

    static inline mask8x16 operator < (Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::compare_lt(a, b);
    }

    static inline mask8x16 operator <= (Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::compare_le(a, b);
    }

    static inline mask8x16 operator == (Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::compare_eq(a, b);
    }

    static inline mask8x16 operator != (Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::compare_neq(a, b);
    }

    static inline Vector<s8, 16> select(mask8x16 mask, Vector<s8, 16> a, Vector<s8, 16> b)
    {
        return simd::select(mask, a, b);
    }

} // namespace mango
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.modify;
import java.util.concurrent.atomic.AtomicLong ;
import org.apache.jena.atlas.iterator.Iter ;
import org.apache.jena.atlas.junit.BaseTest ;
import org.apache.jena.graph.Node ;
import org.apache.jena.graph.NodeFactory ;
import org.apache.jena.rdf.model.Model ;
import org.apache.jena.rdf.model.ModelFactory ;
import org.apache.jena.rdf.model.RDFNode ;
import org.apache.jena.rdf.model.Resource ;
import org.apache.jena.sparql.core.DatasetGraph ;
import org.apache.jena.sparql.core.DatasetGraphFactory ;
import org.apache.jena.sparql.core.DatasetGraphWrapper ;
import org.apache.jena.sparql.core.Quad ;
import org.apache.jena.sparql.sse.SSE ;
import org.apache.jena.update.* ;
import org.apache.jena.vocabulary.OWL ;
import org.apache.jena.vocabulary.RDF ;
import org.junit.Test ;
// Most of the testing of SPARQL Update is scripts and uses the SPARQL-WG test suite.
// Here are a few additional tests
public class TestUpdateOperations extends BaseTest
{
    private static final String DIR = "testing/Update" ;

    /** A fresh, empty in-memory dataset for each test. */
    private DatasetGraph graphStore() { return DatasetGraphFactory.createMem() ; }

    private Node gName = SSE.parseNode("<http://example/g>") ;

    /** LOAD of a triples file populates only the default graph. */
    @Test public void load1() {
        DatasetGraph gs = graphStore() ;
        UpdateRequest req = UpdateFactory.create("LOAD <"+DIR+"/D.nt>") ;
        UpdateAction.execute(req, gs) ;
        assertEquals(1, gs.getDefaultGraph().size()) ;
        assertFalse( gs.listGraphNodes().hasNext()) ;
    }

    /** LOAD of a triples file INTO a named graph executes without error. */
    @Test public void load2() {
        DatasetGraph gs = graphStore() ;
        UpdateRequest req = UpdateFactory.create("LOAD <"+DIR+"/D.nt> INTO GRAPH <"+gName.getURI()+">") ;
        UpdateAction.execute(req, gs) ;
    }

    // Quad loading
    /** LOAD of a quads file populates the graphs named in the data, not the default graph. */
    @Test public void load3() {
        DatasetGraph gs = graphStore() ;
        UpdateRequest req = UpdateFactory.create("LOAD <"+DIR+"/D.nq>") ;
        UpdateAction.execute(req, gs) ;
        assertEquals(0, gs.getDefaultGraph().size()) ;
        // NOTE(review): the result of containsGraph is ignored — this was
        // probably meant to be wrapped in assertTrue; verify the graph URI
        // (it differs from gName) before tightening.
        gs.containsGraph(NodeFactory.createURI("http://example/")) ;
        assertEquals(1, gs.getGraph(gName).size()) ;
    }

    // Bad: loading quads into a named graph
    @Test(expected=UpdateException.class)
    public void load4() {
        DatasetGraph gs = graphStore() ;
        UpdateRequest req = UpdateFactory.create("LOAD <"+DIR+"/D.nq> INTO GRAPH <"+gName.getURI()+">") ;
        UpdateAction.execute(req, gs) ;
    }

    /** LOAD SILENT suppresses the error of load4; nothing is loaded. */
    @Test public void load5() {
        DatasetGraph gs = graphStore() ;
        UpdateRequest req = UpdateFactory.create("LOAD SILENT <"+DIR+"/D.nq> INTO GRAPH <"+gName.getURI()+">") ;
        UpdateAction.execute(req, gs) ;
        assertEquals(0, Iter.count(gs.find())) ;
    }

    /** INSERT/WHERE with swapped subject/object adds the mirrored statement. */
    @Test public void insert_where_01() {
        Model m = ModelFactory.createDefaultModel();
        Resource anon = m.createResource();
        anon.addProperty(RDF.type, OWL.Thing);
        assertEquals(1, m.size());
        UpdateRequest req = UpdateFactory.create("INSERT { ?s ?p ?o } WHERE { ?o ?p ?s }");
        UpdateAction.execute(req, m);
        assertEquals(2, m.size());
        assertEquals(1, m.listStatements(anon, null, (RDFNode)null).toList().size());
        assertEquals(1, m.listStatements(null, null, anon).toList().size());
    }

    // Check constant and template quads
    @Test public void delete_insert_where_01() {
        DatasetGraph dsg0 = DatasetGraphFactory.createMem() ;
        UpdateRequest req = UpdateFactory.create("INSERT DATA { <x> <p> 2 . <z> <q> 2 . <z> <q> 3 . }") ;
        UpdateAction.execute(req, dsg0);
        assertEquals(3, dsg0.getDefaultGraph().size()) ;

        // Wrap the dataset to count the individual add/delete calls made
        // while executing the template.
        AtomicLong counterIns = new AtomicLong(0) ;
        AtomicLong counterDel = new AtomicLong(0) ;
        DatasetGraph dsg = new DatasetGraphWrapper(dsg0) {
            @Override
            public void add(Quad quad) {
                counterIns.incrementAndGet() ;
                get().add(quad) ;
            }
            @Override
            public void delete(Quad quad) {
                counterDel.incrementAndGet() ;
                get().delete(quad) ;
            }
        } ;
        // WHERE clause doubles the effect.
        String s = "DELETE { ?x <p> 2 . <z> <q> 2 } INSERT { ?x <p> 1 . <x> <q> 1 } WHERE { ?x <p> ?o {} UNION {} }" ;
        req = UpdateFactory.create(s) ;
        UpdateAction.execute(req, dsg);
        assertEquals(3, counterIns.get()) ;   // 3 : 1 constant, 2 from template.
        // Fixed: this previously re-checked counterIns; the delete side is
        // symmetric (1 constant quad + 2 template instantiations).
        assertEquals(3, counterDel.get()) ;
        assertEquals(3, dsg.getDefaultGraph().size()) ;
    }
}
|
#!/bin/sh
# this script assumes that you've cloned the project to the server with a git clone and have your
# github credentials and remotes set up.

# Abort immediately if any step fails, instead of continuing with a
# half-updated (or not updated) checkout.
set -e

echo "Pulling latest code..."
git reset --hard HEAD
git pull
# assuming you're using node-dev as recommended, the server should restart as needed to pick up changes
# if you're using git submodules, uncomment this line to ensure you get latest.
# (newer git does this by default - older git does not)
# git submodule update --init --recursive
package com.yin.springboot.user.center.server.service;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import com.yin.springboot.user.center.domain.TbRole;
import com.yin.springboot.user.center.mapper.TbRoleMapper;
import java.util.List;
import com.yin.springboot.user.center.server.TbRoleService;
@Service
public class TbRoleServiceImpl implements TbRoleService{

    // JSR-250 @Resource injects by name, so the field name must match the
    // mapper bean name — do not rename.
    @Resource
    private TbRoleMapper tbRoleMapper;

    /**
     * Updates all records in the list in a single batch statement.
     *
     * @param list records to update
     * @return number of affected rows
     */
    @Override
    public int updateBatch(List<TbRole> list) {
        return tbRoleMapper.updateBatch(list);
    }

    /**
     * Inserts all records in the list in a single batch statement.
     *
     * @param list records to insert
     * @return number of affected rows
     */
    @Override
    public int batchInsert(List<TbRole> list) {
        return tbRoleMapper.batchInsert(list);
    }

    /**
     * Inserts the record, or updates it if it already exists
     * (all columns written).
     *
     * @param record record to upsert
     * @return number of affected rows
     */
    @Override
    public int insertOrUpdate(TbRole record) {
        return tbRoleMapper.insertOrUpdate(record);
    }

    /**
     * Inserts the record, or updates it if it already exists, writing only
     * the non-null fields.
     *
     * @param record record to upsert
     * @return number of affected rows
     */
    @Override
    public int insertOrUpdateSelective(TbRole record) {
        return tbRoleMapper.insertOrUpdateSelective(record);
    }
}
|
<filename>src/services/persons/persons.hooks.js<gh_stars>0
const {
authenticate
} = require('@feathersjs/authentication').hooks;
const {
protect
} = require('@feathersjs/authentication-local').hooks;
const {
fastJoin
} = require('feathers-hooks-common');
// fastJoin resolver: converts the raw base64 `profileImage`/`signature`
// fields on a person record into uploaded-image URLs, then strips the
// raw payloads from the record.
const fastJoinImagePath = {
  joins: {
    imagePath: () => async (data, context) => {
      // Upload one base64 image via the upload service and return its URL.
      // `raw` may already carry a data-URI prefix; add one when missing.
      // NOTE(review): host is hard-coded to localhost:3030 — consider app config.
      const toImageUrl = async (raw) => {
        const upload = await context.app.service('upload-persons-profile-images').create({
          uri: raw.startsWith('data:image/') ? raw : 'data:image/jpeg;base64,' + raw
        });
        return 'http://localhost:3030/profile/' + upload.id;
      };
      if (data.profileImage !== undefined && data.profileImage !== null) {
        data.profileImagePath = await toImageUrl(data.profileImage);
      }
      if (data.signature !== undefined && data.signature !== null) {
        data.signatureImagePath = await toImageUrl(data.signature);
      }
      // Do not leak the raw base64 payloads back to the client.
      delete data.profileImage;
      delete data.signature;
    }
  }
}
// Hook registration for the persons service.
module.exports = {
  before: {
    // Every method requires a valid JWT.
    all: [authenticate('jwt')],
    find: [],
    get: [],
    create: [],
    update: [],
    patch: [],
    remove: []
  },
  after: {
    // Resolve uploaded-image URLs onto every result.
    all: [fastJoin(fastJoinImagePath)],
    find: [],
    get: [],
    create: [],
    update: [],
    patch: [],
    remove: []
  },
  error: {
    all: [],
    find: [],
    get: [],
    create: [],
    update: [],
    patch: [],
    remove: []
  }
};
|
package io.opensphere.core.util.fx;
import java.util.function.Consumer;
import io.opensphere.core.util.Service;
import javafx.beans.value.ChangeListener;
import javafx.collections.ListChangeListener;
import javafx.collections.ListChangeListener.Change;
import javafx.collections.WeakListChangeListener;
import javafx.scene.control.Skin;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
/**
* Watches a {@link TableView} and resizes columns as necessary to accommodate
* new values. This is a {@link Service} and so {@link #open()} must be called
* for it to start listening to the table.
* <p>
* The listener on the table's items is a weak reference but the listener on the
* table's skin property is strong, so the auto-sizer should be cleaned up when
* the table is no longer used, even if the table's items are still referenced.
* {@link #close()} may be used to explicitly clean it up.
*
* @param <T> The type of the items in the table.
*/
public class TableViewAutosizer<T> implements Service
{
    /** The table whose columns are kept wide enough for their content. */
    private final TableView<T> myTable;
    /** Widens the affected columns whenever rows are added to the table. */
    private final ListChangeListener<T> myListChangeListener;
    /** Weakly-referenced wrapper around {@link #myListChangeListener}. */
    private final ListChangeListener<T> myWeakListChangeListener;
    /**
     * Performs the initial sizing of all columns once the table's skin is
     * set (i.e. once the table has been rendered).
     */
    private final ChangeListener<? super Skin<?>> mySkinListener = (obs, oldSkin, newSkin) -> getTable().getColumns()
            .forEach(c -> updatePrefWidth(c, 0, getTable().getItems().size()));
    /**
     * Constructor.
     *
     * @param table The table whose columns should be auto-sized.
     */
    public TableViewAutosizer(TableView<T> table)
    {
        myTable = table;
        myListChangeListener = this::handleListChange;
        myWeakListChangeListener = new WeakListChangeListener<>(myListChangeListener);
    }
    @Override
    public void open()
    {
        myTable.getItems().addListener(myWeakListChangeListener);
        myTable.skinProperty().addListener(mySkinListener);
    }
    @Override
    public void close()
    {
        myTable.getItems().removeListener(myWeakListChangeListener);
        myTable.skinProperty().removeListener(mySkinListener);
    }
    /**
     * Get the table.
     *
     * @return The table being auto-sized.
     */
    public TableView<T> getTable()
    {
        return myTable;
    }
    /**
     * Grow a column's preferred width to fit the widest cell in the given
     * row range. The width never shrinks.
     *
     * @param col The column.
     * @param from The start index (inclusive).
     * @param to The end index (exclusive).
     */
    protected void updatePrefWidth(TableColumn<T, ?> col, int from, int to)
    {
        int contentWidth = (int)FXUtilities.getMaxTableCellWidth(col, from, to);
        col.setPrefWidth(Math.max(col.getPrefWidth(), contentWidth));
    }
    /**
     * React to a change in the table's items: only additions matter, and
     * only the newly added row range is measured.
     *
     * @param change The change.
     */
    protected void handleListChange(Change<? extends T> change)
    {
        while (change.next())
        {
            if (!change.wasAdded())
            {
                continue;
            }
            for (TableColumn<T, ?> column : myTable.getColumns())
            {
                updatePrefWidth(column, change.getFrom(), change.getTo());
            }
        }
    }
}
|
Create a Redux action creator that dispatches an action when a button is clicked. The action should include the necessary data, such as the button text, and a corresponding reducer should handle it to update the state in the store.
// Jest configuration for a Vue.js project.
module.exports = {
  // Keep test-run output terse.
  verbose: false,
  // Directories Jest scans for source and test files.
  roots: [
    '<rootDir>/src',
    '<rootDir>/tests/unit'
  ],
  moduleFileExtensions: [
    'js',
    'json',
    'vue'
  ],
  // Stub the canvas API so components using it can run under jsdom.
  setupFiles: ['jest-canvas-mock'],
  moduleDirectories: ['node_modules'],
  // Path aliases mirroring the bundler's resolve config.
  // NOTE(review): these map to <rootDir>/test while `roots` uses
  // tests/unit — confirm the intended test directory.
  moduleNameMapper: {
    '^@/test$': '<rootDir>/test/index.js',
    '^@/test/(.*)$': '<rootDir>/test/$1',
    '^src/(.*)$': '<rootDir>/src/$1'
  },
  // Compile plain JS with Babel and single-file components with vue-jest.
  transform: {
    '\\.(js)$': 'babel-jest',
    '\\.(vue)$': 'vue-jest'
  },
  collectCoverageFrom: [
    'app.js',
    'src/**/*.{js}',
    '!**/node_modules/**'
  ],
  transformIgnorePatterns: [
    'node_modules'
  ],
  coverageDirectory: './coverage/'
}
|
#!/usr/bin/env bash
# Abort on the first error so training never starts from the wrong directory.
set -e
# Resolve the directory containing this script (symlink-safe via pwd -P)
# so it can be invoked from any working directory.
base_path=$(
    cd "$(dirname "${BASH_SOURCE[0]}")"
    pwd -P
)
cd "$base_path"
miniconda/bin/python train.py --baseconfig train_config/base_train_seg.json seg train_config/BF-C2DL-HSC-GT-seg.json
|
#!/bin/sh
# Usage: run.sh <plan-simulator-url> <account-manager-url> <branch-finder-url>
export REACT_APP_PLAN_SIMULATOR_API=$1
export REACT_APP_ACCOUNT_MANAGER_API=$2
export REACT_APP_BRANCH_FINDER_API=$3
# echo "Creating .env file"
# Truncate .env with the first variable, then APPEND the others.
# Bug fix: every line previously used '>', so each write clobbered the
# file and only REACT_APP_BRANCH_FINDER_API survived.
echo "REACT_APP_PLAN_SIMULATOR_API=${1}" > .env
echo "REACT_APP_ACCOUNT_MANAGER_API=${2}" >> .env
echo "REACT_APP_BRANCH_FINDER_API=${3}" >> .env
echo "Building..."
npm run build --production
echo "Running..."
exec serve -s build
#!/bin/bash
# Start from the stock feed list, then register the extra package feeds.
cp feeds.conf.default feeds.conf
{
    echo "src-git node https://github.com/nxhack/openwrt-node-packages.git;openwrt-21.02"
    echo "src-git openlumi https://github.com/openlumi/openwrt-lumi-packages.git"
} >> feeds.conf
<reponame>andywar65/buildings
// Send Django's CSRF token with every axios request.
axios.defaults.xsrfCookieName = 'csrftoken'
axios.defaults.xsrfHeaderName = "X-CSRFTOKEN"
let app = new Vue({
  // Square-bracket delimiters avoid clashing with Django's {{ }} templates.
  delimiters: ["[[", "]]"],
  el: '#vue-app',
  data : {
    // Server-provided settings rendered into the page as JSON.
    map_data : JSON.parse(document.getElementById("map_data").textContent),
    map : Object,
    buildLayerGroup : Object,
    buildMarker : Object,
    copy : '© <a href="https://osm.org/copyright">OpenStreetMap</a> contributors',
    url : 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
    mb_copy : 'Map data © <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors, Imagery © <a href="https://www.mapbox.com/">Mapbox</a>',
    mb_url : 'https://api.mapbox.com/styles/v1/{id}/tiles/{z}/{x}/{y}?access_token={accessToken}',
    mb_id : 'mapbox/satellite-v9',
    // UI state: alert banner and which panel (list / city form / building form) is shown.
    alert : "",
    alertType : "",
    isAlertPanel : false,
    isBuildList : true,
    isCityChange : false,
    isBuildAdd : false,
    formErrors : false,
    // Form fields shared by the city and building forms.
    title : "",
    lat : null,
    long : null,
    zoom : null,
    image : "",
    intro : ""
  },
  methods: {
    // Attach base/satellite tile layers and map event handlers.
    setupLeafletMap: function () {
      const base_map = L.tileLayer(this.url, {
        attribution: this.copy,
        maxZoom: 23,
      }).addTo(this.map)
      const sat_map = L.tileLayer(this.mb_url, {
        attribution: this.mb_copy,
        maxZoom: 23,
        tileSize: 512,
        zoomOffset: -1,
        id: this.mb_id,
        accessToken: this.map_data.mapbox_token
      })
      const baseMaps = {
        "Base": base_map,
        "Satellite": sat_map
      }
      L.control.layers(baseMaps, ).addTo(this.map)
      this.map.on('click', this.onMapClick)
      this.map.on('zoomend', this.onMapZoomEnd)
    },
    // Fetch all buildings as GeoJSON.
    load_buildings : async function () {
      let response = await fetch(`/build-api/all/`)
      let geojson = await response.json()
      return geojson
    },
    // Use the custom building icon for every GeoJSON point.
    buildingPointToLayer : function (feature, latlng) {
      return L.marker(latlng, {icon: this.buildMarker})
    },
    // Bind a popup (title link, image, intro) to each building marker.
    onEachBuildingFeature : function (feature, layer) {
      let content = "<h5><a href=\"" + feature.properties.path + "\">" +
        feature.properties.title +
        "</a></h5><img src=\"" + feature.properties.image_path + "\"><br><small>" +
        feature.properties.intro + "</small>"
      layer.bindPopup(content, {minWidth: 300})
    },
    // Center the map on the first stored city, falling back to server defaults.
    setCityView : async function () {
      let response = await fetch(`/build-api/city/all/`)
      let cityjson = await response.json()
      try {
        // Bug fix: `city` was assigned without a declaration, leaking a global.
        const city = cityjson.features[0]
        this.map.setView([city.geometry.coordinates[1], city.geometry.coordinates[0]],
          city.properties.zoom)
      } catch {
        this.map.setView([this.map_data.city_lat, this.map_data.city_long],
          this.map_data.city_zoom)
      }
    },
    // Reload all building markers; fit bounds, else geolocate, else city view.
    render_buildings : async function () {
      this.buildLayerGroup.clearLayers()
      let buildgeo = await this.load_buildings()
      // Bug fix: `markers` was assigned without a declaration, leaking a global.
      const markers = L.geoJSON(buildgeo,
        { pointToLayer: this.buildingPointToLayer, onEachFeature: this.onEachBuildingFeature })
      markers.addTo(this.buildLayerGroup)
      try {
        // Throws when there are no markers (empty bounds).
        this.map.fitBounds(markers.getBounds(), {padding: [50,50]})
      }
      catch {
        this.map.locate()
          .on('locationfound', e => this.map.setView(e.latlng, 10))
          .on('locationerror', () => this.setCityView())
      }
    },
    handleImageUpload : function () {
      this.image = this.$refs.image.files[0]
    },
    // Panel toggles: show the city form.
    onCityPanel : function () {
      this.isAlertPanel = false
      this.alert = ""
      this.isBuildList = false
      this.isCityChange = true
    },
    // Panel toggles: show the building form.
    onBuildPanel : function () {
      this.isAlertPanel = false
      this.alert = ""
      this.isBuildList = false
      this.isBuildAdd = true
    },
    onCityDismiss : function () {
      this.isBuildList = true
      this.isCityChange = false
      this.formErrors = false
      this.clearData()
    },
    onBuildDismiss : function () {
      this.isBuildList = true
      this.isBuildAdd = false
      this.formErrors = false
      this.clearData()
    },
    // Capture the clicked coordinates for the add forms.
    onMapClick : function (e) {
      this.lat = e.latlng.lat
      this.long = e.latlng.lng
    },
    onMapZoomEnd : function () {
      this.zoom = this.map.getZoom()
    },
    // Reset all shared form fields.
    clearData : function () {
      this.title = ""
      this.lat = null
      this.long = null
      this.zoom = null
      this.image = ""
      this.intro = ""
    },
    // HTML5 validation; sets formErrors when the form is invalid.
    formValidation : function (id) {
      let form = document.getElementById(id)
      if (form.checkValidity() === false) {
        this.formErrors = true
      }
      form.classList.add('was-validated')
    },
    formValidated : function (id) {
      let form = document.getElementById(id)
      form.classList.remove('was-validated')
    },
    // POST the city form; on success show an alert and refresh markers.
    onCityAdd : function () {
      this.formErrors = false
      this.formValidation("add_c_form")
      if (this.formErrors) { return }//prevent from sending form
      let url = '/build-api/city/add/'
      let data = {
        "name": this.title,
        "lat": this.lat,
        "long": this.long,
        "zoom": this.zoom
      }
      axios
        .post(url, data)
        .then(response => {
          this.isAlertPanel = true
          this.alert = this.title
          this.alertType = "fa fa-globe"
          this.isBuildList = true
          this.isCityChange = false
          this.formValidated("add_c_form")
          this.clearData()
          this.render_buildings()
        })
        .catch(error => {
          console.log(error)
        })
    },
    // POST the building form as multipart (image upload) and refresh markers.
    onBuildAdd : function () {
      this.formErrors = false
      this.formValidation("add_b_form")
      if (this.formErrors) { return }//prevent from sending form
      let url = '/build-api/add/'
      let data = new FormData()
      data.append("image", this.image)
      data.append("title", this.title)
      data.append("intro", this.intro)
      data.append("lat", this.lat)
      data.append("long", this.long)
      data.append("zoom", this.zoom)
      axios
        .post(url, data)
        .then(response => {
          this.isAlertPanel = true
          if (this.title) {
            this.alert = this.title
          } else {
            this.alert = "New building"
          }
          this.alertType = "fa fa-building"
          this.isBuildList = true
          this.isBuildAdd = false
          this.formValidated("add_b_form")
          this.clearData()
          this.render_buildings()
        })
        .catch(error => {
          console.log(error)
        })
    },
  },
  mounted() {
    // Create the map, the layer group holding markers, and the marker icon,
    // then wire everything up and draw the initial markers.
    this.map = L.map('mapid')
    this.buildLayerGroup = L.layerGroup().addTo(this.map)
    this.buildMarker = L.AwesomeMarkers.icon({
      icon: 'fa-building',
      prefix: 'fa',
      markerColor: 'blue',
      iconColor: 'white',
    })
    this.setupLeafletMap()
    this.render_buildings()
  }
})
|
package com.hapramp.models;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
/**
 * Gson-mapped response carrying a formatted post body and its id.
 * Getter/setter names keep the "m"-prefixed style because callers
 * (and possibly reflective serializers) depend on them.
 */
public class FormattedBodyResponse {
  /** Formatted body text (JSON field "body"). */
  @Expose
  @SerializedName("body")
  private String mBody;
  /** Identifier of the post (JSON field "id"). */
  @Expose
  @SerializedName("id")
  private String mId;
  /** No-arg constructor required by Gson. */
  public FormattedBodyResponse() {
  }
  /**
   * @param mBody formatted body text
   * @param mId post identifier
   */
  public FormattedBodyResponse(String mBody, String mId) {
    this.mBody = mBody;
    this.mId = mId;
  }
  /** @return the formatted body text */
  public String getmBody() {
    return mBody;
  }
  /** @param mBody the formatted body text */
  public void setmBody(String mBody) {
    this.mBody = mBody;
  }
  /** @return the post identifier */
  public String getmId() {
    return mId;
  }
  /** @param mId the post identifier */
  public void setmId(String mId) {
    this.mId = mId;
  }
}
|
package workspace_th.day06.fileEx;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URL;
public class filecopy2 {
    /**
     * Downloads the resource at a fixed Google URL and saves it as
     * googleLogo.jpg in the working directory.
     *
     * @param args unused
     * @throws IOException if opening the URL or either stream fails
     */
    public static void main(String[] args) throws IOException {
        URL url = new URL("https://www.google.com/webhp?hl=en&sa=X&ved=0ahUKEwim3L7C9JbyAhUJC94KHa5kBsMQPAgI");
        // try-with-resources closes both streams even when read/write throws;
        // the original closed them manually and leaked on failure.
        try (InputStream is = url.openStream();
             OutputStream os = new FileOutputStream("googleLogo.jpg")) {
            byte[] buffer = new byte[1024];
            while (true) {
                int inputData = is.read(buffer);
                if (inputData == -1) break;
                os.write(buffer, 0, inputData);
            }
        }
        System.out.println("성공");
    }
}
|
#!/bin/bash
# Bash is required: 'set -o pipefail' is not POSIX and aborts the script
# under /bin/sh on systems where sh is dash.
set -e
set -o pipefail
packages="common-* $*"
echo "Installing packages: $packages"
# create all directories we will put files into (othewise `stow` will
# symlink the whole directory, not just the files within it):
echo "Making directories..."
for d in $(find $packages -type d | grep -v '\.git' | grep / | cut -d'/' -f2- | sort -u | egrep -v '^\.$|^$'); do
    echo "- ~/$d"
    mkdir -p ~/$d
done
echo "done."
echo "Stowing..."
stow -Rv ${packages}
echo "done."
|
import javax.jws.WebParam;
import javax.jws.WebService;
@WebService
public class TemperatureConverter {
public double fahrenheitToCelsius(@WebParam(name="fahrenheit") double fahrenheit) {
double celsius = (fahrenheit - 32) / 1.8;
return celsius;
}
public double celsiusToFahrenheit(@WebParam(name="celsius") double celsius) {
double fahrenheit = (celsius * 1.8) + 32;
return fahrenheit;
}
} |
<filename>FreeRTOS/Demo/RISC-V_Renode_Emulator_SoftConsole/Microsemi_Code/riscv_hal/syscall.c<gh_stars>1-10
/*******************************************************************************
* (c) Copyright 2016-2018 Microsemi SoC Products Group. All rights reserved.
*
* @file syscall.c
* @author Microsemi SoC Products Group
* @brief Stubs for system calls.
*
* SVN $Revision: 9661 $
* SVN $Date: 2018-01-15 16:13:33 +0530 (Mon, 15 Jan 2018) $
*/
#include <stdint.h>
#include <stdlib.h>
#include <stddef.h>
#include <unistd.h>
#include <errno.h>
#include <sys/stat.h>
#include <sys/times.h>
#include <stdio.h>
#include <string.h>
#include "encoding.h"
#ifdef MSCC_STDIO_THRU_CORE_UART_APB
#include "core_uart_apb.h"
#include "hw_platform.h"
#endif /*MSCC_STDIO_THRU_CORE_UART_APB*/
#ifdef __cplusplus
extern "C" {
#endif
#ifdef MSCC_STDIO_THRU_CORE_UART_APB
/*------------------------------------------------------------------------------
* CoreUARTapb instance data for the CoreUARTapb instance used for standard
* output.
*/
static UART_instance_t g_stdio_uart;
/*==============================================================================
* Flag used to indicate if the UART driver needs to be initialized.
*/
static int g_stdio_uart_init_done = 0;
#endif /*MSCC_STDIO_THRU_CORE_UART_APB*/
#undef errno
int errno;
char *__env[1] = { 0 };
char **environ = __env;
/* Write the 32-bit value to fd as "0x" followed by eight upper-case hex
 * digits, most significant nibble first. */
void write_hex(int fd, uint32_t hex)
{
    uint8_t shift;
    uint8_t nibble;
    char digit_char;

    write( fd , "0x", 2 );
    /* Walk the nibbles from bits 31..28 down to bits 3..0. */
    for (shift = 28u; ; shift -= 4u)
    {
        nibble = (uint8_t)((hex >> shift) & 0xFu);
        digit_char = (nibble < 0xAu) ? (char)('0' + nibble)
                                     : (char)('A' + (nibble - 0xAu));
        write( fd, &digit_char, 1);
        if (0u == shift)
        {
            break;
        }
    }
}
/* Terminate the program: optionally report the exit code over the UART,
 * then spin forever (bare metal has no OS to return to). */
void _exit(int code)
{
#ifdef MSCC_STDIO_THRU_CORE_UART_APB
    const char * message = "\nProgam has exited with code:";
    write(STDERR_FILENO, message, strlen(message));
    write_hex(STDERR_FILENO, code);
#endif
    while (1);
}
/* Grow or shrink the heap for malloc(). The heap occupies the region
 * between the linker symbols _end and _heap_end. Returns the previous
 * break on success, or (char *)-1 when the request would move the break
 * outside that region. */
void *_sbrk(ptrdiff_t incr)
{
    extern char _end[];
    extern char _heap_end[];
    static char *curbrk = _end;
    if ((curbrk + incr < _end) || (curbrk + incr > _heap_end))
    {
        return ((char *) - 1);
    }
    curbrk += incr;
    return curbrk - incr;
}
/* Only stdout and stderr are treated as terminals (they map to the UART
 * when MSCC_STDIO_THRU_CORE_UART_APB is defined). */
int _isatty(int fd)
{
    if (fd == STDOUT_FILENO || fd == STDERR_FILENO)
    {
        return 1;
    }
    errno = EBADF;
    return 0;
}
/* Common helper for unimplemented syscalls: set errno and fail. */
static int stub(int err)
{
    errno = err;
    return -1;
}
/* No file system: opening always fails with ENOENT. */
int _open(const char* name, int flags, int mode)
{
    return stub(ENOENT);
}
/* No file system: opening always fails with ENOENT. */
int _openat(int dirfd, const char* name, int flags, int mode)
{
    return stub(ENOENT);
}
/* No open files exist, so closing any descriptor fails with EBADF. */
int _close(int fd)
{
    return stub(EBADF);
}
/* No processes: exec always fails with ENOMEM. */
int _execve(const char* name, char* const argv[], char* const env[])
{
    return stub(ENOMEM);
}
/* No processes: fork always fails with EAGAIN. */
int _fork()
{
    return stub(EAGAIN);
}
/* Report terminal descriptors as character devices; everything else fails. */
int _fstat(int fd, struct stat *st)
{
    if (isatty(fd))
    {
        st->st_mode = S_IFCHR;
        return 0;
    }
    return stub(EBADF);
}
/* Single-process environment: always process id 1. */
int _getpid()
{
    return 1;
}
/* No processes to signal. */
int _kill(int pid, int sig)
{
    return stub(EINVAL);
}
/* No file system: links are not supported. */
int _link(const char *old_name, const char *new_name)
{
    return stub(EMLINK);
}
/* Terminals are not seekable but report success (offset 0), as newlib
 * expects; other descriptors fail with EBADF. */
off_t _lseek(int fd, off_t ptr, int dir)
{
    if (_isatty(fd))
    {
        return 0;
    }
    return stub(EBADF);
}
/* Newlib read() hook: reads up to len bytes from the CoreUARTapb when fd
 * is a console stream (lazy-initializing the UART on first use); all
 * other descriptors fail with EBADF. May return fewer bytes than asked. */
ssize_t _read(int fd, void* ptr, size_t len)
{
#ifdef MSCC_STDIO_THRU_CORE_UART_APB
    if (_isatty(fd))
    {
        /*--------------------------------------------------------------------------
         * Initialize the UART driver if it is the first time this function is
         * called.
         */
        if ( !g_stdio_uart_init_done )
        {
            /******************************************************************************
             * Baud value:
             * This value is calculated using the following equation:
             *      BAUD_VALUE = (CLOCK / (16 * BAUD_RATE)) - 1
             *****************************************************************************/
            UART_init( &g_stdio_uart, MSCC_STDIO_UART_BASE_ADDR, ((SYS_CLK_FREQ/(16 * MSCC_STDIO_BAUD_VALUE))-1), (DATA_8_BITS | NO_PARITY));
            g_stdio_uart_init_done = 1;
        }
        return UART_get_rx(&g_stdio_uart, (uint8_t*) ptr, len);
    }
#endif
    return stub(EBADF);
}
/* No file system: stat by path always fails with EACCES. */
int _stat(const char* file, struct stat* st)
{
    return stub(EACCES);
}
/* No process accounting available. */
clock_t _times(struct tms* buf)
{
    return stub(EACCES);
}
/* No file system: nothing to unlink. */
int _unlink(const char* name)
{
    return stub(ENOENT);
}
/* No child processes to wait for. */
int _wait(int* status)
{
    return stub(ECHILD);
}
/* Newlib write() hook: sends bytes to the CoreUARTapb when fd is a
 * console stream (lazy-initializing the UART on first use), expanding
 * '\n' to "\n\r" for terminals; other descriptors fail with EBADF. */
ssize_t _write(int fd, const void* ptr, size_t len)
{
#ifdef MSCC_STDIO_THRU_CORE_UART_APB
    const uint8_t * current = (const uint8_t *) ptr;
    size_t jj;
    if (_isatty(fd))
    {
        /*--------------------------------------------------------------------------
         * Initialize the UART driver if it is the first time this function is
         * called.
         */
        if ( !g_stdio_uart_init_done )
        {
            /******************************************************************************
             * Baud value:
             * This value is calculated using the following equation:
             *      BAUD_VALUE = (CLOCK / (16 * BAUD_RATE)) - 1
             *****************************************************************************/
            UART_init( &g_stdio_uart, MSCC_STDIO_UART_BASE_ADDR, ((SYS_CLK_FREQ/(16 * MSCC_STDIO_BAUD_VALUE))-1), (DATA_8_BITS | NO_PARITY));
            g_stdio_uart_init_done = 1;
        }
        for (jj = 0; jj < len; jj++)
        {
            /* Send one byte at a time so a carriage return can follow '\n'. */
            UART_send(&g_stdio_uart, current + jj, 1);
            if (current[jj] == '\n')
            {
                UART_send(&g_stdio_uart, (const uint8_t *)"\r", 1);
            }
        }
        return len;
    }
#endif
    return stub(EBADF);
}
#ifdef __cplusplus
}
#endif
|
import CircuitElement from '../circuitElement';
import Node, { findNode } from '../node';
import simulationArea from '../simulationArea';
import { correctWidth, lineTo, moveTo, fillText3 } from '../canvasApi';
import { colors } from '../themer/themer';
/**
* @class
* TTY
* TypeWriter - We can give 4 inputs:
* clock and input of 7 bits are main input required
* on the edge change the data is added onto the display
* screen of the typewriter
* @extends CircuitElement
* @param {number} x - x coord of element
* @param {number} y - y coord of element
* @param {Scope=} scope - the ciruit in which we want the Element
* @param {string=} dir - direcion in which element has to drawn
* @category sequential
*/
export default class TTY extends CircuitElement {
    constructor(x, y, scope = globalScope, rows = 3, cols = 32) {
        super(x, y, scope, 'RIGHT', 1);
        /*
        this.scope['TTY'].push(this);
        */
        this.directionFixed = true;
        this.fixedBitWidth = true;
        // Fall back to prompting the user when dimensions are not supplied.
        this.cols = cols || parseInt(prompt('Enter cols:'));
        this.rows = rows || parseInt(prompt('Enter rows:'));
        // Element size is derived from the character grid (min 40px each way).
        this.elementWidth = Math.max(40, Math.ceil(this.cols / 2) * 20);
        this.elementHeight = Math.max(40, Math.ceil(this.rows * 15 / 20) * 20);
        this.setWidth(this.elementWidth / 2);
        this.setHeight(this.elementHeight / 2);
        // this.element = new Element(x, y, "TTY",this.elementWidth/2, this,this.elementHeight/2);
        // Input pins: clock, 7-bit ASCII, reset and enable.
        this.clockInp = new Node(-this.elementWidth / 2, this.elementHeight / 2 - 10, 0, this, 1, 'Clock');
        this.asciiInp = new Node(-this.elementWidth / 2, this.elementHeight / 2 - 30, 0, this, 7, 'Ascii Input');
        // this.qOutput = new Node(20, -10, 1, this);
        this.reset = new Node(30 - this.elementWidth / 2, this.elementHeight / 2, 0, this, 1, 'Reset');
        this.en = new Node(10 - this.elementWidth / 2, this.elementHeight / 2, 0, this, 1, 'Enable');
        // this.masterState = 0;
        // this.slaveState = 0;
        // Clock edge detection state, committed text, and the pending character.
        this.prevClockState = 0;
        this.data = '';
        this.buffer = '';
    }
    /**
     * @memberof TTY
     * this funciton is used to change the size of the screen
     */
    changeRowSize(size) {
        if (size == undefined || size < 1 || size > 10) return;
        if (this.rows == size) return;
        // Resizing recreates the element because node positions depend on size.
        var obj = new TTY(this.x, this.y, this.scope, size, this.cols);
        this.delete();
        simulationArea.lastSelected = obj;
        return obj;
    }
    /**
     * @memberof TTY
     * this funciton is used to change the size of the screen
     */
    changeColSize(size) {
        if (size == undefined || size < 20 || size > 100) return;
        if (this.cols == size) return;
        // Resizing recreates the element because node positions depend on size.
        var obj = new TTY(this.x, this.y, this.scope, this.rows, size);
        this.delete();
        simulationArea.lastSelected = obj;
        return obj;
    }
    /**
     * @memberof TTY
     * if no input or enable key is set to 0 returns false
     */
    isResolvable() {
        // Reset dominates: resolvable regardless of other inputs.
        if (this.reset.value == 1) return true;
        if (this.en.value == 0 || (this.en.connections.length && this.en.value == undefined)) return false;
        else if (this.clockInp.value == undefined) return false;
        else if (this.asciiInp.value == undefined) return false;
        return true;
    }
    /**
     * @memberof TTY
     * To resolve the Typewriter clock and input of 7 bits are
     * used to get the ascii and then on the edge change the
     * data is added onto the display screen of the typewriter.
     */
    resolve() {
        // Reset clears the committed text.
        if (this.reset.value == 1) {
            this.data = '';
            return;
        }
        // Disabled: drop any pending character.
        if (this.en.value == 0) {
            this.buffer = '';
            return;
        }
        if (this.clockInp.value == this.prevClockState) {
            // No edge: while the clock is low, keep latching the ASCII input.
            if (this.clockInp.value == 0) {
                this.buffer = String.fromCharCode(this.asciiInp.value);
            }
        } else if (this.clockInp.value != undefined) {
            if (this.clockInp.value == 1) {
                // Rising edge: commit the latched character to the display,
                // scrolling (dropping the oldest char) when the grid is full.
                this.data += this.buffer;
                if (this.data.length > this.cols * this.rows) { this.data = this.data.slice(1); }
            } else if (this.clockInp.value == 0) {
                // Falling edge: latch the current ASCII input.
                this.buffer = String.fromCharCode(this.asciiInp.value);
            }
            this.prevClockState = this.clockInp.value;
        }
    }
    /**
     * @memberof TTY
     * Serializes node references and constructor parameters for saving.
     */
    customSave() {
        var data = {
            nodes: {
                clockInp: findNode(this.clockInp),
                asciiInp: findNode(this.asciiInp),
                reset: findNode(this.reset),
                en: findNode(this.en),
            },
            constructorParamaters: [this.rows, this.cols],
        };
        return data;
    }
    /**
     * @memberof TTY
     * Draws the clock marker and the committed text, one row per line,
     * padded to the full column width.
     */
    customDraw() {
        var ctx = simulationArea.context;
        //
        ctx.strokeStyle = (colors['stroke']);
        ctx.fillStyle = (colors['fill']);
        ctx.beginPath();
        ctx.lineWidth = correctWidth(3);
        var xx = this.x;
        var yy = this.y;
        // rect(ctx, xx - this.elementWidth/2, yy - this.elementHeight/2, this.elementWidth, this.elementHeight);
        // Small triangle marking the clock input.
        moveTo(ctx, -this.elementWidth / 2, this.elementHeight / 2 - 15, xx, yy, this.direction);
        lineTo(ctx, 5 - this.elementWidth / 2, this.elementHeight / 2 - 10, xx, yy, this.direction);
        lineTo(ctx, -this.elementWidth / 2, this.elementHeight / 2 - 5, xx, yy, this.direction);
        // if ((this.b.hover&&!simulationArea.shiftDown)|| simulationArea.lastSelected == this || simulationArea.multipleObjectSelections.contains(this))
        //     ctx.fillStyle = "rgba(255, 255, 32,0.8)";
        ctx.stroke();
        ctx.beginPath();
        ctx.fillStyle = colors['input_text'];
        ctx.textAlign = 'center';
        var startY = -7.5 * this.rows + 3;
        for (var i = 0; i < this.data.length; i += this.cols) {
            var lineData = this.data.slice(i, i + this.cols);
            lineData += ' '.repeat(this.cols - lineData.length);
            fillText3(ctx, lineData, 0, startY + (i / this.cols) * 15 + 9, xx, yy, 15, 'Courier New', 'center');
        }
        ctx.fill();
    }
}
// Hover tooltip shown in the simulator UI.
TTY.prototype.tooltipText = 'TTY ToolTip : Tele typewriter selected.';
// Link to the user documentation for this element.
TTY.prototype.helplink = 'https://docs.circuitverse.org/#/Sequential?id=tty';
// Properties editable from the properties panel; `func` names the
// instance method invoked when the value changes.
TTY.prototype.mutableProperties = {
    'cols': {
        name: 'Columns',
        type: 'number',
        max: '100',
        min: '20',
        func: 'changeColSize',
    },
    'rows': {
        name: 'Rows',
        type: 'number',
        max: '10',
        min: '1',
        func: 'changeRowSize',
    },
};
// Type tag used by the serializer.
TTY.prototype.objectType = 'TTY';
|
package com.bones.si
import java.sql.{DatabaseMetaData, Types}
/**
* Case class wrappers for the types returned by the JDBC Database Metadata object.
*/
package object jdbc {
  /**
   * One attribute of a structured (user-defined) type, as reported by
   * `DatabaseMetaData.getAttributes`.
   * NOTE(review): `ordinalPotion` looks like a typo for `ordinalPosition`,
   * but renaming it would break callers, so it is left as-is.
   */
  case class DbAttribute(
    catalogName: Option[String],
    schemaName: Option[String],
    typeName: String,
    attributeName: String,
    dataType: DataType.Value,
    attributeTypeName: String,
    attributeSize: Int,
    decimalDigits: Option[Int],
    radix: Int,
    nullable: Nullable.Value,
    remarks: Option[String],
    defaultValue: Option[String],
    characterOctetLength: Option[Int],
    ordinalPotion: Int,
    isNullable: YesNo.Value,
    scopeCatalog: Option[String],
    scopeSchema: Option[String],
    scopeTable: Option[String],
    sourceDataType: Option[String])
  /** Result-set column name used when reading catalogs. */
  object Catalog {
    val catalogColumnName = "TABLE_CAT"
  }
  /** A database catalog, identified by name. */
  case class Catalog(name: String)
object UpdateDeleteRule extends Enumeration {
case class Val protected (name: String, intId: Int) extends super.Val
implicit def valueToVal(x: Value): Val = x.asInstanceOf[Val]
def findById(id: Int) = values.toList.find(_.id == id)
val ImportedNoAction =
Val("ImportedKeyNoAction", DatabaseMetaData.importedKeyNoAction)
val ImportedKeyCascade =
Val("ImportedKeyCascade", DatabaseMetaData.importedKeyCascade)
val ImportedKeySetNull =
Val("ImportedKeySetNull", DatabaseMetaData.importedKeySetNull)
val ImportedKeySetDefault =
Val("ImportedKeySetDefault", DatabaseMetaData.importedKeySetDefault)
val ImportedKeyRestrict =
Val("ImportedKeyRestrict", DatabaseMetaData.importedKeyRestrict)
}
  /**
   * Foreign-key constraint deferrability, wrapping the JDBC
   * `DatabaseMetaData.importedKey*` deferrability constants in `intId`.
   */
  object Deferrability extends Enumeration {
    case class Val protected (name: String, intId: Int) extends super.Val
    implicit def valueToVal(x: Value): Val = x.asInstanceOf[Val]
    // Lookup is by the JDBC constant stored in intId.
    def findById(id: Int) = {
      values.toList.find(_.intId == id)
    }
    val ImportedKeyInitiallyDeferred =
      Val("ImportedKeyInitiallyDeferred", DatabaseMetaData.importedKeyInitiallyDeferred)
    val ImportedKeyInitiallyImmediate =
      Val("ImportedKeyInitiallyImmediate", DatabaseMetaData.importedKeyInitiallyImmediate)
    val ImportedKeyNotDeferrable =
      Val("ImportedKeyNotDeferrable", DatabaseMetaData.importedKeyNotDeferrable)
  }
  /**
   * One row of `DatabaseMetaData.getCrossReference`: a foreign-key column
   * relating a primary-key table/column to the referencing table/column.
   */
  case class CrossReference(
    pkColumnCatalogName: Option[String],
    pkColumnSchemaName: Option[String],
    pkColumnTableName: String,
    pkColumnName: String,
    foreignCatalogName: Option[String],
    foreignSchemaName: Option[String],
    foreignTableName: String,
    foreignColumnName: String,
    keySequence: Short,
    updateRule: UpdateDeleteRule.Value,
    deleteRule: UpdateDeleteRule.Value,
    foreignKeyName: Option[String],
    primaryKeyName: Option[String],
    deferrability: Deferrability.Value)
  /**
   * SQL data types, one per `java.sql.Types` constant (stored in `intId`).
   */
  object DataType extends Enumeration {
    protected case class Val(name: String, intId: Int) extends super.Val
    // Lookup is by the java.sql.Types constant, not the Enumeration ordinal.
    def findByConstant(typeId: Int) = values.find(_.intId == typeId)
    implicit def valueToVal(x: Value): Val = x.asInstanceOf[Val]
    val Bit = Val("Bit", Types.BIT)
    val TinyInt = Val("TinyInt", Types.TINYINT)
    val SmallInt = Val("SmallInt", Types.SMALLINT)
    val Integer = Val("Integer", Types.INTEGER)
    val BigInt = Val("BigInt", Types.BIGINT)
    val Float = Val("Float", Types.FLOAT)
    val Real = Val("Real", Types.REAL)
    val Double = Val("Double", Types.DOUBLE)
    val Numeric = Val("Numeric", Types.NUMERIC)
    val Decimal = Val("Decimal", Types.DECIMAL)
    val Char = Val("Char", Types.CHAR)
    val VarChar = Val("VarChar", Types.VARCHAR)
    val LongVarChar = Val("LongVarChar", Types.LONGVARCHAR)
    val Date = Val("Date", Types.DATE)
    val Time = Val("Time", Types.TIME)
    val Timestamp = Val("Timestamp", Types.TIMESTAMP)
    val Binary = Val("Binary", Types.BINARY)
    val VarBinary = Val("VarBinary", Types.VARBINARY)
    val LongVarBinary = Val("LongVarBinary", Types.LONGVARBINARY)
    val Null = Val("Null", Types.NULL)
    val Other = Val("Other", Types.OTHER)
    val JavaObject = Val("JavaObject", Types.JAVA_OBJECT)
    val Distinct = Val("Distinct", Types.DISTINCT)
    val Struct = Val("Struct", Types.STRUCT)
    val Array = Val("Array", Types.ARRAY)
    val Blob = Val("Blob", Types.BLOB)
    val Clob = Val("Clob", Types.CLOB)
    val Ref = Val("Ref", Types.REF)
    val DataLink = Val("DataLink", Types.DATALINK)
    val Boolean = Val("Boolean", Types.BOOLEAN)
    val RowId = Val("RowId", Types.ROWID)
    val NChar = Val("NChar", Types.NCHAR)
    val NVarChar = Val("NVarChar", Types.NVARCHAR)
    val LongNVarChar = Val("LongNVarChar", Types.LONGNVARCHAR)
    val NClob = Val("NClob", Types.NCLOB)
    val SqlXml = Val("SqlXml", Types.SQLXML)
    val RefCursor = Val("RefCursor", Types.REF_CURSOR)
    val TimeWithTimeZone = Val("TimeWithTimeZone", Types.TIME_WITH_TIMEZONE)
    val TimestampWithTimeZone =
      Val("TimestampWithTimeZone", Types.TIMESTAMP_WITH_TIMEZONE)
  }
  /**
   * Column nullability, wrapping the `DatabaseMetaData.column*` constants
   * in `intId`.
   */
  object Nullable extends Enumeration {
    case class Val protected (name: String, intId: Int) extends super.Val
    implicit def valueToNullableVal(x: Value): Val = x.asInstanceOf[Val]
    // Lookup is by the JDBC constant stored in intId.
    def findById(id: Int) = values.toList.find(_.intId == id)
    val ColumnNoNulls = Val("ColumnNoNulls", DatabaseMetaData.columnNoNulls)
    val ColumnNullable = Val("ColumnNullable", DatabaseMetaData.columnNullable)
    val ColumnNullableUnknown =
      Val("ColumnNullableUnknown", DatabaseMetaData.columnNullableUnknown)
  }
  /**
   * Tri-state yes/no flag used by JDBC metadata columns that report
   * "YES", "NO" or the empty string (unknown).
   */
  object YesNo extends Enumeration {
    def fromBoolean(b: Boolean) = if (b) Yes else No
    // Case-insensitive parse; the empty string means "unknown".
    def findByString(str: String) = {
      if (str.equalsIgnoreCase("yes")) Some(Yes)
      else if (str.equalsIgnoreCase("no")) Some(No)
      else if (str.isEmpty) Some(Unknown)
      else None
    }
    // A missing column value also maps to Unknown.
    def findByOptionalString(opt: Option[String]): Option[Value] = opt match {
      case Some(str) => findByString(str)
      case None => Some(Unknown)
    }
    type YesNo = Value
    val Yes, No, Unknown = Value
  }
/** Column labels of the result set returned by DatabaseMetaData.getColumns. */
object Column {
  val categoryNameCol = "TABLE_CAT"
  val schemaNameCol = "TABLE_SCHEM"
  val tableNameCol = "TABLE_NAME"
  val nameCol = "COLUMN_NAME"
  val dataTypeCol = "DATA_TYPE"
  val typeNameCol = "TYPE_NAME"
  val columnSizeCol = "COLUMN_SIZE"
  val decimalDigitsCol = "DECIMAL_DIGITS"
  val numProcRadixCol = "NUM_PREC_RADIX"
  val nullableCol = "NULLABLE"
  val remarksCol = "REMARKS"
  val columnDefaultCol = "COLUMN_DEF"
  val characterOctetLengthCol = "CHAR_OCTET_LENGTH"
  val ordinalPositionCol = "ORDINAL_POSITION"
  val isNullableCol = "IS_NULLABLE"
  val scopeCatalogCol = "SCOPE_CATALOG"
  val scopeSchemaCol = "SCOPE_SCHEMA"
  val scopeTableCol = "SCOPE_TABLE"
  val sourceDataTypeCol = "SOURCE_DATA_TYPE"
  val isAutoIncrementCol = "IS_AUTOINCREMENT"
  val isGeneratedColumnCol = "IS_GENERATEDCOLUMN"
}
/** One row of DatabaseMetaData.getColumns decoded into typed fields.
  * Optional fields correspond to metadata columns that may be SQL NULL. */
case class Column(
  catalogName: Option[String],
  schemaName: Option[String],
  tableName: String,
  name: String,
  dataType: DataType.Value,
  typeName: String,
  columnSize: Int,
  decimalDigits: Option[Int],
  numPrecRadix: Int,
  nullable: Nullable.Value,
  remarks: Option[String],
  columnDefault: Option[String],
  characterOctetLength: Int,
  ordinalPosition: Int,
  isNullable: YesNo.YesNo,
  sourceDataType: Option[Short],
  isAutoIncrement: YesNo.YesNo,
  isGeneratedColumn: YesNo.YesNo)
/** Result kind of a stored function (DatabaseMetaData.getFunctions
  * FUNCTION_TYPE column). The literal ids 0/1/2 presumably match
  * DatabaseMetaData.functionResultUnknown / functionNoTable /
  * functionReturnsTable — TODO confirm against the JDBC constants. */
object FunctionType extends Enumeration {
  case class Val protected (typeId: Int) extends super.Val
  implicit def valueToNullableVal(x: Value): Val = x.asInstanceOf[Val]
  /** Looks up a value by its JDBC type id; None when the id is unknown. */
  def findById(typeId: Int) =
    values.toList.find(_.typeId == typeId)
  val FunctionResultUnknown = Val(0)
  val FunctionNoTable = Val(1)
  val FunctionReturnsTable = Val(2)
}
/** One row of DatabaseMetaData.getFunctions. */
case class Function(
  catalogName: Option[String],
  schemaName: Option[String],
  functionName: String,
  remarks: Option[String],
  functionType: FunctionType.Value,
  specificName: String)
/** One row of DatabaseMetaData.getImportedKeys: a foreign-key column of the
  * queried table together with the primary-key column it references. */
case class ImportedKeys(
  primaryKeyTableCatalogName: Option[String],
  primaryKeyTableSchemaName: Option[String],
  primaryKeyTableName: String,
  primaryKeyColumnName: String,
  foreignKeyTableCatalogName: Option[String],
  foreignKeyTableSchemaName: Option[String],
  foreignKeyTableName: String,
  foreignKeyColumnName: String,
  keySequence: Short,
  updateRule: UpdateDeleteRule.Value,
  deleteRule: UpdateDeleteRule.Value,
  foreignKeyName: Option[String],
  primaryKeyName: Option[String],
  deferrability: Deferrability.Value)
/** Index kinds from DatabaseMetaData.getIndexInfo (TYPE column). */
object IndexType extends Enumeration {
  type IndexType = Value
  case class Val protected (index: Short) extends super.Val
  implicit def valueToNullableVal(x: Value): Val = x.asInstanceOf[Val]
  /** Looks up a value by its JDBC short code; None when the code is unknown. */
  def findByShort(s: Short): Option[Value] = values.find(_.index == s)
  val tableIndexStatistic = Val(DatabaseMetaData.tableIndexStatistic)
  val tableIndexClustered = Val(DatabaseMetaData.tableIndexClustered)
  val tableIndexHashed = Val(DatabaseMetaData.tableIndexHashed)
  val tableIndexOther = Val(DatabaseMetaData.tableIndexOther)
}
/** Index column sort direction ("A" ascending / "D" descending), as encoded
  * in the ASC_OR_DESC column of getIndexInfo. */
object AscDesc extends Enumeration {
  type AscDesc = Value
  case class Val protected (name: String) extends super.Val
  implicit def valueToNullableVal(x: Value): Val = x.asInstanceOf[Val]
  /** Exact-match lookup on the one-letter code; None when unrecognized. */
  def findByString(str: String) = values.find(_.name == str)
  val asc = Val("A")
  val desc = Val("D")
}
/** One row of DatabaseMetaData.getIndexInfo. */
case class IndexInfo(
  tableCatalog: Option[String],
  tableSchema: Option[String],
  tableName: String,
  nonUnique: Boolean,
  indexQualifier: Option[String],
  indexName: String,
  indexType: IndexType.IndexType,
  ordinalPosition: Short,
  columnName: Option[String],
  ascOrDesc: Option[AscDesc.AscDesc],
  cardinality: Int,
  pages: Int,
  // presumably the FILTER_CONDITION metadata column — TODO confirm
  filterPosition: Option[String]
)
/** Known table kinds from DatabaseMetaData.getTableTypes. Drivers may report
  * vendor-specific types not listed here, hence the Either-returning lookup. */
object TableType extends Enumeration {
  type TableType = Value
  case class Val protected (name: String) extends super.Val
  implicit def valueToNullableVal(x: Value): Val = x.asInstanceOf[Val]
  /** Case-insensitive lookup; returns Left(str) for an unrecognized type so
    * the raw driver string is preserved instead of being dropped. */
  def findByStr(str: String): Either[String, Value] =
    values.toList.find(_.name.equalsIgnoreCase(str)).toRight(str)
  val Table = Val("TABLE")
  val View = Val("VIEW")
  val SystemTable = Val("SYSTEM TABLE")
  val GlobalTemporary = Val("GLOBAL TEMPORARY")
  val LocalTemporary = Val("LOCAL TEMPORARY")
  val Alias = Val("ALIAS")
  val Synonym = Val("SYNONYM")
  val Index = Val("INDEX")
  val Sequence = Val("SEQUENCE")
}
/** How values in a table's self-referencing column are generated
  * (REF_GENERATION column of DatabaseMetaData.getTables: SYSTEM, USER
  * or DERIVED). */
object ReferenceGeneration extends Enumeration {
  /** Value-type alias for this enumeration. */
  type ReferenceGeneration = Value
  /** Original, misspelled alias; kept so existing callers keep compiling. */
  @deprecated("Use ReferenceGeneration instead (this alias is a typo)", "")
  type ReverenceGeneration = Value
  case class Val protected (name: String) extends super.Val
  /** Exact-match lookup on the REF_GENERATION string; None when unknown. */
  def findByString(str: String) = values.find(_.name == str)
  implicit def valueToNullableVal(x: Value): Val = x.asInstanceOf[Val]
  val System = Val("SYSTEM")
  val User = Val("USER")
  val Derived = Val("DERIVED")
}
/** One row of DatabaseMetaData.getTables. tableType is Left(rawString) when
  * the driver reported a type not covered by the TableType enumeration. */
case class Table(
  catalogName: Option[String],
  schemaName: Option[String],
  name: String,
  tableType: Either[String, TableType.Value],
  remarks: Option[String],
  typesCatalog: Option[String],
  typesSchema: Option[String],
  typeName: Option[String],
  selfReferencingColumnName: Option[String],
  referenceGeneration: Option[ReferenceGeneration.Value])
/** One row of DatabaseMetaData.getTablePrivileges: a single privilege grant
  * on a table. */
case class TablePrivilege(
  catalogName: Option[String],
  schemaName: Option[String],
  name: String,
  grantor: Option[String],
  grantee: String,
  privilege: String,
  isGrantable: YesNo.Value)
/** WHERE-clause searchability of a SQL type, from DatabaseMetaData.getTypeInfo
  * (SEARCHABLE column). NOTE(review): the last two values use lowerCamelCase
  * while the first two use UpperCamelCase; renaming would break callers, so
  * the inconsistency is only flagged here. */
object Searchable extends Enumeration {
  case class Val(searchableId: Int, name: String, description: String) extends super.Val
  implicit def valueToNullableVal(x: Value): Val = x.asInstanceOf[Val]
  /** Looks up a value by its JDBC searchable code; None when unknown. */
  def findBySearchableId(searchableId: Int): Option[Value] =
    values.find(_.searchableId == searchableId)
  val TypePredNone = Val(DatabaseMetaData.typePredNone, "typePredNone", "No support")
  val TypePredChar =
    Val(DatabaseMetaData.typePredChar, "typePredChar", "Only supported with WHERE .. LIKE")
  val typePredBasic =
    Val(DatabaseMetaData.typePredBasic, "typePredBasic", "Supported except for WHERE .. LIKE")
  val typeSearchable =
    Val(DatabaseMetaData.typeSearchable, "typeSearchable", "Supported for all WHERE ..")
}
/** One row of DatabaseMetaData.getTypeInfo: a SQL type supported by the
  * database. NOTE(review): the last field name is misspelled
  * ("numericalPrecistionRadix"); it is part of the public case-class API,
  * so it is kept as-is and only flagged here. */
case class TypeInfo(
  typeName: String,
  dataType: DataType.Value,
  precision: Int,
  literalPrefix: Option[String],
  literalSuffix: Option[String],
  createParameters: Option[String],
  nullable: Nullable.Value,
  caseSensitive: Boolean,
  searchable: Searchable.Value,
  isUnsigned: Boolean,
  fixedPrecisionScale: Boolean,
  autoIncrement: Boolean,
  localTypeName: Option[String],
  minimumScale: Short,
  maximumScale: Short,
  numericalPrecistionRadix: Int
)
/** One row of DatabaseMetaData.getPrimaryKeys: one column of a table's
  * primary key, with its 1-based position within the key. */
case class PrimaryKey(
  catalogName: Option[String],
  schemaName: Option[String],
  tableName: String,
  columnName: String,
  keySequence: Short,
  name: Option[String])
/** Return-value kind of a stored procedure (DatabaseMetaData.getProcedures
  * PROCEDURE_TYPE column). */
object ProcedureType extends Enumeration {
  case class Val(procedureTypeId: Int, name: String, description: String) extends super.Val
  implicit def valueToNullableVal(x: Value): Val = x.asInstanceOf[Val]
  /** Looks up a value by its JDBC procedure-type code; None when unknown. */
  def findByProcedureTypeId(procedureTypeId: Int): Option[Value] =
    values.find(_.procedureTypeId == procedureTypeId)
  val ProcedureResultUnknown = Val(DatabaseMetaData.procedureResultUnknown, "procedureResultUnknown", "Cannot determine if a return value will be returned")
  val ProcedureNoResult = Val(DatabaseMetaData.procedureNoResult, "procedureNoResult", "Does not return a return value")
  val ProcedureReturnsResult = Val(DatabaseMetaData.procedureReturnsResult, "procedureReturnsResult", "Returns a return value")
}
/** One row of DatabaseMetaData.getProcedures. */
case class Procedure(
  catalogName: Option[String],
  schemaName: Option[String],
  name: String,
  remarks: Option[String],
  procedureType: ProcedureType.Value,
  specificName: String)
/** A named schema together with the tables discovered in it. */
case class Schema(name: String, tables: List[Table])
}
|
#!/bin/bash
set -ex
# Avoid contaminating the go.mod/go.sum files.
# TODO(kortschak): Remove when golang/go#30515 is resolved
WORK=$(mktemp -d)
# With `set -e` any failing command aborts the script, which would have
# skipped the final cleanup; a trap guarantees the temp dir is removed.
trap 'rm -rf "$WORK"' EXIT
pushd "$WORK"
# Required for format check.
go get golang.org/x/tools/cmd/goimports
# Required for linting check: apparently golangci-lint should not be installed using go get (https://github.com/golangci/golangci-lint#go).
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b "$(go env GOPATH)/bin" v1.23.8
# Required for imports check.
go get gonum.org/v1/tools/cmd/check-imports
# Required for copyright header check.
go get gonum.org/v1/tools/cmd/check-copyright
# Required for coverage.
go get golang.org/x/tools/cmd/cover
go get github.com/mattn/goveralls
# Required for dot parser checks.
go get github.com/goccmack/gocc@66c61e9
# Clean up (the EXIT trap above removes $WORK).
# TODO(kortschak): Remove when golang/go#30515 is resolved.
popd
|
// ESLint configuration for a TypeScript + Node project: type-aware parsing,
// import ordering, and Prettier formatting enforced as lint errors.
module.exports = {
  root: true,
  parser: '@typescript-eslint/parser',
  plugins: ['@typescript-eslint', 'import', 'prettier'],
  extends: [
    'eslint:recommended',
    'plugin:@typescript-eslint/recommended',
    'plugin:node/recommended',
  ],
  settings: {
    // Teach eslint-plugin-node and eslint-plugin-import how to resolve
    // TypeScript sources and declaration files.
    node: {
      tryExtensions: ['.js', '.json', '.ts', '.d.ts'],
    },
    'import/extensions': ['.js', '.json', '.ts', '.d.ts'],
    'import/external-module-folders': ['node_modules', 'node_modules/@types'],
    'import/parsers': {
      '@typescript-eslint/parser': ['.ts'],
    },
    'import/resolver': {
      node: {
        extensions: ['.js', '.json', '.ts', '.d.ts'],
      },
    },
  },
  rules: {
    // Force `import type { ... }` for type-only imports.
    '@typescript-eslint/consistent-type-imports': ['error', { prefer: 'type-imports' }],
    // Group and alphabetize imports, with blank lines between groups.
    'import/order': [
      'error',
      {
        groups: ['builtin', 'external', 'internal', 'parent', 'sibling', 'index', 'object'],
        'newlines-between': 'always',
        alphabetize: {
          order: 'asc',
          caseInsensitive: true,
        },
      },
    ],
    'import/no-cycle': ['error'],
    'prettier/prettier': 'error',
  },
  overrides: [
    // Relax return-type and ESM-syntax rules for all TypeScript sources.
    {
      files: ['*.ts'],
      rules: {
        '@typescript-eslint/explicit-function-return-type': 'off',
        '@typescript-eslint/explicit-module-boundary-types': 'off',
        '@typescript-eslint/no-use-before-define': 'off',
        'node/no-unsupported-features/es-syntax': 'off',
      },
    },
    // Test files may use require() and import dev-only modules.
    {
      files: ['*.spec.ts'],
      rules: {
        '@typescript-eslint/no-var-requires': 'off',
        'node/no-missing-import': 'off',
      },
    },
  ],
};
|
module.exports.run = async(bot, message, args) => {
console.log("Help has been run")
message.channel.send(`Commands:
!wlhelp - this command
!wlsave <Pasted Wordle Score> - add your Wordle score
!wltop - See the top 5 players on this server
!wltop all - see the top 5 players on all servers
!wltop avg - See the top 5 player averages on this server
!wltop avg-all - See the top 5 player averages on all servers
!wltop daily - See the top scores for today's wordle
!wltop daily-all - See the top scores for today's wordle across all servers
!wlavg - See your average score
!wlavg daily - See the average score for todays wordle.
!wlpoints - See your current points
!wlwins - See your wins, losses, and win rate
!wlget <worlde number> - See your score, and the solution for that day.
!wlsolution <wordle number> - Get the solution for that day`)
}
module.exports.config = {
command: "help"
} |
<filename>CSPM/plane.h<gh_stars>10-100
///////////////////////////////////////////////////////
// File: Plane.h
// Desc: Simple plane class to record plane param for
// for each pixel
//
// Author: rookiepig
// Date: 2014/04/01
///////////////////////////////////////////////////////
#pragma once
#include"commfunc.h"
// Per-pixel plane model: stored as a normal/point pair plus the derived
// parametric form param_ = (a, b, c) with disparity d(x, y) = a*x + b*y + c.
// Vec3d/Point3d come from commfunc.h — presumably OpenCV aliases; TODO confirm.
class Plane {
public:
    Plane() : norm_(0, 0, 0), point_(0, 0, 0), param_(0, 0, 0 ) {}
    Plane(const Vec3d& norm, const Point3d& point) :
        norm_(norm), point_(point) {
        // Derive param_ immediately so a freshly built plane is consistent.
        update_param();
    }
    // NOTE(review): neither setter refreshes param_; callers must invoke
    // update_param() after changing point_ or norm_ — confirm this is intended.
    void set_point(const Point3d& point) {
        point_ = point;
    }
    // NOTE(review): parameter is declared Point3d although norm_ is Vec3d;
    // presumably the two types are interchangeable — TODO confirm.
    void set_norm(const Point3d& norm) {
        norm_ = norm;
    }
    // Recomputes param_ from norm_ and point_.
    inline void update_param() {
        // avoid divide by 0: clamp |z| to kDoubleEps, preserving its sign
        double denom = max(fabs(norm_[2]), kDoubleEps);
        if (norm_[2] < 0.0) {
            denom = -denom;
        }
        param_[0] = -norm_[0] / denom;
        param_[1] = -norm_[1] / denom;
        param_[2] = norm_.dot(point_) / denom;
    }
    Vec3d norm() const {
        return norm_;
    }
    Point3d point() const {
        return point_;
    }
    Vec3d param() const {
        return param_;
    }
private:
    Vec3d norm_;    // plane normal
    Point3d point_; // a point on the plane
    Vec3d param_;   // derived (a, b, c) coefficients; see update_param()
};
|
<filename>js/direcives/kitchensinkDirectives/edObjectsPanelSortableDirective.js
// Directive that makes the objects panel drag-sortable (jQuery UI sortable)
// and mirrors each reorder into the global fabric.js `canvas` z-order.
kitchensink.directive('edObjectsPanelSortable', ['$rootScope', function ($rootScope) {
    return {
        link: function ($scope, el) {
            var oldIndex, newIndex, obj;
            el.sortable({
                items: '.object:visible',
                scroll: false,
                containment: 'parent',
                start: function (e, ui) {
                    //oldIndex = $(ui.item).index();
                    // Remember the dragged item's original position on the item itself.
                    ui.item.data('start', ui.item.index());
                },
                update: function (e, ui) {
                    //newIndex = $(ui.item).index();
                    var start = ui.item.data('start'),
                        end = ui.item.index();
                    // Move the object inside the fabric internal array to match the DOM order.
                    canvas.fabric._objects.splice(end, 0,
                        canvas.fabric._objects.splice(start, 1)[0]);
                    // NOTE(review): reads canvas._objects here but mutated
                    // canvas.fabric._objects above — confirm which one is live.
                    $scope.objects = canvas._objects;
                    $scope.$apply();
                    console.log($scope.objects);
                    obj = canvas.getObjects()[start];
                    if (!obj) return;
                    if (end > start) {
                        //send object forwards by the amount of objects it passed
                        for (var i = 0; i < (end - start); i++) {
                            canvas.bringForward(obj);
                            canvas.renderAll();
                        }
                    } else {
                        //send object backwards by the amount of objects it passed
                        for (var i = 0; i < (start - end); i++) {
                            canvas.sendBackwards(obj);
                            canvas.renderAll();
                        }
                    }
                    $rootScope.$apply(function () {
                        canvas.renderAll();
                        // NOTE(review): assigns false to the local `start`
                        // index variable — looks like leftover code; confirm.
                        start = false;
                    });
                }
            })
        }
    };
}]);
<gh_stars>0
const hbs = require('hbs');
const path = require('path');

// Registers every Handlebars partial template found in ../views/partials
// (relative to this file) with the hbs view engine.
module.exports.partialsReg = () => {
    const partialsDir = path.join(__dirname, '../views/partials');
    hbs.registerPartials(partialsDir);
};
|
<reponame>starshiptroopers/traccargo
// Code generated by go-swagger; DO NOT EDIT.
package models
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
)
// Driver driver
//
// swagger:model Driver
// Driver driver
//
// swagger:model Driver
type Driver struct {

	// attributes
	Attributes interface{} `json:"attributes,omitempty"`

	// id
	ID int64 `json:"id,omitempty"`

	// name
	Name string `json:"name,omitempty"`

	// unique Id
	UniqueID string `json:"uniqueId,omitempty"`
}

// Validate validates this driver
// (no constrained properties, so validation is a no-op).
func (m *Driver) Validate(formats strfmt.Registry) error {
	return nil
}

// ContextValidate validates this driver based on context it is used
// (no context-dependent properties, so validation is a no-op).
func (m *Driver) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
	return nil
}

// MarshalBinary interface implementation
func (m *Driver) MarshalBinary() ([]byte, error) {
	if m == nil {
		return nil, nil
	}
	return swag.WriteJSON(m)
}

// UnmarshalBinary interface implementation
func (m *Driver) UnmarshalBinary(b []byte) error {
	var res Driver
	if err := swag.ReadJSON(b, &res); err != nil {
		return err
	}
	*m = res
	return nil
}
|
# Launch the CheckMate main class with the bundled libraries on the classpath.
# "$@" (quoted) forwards the script's arguments verbatim; the original
# unquoted $@ would re-split arguments containing spaces.
java -cp .:checkmate.jar:lib/jlfgr-1.0.jar:lib/jrandom.jar:lib/log4j-api-2.12.0.jar:lib/log4j-core-2.12.0.jar:lib/FUSE-1.01.jar:lib/xercesImpl-2.12.0.jar:lib/xml-apis-1.4.01.jar checkmate.CMMain "$@"
|
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See License.txt in the project root.
package com.microsoft.alm.plugin.idea.tfvc.core;
import com.google.common.collect.ImmutableList;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.changes.ContentRevision;
import com.intellij.openapi.vcs.rollback.RollbackProgressListener;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.microsoft.alm.plugin.context.ServerContext;
import com.microsoft.alm.plugin.external.utils.CommandUtils;
import com.microsoft.alm.plugin.idea.IdeaAbstractTest;
import com.microsoft.alm.plugin.idea.tfvc.core.tfs.TfsFileUtil;
import com.microsoft.tfs.model.connector.TfsPath;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.when;
import static org.powermock.api.mockito.PowerMockito.doThrow;
import static org.powermock.api.mockito.PowerMockito.verifyStatic;
@RunWith(PowerMockRunner.class)
@PrepareForTest({
CommandUtils.class,
LocalFileSystem.class,
ServiceManager.class,
TfsFileUtil.class,
TfvcClient.class
})
public class TFSRollbackEnvironmentTest extends IdeaAbstractTest {
TFSRollbackEnvironment rollbackEnvironment;
List<String> filePaths = SystemInfo.isWindows
? ImmutableList.of("\\path\\to\\file1", "\\path\\to\\file2", "\\path\\to\\file3")
: ImmutableList.of("/path/to/file1", "/path/to/file2", "/path/to/file3");
List<VcsException> exceptions;
List<Change> changes;
@Mock
TFSVcs mockTFSVcs;
@Mock
Project mockProject;
@Mock
RollbackProgressListener mockRollbackProgressListener;
@Mock
ServerContext mockServerContext;
@Mock
LocalFileSystem mockLocalFileSystem;
@Mock
FilePath filePath1, filePath2, filePath3;
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
PowerMockito.mockStatic(
CommandUtils.class,
LocalFileSystem.class,
ServiceManager.class,
TfsFileUtil.class,
TfvcClient.class);
when(mockTFSVcs.getServerContext(anyBoolean())).thenReturn(mockServerContext);
when(TfvcClient.getInstance()).thenReturn(new ClassicTfvcClient());
when(LocalFileSystem.getInstance()).thenReturn(mockLocalFileSystem);
when(TfsFileUtil.createLocalPath(any(String.class))).thenCallRealMethod();
when(TfsFileUtil.createLocalPath(any(FilePath.class))).thenCallRealMethod();
when(TfsFileUtil.getPathItem(any(TfsPath.class))).thenCallRealMethod();
when(filePath1.getPath()).thenReturn("/path/to/file1");
when(filePath2.getPath()).thenReturn("/path/to/file2");
when(filePath3.getPath()).thenReturn("/path/to/file3");
exceptions = new ArrayList<>();
rollbackEnvironment = new TFSRollbackEnvironment(mockTFSVcs, mockProject);
}
@Test
public void testRollbackChanges_Happy() {
setupRollbackChanges();
VirtualFile mockVirtualFile = mock(VirtualFile.class);
VirtualFile mockVirtualFileParent = mock(VirtualFile.class);
when(CommandUtils.undoLocalFiles(mockServerContext, filePaths)).thenReturn(filePaths);
when(mockVirtualFileParent.exists()).thenReturn(true);
when(mockVirtualFile.getParent()).thenReturn(mockVirtualFileParent);
when(mockLocalFileSystem.findFileByIoFile(any(File.class))).thenReturn(mockVirtualFile);
rollbackEnvironment.rollbackChanges(changes, exceptions, mockRollbackProgressListener);
ArgumentCaptor<List> arg = ArgumentCaptor.forClass(List.class);
verifyStatic(times(1));
TfsFileUtil.refreshAndMarkDirty(eq(mockProject), arg.capture(), eq(true));
assertEquals(3, arg.getValue().size());
assertTrue(exceptions.isEmpty());
}
@Test
public void testRollbackChanges_Exception() {
setupRollbackChanges();
when(CommandUtils.undoLocalFiles(mockServerContext, filePaths)).thenThrow(new RuntimeException("test error"));
rollbackEnvironment.rollbackChanges(changes, exceptions, mockRollbackProgressListener);
verifyStatic(never());
TfsFileUtil.refreshAndMarkDirty(any(Project.class), any(List.class), anyBoolean());
assertEquals(1, exceptions.size());
}
@Test
public void testRollbackMissingFileDeletion_Happy() {
rollbackEnvironment.rollbackMissingFileDeletion(ImmutableList.of(filePath1, filePath2, filePath3),
exceptions, mockRollbackProgressListener);
verifyStatic(times(1));
CommandUtils.forceGetFile(mockServerContext, "/path/to/file1");
CommandUtils.forceGetFile(mockServerContext, "/path/to/file2");
CommandUtils.forceGetFile(mockServerContext, "/path/to/file3");
}
@Test
public void testRollbackMissingFileDeletion_Excepion() throws Exception {
doThrow(new RuntimeException("test error")).when(CommandUtils.class, "forceGetFile", mockServerContext, "/path/to/file1");
rollbackEnvironment.rollbackMissingFileDeletion(ImmutableList.of(filePath1), exceptions, mockRollbackProgressListener);
assertEquals(1, exceptions.size());
}
private void setupRollbackChanges() {
Change change1 = mock(Change.class);
Change change2 = mock(Change.class);
Change change3 = mock(Change.class);
changes = ImmutableList.of(change1, change2, change3);
ContentRevision contentRevision1 = mock(ContentRevision.class);
ContentRevision contentRevision2 = mock(ContentRevision.class);
ContentRevision contentRevision3 = mock(ContentRevision.class);
when(change1.getType()).thenReturn(Change.Type.DELETED);
when(change2.getType()).thenReturn(Change.Type.MODIFICATION);
when(change3.getType()).thenReturn(Change.Type.NEW);
when(contentRevision1.getFile()).thenReturn(filePath1);
when(contentRevision2.getFile()).thenReturn(filePath2);
when(contentRevision3.getFile()).thenReturn(filePath3);
when(change1.getBeforeRevision()).thenReturn(contentRevision1);
when(change2.getAfterRevision()).thenReturn(contentRevision2);
when(change3.getAfterRevision()).thenReturn(contentRevision3);
}
} |
# Build the Angular UI bundle. Abort on any failure: without this, a failed
# cd would run yarn in whatever directory the script was invoked from.
set -e
cd core/src/main/resources/ui
yarn install
yarn ng build
# Run the bbc-news image with ./src bind-mounted at /app.
# "$PWD" is quoted so paths containing spaces survive word splitting.
sudo docker run -v "$PWD/src":/app/ --rm -it bbc-news
|
/*********************************************************************
* Software License Agreement (BSD License)
*
* Copyright (c) 2016, AVRORA ROBOTICS LLC
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of AVRORA ROBOTICS LLC nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*********************************************************************/
/* Author: <NAME>
Desc:
*/
#ifndef UMOTOPACKETS_H
#define UMOTOPACKETS_H
#include <vector>
#include <inttypes.h>
#include <cstring>
#include <stdexcept>
#include "umototypes.h"
namespace umoto_interface
{
// Protocol framing constants: two-byte preambles (little-endian byte pairs)
// distinguishing host requests from controller responses.
const uint16_t REQ_PREAMBLE = 0x6E | (0x70<<8);
const uint16_t RESP_PREAMBLE = 0xA1 | (0xA2<<8);
// Packet type identifiers carried in the header's type_id field.
// NOTE(review): control and data packets share id 1; presumably the preamble
// disambiguates the direction — confirm against the protocol spec.
const uint8_t UMotoControlPacketID = 1;
const uint8_t UMotoDataPacketID = 1;
const uint8_t UMotoVersionRequestPacketID = 16;
// Byte-level (de)serialization and CRC computation for UMoto packets.
// Packets are packed PODs (see #pragma pack below), so a raw memory copy
// is the wire format.
class PacketHandler
{
public:
    // Copies the packet's raw bytes into a vector (wire representation).
    template<typename UMotoPacket> static std::vector<uint8_t> serialize(const UMotoPacket& packet)
    {
        std::vector<uint8_t> bytes;
        bytes.assign(reinterpret_cast<const uint8_t*>(&packet),
                     reinterpret_cast<const uint8_t*>(&packet) + sizeof(packet));
        return bytes;
    }

    // Reconstructs a packet from raw bytes; throws std::range_error when the
    // input length does not match the packet size exactly.
    template<typename UMotoPacket> static UMotoPacket deserialize(const std::vector<uint8_t>& raw_data)
    {
        UMotoPacket packet;
        if (raw_data.size()==sizeof(packet))
        {
            std::memcpy(&packet, raw_data.data(), sizeof(packet));
        }
        else
        {
            throw std::range_error("Message deserialization error: raw_data length mismatch");
        }
        return packet;
    }

    // Table-driven 16-bit CRC over the packet bytes, skipping the preamble at
    // the front and the trailing CRC field itself. The 0xFF/0xFF seed and
    // split hi/lo tables look like the Modbus CRC-16 scheme — TODO confirm
    // against the device protocol documentation.
    template<typename UMotoPacket> static uint16_t calcCRC(const UMotoPacket* packet)
    {
        const uint8_t CRC16_Hi[] = {
            0x00, 0xc0, 0xc1, 0x01, 0xc3, 0x03, 0x02, 0xc2,
            0xc6, 0x06, 0x07, 0xc7, 0x05, 0xc5, 0xc4, 0x04,
            0xcc, 0x0c, 0x0d, 0xcd, 0x0f, 0xcf, 0xce, 0x0e,
            0x0a, 0xca, 0xcb, 0x0b, 0xc9, 0x09, 0x08, 0xc8,
            0xd8, 0x18, 0x19, 0xd9, 0x1b, 0xdb, 0xda, 0x1a,
            0x1e, 0xde, 0xdf, 0x1f, 0xdd, 0x1d, 0x1c, 0xdc,
            0x14, 0xd4, 0xd5, 0x15, 0xd7, 0x17, 0x16, 0xd6,
            0xd2, 0x12, 0x13, 0xd3, 0x11, 0xd1, 0xd0, 0x10,
            0xf0, 0x30, 0x31, 0xf1, 0x33, 0xf3, 0xf2, 0x32,
            0x36, 0xf6, 0xf7, 0x37, 0xf5, 0x35, 0x34, 0xf4,
            0x3c, 0xfc, 0xfd, 0x3d, 0xff, 0x3f, 0x3e, 0xfe,
            0xfa, 0x3a, 0x3b, 0xfb, 0x39, 0xf9, 0xf8, 0x38,
            0x28, 0xe8, 0xe9, 0x29, 0xeb, 0x2b, 0x2a, 0xea,
            0xee, 0x2e, 0x2f, 0xef, 0x2d, 0xed, 0xec, 0x2c,
            0xe4, 0x24, 0x25, 0xe5, 0x27, 0xe7, 0xe6, 0x26,
            0x22, 0xe2, 0xe3, 0x23, 0xe1, 0x21, 0x20, 0xe0,
            0xa0, 0x60, 0x61, 0xa1, 0x63, 0xa3, 0xa2, 0x62,
            0x66, 0xa6, 0xa7, 0x67, 0xa5, 0x65, 0x64, 0xa4,
            0x6c, 0xac, 0xad, 0x6d, 0xaf, 0x6f, 0x6e, 0xae,
            0xaa, 0x6a, 0x6b, 0xab, 0x69, 0xa9, 0xa8, 0x68,
            0x78, 0xb8, 0xb9, 0x79, 0xbb, 0x7b, 0x7a, 0xba,
            0xbe, 0x7e, 0x7f, 0xbf, 0x7d, 0xbd, 0xbc, 0x7c,
            0xb4, 0x74, 0x75, 0xb5, 0x77, 0xb7, 0xb6, 0x76,
            0x72, 0xb2, 0xb3, 0x73, 0xb1, 0x71, 0x70, 0xb0,
            0x50, 0x90, 0x91, 0x51, 0x93, 0x53, 0x52, 0x92,
            0x96, 0x56, 0x57, 0x97, 0x55, 0x95, 0x94, 0x54,
            0x9c, 0x5c, 0x5d, 0x9d, 0x5f, 0x9f, 0x9e, 0x5e,
            0x5a, 0x9a, 0x9b, 0x5b, 0x99, 0x59, 0x58, 0x98,
            0x88, 0x48, 0x49, 0x89, 0x4b, 0x8b, 0x8a, 0x4a,
            0x4e, 0x8e, 0x8f, 0x4f, 0x8d, 0x4d, 0x4c, 0x8c,
            0x44, 0x84, 0x85, 0x45, 0x87, 0x47, 0x46, 0x86,
            0x82, 0x42, 0x43, 0x83, 0x41, 0x81, 0x80, 0x40
        };
        const uint8_t CRC16_Lo[] = {
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40,
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40,
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40,
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40,
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40,
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40,
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40,
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40,
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40,
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40,
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x00, 0xc1, 0x81, 0x40, 0x01, 0xc0, 0x80, 0x41,
            0x01, 0xc0, 0x80, 0x41, 0x00, 0xc1, 0x81, 0x40
        };
        uint8_t Temp, CRCHi = 0xFF, CRCLo = 0xFF;
        const uint8_t *ptr = (const uint8_t*)packet;
        // Start after the preamble; stop before the trailing CRC field, so the
        // CRC covers header (minus preamble) plus payload only.
        for (unsigned int Index = sizeof(packet->header.preamble); Index<sizeof(*packet)-sizeof(packet->CRC); Index++)
        {
            Temp = CRC16_Lo[ptr[Index]^CRCLo]^CRCHi;
            CRCHi = CRC16_Hi[ptr[Index]^CRCLo];
            CRCLo = Temp;
        }
        // Pack the two running bytes into a single 16-bit result (hi byte first).
        return (((CRCHi << 8) + CRCLo));
    }
};
// Packed to 1-byte alignment: the in-memory layout IS the wire format used by
// PacketHandler::serialize/deserialize.
#pragma pack(push)
#pragma pack(1)
// Generic framed packet: fixed header (preamble, device number, type id,
// payload length), a typed payload, and a trailing CRC.
template<typename T, uint16_t Preamble, uint8_t TypeID>
struct UMotoPacket
{
    struct Header
    {
        uint16_t preamble = Preamble;       // frame marker (request or response)
        uint8_t unimoto_num = 0;            // target device number
        uint8_t type_id = TypeID;           // packet type identifier
        uint8_t data_length = sizeof(data_field); // payload size in bytes
    } header;
    T data_field;                           // typed payload
    uint16_t CRC = 0;                       // checksum over header+payload (see calcCRC)
    // Payload constructor: CRC is computed last in declaration order, after
    // header and data_field are initialized, and calcCRC excludes the CRC
    // bytes themselves, so the ordering here is safe.
    UMotoPacket(const T& data)
        : data_field(data), CRC(PacketHandler::calcCRC(this))
    {
    }
    // Default constructor: CRC stays 0 until the caller fills the packet and
    // recomputes it (used by deserialize).
    UMotoPacket()
    {
    }
};
#pragma pack(pop)
// Concrete packet instantiations for the three message types in umototypes.h.
typedef UMotoPacket<UMotoControl, REQ_PREAMBLE, UMotoControlPacketID> UMotoControlPacket;
typedef UMotoPacket<UMotoData, RESP_PREAMBLE, UMotoDataPacketID> UMotoDataPacket;
typedef UMotoPacket<UMotoVersion, REQ_PREAMBLE, UMotoVersionRequestPacketID> UMotoVersionRequestPacket;
}
#endif // UMOTOPACKETS_H
|
import { Component, Input } from '@angular/core';
// Presentational page-header component: renders a heading and an icon
// supplied by the parent template.
@Component({
    moduleId: module.id,
    selector: 'site-pageheader',
    templateUrl: './site-pageheader.component.html',
    styleUrls: ['./site-pageheader.component.scss']
})
export class SitePageHeaderComponent {
    // Text shown as the page title.
    @Input() heading: string;
    // Icon identifier passed through to the template.
    @Input() icon: string;
}
|
package com.rudeshko.css.rearrange;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.css.*;
import com.intellij.util.containers.Stack;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
/**
 * PSI visitor that walks a CSS file and records an arrangement entry for each
 * block and declaration that falls inside the requested text ranges. Entries
 * are nested via a stack so declarations become children of their block.
 */
public class CssArrangementVisitor extends CssElementVisitor {
    private final CssArrangementParseInfo myInfo;      // collects top-level entries
    private final Collection<TextRange> myRanges;      // only elements intersecting these are recorded
    @NotNull
    private final Stack<CssElementArrangementEntry> myStack = new Stack<CssElementArrangementEntry>();

    public CssArrangementVisitor(@NotNull CssArrangementParseInfo parseInfo, @NotNull Collection<TextRange> ranges) {
        this.myInfo = parseInfo;
        this.myRanges = ranges;
    }

    @Override
    public void visitFile(PsiFile file) {
        // Only CSS files are of interest; other file types are ignored.
        if (file instanceof CssFile) {
            visitCssFile((CssFile) file);
        }
    }

    @Override
    public void visitCssFile(CssFile file) {
        visitCssStylesheet(file.getStylesheet());
    }

    @Override
    public void visitCssStylesheet(CssStylesheet stylesheet) {
        for (CssRuleset ruleset : stylesheet.getRulesets(true)) {
            visitCssRuleset(ruleset);
        }
    }

    // Descends into @media blocks so their rulesets are arranged too.
    // NOTE(review): no @Override here unlike the sibling methods — confirm
    // whether CssElementVisitor declares visitCssMedia.
    public void visitCssMedia(CssMedia media) {
        for (CssRuleset ruleset : media.getRulesets()) {
            visitCssRuleset(ruleset);
        }
    }

    @Override
    public void visitAtRule(CssAtRule atRule) {
        // Of all at-rules, only @media contains rulesets to arrange.
        if (atRule instanceof CssMedia) {
            visitCssMedia((CssMedia) atRule);
        }
    }

    @Override
    public void visitCssRuleset(CssRuleset ruleset) {
        visitCssBlock(ruleset.getBlock());
    }

    @Override
    public void visitCssBlock(CssBlock block) {
        // A block gets an unnamed entry; its declarations are visited as children.
        processEntry(createNewEntry(block.getTextRange(), null), block);
    }

    @Override
    public void visitCssDeclaration(CssDeclaration declaration) {
        createNewEntry(declaration.getTextRange(), declaration.getPropertyName());
    }

    /**
     * Creates an entry for the given range (null when outside the requested
     * ranges) and attaches it to the current stack top, or to myInfo when the
     * stack is empty (top-level entry).
     */
    private
    @Nullable
    CssElementArrangementEntry createNewEntry(@NotNull TextRange range, @Nullable String propertyName) {
        if (!isWithinBounds(range)) {
            return null;
        }
        CssElementArrangementEntry current = getCurrent();
        CssElementArrangementEntry entry = new CssElementArrangementEntry(current, range, propertyName);
        if (current == null) {
            myInfo.addEntry(entry);
        } else {
            current.addChild(entry);
        }
        return entry;
    }

    /**
     * Pushes the entry, visits the PSI element's children so nested entries
     * attach to it, then pops — the finally guarantees stack balance even if
     * a child visit throws.
     */
    private void processEntry(@Nullable CssElementArrangementEntry entry, @Nullable PsiElement nextPsiRoot) {
        if (entry == null || nextPsiRoot == null) {
            return;
        }
        myStack.push(entry);
        try {
            nextPsiRoot.acceptChildren(this);
        } finally {
            myStack.pop();
        }
    }

    @Nullable
    private CssElementArrangementEntry getCurrent() {
        return myStack.isEmpty() ? null : myStack.peek();
    }

    // True when the range intersects any of the requested arrangement ranges.
    private boolean isWithinBounds(@NotNull TextRange range) {
        for (TextRange textRange : myRanges) {
            if (textRange.intersects(range)) {
                return true;
            }
        }
        return false;
    }
}
|
const {array, object, string, integer, empty, oneOf, boolean} = require('@qtk/schema').schema;
// API metadata (the title is a runtime string and is kept verbatim).
const info = {
    title: "小程序-获取小程序码B接口",
    description: ""
};

// Request schema for the mini-program QR-code endpoint. When autoColor is
// false, a manual lineColor (and scene/pagePath/autoColor) must be supplied;
// otherwise only scene and pagePath are required.
const request = object().properties({
    // scene: up to 32 chars from the allowed URL-safe character set
    scene: string().pattern(/[A-Za-z0-9\!\#\$\&\'\(\)\*\+\,\/\:\;\=\?\@\-\.\_\~]{0,32}/).desc('场景'),
    // path of an already-published mini-program page
    pagePath: string().desc('已经发布的小程序存在的页面'),
    // QR-code width in pixels
    width: integer().desc('二维码的宽度'),
    // whether line color is chosen automatically
    autoColor: boolean().desc('自动配置线条颜色'),
    // manual RGB line color, used when autoColor is false
    lineColor: {
        r: integer(),
        g: integer(),
        b: integer()
    },
    // whether the background should be transparent
    isHyaline: boolean().desc('是否需要透明底色')
})
    .if.properties({autoColor: false})
    .then.require('scene', 'pagePath', 'autoColor', 'lineColor')
    .else.require('scene', 'pagePath')
    .endIf

// Response body: presumably the QR-code image payload as a string — TODO
// confirm the encoding against the API implementation.
const response = string()

module.exports = {info, request, response};
<gh_stars>1-10
package ml.banq.atm;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import org.json.JSONObject;
// The settings persistent storage: a JSON-backed key/value store kept in
// ~/banq-atm-settings.json.
public class Settings {
    // The singleton instance
    private static Settings instance = new Settings();

    public static Settings getInstance() {
        return instance;
    }

    // The json hashmap settings storage
    private JSONObject settings;

    private Settings() {
        // Try to read the settings.json file if it exists
        File settingsFile = new File(System.getProperty("user.home") + "/banq-atm-settings.json");
        if (settingsFile.exists() && !settingsFile.isDirectory()) {
            // try-with-resources closes the reader even when readLine or JSON
            // parsing throws (the original leaked the reader on exceptions).
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(settingsFile))) {
                StringBuilder stringBuilder = new StringBuilder();
                String line;
                while ((line = bufferedReader.readLine()) != null) {
                    stringBuilder.append(line);
                    stringBuilder.append(System.lineSeparator());
                }
                settings = new JSONObject(stringBuilder.toString());
            }
            // When error with reading the json create an empty settings storage
            catch (Exception exception) {
                Log.warning(exception);
                settings = new JSONObject();
            }
        } else {
            // Create an empty settings storage
            settings = new JSONObject();
        }
    }

    // Saves the json settings to the settings.json file
    public void save() {
        // try-with-resources guarantees the writer is flushed and closed even
        // if write() throws.
        try (FileWriter settingsFileWriter = new FileWriter(System.getProperty("user.home") + "/banq-atm-settings.json")) {
            settingsFileWriter.write(settings.toString());
        } catch (Exception exception) {
            Log.error(exception);
        }
    }

    // Get a string item from the settings, falling back to defaultValue when
    // the key is absent
    public String getItem(String key, String defaultValue) {
        if (settings.has(key)) {
            return settings.getString(key);
        } else {
            return defaultValue;
        }
    }

    // Set a string item of the settings (call save() to persist)
    public void setItem(String key, String value) {
        settings.put(key, value);
    }

    // Get an int item from the settings, falling back to defaultValue when
    // the key is absent
    public int getItem(String key, int defaultValue) {
        if (settings.has(key)) {
            return settings.getInt(key);
        } else {
            return defaultValue;
        }
    }

    // Set an int item of the settings (call save() to persist)
    public void setItem(String key, int value) {
        settings.put(key, value);
    }
}
|
<filename>src/db/generic/Pair.h
/*
* Copyright (c) CERN 2013-2017
*
* Copyright (c) Members of the EMI Collaboration. 2010-2013
* See http://www.eu-emi.eu/partners for details on the copyright
* holders.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#ifndef PAIR_H
#define PAIR_H
#include <iostream>
#include <string>
#include "common/Uri.h"
/// A (source, destination) storage-endpoint pair, used as a transfer key.
struct Pair {
    std::string source, destination;

    Pair(const std::string &s, const std::string &d): source(s), destination(d) {
    }

    /// True when the transfer stays on the local network
    /// (delegates to fts3::common::isLanTransfer from common/Uri.h).
    bool isLanTransfer() const {
        return fts3::common::isLanTransfer(source, destination);
    }
};
// Required so it can be used as a key on a std::map.
// Lexicographic order: compare sources first, destinations break ties.
inline bool operator < (const Pair &a, const Pair &b) {
    if (a.source != b.source) {
        return a.source < b.source;
    }
    return a.destination < b.destination;
}

// Human-readable form, e.g. "srm://a => srm://b".
inline std::ostream& operator << (std::ostream &os, const Pair &pair) {
    os << pair.source << " => " << pair.destination;
    return os;
}
#endif // PAIR_H
|
-- Select every row whose TIME value falls within the afternoon window;
-- BETWEEN is inclusive on both bounds (12:00:00 and 17:00:00).
-- NOTE(review): TABLE and TIME are reserved words in many SQL dialects;
-- confirm these are placeholders, and quote them if they are real names.
SELECT *
FROM TABLE
WHERE TIME BETWEEN '12:00:00' AND '17:00:00';
<reponame>ang-jason/fip_powerx_mini_projects-foxtrot
// Parcel CLI entry point: registers the serve/watch/build/help commands on
// commander; serve/watch/build all delegate to the shared bundle() action.
require('v8-compile-cache');
const chalk = require('chalk');
const program = require('commander');
const version = require('../package.json').version;

program.version(version);

// "serve": development server. This is also the default command when the
// first argument is not a known command (see the argv rewrite below).
program
  .command('serve [input...]')
  .description('starts a development server')
  .option(
    '-p, --port <port>',
    'set the port to serve on. defaults to 1234',
    parseInt
  )
  .option(
    '--host <host>',
    'set the host to listen on, defaults to listening on all interfaces'
  )
  .option(
    '--hmr-port <port>',
    'set the port to serve HMR websockets, defaults to random',
    parseInt
  )
  .option(
    '--hmr-hostname <hostname>',
    'set the hostname of HMR websockets, defaults to location.hostname of current window'
  )
  .option('--https', 'serves files over HTTPS')
  .option('--cert <path>', 'path to certificate to use with HTTPS')
  .option('--key <path>', 'path to private key to use with HTTPS')
  .option(
    '--open [browser]',
    'automatically open in specified browser, defaults to default browser'
  )
  .option(
    '-d, --out-dir <path>',
    'set the output directory. defaults to "dist"'
  )
  .option(
    '-o, --out-file <filename>',
    'set the output filename for the application entry point.'
  )
  .option(
    '--public-url <url>',
    'set the public URL to serve on. defaults to "/"'
  )
  .option('--global <variable>', 'expose your module through a global variable')
  .option('--no-hmr', 'disable hot module replacement')
  .option('--no-cache', 'disable the filesystem cache')
  .option('--no-source-maps', 'disable sourcemaps')
  .option('--no-autoinstall', 'disable autoinstall')
  .option(
    '-t, --target [target]',
    'set the runtime environment, either "node", "browser" or "electron". defaults to "browser"',
    /^(node|browser|electron)$/
  )
  .option(
    '--bundle-node-modules',
    'force bundling node modules, even on node/electron target'
  )
  .option('-V, --version', 'output the version number')
  .option(
    '--log-level <level>',
    'set the log level, either "0" (no output), "1" (errors), "2" (warnings), "3" (info), "4" (verbose) or "5" (debug, creates a log file).',
    /^([0-5])$/
  )
  .option('--cache-dir <path>', 'set the cache directory. defaults to ".cache"')
  .action(bundle);

// "watch": like serve but without the HTTP server; rebuilds on change.
program
  .command('watch [input...]')
  .description('starts the bundler in watch mode')
  .option(
    '-d, --out-dir <path>',
    'set the output directory. defaults to "dist"'
  )
  .option(
    '-o, --out-file <filename>',
    'set the output filename for the application entry point.'
  )
  .option(
    '--public-url <url>',
    'set the public URL to serve on. defaults to "/"'
  )
  .option('--global <variable>', 'expose your module through a global variable')
  .option(
    '--hmr-port <port>',
    'set the port to serve HMR websockets, defaults to random',
    parseInt
  )
  .option(
    '--hmr-hostname <hostname>',
    'set the hostname of HMR websockets, defaults to location.hostname of current window'
  )
  .option('--https', 'listen on HTTPS for HMR connections')
  .option('--cert <path>', 'path to certificate to use with HTTPS')
  .option('--key <path>', 'path to private key to use with HTTPS')
  .option('--no-hmr', 'disable hot module replacement')
  .option('--no-cache', 'disable the filesystem cache')
  .option('--no-source-maps', 'disable sourcemaps')
  .option('--no-autoinstall', 'disable autoinstall')
  .option(
    '-t, --target [target]',
    'set the runtime environment, either "node", "browser" or "electron". defaults to "browser"',
    /^(node|browser|electron)$/
  )
  .option(
    '--bundle-node-modules',
    'force bundling node modules, even on node/electron target'
  )
  .option(
    '--log-level <level>',
    'set the log level, either "0" (no output), "1" (errors), "2" (warnings), "3" (info), "4" (verbose) or "5" (debug, creates a log file).',
    /^([0-5])$/
  )
  .option('--cache-dir <path>', 'set the cache directory. defaults to ".cache"')
  .action(bundle);

// "build": one-shot production bundle (sets NODE_ENV=production in bundle()).
program
  .command('build [input...]')
  .description('bundles for production')
  .option(
    '-d, --out-dir <path>',
    'set the output directory. defaults to "dist"'
  )
  .option(
    '-o, --out-file <filename>',
    'set the output filename for the application entry point.'
  )
  .option(
    '--public-url <url>',
    'set the public URL to serve on. defaults to "/"'
  )
  .option('--global <variable>', 'expose your module through a global variable')
  .option('--no-minify', 'disable minification')
  .option('--no-cache', 'disable the filesystem cache')
  .option('--no-source-maps', 'disable sourcemaps')
  .option('--no-autoinstall', 'disable autoinstall')
  .option('--no-content-hash', 'disable content hashing')
  .option(
    '--experimental-scope-hoisting',
    'enable experimental scope hoisting/tree shaking support'
  )
  .option(
    '-t, --target <target>',
    'set the runtime environment, either "node", "browser" or "electron". defaults to "browser"',
    /^(node|browser|electron)$/
  )
  .option(
    '--bundle-node-modules',
    'force bundling node modules, even on node/electron target'
  )
  .option(
    '--detailed-report',
    'print a detailed build report after a completed build'
  )
  .option(
    '--log-level <level>',
    'set the log level, either "0" (no output), "1" (errors), "2" (warnings), "3" (info), "4" (verbose) or "5" (debug, creates a log file).',
    /^([0-5])$/
  )
  .option('--cache-dir <path>', 'set the cache directory. defaults to ".cache"')
  .action(bundle);

// "help [command]": prints help for a single command, or general help.
program
  .command('help [command]')
  .description('display help information for a command')
  .action(function(command) {
    let cmd = program.commands.find(c => c.name() === command) || program;
    cmd.help();
  });

program.on('--help', function() {
  console.log('');
  console.log(
    ' Run `' +
      chalk.bold('parcel help <command>') +
      '` for more information on specific commands'
  );
  console.log('');
});

// Make serve the default command except for --help
var args = process.argv;
if (args[2] === '--help' || args[2] === '-h') args[2] = 'help';
if (!args[2] || !program.commands.some(c => c.name() === args[2])) {
  args.splice(2, 0, 'serve');
}

program.parse(args);
// Shared action handler for the serve/watch/build commands.
// `main` is the list of entry points; `command` is the commander Command
// whose parsed options are forwarded to the Bundler as its options object.
async function bundle(main, command) {
  // Require bundler here so the help command is fast
  const Bundler = require('../');

  if (command.name() === 'watch') {
    command.watch = true;
  }

  if (command.name() === 'build') {
    command.production = true;
    process.env.NODE_ENV = process.env.NODE_ENV || 'production';
  } else {
    process.env.NODE_ENV = process.env.NODE_ENV || 'development';
  }

  // --cert/--key together turn the boolean --https into an options object.
  if (command.cert && command.key) {
    command.https = {
      cert: command.cert,
      key: command.key
    };
  }

  command.throwErrors = false;
  command.scopeHoist = command.experimentalScopeHoisting || false;

  const bundler = new Bundler(main, command);

  command.target = command.target || 'browser';
  if (command.name() === 'serve' && command.target === 'browser') {
    const server = await bundler.serve(
      command.port || 1234,
      command.https,
      command.host
    );
    if (server && command.open) {
      await require('./utils/openInBrowser')(
        `${command.https ? 'https' : 'http'}://localhost:${
          server.address().port
        }`,
        command.open
      );
    }
  } else {
    // Await the bundle so failures surface in this async function instead
    // of becoming unhandled promise rejections (the old code dropped the
    // returned promise).
    await bundler.bundle();
  }
}
|
package intercept.server.components;
import org.antlr.stringtemplate.StringTemplate;
import java.util.Arrays;
import java.util.List;
/**
 * An immutable collection of {@link TemplateAttribute}s that can be applied
 * to a {@link StringTemplate} as a unit.
 */
public class TemplateAttributes {
    // Backed by a defensive copy: Arrays.asList on the raw argument would be
    // a live view, letting callers mutate this object through the array.
    protected final List<TemplateAttribute> attributes;

    public TemplateAttributes(final TemplateAttribute[] attributes) {
        this.attributes = Arrays.asList(attributes.clone());
    }

    /** Sets every attribute on the given template. */
    public void applyTo(StringTemplate template) {
        for (TemplateAttribute attribute : attributes) {
            attribute.set(template);
        }
    }

    /** Returns whether an attribute for the given name is present. */
    public boolean contains(String name) {
        for (TemplateAttribute attribute : attributes) {
            if (attribute.isFor(name)) {
                return true;
            }
        }
        return false;
    }
}
|
<filename>db.js
// Aggregates the individual data modules into a single exported lookup object.
module.exports = {
    a: require('./data/a.js'),
    b: require('./data/b.js')
}
def find_minimum(values):
    """Return the smallest element of a non-empty sequence.

    Args:
        values: a sequence of mutually comparable items.

    Returns:
        The smallest item in ``values``.

    Raises:
        ValueError: if ``values`` is empty.
    """
    if not values:
        raise ValueError("find_minimum() arg is an empty sequence")
    smallest = values[0]
    for item in values:
        if item < smallest:
            smallest = item
    return smallest


arr = [1, 2, 7, -8, 9, 0]
minimum = find_minimum(arr)
print(minimum)
<reponame>skekre98/sk
/*
Copyright © 2020 NAME HERE <EMAIL ADDRESS>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package cmd
import (
"fmt"
"strings"
// "errors"
"github.com/pkg/browser"
"github.com/spf13/cobra"
yt "github.com/knadh/go-get-youtube/youtube"
)
// youtubeDownload fetches the video metadata for the given YouTube URL and
// downloads it to "video.mp4" (renamed from the video title), extracting the
// audio to MP3. It returns any error from the metadata fetch or the download.
func youtubeDownload(url string) error {
	video, err := yt.Get(url)
	if err != nil {
		return err
	}
	options := &yt.Option{
		Rename: true, // rename file using video title
		Resume: true, // resume cancelled download
		Mp3:    true, // extract audio to MP3
	}
	// Propagate download failures to the caller; the old code silently
	// discarded the returned error.
	return video.Download(0, "video.mp4", options)
}
// youtubeSearch opens the default browser on the YouTube results page for the
// given query.
func youtubeSearch(query string) {
	// Trim surrounding whitespace before substituting '+': the old code
	// trimmed afterwards, so leading/trailing spaces had already become
	// stray '+' characters that Trim could no longer remove.
	query = strings.TrimSpace(query)
	query = strings.ReplaceAll(query, " ", "+")
	url := fmt.Sprintf("https://www.youtube.com/results?search_query=%s", query)
	browser.OpenURL(url)
}
// youtubeCmd represents the youtube command.
// The "<>" comparisons below match the sentinel default registered for the
// flags in init(), meaning "flag not supplied".
var youtubeCmd = &cobra.Command{
	Use:   "youtube",
	Short: "command to operate youtube from terminal",
	Long: `A command to interact with youtube such as searching,
downloading, and analyzing.`,
	Run: func(cmd *cobra.Command, args []string) {
		query, _ := cmd.Flags().GetString("search")
		link, _ := cmd.Flags().GetString("download")
		// Search takes precedence over download when both flags are given.
		if query != "<>" {
			youtubeSearch(query)
		} else if link != "<>" {
			err := youtubeDownload(link)
			if err != nil {
				fmt.Println("Error:", err.Error())
			}
		} else {
			// Neither flag supplied: show usage.
			cmd.Help()
		}
	},
}
// init registers the youtube command and its flags on the root command.
func init() {
	rootCmd.AddCommand(youtubeCmd)

	// Here you will define your flags and configuration settings.
	// "<>" is the "not supplied" sentinel checked in youtubeCmd.Run.
	youtubeCmd.Flags().StringP("search", "s", "<>", "search query")
	youtubeCmd.Flags().StringP("download", "d", "<>", "youtube download")
}
#!/usr/bin/env bash
# Runs the race-enabled test suite per package and concatenates the
# coverage profiles into coverage.txt (e.g. for upload to codecov).
set -e

echo "" > coverage.txt

# One loop instead of a copy-pasted stanza per package; add new packages here.
for pkg in github.com/tarcisio/gotm1 github.com/tarcisio/gotm1/http; do
    go test -race -coverprofile=profile.out -covermode=atomic "$pkg"
    if [ -f profile.out ]; then
        cat profile.out >> coverage.txt
        rm profile.out
    fi
done
|
#! /usr/bin/env bash
# SPDX-FileCopyrightText: 2021 TQ Tezos <https://tqtezos.com/>
#
# SPDX-License-Identifier: LicenseRef-MIT-TQ

# This script takes the new tag and the OS name as its arguments
# and handles git shenanigans related to syncing two pipelines over a common branch.

set -e

if [ -z "$1" ] || [ -z "$2" ]; then
    echo "Please call this script with the release tag and the OS name."
    exit 1
fi

git config user.name "serokell-bot" # necessary for pushing
git config user.email "tezos-packaging@serokell.io"
git fetch --all

branch_name="auto/update-brew-formulae-$1"
# Git doesn't have an easy way to check out a branch regardless of whether it exists.
if ! git switch "$branch_name"; then
    git switch -c "$branch_name"
    git push --set-upstream origin "$branch_name"
fi

# Try to add hashes and push changes upstream. If there is a collision precisely at the time of
# pushing, that means the other pipeline has raced this one to push. Reset to origin and try again.
while : ; do
    git fetch --all
    git reset --hard origin/"$branch_name"
    ./scripts/bottle-hashes.sh .
    git commit -a -m "[Chore] Add $1 hashes to brew formulae for $2"
    # Push failure here means the other OS pipeline pushed first; loop and retry.
    ! git push || break
done

pr_body="Problem: we have built brew bottles for the new Octez release, but their hashes
aren't in the formulae yet.
Solution: added the hashes.
"
set +e
# We create the PR with the first push, when the other pipeline hasn't finished yet.
# That's why we 'set +e': one of the two times the command will fail.
gh pr create -B master -t "[Chore] Add bottle hashes for $1" -b "$pr_body"
exit 0
|
package pulse.search.direction.pso;
/**
 * Strategy for proposing a particle's next state from its own state and the
 * states of its neighbours (part of the particle-swarm optimisation package).
 */
public interface Mover {

    /**
     * Computes a candidate next state for the given particle.
     *
     * @param p          the particle to move
     * @param neighbours the particles whose states influence the move
     * @return the proposed new state for {@code p}
     */
    public ParticleState attemptMove(Particle p, Particle[] neighbours);

}
|
<reponame>lgoldstein/communitychest
/*
*
*/
package net.community.chest.net.rmi;
import java.rmi.AccessException;
import java.rmi.AlreadyBoundException;
import java.rmi.NotBoundException;
import java.rmi.Remote;
import java.rmi.RemoteException;
import java.rmi.registry.LocateRegistry;
import java.rmi.registry.Registry;
import java.rmi.server.UnicastRemoteObject;
import java.util.Map;
import java.util.TreeMap;
import net.community.chest.util.map.MapEntryImpl;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* @author <NAME>.
* @since Aug 31, 2008 2:41:58 PM
*/
public final class RMIUtils {
    private RMIUtils ()
    {
        // no instance
    }

    /** Separator between host and port in a registry cache key. */
    public static final char REG_KEY_SEPCHAR='@';

    /** Builds the cache key ("host@port") for a registry location. */
    public static final String getRegistryKey (final String host, final int port)
    {
        return host + String.valueOf(REG_KEY_SEPCHAR) + port;
    }

    /**
     * Parses a key produced by {@link #getRegistryKey(String, int)} back
     * into a (host, port) pair.
     *
     * @param rk the registry key - may be null/empty
     * @return the parsed pair, or {@code null} when neither a host nor a
     *         port could be extracted
     */
    public static final Map.Entry<String,Integer> fromRegistryKey (final String rk)
    {
        final int rkLen=(null == rk) ? 0 : rk.length(),
                  // use the declared separator constant rather than a duplicated '@' literal
                  sPos=(rkLen <= 1) ? (-1) : rk.lastIndexOf(REG_KEY_SEPCHAR);
        final Integer port=
            ((sPos >= 0) && (sPos < (rkLen-1)))? Integer.valueOf(rk.substring(sPos + 1)) : null;
        final String host=(sPos > 0) ? rk.substring(0, sPos) : null;
        if ((null == port) && ((null == host) || (host.length() <= 0)))
            return null;

        return new MapEntryImpl<String,Integer>(host, port);
    }

    /** Key for a registry on the default RMI port. */
    public static final String getRegistryKey (final String host)
    {
        return getRegistryKey(host, Registry.REGISTRY_PORT);
    }

    /** Key for a registry on the local host at the given port. */
    public static final String getRegistryKey (final int port)
    {
        return getRegistryKey(null, port);
    }

    // Cache of located registries keyed by "host@port"; lazily initialized.
    private static Map<String,Registry> _regsMap /* =null */;

    /**
     * Locates (and caches) the {@link Registry} at the given host and port.
     * Concurrent callers may both invoke {@link LocateRegistry#getRegistry},
     * but only one result is kept in the cache.
     *
     * @throws RemoteException if no registry instance could be created
     */
    public static final Registry getRegistry (final String host, final int port) throws RemoteException
    {
        synchronized(RMIUtils.class)
        {
            if (null == _regsMap)
                _regsMap = new TreeMap<String,Registry>();
        }

        final String regKey=getRegistryKey(host, port);
        Registry r=null;
        synchronized(_regsMap)
        {
            if ((r=_regsMap.get(regKey)) != null)
                return r;
        }

        // Deliberately outside the lock: LocateRegistry.getRegistry may block.
        if (null == (r=LocateRegistry.getRegistry(host, port)))
            throw new RemoteException("getRegistry(" + regKey + ") no " + Registry.class.getSimpleName() + " instance created");

        synchronized(_regsMap)
        {
            final Registry prev=_regsMap.put(regKey, r);
            if (prev != null)
                return r;
        }

        return r;
    }

    /** Registry at the given host on the default RMI port. */
    public static final Registry getRegistry (final String host) throws RemoteException
    {
        return getRegistry(host, Registry.REGISTRY_PORT);
    }

    /** Registry on the local host at the given port. */
    public static final Registry getRegistry (final int port) throws RemoteException
    {
        return getRegistry(null, port);
    }

    /** Local registry on the default RMI port. */
    public static final Registry getRegistry () throws RemoteException
    {
        return getRegistry(Registry.REGISTRY_PORT);
    }

    /**
     * Looks up the named object in the given registry and casts it to the
     * requested remote type.
     *
     * @return the bound object, or {@code null} when the registry is null
     *         or the lookup yields null
     */
    public static final <R extends Remote> R lookup (Registry r, Class<R> rc, String name)
        throws RemoteException, NotBoundException
    {
        final Object o=(null == r) ? null : r.lookup(name);
        if (null == o)
            return null;

        return rc.cast(o);
    }

    public static final <R extends Remote> R lookup (String host, int port, Class<R> rc, String name)
        throws RemoteException, NotBoundException
    {
        return lookup(getRegistry(host, port), rc, name);
    }

    public static final <R extends Remote> R lookup (String host, Class<R> rc, String name)
        throws RemoteException, NotBoundException
    {
        return lookup(host, Registry.REGISTRY_PORT, rc, name);
    }

    public static final <R extends Remote> R lookup (int port, Class<R> rc, String name)
        throws RemoteException, NotBoundException
    {
        return lookup(null, port, rc, name);
    }

    /**
     * Exports the stub on the given port and binds it under the given name,
     * re-binding if a previous binding exists.
     *
     * @return the exported remote object
     * @throws RemoteException if arguments are missing or the (re-)bind fails
     */
    public static final Remote ensureBinding (final Registry r, final int port, final String name, final Remote stub)
        throws RemoteException
    {
        if ((null == r) || (null == name) || (name.length() <= 0) || (null == stub))
            throw new AccessException("ensureBinding(" + r + ")[" + name + "] no stub/name/registry");

        Remote prev=null;
        try
        {
            prev = r.lookup(name);
        }
        catch(NotBoundException e)
        {
            // ignored - absence simply means we bind instead of rebind
        }

        final Remote ret=UnicastRemoteObject.exportObject(stub, port);
        if (prev == null)
        {
            try
            {
                r.bind(name, ret);
            }
            catch(AlreadyBoundException e)
            {
                // should not happen - we just observed the name as unbound
                throw new AccessException("rebind(" + r + ")[" + name + "] " + e.getClass().getName() + ": " + e.getMessage(), e);
            }
        }
        else
        {
            r.rebind(name, ret);
        }

        return ret;
    }

    public static final Remote ensureBinding (
            final String host, final int port, final String name, final Remote stub)
        throws RemoteException
    {
        if ((null == name) || (name.length() <= 0) || (null == stub))
            throw new AccessException("rebind(" + getRegistryKey(host, port) + ")[" + name + "] no stub/name/registry");

        return ensureBinding(getRegistry(host, port), port, name, stub);
    }

    public static final Remote ensureBinding (
            final String host, final String name, final Remote stub)
        throws RemoteException
    {
        return ensureBinding(host, Registry.REGISTRY_PORT, name, stub);
    }

    public static final Remote ensureBinding (
            final int port, final String name, final Remote stub)
        throws RemoteException
    {
        return ensureBinding((String) null, port, name, stub);
    }
}
|
<reponame>tsbohc/ProjectE
package moze_intel.projecte.api.imc;
import moze_intel.projecte.api.nss.NormalizedSimpleStack;
/**
 * Immutable IMC payload pairing a {@link NormalizedSimpleStack} with the
 * custom EMC value to register for it.
 */
public class CustomEMCRegistration {

    private final NormalizedSimpleStack stack;
    private final long value;

    /**
     * @param stack Defines the stack to set the EMC for.
     * @param value The EMC value to register for the given stack.
     */
    public CustomEMCRegistration(NormalizedSimpleStack stack, long value) {
        this.stack = stack;
        this.value = value;
    }

    /** @return the stack this registration applies to */
    public NormalizedSimpleStack getStack() {
        return stack;
    }

    /** @return the EMC value to register */
    public long getValue() {
        return value;
    }
}
|
/*
* <NAME>
* 11/06/16
* EUOrderFactory.java
*/
package edu.greenriver.it.abstractfactory;
import edu.greenriver.it.shippingratecalculators.EUShippingRate;
import edu.greenriver.it.shippingratecalculators.IShippingRate;
import edu.greenriver.it.taxcalculators.EUSalesTax;
import edu.greenriver.it.taxcalculators.ISalesTax;
/**
* Creates taxes and shipping rates for EU customers
*
* @author kimberlypraxel
* @version 1.0
*/
/**
 * Creates taxes and shipping rates for EU customers.
 */
public class EUOrderFactory implements IOrderFactory {

    // EU country name; never reassigned after construction, so declared final.
    private final String region;

    /**
     * creates a new EUOrderFactory
     *
     * @param region - EU country name
     */
    public EUOrderFactory(String region) {
        this.region = region;
    }

    /**
     * creates the sales-tax calculator for EU customers in this region
     *
     * @return sales tax
     */
    @Override
    public ISalesTax getTaxObject() {
        return new EUSalesTax(region);
    }

    /**
     * creates the shipping-rate calculator for EU customers
     *
     * @return shipping rate
     */
    @Override
    public IShippingRate getRateObject() {
        return new EUShippingRate();
    }
}
|
# -*- coding: UTF-8 -*-
# Copyright 2012-2013 by Luc Saffre.
# License: BSD, see LICENSE for more details.
"""
.. management_command:: run
Execute a standalone Python script after having set up the Django
environment. Also modify `sys.args`, `__file__` and `__name__` so that
the invoked script sees them as if it had been called directly.
This is yet another answer to the frequently asked Django question
about how to run standalone Django scripts
(`[1] <http://stackoverflow.com/questions/4847469/use-django-from-python-manage-py-shell-to-python-script>`__,
`[2] <http://www.b-list.org/weblog/2007/sep/22/standalone-django-scripts/>`__).
It is almost the same as redirecting stdin of Django's ``shell`` command
(i.e. doing ``python manage.py shell < myscript.py``),
but with the possibility of using command line arguments
and without the disturbing messages from the interactive console.
For example if you have a file `myscript.py` with the following content...
::
from myapp.models import Partner
print Partner.objects.all()
... then you can run this script using::
$ python manage.py run myscript.py
[<Partner: Rumma & Ko OÜ>, ... <Partner: Charlier Ulrike>,
'...(remaining elements truncated)...']
"""
from __future__ import unicode_literals
import sys
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
    help = __doc__
    args = "scriptname [args ...]"

    def handle(self, *args, **options):
        """Execute the given script with the Django environment set up.

        Rewrites ``sys.argv``, ``__file__`` and ``__name__`` so the script
        behaves as if it had been invoked directly.

        Raises:
            CommandError: if no script name was given.
        """
        if len(args) == 0:
            raise CommandError("I need at least one argument.")
        fn = args[0]
        # Drop "manage.py run" so the script sees its own name as argv[0].
        sys.argv = sys.argv[2:]
        globals()['__name__'] = '__main__'
        globals()['__file__'] = fn
        # execfile() only exists on Python 2; compile+exec works on both
        # Python 2 and 3 and preserves the filename in tracebacks.
        with open(fn) as f:
            code = compile(f.read(), fn, 'exec')
        exec(code, globals())
#!/usr/bin/python
# -*- coding: utf-8 -*-
# @File : thread_sync.py
# @Time : 2019/2/16 0:27
# @Author : MaiXiaochai
# @Site : https://github.com/MaiXiaochai
import threading
from threading import Lock
lock = Lock()  # guards every update to the shared counter below
total = 0  # shared counter, mutated concurrently by add() and desc()
def add():
    """Increment the shared ``total`` one million times, each step under ``lock``."""
    global total
    global lock
    for _ in range(1000000):
        with lock:
            total += 1
def desc():
    """Decrement the shared ``total`` one million times, each step under ``lock``."""
    global total
    global lock
    for _ in range(1000000):
        with lock:
            total -= 1
def add1(a):
    """
    The bytecode runs as follows.
    If add1 and desc1 execute on separate threads at the same time, the GIL
    may be released after any single bytecode instruction once the thread's
    time slice is used up. The instructions of add1 and desc1 then interleave;
    when both reach step 4, the order of the two assignments is unspecified -
    either add1's value or desc1's value may be stored last. So after one
    round, `a` reflects only one of the two operations, not both.
    1. load a
    2. load 1
    3. +
    4. store into a
    """
    a += 1
def desc1(a):
    """
    Bytecode (see add1 for why the interleaving is unsafe):
    1. load a
    2. load 1
    3. -
    4. store into a
    """
    a -= 1
# Run the increment and decrement loops concurrently; with the lock in
# place, the final total is deterministically 0.
thread1 = threading.Thread(target=add)
thread2 = threading.Thread(target=desc)
thread1.start()
thread2.start()

# The main thread exits only after all join()ed threads have finished.
thread1.join()
thread2.join()

print(total)

if __name__ == "__main__":
    pass
|
#!/usr/bin/env bash

# Connection settings for the local MySQL server and the application schema.
host="127.0.0.1"
db="eos_trad"
user="eos_trad"
password="123456"
rootPassword="123456"

echo "clear the world"
# Drop any previous user/database, then recreate them with utf8 and grant
# the application user full privileges on the schema.
# NOTE(review): passwords passed via -p are visible in the process list;
# consider a MySQL option file for anything beyond local development.
mysql -h ${host} -u root -p${rootPassword} -e "DROP USER IF EXISTS '${user}'@'%';
DROP DATABASE IF EXISTS ${db};
CREATE USER '${user}'@'%'IDENTIFIED BY '${password}';
CREATE DATABASE ${db} CHARACTER SET utf8 COLLATE utf8_general_ci;
GRANT ALL PRIVILEGES ON ${db}.* TO '${user}'@'%';"

echo "init db ${db} on ${host}"
# Load the table definitions into the freshly created schema.
mysql -b -h ${host} -u ${user} -p${password} -D ${db} -e "source ./table.sql"
echo "done"
|
#!/usr/bin/env bash
#
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

export LC_ALL=C.UTF-8

DOCKER_EXEC echo \> \$HOME/.verge # Make sure default datadir does not exist and is never read by creating a dummy file

mkdir -p depends/SDKs depends/sdk-sources

# Download the macOS SDK once, then unpack it for the depends build.
# POSIX marks test(1)'s -a operator obsolescent; use two tests joined by &&,
# and quote the expansions so empty/spaced values cannot break the tests.
if [ -n "$OSX_SDK" ] && [ ! -f "depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz" ]; then
  curl --location --fail "$SDK_URL/MacOSX${OSX_SDK}.sdk.tar.gz" -o "depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz"
fi
if [ -n "$OSX_SDK" ] && [ -f "depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz" ]; then
  tar -C depends/SDKs -xf "depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz"
fi
# Windows cross builds need the POSIX-threads flavour of the cross compiler.
if [[ $HOST = *-mingw32 ]]; then
  DOCKER_EXEC update-alternatives --set $HOST-g++ \$\(command -v $HOST-g++-posix\)
fi
if [ -z "$NO_DEPENDS" ]; then
  DOCKER_EXEC CONFIG_SHELL= make $MAKEJOBS -C depends HOST=$HOST $DEP_OPTS
fi
|
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.basesecurity.manager;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import javax.persistence.TypedQuery;
import org.olat.basesecurity.Authentication;
import org.olat.basesecurity.AuthenticationImpl;
import org.olat.basesecurity.BaseSecurityModule;
import org.olat.basesecurity.IdentityRef;
import org.olat.core.commons.persistence.DB;
import org.olat.core.id.Identity;
import org.olat.core.logging.AssertException;
import org.olat.core.util.StringHelper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
*
* Initial date: 3 janv. 2017<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
@Service
public class AuthenticationDAO {
@Autowired
private DB dbInstance; // persistence facade providing the current JPA EntityManager
/**
 * Loads the authentication with the given user name for a specific provider,
 * eagerly fetching the identity and its user.
 *
 * @param authusername the authentication user name
 * @param provider the authentication provider
 * @return the matching authentication, or {@code null} when none exists
 * @throws AssertException when more than one row matches (the DB has a unique
 *         constraint on the combination, so this should never happen)
 */
public Authentication getAuthenticationByAuthusername(String authusername, String provider) {
    StringBuilder sb = new StringBuilder(256);
    sb.append("select auth from ").append(AuthenticationImpl.class.getName()).append(" as auth")
      .append(" inner join fetch auth.identity ident")
      .append(" inner join fetch ident.user identUser")
      .append(" where auth.provider=:provider and auth.authusername=:authusername");

    List<Authentication> results = dbInstance.getCurrentEntityManager()
            .createQuery(sb.toString(), Authentication.class)
            .setParameter("provider", provider)
            .setParameter("authusername", authusername)
            .getResultList();
    if (results.isEmpty()) return null;
    if (results.size() != 1) {
        throw new AssertException("more than one entry for the a given authusername and provider, should never happen (even db has a unique constraint on those columns combined) ");
    }
    return results.get(0);
}
/**
 * Loads all authentications (any provider) with the given user name,
 * eagerly fetching the identity and its user.
 *
 * @param authusername the authentication user name
 * @return the matching authentications, possibly empty
 */
public List<Authentication> getAuthenticationsByAuthusername(String authusername) {
    StringBuilder sb = new StringBuilder(256);
    sb.append("select auth from ").append(AuthenticationImpl.class.getName()).append(" as auth")
      .append(" inner join fetch auth.identity ident")
      .append(" inner join fetch ident.user identUser")
      .append(" where auth.authusername=:authusername");
    return dbInstance.getCurrentEntityManager()
            .createQuery(sb.toString(), Authentication.class)
            .setParameter("authusername", authusername)
            .getResultList();
}
/**
 * Loads the authentications with the given user name, restricted to the
 * given providers; the identity and its user are fetched eagerly.
 *
 * @param authusername the authentication user name
 * @param providers the providers to restrict the search to
 * @return the matching authentications, possibly empty
 */
public List<Authentication> getAuthenticationsByAuthusername(String authusername, List<String> providers) {
    StringBuilder sb = new StringBuilder(256);
    sb.append("select auth from ").append(AuthenticationImpl.class.getName()).append(" as auth")
      .append(" inner join fetch auth.identity ident")
      .append(" inner join fetch ident.user identUser")
      .append(" where auth.authusername=:authusername and auth.provider in (:providers)");
    return dbInstance.getCurrentEntityManager()
            .createQuery(sb.toString(), Authentication.class)
            .setParameter("authusername", authusername)
            .setParameter("providers", providers)
            .getResultList();
}
/**
 * Loads the identities whose authentication user name equals the given
 * login, across all providers.
 *
 * @param login the authentication user name to search for
 * @return A list of identities (the user is not fetched)
 */
public List<Identity> getIdentitiesWithLogin(String login) {
    StringBuilder sb = new StringBuilder(256);
    sb.append("select ident from ").append(AuthenticationImpl.class.getName()).append(" as auth")
      .append(" inner join auth.identity as ident")
      .append(" inner join ident.user as user")
      .append(" where auth.authusername=:login");
    return dbInstance.getCurrentEntityManager()
            .createQuery(sb.toString(), Identity.class)
            .setParameter("login", login)
            .getResultList();
}
/**
 * Loads the identities that own at least one authentication of the
 * given provider.
 *
 * @param provider The authentication provider
 * @return A list of identities (the user is not fetched)
 */
public List<Identity> getIdentitiesWithAuthentication(String provider) {
    StringBuilder sb = new StringBuilder(256);
    sb.append("select ident from ").append(AuthenticationImpl.class.getName()).append(" as auth")
      .append(" inner join auth.identity as ident")
      .append(" where auth.provider=:provider");
    return dbInstance.getCurrentEntityManager()
            .createQuery(sb.toString(), Identity.class)
            .setParameter("provider", provider)
            .getResultList();
}
/**
 * Counts the identities that own an authentication of the given provider.
 *
 * @param provider the authentication provider
 * @return the number of matching identities, 0 when none
 */
public long countIdentitiesWithAuthentication(String provider) {
    StringBuilder sb = new StringBuilder(256);
    sb.append("select count(auth.identity.key) from ").append(AuthenticationImpl.class.getName()).append(" as auth")
      .append(" where auth.provider=:provider");

    List<Long> count = dbInstance.getCurrentEntityManager()
            .createQuery(sb.toString(), Long.class)
            .setParameter("provider", provider)
            .getResultList();
    // getResultList() never returns null; only guard against an empty list
    // or a null aggregate value (the old null-list check was dead code).
    return count.isEmpty() || count.get(0) == null ? 0l : count.get(0).longValue();
}
/**
 * Loads all authentications of the given provider, eagerly fetching the
 * owning identity and its user.
 *
 * @param provider The authentication provider
 * @return A list of authentications with identity and user fetched
 */
public List<Authentication> getAuthentications(String provider) {
    StringBuilder sb = new StringBuilder(256);
    sb.append("select auth from ").append(AuthenticationImpl.class.getName()).append(" as auth")
      .append(" inner join fetch auth.identity as ident")
      .append(" inner join fetch ident.user as identUser")
      .append(" where auth.provider=:provider");
    return dbInstance.getCurrentEntityManager()
            .createQuery(sb.toString(), Authentication.class)
            .setParameter("provider", provider)
            .getResultList();
}
/**
 * Loads the authentication of the given identity for a specific provider.
 *
 * @param identity the identity (mandatory)
 * @param provider the authentication provider (mandatory, non-blank)
 * @return the matching authentication, or {@code null} when none exists
 * @throws IllegalArgumentException when identity is null or provider is blank
 * @throws AssertException when more than one row matches
 */
public Authentication getAuthentication(IdentityRef identity, String provider) {
    if (identity == null || !StringHelper.containsNonWhitespace(provider)) {
        // Old message only mentioned the identity, which was misleading
        // when the provider was the missing argument.
        throw new IllegalArgumentException("identity and provider must not be null");
    }
    StringBuilder sb = new StringBuilder();
    sb.append("select auth from ").append(AuthenticationImpl.class.getName()).append(" as auth")
      .append(" inner join auth.identity as ident")
      .append(" inner join ident.user as user")
      .append(" where auth.identity.key=:identityKey and auth.provider=:provider");

    List<Authentication> results = dbInstance.getCurrentEntityManager()
            .createQuery(sb.toString(), Authentication.class)
            .setParameter("identityKey", identity.getKey())
            .setParameter("provider", provider)
            .getResultList();
    if (results == null || results.isEmpty()) {
        return null;
    }
    if (results.size() > 1) {
        throw new AssertException("Found more than one Authentication for a given subject and a given provider.");
    }
    return results.get(0);
}
/**
 * Find the authentication matching an authentication user name and provider.
 * At most one row is read from the database.
 *
 * @param authUsername The user name used by the authentication provider
 * @param provider The authentication provider
 * @return The first matching authentication, or null if none was found
 */
public Authentication getAuthentication(String authUsername, String provider) {
    String query = "select auth from " + AuthenticationImpl.class.getName() + " as auth"
            + " inner join auth.identity as ident"
            + " inner join ident.user as user"
            + " where auth.authusername=:authUsername and auth.provider=:provider";
    List<Authentication> authentications = dbInstance.getCurrentEntityManager()
            .createQuery(query, Authentication.class)
            .setParameter("authUsername", authUsername)
            .setParameter("provider", provider)
            .setFirstResult(0)
            .setMaxResults(1)
            .getResultList();
    if (authentications == null || authentications.isEmpty()) {
        return null;
    }
    return authentications.get(0);
}
/**
 * Check whether the identity owns at least one authentication of the given
 * provider. Only the primary key is selected and at most one row is read.
 *
 * @param identity The identity
 * @param provider The authentication provider
 * @return true if a matching authentication exists
 */
public boolean hasAuthentication(IdentityRef identity, String provider) {
    String query = "select auth.key from " + AuthenticationImpl.class.getName() + " as auth"
            + " where auth.identity.key=:identityKey and auth.provider=:provider";
    List<Long> keys = dbInstance.getCurrentEntityManager()
            .createQuery(query, Long.class)
            .setParameter("identityKey", identity.getKey())
            .setParameter("provider", provider)
            .setFirstResult(0)
            .setMaxResults(1)
            .getResultList();
    return keys != null && !keys.isEmpty();
}
/**
 * Load all authentications owned by the given identity, with the identity
 * itself join-fetched.
 *
 * @param identity The identity
 * @return A list of authentications of the identity
 */
public List<Authentication> getAuthentications(IdentityRef identity) {
    String query = "select auth from " + AuthenticationImpl.class.getName() + " as auth "
            + "inner join fetch auth.identity as ident"
            + " where ident.key=:identityKey";
    return dbInstance.getCurrentEntityManager()
            .createQuery(query, Authentication.class)
            .setParameter("identityKey", identity.getKey())
            .getResultList();
}
/**
 * Merge the (possibly detached) authentication into the current persistence
 * context and return the managed instance.
 *
 * @param authentication The authentication to update
 * @return The merged, managed authentication
 */
public Authentication updateAuthentication(Authentication authentication) {
return dbInstance.getCurrentEntityManager().merge(authentication);
}
/**
 * Quick update of the credential, don't do a full update of the authentication object.
 * Executes a bulk JPQL update (also setting lastModified to "now") and
 * commits immediately; the passed-in Authentication instance is NOT
 * refreshed and will still carry the old credential in memory.
 *
 * @param auth The authentication whose credential is replaced (only its key is used)
 * @param token The new credential value
 */
public void updateCredential(Authentication auth, String token) {
StringBuilder sb = new StringBuilder(128);
sb.append("update ").append(AuthenticationImpl.class.getName()).append(" set credential=:token,lastModified=:now where key=:authKey");
dbInstance.getCurrentEntityManager()
.createQuery(sb.toString())
.setParameter("authKey", auth.getKey())
.setParameter("token", token)
.setParameter("now", new Date())
.executeUpdate();
// Commit right away so the new credential is visible to other sessions.
dbInstance.commit();
}
/**
 * The query return as valid OLAT authentication a fallback for LDAP.
 * An authentication counts as valid if it is either (a) an OLAT-provider
 * authentication satisfying the optional change-once / max-age constraints,
 * or (b) an authentication of one of the exception providers.
 *
 * @param identity The identity to check
 * @param changeOnce If the identity need to change its password at least once
 * @param maxAge The max. age of the authentication in seconds (0 or negative disables the check)
 * @param exceptionProviders Providers accepted as-is, bypassing the OLAT constraints
 * @return true if at least one valid authentication exists
 */
public boolean hasValidOlatAuthentication(IdentityRef identity, boolean changeOnce, int maxAge,
List<String> exceptionProviders) {
StringBuilder sb = new StringBuilder(256);
sb.append("select auth.key from ").append(AuthenticationImpl.class.getName()).append(" as auth")
.append(" where auth.identity.key=:identityKey and ((auth.provider=:olatProvider");
if(changeOnce) {
// creationDate == lastModified means the password was never changed.
sb.append(" and not(auth.creationDate=auth.lastModified)");
}
if(maxAge > 0) {
sb.append(" and auth.lastModified>=:maxDate");
}
sb.append(") or auth.provider in (:providers))");
TypedQuery<Long> query = dbInstance.getCurrentEntityManager()
.createQuery(sb.toString(), Long.class)
.setParameter("identityKey", identity.getKey())
.setParameter("olatProvider", BaseSecurityModule.getDefaultAuthProviderIdentifier())
.setParameter("providers", exceptionProviders);
if(maxAge > 0) {
// Oldest acceptable modification time: now minus maxAge seconds.
Calendar cal = Calendar.getInstance();
cal.add(Calendar.SECOND, -maxAge);
query.setParameter("maxDate", cal.getTime());
}
// Only existence matters, so read at most one key.
List<Long> keys = query
.setFirstResult(0)
.setMaxResults(1)
.getResultList();
return keys != null && !keys.isEmpty() && keys.get(0) != null;
}
}
|
<reponame>lananh265/social-network<filename>node_modules/react-icons-kit/ionicons/helpCircled.js
"use strict";

// Auto-generated icon definition for react-icons-kit (ionicons "help-circled").
// The object mirrors an SVG document: a viewBox string plus a nested
// `children` tree of element descriptors ({ name, attribs, children }).
// Do not hand-edit the path data.
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.helpCircled = void 0;
var helpCircled = {
"viewBox": "0 0 512 512",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M256,32C132.3,32,32,132.3,32,256s100.3,224,224,224c123.7,0,224-100.3,224-224S379.7,32,256,32z M276.2,358.7\r\n\t\tc-0.5,17.8-13.7,28.8-30.8,28.3c-16.4-0.5-29.3-12.2-28.8-30.1c0.5-17.8,14.1-29.1,30.5-28.6C264.3,328.8,276.8,340.9,276.2,358.7z\r\n\t\t M324.9,231.4c-4.2,5.9-13.6,13.5-25.4,22.7l-13.1,9c-6.4,4.9-10.4,10.7-12.5,17.3c-1.1,3.5-1.9,12.6-2.1,18.7\r\n\t\tc-0.1,1.2-0.8,3.9-4.5,3.9c-3.7,0-35,0-39.1,0c-4.1,0-4.6-2.4-4.5-3.6c0.6-16.6,3-30.3,9.9-41.3c9.3-14.8,35.5-30.4,35.5-30.4\r\n\t\tc4-3,7.1-6.2,9.5-9.7c4.4-6,8-12.7,8-19.9c0-8.3-2-16.2-7.3-22.8c-6.2-7.7-12.9-11.4-25.8-11.4c-12.7,0-20.1,6.4-25.4,14.8\r\n\t\tc-5.3,8.4-4.4,18.3-4.4,27.3H175c0-34,8.9-55.7,27.7-68.5c12.7-8.7,28.9-12.5,47.8-12.5c24.8,0,44.5,4.6,61.9,17.8\r\n\t\tc16.1,12.2,24.6,29.4,24.6,52.6C337,209.7,332,221.7,324.9,231.4z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M256,32C132.3,32,32,132.3,32,256s100.3,224,224,224c123.7,0,224-100.3,224-224S379.7,32,256,32z M276.2,358.7\r\n\t\tc-0.5,17.8-13.7,28.8-30.8,28.3c-16.4-0.5-29.3-12.2-28.8-30.1c0.5-17.8,14.1-29.1,30.5-28.6C264.3,328.8,276.8,340.9,276.2,358.7z\r\n\t\t M324.9,231.4c-4.2,5.9-13.6,13.5-25.4,22.7l-13.1,9c-6.4,4.9-10.4,10.7-12.5,17.3c-1.1,3.5-1.9,12.6-2.1,18.7\r\n\t\tc-0.1,1.2-0.8,3.9-4.5,3.9c-3.7,0-35,0-39.1,0c-4.1,0-4.6-2.4-4.5-3.6c0.6-16.6,3-30.3,9.9-41.3c9.3-14.8,35.5-30.4,35.5-30.4\r\n\t\tc4-3,7.1-6.2,9.5-9.7c4.4-6,8-12.7,8-19.9c0-8.3-2-16.2-7.3-22.8c-6.2-7.7-12.9-11.4-25.8-11.4c-12.7,0-20.1,6.4-25.4,14.8\r\n\t\tc-5.3,8.4-4.4,18.3-4.4,27.3H175c0-34,8.9-55.7,27.7-68.5c12.7-8.7,28.9-12.5,47.8-12.5c24.8,0,44.5,4.6,61.9,17.8\r\n\t\tc16.1,12.2,24.6,29.4,24.6,52.6C337,209.7,332,221.7,324.9,231.4z"
},
"children": []
}]
}]
}]
};
exports.helpCircled = helpCircled;
#!/bin/bash
# Verify packaging in a clean virtualenv. Runs the package/dependency
# verification test modules and removes the env only on success.
set -ex
# Clear PYTHONPATH so only the virtualenv's site-packages are importable.
export PYTHONPATH=
virtualenv package-verify-env
. package-verify-env/bin/activate
pip install -e scripts
python -m automation.tests.verify_packages
python -m automation.tests.verify_dependencies
deactivate
# If we get here, all prev. commands returned 0 exit codes so we are done.
rm -rf package-verify-env
#!/usr/bin/with-contenv bashio
# Add-on entrypoint: prepares persistent state, serves /network_shares over
# HTTP in the background and launches the main application.

# Persistent file tracking the currently playing playlist.
touch /data/currentplaylist.txt

# Working directory for mounted network shares.
mkdir -p /network_shares
cd /network_shares

# Seed the share configuration with an empty JSON array on first run.
# Fix: quote "[]" — unquoted it is a glob pattern and could expand against
# files in the current directory instead of writing a literal [].
if [ ! -f /data/network_shares.json ];
then
    echo "[]" > /data/network_shares.json
fi

# Serve the shares directory on port 80 in the background.
nohup python -m http.server 80 &

port=$(bashio::addons self 'addons.self.network' '.network["8099/tcp"]')
darkmode=$(bashio::config 'dark_mode')
echo "Dark mode: $darkmode"

export BASEDIR="/network_shares"
python -B /run.py "$port" "$darkmode"
#!/bin/bash
# Run a high-accuracy mafft alignment (G-INS-i) over every *.afa file in the
# current directory, writing each result to Res<name>, and report wall-clock
# start/finish times.
start=$(date +"%T")
for f in *.afa; do mafft --maxiterate 1000 --globalpair "$f" > "Res$f"; done
echo "===================================================================================================================================================================================================================="
finTime=$(date +"%T")
# Fix: both lines previously printed the label "Current time", which made
# the two timestamps indistinguishable in the output.
echo "Start time  : $start"
echo "Finish time : $finTime"
// Thin wrapper around window.localStorage for persisting the signed-in user
// and the dark-mode preference.

// Fix: the token key had been mangled to '<PASSWORD>ifyUserKey' by a
// secret-scrubbing tool; restored to the 'readify' prefix used by the
// dark-mode key below.
const storageKeyToken = 'readifyUserKey';
const storageKeyDarkMode = 'readifyDarkMode';

// Persist the user object as JSON.
const saveUser = (user) =>
  localStorage.setItem(storageKeyToken, JSON.stringify(user));

// Returns the stored user object, or null when nothing is stored.
const loadUser = () => JSON.parse(localStorage.getItem(storageKeyToken));

const logoutUser = () => localStorage.removeItem(storageKeyToken);

// NOTE: localStorage stores strings, so loadDarkMode yields 'true'/'false'
// (or null), not a boolean — callers must compare against the string form.
const saveDarkMode = (boolean) =>
  localStorage.setItem(storageKeyDarkMode, boolean);
const loadDarkMode = () => localStorage.getItem(storageKeyDarkMode);

const storage = {
  saveUser,
  loadUser,
  logoutUser,
  saveDarkMode,
  loadDarkMode,
};

export default storage;
<reponame>kelvinatsplunk/splunk-operator
// Copyright (c) 2018-2020 Splunk Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package reconcile
import (
"context"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/types"
)
// ApplyService creates or updates a Kubernetes Service
func ApplyService(client ControllerClient, service *corev1.Service) error {
scopedLog := log.WithName("ApplyService").WithValues(
"name", service.GetObjectMeta().GetName(),
"namespace", service.GetObjectMeta().GetNamespace())
namespacedName := types.NamespacedName{Namespace: service.GetNamespace(), Name: service.GetName()}
var current corev1.Service
err := client.Get(context.TODO(), namespacedName, ¤t)
if err == nil {
// found existing Service: do nothing
scopedLog.Info("Found existing Service")
} else {
err = CreateResource(client, service)
}
return err
}
|
#!/bin/bash
# CI step: provision a squid HTTP proxy host in AWS for an OpenShift install.
set -o nounset
set -o errexit
set -o pipefail
# Kill any background jobs (the backgrounded aws calls below) when the step
# is terminated, and wait for them to exit.
trap 'CHILDREN=$(jobs -p); if test -n "${CHILDREN}"; then kill ${CHILDREN} && wait; fi' TERM
# Writes the Ignition config for the proxy host to /tmp/proxy.ign.
# The heredoc delimiter is unquoted, so ${ssh_pub_key}, ${HTPASSWD_CONTENTS},
# ${SQUID_CONFIG}, ${SQUID_SH} and ${PROXY_SH} expand at call time — all of
# them must be set before this function is invoked.
function generate_proxy_ignition() {
cat > /tmp/proxy.ign << EOF
{
"ignition": {
"config": {},
"security": {
"tls": {}
},
"timeouts": {},
"version": "3.0.0"
},
"passwd": {
"users": [
{
"name": "core",
"sshAuthorizedKeys": [
"${ssh_pub_key}"
]
}
]
},
"storage": {
"files": [
{
"path": "/etc/squid/passwords",
"contents": {
"source": "data:text/plain;base64,${HTPASSWD_CONTENTS}"
},
"mode": 420
},
{
"path": "/etc/squid/squid.conf",
"contents": {
"source": "data:text/plain;base64,${SQUID_CONFIG}"
},
"mode": 420
},
{
"path": "/etc/squid.sh",
"contents": {
"source": "data:text/plain;base64,${SQUID_SH}"
},
"mode": 420
},
{
"path": "/etc/squid/proxy.sh",
"contents": {
"source": "data:text/plain;base64,${PROXY_SH}"
},
"mode": 420
}
]
},
"systemd": {
"units": [
{
"contents": "[Unit]\nWants=network-online.target\nAfter=network-online.target\n[Service]\n\nStandardOutput=journal+console\nExecStart=bash /etc/squid.sh\n\n[Install]\nRequiredBy=multi-user.target\n",
"enabled": true,
"name": "squid.service"
},
{
"dropins": [
{
"contents": "[Service]\nExecStart=\nExecStart=/usr/lib/systemd/systemd-journal-gatewayd \\\n --key=/opt/openshift/tls/journal-gatewayd.key \\\n --cert=/opt/openshift/tls/journal-gatewayd.crt \\\n --trust=/opt/openshift/tls/root-ca.crt\n",
"name": "certs.conf"
}
],
"name": "systemd-journal-gatewayd.service"
}
]
}
}
EOF
}
# Writes a CloudFormation template for the proxy host (EC2 instance, security
# group, IAM role/profile) to /tmp/04_cluster_proxy.yaml. The heredoc is
# unquoted but contains no shell ${...} references, so the template is
# emitted verbatim.
function generate_proxy_template() {
cat > /tmp/04_cluster_proxy.yaml << EOF
AWSTemplateFormatVersion: 2010-09-09
Description: Template for OpenShift Cluster Proxy (EC2 Instance, Security Groups and IAM)
Parameters:
InfrastructureName:
AllowedPattern: ^([a-zA-Z][a-zA-Z0-9\-]{0,26})$
MaxLength: 27
MinLength: 1
ConstraintDescription: Infrastructure name must be alphanumeric, start with a letter, and have a maximum of 27 characters.
Description: A short, unique cluster ID used to tag cloud resources and identify items owned or used by the cluster.
Type: String
Ami:
Description: Current CoreOS AMI to use for proxy.
Type: AWS::EC2::Image::Id
AllowedProxyCidr:
AllowedPattern: ^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/([0-9]|1[0-9]|2[0-9]|3[0-2]))$
ConstraintDescription: CIDR block parameter must be in the form x.x.x.x/0-32.
Default: 0.0.0.0/0
Description: CIDR block to allow access to the proxy node.
Type: String
ClusterName:
Description: The cluster name used to uniquely identify the proxy load balancer
Type: String
PublicSubnet:
Description: The public subnet to launch the proxy node into.
Type: AWS::EC2::Subnet::Id
VpcId:
Description: The VPC-scoped resources will belong to this VPC.
Type: AWS::EC2::VPC::Id
ProxyIgnitionLocation:
Default: s3://my-s3-bucket/proxy.ign
Description: Ignition config file location.
Type: String
Metadata:
AWS::CloudFormation::Interface:
ParameterGroups:
- Label:
default: "Cluster Information"
Parameters:
- InfrastructureName
- Label:
default: "Host Information"
Parameters:
- Ami
- ProxyIgnitionLocation
- Label:
default: "Network Configuration"
Parameters:
- VpcId
- AllowedProxyCidr
- PublicSubnet
- ClusterName
ParameterLabels:
InfrastructureName:
default: "Infrastructure Name"
VpcId:
default: "VPC ID"
AllowedProxyCidr:
default: "Allowed ingress Source"
Ami:
default: "CoreOS AMI ID"
ProxyIgnitionLocation:
default: "Bootstrap Ignition Source"
ClusterName:
default: "Cluster name"
Resources:
ProxyIamRole:
Type: AWS::IAM::Role
Properties:
AssumeRolePolicyDocument:
Version: "2012-10-17"
Statement:
- Effect: "Allow"
Principal:
Service:
- "ec2.amazonaws.com"
Action:
- "sts:AssumeRole"
Path: "/"
Policies:
- PolicyName: !Join ["-", [!Ref InfrastructureName, "proxy", "policy"]]
PolicyDocument:
Version: "2012-10-17"
Statement:
- Effect: "Allow"
Action: "ec2:Describe*"
Resource: "*"
ProxyInstanceProfile:
Type: "AWS::IAM::InstanceProfile"
Properties:
Path: "/"
Roles:
- Ref: "ProxyIamRole"
ProxySecurityGroup:
Type: AWS::EC2::SecurityGroup
Properties:
GroupDescription: Cluster Proxy Security Group
SecurityGroupIngress:
- IpProtocol: tcp
FromPort: 22
ToPort: 22
CidrIp: 0.0.0.0/0
- IpProtocol: tcp
ToPort: 3128
FromPort: 3128
CidrIp: !Ref AllowedProxyCidr
- IpProtocol: tcp
ToPort: 19531
FromPort: 19531
CidrIp: !Ref AllowedProxyCidr
VpcId: !Ref VpcId
ProxyInstance:
Type: AWS::EC2::Instance
Properties:
ImageId: !Ref Ami
IamInstanceProfile: !Ref ProxyInstanceProfile
KeyName: "openshift-dev"
InstanceType: "m5.xlarge"
NetworkInterfaces:
- AssociatePublicIpAddress: "true"
DeviceIndex: "0"
GroupSet:
- !Ref "ProxySecurityGroup"
SubnetId: !Ref "PublicSubnet"
UserData:
Fn::Base64: !Sub
- '{"ignition":{"config":{"replace":{"source":"\${IgnitionLocation}"}},"version":"3.0.0"}}'
- {
IgnitionLocation: !Ref ProxyIgnitionLocation
}
Outputs:
ProxyId:
Description: The proxy node instanceId.
Value: !Ref ProxyInstance
ProxyPrivateIp:
Description: The proxy node private IP address.
Value: !GetAtt ProxyInstance.PrivateIp
ProxyPublicIp:
Description: The proxy node public IP address.
Value: !GetAtt ProxyInstance.PublicIp
EOF
}
export AWS_SHARED_CREDENTIALS_FILE="${CLUSTER_PROFILE_DIR}/.awscred"
# TODO: move to image
# Fetch yq, used to read values out of install-config.yaml.
curl -L https://github.com/mikefarah/yq/releases/download/3.3.0/yq_linux_amd64 -o /tmp/yq && chmod +x /tmp/yq
# Tag created resources with an expiration date so leaked resources can be
# reaped automatically.
EXPIRATION_DATE=$(date -d '4 hours' --iso=minutes --utc)
TAGS="Key=expirationDate,Value=${EXPIRATION_DATE}"
CONFIG="${SHARED_DIR}/install-config.yaml"
PROXY_IMAGE=registry.svc.ci.openshift.org/origin/4.5:egress-http-proxy
PROXY_NAME="$(/tmp/yq r "${CONFIG}" 'metadata.name')"
REGION="$(/tmp/yq r "${CONFIG}" 'platform.aws.region')"
echo Using region: ${REGION}
test -n "${REGION}"
# Resolve the current Fedora CoreOS stable AMI for this region.
curl -L -o /tmp/fcos-stable.json https://builds.coreos.fedoraproject.org/streams/stable.json
AMI=$(jq -r .architectures.x86_64.images.aws.regions[\"${REGION}\"].image < /tmp/fcos-stable.json)
if [ -z "${AMI}" ]; then
echo "Missing AMI in region: ${REGION}" 1>&2
exit 1
fi
RELEASE=$(jq -r .architectures.x86_64.images.aws.regions[\"${REGION}\"].release < /tmp/fcos-stable.json)
echo "Using FCOS ${RELEASE} AMI: ${AMI}"
ssh_pub_key=$(<"${CLUSTER_PROFILE_DIR}/ssh-publickey")
# get the VPC ID from a subnet -> subnet.VpcId
aws_subnet="$(/tmp/yq r "${CONFIG}" 'platform.aws.subnets[0]')"
echo "Using aws_subnet: ${aws_subnet}"
vpc_id="$(aws --region "${REGION}" ec2 describe-subnets --subnet-ids "${aws_subnet}" | jq -r '.[][0].VpcId')"
echo "Using vpc_id: ${vpc_id}"
# for each subnet:
# aws ec2 describe-route-tables --filters Name=association.subnet-id,Values=${value} | grep '"GatewayId": "igw.*'
# if $? then use it as the public subnet
SUBNETS="$(/tmp/yq r -P "${CONFIG}" 'platform.aws.subnets' | sed 's/- //g')"
public_subnet=""
for subnet in ${SUBNETS}; do
if aws --region "${REGION}" ec2 describe-route-tables --filters Name=association.subnet-id,Values="${subnet}" | grep '"GatewayId": "igw.*' 1>&2 > /dev/null; then
public_subnet="${subnet}"
break
fi
done
# NOTE(review): "Cound" is a typo for "Could" in the message below.
if [[ -z "$public_subnet" ]]; then
echo "Cound not find a public subnet in ${SUBNETS}" && exit 1
fi
echo "Using public_subnet: ${public_subnet}"
# Generate a random proxy password and its htpasswd (apr1) entry, base64 for
# embedding in the Ignition config.
PASSWORD="$(uuidgen | sha256sum | cut -b -32)"
HTPASSWD_CONTENTS="${PROXY_NAME}:$(openssl passwd -apr1 ${PASSWORD})"
HTPASSWD_CONTENTS="$(echo -e ${HTPASSWD_CONTENTS} | base64 -w0)"
# define squid config
SQUID_CONFIG="$(base64 -w0 << EOF
http_port 3128
cache deny all
access_log stdio:/tmp/squid-access.log all
debug_options ALL,1
shutdown_lifetime 0
auth_param basic program /usr/lib64/squid/basic_ncsa_auth /squid/passwords
auth_param basic realm proxy
acl authenticated proxy_auth REQUIRED
http_access allow authenticated
pid_filename /tmp/proxy-setup
EOF
)"
# define squid.sh
SQUID_SH="$(base64 -w0 << EOF
#!/bin/bash
podman run --entrypoint='["bash", "/squid/proxy.sh"]' --expose=3128 --net host --volume /etc/squid:/squid:Z ${PROXY_IMAGE}
EOF
)"
# define proxy.sh
PROXY_SH="$(base64 -w0 << EOF
#!/bin/bash
function print_logs() {
while [[ ! -f /tmp/squid-access.log ]]; do
sleep 5
done
tail -f /tmp/squid-access.log
}
print_logs &
squid -N -f /squid/squid.conf
EOF
)"
# create ignition entries for certs and script to start squid and systemd unit entry
# create the proxy stack and then get its IP
PROXY_URI="s3://${PROXY_NAME}/proxy.ign"
generate_proxy_ignition
generate_proxy_template
# we need to be able to tear down the proxy even if install fails
# cannot rely on presence of ${SHARED_DIR}/metadata.json
echo "${REGION}" >> "${SHARED_DIR}/proxyregion"
# create the s3 bucket to push to
aws --region "${REGION}" s3 mb "s3://${PROXY_NAME}"
aws --region "${REGION}" s3api put-bucket-acl --bucket "${PROXY_NAME}" --acl public-read
# push the generated ignition to the s3 bucket
aws --region "${REGION}" s3 cp /tmp/proxy.ign "${PROXY_URI}"
aws --region "${REGION}" s3api put-object-acl --bucket "${PROXY_NAME}" --key "proxy.ign" --acl public-read
# The aws calls run in the background with an immediate `wait` so the TERM
# trap above can kill them if the step is interrupted.
aws --region "${REGION}" cloudformation create-stack \
--stack-name "${PROXY_NAME}-proxy" \
--template-body "$(cat "/tmp/04_cluster_proxy.yaml")" \
--tags "${TAGS}" \
--capabilities CAPABILITY_NAMED_IAM \
--parameters \
ParameterKey=ClusterName,ParameterValue="${PROXY_NAME}" \
ParameterKey=VpcId,ParameterValue="${vpc_id}" \
ParameterKey=ProxyIgnitionLocation,ParameterValue="${PROXY_URI}" \
ParameterKey=InfrastructureName,ParameterValue="${PROXY_NAME}" \
ParameterKey=Ami,ParameterValue="${AMI}" \
ParameterKey=PublicSubnet,ParameterValue="${public_subnet}" &
wait "$!"
echo "Created stack"
aws --region "${REGION}" cloudformation wait stack-create-complete --stack-name "${PROXY_NAME}-proxy" &
wait "$!"
echo "Waited for stack"
INSTANCE_ID="$(aws --region "${REGION}" cloudformation describe-stacks --stack-name "${PROXY_NAME}-proxy" \
--query 'Stacks[].Outputs[?OutputKey == `ProxyId`].OutputValue' --output text)"
echo "Instance ${INSTANCE_ID}"
# to allow log collection during gather:
# append to proxy instance ID to "${SHARED_DIR}/aws-instance-ids.txt"
echo "${INSTANCE_ID}" >> "${SHARED_DIR}/aws-instance-ids.txt"
PRIVATE_PROXY_IP="$(aws --region "${REGION}" cloudformation describe-stacks --stack-name "${PROXY_NAME}-proxy" \
--query 'Stacks[].Outputs[?OutputKey == `ProxyPrivateIp`].OutputValue' --output text)"
PUBLIC_PROXY_IP="$(aws --region "${REGION}" cloudformation describe-stacks --stack-name "${PROXY_NAME}-proxy" \
--query 'Stacks[].Outputs[?OutputKey == `ProxyPublicIp`].OutputValue' --output text)"
# echo proxy IP to ${SHARED_DIR}/proxyip
echo "${PUBLIC_PROXY_IP}" >> "${SHARED_DIR}/proxyip"
PROXY_URL="http://${PROXY_NAME}:${PASSWORD}@${PRIVATE_PROXY_IP}:3128/"
# due to https://bugzilla.redhat.com/show_bug.cgi?id=1750650 we don't use a tls end point for squid
# Append the proxy settings to the install config consumed by later steps.
cat >> "${CONFIG}" << EOF
proxy:
httpsProxy: ${PROXY_URL}
httpProxy: ${PROXY_URL}
EOF
|
# Bats tests: each test feeds `$NIM project deploy` a deliberately malformed
# project.yml and asserts that deployment fails with the expected validation
# message. $NIM, assert_failure and assert_output come from test_setup.bash.
load ../test_setup.bash

@test "cannot deploy project with invalid annotations" {
run $NIM project deploy $BATS_TEST_DIRNAME/test-cases/invalid-annotations
assert_failure
assert_output --partial "Error: Invalid project configuration file (project.yml): annotations must"
assert_output --partial "be a dictionary"
}

@test "cannot deploy project with invalid env" {
run $NIM project deploy $BATS_TEST_DIRNAME/test-cases/invalid-env
assert_failure
assert_output --partial "Error: Invalid project configuration file (project.yml): the environment"
assert_output --partial "clause must be a dictionary"
}

@test "cannot deploy project with invalid parameters" {
run $NIM project deploy $BATS_TEST_DIRNAME/test-cases/invalid-parameters
assert_failure
assert_output --partial "Error: Invalid project configuration file (project.yml): parameters must"
assert_output --partial "be a dictionary"
}

@test "cannot deploy project with invalid top-level parameter " {
run $NIM project deploy $BATS_TEST_DIRNAME/test-cases/invalid-parameters-toplevel
assert_failure
assert_output --partial "Error: Invalid project configuration file (project.yml): parameters member"
assert_output --partial "must be a dictionary"
}
|
package application
import (
"cf/api"
"cf/configuration"
"cf/requirements"
"cf/terminal"
"errors"
"fmt"
"github.com/codegangsta/cli"
"io/ioutil"
"os"
"os/exec"
"path"
"strconv"
"strings"
)
// Ssh implements the "ssh" CLI command: it fetches SSH connection details
// for an application instance from the API and shells out to the system
// ssh binary.
type Ssh struct {
ui terminal.UI
config configuration.Reader
appSshRepo api.AppSshRepository
appReq requirements.ApplicationRequirement
}
// NewSsh wires up an Ssh command with its UI, configuration and SSH
// repository dependencies. appReq is populated later by GetRequirements.
func NewSsh(ui terminal.UI, config configuration.Reader, appSshRepo api.AppSshRepository) (cmd *Ssh) {
	cmd = &Ssh{
		ui:         ui,
		config:     config,
		appSshRepo: appSshRepo,
	}
	return
}
// GetRequirements validates the command line (exactly one application name
// is required) and declares the login, targeted-space and application
// requirements that must hold before Run is invoked.
func (cmd *Ssh) GetRequirements(reqFactory requirements.Factory, c *cli.Context) (reqs []requirements.Requirement, err error) {
if len(c.Args()) < 1 {
err = errors.New("Incorrect Usage")
cmd.ui.FailWithUsage(c, "ssh")
return
}
// The application requirement is kept on the command so Run can use it.
cmd.appReq = reqFactory.NewApplicationRequirement(c.Args()[0])
reqs = []requirements.Requirement{
reqFactory.NewLoginRequirement(),
reqFactory.NewTargetedSpaceRequirement(),
cmd.appReq,
}
return
}
// ExecuteCmd runs the named program with the given arguments, wired to the
// current process' stdin/stdout/stderr, and returns the run error.
// Declared as a package variable — presumably so tests can stub it out.
var ExecuteCmd = func(appname string, args []string) (err error) {
	command := exec.Command(appname, args...)
	command.Stdin, command.Stdout, command.Stderr = os.Stdin, os.Stdout, os.Stderr
	err = command.Run()
	return
}
// Run executes the ssh command: it asks the API for the SSH endpoint and
// private key of the requested application instance, writes the key to a
// temporary identity file, shells out to the system ssh binary, and removes
// the temporary files afterwards. Fix: locals were named "error", shadowing
// the builtin error type; renamed to idiomatic err variants.
func (cmd *Ssh) Run(c *cli.Context) {
	app := cmd.appReq.GetApplication()
	instance := c.Int("instance")
	sshapi := cmd.appSshRepo

	cmd.ui.Say("SSHing to application %s, instance %s...",
		terminal.EntityNameColor(app.Name),
		terminal.EntityNameColor(strconv.Itoa(instance)),
	)
	apiResponse, sshDetails := sshapi.GetSshDetails(app.Guid, instance)
	if apiResponse.IsNotSuccessful() {
		cmd.ui.Failed(apiResponse.Message)
		return
	}
	cmd.ui.Ok()

	// Write the private key into a throw-away identity file, 0600 so ssh
	// accepts it.
	tempdir, err := ioutil.TempDir("", "gocf")
	if err != nil {
		panic(err)
	}
	tempfile := path.Join(tempdir, "identity")
	err = ioutil.WriteFile(tempfile, []byte(sshDetails.SshKey), 0600)
	if err != nil {
		panic(err)
	}

	cmd.ui.Say("SSH username is %s", terminal.EntityNameColor(sshDetails.User))
	cmd.ui.Say("SSH IP Address is %s", terminal.EntityNameColor(sshDetails.Ip))
	cmd.ui.Say("SSH Port is %s", terminal.EntityNameColor(strconv.Itoa(sshDetails.Port)))
	cmd.ui.Say("SSH Identity is %s", terminal.EntityNameColor(tempfile))
	cmd.ui.Say("")

	userAndHost := fmt.Sprintf("%s@%s", sshDetails.User, sshDetails.Ip)
	var sshCommand []string
	sshCommand = []string{"-i", tempfile, "-o", "ConnectTimeout=5", "-o", "StrictHostKeychecking=no", "-o", "UserKnownHostsFile=/dev/null", "-p", strconv.Itoa(sshDetails.Port), userAndHost}
	cmd.ui.Say("Command: ssh %s", strings.Join(sshCommand, " "))

	// A failed ssh session is reported but not fatal; cleanup still runs.
	if runErr := ExecuteCmd("ssh", sshCommand); runErr != nil {
		cmd.ui.Say("Command Failed: %s", runErr)
	}
	if removeErr := os.Remove(tempfile); removeErr != nil {
		panic(removeErr)
	}
	if removeErr := os.Remove(tempdir); removeErr != nil {
		panic(removeErr)
	}
	cmd.ui.Say("SSH Finished\n")
}
|
<reponame>wuchunfu/pinecms
import { BaseService, Service, Permission } from "/@/core";
// Service facade for instant-messaging message endpoints.
// Registered under the "im/message" namespace; mock mode is enabled.
@Service({
namespace: "im/message",
mock: true
})
class ImMessage extends BaseService {
// Mark a message as read. Guarded by the "read" permission.
@Permission("read")
read(data: any) {
return this.request({
url: "/read",
method: "POST",
data
});
}
}

export default ImMessage;
|
#!/usr/bin/env bash
# Container entrypoint for PostgreSQL: initializes the data directory on
# first run, provisions the configured user/database/schema in a background
# helper, schedules optional backups, then execs the postgres server.
# need to make the dir for /var/run/sql-version.tmp/
POSTGRES_VERSION=9.4
# Create postgres data directory and run initdb if needed
# This is useful for docker volumes
if [ ! -e /var/lib/postgresql/data ]; then
echo "Creating data directory"
mkdir -p /var/lib/postgresql/data
touch /var/lib/postgresql/firstrun
echo "Initializing database files"
/usr/lib/postgresql/$POSTGRES_VERSION/bin/initdb -D /var/lib/postgresql/data/
fi
# Create postgres backup directory if needed
mkdir -p /var/backups
# One-time provisioning, run in the background (see invocation below) so
# the server process can start in the foreground. Only does work when the
# firstrun marker created by initdb above exists.
create_user () {
if [ -f /var/lib/postgresql/firstrun ]; then
mkdir -p /var/run/postgresql/$POSTGRES_VERSION-main.pg_stat_tmp
echo "Waiting for PostgreSQL to start"
while [ ! -e /var/run/postgresql/$POSTGRES_VERSION-main.pid ]; do
inotifywait -q -q -e create /var/run/postgresql/
done
# We sleep here for 2 seconds to allow clean output, and separation from postgres startup messages
sleep 2
echo "Below are your configured options."
echo -e "================
USER: $USER
PASSWORD: $PASSWORD
DATABASE: $DATABASE
SCHEMA: $SCHEMA
ENCODING: $ENCODING
POSTGIS: $POSTGIS
================"
# Ensure template1 gets updated with proper encoding
psql -c "UPDATE pg_database SET datistemplate = FALSE WHERE datname = 'template1';"
psql -c "DROP DATABASE template1;"
psql -c "CREATE DATABASE template1 WITH TEMPLATE = template0 ENCODING = '$ENCODING';"
psql -c "UPDATE pg_database SET datistemplate = TRUE WHERE datname = 'template1';"
psql -d 'template1' -c "VACUUM FREEZE;"
# Either reuse the built-in postgres superuser or create a new one.
if [ "$USER" == "postgres" ]; then
echo "ALTER USER :user WITH PASSWORD :'password' ;" | psql --set user=$USER --set password=$PASSWORD
if [ "$DATABASE" != "postgres" ]; then
createdb -E $ENCODING -T template0 $DATABASE
fi
else
echo "CREATE USER :user WITH SUPERUSER PASSWORD :'password' ;" | psql --set user=$USER --set password=$PASSWORD && createdb -E $ENCODING -T template0 $DATABASE
fi
echo "CREATING SCHEMA $SCHEMA"
echo "CREATE SCHEMA $SCHEMA;" | psql --set user=$USER --set password=$PASSWORD $DATABASE
if echo $POSTGIS |grep -i -q true; then
echo "CREATING EXTENSIONS"
echo "CREATE EXTENSION postgis;CREATE EXTENSION postgis_topology;" | psql -d $DATABASE
else
echo "NOT CREATING EXTENSIONS"
fi
# Create .pgpass for use with backups
echo "localhost:5432:*:$USER:$PASSWORD" > /var/lib/postgresql/.pgpass
chmod 0600 /var/lib/postgresql/.pgpass
# Update pg_backup with proper user
sed -i "s/^USERNAME=.*$/USERNAME=$USER/" /usr/local/etc/pg_backup.config
# Schedule backups
if [ "${BACKUP_ENABLED,,}" == "true" ]; then
# TODO rotate this log
BACKUP_COMMAND="/usr/local/bin/pg_backup.sh -c /usr/local/etc/pg_backup.config >> /var/log/postgresql/pg_backup.log 2>&1"
echo "Scheduling PostgreSQL Backups"
case ${BACKUP_FREQUENCY,,} in
hourly)
echo "Scheduling Hourly Backups"
echo -e "MAILTO=$BACKUP_EMAIL
0 * * * * $BACKUP_COMMAND" | crontab
echo -e "Resulting cron:
`crontab -l`"
;;
daily)
echo "Scheduling Daily Backups"
echo -e "MAILTO=$BACKUP_EMAIL
0 0 * * * $BACKUP_COMMAND" | crontab
echo -e "Resulting cron:
`crontab -l`"
;;
weekly)
echo "Scheduling Weekly Backups"
echo -e "MAILTO=$BACKUP_EMAIL
0 0 * * 0 $BACKUP_COMMAND" | crontab
echo -e "Resulting cron:
`crontab -l`"
;;
*)
echo "$BACKUP_FREQUENCY is not valid for BACKUP_FREQUENCY, acceptable values are hourly, daily, or weekly"
;;
esac
fi
# Remove the marker so provisioning runs only once per data directory.
rm /var/lib/postgresql/firstrun
fi
}
# Provision in the background, then run the server as PID 1 via exec.
create_user &
exec /usr/lib/postgresql/$POSTGRES_VERSION/bin/postgres -D /var/lib/postgresql/data -c config_file=/etc/postgresql/$POSTGRES_VERSION/main/postgresql.conf
#!/usr/bin/env bash
# Symlink prebuilt OpenSSL headers and shared libraries into each NDK
# toolchain sysroot (arm, arm64, x86, x86_64) so cross builds can link
# against them.
set -e
source script/env.sh

SRC_DIR=$EXTERNAL_LIBS_ROOT/android-openssl
TARGET_DIR=$EXTERNAL_LIBS_ROOT/openssl

ln -sf $TARGET_DIR/include $NDK_TOOL_DIR/arm/sysroot/usr/include/openssl
ln -sf $TARGET_DIR/arm/lib/*.so $NDK_TOOL_DIR/arm/sysroot/usr/lib
ln -sf $TARGET_DIR/include $NDK_TOOL_DIR/arm64/sysroot/usr/include/openssl
ln -sf $TARGET_DIR/arm64/lib/*.so $NDK_TOOL_DIR/arm64/sysroot/usr/lib
ln -sf $TARGET_DIR/include $NDK_TOOL_DIR/x86/sysroot/usr/include/openssl
# NOTE(review): only the x86_64 libraries go to usr/lib64 while all other
# ABIs use usr/lib — presumably matching the sysroot layout; confirm.
ln -sf $TARGET_DIR/x86/lib/*.so $NDK_TOOL_DIR/x86/sysroot/usr/lib
ln -sf $TARGET_DIR/include $NDK_TOOL_DIR/x86_64/sysroot/usr/include/openssl
ln -sf $TARGET_DIR/x86_64/lib/*.so $NDK_TOOL_DIR/x86_64/sysroot/usr/lib64
|
<reponame>tiagorahal/school-library
require_relative 'person'
require_relative 'classroom'

# A student is a person optionally attached to a classroom.
class Student < Person
  # NOTE(review): no @owner is ever assigned in this class, so the :owner
  # reader always returns nil — confirm it is still needed.
  attr_reader :owner, :classroom

  # age/name/parent_permission are handled by Person.
  # classroom defaults to the string 'none' until one is assigned.
  # Fix: default name typo 'Unkown' -> 'Unknown'.
  def initialize(age, name = 'Unknown', parent_permission = true, classroom = 'none')
    super(age, name, parent_permission)
    @classroom = classroom
  end

  # Assign a classroom and register this student with it (no duplicates).
  def classroom=(classroom)
    @classroom = classroom
    classroom.students.push(self) unless classroom.students.include?(self)
  end

  def play_hooky
    '¯\(ツ)/¯'
  end
end
|
#!/bin/sh
# Copyright (C) 2017 myDevices
# Gateway keep-alive: reads the TTN gateway ID from the packet forwarder
# config and POSTs a ping to every host listed in the downloaded target file.
wget="wget"
NETWORK="ttn"
#gateway id
CONFIG_FILE="/opt/ttn-gateway/packet_forwarder/lora_pkt_fwd/local_conf.json"
TAG_GW_MAC="gateway_ID"
# Extract the gateway_ID value from the JSON config and strip whitespace.
GATEWAY_ID=$( sed -n 's/.*"'$TAG_GW_MAC'": "\(.*\)"/\1/p' $CONFIG_FILE | sed -e 's/^[ ]*//g' | sed -e 's/[ ]*$//g' )
# append eui in front of the gateway id
GATEWAY_ID="eui-$GATEWAY_ID"
BROADCAST_HOSTS_URL=http://gw-ping.simplysense.com/pingtargets
BROADCAST_HOSTS_DIR=/tmp
BROADCAST_HOSTS_FILE=$BROADCAST_HOSTS_DIR/pingtargets
#optional payload is keepalive
#default keepalive is 1 minute = 60 seconds
#set this a cron job of 1 minute
# * * * * * username /some_path/lora_online.sh
# Download the ping-target list once; /tmp is wiped on reboot so it will be
# re-fetched then.
if [ ! -f "$BROADCAST_HOSTS_FILE" ]
then
echo "$0: File '${BROADCAST_HOSTS_FILE}' not found. Downloading the file."
$wget $BROADCAST_HOSTS_URL -P $BROADCAST_HOSTS_DIR
fi
# POST an empty body to each target; only the HTTP status code is checked.
while IFS= read -r CAYENNE_LORA_HOST; do
CAYENNE_LORA_URI="$CAYENNE_LORA_HOST/$NETWORK/gateways/$GATEWAY_ID/state"
echo "Calling server host for cayenne: $CAYENNE_LORA_URI"
result=$($wget --server-response --post-data \"\" --no-check-certificate $CAYENNE_LORA_URI 2>&1 -O /dev/null | awk '/^ HTTP/{print $2}')
# result=$($wget -qO- --post-data "" $CAYENNE_LORA_URI)
if [ "$result" != "200" ]; then
echo "Error from the server: '$result'"
fi
done <$BROADCAST_HOSTS_FILE
|
#!/bin/sh
# Launcher wrapper: make the bundled private libraries resolvable, then
# replace this shell with the real TeighaViewer binary (args passed through).
export LD_LIBRARY_PATH=/usr/lib/TeighaViewer:$LD_LIBRARY_PATH
exec /usr/lib/TeighaViewer/TeighaViewer "$@"
|
import redis


class UserActivityManager:
    """Persist the latest activity (timestamp and link) per user of a
    subreddit into a local Redis instance."""

    def __init__(self, subreddit, logger, evt):
        # Subreddit name; used as the key prefix so several subreddits can
        # share one Redis database.
        self._subreddit = subreddit
        self._logger = logger
        # Event-like object (has is_set()) used to signal shutdown —
        # presumably a threading.Event; confirm at the call site.
        self._evt = evt
        self._redis = redis.StrictRedis(host='localhost', port=6379, db=0)

    def userstream(self):
        # Assume this method yields tuples containing username, timestamp, and link from the user stream
        pass

    def store_user_activity(self):
        """Consume the user stream and record each user's latest activity.

        Key format: "<subreddit>|<username>"; value: "<timestamp>|<link>".
        Later writes overwrite earlier ones, keeping only the most recent.

        NOTE(review): userstream() is currently a stub returning None, so
        iterating over it raises TypeError until it is implemented.
        """
        for username, timestamp, link in self.userstream():
            if self._evt.is_set():
                break
            self._logger.debug(username)
            self._redis.set(f"{self._subreddit}|{username}", f"{timestamp}|{link}")
/*-
* #%L
* Scenery-backed 3D visualization package for ImageJ.
* %%
* Copyright (C) 2016 - 2021 SciView developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package sc.iview.commands.demo.animation;
import graphics.scenery.*;
import graphics.scenery.backends.ShaderType;
import graphics.scenery.primitives.Cone;
import org.joml.Quaternionf;
import org.joml.Vector3f;
import org.joml.Vector4f;
import org.scijava.command.Command;
import org.scijava.command.CommandService;
import org.scijava.io.IOService;
import org.scijava.log.LogService;
import org.scijava.plugin.Menu;
import org.scijava.plugin.Parameter;
import org.scijava.plugin.Plugin;
import sc.iview.SciView;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Random;
import static sc.iview.commands.MenuWeights.*;
/**
* A demo of particle movement.
*
* @author <NAME>
*/
@Plugin(type = Command.class, label = "Particle Demo", menuRoot = "SciView", //
        menu = { @Menu(label = "Demo", weight = DEMO), //
                @Menu(label = "Animation", weight = DEMO_ANIMATION), //
                @Menu(label = "Particle", weight = DEMO_ANIMATION_PARTICLE) })
public class ParticleDemo implements Command {

    @Parameter
    private IOService io;

    @Parameter
    private LogService log;

    @Parameter
    private SciView sciView;

    @Parameter
    private CommandService commandService;

    /** Number of agents (cones) to animate. */
    @Parameter
    private int numAgents = 10;

    @Override
    public void run() {
        List<Node> agents = new ArrayList<>();

        // Fixed seed so the initial layout is reproducible across runs.
        Random rng = new Random(17);

        float dt = 0.5f;
        // Bounding region; agents past this squared distance are steered back.
        float maxX = 10;
        float maxY = 10;
        float maxZ = 10;
        float maxL2 = maxX * maxX + maxY * maxY + maxZ * maxZ;

        // Master node whose geometry and material all instances share.
        Node master = new Cone(5, 10, 25, new Vector3f(0, 0, 1));

        List<ShaderType> sList = new ArrayList<>();
        sList.add(ShaderType.VertexShader);
        sList.add(ShaderType.FragmentShader);

        master.setMaterial(ShaderMaterial.fromClass(ParticleDemo.class, sList));
        master.ifMaterial( mat -> {
            mat.setAmbient(new Vector3f(0.1f, 0f, 0f));
            mat.setDiffuse(new Vector3f(0.8f, 0.7f, 0.7f));
            // Fix: the original called setDiffuse twice, silently overwriting the
            // value above; the second call was presumably meant to set specular.
            mat.setSpecular(new Vector3f(0.05f, 0f, 0f));
            mat.setMetallic(0.01f);
            mat.setRoughness(0.5f);
            return null;
        });
        master.setName("Agent_Master");
        sciView.addNode(master);

        for( int k = 0; k < numAgents; k++ ) {
            InstancedNode n = new InstancedNode(master, "agent_" + k);
            n.getInstancedProperties().put("ModelMatrix", n::getWorld);

            float x = rng.nextFloat() * maxX;
            float y = rng.nextFloat() * maxY;
            float z = rng.nextFloat() * maxZ;

            Vector3f vel = new Vector3f(rng.nextFloat(), rng.nextFloat(), rng.nextFloat());

            // Blue channel encodes the agent index, so agents are distinguishable.
            final Vector4f col = new Vector4f(rng.nextFloat(), rng.nextFloat(), ((float) k) / ((float) numAgents), 1.0f);
            n.getInstancedProperties().put("Color", () -> col);

            n.setMaterial(master.getMaterial());
            n.setPosition(new Vector3f(x, y, z));
            faceNodeAlongVelocity(n, vel);

            agents.add(n);
        }

        // Animate at 30 fps: advance every agent along its stored velocity,
        // bouncing it back toward the center once it leaves the bounding region.
        sciView.animate(30, new Thread(() -> {
            Vector3f vel;
            Random threadRng = new Random();
            for( Node agent : agents ) {
                Vector3f pos = agent.getPosition();
                if( pos.lengthSquared() > maxL2 ) {
                    // Switch velocity to point toward center + some random perturbation
                    Vector3f perturb = new Vector3f(threadRng.nextFloat() - 0.5f, threadRng.nextFloat() - 0.5f, threadRng.nextFloat() - 0.5f);
                    vel = new Vector3f(pos).mul(-1).add(perturb).normalize();
                    faceNodeAlongVelocity(agent, vel);
                } else {
                    vel = (Vector3f) agent.getMetadata().get("velocity");
                }
                Vector3f finalVel = vel;
                agent.ifSpatial(spatial -> {
                    spatial.setPosition(pos.add(new Vector3f(finalVel).mul(dt)));
                    spatial.setNeedsUpdate(true);
                    spatial.setNeedsUpdateWorld(true);
                    return null;
                });
            }
        }));

        sciView.getFloor().setVisible(false);

        // Guard: with numAgents == 0 the original agents.get(0) threw
        // IndexOutOfBoundsException.
        if (!agents.isEmpty()) {
            sciView.centerOnNode(agents.get(0));
        }
    }

    /** Stores vel in the node's metadata and rotates the node to face along it. */
    private void faceNodeAlongVelocity(Node n, Vector3f vel) {
        n.getMetadata().put("velocity", vel);

        Quaternionf newRot = new Quaternionf();
        Vector3f dir = new Vector3f(vel.x(), vel.y(), vel.z());
        Vector3f up = new Vector3f(0f, 1f, 0f);
        newRot.lookAlong(dir, up);
        n.setRotation(newRot);
    }

    /** Launches SciView and runs this demo standalone. */
    public static void main(String... args) throws Exception {
        SciView sv = SciView.create();
        CommandService command = sv.getScijavaContext().getService(CommandService.class);
        HashMap<String, Object> argmap = new HashMap<>();
        command.run(ParticleDemo.class, true, argmap);
    }
}
|
import re
def clean_phone_number(phone_number):
    """Normalize a US phone number to the form '(AAA) EEE-LLLL'.

    Strips every non-digit character first; returns None unless exactly
    10 digits remain.
    """
    digits = re.sub(r'[^\d]', '', phone_number)
    if len(digits) != 10:
        return None
    # NANP layout: 3-digit area code, 3-digit exchange, 4-digit line number.
    return '({}) {}-{}'.format(digits[:3], digits[3:6], digits[6:])
phone_numbers = ['(800) 555-1234', '(032)536-9222', '541-766-4455']

# Normalize every number, keeping only those that parse successfully.
formatted_numbers = [
    cleaned
    for cleaned in (clean_phone_number(number) for number in phone_numbers)
    if cleaned is not None
]

print(formatted_numbers)  # ['(800) 555-1234', '(032) 536-9222', '(541) 766-4455']
"""
Design a function to insert element in Binary Search Tree
"""
# Node class
class Node:
    """A single node of a binary search tree."""

    def __init__(self, data):
        self.data = data    # key stored at this node
        self.left = None    # subtree holding smaller keys
        self.right = None   # subtree holding keys >= data
def insert(node, data):
    """Insert `data` into the BST rooted at `node` and return the root.

    An empty tree (node is None) produces a fresh single-node tree.
    Duplicates go to the right subtree, matching the < comparison.
    """
    if node is None:
        return Node(data)

    # Iterative descent: walk down until a free slot appears on the
    # appropriate side, then attach the new node there.
    current = node
    while True:
        if data < current.data:
            if current.left is None:
                current.left = Node(data)
                break
            current = current.left
        else:
            if current.right is None:
                current.right = Node(data)
                break
            current = current.right

    return node
<reponame>Prototallinus/Module6_task
// Interactive link-renaming toy: first click asks for confirmation, later
// clicks alternate between a "think more" prompt and actually editing the
// link text via prompt().
const Link = document.querySelector('a');
let clickCounter = 0;

// First click only ({ once: true } auto-removes this listener, so the
// original manual removeEventListener call — which passed the *event*
// object instead of the handler and was therefore a no-op — is dropped).
Link.addEventListener('click', function (event) {
  // Fix: the original passed alert(...) as an argument to preventDefault(),
  // relying on argument-evaluation order; call them separately instead.
  event.preventDefault();
  alert('Вы хотите изменить текст ссылки?');
  Link.addEventListener('click', ThinkMore);
}, { once: true });

function ThinkMore(event) {
  event.preventDefault();
  if (clickCounter == 0) {
    alert('Подумайте больше!');
  } else {
    alert('Вы хотите изменить текст ещё раз?');
  }
  Link.removeEventListener('click', ThinkMore);
  Link.addEventListener('click', ChangeLink);
}

function ChangeLink() {
  // NOTE(review): no preventDefault here, matching the original — the link
  // may navigate after editing; confirm whether that is intended.
  this.textContent = prompt('Измените текст ссылки');
  Link.removeEventListener('click', ChangeLink);
  Link.addEventListener('click', ThinkMore);
  clickCounter = clickCounter + 1;
  console.log(clickCounter);
}
import fetch from 'node-fetch';
import rollbar from './rollbar';
import url from 'url';
// GraphQL endpoint; overridable through the API_URL environment variable.
const API_URL =
  process.env.API_URL || 'https://cofacts-api.hacktabl.org/graphql';
// Usage:
//
// import gql from './util/GraphQL';
// gql`query($var: Type) { foo }`({var: 123}).then(...)
//
// gql`...`() returns a promise that resolves to immutable Map({data, errors}).
//
// We use template string here so that Atom's language-babel does syntax highlight
// for us.
//
// GraphQL Protocol: http://dev.apollodata.com/tools/graphql-server/requests.html
//
// Tagged-template GraphQL client: gql`query(...){...}`(variables, search)
// returns a promise resolving to the parsed response ({data, errors}).
export default (query, ...substitutions) => (variables, search) => {
  const payload = {
    query: String.raw(query, ...substitutions),
  };
  if (variables) payload.variables = variables;

  const requestUrl = `${API_URL}${url.format({ query: search })}`;
  let httpStatus;

  return fetch(requestUrl, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'x-app-secret': process.env.APP_SECRET,
    },
    credentials: 'include',
    body: JSON.stringify(payload),
  })
    .then(response => {
      httpStatus = response.status;
      return response.json();
    })
    .then(result => {
      // 400 means the query itself was rejected — surface it as an Error.
      if (httpStatus === 400) {
        throw new Error(
          `GraphQL Error: ${result.errors
            .map(({ message }) => message)
            .join('\n')}`
        );
      }
      if (result.errors) {
        // When status is 200 but have error, just print them out.
        console.error('GraphQL operation contains error:', result.errors);
        rollbar.error(
          'GraphQL error',
          {
            body: JSON.stringify(payload),
            url: requestUrl,
          },
          { resp: result }
        );
      }
      return result;
    });
};
|
<filename>src/components/syndicate/SyndicateAddMemberCard/index.js
import React from 'react'
import PropTypes from 'prop-types'
const SyndicateAddMemberCard = (props) => {
const {
onClick
} = props
return (
<div className="AddMemberCard">
<a onClick={onClick}>+ ADD JOINT OWNER</a>
</div>
)
}
SyndicateAddMemberCard.propTypes = {
className: PropTypes.string,
title: PropTypes.string
}
SyndicateAddMemberCard.defaultProps = {
title: 'ADD JOINT OWNER'
}
export default SyndicateAddMemberCard
|
"""Exceptions used by the library."""
class PygraphError(Exception):
    """Root exception class for all library exceptions.

    Only used for subclassing — never raised directly.
    """


class NonexistentNodeError(PygraphError):
    """Raised when a referenced node is absent from the graph."""

    def __init__(self, node_id):
        self.node_id = node_id

    def __str__(self):
        template = 'Node "{}" does not exist.'
        return template.format(self.node_id)


class NonexistentEdgeError(PygraphError):
    """Raised when a referenced edge is absent from the graph."""

    def __init__(self, edge_id):
        self.edge_id = edge_id

    def __str__(self):
        template = 'Edge "{}" does not exist.'
        return template.format(self.edge_id)


class DisconnectedGraphError(PygraphError):
    """Raised when a graph is unexpectedly disconnected for an algorithm."""
<reponame>hispindia/BIHAR-2.7<filename>dhis-2/dhis-services/dhis-service-importexport/src/main/java/org/hisp/dhis/importexport/dxf2/service/StaXDataValueImportService.java
package org.hisp.dhis.importexport.dxf2.service;
/*
* Copyright (c) 2004-2005, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the <ORGANIZATION> nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import static org.apache.commons.lang.StringUtils.defaultIfEmpty;
import java.util.Collection;
import java.util.Date;
import java.util.Map;
import javax.xml.namespace.QName;
import org.amplecode.quick.BatchHandler;
import org.amplecode.quick.BatchHandlerFactory;
import org.amplecode.staxwax.reader.XMLReader;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.aggregation.AggregatedDataValueService;
import org.hisp.dhis.common.ProcessState;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.dataelement.DataElementCategoryService;
import org.hisp.dhis.dataelement.DataElementService;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.dataset.DataSetService;
import org.hisp.dhis.datavalue.DataValue;
import org.hisp.dhis.importexport.ImportException;
import org.hisp.dhis.importexport.ImportParams;
import org.hisp.dhis.importexport.dxf2.model.DataValueSet;
import org.hisp.dhis.importexport.importer.DataValueImporter;
import org.hisp.dhis.jdbc.batchhandler.DataValueBatchHandler;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodService;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.user.CurrentUserService;
/**
* Really basic DXF2 class for reading data
*
* @author bobj
*/
public class StaXDataValueImportService
{
    private static final Log log = LogFactory.getLog( StaXDataValueImportService.class );

    // -------------------------------------------------------------------------
    // Status/Log messages
    // TODO: internationalise these
    // -------------------------------------------------------------------------

    public static final String NO_DATAVALUESETS = "There are no datasets in this message";
    public static final String IMPORTING_DATAVALUES = "Importing data values";
    public static final String INVALID_PERIOD = "Invalid period : %s";
    public static final String NO_SUCH_ORGUNIT = "No such orgunit : %s = %s";
    public static final String NO_SUCH_DATAELEMENT = "No such dataElement : %s = %s";
    public static final String NO_ROOT = "Couldn't find dxf root element";
    public static final String UNKNOWN_ID_STRATEGY = "Unknown id strategy = %s";
    public static final String SUCCESS = "DataValue import complete";
    public static final String COUNTER = "%s DataValues imported";

    // Progress message is refreshed every DISPLAYCOUNT imported values.
    public static final int DISPLAYCOUNT = 1000;

    // DXF 2.0 document structure: namespace, root element and attribute names.
    public static final String NAMESPACE_20 = "http://dhis2.org/schema/dxf/2.0";
    public static final String DXFROOT = "dxf";
    public static final String ATTRIBUTE_MINOR_VERSION = "minorVersion";
    public static final String ATTRIBUTE_EXPORTED = "exported";
    public static final String DATAVALUESETS = "dataValueSets";
    public static final String DATAVALUESET = "dataValueSet";

    // Assumed minor version when the document does not declare one.
    public static final String MINOR_VERSION_10 = "1.0";

    // -------------------------------------------------------------------------
    // Dependencies (setter injection, wired by the Spring context)
    // -------------------------------------------------------------------------

    private CurrentUserService currentUserService;

    public void setCurrentUserService( CurrentUserService currentUserService )
    {
        this.currentUserService = currentUserService;
    }

    private AggregatedDataValueService aggregatedDataValueService;

    public void setAggregatedDataValueService( AggregatedDataValueService aggregatedDataValueService )
    {
        this.aggregatedDataValueService = aggregatedDataValueService;
    }

    private DataSetService dataSetService;

    public void setDataSetService( DataSetService dataSetService )
    {
        this.dataSetService = dataSetService;
    }

    private DataElementService dataElementService;

    public void setDataElementService( DataElementService dataElementService )
    {
        this.dataElementService = dataElementService;
    }

    private DataElementCategoryService categoryService;

    public void setCategoryService( DataElementCategoryService categoryService )
    {
        this.categoryService = categoryService;
    }

    private OrganisationUnitService organisationUnitService;

    public void setOrganisationUnitService( OrganisationUnitService organisationUnitService )
    {
        this.organisationUnitService = organisationUnitService;
    }

    private PeriodService periodService;

    public void setPeriodService( PeriodService periodService )
    {
        this.periodService = periodService;
    }

    private BatchHandlerFactory batchHandlerFactory;

    public void setBatchHandlerFactory( BatchHandlerFactory batchHandlerFactory )
    {
        this.batchHandlerFactory = batchHandlerFactory;
    }
    /**
     * Reads DXF2 data value sets from the given XML stream and imports every
     * data value through a batch handler. Progress and failures are reported
     * via the ProcessState; ImportExceptions are logged, not rethrown.
     */
    public void read( XMLReader reader, ImportParams params, ProcessState state )
    {
        String user = currentUserService.getCurrentUsername();

        BatchHandler<DataValue> batchHandler = batchHandlerFactory.createBatchHandler(
            DataValueBatchHandler.class ).init();

        DataValueImporter importer =
            new DataValueImporter(batchHandler, aggregatedDataValueService, params);

        int cumulativeDataValueCounter = 0;

        try
        {
            // The document must open with the dxf root element.
            if ( !reader.moveToStartElement( DXFROOT, DXFROOT ) )
            {
                throw new ImportException( NO_ROOT );
            }

            QName rootName = reader.getElementQName();

            // Fall back to the 2.0 namespace / 1.0 minor version when absent.
            params.setNamespace( defaultIfEmpty( rootName.getNamespaceURI(), NAMESPACE_20 ) );

            String version = reader.getAttributeValue( ATTRIBUTE_MINOR_VERSION );

            params.setMinorVersion( version != null ? version : MINOR_VERSION_10 );

            log.debug( String.format( "Importing %s minor version %s", rootName.getNamespaceURI(), version ) );

            // move straight to the DataValue sets, we are not looking for metadata
            reader.moveToStartElement( DATAVALUESETS );

            // Single timestamp shared by every imported value in this run.
            Date timestamp = new Date();

            if ( !reader.isStartElement( DATAVALUESETS ) )
            {
                throw new ImportException( NO_DATAVALUESETS );
            }

            // Outer loop, process datavaluesets until no more datavaluesets
            int countDataValueSets = 0;

            do
            {
                // look for a DataValue set
                if ( !reader.isStartElement( DATAVALUESET ) )
                {
                    try
                    {
                        reader.moveToStartElement( DATAVALUESET );
                    }
                    catch ( java.util.NoSuchElementException ex )
                    {
                        // we have to reach here eventually: no more sets in the stream
                        break;
                    }
                }

                // Pick off the attributes
                String idSchemeStr = reader.getAttributeValue( DataValueSet.ATTR_IDSCHEME );
                String dataSet = reader.getAttributeValue( DataValueSet.ATTR_DATASET );
                String period = reader.getAttributeValue( DataValueSet.ATTR_PERIOD );
                String outerOrgunit = reader.getAttributeValue( DataValueSet.ATTR_ORGUNIT );
                String comment = reader.getAttributeValue( DataValueSet.ATTR_COMMENT );

                log.debug( String.format(
                    "Importing datavalueset (%s): period %s : orgunit %s : idscheme : %s",
                    comment, period, outerOrgunit, idSchemeStr ) );

                // Determine identifier scheme to use
                DataValueSet.IdentificationStrategy idScheme = DataValueSet.DEFAULT_STRATEGY;

                if ( idSchemeStr != null )
                {
                    try
                    {
                        idScheme = DataValueSet.IdentificationStrategy.valueOf( idSchemeStr );
                    }
                    catch ( IllegalArgumentException ex )
                    {
                        throw new ImportException( String.format( UNKNOWN_ID_STRATEGY, idSchemeStr ) );
                    }
                }

                Period outerPeriod = getPeriodObj( period );

                // maps for translating identifiers
                Map<String, Integer> dataelementMap = null;
                Map<String, Integer> orgunitMap = null;

                // get map for translating dataelement identifiers
                // NOTE(review): this map is null for the INTERNAL scheme (see
                // getDataElementMap), so the .get() further below would NPE —
                // confirm whether INTERNAL is actually supported here.
                dataelementMap = getDataElementMap( dataSet, idScheme );

                Integer outerOrgunitId = null;

                // if orgunit defined at datavalueset level, use it
                if ( outerOrgunit != null )
                {
                    outerOrgunitId = getOrgUnitByIdentifier( outerOrgunit, idScheme ).getId();
                } else
                {
                    // get map for translating orgunit identifiers
                    orgunitMap = getOrgUnitMap( dataSet, idScheme );
                }

                // only supporting default optioncombo at present
                DataElementCategoryOptionCombo optioncombo =
                    categoryService.getDefaultDataElementCategoryOptionCombo();

                int countDataValues = 0;

                // process datavalues - loop until no more datavalues
                do
                {
                    // look for a DataValue
                    reader.moveToStartElement();
                    if ( !reader.isStartElement( DataValueSet.DATAVALUE ) )
                    {
                        // we have to reach here eventually
                        break;
                    }

                    log.debug( "Reading Datavalue" );

                    String dataElementId = reader.getAttributeValue(
                        org.hisp.dhis.importexport.dxf2.model.DataValue.ATTR_DATAELEMENT );
                    String innerOrgUnitId = reader.getAttributeValue(
                        org.hisp.dhis.importexport.dxf2.model.DataValue.ATTR_ORGUNIT );
                    String value = reader.getAttributeValue(
                        org.hisp.dhis.importexport.dxf2.model.DataValue.ATTR_VALUE );

                    DataValue dv = new DataValue();
                    dv.setPeriod( outerPeriod );
                    dv.setValue( value );

                    // populate with placeholders; only the ids are set below
                    dv.setDataElement( new DataElement() );
                    dv.setSource( new OrganisationUnit() );
                    dv.setOptionCombo( optioncombo );
                    dv.setComment( comment );
                    dv.setStoredBy( user );
                    dv.setTimestamp( timestamp );

                    // if no outer orgunit defined, use the map
                    if ( outerOrgunit == null )
                    {
                        Integer id = orgunitMap.get( innerOrgUnitId );

                        if ( id == null )
                        {
                            log.info( "Unknown orgunit: " + innerOrgUnitId + " Rejecting value");
                            // continue jumps to the while(true) condition: the
                            // value is skipped without being imported
                            continue;
                        }
                        dv.getSource().setId( orgunitMap.get( innerOrgUnitId ) );
                    }
                    else
                    {
                        dv.getSource().setId( outerOrgunitId );
                    }

                    // NOTE(review): an unknown dataElementId yields null here and
                    // unboxing throws NullPointerException — TODO reject the value
                    // like the orgunit case above.
                    dv.getDataElement().setId( dataelementMap.get( dataElementId ) );

                    importer.importObject(dv,params);

                    ++countDataValues;
                    ++cumulativeDataValueCounter;

                    // Refresh the user-visible progress message periodically.
                    if (countDataValues % DISPLAYCOUNT == 0) {
                        state.setMessage( String.format(COUNTER,cumulativeDataValueCounter));
                    }
                    log.debug( cumulativeDataValueCounter + " DataValues read" );

                } while ( true ); // DataValues loop

                ++countDataValueSets;
                log.debug( countDataValueSets + " DataValueSets read" );

            } while ( true ); // DataValueSets loop

            log.info( String.format(COUNTER,cumulativeDataValueCounter));
            state.setMessage( String.format(COUNTER,cumulativeDataValueCounter));
        }
        catch ( ImportException ex )
        {
            log.warn( ex.toString() );
            state.setMessage( ex.toString() );
        }
        finally
        {
            // Always flush so values imported before a failure are persisted.
            batchHandler.flush();
        }
    }
private Period getPeriodObj( String period )
throws ImportException
{
Period periodObj;
PeriodType pt = PeriodType.getPeriodTypeFromIsoString( period );
if ( pt == null )
{
throw new ImportException( String.format( INVALID_PERIOD, period ) );
}
try
{
periodObj = pt.createPeriod( period );
} catch ( Exception e )
{
throw new ImportException( String.format( INVALID_PERIOD, period ) );
}
Period storedPeriod = periodService.getPeriod( periodObj.getStartDate(), periodObj.getEndDate(), pt );
if ( storedPeriod == null )
{
int periodId = periodService.addPeriod( periodObj );
periodObj.setId( periodId );
} else
{
periodObj = storedPeriod;
}
return periodObj;
}
    /**
     * Builds a map from data element identifier (per the given id scheme) to
     * internal database id, scoped to the given dataset when one is named,
     * otherwise covering all data elements.
     *
     * NOTE(review): deliberately returns null for the INTERNAL scheme (no
     * translation table is built) — callers must not dereference the result
     * in that case; read() currently would. Confirm intended behavior.
     */
    private Map<String, Integer> getDataElementMap( String dataSet, DataValueSet.IdentificationStrategy idScheme )
    {
        Collection<DataElement> dataelements;
        Map<String, Integer> result = null;

        if ( dataSet != null )
        {
            DataSet ds = getDataSet( dataSet, idScheme );
            dataelements = ds.getDataElements();
        } else
        {
            dataelements = dataElementService.getAllDataElements();
        }

        switch ( idScheme )
        {
            case CODE:
                result = DataElement.getCodeMap( dataelements );
                break;
            case INTERNAL:
                // internal ids need no translation map
                break;
            default:
                throw new IllegalArgumentException( "Can't map with :" + idScheme );
        }

        return result;
    }
private Map<String, Integer> getOrgUnitMap( String dataSet, DataValueSet.IdentificationStrategy idScheme )
{
Collection<OrganisationUnit> orgunits;
Map<String, Integer> result = null;
if ( dataSet != null )
{
DataSet ds = getDataSet( dataSet, idScheme );
orgunits = ds.getSources();
} else
{
orgunits = organisationUnitService.getAllOrganisationUnits();
}
switch ( idScheme )
{
case CODE:
result = OrganisationUnit.getCodeMap( orgunits );
break;
case INTERNAL:
break;
default:
throw new IllegalArgumentException( "Can't map with :" + idScheme );
}
log.debug( result.size() + " orgunits in map" );
return result;
}
/**
* For a given orgunit identifier and id scheme, returns the orgunit object reference
* @param orgunit
* @param idScheme
* @return
* @throws ImportException thrown if no orgunit matches
*/
private OrganisationUnit getOrgUnitByIdentifier( String orgunit, DataValueSet.IdentificationStrategy idScheme )
throws ImportException
{
OrganisationUnit ou;
switch ( idScheme )
{
case UID:
ou = organisationUnitService.getOrganisationUnit( orgunit );
break;
case CODE:
ou = organisationUnitService.getOrganisationUnitByCode( orgunit );
break;
case INTERNAL:
ou = organisationUnitService.getOrganisationUnit( Integer.parseInt( orgunit ) );
break;
default:
throw new IllegalArgumentException( "Can't map with :" + idScheme );
}
if ( ou == null )
{
throw new ImportException( String.format( NO_SUCH_ORGUNIT, idScheme, orgunit ) );
}
return ou;
}
private DataSet getDataSet( String dataSet, DataValueSet.IdentificationStrategy idScheme )
{
DataSet result = null;
switch ( idScheme )
{
case INTERNAL:
result = dataSetService.getDataSet( Integer.parseInt( dataSet ) );
break;
case CODE:
result = dataSetService.getDataSetByCode( dataSet );
break;
default:
result = dataSetService.getDataSet( dataSet );
}
return result;
}
}
|
// Copyright 2004-present Facebook. All Rights Reserved.
#include <gtest/gtest.h>
#include <osquery/events.h>
#include <osquery/tables.h>
namespace osquery {
// Test fixture: deregisters all event publishers after every test so that
// cases stay independent of each other.
class EventsTests : public testing::Test {
 public:
  void TearDown() { EventFactory::deregisterEventPublishers(); }
};

// Minimal publisher using the default subscription/event context types.
class BasicEventPublisher : public EventPublisher {
  DECLARE_EVENTPUBLISHER(BasicEventPublisher,
                         SubscriptionContext,
                         EventContext);
};

// A second minimal publisher, used to exercise registration of a distinct
// publisher type alongside BasicEventPublisher.
class FakeBasicEventPublisher : public EventPublisher {
  DECLARE_EVENTPUBLISHER(FakeBasicEventPublisher,
                         SubscriptionContext,
                         EventContext);
};
// Registration works both via the class template and via an instance.
TEST_F(EventsTests, test_register_event_pub) {
  Status status;

  // A caller may register an event type using the class template.
  status = EventFactory::registerEventPublisher<BasicEventPublisher>();
  EXPECT_TRUE(status.ok());

  // May also register the event_pub instance
  auto event_pub_instance = std::make_shared<FakeBasicEventPublisher>();
  status = EventFactory::registerEventPublisher(event_pub_instance);
  EXPECT_TRUE(status.ok());

  // May NOT register without subclassing, enforced at compile time.
}

// Duplicate registration of the same publisher type must fail.
TEST_F(EventsTests, test_create_event_pub) {
  Status status;

  status = EventFactory::registerEventPublisher<BasicEventPublisher>();
  EXPECT_TRUE(status.ok());

  // Do not register the same event type twice.
  status = EventFactory::registerEventPublisher<BasicEventPublisher>();
  EXPECT_FALSE(status.ok());

  // Make sure only the first event type was recorded.
  EXPECT_EQ(EventFactory::numEventPublishers(), 1);
}

// Subscriptions may only target publishers that are actually registered.
TEST_F(EventsTests, test_create_subscription) {
  Status status;

  EventFactory::registerEventPublisher<BasicEventPublisher>();

  // Make sure a subscription cannot be added for a non-existent event type.
  // Note: It normally would not make sense to create a blank subscription.
  auto subscription = Subscription::create();
  status =
      EventFactory::addSubscription("FakeBasicEventPublisher", subscription);
  EXPECT_FALSE(status.ok());

  // In this case we can still add a blank subscription to an existing event
  // type.
  status = EventFactory::addSubscription("BasicEventPublisher", subscription);
  EXPECT_TRUE(status.ok());

  // Make sure the subscription is added.
  EXPECT_EQ(EventFactory::numSubscriptions("BasicEventPublisher"), 1);
}

// The same subscription object may be added more than once; each addition
// counts separately.
TEST_F(EventsTests, test_multiple_subscriptions) {
  Status status;

  EventFactory::registerEventPublisher<BasicEventPublisher>();

  auto subscription = Subscription::create();
  status = EventFactory::addSubscription("BasicEventPublisher", subscription);
  status = EventFactory::addSubscription("BasicEventPublisher", subscription);

  EXPECT_EQ(EventFactory::numSubscriptions("BasicEventPublisher"), 2);
}
// Subscription context carrying a single test parameter.
// NOTE(review): `smallest` is never default-initialized here — every test
// constructing this context must set it explicitly.
struct TestSubscriptionContext : public SubscriptionContext {
  int smallest;
};

// Publisher with observable setUp/configure/tearDown side effects, used to
// verify the factory's lifecycle callbacks.
class TestEventPublisher : public EventPublisher {
  DECLARE_EVENTPUBLISHER(TestEventPublisher,
                         TestSubscriptionContext,
                         EventContext);

 public:
  // Lifecycle hook: increments the counter so tests can see setUp ran.
  Status setUp() {
    smallest_ever_ += 1;
    return Status(0, "OK");
  }

  // Lifecycle hook: folds the minimum `smallest` of all subscription
  // contexts into smallest_ever_, and records that configure was invoked.
  void configure() {
    int smallest_subscription = smallest_ever_;

    configure_run = true;
    for (const auto& subscription : subscriptions_) {
      auto subscription_context = getSubscriptionContext(subscription->context);
      if (smallest_subscription > subscription_context->smallest) {
        smallest_subscription = subscription_context->smallest;
      }
    }

    smallest_ever_ = smallest_subscription;
  }

  // Lifecycle hook: increments the counter so tests can see tearDown ran.
  void tearDown() { smallest_ever_ += 1; }

  TestEventPublisher() : EventPublisher() {
    smallest_ever_ = 0;
    configure_run = false;
  }

  // Custom methods do not make sense, but for testing it exists.
  int getTestValue() { return smallest_ever_; }

 public:
  // Set to true once configure() has run at least once.
  bool configure_run;

 private:
  // Running value observed via getTestValue(); mutated by every lifecycle hook.
  int smallest_ever_;
};
// A custom publisher registers alongside the basic one, and its setUp hook
// runs during registration.
TEST_F(EventsTests, test_create_custom_event_pub) {
  Status status;

  status = EventFactory::registerEventPublisher<BasicEventPublisher>();
  auto test_event_pub = std::make_shared<TestEventPublisher>();
  status = EventFactory::registerEventPublisher(test_event_pub);

  // These event types have unique event type IDs
  EXPECT_TRUE(status.ok());
  EXPECT_EQ(EventFactory::numEventPublishers(), 2);

  // Make sure the setUp function was called.
  EXPECT_EQ(test_event_pub->getTestValue(), 1);
}

// Adding a typed subscription context triggers configure(), which folds the
// context's `smallest` value into the publisher's state.
TEST_F(EventsTests, test_custom_subscription) {
  Status status;

  // Step 1, register event type
  auto event_pub = std::make_shared<TestEventPublisher>();
  status = EventFactory::registerEventPublisher(event_pub);

  // Step 2, create and configure a subscription context
  auto subscription_context = std::make_shared<TestSubscriptionContext>();
  subscription_context->smallest = -1;

  // Step 3, add the subscription to the event type
  status =
      EventFactory::addSubscription("TestEventPublisher", subscription_context);
  EXPECT_TRUE(status.ok());
  EXPECT_EQ(event_pub->numSubscriptions(), 1);

  // The event type must run configure for each added subscription.
  EXPECT_TRUE(event_pub->configure_run);
  EXPECT_EQ(event_pub->getTestValue(), -1);
}

// Deregistering (individually or in bulk) must invoke tearDown each time.
TEST_F(EventsTests, test_tear_down) {
  Status status;

  auto event_pub = std::make_shared<TestEventPublisher>();
  status = EventFactory::registerEventPublisher(event_pub);

  // Make sure set up incremented the test value.
  EXPECT_EQ(event_pub->getTestValue(), 1);

  status = EventFactory::deregisterEventPublisher("TestEventPublisher");
  EXPECT_TRUE(status.ok());

  // Make sure tear down incremented the test value.
  EXPECT_EQ(event_pub->getTestValue(), 2);

  // Once more, now deregistering all event types.
  status = EventFactory::registerEventPublisher(event_pub);
  EXPECT_EQ(event_pub->getTestValue(), 3);
  status = EventFactory::deregisterEventPublishers();
  EXPECT_TRUE(status.ok());
  EXPECT_EQ(event_pub->getTestValue(), 4);

  // Make sure the factory state represented.
  EXPECT_EQ(EventFactory::numEventPublishers(), 0);
}
// Counts how many times the test callback below has fired.
static int kBellHathTolled = 0;

// Subscription callback used by test_fire_event; just bumps the counter.
Status TestTheeCallback(EventContextRef context, bool reserved) {
  kBellHathTolled += 1;
  return Status(0, "OK");
}

// Firing an event invokes the callback of every subscription that has one;
// subscriptions without a callback are silently skipped.
TEST_F(EventsTests, test_fire_event) {
  Status status;

  auto event_pub = std::make_shared<BasicEventPublisher>();
  status = EventFactory::registerEventPublisher(event_pub);

  auto subscription = Subscription::create();
  subscription->callback = TestTheeCallback;
  status = EventFactory::addSubscription("BasicEventPublisher", subscription);

  // The event context creation would normally happen in the event type.
  auto ec = event_pub->createEventContext();

  event_pub->fire(ec, 0);
  EXPECT_EQ(kBellHathTolled, 1);

  auto second_subscription = Subscription::create();
  status =
      EventFactory::addSubscription("BasicEventPublisher", second_subscription);

  // Now there are two subscriptions (one sans callback).
  event_pub->fire(ec, 0);
  EXPECT_EQ(kBellHathTolled, 2);

  // Now both subscriptions have callbacks.
  second_subscription->callback = TestTheeCallback;
  event_pub->fire(ec, 0);
  EXPECT_EQ(kBellHathTolled, 4);
}
}
// Standard gtest entry point: run every registered test case.
int main(int argc, char* argv[]) {
  testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}
|
package org.zalando.intellij.swagger.completion.field.completion.swagger;
import com.intellij.codeInsight.completion.CompletionResultSet;
import java.util.Optional;
import org.zalando.intellij.swagger.completion.SwaggerCompletionHelper;
import org.zalando.intellij.swagger.completion.field.FieldCompletion;
import org.zalando.intellij.swagger.completion.field.completion.common.InfoCompletion;
/**
 * Maps the caret's current position (as classified by the completion helper)
 * to the {@code FieldCompletion} implementation responsible for suggesting
 * keys at that position. The checks run in order and the first match wins.
 */
public class SwaggerFieldCompletionFactory {

  public static Optional<FieldCompletion> from(
      final SwaggerCompletionHelper completionHelper,
      final CompletionResultSet completionResultSet) {
    // Guard-clause style: return on the first position test that matches.
    if (completionHelper.completeRootKey()) {
      return Optional.of(new RootCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeInfoKey()) {
      return Optional.of(new InfoCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeContactKey()) {
      return Optional.of(new ContactCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeLicenseKey()) {
      return Optional.of(new LicenseCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completePathKey()) {
      return Optional.of(new PathCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeOperationKey()) {
      return Optional.of(new OperationCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeExternalDocsKey()) {
      return Optional.of(new ExternalDocsCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeParametersKey()) {
      return Optional.of(new ParametersCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeParameterItemsKey()) {
      return Optional.of(new ParameterItemsCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeResponsesKey()) {
      return Optional.of(new ResponsesCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeResponseKey()) {
      return Optional.of(new ResponseCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeHeaderKey()) {
      return Optional.of(new HeaderCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeTagKey()) {
      return Optional.of(new TagCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeSecurityDefinitionKey()) {
      return Optional.of(new SecurityDefinitionCompletion(completionHelper, completionResultSet));
    }
    // Several distinct positions all take plain schema-key completion.
    if (completionHelper.completeSchemaKey()
        || completionHelper.completeSchemaItemsKey()
        || completionHelper.completeDefinitionsKey()
        || completionHelper.completePropertiesSchemaKey()
        || completionHelper.completeAdditionalPropertiesKey()) {
      return Optional.of(new SchemaCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeXmlKey()) {
      return Optional.of(new XmlCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeParameterDefinitionKey()) {
      return Optional.of(new ParameterDefinitionCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeHeadersKey()) {
      return Optional.of(new HeadersCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeResponseDefinition()) {
      return Optional.of(new ResponseDefinitionCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeRootSecurityKey()) {
      return Optional.of(new RootSecurityCompletion(completionHelper, completionResultSet));
    }
    if (completionHelper.completeOperationSecurityKey()) {
      return Optional.of(new OperationSecurityCompletion(completionHelper, completionResultSet));
    }
    // No known position matched: offer no field completions.
    return Optional.empty();
  }
}
|
package com.esri.ges.transport.samplehttpconnector;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpRequest;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.params.HttpParams;
import com.esri.ges.core.component.ComponentException;
import com.esri.ges.transport.TransportContext;
import com.esri.ges.transport.TransportDefinition;
import com.esri.ges.transport.http.HttpInboundTransport;
import com.esri.ges.transport.http.HttpTransportContext;
/**
 * Inbound HTTP transport that, before each poll, injects the timestamp of the
 * previous poll (plus any configured client parameters) into the outgoing
 * POST body, so the remote service can restrict its response to new events.
 */
public class HttpWithLastTimeStampInboundTransport extends HttpInboundTransport
{
  static final private Log log = LogFactory.getLog(HttpWithLastTimeStampInboundTransport.class);

  private String tsparam;   // request-parameter name carrying the last-poll timestamp
  private String tsformat;  // SimpleDateFormat pattern used to render the timestamp
  private String tsinit;    // user-supplied initial timestamp, used on the first poll
  private String tsvalue;   // formatted timestamp attached to the current request
  private String params;    // extra client parameters as a "k1=v1&k2=v2" string
  private Date ts;          // wall-clock time of the previous poll; null before first poll

  public HttpWithLastTimeStampInboundTransport(TransportDefinition definition) throws ComponentException
  {
    super(definition);
  }

  @Override
  public synchronized void start()
  {
    super.start();
  }

  @Override
  public synchronized void stop()
  {
    super.stop();
  }

  /** Reads the transport's configuration properties after the base setup. */
  @Override
  public synchronized void setup()
  {
    super.setup();
    tsparam = getProperty("lastPollTimestampName").getValueAsString();
    tsinit = getProperty("initialLastTimestamp").getValueAsString();
    tsformat = getProperty("timestampFormat").getValueAsString();
    params = getProperty("clientParameters").getValueAsString();
  }

  /**
   * Populates the outgoing POST entity with the last-poll timestamp and any
   * configured client parameters. A non-HTTP context is ignored.
   */
  @Override
  public void beforeConnect(TransportContext context)
  {
    DateFormat df = null;
    tsvalue = "";
    if(! (context instanceof HttpTransportContext))
      return;
    // An invalid pattern silently disables timestamp injection.
    try
    {
      df = new SimpleDateFormat(tsformat);
    }
    catch(Exception e)
    {
      df = null;
    }
    if(df != null)
    {
      if(ts == null)
      {
        // First poll: use the user-configured initial timestamp, falling
        // back to the epoch when it cannot be parsed.
        try
        {
          Date userdefined = df.parse(tsinit);
          if(userdefined == null)
          {
            ts = new Date(0);
            tsvalue = df.format(ts);
          }
          else
          {
            tsvalue = df.format(userdefined);
          }
        }
        catch (ParseException e)
        {
          ts = new Date(0);
          tsvalue = df.format(ts);
        }
      }
      else
      {
        tsvalue = df.format(ts);
      }
      // Record "now" as the last-poll time for the next request.
      ts = new Date();
    }
    HttpRequest request = ((HttpTransportContext)context).getHttpRequest();
    if(request instanceof HttpPost)
    {
      ArrayList<NameValuePair> postParameters = new ArrayList<NameValuePair>();
      if(tsvalue.length()>0)
        postParameters.add(new BasicNameValuePair(tsparam, tsvalue));
      try
      {
        Map<String, String> paramMap = parseParameters(params);
        // Typed entry iteration replaces the previous raw Map.Entry loop:
        // no unchecked casts, and no pointless it.remove() on a map that is
        // local and discarded right after this loop anyway.
        for (Map.Entry<String, String> pair : paramMap.entrySet())
          postParameters.add(new BasicNameValuePair(pair.getKey(), pair.getValue()));
        if(postParameters.size()>0)
        {
          UrlEncodedFormEntity entity = new UrlEncodedFormEntity(postParameters, "utf-8");
          ((HttpPost)request).setEntity(entity);
        }
      }
      catch (UnsupportedEncodingException e)
      {
        log.error(e);
      }
      catch (Exception e)
      {
        log.error(e);
      }
    }
  }

  /**
   * Splits a "k1=v1&k2=v2" string into an insertion-ordered map, URL-decoding
   * both keys and values. Segments without '=' or with an empty key are skipped.
   *
   * @throws UnsupportedEncodingException never in practice (UTF-8 is mandatory)
   */
  private Map<String, String> parseParameters(String params) throws UnsupportedEncodingException
  {
    Map<String, String> query_pairs = new LinkedHashMap<String, String>();
    String[] pairs = params.split("&");
    for (String pair : pairs) {
      int idx = pair.indexOf("=");
      if(idx>0)
        query_pairs.put(URLDecoder.decode(pair.substring(0, idx), "UTF-8"), URLDecoder.decode(pair.substring(idx + 1), "UTF-8"));
    }
    return query_pairs;
  }
}
|
<gh_stars>100-1000
import subprocess
import sys

from setuptools import setup
from libs.pybind11.setup_helpers import Pybind11Extension

# Run CMake's configure step for pyqpp and scrape its output for the Eigen3
# include directory that qpp's build system detects.
p = subprocess.Popen("cmake pyqpp",
                     shell=True,
                     stdout=subprocess.PIPE,
                     stderr=subprocess.STDOUT)

prefix = "Detecting Eigen3 - done (in "
eigen_path = None
print("Running cmake pyqpp")
# communicate() reads all output AND reaps the child process; the previous
# p.stdout.read() left the process un-waited-for.
output, _ = p.communicate()
for line in output.decode('ascii').split('\n'):
    print(line)
    pos = line.find(prefix)
    if pos != -1:
        # The path is printed as "... (in <path>)"; the -1 slice strips the
        # trailing ")".
        eigen_path = line[pos + len(prefix):-1]
        break

if eigen_path is None:
    raise Exception('Eigen3 not found!')

ext_modules = [
    Pybind11Extension(
        "pyqpp",
        ["pyqpp/qpp_wrapper.cpp"],
        extra_compile_args=["-Ilibs", "-Iinclude", "-Iqasmtools/include",
                            "-I" + eigen_path],
        cxx_std=17,
        include_pybind11=False,
    ),
]

# Context manager closes the README handle promptly (the previous bare
# open(...).read() leaked the file object until GC).
with open('pyqpp/README.md') as readme:
    long_description = readme.read()

setup(
    name='pyqpp',
    version='3.1',
    description='Python 3 wrapper for Quantum++',
    long_description=long_description,
    long_description_content_type='text/markdown',
    author='softwareQ',
    author_email='<EMAIL>',
    url='https://github.com/softwareQinc/qpp',
    license='MIT',
    platforms=sys.platform,
    install_requires=[
        'numpy',
    ],
    ext_modules=ext_modules)
|
import { Injectable } from '@angular/core';
// Application-wide search service. Registering with the root injector means
// Angular creates a single lazily-instantiated instance, available anywhere
// without listing the service in an NgModule's `providers` array.
@Injectable({
  providedIn: 'root' //we declare that this service should be created by the root application injector.
})
export class SearchService {
  // No dependencies yet; constructor kept for future injection.
  constructor() {}
}
|
// Placeholder for the payload attached to a test entity.
// NOTE(review): intentionally empty for now — an empty interface matches any
// object, so fill in concrete properties once the entity shape is known.
interface EntityProps {
  // Define the properties of EntityProps if needed
}
// Common shape shared by test-case definitions; all fields are optional.
interface TestBase {
  // Human-readable label for the test case.
  description?: string;
  // Input payload fed to the system under test.
  data?: EntityProps;
  // Expected response. NOTE(review): `Object` also accepts boxed primitives;
  // consider `unknown` or a concrete response type — confirm with callers.
  response?: Object;
}
<filename>lib/galaxy/tools/parameters/__init__.py<gh_stars>0
"""
Classes encapsulating Galaxy tool parameters.
"""
from basic import DataCollectionToolParameter, DataToolParameter, SelectToolParameter
from grouping import Conditional, Repeat, Section, UploadDataset
from galaxy.util.json import dumps, json_fix, loads
def visit_input_values( inputs, input_values, callback, name_prefix="", label_prefix="" ):
    """
    Given a tool's parameter definition (`inputs`) and a specific set of
    parameter `values`, call `callback` for each non-grouping parameter,
    passing the parameter object, value, a constructed unique name,
    and a display label.

    If the callback returns a value, it will replace the old value.

    FIXME: There is redundancy between this and the visit_inputs methods of
           Repeat and Group.  This tracks labels and those do not.  It would
           be nice to unify all the places that recursively visit inputs.
    """
    # .values() (not the Python-2-only .itervalues()) keeps this compatible
    # with both Python 2 and Python 3.
    for input in inputs.values():
        if isinstance( input, Repeat ) or isinstance( input, UploadDataset ):
            # Each repeat instance carries its own __index__ so constructed
            # names remain unique and stable across reorderings.
            for i, d in enumerate( input_values[ input.name ] ):
                index = d['__index__']
                new_name_prefix = name_prefix + "%s_%d|" % ( input.name, index )
                new_label_prefix = label_prefix + "%s %d > " % ( input.title, i + 1 )
                visit_input_values( input.inputs, d, callback, new_name_prefix, new_label_prefix )
        elif isinstance( input, Conditional ):
            # Only the currently selected case's inputs are visited.
            values = input_values[ input.name ]
            current = values["__current_case__"]
            new_name_prefix = name_prefix + input.name + "|"
            visit_input_values( input.cases[current].inputs, values, callback, new_name_prefix, label_prefix )
        elif isinstance( input, Section ):
            values = input_values[ input.name ]
            new_name_prefix = name_prefix + input.name + "|"
            visit_input_values( input.inputs, values, callback, new_name_prefix, label_prefix )
        else:
            new_value = callback( input,
                                  input_values[input.name],
                                  prefixed_name=name_prefix + input.name,
                                  prefixed_label=label_prefix + input.label )
            # NOTE(review): a falsy-but-meaningful return (0, "", False) is
            # ignored here; only truthy results replace the stored value.
            if new_value:
                input_values[input.name] = new_value
def check_param( trans, param, incoming_value, param_values, source='html', history=None ):
    """
    Check the value of a single parameter `param`.  The value in
    `incoming_value` is converted from its HTML encoding and validated.
    The `param_values` argument contains the processed values of
    previous parameters (this may actually be an ExpressionContext
    when dealing with grouping scenarios).

    Returns a ``(value, error)`` tuple where ``error`` is ``None`` on
    success and a message string when validation fails.
    """
    value = incoming_value
    error = None
    try:
        if history is None:
            history = trans.history
        # Data parameters are processed even when the value is None, so a
        # missing dataset still goes through conversion/validation.
        if value is not None or isinstance( param, DataToolParameter ) or isinstance( param, DataCollectionToolParameter ):
            # Convert value from HTML representation
            if source == 'html':
                value = param.from_html( value, trans, param_values )
            else:
                value = param.from_json( value, trans, param_values )
            # Allow the value to be converted if necessary
            filtered_value = param.filter_value( value, trans, param_values )
            # Then do any further validation on the value
            param.validate( filtered_value, history )
        elif value is None and isinstance( param, SelectToolParameter ):
            # An empty select list or column list
            param.validate( value, history )
    except ValueError as e:
        # "except ... as e" works on Python 2.6+ and is required on Python 3;
        # the old "except ValueError, e" comma form is a py3 syntax error.
        error = str( e )
    return value, error
def params_to_strings( params, param_values, app ):
    """
    Convert a dictionary of parameter values to a dictionary of strings
    suitable for persisting.  The `value_to_basic` method of each parameter
    is called to convert its value to basic types, the result of which
    is then json encoded (this allowing complex nested parameters and
    such).
    """
    rval = dict()
    # .items() works on both Python 2 and 3 (iteritems() is 2-only).
    for key, value in param_values.items():
        # Values without a matching parameter definition are persisted as-is.
        if key in params:
            value = params[ key ].value_to_basic( value, app )
        rval[ key ] = str( dumps( value ) )
    return rval
def params_from_strings( params, param_values, app, ignore_errors=False ):
    """
    Convert a dictionary of strings as produced by `params_to_strings`
    back into parameter values (decode the json representation and then
    allow each parameter to convert the basic types into the parameters
    preferred form).
    """
    rval = dict()
    # .items() works on both Python 2 and 3 (iteritems() is 2-only).
    for key, value in param_values.items():
        value = json_fix( loads( value ) )
        # Only values with a matching parameter definition are converted
        # beyond the json decode above.
        if key in params:
            value = params[key].value_from_basic( value, app, ignore_errors )
        rval[ key ] = value
    return rval
def params_to_incoming( incoming, inputs, input_values, app, name_prefix="", to_html=True ):
    """
    Given a tool's parameter definition (`inputs`) and a specific set of
    parameter `input_values` objects, populate `incoming` with the html values.

    Useful for e.g. the rerun function.  `incoming` is mutated in place;
    nothing is returned.
    """
    # .values() (not the Python-2-only .itervalues()) keeps this compatible
    # with both Python 2 and Python 3.
    for input in inputs.values():
        if isinstance( input, Repeat ) or isinstance( input, UploadDataset ):
            for d in input_values[ input.name ]:
                index = d['__index__']
                new_name_prefix = name_prefix + "%s_%d|" % ( input.name, index )
                params_to_incoming( incoming, input.inputs, d, app, new_name_prefix, to_html=to_html)
        elif isinstance( input, Conditional ):
            values = input_values[ input.name ]
            current = values["__current_case__"]
            new_name_prefix = name_prefix + input.name + "|"
            # The test parameter (case selector) is emitted alongside the
            # selected case's inputs.
            incoming[ new_name_prefix + input.test_param.name ] = values[ input.test_param.name ]
            params_to_incoming( incoming, input.cases[current].inputs, values, app, new_name_prefix, to_html=to_html )
        elif isinstance( input, Section ):
            values = input_values[ input.name ]
            new_name_prefix = name_prefix + input.name + "|"
            params_to_incoming( incoming, input.inputs, values, app, new_name_prefix, to_html=to_html )
        else:
            value = input_values.get( input.name )
            if to_html:
                value = input.to_html_value( value, app )
            incoming[ name_prefix + input.name ] = value
|
<filename>archguard/src/pages/interactiveAnalysis/block/GraphRender.tsx
import React, { useCallback, useState } from "react";
import { FlowChart } from "@/pages/interactiveAnalysis/block/graph/FlowChart";
import { GraphType, ReplResult } from "@/types/ascode";
import { Select } from "antd";
import { InteractiveAnalysisContext } from "@/pages/interactiveAnalysis/InteractiveAnalysisContext";
// Dropdown listing every supported GraphType; calls `props.change` with the
// newly selected type value.
export function GraphSelect(props: { change: any; value: any }) {
  // Memoized so the antd Select receives a stable onChange reference and
  // avoids needless re-renders.
  const changeGraph = useCallback(
    (option) => {
      props.change(option);
    },
    [props.change],
  );

  return (
    <Select onChange={changeGraph} value={props.value} style={{ width: 120 }}>
      {Object.keys(GraphType).map((value) => (
        <Select.Option value={GraphType[value]} key={GraphType[value]}>
          {GraphType[value]}
        </Select.Option>
      ))}
    </Select>
  );
}
// Renders the graph widget matching the requested type; unknown types fall
// through to a diagnostic placeholder rather than rendering nothing.
export function GraphRenderByType(props: { type: string; data: string }) {
  if (props.type === GraphType.ARCHDOC) {
    return <div>{FlowChart(props.data)}</div>;
  }
  if (props.type === GraphType.UML) {
    return <div> UML is doing </div>;
  }
  return <div> unsupported GraphType: {props.type}</div>;
}
// Top-level graph panel: a type selector plus the renderer for the chosen
// type. The initial type comes from the REPL result's action.
export function GraphRender(props: { result: ReplResult; context: InteractiveAnalysisContext }) {
  // The result is captured once; only the selected graph type is mutable.
  const [result] = useState(props.result);
  const [graphType, setGraphType] = useState(result.action.graphType);

  const changeGraph = useCallback(
    (option) => {
      setGraphType(option);
    },
    [setGraphType],
  );

  return (
    <>
      <GraphSelect value={graphType} change={changeGraph}></GraphSelect>
      <GraphRenderByType type={graphType} data={result.action.data} />
    </>
  );
}
|
#!/bin/sh
# Run the GAMS solver chain for grid 199000013, then drop a "finished" marker
# so downstream jobs can detect completion.
# Single definition of the working directory (it was repeated three times).
DIR=/home/albarass/RedSeaInterpolation/ocean_interpolation/rs_example2/225a/grid199000013

gmsgenux.out "$DIR/gamscntr.dat" IPOPT
gmscr_ux.out "$DIR/gamscntr.dat"
echo OK > "$DIR/finished"
<filename>lib/hard-module-assets-plugin.js
const pluginCompat = require('./util/plugin-compat');
// hard-source plugin that persists module assets to the cache on freeze and
// restores them from the cache on thaw.
class HardModuleAssetsPlugin {
  apply(compiler) {
    let store;
    let fetch;

    // Capture the cache accessors that hard-source exposes to plugins.
    pluginCompat.tap(
      compiler,
      '_hardSourceMethods',
      'HardModuleAssetsPlugin copy methods',
      methods => {
        ({ store, fetch } = methods);
      },
    );

    // Freeze: persist each asset, returning the list of keys as the frozen form.
    pluginCompat.tap(
      compiler,
      '_hardSourceFreezeModuleAssets',
      'HardModuleAssetsPlugin freeze',
      (frozen, assets, extra) => {
        if (!frozen && assets) {
          for (const key of Object.keys(assets)) {
            store('Asset', key, assets[key], extra);
          }
          frozen = Object.keys(assets);
        }
        return frozen;
      },
    );

    // Thaw: rebuild the assets object from the stored keys.
    pluginCompat.tap(
      compiler,
      '_hardSourceThawModuleAssets',
      'HardModuleAssetsPlugin thaw',
      (assets, frozen, extra) => {
        if (!assets && frozen) {
          assets = {};
          for (const key of frozen) {
            assets[key] = fetch('Asset', key, extra);
          }
        }
        return assets;
      },
    );
  }
}

module.exports = HardModuleAssetsPlugin;
|
#!/usr/bin/env bash
# This code write by Mr.nope
# Removes the installed location.py from the system python3.6 library dir.
clear
echo "Uninstalling..."
sleep 2
cd /usr/lib/python3.6 && sudo rm location.py
echo ""
echo "Finish!"
echo ""
# Exit 0 on success: the original exited 1, which wrongly signalled failure
# to any caller or CI checking the exit status.
exit 0
# Shorthand for docker-compose, e.g. `dcp up -d`.
alias dcp='docker-compose'
|
#!/bin/sh
# Compile each test program and copy the shared libraries from the parent
# directory next to the binaries. The loop replaces six near-identical gcc
# lines, one of which (test0) was duplicated and compiled twice.
for t in test0 test1 test2 test3 test3++ test4; do
    gcc "$t.c" -o "$t"
done
cp ../*.so ./
public class ThetaJoinNode {
private ThetaJoinType type;
// Constructor to initialize the type
public ThetaJoinNode(ThetaJoinType type) {
this.type = type;
}
/**
* Returns the type of the node.
* @return type of the node {@type ThetaJoinType}
* @public
*/
public ThetaJoinType getType() {
return this.type;
}
} |
<gh_stars>0
#pragma once
#include "archgraph/typedef.h"
#include "archgraph/Rule.h"
#include "archgraph/EvalContext.h"
#include <cga/typedef.h>
#include <map>
#include <memory>
#include <vector>
#include <sstream>
namespace archgraph
{
// Holds a named collection of CGA-style rules, resolves their dependency
// order, and evaluates them against input geometries.
class EvalRule
{
public:
    EvalRule() {}

    // Registers a rule; later queried by name via QueryRule.
    void AddRule(const RulePtr& rule);
    RulePtr QueryRule(const std::string& name) const;

    // Called once after all rules are loaded to finish setup.
    void OnLoadFinished(const EvalContext& ctx);

    // Applies the rule set to the given geometries and returns the results.
    std::vector<GeoPtr> Eval(const std::vector<GeoPtr>& geos,
        const EvalContext& ctx);

    void SetFilepath(const std::string& filepath) { m_filepath = filepath; }

    void Clear();

    // Text accumulated during evaluation (e.g. for display in a UI console).
    auto& GetConsole() const { return m_console; }

private:
    void DeduceOps(const EvalContext& ctx);
    void DeduceOps(const EvalContext& ctx, const Rule::SelPtr& sel);

    // Internal evaluation over an explicit operation list / compound selector.
    std::vector<GeoPtr> Eval(const std::vector<GeoPtr>& geos,
        const std::vector<Rule::OpPtr>& ops, const EvalContext& ctx);
    std::vector<GeoPtr> Eval(const std::vector<GeoPtr>& geos,
        const Rule::CompoundSel& sel, const EvalContext& ctx);

    void ResolveParmsExpr(Operation& op, const EvalContext& ctx) const;

private:
    // Orders rules so that every rule is evaluated after the rules it
    // depends on (classic in-degree based topological sort).
    class TopologicalSorting
    {
    public:
        TopologicalSorting(const std::map<std::string, RulePtr>& rules);
        std::vector<RulePtr> Sort();
    private:
        void PrepareOp(const Rule::OpPtr& op, int rule_idx);
        void PrepareSel(const Rule::SelPtr& sel, int rule_idx);
        void AddRuleDepend(const RulePtr& rule, int rule_idx);
    private:
        std::vector<RulePtr> m_rules;
        std::vector<int> m_in_deg;
        std::vector<std::vector<int>> m_out_ops;
    }; // TopologicalSorting

private:
    std::string m_filepath;
    std::map<std::string, RulePtr> m_rules;
    // Lazily (re)computed caches; mutable so const queries can fill them.
    mutable std::vector<RulePtr> m_rules_sorted;
    mutable std::stringstream m_console;
}; // EvalRule
} |
package com.ulfy.master.ui.activity;
import android.os.Bundle;
import android.view.View;
import com.ulfy.android.system.ActivityUtils;
import com.ulfy.android.task.TaskUtils;
import com.ulfy.android.task_transponder.ContentDataLoader;
import com.ulfy.android.task_transponder.OnReloadListener;
import com.ulfy.master.application.vm.ContactBookVM;
import com.ulfy.master.ui.base.TitleContentActivity;
import com.ulfy.master.ui.view.ContactBookView;
/**
 * Screen showing the contact book. Loads its view model asynchronously and
 * installs the resulting ContactBookView as the activity content.
 */
public class ContactBookActivity extends TitleContentActivity {
    private ContactBookVM vm;       // view model backing this screen
    private ContactBookView view;   // content view created after data loads

    /**
     * Starts this activity.
     */
    public static void startActivity() {
        ActivityUtils.startActivity(ContactBookActivity.class);
    }

    /**
     * Initialization entry point: model first, then content, then
     * activity-level chrome.
     */
    @Override protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        initModel(savedInstanceState);
        initContent(savedInstanceState);
        initActivity(savedInstanceState);
    }

    /**
     * Creates the view model.
     */
    private void initModel(Bundle savedInstanceState) {
        vm = new ContactBookVM();
    }

    /**
     * Loads the view model's data asynchronously and installs the created
     * content view; on failure the reload listener re-runs this method.
     */
    private void initContent(final Bundle savedInstanceState) {
        TaskUtils.loadData(getContext(), vm.loadDataOnExe(), new ContentDataLoader(contentFL, vm, false) {
            @Override protected void onCreatView(ContentDataLoader loader, View createdView) {
                view = (ContactBookView) createdView;
            }
        }.setOnReloadListener(new OnReloadListener() {
            @Override public void onReload() {
                initContent(savedInstanceState);
            }
        })
        );
    }

    /**
     * Sets activity-level data (the title bar text).
     */
    private void initActivity(Bundle savedInstanceState) {
        titleTV.setText("联系人列表");
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.