text
stringlengths 1
1.05M
|
|---|
/*
*
*/
package net.community.apps.tools.xmlstruct;
import java.awt.Component;
import java.util.Map;
import javax.swing.Icon;
import javax.swing.JTree;
import javax.swing.tree.DefaultTreeCellRenderer;
import net.community.chest.awt.attributes.AttrUtils;
import net.community.chest.dom.NodeTypeEnum;
import net.community.chest.lang.EnumUtil;
import org.w3c.dom.Node;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* @author <NAME>.
* @since Jan 6, 2009 4:46:46 PM
*/
public class DOMStructNodeRenderer extends DefaultTreeCellRenderer {
    private static final long serialVersionUID = 2880814897307880986L;

    public DOMStructNodeRenderer ()
    {
        super();
    }
    /*
     * Decorates the default renderer: when the node is a DocStructNode, an
     * icon matching the DOM node type (looked up from the main frame's icon
     * map) and a tooltip with the type's attribute name are applied.
     * @see javax.swing.tree.DefaultTreeCellRenderer#getTreeCellRendererComponent(javax.swing.JTree, java.lang.Object, boolean, boolean, boolean, int, boolean)
     */
    @Override
    public Component getTreeCellRendererComponent (JTree tree,
            Object value, boolean sel, boolean expanded, boolean leaf,
            int row, boolean isFocused)
    {
        final Component comp = super.getTreeCellRendererComponent(tree, value, sel, expanded, leaf, row, isFocused);
        if (!(value instanceof DocStructNode<?>))
            return comp;

        @SuppressWarnings("unchecked")
        final Node domNode = ((DocStructNode<? extends Node>) value).getAssignedValue();
        final NodeTypeEnum nodeType = NodeTypeEnum.fromNode(domNode);
        if (nodeType == null)
            return comp;

        if (AttrUtils.isIconableComponent(comp))
        {
            final MainFrame frame = MainFrame.getContainerFrameInstance();
            final Map<NodeTypeEnum,? extends Icon> iconsMap =
                (frame == null) ? null : frame.getIconsMap();
            final Icon icon =
                ((iconsMap == null) || iconsMap.isEmpty()) ? null : iconsMap.get(nodeType);
            if (icon != null)
                AttrUtils.setComponentIcon(comp, icon);
        }

        if (AttrUtils.isTooltipedComponent(comp))
        {
            final String tip = EnumUtil.toAttributeName(nodeType);
            if ((tip != null) && (!tip.isEmpty()))
                AttrUtils.setComponentToolTipText(comp, tip);
        }

        return comp;
    }
}
|
#!/bin/sh
# Regenerate target/tokens.json by downloading the Tokens API OpenAPI spec
# and bundling it with the dockerized swagger-cli.
# Abort on the first failed command so a failed download never produces a
# stale or partial bundle.
set -e
PrgName=`basename "$0"`
# Determine absolute path to location from which we are running.
export RUN_DIR=`pwd`
export PRG_RELPATH=`dirname "$0"`
cd "$PRG_RELPATH/."
export PRG_PATH=`pwd`
cd "$RUN_DIR"
# Create target dir in case not yet created by maven
mkdir -p "$PRG_PATH/target"
# Download latest openapi spec from repo
# Master yaml
# curl -o target/openapi_v3.yml https://raw.githubusercontent.com/tapis-project/tokens-api/master/service/resources/openapi_v3.yml
# Dev yaml
# -f: fail on HTTP errors instead of silently saving an error page as the spec.
curl -f -o target/openapi_v3.yml https://raw.githubusercontent.com/tapis-project/tokens-api/dev/service/resources/openapi_v3.yml
# Run swagger-cli from docker image to generate bundled json file from openapi yaml file
set -xv
export REPO="$PRG_PATH/target"
export API_NAME=tokens.json
mkdir -p "$REPO/swagger-api/out"
# docker run -v $REPO/openapi_v3.yml:/swagger-api/yaml/openapi_v3.yml -v $REPO:/swagger-api/out \
# tapis/swagger-cli bundle -r /swagger-api/yaml/openapi_v3.yml -o /swagger-api/out/$API_NAME
docker run --rm -v "$REPO/openapi_v3.yml:/swagger-api/yaml/openapi_v3.yml" \
tapis/swagger-cli bundle -r /swagger-api/yaml/openapi_v3.yml > "/tmp/$API_NAME"
cp "/tmp/$API_NAME" "$REPO/$API_NAME"
|
#include "variable.h"
namespace Blocks {
// Renders the current integer value into the cached string buffer and
// returns it.  Uses `updated_` as a dirty flag to skip re-formatting.
// NOTE(review): the flag sense looks inverted — the cached text is returned
// when updated_ is FALSE and rebuilt when it is TRUE, and updated_ is never
// cleared here; confirm the intended dirty-flag protocol with the setters.
const string& IntVariable::toString() {
    if (!updated_) {
        return to_string_data_;
    }
    // Ensure the backing buffer can hold any formatted 32-bit integer.
    // NOTE(review): assign('\0', STRMAX_INT32) relies on this custom string
    // type's argument order; std::string::assign takes (count, ch) — verify.
    if (to_string_data_.capacity() < STRMAX_INT32) {
        to_string_data_.assign('\0', STRMAX_INT32);
    }
    snprintf((char *)to_string_data_.mutable_data(), STRMAX_INT32, "%d", data_);
    // Trim the logical length down to the C-string produced by snprintf.
    to_string_data_.set_to_c_len();
    return to_string_data_;
}
// Dispatches an integer-variable action to the matching arithmetic method.
// Non-integer actions are ignored.
void IntVariable::action(BlockAction act) {
    if (!act.is_var_int()) {
        return;
    }
    const int32_t operand = act.var_int.value;
    switch (act.type) {
        case BlockAction::IntSetValue:
            setValue(operand);
            break;
        case BlockAction::IntAdd:
            add(operand);
            break;
        case BlockAction::IntSub:
            sub(operand);
            break;
        case BlockAction::IntMult:
            mult(operand);
            break;
        case BlockAction::IntDiv:
            div(operand);
            break;
        case BlockAction::IntInc:
            inc();
            break;
        case BlockAction::IntDec:
            dec();
            break;
        default:
            // Unknown action types are deliberately ignored.
            break;
    }
}
} // namespace Blocks
|
package org.fabian.csg.scene;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import org.fabian.csg.CSG;
import org.fabian.csg.Polygon;
import com.jme3.export.InputCapsule;
import com.jme3.export.JmeExporter;
import com.jme3.export.JmeImporter;
import com.jme3.export.OutputCapsule;
import com.jme3.scene.Geometry;
/**
 * A {@link Geometry} whose mesh is produced by combining an ordered list of
 * CSG brushes (additive / subtractive / intersection operations).
 */
public class CSGNode extends Geometry {

    /** Ordered list of CSG brushes composing this node's geometry. */
    private ArrayList<CSG> brushes = new ArrayList<CSG>();

    public CSGNode() {
        this("CSG Geometry");
    }

    public CSGNode(String name) {
        super(name);
    }

    @Override
    public void write(JmeExporter ex) throws IOException {
        super.write(ex);
        OutputCapsule capsule = ex.getCapsule(this);
        // BUG FIX: previously `brushes` was also passed as its own default
        // value, so the capsule always saw value == default and the list was
        // never serialized. Use null as the default instead.
        capsule.writeSavableArrayList(brushes, "Brushes", null);
    }

    @Override
    public void read(JmeImporter im) throws IOException {
        super.read(im);
        InputCapsule in = im.getCapsule(this);
        brushes = in.readSavableArrayList("Brushes", new ArrayList<CSG>());
    }

    public void addBrush(CSG brush) {
        brushes.add(brush);
    }

    public ArrayList<CSG> getBrushes() {
        return brushes;
    }

    public void removeBrush(CSG brush) {
        brushes.remove(brush);
    }

    public boolean hasBrush(CSG brush) {
        return brushes.contains(brush);
    }

    /**
     * Rebuilds this node's mesh: brushes are applied in sorted order onto an
     * initially empty CSG product, then the result is converted to a mesh.
     */
    public void regenerate() {
        // Work on a sorted copy so the user-visible brush order is untouched.
        ArrayList<CSG> tempBrushes = new ArrayList<>(brushes);
        Collections.sort(tempBrushes);
        CSG product = CSG.fromPolygons(new Polygon[0]);
        for (CSG brush : tempBrushes) {
            switch (brush.getType()) {
            case ADDITIVE:
                product = product.union(brush);
                break;
            case SUBTRACTIVE:
                product = product.subtract(brush);
                break;
            case INTERSECTION:
                product = product.intersect(brush);
                break;
            }
        }
        this.setMesh(product.toMesh());
    }
}
|
<reponame>kellymclaughlin/go-fastly
package fastly
import "testing"
// TestClient_Pools exercises the full Pool lifecycle (create, list, get,
// update, delete) against recorded HTTP fixtures under testdata/pools/*.
// The steps are order-dependent: each stage relies on state created by the
// previous one.
func TestClient_Pools(t *testing.T) {
	t.Parallel()

	var err error
	var tv *Version
	record(t, "pools/version", func(c *Client) {
		tv = testVersion(t, c)
	})

	// Create
	var p *Pool
	record(t, "pools/create", func(c *Client) {
		p, err = c.CreatePool(&CreatePoolInput{
			ServiceID:       testServiceID,
			ServiceVersion:  tv.Number,
			Name:            "test_pool",
			Comment:         "test pool",
			Quorum:          50,
			UseTLS:          true,
			TLSCertHostname: "example.com",
			Type:            PoolTypeRandom,
		})
	})
	if err != nil {
		t.Fatal(err)
	}

	// Ensure deleted
	defer func() {
		record(t, "pools/cleanup", func(c *Client) {
			// Best-effort cleanup: errors are deliberately ignored because
			// only one of the two names exists depending on whether the
			// rename in the update step succeeded.
			c.DeletePool(&DeletePoolInput{
				ServiceID:      testServiceID,
				ServiceVersion: tv.Number,
				Name:           "test_pool",
			})
			c.DeletePool(&DeletePoolInput{
				ServiceID:      testServiceID,
				ServiceVersion: tv.Number,
				Name:           "new_test_pool",
			})
		})
	}()

	// Verify the fields echoed back by the create call.
	if p.Name != "test_pool" {
		t.Errorf("bad name: %q", p.Name)
	}
	if p.Quorum != 50 {
		t.Errorf("bad quorum: %q", p.Quorum)
	}
	if p.UseTLS != true {
		t.Errorf("bad use_tls: %t", p.UseTLS)
	}
	if p.TLSCertHostname != "example.com" {
		t.Errorf("bad tls_cert_hostname: %q", p.TLSCertHostname)
	}
	if p.Type != PoolTypeRandom {
		t.Errorf("bad type: %q", p.Type)
	}

	// List
	var ps []*Pool
	record(t, "pools/list", func(c *Client) {
		ps, err = c.ListPools(&ListPoolsInput{
			ServiceID:      testServiceID,
			ServiceVersion: tv.Number,
		})
	})
	if err != nil {
		t.Fatal(err)
	}
	if len(ps) < 1 {
		t.Errorf("bad pools: %v", ps)
	}

	// Get — must round-trip the pool created above.
	var np *Pool
	record(t, "pools/get", func(c *Client) {
		np, err = c.GetPool(&GetPoolInput{
			ServiceID:      testServiceID,
			ServiceVersion: tv.Number,
			Name:           "test_pool",
		})
	})
	if err != nil {
		t.Fatal(err)
	}
	if p.Name != np.Name {
		t.Errorf("bad name: %q (%q)", p.Name, np.Name)
	}
	if p.Quorum != np.Quorum {
		t.Errorf("bad quorum: %q (%q)", p.Quorum, np.Quorum)
	}
	if p.Type != np.Type {
		t.Errorf("bad type: %q (%q)", p.Type, np.Type)
	}

	// Update — renames the pool and changes quorum/type.
	var up *Pool
	record(t, "pools/update", func(c *Client) {
		up, err = c.UpdatePool(&UpdatePoolInput{
			ServiceID:      testServiceID,
			ServiceVersion: tv.Number,
			Name:           "test_pool",
			NewName:        String("new_test_pool"),
			Quorum:         Uint(0),
			Type:           PPoolType(PoolTypeHash),
		})
	})
	if err != nil {
		t.Fatal(err)
	}
	if up.Name != "new_test_pool" {
		t.Errorf("bad name: %q", up.Name)
	}
	if up.Quorum != 0 {
		t.Errorf("bad quorum: %q", up.Quorum)
	}

	// Delete
	record(t, "pools/delete", func(c *Client) {
		err = c.DeletePool(&DeletePoolInput{
			ServiceID:      testServiceID,
			ServiceVersion: tv.Number,
			Name:           "new_test_pool",
		})
	})
	if err != nil {
		t.Fatal(err)
	}
}
func TestClient_ListPools_validation(t *testing.T) {
var err error
_, err = testClient.ListPools(&ListPoolsInput{
ServiceID: "",
})
if err != ErrMissingServiceID {
t.Errorf("bad error: %s", err)
}
_, err = testClient.ListPools(&ListPoolsInput{
ServiceID: "foo",
ServiceVersion: 0,
})
if err != ErrMissingServiceVersion {
t.Errorf("bad error: %s", err)
}
}
func TestClient_CreatePool_validation(t *testing.T) {
var err error
_, err = testClient.CreatePool(&CreatePoolInput{
ServiceID: "",
})
if err != ErrMissingServiceID {
t.Errorf("bad error: %s", err)
}
_, err = testClient.CreatePool(&CreatePoolInput{
ServiceID: "foo",
ServiceVersion: 0,
})
if err != ErrMissingServiceVersion {
t.Errorf("bad error: %s", err)
}
_, err = testClient.CreatePool(&CreatePoolInput{
ServiceID: "foo",
ServiceVersion: 1,
Name: "",
})
if err != ErrMissingName {
t.Errorf("bad error: %s", err)
}
}
func TestClient_GetPool_validation(t *testing.T) {
var err error
_, err = testClient.GetPool(&GetPoolInput{
ServiceID: "",
})
if err != ErrMissingServiceID {
t.Errorf("bad error: %s", err)
}
_, err = testClient.GetPool(&GetPoolInput{
ServiceID: "foo",
ServiceVersion: 0,
})
if err != ErrMissingServiceVersion {
t.Errorf("bad error: %s", err)
}
_, err = testClient.GetPool(&GetPoolInput{
ServiceID: "foo",
ServiceVersion: 1,
Name: "",
})
if err != ErrMissingName {
t.Errorf("bad error: %s", err)
}
}
func TestClient_UpdatePool_validation(t *testing.T) {
var err error
_, err = testClient.UpdatePool(&UpdatePoolInput{
ServiceID: "",
})
if err != ErrMissingServiceID {
t.Errorf("bad error: %s", err)
}
_, err = testClient.UpdatePool(&UpdatePoolInput{
ServiceID: "foo",
ServiceVersion: 0,
})
if err != ErrMissingServiceVersion {
t.Errorf("bad error: %s", err)
}
_, err = testClient.UpdatePool(&UpdatePoolInput{
ServiceID: "foo",
ServiceVersion: 1,
Name: "",
})
if err != ErrMissingName {
t.Errorf("bad error: %s", err)
}
}
func TestClient_DeletePool_validation(t *testing.T) {
var err error
err = testClient.DeletePool(&DeletePoolInput{
ServiceID: "",
})
if err != ErrMissingServiceID {
t.Errorf("bad error: %s", err)
}
err = testClient.DeletePool(&DeletePoolInput{
ServiceID: "foo",
ServiceVersion: 0,
})
if err != ErrMissingServiceVersion {
t.Errorf("bad error: %s", err)
}
err = testClient.DeletePool(&DeletePoolInput{
ServiceID: "foo",
ServiceVersion: 1,
Name: "",
})
if err != ErrMissingName {
t.Errorf("bad error: %s", err)
}
}
|
#!/bin/bash
# One-off rollout helper: applies the esoft eslint rules across every GitLab
# repo listed in ~/Downloads/repos.txt using microplane (mp).
# Step 1: initialize the mp workspace from the repo list on the given GitLab host.
./bin/mp init --provider=gitlab --provider-url=https://git.esoft.tech -f ~/Downloads/repos.txt
# Step 2: clone all repos in the plan.
./bin/mp clone
# Step 3: run the codemod (t.js) in each repo on the feature branch with the given commit message.
./bin/mp plan -b feature/NCRM-10523 -m "Install esoft eslint rules" -- node ~/projects/microplane/t.js
# Step 4: push the branches, assigning @f.n.makhmudov as reviewer.
./bin/mp push -a @f.n.makhmudov
|
<reponame>bpbpublications/Building-Server-side-and-Microservices-with-Go<gh_stars>1-10
package main
import "fmt"
// Printer is implemented by values that can print themselves to stdout.
type Printer interface {
	Print()
}

// Point is a two-dimensional integer coordinate.
type Point struct {
	x, y int
}

// Print writes the point as "(x, y)" to stdout.
func (p Point) Print() {
	fmt.Printf("(%v, %v)", p.x, p.y)
}

func main() {
	p := Point{x: 4, y: 6}
	p.Print()
	// BUG FIX: the interface variable was previously declared but left nil
	// before calling Print(), which panics at runtime with a nil pointer
	// dereference. Assign the concrete value before invoking the method.
	var i Printer = p
	i.Print()
}
|
<reponame>jmosro/trazactivo
package com.ipec.trazactivo.service;
import com.ipec.trazactivo.model.Activo;
import com.ipec.trazactivo.model.ActivoPK;
import java.util.List;
/**
 * Service-layer contract for managing {@code Activo} (asset) entities.
 * Note: interface members are implicitly {@code public}, so the redundant
 * modifiers were removed.
 */
public interface ActivoServiceInterface {

    /** Returns all assets. */
    List<Activo> listarTodo();

    /** Creates or updates the given asset. */
    void guardar(Activo activo);

    /** Deletes the given asset. */
    void eliminar(Activo activo);

    /** Looks up a single asset by its composite primary key. */
    Activo encontrarPorNumeroActivo(ActivoPK activoPK);
}
|
<filename>lib/config.js
var fs = require('fs');
// Assumes the configuration file lives one level above this module and is
// called configuration.json; it is read synchronously at require time.
var configurationData = fs.readFileSync(__dirname + "/../configuration.json",
    'utf8');
var config = JSON.parse(configurationData);
var bunyan = require('bunyan');
var log = bunyan.createLogger({
    name: 'smartcopy'
});
// Shallow-merges the supplied key/value pairs into the exported config,
// letting callers (e.g. CLI argument parsing) override file-based values.
config.mergeValues = function (args) {
    var keys = Object.keys(args);
    keys.forEach(function (key) {
        config[key] = args[key];
    });
}; // FIX: terminate statements explicitly (the assignment and the export
   // below previously relied on automatic semicolon insertion).
config.logger = log;
module.exports = config;
|
<reponame>wjtree/wechat-parent
package com.app.wechat.domain.receive;
import com.thoughtworks.xstream.annotations.XStreamAlias;
/**
 * <p>Purpose: data structure for the custom-menu "view" (URL redirect) event.</p>
 * <p>Copyright (c) 2017</p>
 * <p>Created: 2017-07-07 17:57:25</p>
 * @author Wang Jian
 * @version 1.0
 */
public class WxRcvMenuViewEvntDto extends WxRcvMenuEvntDto {
    private static final long serialVersionUID = 1L;
    /** Menu ID; for personalized menus this identifies which rule's menu was clicked. */
    @XStreamAlias("MenuId")
    private String menuId;

    public String getMenuId() {
        return menuId;
    }

    public void setMenuId(String menuId) {
        this.menuId = menuId;
    }
}
|
package imagetag
import (
"context"
"fmt"
kapierrors "k8s.io/apimachinery/pkg/api/errors"
metainternal "k8s.io/apimachinery/pkg/apis/meta/internalversion"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/sets"
apirequest "k8s.io/apiserver/pkg/endpoints/request"
"k8s.io/apiserver/pkg/registry/rest"
"k8s.io/kubernetes/pkg/printers"
printerstorage "k8s.io/kubernetes/pkg/printers/storage"
imagegroup "github.com/openshift/api/image"
"github.com/openshift/library-go/pkg/image/imageutil"
"github.com/openshift/openshift-apiserver/pkg/api/apihelpers"
imageapi "github.com/openshift/openshift-apiserver/pkg/image/apis/image"
"github.com/openshift/openshift-apiserver/pkg/image/apis/image/validation/whitelist"
"github.com/openshift/openshift-apiserver/pkg/image/apiserver/internalimageutil"
"github.com/openshift/openshift-apiserver/pkg/image/apiserver/registry/image"
"github.com/openshift/openshift-apiserver/pkg/image/apiserver/registry/imagestream"
imageprinters "github.com/openshift/openshift-apiserver/pkg/image/printers/internalversion"
)
// REST implements the RESTStorage interface for ImageTag. It provides
// get/list/create/update/delete of a single tag on an ImageStream, acting
// as a simplified projection over the underlying image stream storage.
type REST struct {
	imageRegistry       image.Registry       // resolves Image objects by name
	imageStreamRegistry imagestream.Registry // CRUD access to ImageStreams
	strategy            Strategy             // create/update validation strategy
	rest.TableConvertor
}
// NewREST returns a new REST backed by the given image and image stream
// registries; the registryWhitelister is used by the validation strategy.
func NewREST(imageRegistry image.Registry, imageStreamRegistry imagestream.Registry, registryWhitelister whitelist.RegistryWhitelister) *REST {
	return &REST{
		imageRegistry:       imageRegistry,
		imageStreamRegistry: imageStreamRegistry,
		strategy:            NewStrategy(registryWhitelister),
		// Table output reuses the shared OpenShift image printers.
		TableConvertor: printerstorage.TableConvertor{TableGenerator: printers.NewTableGenerator().With(imageprinters.AddImageOpenShiftHandlers)},
	}
}
// Compile-time checks that *REST satisfies the storage interfaces it claims.
var _ rest.Getter = &REST{}
var _ rest.Lister = &REST{}
var _ rest.CreaterUpdater = &REST{}
var _ rest.GracefulDeleter = &REST{}
var _ rest.ShortNamesProvider = &REST{}
var _ rest.Scoper = &REST{}
// ShortNames implements the ShortNamesProvider interface, returning the
// abbreviations accepted for this resource (e.g. `oc get itag`).
func (r *REST) ShortNames() []string {
	names := []string{"itag"}
	return names
}
// New is only implemented to make REST implement RESTStorage; it returns an
// empty ImageTag.
func (r *REST) New() runtime.Object {
	return new(imageapi.ImageTag)
}
// NewList returns a new, empty ImageTagList object.
func (r *REST) NewList() runtime.Object {
	return new(imageapi.ImageTagList)
}
// NamespaceScoped reports that image tags are namespaced resources.
// The receiver is named `r` for consistency with every other REST method in
// this file (it was previously `s`).
func (r *REST) NamespaceScoped() bool {
	return true
}
// nameAndTag splits a string into its name component and tag component, and
// returns a BadRequest error if the string is not of the form <name>:<tag>.
func nameAndTag(id string) (string, string, error) {
	name, tag, err := imageutil.ParseImageStreamTagName(id)
	if err != nil {
		return name, tag, kapierrors.NewBadRequest("ImageTags must be retrieved with <name>:<tag>")
	}
	return name, tag, nil
}
// List returns every ImageTag across all image streams visible to the
// request, filtered by the label/field selectors in options.
func (r *REST) List(ctx context.Context, options *metainternal.ListOptions) (runtime.Object, error) {
	imageStreams, err := r.imageStreamRegistry.ListImageStreams(ctx, options)
	if err != nil {
		return nil, err
	}
	matcher := MatchImageTag(apihelpers.InternalListOptionsToSelectors(options))
	// tagNames is emptied and reused for each stream to avoid reallocation.
	tagNames := sets.NewString()
	list := &imageapi.ImageTagList{}
	for _, currIS := range imageStreams.Items {
		// prepare a list of all possible tags, then add each one in order
		for tag := range tagNames {
			delete(tagNames, tag)
		}
		// A tag may appear in spec, status, or both; the set de-duplicates.
		for tag := range currIS.Spec.Tags {
			tagNames.Insert(tag)
		}
		for tag := range currIS.Status.Tags {
			tagNames.Insert(tag)
		}
		for _, currTag := range tagNames.List() {
			itag, err := newITag(currTag, &currIS, nil, false)
			if err != nil {
				// A tag that no longer resolves is skipped rather than
				// failing the whole list.
				if kapierrors.IsNotFound(err) {
					continue
				}
				return nil, err
			}
			matches, err := matcher.Matches(itag)
			if err != nil {
				return nil, err
			}
			if matches {
				list.Items = append(list.Items, *itag)
			}
		}
	}
	return list, nil
}
// Get retrieves an image that has been tagged by stream and tag. `id` is of the format <stream name>:<tag>.
func (r *REST) Get(ctx context.Context, id string, options *metav1.GetOptions) (runtime.Object, error) {
	name, tag, err := nameAndTag(id)
	if err != nil {
		return nil, err
	}
	imageStream, err := r.imageStreamRegistry.GetImageStream(ctx, name, options)
	if err != nil {
		return nil, err
	}
	image, err := r.imageFor(ctx, tag, imageStream)
	if err != nil {
		if !kapierrors.IsNotFound(err) {
			return nil, err
		}
		// A missing image is tolerated: the tag may exist in spec only, and
		// newITag reports NotFound itself when the tag is entirely absent.
		image = nil
	}
	return newITag(tag, imageStream, image, false)
}
// Create adds a new spec tag to an image stream, creating the stream if it
// does not exist yet. Conflicts with concurrent writers are retried up to
// 10 times before giving up with a ServerTimeout.
func (r *REST) Create(ctx context.Context, obj runtime.Object, createValidation rest.ValidateObjectFunc, options *metav1.CreateOptions) (runtime.Object, error) {
	itag, ok := obj.(*imageapi.ImageTag)
	if !ok {
		return nil, kapierrors.NewBadRequest(fmt.Sprintf("obj is not an ImageTag: %#v", obj))
	}
	if err := rest.BeforeCreate(r.strategy, ctx, obj); err != nil {
		return nil, err
	}
	// Validate a copy so admission webhooks cannot mutate our object.
	if err := createValidation(ctx, obj.DeepCopyObject()); err != nil {
		return nil, err
	}
	namespace, ok := apirequest.NamespaceFrom(ctx)
	if !ok {
		return nil, kapierrors.NewBadRequest("a namespace must be specified to import images")
	}
	imageStreamName, imageTag, ok := imageutil.SplitImageStreamTag(itag.Name)
	if !ok {
		return nil, fmt.Errorf("%q must be of the form <stream_name>:<tag>", itag.Name)
	}
	// Retry loop: re-read the stream and re-apply the change on conflicts.
	for i := 10; i > 0; i-- {
		target, err := r.imageStreamRegistry.GetImageStream(ctx, imageStreamName, &metav1.GetOptions{})
		if err != nil {
			if !kapierrors.IsNotFound(err) {
				return nil, err
			}
			// try to create the target if it doesn't exist
			target = &imageapi.ImageStream{}
			target.Namespace = namespace
			target.Name = imageStreamName
		}
		if target.Spec.Tags == nil {
			target.Spec.Tags = make(map[string]imageapi.TagReference)
		}
		// The user wants to create a spec tag.
		_, exists := target.Spec.Tags[imageTag]
		if exists {
			return nil, kapierrors.NewAlreadyExists(imagegroup.Resource("imagetags"), itag.Name)
		}
		if itag.Spec != nil {
			target.Spec.Tags[imageTag] = *itag.Spec
		}
		// Check the stream creation timestamp and make sure we will not
		// create a new image stream while deleting.
		if target.CreationTimestamp.IsZero() {
			target, err = r.imageStreamRegistry.CreateImageStream(ctx, target, &metav1.CreateOptions{})
		} else {
			target, err = r.imageStreamRegistry.UpdateImageStream(ctx, target, false, &metav1.UpdateOptions{})
		}
		if kapierrors.IsAlreadyExists(err) || kapierrors.IsConflict(err) {
			continue
		}
		if err != nil {
			return nil, err
		}
		// Resolving the image is best-effort here; the tag was created.
		image, _ := r.imageFor(ctx, imageTag, target)
		return newITag(imageTag, target, image, true)
	}
	// We tried to update resource, but we kept conflicting. Inform the client that we couldn't complete
	// the operation but that they may try again.
	return nil, kapierrors.NewServerTimeout(imagegroup.Resource("imagetags"), "create", 2)
}
// Update sets or removes a spec tag on an image stream, creating the stream
// if it does not exist. A nil itag.Spec deletes the tag from spec. The
// ImageTag's resourceVersion is checked against the stream's for optimistic
// concurrency; an empty resourceVersion means "blind PUT".
func (r *REST) Update(ctx context.Context, tagName string, objInfo rest.UpdatedObjectInfo, createValidation rest.ValidateObjectFunc, updateValidation rest.ValidateObjectUpdateFunc, forceAllowCreate bool, options *metav1.UpdateOptions) (runtime.Object, bool, error) {
	name, tag, err := nameAndTag(tagName)
	if err != nil {
		return nil, false, err
	}
	namespace, ok := apirequest.NamespaceFrom(ctx)
	if !ok {
		return nil, false, kapierrors.NewBadRequest("namespace is required on ImageTags")
	}
	create := false
	imageStream, err := r.imageStreamRegistry.GetImageStream(ctx, name, &metav1.GetOptions{})
	if err != nil {
		if !kapierrors.IsNotFound(err) {
			return nil, false, err
		}
		// Stream does not exist: fall back to create-on-update semantics.
		imageStream = &imageapi.ImageStream{
			ObjectMeta: metav1.ObjectMeta{
				Namespace: namespace,
				Name:      name,
			},
		}
		rest.FillObjectMetaSystemFields(&imageStream.ObjectMeta)
		create = true
	}
	// create the synthetic old itag
	old, err := newITag(tag, imageStream, nil, true)
	if err != nil {
		return nil, false, err
	}
	obj, err := objInfo.UpdatedObject(ctx, old)
	if err != nil {
		return nil, false, err
	}
	itag, ok := obj.(*imageapi.ImageTag)
	if !ok {
		return nil, false, kapierrors.NewBadRequest(fmt.Sprintf("obj is not an ImageTag: %#v", obj))
	}
	// check for conflict
	switch {
	case len(itag.ResourceVersion) == 0:
		// we allow blind PUT because it is useful for the most common tag action - "I want this tag to equal this, no matter what the current value"
		itag.ResourceVersion = imageStream.ResourceVersion
	case len(imageStream.ResourceVersion) == 0:
		// image stream did not exist, cannot update
		return nil, false, kapierrors.NewNotFound(imagegroup.Resource("imagetags"), tagName)
	case imageStream.ResourceVersion != itag.ResourceVersion:
		// conflicting input and output
		return nil, false, kapierrors.NewConflict(imagegroup.Resource("imagetags"), itag.Name, fmt.Errorf("another caller has updated the resource version to %s", imageStream.ResourceVersion))
	}
	// Run create or update admission/validation depending on which path we took.
	if create {
		if err := rest.BeforeCreate(r.strategy, ctx, obj); err != nil {
			return nil, false, err
		}
		if err := createValidation(ctx, obj.DeepCopyObject()); err != nil {
			return nil, false, err
		}
	} else {
		if err := rest.BeforeUpdate(r.strategy, ctx, obj, old); err != nil {
			return nil, false, err
		}
		if err := updateValidation(ctx, obj.DeepCopyObject(), old.DeepCopyObject()); err != nil {
			return nil, false, err
		}
	}
	// if !exists && itag.Spec == nil {
	// 	return nil, false, kapierrors.NewBadRequest(fmt.Sprintf("imagetag %s is not a spec or status tag in imagestream %s/%s, cannot be updated", tag, imageStream.Namespace, imageStream.Name))
	// }
	tagRef, exists := imageStream.Spec.Tags[tag]
	if itag.Spec != nil {
		if imageStream.Spec.Tags == nil {
			imageStream.Spec.Tags = map[string]imageapi.TagReference{}
		}
		tagRef = *itag.Spec
		tagRef.Name = tag
		imageStream.Spec.Tags[tag] = tagRef
	} else {
		// TODO: should error if it doesn't already exist?
		delete(imageStream.Spec.Tags, tag)
	}
	// mutate the image stream
	var newImageStream *imageapi.ImageStream
	if create {
		newImageStream, err = r.imageStreamRegistry.CreateImageStream(ctx, imageStream, &metav1.CreateOptions{})
	} else {
		newImageStream, err = r.imageStreamRegistry.UpdateImageStream(ctx, imageStream, false, &metav1.UpdateOptions{})
	}
	if err != nil {
		return nil, false, err
	}
	// Best-effort image lookup: NotFound is tolerated (spec-only tags).
	image, err := r.imageFor(ctx, tag, newImageStream)
	if err != nil {
		if !kapierrors.IsNotFound(err) {
			return nil, false, err
		}
	}
	newITag, err := newITag(tag, newImageStream, image, true)
	return newITag, !exists, err
}
// Delete removes a tag from a stream. `id` is of the format <stream name>:<tag>.
// The associated image that the tag points to is *not* deleted.
// The tag history is removed. Conflicting concurrent updates are retried up
// to 10 times before returning a ServerTimeout.
func (r *REST) Delete(ctx context.Context, id string, objectFunc rest.ValidateObjectFunc, options *metav1.DeleteOptions) (runtime.Object, bool, error) {
	name, tag, err := nameAndTag(id)
	if err != nil {
		return nil, false, err
	}
	for i := 10; i > 0; i-- {
		stream, err := r.imageStreamRegistry.GetImageStream(ctx, name, &metav1.GetOptions{})
		if err != nil {
			return nil, false, err
		}
		// Honor a UID precondition if the caller supplied one.
		if options != nil {
			if pre := options.Preconditions; pre != nil {
				if pre.UID != nil && *pre.UID != stream.UID {
					return nil, false, kapierrors.NewConflict(imagegroup.Resource("imagetags"), id, fmt.Errorf("the UID precondition was not met"))
				}
			}
		}
		notFound := true
		// Try to delete the status tag
		if _, ok := stream.Status.Tags[tag]; ok {
			delete(stream.Status.Tags, tag)
			notFound = false
		}
		// Try to delete the spec tag
		if _, ok := stream.Spec.Tags[tag]; ok {
			delete(stream.Spec.Tags, tag)
			notFound = false
		}
		if notFound {
			return nil, false, kapierrors.NewNotFound(imagegroup.Resource("imagetags"), id)
		}
		_, err = r.imageStreamRegistry.UpdateImageStream(ctx, stream, false, &metav1.UpdateOptions{})
		// A conflict means someone else changed the stream: re-read and retry.
		if kapierrors.IsConflict(err) {
			continue
		}
		if err != nil && !kapierrors.IsNotFound(err) {
			return nil, false, err
		}
		return &metav1.Status{Status: metav1.StatusSuccess}, true, nil
	}
	// We tried to update resource, but we kept conflicting. Inform the client that we couldn't complete
	// the operation but that they may try again.
	return nil, false, kapierrors.NewServerTimeout(imagegroup.Resource("imagetags"), "delete", 2)
}
// imageFor retrieves the most recent image for a tag in a given image stream,
// returning NotFound when the tag has no tagged image events.
func (r *REST) imageFor(ctx context.Context, tag string, imageStream *imageapi.ImageStream) (*imageapi.Image, error) {
	event := internalimageutil.LatestTaggedImage(imageStream, tag)
	if event == nil || len(event.Image) == 0 {
		return nil, kapierrors.NewNotFound(imagegroup.Resource("imagetags"), imageutil.JoinImageStreamTag(imageStream.Name, tag))
	}
	return r.imageRegistry.GetImage(ctx, event.Image, &metav1.GetOptions{})
}
// newITag initializes an image tag from an image stream and image. When
// allowEmpty is false, a NotFound error is returned if the tag exists in
// neither the stream's spec nor its status.
func newITag(tag string, imageStream *imageapi.ImageStream, image *imageapi.Image, allowEmpty bool) (*imageapi.ImageTag, error) {
	itagName := imageutil.JoinImageStreamTag(imageStream.Name, tag)
	// The ImageTag inherits the stream's object metadata, renamed to
	// <stream>:<tag>.
	itag := &imageapi.ImageTag{
		ObjectMeta: imageStream.ObjectMeta,
	}
	itag.Name = itagName
	var event *imageapi.TagEvent
	for name, tagEvents := range imageStream.Status.Tags {
		if name != tag {
			continue
		}
		itag.Status = &imageapi.NamedTagEventList{
			Tag:        name,
			Conditions: tagEvents.Conditions,
			Items:      tagEvents.Items,
		}
		// Items[0] is the most recent tag event.
		if len(tagEvents.Items) > 0 {
			event = &tagEvents.Items[0]
		}
		break
	}
	// Copy the spec tag reference so callers cannot mutate the stream's map entry.
	if tagRef, ok := imageStream.Spec.Tags[tag]; ok {
		copiedTagRef := tagRef
		itag.Spec = &copiedTagRef
	}
	if image != nil {
		if err := internalimageutil.InternalImageWithMetadata(image); err != nil {
			return nil, err
		}
		// Strip the large manifest/config payloads from the response.
		image.DockerImageManifest = ""
		image.DockerImageConfig = ""
		itag.Image = image
		itag.Image.DockerImageReference = internalimageutil.ResolveReferenceForTagEvent(imageStream, tag, event)
	}
	if !allowEmpty && itag.Spec == nil && itag.Status == nil {
		return nil, kapierrors.NewNotFound(imagegroup.Resource("imagetags"), itagName)
	}
	return itag, nil
}
|
import React from "react"
import Layout from "../components/layout"
import ArchivePage from "../components/Layouts/ArchivePage"
import VideoCard from "../components/Cards/Video/VideoCard.styled"
export default ({ pageContext: { allFilms } }) => (
<Layout>
<ArchivePage
title="Films"
items={
(allFilms &&
allFilms.map(item => {
return (
<VideoCard
title={item.title}
cast={item.cast.cast}
link={item.link}
video={item.video_url}
></VideoCard>
)
})) || [<VideoCard cast={[""]}></VideoCard>]
}
></ArchivePage>
</Layout>
)
|
#!/bin/bash
# Copyright (C) Microsoft Corporation. All rights reserved.
#
# Microsoft Corporation (“Microsoft”) grants you a nonexclusive, perpetual,
# royalty-free right to use, copy, and modify the software code provided by us
# ('Software Code'). You may not sublicense the Software Code or any use of it
# (except to your affiliates and to vendors to perform work on your behalf)
# through distribution, network access, service agreement, lease, rental, or
# otherwise. This license does not purport to express any claim of ownership over
# data you may have shared with Microsoft in the creation of the Software Code.
# Unless applicable law gives you more rights, Microsoft reserves all other
# rights not expressly granted herein, whether by implication, estoppel or
# otherwise.
#
# THE SOFTWARE CODE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# MICROSOFT OR ITS LICENSORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THE SOFTWARE CODE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
set -eux
conda env create -f credit_card/ci_dependencies.yml
# FIX: `conda activate` fails in non-interactive scripts unless the conda
# shell hook has been initialized for this shell first.
eval "$(conda shell.bash hook)"
conda activate mlopspython_ci
|
package com.example
import akka.actor.{Actor, ActorIdentity, Identify, Props}
import akka.event.Logging
// Demonstrates actor lookup via actorSelection + Identify/ActorIdentity.
class MyActor extends Actor {
  val log = Logging(context.system, this)
  val identifyId = 1 // correlation id for the Identify request
  val child = context.actorOf(Props[MyActor2], name = "myChild")
  var lastSender = context.system.deadLetters
  def receive = {
    case "search" => {
      context.actorSelection("/user/myActor/myChild") ! Identify(identifyId) // absolute path
      // Other ways to address the same actor:
      // context.actorSelection("../myActor/myChild") ! Identify(identifyId) // relative path
      // context.actorSelection("myChild") ! Identify(identifyId) // relative path (means `./myChild`)
      // context.actorSelection("myChi*") ! Identify(identifyId) // wildcard
      lastSender = sender
    }
    case ActorIdentity(`identifyId`, Some(ref)) => {
      log.info("found")
      lastSender ! ref // reply with the lookup result
    }
    case ActorIdentity(`identifyId`, None) => {
      log.info("not found")
    }
    // Any other string is logged and forwarded to the child actor.
    case s: String => {
      log.info(s)
      child ! s
    }
    case _ => {
    }
  }
}
|
#include <unistd.h>
#include <sys/time.h>
#include <string.h>
#include <stdlib.h>
#include <config-core/config_bindings.h>
#include <common_utils.h>
#include <http-utils/http_client.h>
#include "rewrite_core.h"
#ifndef __USE_GNU
#define __USE_GNU
#endif
#include <dlfcn.h>
#define URI_POSTFIX_AUTOREFRESH_TIMESTAMP "autoRefreshTimestamp.txt"
/*
 * Allocates a cfg_service_descriptor from pool p with all string fields
 * NULL, a default timeout of 2 seconds, and an empty params hash.
 */
cfg_service_descriptor* cb_newServiceDescriptorObj(pool* p){
    cfg_service_descriptor* ret=(cfg_service_descriptor*)apr_palloc(p,sizeof(cfg_service_descriptor));
    ret->id=NULL;
    ret->name=NULL;
    ret->uri=NULL;
    ret->userColonPass=NULL;
    ret->timeoutSeconds=2;
    ret->params=apr_hash_make(p);
    return ret;
}
/*
 * Allocates a zeroed cfg_globals from pool p.
 * Note: apr_pcalloc already zero-fills, so the explicit NULL assignments
 * below are redundant but kept for clarity.
 */
cfg_globals* cb_newGlobalsObj(pool* p){
    cfg_globals* ret=(cfg_globals*)apr_pcalloc(p,sizeof(cfg_globals));
    ret->homeDir=NULL;
    ret->logsDir=NULL;
    ret->resourceService=NULL;
    return ret;
}
/*
 * One-time initialization for the parsed globals: ensures the configured
 * logs directory exists. Always returns NULL (no error reporting yet).
 */
char* cb_initGlobals(pool* p,cfg_globals* globals){
    if(globals->logsDir!=NULL){
        apr_dir_make_recursive(globals->logsDir,APR_OS_DEFAULT,p);
    }
    return NULL;
}
/*
 * Downloads reqUri and atomically installs the body as homeDir/resource:
 * the payload is first written to a ".part" file and then renamed into
 * place, so readers never observe a half-written file.
 * Returns the final file path on success, NULL otherwise. On failure,
 * *details (if non-NULL) receives a diagnostic and *responseCode (if
 * non-NULL) the HTTP status when one is available.
 */
char* cb_writeRemoteResourceToDisk(pool* p, char* homeDir, char* reqUri, char* resource, long timeoutSeconds, char* userColonPass,
        apr_pool_t* tp, char**details, int* responseCode){
    char* ret=NULL, * bakFile=NULL,* filename=NULL;
    http_util_result* result=NULL;
    //file vars
    apr_file_t* file=NULL;
    apr_status_t status;
    apr_size_t file_written;
    char* errorMsg=NULL;
    if(resource==NULL) return NULL;
    result=hc_get_verbose(p,reqUri,timeoutSeconds,userColonPass,NULL,&errorMsg);
    if(hc_is200_OK(result)){
        //write file to filesystem
        if(result->size>0){
            bakFile=apr_pstrcat(tp,homeDir,"/",resource,".part",NULL);
            status=apr_file_open(&file,bakFile,APR_WRITE|APR_CREATE|APR_TRUNCATE,APR_OS_DEFAULT,tp);
            if(status!=APR_SUCCESS){
                /* BUG FIX: the open status was previously ignored and the
                 * (possibly invalid) handle used unconditionally. */
                if(details!=NULL){
                    *details=apr_pstrcat(p,"Failure to open file:",SAFESTR(bakFile),NULL);
                }
                return NULL;
            }
            if(apr_file_write_full(file,result->data,result->size,&file_written)==APR_SUCCESS){
                filename=apr_pstrcat(p,homeDir,"/",resource,NULL);
                apr_file_close(file);
                /* Atomic install: only expose the file once fully written. */
                if(apr_file_rename(bakFile,filename,tp)==APR_SUCCESS){
                    ret=filename;
                }
            }else{
                apr_file_close(file);
                if(details!=NULL){
                    *details=apr_pstrcat(p,"Failure to write file:",SAFESTR(bakFile),NULL);
                }
            }
        }
    }else{
        if(details!=NULL){
            *details=apr_pstrcat(p,"Failure to write file (Response Code!=200): ",SAFESTR(resource),",",SAFESTR(errorMsg),NULL);
        }
        if(responseCode!=NULL&&result!=NULL){
            *responseCode=result->responseCode;
        }
    }
    return ret;
}
/*
 * Asks the resource service whether an auto-refresh is due for `namespace`:
 * fetches <service-uri><namespace>/autoRefreshTimestamp.txt and returns TRUE
 * when the advertised timestamp has already passed, FALSE on any fetch or
 * parse problem.
 * NOTE(review): lastRefreshTimestamp is currently unused — confirm whether
 * it should gate the decision. (Removed the unused locals endptr/value.)
 */
int cb_canAutoRefreshNow(pool* p, cfg_service_descriptor* resourceService, time_t lastRefreshTimestamp, time_t currentTimestamp, char* namespace,char**error){
    http_util_result* httpResult=NULL;
    char* reqQuery=NULL;
    if(resourceService==NULL) return FALSE;
    reqQuery=apr_pstrcat(p,resourceService->uri,namespace,"/",URI_POSTFIX_AUTOREFRESH_TIMESTAMP,NULL);
    httpResult=hc_get_verbose2(p,reqQuery,resourceService->timeoutSeconds,5,resourceService->userColonPass,NULL,error);
    if(httpResult==NULL||httpResult->data==NULL||!hc_is200_OK(httpResult)) return FALSE;
    time_t autoRefreshTimestamp = cu_dateStringToSeconds(httpResult->data);
    if(autoRefreshTimestamp<0 || currentTimestamp<autoRefreshTimestamp) return FALSE;
    return TRUE;
}
|
<filename>packages/client/src/features/Events/EventsList/index.ts
// Barrel file: re-exports the EventsList feature's public API.
export * from './EventsList';
|
#! /usr/bin/env bash
# Publish a Grafana release to packagecloud: the .deb to three Debian
# distributions and the .rpm to EL6/EL7, then clean up local artifacts.
#
# FIX: abort on the first failed download or push (set -e) so a failed wget
# no longer leads to pushing stale/missing packages and deleting evidence.
set -euo pipefail

deb_ver=4.6.0-beta3
rpm_ver=4.6.0-beta3

wget "https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_${deb_ver}_amd64.deb"

package_cloud push grafana/testing/debian/jessie "grafana_${deb_ver}_amd64.deb"
package_cloud push grafana/testing/debian/wheezy "grafana_${deb_ver}_amd64.deb"
package_cloud push grafana/testing/debian/stretch "grafana_${deb_ver}_amd64.deb"

wget "https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-${rpm_ver}.x86_64.rpm"

package_cloud push grafana/testing/el/6 "grafana-${rpm_ver}.x86_64.rpm"
package_cloud push grafana/testing/el/7 "grafana-${rpm_ver}.x86_64.rpm"

# Remove the downloaded packages now that they are published.
rm grafana*.{deb,rpm}
|
import { componentTester } from '../../testUtils/componentTester';
// Harness configuration for componentTester: the component name and the
// nunjucks macro file under test.
const setup = {
  component: {
    name: 'viewIntegrations',
    path: 'sections/view-integrations/macro.njk',
  },
};
describe('view-integrations', () => {
  // Builds a request context carrying an arbitrary section payload.
  const sectionContext = (section) => ({ params: { section } });
  // Builds a context whose section contains the given answers.
  const answersContext = (answers) => sectionContext({ answers });

  it('should render the integrations section if integrations answer link is provided', componentTester(setup, (harness) => {
    harness.request(answersContext({ link: 'some-link' }), ($) => {
      expect($('[data-test-id="view-integrations"]').length).toEqual(1);
    });
  }));

  it('should render the integrations section if integrations answer document-link is provided', componentTester(setup, (harness) => {
    harness.request(answersContext({ 'document-link': 'some-link' }), ($) => {
      expect($('[data-test-id="view-integrations"]').length).toEqual(1);
    });
  }));

  it('should not render the integrations section if no section data provided', componentTester(setup, (harness) => {
    harness.request(sectionContext({}), ($) => {
      expect($('[data-test-id="view-integrations"]').length).toEqual(0);
    });
  }));

  it('should not render the integrations section if invalid section provided', componentTester(setup, (harness) => {
    harness.request(sectionContext({ 'invalid-section': {} }), ($) => {
      expect($('[data-test-id="view-integrations"]').length).toEqual(0);
    });
  }));

  it('should render the title of the section if the integrations section is provided', componentTester(setup, (harness) => {
    harness.request(answersContext({ link: 'some-link' }), ($) => {
      expect($('h3').text().trim()).toEqual('Integrations');
    });
  }));

  it('should render the additional information of the section if the integrations section is provided', componentTester(setup, (harness) => {
    harness.request(answersContext({ link: 'some-link' }), ($) => {
      expect($('[data-test-id="view-integrations-guidance"]').text().trim()).toEqual('View information about the systems this Catalogue Solution integrates with to exchange data:');
    });
  }));

  it('should render the viewSupplierAssertedIntegrations component if link answer is provided', componentTester(setup, (harness) => {
    harness.request(answersContext({ link: 'some-link' }), ($) => {
      expect($('[data-test-id="view-supplier-asserted-integrations"]').length).toEqual(1);
      expect($('[data-test-id="view-supplier-asserted-integrations"] a').text().trim()).toEqual('some-link');
    });
  }));

  it('should render the viewAuthorityAssuredIntegrations component if document-link answer is provided', componentTester(setup, (harness) => {
    harness.request(answersContext({ 'document-link': 'some-document-link' }), ($) => {
      expect($('[data-test-id="view-authority-assured-integrations"]').length).toEqual(1);
      expect($('[data-test-id="view-question-data-text-link-authority-integrations"] a').text().trim()).toEqual('View NHS assured integrations');
    });
  }));
});
|
<filename>src/webui/backend/webui/views.py
import logbook
from flask import Blueprint, send_from_directory, send_file
from . import config
# Blueprint exposing the compiled web UI: the SPA entry point plus its assets.
app_views = Blueprint('app_views', __name__)
logger = logbook.Logger(__name__)
@app_views.route('/')
def index():
    """Serve the single-page-app entry point (index.html from BUILD_FOLDER)."""
    return send_file(str(config.BUILD_FOLDER.joinpath('index.html')))
@app_views.route('/<path:path>')
def serve_resources(path):
    """Serve a static asset; on failure fall back to the SPA entry point.

    Falling back to index() lets client-side routes resolve instead of 404ing.
    """
    try:
        return send_from_directory(config.STATIC_FOLDER, path)
    except Exception:
        # NOTE(review): the broad catch also masks real I/O errors, not just
        # missing files — consider catching werkzeug's NotFound specifically.
        logger.error(f'Bad resource access: {path}')
        return index()
|
<reponame>simplebam/HubRead<gh_stars>10-100
package com.yueyue.readhub.feature.topic.instant;
import com.yueyue.readhub.base.BasePresenter;
import com.yueyue.readhub.base.mvp.INetworkPresenter;
import com.yueyue.readhub.model.InstantReadData;
import com.yueyue.readhub.network.ApiService;
import com.yueyue.readhub.network.HotTopicService;
import io.reactivex.Observable;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.annotations.NonNull;
import io.reactivex.disposables.Disposable;
import io.reactivex.functions.Consumer;
import io.reactivex.schedulers.Schedulers;
/**
* author : yueyue on 2018/4/4 19:35
* desc :
*/
public class InstantReadPresenter extends BasePresenter<InstantReadFragment>
        implements INetworkPresenter<InstantReadFragment> {
    private HotTopicService mService = ApiService.getInstance().createHotTopicService();
    private String mTopicId;
    // Holds the in-flight subscription so it can be cancelled before a new
    // request starts. FIX: the original created a Disposable and dropped it,
    // leaking the subscription.
    private Disposable mDisposable;

    /**
     * Fetches the instant-read article for the current topic id and delivers
     * the result (or error) to the attached view on the main thread.
     */
    @Override
    public void start() {
        // Dispose any previous in-flight request instead of leaking it.
        if (mDisposable != null && !mDisposable.isDisposed()) {
            mDisposable.dispose();
        }
        mDisposable = request()
                .observeOn(AndroidSchedulers.mainThread())
                .subscribeOn(Schedulers.io())
                .subscribe(new Consumer<InstantReadData>() {
                    @Override
                    public void accept(@NonNull InstantReadData instantReadData) {
                        // View may already be detached; drop the result then.
                        if (getView() == null) return;
                        getView().onSuccess(instantReadData);
                    }
                }, new Consumer<Throwable>() {
                    @Override
                    public void accept(@NonNull Throwable throwable) {
                        if (getView() == null) return;
                        // Log locally, then let the view render the error state.
                        throwable.printStackTrace();
                        getView().onError(throwable);
                    }
                });
    }

    /** Paging is not supported for instant-read content; intentionally empty. */
    @Override
    public void startRequestMore() {
    }

    /** Builds the network request for the current topic id. */
    @Override
    public Observable<InstantReadData> request() {
        return mService.getInstantRead(mTopicId);
    }

    /** No "load more" request exists; see {@link #startRequestMore()}. */
    @Override
    public Observable requestMore() {
        return null;
    }

    /** Entry point: remember the topic id and kick off {@link #start()}. */
    public void getInstantRead(String topicId) {
        mTopicId = topicId;
        start();
    }
}
|
<reponame>apulliam/WordAddinAngularTypescript
/// <reference path="../typings/tsd.d.ts" />
((): void => {
    // Angular DI annotation; safe before the declaration because function
    // declarations are hoisted.
    configureRoutes.$inject = ["$routeProvider"];

    // Registers the add-in's client-side routes; unknown paths go to /home.
    function configureRoutes($routeProvider: ng.route.IRouteProvider) {
        $routeProvider
            .when("/home", {
                templateUrl: "/app/home/home.html",
                controller: "app.HomeController",
                controllerAs: "vm"
            })
            .when("/demo", {
                templateUrl: "/app/demo/demo.html",
                controller: "app.DemoController",
                controllerAs: "vm"
            })
            .otherwise({
                redirectTo: "/home"
            });
    }

    angular.module("app").config(configureRoutes);
})();
|
<form>
  <!-- FIX: labels associated with their inputs via for/id so clicking a
       label focuses the field (accessibility / screen-reader support). -->
  <label for="name">Enter name:</label>
  <input type="text" id="name" name="name" placeholder="Enter your name">
  <br><br>
  <label for="lname">Enter last name:</label>
  <input type="text" id="lname" name="lname" placeholder="Enter your last name"><br><br>
  <input type="submit" value="Submit">
</form>
|
<gh_stars>1-10
/*
* stdlib.h -- Standard C library routines.
*
* by <NAME>
*
* Copyright 2003-2012 -- See accompanying license
*
*/
#ifndef __cat_stdlib_h
#define __cat_stdlib_h
#include <cat/cat.h>
#include <stdarg.h>
/* We use the mem* functions even if we don't use the standard library */
#if !CAT_USE_STDLIB
/* String-to-number conversions; *cp (when non-NULL) receives a pointer to
 * the first unparsed character, mirroring the standard C contract. */
long strtol(const char *start, char **cp, int base);
ulong strtoul(const char *start, char **cp, int base);
double strtod(const char *start, char **cp);
/* Heap management, declared here only when the host stdlib is not in use. */
void *malloc(size_t len);
void *calloc(size_t nmem, size_t ilen);
void *realloc(void *omem, size_t len);
void free(void *mem);
/* TODO: in the near future
 * errno
 */
/* Process termination. */
void exit(int status);
void abort(void);
#endif /* !CAT_USE_STDLIB */
#endif /* __cat_stdlib_h */
|
/**
 * Converts an incoming credential-request message into a normalized request
 * object: { from, fullName, type, sdr, isValid, status, id }.
 *
 * Throws Error('Invalid request') when the input is missing, is not a
 * 'cred1' credentialRequest, or lacks issuer/fullName/disclosure claims.
 */
export function messageToRequest(credentialRequest) {
  let request
  if (!credentialRequest || !credentialRequest.message) throw new Error('Invalid request')
  const message = credentialRequest.message
  if (message.data.claims.find(c => c.claimType === 'credentialRequest' && c.claimValue === 'cred1')) {
    const from = message.data.iss
    const fullName = message.data.claims.find(c => c.claimType === 'fullName')?.claimValue
    const type = message.data.claims.find(c => c.claimType === 'type')?.claimValue
    // Selective-disclosure claims: everything except the request marker + type.
    const sdr = message.data.claims.filter(c => c.claimType !== 'credentialRequest' && c.claimType !== 'type')
    // FIX: Array.prototype.indexOf compares object references, so the original
    // indexOf({ type: 'JWT', value: 'ES256K-R' }) always returned -1 (truthy).
    // Compare by properties so isValid is a meaningful boolean.
    const isValid = message.metaData.some(m => m.type === 'JWT' && m.value === 'ES256K-R')
    const status = credentialRequest.status
    const id = credentialRequest.id
    if (from && fullName && sdr.length > 0) request = { from, fullName, type, sdr, isValid, status, id }
  }
  if (!request) throw new Error('Invalid request')
  return request
}
|
<reponame>toba/goweb
// Package content enumerates the HTTP header Content-* keys.
package content

const (
	// Encoding names the compression applied to the body (e.g. "gzip").
	Encoding = "Content-Encoding"
	// Length indicates file size in (8-bit) bytes
	Length = "Content-Length"
	// Type carries the MIME type of the body.
	Type = "Content-Type"
)
|
package club.coding.med.staffpayroll.employees;
/**
 * Base type for all payroll employees. Concrete subclasses define how the
 * per-period pay amount is computed via {@link #calculatePay()}.
 */
public abstract class Employee {
    // Fields are final: set once at construction, never mutated (no setters
    // existed, so this is backward compatible).
    private final String name;
    private final int employeeId;

    public Employee(String name, int employeeId) {
        this.name = name;
        this.employeeId = employeeId;
    }

    /** @return the gross pay owed to this employee for one pay period */
    public abstract double calculatePay();
}
/**
 * Salaried employee: pay is the fixed per-period salary.
 *
 * FIX: declared package-private because Java allows only one public
 * top-level class per file and this file already declares a public class,
 * so the file could not compile as written.
 */
class FullTimeEmployee extends Employee {
    private final double salary;

    public FullTimeEmployee(String name, int employeeId, double salary) {
        super(name, employeeId);
        this.salary = salary;
    }

    /** @return the fixed salary for the pay period */
    @Override
    public double calculatePay() {
        return salary;
    }
}
/**
 * Hourly part-time employee: pay is rate times hours worked.
 *
 * FIX: declared package-private because Java allows only one public
 * top-level class per file and this file already declares a public class,
 * so the file could not compile as written.
 */
class PartTimeEmployee extends Employee {
    private final double hourlyRate;
    private final int hoursWorked;

    public PartTimeEmployee(String name, int employeeId, double hourlyRate, int hoursWorked) {
        super(name, employeeId);
        this.hourlyRate = hourlyRate;
        this.hoursWorked = hoursWorked;
    }

    /** @return hourly rate multiplied by hours worked this period */
    @Override
    public double calculatePay() {
        return hourlyRate * hoursWorked;
    }
}
/**
 * Contractor: billed hourly, pay is rate times hours worked.
 *
 * FIX: declared package-private because Java allows only one public
 * top-level class per file and this file already declares a public class,
 * so the file could not compile as written.
 * NOTE(review): duplicates PartTimeEmployee's pay logic; consider a shared
 * hourly base class if the two are meant to stay identical.
 */
class Contractor extends Employee {
    private final double hourlyRate;
    private final int hoursWorked;

    public Contractor(String name, int employeeId, double hourlyRate, int hoursWorked) {
        super(name, employeeId);
        this.hourlyRate = hourlyRate;
        this.hoursWorked = hoursWorked;
    }

    /** @return hourly rate multiplied by hours worked this period */
    @Override
    public double calculatePay() {
        return hourlyRate * hoursWorked;
    }
}
|
<reponame>extendi/sunspot
require 'set'
require 'time'
require 'date'
require 'enumerator'
require 'cgi'
require 'semantic'
begin
require 'rsolr'
rescue LoadError
require 'rubygems'
require 'rsolr'
end
require File.join(File.dirname(__FILE__), 'light_config')
%w(util adapters configuration setup composite_setup text_field_setup field
field_factory data_extractor indexer query search session session_proxy
type dsl class_set).each do |filename|
require File.join(File.dirname(__FILE__), 'sunspot', filename)
end
require File.join(File.dirname(__FILE__), 'sunspot', 'admin', 'admin_session.rb')
#
# The Sunspot module provides class-method entry points to most of the
# functionality provided by the Sunspot library. Internally, the Sunspot
# singleton class contains a (non-thread-safe!) instance of Sunspot::Session,
# to which it delegates most of the class methods it exposes. In the method
# documentation below, this instance is referred to as the "singleton session".
#
# Though the singleton session provides a convenient entry point to Sunspot,
# it is by no means required to use the Sunspot class methods. Multiple sessions
# may be instantiated and used (if you need to connect to multiple Solr
# instances, for example.)
#
# Note that the configuration of classes for index/search (the +setup+
# method) is _not_ session-specific, but rather global.
#
module Sunspot
UnrecognizedFieldError = Class.new(StandardError)
UnrecognizedOptionError = Class.new(StandardError)
UnrecognizedRestrictionError = Class.new(StandardError)
NoAdapterError = Class.new(StandardError)
NoSetupError = Class.new(StandardError)
IllegalSearchError = Class.new(StandardError)
NotImplementedError = Class.new(StandardError)
autoload :Installer, File.join(File.dirname(__FILE__), 'sunspot', 'installer')
# Array to track classes that have been set up for searching.
# Used by, e.g., Sunspot::Rails for reindexing all searchable classes.
@searchable = ClassSet.new
class <<self
#
# Clients can inject a session proxy, allowing them to implement custom
# session-management logic while retaining the Sunspot singleton API as
# an available interface. The object assigned to this attribute must
# respond to all of the public methods of the Sunspot::Session class.
#
attr_writer :session
#
# Access the list of classes set up to be searched.
#
attr_reader :searchable
# Configures indexing and search for a given class.
#
# ==== Parameters
#
# clazz<Class>:: class to configure
#
# ==== Example
#
# Sunspot.setup(Post) do
# text :title, :body
# string :author_name
# integer :blog_id
# integer :category_ids
# float :average_rating, :using => :ratings_average
# time :published_at
# string :sort_title do
#       title.downcase.sub(/^(an?|the)\W+/, '') if title = self.title
# end
# end
#
# ====== Attribute Fields vs. Virtual Fields
#
# Attribute fields call a method on the indexed object and index the
# return value. All of the fields defined above except for the last one are
# attribute fields. By default, the field name will also be the attribute
# used; this can be overriden with the +:using+ option, as in
# +:average_rating+ above. In that case, the attribute +:ratings_average+
# will be indexed with the field name +:average_rating+.
#
# +:sort_title+ is a virtual field, which evaluates the block inside the
# context of the instance being indexed, and indexes the value returned
# by the block. If the block you pass takes an argument, it will be passed
# the instance rather than being evaluated inside of it; so, the following
# example is equivalent to the one above (assuming #title is public):
#
# Sunspot.setup(Post) do
# string :sort_title do |post|
#       post.title.downcase.sub(/^(an?|the)\W+/, '') if title = post.title
# end
# end
#
# ===== Field Types
#
# The available types are:
#
# * +text+
# * +string+
# * +integer+
# * +float+
# * +time+
# * +boolean+
#
# Note that the +text+ type behaves quite differently from the others -
# this is the type that is indexed as fulltext, and is searched using the
# +keywords+ method inside the search DSL. Text fields cannot have
# restrictions set on them, nor can they be used in order statements or
# for facets. All other types are indexed literally, and thus can be used
# for all of those operations. They will not, however, be searched in
# fulltext. In this way, Sunspot provides a complete barrier between
# fulltext fields and value fields.
#
# It is fine to specify a field both as a text field and a string field;
# internally, the fields will have different names so there is no danger
# of conflict.
#
# ===== Dynamic Fields
#
# For use cases which have highly dynamic data models (for instance, an
# open set of key-value pairs attached to a model), it may be useful to
# defer definition of fields until indexing time. Sunspot exposes dynamic
# fields, which define a data accessor (either attribute or virtual, see
# above), which accepts a hash of field names to values. Note that the field
# names in the hash are internally scoped to the base name of the dynamic
# field, so any time they are referred to, they are referred to using both
# the base name and the dynamic (runtime-specified) name.
#
# Dynamic fields are speficied in the setup block using the type name
# prefixed by +dynamic_+. For example:
#
# Sunspot.setup(Post) do
# dynamic_string :custom_values do
# key_value_pairs.inject({}) do |hash, key_value_pair|
# hash[key_value_pair.key.to_sym] = key_value_pair.value
# end
# end
# end
#
# If you later wanted to facet all of the values for the key "cuisine",
# you could issue:
#
# Sunspot.search(Post) do
# dynamic :custom_values do
# facet :cuisine
# end
# end
#
# In the documentation, +:custom_values+ is referred to as the "base name" -
# that is, the one specified statically - and +:cuisine+ is referred to as
# the dynamic name, which is the part that is specified at indexing time.
#
def setup(clazz, &block)
  # Record the class so global operations (e.g. reindexing every searchable
  # class from Sunspot::Rails) can enumerate it later.
  Sunspot.searchable << clazz
  Setup.setup(clazz, &block)
end
# Indexes objects on the singleton session.
#
# ==== Parameters
#
# objects...<Object>:: objects to index (may pass an array or varargs)
#
# ==== Example
#
#   post1, post2 = Array.new(2) { Post.create }
# Sunspot.index(post1, post2)
#
# Note that indexed objects won't be reflected in search until a commit is
# sent - see Sunspot.index! and Sunspot.commit
#
def index(*objects)
session.index(*objects)
end
# Indexes objects on the singleton session and commits immediately.
#
# See: Sunspot.index and Sunspot.commit
#
# ==== Parameters
#
# objects...<Object>:: objects to index (may pass an array or varargs)
#
def index!(*objects)
session.index!(*objects)
end
# Atomic update object properties on the singleton session.
#
# ==== Parameters
#
# clazz<Class>:: the class of the objects to be updated
# updates<Hash>:: hash of updates where keys are model ids
# and values are hash with property name/values to be updated
#
# ==== Example
#
#   post1, post2 = Array.new(2) { Post.create }
# Sunspot.atomic_update(Post, post1.id => {title: 'New Title'}, post2.id => {description: 'new description'})
#
# Note that indexed objects won't be reflected in search until a commit is
# sent - see Sunspot.index! and Sunspot.commit
#
def atomic_update(clazz, updates = {})
session.atomic_update(clazz, updates)
end
# Atomic update object properties on the singleton session.
#
# See: Sunspot.atomic_update and Sunspot.commit
#
# ==== Parameters
#
# clazz<Class>:: the class of the objects to be updated
# updates<Hash>:: hash of updates where keys are model ids
# and values are hash with property name/values to be updated
#
def atomic_update!(clazz, updates = {})
session.atomic_update!(clazz, updates)
end
# Commits (soft or hard) the singleton session
#
# When documents are added to or removed from Solr, the changes are
# initially stored in memory, and are not reflected in Solr's existing
# searcher instance. When a hard commit message is sent, the changes are written
# to disk, and a new searcher is spawned. Commits are thus fairly
# expensive, so if your application needs to index several documents as part
# of a single operation, it is advisable to index them all and then call
# commit at the end of the operation.
# Solr 4 introduced the concept of a soft commit which is much faster
# since it only makes index changes visible while not writing changes to disk.
# If Solr crashes or there is a loss of power, changes that occurred after
# the last hard commit will be lost.
#
# Note that Solr can also be configured to automatically perform a commit
# after either a specified interval after the last change, or after a
# specified number of documents are added. See
# http://wiki.apache.org/solr/SolrConfigXml
#
def commit(soft_commit = false)
session.commit soft_commit
end
# Optimizes the index on the singletion session.
#
# Frequently adding and deleting documents to Solr, leaves the index in a
# fragmented state. The optimize command merges all index segments into
# a single segment and removes any deleted documents, making it faster to
# search. Since optimize rebuilds the index from scratch, it takes some
# time and requires double the space on the hard disk while it's rebuilding.
# Note that optimize also commits.
def optimize
session.optimize
end
#
# Create a new Search instance, but do not execute it immediately. Generally
# you will want to use the #search method to build and execute searches in
# one step, but if you are building searches piecemeal you may call
# #new_search and then call #build one or more times to add components to
# the query.
#
# ==== Example
#
# search = Sunspot.new_search do
# with(:blog_id, 1)
# end
# search.build do
# keywords('some keywords')
# end
# search.build do
# order_by(:published_at, :desc)
# end
# search.execute
#
# # This is equivalent to:
# Sunspot.search do
# with(:blog_id, 1)
# keywords('some keywords')
# order_by(:published_at, :desc)
# end
#
# ==== Parameters
#
# types<Class>...::
# One or more types to search for. If no types are passed, all
# configured types will be searched for.
#
# ==== Returns
#
# Sunspot::Search::
# Search object, not yet executed. Query parameters can be added manually;
# then #execute should be called.
#
def new_search(*types, &block)
session.new_search(*types, &block)
end
# Search for objects in the index.
#
# ==== Parameters
#
# types<Class>...::
# One or more types to search for. If no types are passed, all
# configured types will be searched.
#
# ==== Returns
#
# Sunspot::Search:: Object containing results, facets, count, etc.
#
# The fields available for restriction, ordering, etc. are those that meet
# the following criteria:
#
# * They are not of type +text+.
# * They are defined for at least one of the classes being searched
# * They have the same data type for all of the classes being searched.
# * They have the same multiple flag for all of the classes being searched.
# * They have the same stored flag for all of the classes being searched.
#
# The restrictions available are the constants defined in the
# Sunspot::Restriction class. The standard restrictions are:
#
# with(:field_name).equal_to(value)
# with(:field_name, value) # shorthand for above
# with(:field_name).less_than(value)
# with(:field_name).greater_than(value)
# with(:field_name).between(value1..value2)
# with(:field_name).any_of([value1, value2, value3])
# with(:field_name).all_of([value1, value2, value3])
# without(some_instance) # exclude that particular instance
#
# +without+ can be substituted for +with+, causing the restriction to be
# negated. In the last example above, only +without+ works, as it does not
# make sense to search only for an instance you already have.
#
# Equality restrictions can take +nil+ as a value, which restricts the
# results to documents that have no value for the given field. Passing +nil+
# as a value to other restriction types is illegal. Thus:
#
# with(:field_name, nil) # ok
# with(:field_name).equal_to(nil) # ok
# with(:field_name).less_than(nil) # bad
#
# ==== Example
#
# Sunspot.search(Post) do
# keywords 'great pizza'
# with(:published_at).less_than Time.now
# with :blog_id, 1
# without current_post
# facet :category_ids
# order_by :published_at, :desc
# paginate 2, 15
# end
#
# If the block passed to #search takes an argument, that argument will
# present the DSL, and the block will be evaluated in the calling context.
# This will come in handy for building searches using instance data or
# methods, e.g.:
#
# Sunspot.search(Post) do |query|
# query.with(:blog_id, @current_blog.id)
# end
#
# See Sunspot::DSL::Search, Sunspot::DSL::Scope, Sunspot::DSL::FieldQuery
# and Sunspot::DSL::StandardQuery for the full API presented inside the
# block.
#
def search(*types, &block)
session.search(*types, &block)
end
def new_more_like_this(object, *types, &block)
session.new_more_like_this(object, *types, &block)
end
#
# Initiate a MoreLikeThis search. MoreLikeThis is a special type of search
# that finds similar documents using fulltext comparison. The fields to be
# compared are `text` fields set up with the `:more_like_this` option set to
# `true`. By default, more like this returns objects of the same type as the
# object used for comparison, but a list of types can optionally be passed
# to this method to return similar documents of other types. This will only
# work for types that have common fields.
#
# The DSL for MoreLikeThis search exposes several methods for setting
# options specific to this type of search. See the
# Sunspot::DSL::MoreLikeThis class and the MoreLikeThis documentation on
# the Solr wiki: http://wiki.apache.org/solr/MoreLikeThis
#
# MoreLikeThis searches have all of the same scoping, ordering, and faceting
# functionality as standard searches; the only thing you can't do in a MLT
# search is fulltext matching (since the MLT itself is a fulltext query).
#
# ==== Example
#
# post = Post.first
# Sunspot.more_like_this(post, Post, Page) do
# fields :title, :body
# with(:updated_at).greater_than(1.month.ago)
# facet(:category_ids)
# end
#
#
def more_like_this(object, *types, &block)
session.more_like_this(object, *types, &block)
end
# Remove objects from the index. Any time an object is destroyed, it must
# be removed from the index; otherwise, the index will contain broken
# references to objects that do not exist, which will cause errors when
# those objects are matched in search results.
#
# If a block is passed, it is evaluated as a search scope; in this way,
# documents can be removed by an arbitrary query. In this case, the
# arguments to the method should be the classes to run the query on.
#
# ==== Parameters
#
# objects...<Object>::
# Objects to remove from the index (may pass an array or varargs)
#
# ==== Example (remove a document)
#
# post.destroy
# Sunspot.remove(post)
#
# ==== Example (remove by query)
#
# Sunspot.remove(Post) do
# with(:created_at).less_than(Time.now - 14.days)
# end
#
def remove(*objects, &block)
session.remove(*objects, &block)
end
#
# Remove objects from the index and immediately commit. See Sunspot.remove
#
# ==== Parameters
#
# objects...<Object>:: Objects to remove from the index
#
def remove!(*objects, &block)
session.remove!(*objects, &block)
end
#
# Remove an object from the index using its class name and primary key.
# Useful if you know this information and want to remove an object without
# instantiating it from persistent storage
#
# ==== Parameters
#
# clazz<Class>:: Class of the object, or class name as a string or symbol
# id::
# Primary key of the object. This should be the same id that would be
# returned by the class's instance adapter.
#
def remove_by_id(clazz, *ids)
  # NOTE(review): the collected ids are deliberately passed as one Array
  # (not re-splatted) — confirm the session-side signature expects a list.
  session.remove_by_id(clazz, ids)
end
#
# Remove an object by class name and primary key, and immediately commit.
# See #remove_by_id and #commit
#
def remove_by_id!(clazz, *ids)
session.remove_by_id!(clazz, ids)
end
# Remove all objects of the given classes from the index. There isn't much
# use for this in general operations but it can be useful for maintenance,
# testing, etc. If no arguments are passed, remove everything from the
# index.
#
# ==== Parameters
#
# classes...<Class>::
# classes for which to remove all instances from the index (may pass an
# array or varargs)
#
# ==== Example
#
# Sunspot.remove_all(Post, Blog)
#
def remove_all(*classes)
session.remove_all(*classes)
end
#
# Remove all objects of the given classes from the index and immediately
# commit. See Sunspot.remove_all
#
# ==== Parameters
#
# classes...<Class>::
# classes for which to remove all instances from the index
def remove_all!(*classes)
session.remove_all!(*classes)
end
#
# Process all adds in a batch. Any Sunspot adds initiated inside the block
# will be sent in bulk when the block finishes. Useful if your application
# initiates index adds from various places in code as part of a single
# operation; doing a batch add will give better performance.
#
# ==== Example
#
# Sunspot.batch do
# post = Post.new
# Sunspot.index(post)
# comment = Comment.new
# Sunspot.index(comment)
# end
#
# Sunspot will send both the post and the comment in a single request.
#
def batch(&block)
session.batch(&block)
end
#
# True if documents have been added, updated, or removed since the last
# commit.
#
# ==== Returns
#
# Boolean:: Whether there have been any updates since the last commit
#
def dirty?
session.dirty?
end
#
# Sends a commit (soft or hard) if the session is dirty (see #dirty?).
#
def commit_if_dirty(soft_commit = false)
session.commit_if_dirty soft_commit
end
#
# True if documents have been removed since the last commit.
#
# ==== Returns
#
# Boolean:: Whether there have been any deletes since the last commit
#
def delete_dirty?
session.delete_dirty?
end
#
# Sends a commit if the session has deletes since the last commit (see #delete_dirty?).
#
def commit_if_delete_dirty(soft_commit = false)
session.commit_if_delete_dirty soft_commit
end
# Returns the configuration associated with the singleton session. See
# Sunspot::Configuration for details.
#
# ==== Returns
#
# LightConfig::Configuration:: configuration for singleton session
#
def config
session.config
end
#
# Resets the singleton session. This is useful for clearing out all
# static data between tests, but probably nowhere else.
#
# ==== Parameters
#
# keep_config<Boolean>::
# Whether to retain the configuration used by the current singleton
# session. Default false.
#
# Throw away the current singleton session and build a fresh one, optionally
# carrying the existing configuration over to the replacement session.
def reset!(keep_config = false)
  new_config = keep_config ? session.config : Configuration.build
  @session = Session.new(new_config)
end
#
# Get the singleton session, creating it if none yet exists.
#
# ==== Returns
#
# Sunspot::Session:: the singleton session
#
def session #:nodoc:
  # Lazily build the singleton session on first use (not thread-safe).
  @session ||= Session.new
end
end
end
|
import { Component, OnInit, Input } from '@angular/core';
import { Router } from '@angular/router';
import { AcDataGridModel } from 'svogv';
import { Base } from '../../../viewmodels/base';
/**
* A grid component that handles any sort of data decorated with SVOGV decorators.
*/
@Component({
  moduleId: module.id,
  selector: 'app-table',
  templateUrl: './table.component.html',
  styleUrls: ['./table.component.css']
})
export class TableComponent<T extends Base> implements OnInit {
  /** Grid model (rows plus column metadata) to render. */
  @Input() data: AcDataGridModel<T>;
  /** Base route segment used to build the edit/new/delete navigation URLs. */
  @Input() childRoute: string;
  // UI captions; overridable by the host component.
  @Input() searchText: string = 'Search';
  @Input() filterText: string = 'Search Item';
  @Input() removeColumnText: string = 'Remove';
  @Input() newItemText: string = 'New Item';
  // NOTE(review): default mentions "users" although the table is generic —
  // hosts should override; changing the default would alter rendered text.
  @Input() noItemsText: string = 'There are no users available.';
  constructor(public router: Router) {
  }
  ngOnInit() {
    // Intentionally empty: all state arrives via @Input bindings.
  }
  // Navigate to the edit view for the row ("machine" naming is legacy; the
  // names are referenced from the template, so they must not change).
  editMachine(data) {
    this.router.navigate([`/${this.childRoute}/edit/${data.id}`]);
  }
  // Navigate to the creation view.
  addMachine() {
    this.router.navigate([`/${this.childRoute}/new`]);
  }
  // Navigate to the delete-confirmation view for the row.
  removeMachine(data) {
    this.router.navigate([`/${this.childRoute}/delete/${data.id}`]);
  }
  // Row currently shown in the detail/confirmation modal.
  currentData: T;
  showModal(data) {
    this.currentData = data;
  }
}
|
/*
* Copyright 2017 mnn.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dk.netdesign.common.osgi.config.wicket;
import dk.netdesign.common.osgi.config.annotation.PropertyDefinition;
import java.io.Serializable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author mnn
*/
/**
 * Serializable factory resolving configuration items either by their
 * {@code PropertyDefinition}-annotated interface or by configuration ID.
 * Subclasses supply the actual retrieval strategy.
 */
public abstract class ConfigurationItemFactory implements Serializable{
    private static final Logger LOGGER = LoggerFactory.getLogger(ConfigurationItemFactory.class);

    /**
     * Returns the configuration item backing the given definition interface.
     *
     * @param configurationItemClass an interface annotated with {@link PropertyDefinition}
     * @throws IllegalArgumentException if the class is not such an interface
     */
    public <E> E getConfigurationItem(Class<E> configurationItemClass) {
        // FIX: parameterized logging instead of string concatenation, so the
        // message is only built when DEBUG is enabled.
        LOGGER.debug("Getting configuration item from {}", configurationItemClass);
        if (!configurationItemClass.isInterface() || !configurationItemClass.isAnnotationPresent(PropertyDefinition.class)) {
            // IllegalArgumentException (a RuntimeException subtype, so existing
            // callers catching RuntimeException are unaffected) describes the
            // bad-argument condition more precisely than a raw RuntimeException.
            throw new IllegalArgumentException("Could not create ConfigurationItemModel from " + configurationItemClass + ". Must be Interface and must be annotated with " + PropertyDefinition.class);
        }
        return retrieveConfigurationItem(configurationItemClass);
    }

    /** Returns the configuration item registered under the given ID. */
    public Object getConfigurationItem(String configurationID) {
        LOGGER.debug("Getting configuration item from ID {}", configurationID);
        return retrieveConfigurationItem(configurationID);
    }

    /** Retrieval strategy by definition interface; supplied by subclasses. */
    protected abstract <E> E retrieveConfigurationItem(Class<E> configurationItem);

    /** Retrieval strategy by configuration ID; supplied by subclasses. */
    protected abstract Object retrieveConfigurationItem(String configurationID);
}
|
#!/bin/bash
# Rebuild ../compile.js with the production closure-compiler wrapper, running
# from the directory containing this script regardless of the caller's cwd.
set -e
tmp=$(dirname "$0")
pushd "$tmp" > /dev/null
# FIX: -f so a missing compile.js (first build) no longer produces an error.
rm -f ../compile.js
node scripts/cc.js pro && chmod +x ../compile.js
popd > /dev/null
|
# Start docker, wait for the stack to come up, then run both test suites.
echo 'Starting docker'
sudo docker-compose up -d
echo 'Installing dependencies...'
# Fixed wait for containers to finish npm install.
# NOTE(review): a fixed sleep is fragile; polling a health endpoint would be
# more reliable — kept for behavioral parity.
sleep 150
# FIX: the original ended with an unconditional `exit 0`, which reported
# success even when the test suites failed. Propagate failures instead.
status=0
echo 'Integration test cases:'
docker exec -it transport_apis_server_1 npm test test/integrationTest.js || status=1
echo 'Unit test cases:'
docker exec -it transport_apis_server_1 npm test test/unitTest.js || status=1
exit $status
|
const path = require("/shared/path")
const guiconsole = require("/src/display/guiconsole")
const action = require("/shared/action")
// Handles mouse interaction with the game canvas: click-to-move pathing,
// hover tooltips, and the interval that steps the player along the path.
module.exports.MouseHandler = function (engine) {
  this.engine = engine
  // Current Dijkstra pathing state (path.Dij); undefined until a click.
  this.dij = undefined
  // Milliseconds between automatic movement steps.
  this.rate = 160
  // Translate a DOM mouse event into [screenX, screenY, worldX, worldY, player].
  // World coords and player are undefined when no player exists.
  this.eventParse = (event) => {
    const [x, y] = engine.display.eventToPosition(event)
    const player = engine.getPlayer()
    if (!player) return [x, y, undefined, undefined, undefined]
    // The viewport is centered on the player, hence the half-screen offset.
    return [x, y,
      player.x + x - Math.floor(this.engine.screen.width / 2),
      player.y + y - Math.floor(this.engine.screen.height / 2),
      player]
  }
  // Tile lookup restricted to what the player has seen; unseen tiles report 1.
  // NOTE(review): tile code semantics come from world.map — the code below
  // treats 2 as "obstructed"; confirm against the map module.
  this.knownTile = (x, y) => this.engine.world.seenMap.tiles.get(x, y) ? this.engine.world.map.tiles.get(x, y) : 1
  // Left-click: build a Dijkstra map toward the clicked tile and start walking.
  this.click = (event) => {
    const [, , adjX, adjY, player] = this.eventParse(event)
    if (!player) return
    //this is basically a demo... its not well done and shouldnt hang around
    if (this.knownTile(adjX, adjY) === 2) {
      this.engine.guiConsole.print(
        new guiconsole.ConsoleLine("that pathing goal is obstructed", [5, 3, 2], true)
      )
      return
    }
    // Search budget scales with straight-line distance to the goal.
    this.dij = new path.Dij(this.engine.world.map.width, this.knownTile, [
      new path.Cord(adjX, adjY)
    ],
      Math.round(path.distance(player, new path.Cord(adjX, adjY)) * 5)
    )
    this.startInterval()
    event.preventDefault()
  }
  // Lazily-created DOM node for the hover tooltip; undefined when hidden.
  this.tooltipWrap = undefined
  // Remove the tooltip when the pointer leaves the canvas (or the target).
  this.mouseout = () => {
    if (this.tooltipWrap) {
      this.tooltipWrap.remove()
      this.tooltipWrap = undefined
    }
  }
  // Show a name/HP tooltip for any visible entity under the cursor.
  this.mousemove = (event) => {
    //jank lol
    // let oldTip = document.querySelector(".tooltip")
    // if (oldTip) oldTip.remove()
    const [x, y, adjX, adjY,] = this.eventParse(event)
    // Only entities on lit tiles are inspectable.
    let entity = this.engine.world.getAnyAtConditional(adjX, adjY, (xx, yy) => this.engine.screen.lightMap.get(xx, yy) > 0.0)
    if (!entity) {
      this.mouseout()
      return
    }
    if (!this.tooltipWrap) {
      this.tooltipWrap = document.createElement("div")
      this.tooltipWrap.className = "tooltip"
      this.tooltipWrap.appendChild(document.createTextNode(""))
      let firstChild = document.body.firstChild
      firstChild.parentNode.insertBefore(this.tooltipWrap, firstChild)
    }
    this.tooltipWrap.childNodes[0].nodeValue = `${entity.name ? entity.name : entity.type} [${entity.hp}/${entity.maxHp}]`
    // Convert screen-cell coordinates into page pixels via the canvas rect.
    let tooltipProps = this.tooltipWrap.getBoundingClientRect()
    let frameProps = this.engine.display._backend._ctx.canvas.getBoundingClientRect()
    let toolX = (x * (frameProps.width / this.engine.screen.width)) + frameProps.left
    let toolY = (y * (frameProps.height / this.engine.screen.height)) + frameProps.top
    this.tooltipWrap.setAttribute("style", `top:${toolY - tooltipProps.height * 1.2}px; left:${toolX}px;`)
  }
  // One movement step: roll downhill on the Dijkstra map toward the goal.
  this.intervalFunc = () => {
    const player = engine.getPlayer()
    if (!player) return
    let moveCord = path.rollDown(this.dij.distance, new path.Cord(player.x, player.y), this.engine.world.entityAt)
    if (!moveCord) {
      this.stopInterval()
    } else {
      if (!this.engine.actionHandler.handle(
        new action.Move(moveCord.x, moveCord.y)
      )) {
        // Move was rejected: either the goal turned out obstructed, or the
        // map changed — recompute the path and try again next tick.
        let target = this.dij.goalArray[0]
        if (this.knownTile(target.x, target.y) === 2) {
          this.engine.guiConsole.print(
            new guiconsole.ConsoleLine("that pathing goal has been revealed to be obstructed", [3, 1, 2], true)
          )
          this.stopInterval()
        }
        this.dij.calc()
      }
    }
  }
  // Interval handle; false when no movement loop is running.
  this.interval = false
  this.stopInterval = () => { if (this.interval) clearInterval(this.interval) }
  this.startInterval = () => {
    // Clear any previous loop so clicks never stack intervals.
    this.stopInterval()
    this.interval = setInterval(this.intervalFunc, this.rate)
  }
}
|
<gh_stars>1-10
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import ImdbSvg from '@rsuite/icon-font/lib/legacy/Imdb';
// Wraps the raw SVG glyph in a standard icon component carrying its
// accessibility label and catalogue metadata (category/displayName).
const Imdb = createSvgIcon({
  as: ImdbSvg,
  ariaLabel: 'imdb',
  category: 'legacy',
  displayName: 'Imdb'
});
export default Imdb;
|
package db
import (
"database/sql"
"fmt"
)
// Connect opens a connection to the Postgres database, verifies it with a
// ping, and ensures the tasks table exists. It panics when the database is
// unreachable; table-creation failures are only logged.
//
// NOTE(review): sql.Open("postgres", ...) needs a registered driver (e.g. a
// blank import of a pq-style driver) somewhere in the program — confirm it is
// imported by another file of this package.
func Connect() {
	db, err := sql.Open("postgres", getSqlInfo())
	if err != nil {
		panic(err)
	}
	defer db.Close()
	// sql.Open does not dial; Ping verifies the connection actually works.
	err = db.Ping()
	if err != nil {
		panic(err)
	}
	fmt.Println("Successfully connected!")
	// IF NOT EXISTS makes Connect idempotent: re-running the program no
	// longer reports an error because the table already exists.
	createTaskTable :=
		`CREATE TABLE IF NOT EXISTS tasks(
		id SERIAL PRIMARY KEY,
		uuid UUID,
		start_at TIMESTAMP,
		target_url TEXT,
		interval_time INT,
		interval_type VARCHAR(15),
		send_at VARCHAR(10),
		exec_type VARCHAR(10),
		exec_body JSONB,
		exec_header JSONB,
		exec_limit int,
		immediately BOOL,
		continuous BOOL,
		cancelled BOOL,
		fire_count INT,
		successful_fire_count INT,
		last_fire TIMESTAMP,
		last_successful_fire TIMESTAMP)`
	_, err = db.Exec(createTaskTable)
	if err != nil {
		fmt.Println(err)
	}
}
|
package com.atjl.retry.eg;
/**
 * Simple mutable value holder for a single condition string.
 */
public class Cond {
    private String cond;

    /**
     * Creates a holder wrapping the given condition text.
     *
     * @param cond the condition text to store
     */
    public Cond(String cond) {
        this.cond = cond;
    }

    /** @return the stored condition text */
    public String getCond() {
        return cond;
    }

    /** @param cond the new condition text */
    public void setCond(String cond) {
        this.cond = cond;
    }

    @Override
    public String toString() {
        return "Cond{cond='" + cond + "'}";
    }
}
|
<reponame>felquis/unit
import TextInput from '../../../system/platform/component/value/TextInput/Component'
import { Dict } from '../../../types/Dict'
import classnames from '../../classnames'
import { Element } from '../../element'
import parentElement from '../../parentElement'
import { NONE } from '../../theme'
// Props accepted by SearchInput; all optional and forwarded to the
// underlying TextInput (style is merged over DEFAULT_STYLE).
export interface Props {
  className?: string
  style?: Dict<any>
  value?: string
  disabled?: boolean
  maxLength?: number
}
// Fixed input height in pixels; used to build DEFAULT_STYLE below.
const HEIGHT: number = 39
// Base look of the search input; callers may override any entry via the
// `style` prop.
const DEFAULT_STYLE = {
  height: `${HEIGHT}px`,
  width: '309px',
  color: 'currentColor',
  borderTopWidth: '0',
  borderTopStyle: 'solid',
  borderTopColor: 'currentColor',
  backgroundColor: NONE,
  borderRadius: '3px 3px 0 0',
  fontSize: '16px',
  textAlign: 'center',
}
export default class SearchInput extends Element<HTMLDivElement, Props> {
public _input: TextInput
constructor($props: Props) {
super($props)
const { className, style, disabled } = this.$props
const input = new TextInput({
className: classnames('search-input', className),
style: {
...DEFAULT_STYLE,
...style,
},
maxLength: 30,
disabled,
tabIndex: -1,
})
this._input = input
const $element = parentElement()
this.$element = $element
this.$slot['default'] = input.$slot['default']
this.registerRoot(input)
}
onPropChanged(prop: string, current: any) {
if (prop === 'style') {
this._input.setProp('style', { ...DEFAULT_STYLE, ...current })
} else if (prop === 'disabled') {
this._input.setProp('disabled', current)
} else if (prop === 'tabIndex') {
this._input.setProp('tabIndex', current)
} else if (prop === 'value') {
this._input.setProp('value', current)
}
}
setSelectionRange(
start: number,
end: number,
direction?: 'forward' | 'backward' | 'none' | undefined
) {
this._input.setSelectionRange(start, end, direction)
}
}
|
def print_odd_numbers(start, end):
    """Print each odd integer in the inclusive range [start, end], one per line."""
    # Jump straight to the first odd value, then step by two.
    first_odd = start if start % 2 != 0 else start + 1
    for number in range(first_odd, end + 1, 2):
        print(number)
|
from typing import List
from miyu_bot.commands.common.fuzzy_matching import romanize, FuzzyMatcher
def find_closest_romanized_match(target: str, string_list: List[str]) -> str:
    """Return the entry of ``string_list`` whose romanized form is closest to
    the romanized ``target``.

    Ties keep the earliest entry, and an empty list yields ``None`` —
    both matching the original loop-based implementation.
    """
    matcher = FuzzyMatcher()
    romanized_target = romanize(target)
    return min(
        string_list,
        key=lambda candidate: matcher.distance(romanized_target, romanize(candidate)),
        default=None,
    )
|
#!/bin/bash
#
# This script is run prior to deployment of component Dewey
# The working directory of this script is the deployment project folder.
#
#--------------------------------------------------------------------------
# Code generated by the SmartSoft MDSD Toolchain
# The SmartSoft Toolchain has been developed by:
#
# Service Robotics Research Center
# University of Applied Sciences Ulm
# Prittwitzstr. 10
# 89075 Ulm (Germany)
#
# Information about the SmartSoft MDSD Toolchain is available at:
# www.servicerobotik-ulm.de
#
# This file is generated once. Modify this file to your needs.
# If you want the toolchain to re-generate this file, please
# delete it before running the code generator.
#--------------------------------------------------------------------------
# The component-delivered predeploy script is sourced here (comment the next
# line out to disable it).
# NOTE(review): relies on the REFERENCED_PROJECT_OneUser environment variable
# being exported by the surrounding deployment tooling — confirm.
source $REFERENCED_PROJECT_OneUser/smartsoft/src/predeploy.sh
# Add SmartSoft libraries that shall be deployed to the target device where this
# component is to be deployed.
# These libraries will be searched in $SMART_ROOT/lib and then deployed
# Add one file per line.
#DEPLOY_LIBRARIES="$DEPLOY_LIBRARIES
#libSmartSickInterface.so
#"
# add custom code here. E.g. use this script to collect data-files and
# copy them to src/<COMPONENT>_data, etc.
|
# Use `hub` as our git wrapper:
# http://defunkt.github.com/hub/
#
# I use JRuby substantially, and we want to make sure hub is run using MRI
# regardless of which Ruby you're using or else the `git status` in your prompt
# will take seven thousand seconds to run `ls`.
#
# I'm hardcoding it to an installed rvm (using rvm's `rvm 1.8.7,ruby /hub/path`
# syntax is way too slow). It should work fine for those without rvm, though.
if [[ -s $HOME/.rvm/scripts/rvm ]]
then
  # $(which hub &> /dev/null) expands to nothing but carries the exit status
  # of `which hub`, so this effectively tests "hub is installed".
  if $(which hub &> /dev/null) && [[ -s $HOME/.rvm/rubies/ruby-1.8.7-p334 ]]
  then
    alias git='/usr/bin/ruby `which hub`'
  fi
  # Fixed: the original had an empty `else` branch ("else" immediately
  # followed by "fi"), which is a bash syntax error and aborted sourcing
  # this file.
fi
# The rest of my fun git aliases
alias gl='git pull --prune'
alias glog="git log --graph --pretty=format:'%Cred%h%Creset %an: %s - %Creset %C(yellow)%d%Creset %Cgreen(%cr)%Creset' --abbrev-commit --date=relative"
alias gp='git push'
alias gpom='git push origin master'
alias gphm='git push heroku master'
alias gd='git diff'
alias gc='git commit'
alias gca='git commit -a'
alias gco='git checkout'
alias gb='git branch'
alias gs='git status -sb' # upgrade your git if -sb breaks for you. it's fun.
alias grm="git status | grep deleted | awk '{print \$3}' | xargs git rm"
alias gcl="git clone"
|
#!/bin/sh
# Cross-compile the current module for darwin/amd64, linux (amd64, arm,
# arm64) and windows/amd64. Non-windows binaries are renamed to
# <dir>_<GOOS>_<GOARCH>; on windows `go build` already emits <dir>.exe.
# Fixed: the first line was "# /bin/sh" (a plain comment, not a shebang),
# so the script ran under whatever shell happened to invoke it.
# Stop at the first failed build or rename instead of shipping stale binaries.
set -e
echo "Building darwin"
GOOS=darwin GOARCH=amd64 go build
mv "${PWD##*/}" "${PWD##*/}_darwin_amd64"
echo "Building linux"
GOOS=linux GOARCH=amd64 go build
mv "${PWD##*/}" "${PWD##*/}_linux_amd64"
GOOS=linux GOARCH=arm go build
mv "${PWD##*/}" "${PWD##*/}_linux_arm"
GOOS=linux GOARCH=arm64 go build
mv "${PWD##*/}" "${PWD##*/}_linux_arm64"
echo "Building windows"
GOOS=windows GOARCH=amd64 go build
echo "Completed"
|
<filename>src/main/java/io/trillo/example/entity/Task.java
package io.trillo.example.entity;
/**
 * A single entry on a to-do list: an identifier, a short description,
 * a priority label, and a completion flag.
 */
public class Task {

    /** Unique id, formatted as {@code <class name>:<uuid>} (random Java UUID). */
    private String uid;

    /** Short, human-readable description of the task. */
    private String taskName;

    /** Priority label of the task. */
    private String priority;

    /** Whether the task is finished; new tasks start incomplete. */
    private boolean completed = false;

    /** @return the task's unique id */
    public String getUid() {
        return uid;
    }

    /** @param uid the unique id to assign */
    public void setUid(String uid) {
        this.uid = uid;
    }

    /** @return the short description of the task */
    public String getTaskName() {
        return taskName;
    }

    /** @param taskName the short description to assign */
    public void setTaskName(String taskName) {
        this.taskName = taskName;
    }

    /** @return the task's priority label */
    public String getPriority() {
        return priority;
    }

    /** @param priority the priority label to assign */
    public void setPriority(String priority) {
        this.priority = priority;
    }

    /** @return {@code true} when the task has been completed */
    public boolean isCompleted() {
        return completed;
    }

    /** @param completed the new completion state */
    public void setCompleted(boolean completed) {
        this.completed = completed;
    }
}
|
var path = require('path');
module.exports = function getResolve(ROOT_PATH) {
return {
resolve: {
modules: [
path.resolve(ROOT_PATH, 'app', 'components'),
path.resolve(ROOT_PATH, 'app', 'messages'),
path.resolve(ROOT_PATH, '..', 'common'),
path.resolve(ROOT_PATH, 'node_modules'),
'node_modules',
],
extensions: ['.js', '.coffee', '.sass', '.json', '.css'],
alias: {
'require.resolve': 'resolve',
},
},
resolveLoaders: {
modules: [
path.resolve(ROOT_PATH, 'node_modules'),
'node_modules',
],
},
}
};
|
package edu.washington.cse.instrumentation.analysis;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import soot.G;
import soot.MethodOrMethodContext;
import soot.PackManager;
import soot.PointsToAnalysis;
import soot.Scene;
import soot.SceneTransformer;
import soot.SootMethod;
import soot.Transform;
import soot.Unit;
import soot.jimple.Stmt;
import soot.jimple.spark.pag.PAG;
import soot.jimple.toolkits.callgraph.Edge;
import soot.jimple.toolkits.callgraph.Filter;
import soot.jimple.toolkits.callgraph.ReachableMethods;
import soot.jimple.toolkits.pointer.MemoryEfficientRasUnion;
import soot.jimple.toolkits.pointer.Union;
import soot.jimple.toolkits.pointer.UnionFactory;
import soot.util.queue.QueueReader;
import boomerang.preanalysis.NopTransformer;
import edu.washington.cse.instrumentation.analysis.utils.MethodResolver;
/**
 * Soot whole-jimple transform ("wjtp.ic-read") that runs the Legato
 * inconsistent-read analysis: it warms the ICFG, gathers call-graph
 * dispatch statistics, solves the at-most-once IFDS problem, and reports
 * warnings and (optionally) relevant sites.
 */
public class InconsistentReadAnalysis extends Transform {
	/** Convenience constructor with no model extension. */
	public InconsistentReadAnalysis(final AnalysisCompleteListener l) {
		this(l, new NullAnalysisModelExtension());
	}
	public InconsistentReadAnalysis(final AnalysisCompleteListener l, final AnalysisModelExtension ext) {
		super("wjtp.ic-read", new SceneTransformer() {
			@Override
			protected void internalTransform(final String phaseName, final Map<String, String> options) {
				// Optional wall-clock timing of the full transform.
				if(options.containsKey("time") && Boolean.parseBoolean(options.get("time"))) {
					Legato.totalTime.start();
				}
				final AnalysisConfiguration config = AnalysisConfiguration.parseConfiguration(options, ext);
				ext.postProcessScene();
				{
					// Free PAG working memory before the (memory-hungry) solve.
					final PointsToAnalysis pointsToAnalysis = Scene.v().getPointsToAnalysis();
					if(pointsToAnalysis instanceof PAG) {
						((PAG) pointsToAnalysis).cleanPAG();
					}
					if(!AnalysisConfiguration.VERY_QUIET) {
						System.out.println("Preloading method bodies and stripping bytecode...");
					}
					// Walk every reachable method: force unit graphs into the
					// ICFG cache, drop method sources, and collect dispatch
					// statistics (mono- vs. multi- vs. mega-morphic sites).
					final HashSet<Unit> megamorphicCallsites = new HashSet<>();
					final LegatoEdgePredicate synth = new LegatoEdgePredicate();
					final ReachableMethods rm = new ReachableMethods(Scene.v().getCallGraph(), Scene.v().getEntryPoints().iterator(), new Filter(synth));
					rm.update();
					int isMultiDispatch = 0;
					int noMultiDispatch = 0;
					int nMethods = 0;
					for(final QueueReader<MethodOrMethodContext> it = rm.listener(); it.hasNext(); ) {
						final SootMethod m = it.next().method();
						nMethods++;
						m.freeze();
						// Progress indicator every 1000 methods.
						if((nMethods % 1000) == 0 && !AnalysisConfiguration.VERY_QUIET) {
							System.out.println("  ..." + nMethods);
						}
						if(!m.hasActiveBody()) {
							continue;
						}
						config.icfg.getOrCreateUnitGraph(m);
						// Release the method source: the body is cached now.
						m.setSource(null);
						for(final Unit u : config.icfg.getCallsFromWithin(m)) {
							final Stmt s = (Stmt)u;
							final String methodName = s.getInvokeExpr().getMethod().getName();
							// Reflective class-loading calls are skipped.
							if(methodName.equals("forName") || methodName.equals("class$")) {
								continue;
							}
							final Collection<SootMethod> calls = config.icfg.getCalleesOfCallAt(u);
							if(calls.size() == 0) {
								continue;
							} else if(calls.size() == 1) {
								noMultiDispatch++;
							} else {
								// More than 5 targets: flag as mega-morphic.
								if(calls.size() > 5) {
									megamorphicCallsites.add(u);
								}
								isMultiDispatch++;
							}
						}
					}
					if(!AnalysisConfiguration.VERY_QUIET) {
						System.out.println("...Done");
						System.out.println("Call graph methods: " + nMethods);
						System.out.println(isMultiDispatch + " vs " + noMultiDispatch);
						for(final Unit u : megamorphicCallsites) {
							System.out.println("Mega-morphic call site: " + u + " in " + config.icfg.getMethodOf(u) + " callees: " + config.icfg.getCalleesOfCallAt(u).size());
						}
					}
				}
				final AtMostOnceProblem prob = new AtMostOnceProblem(config);
				final InconsistentReadSolver solver = new InconsistentReadSolver(prob, config);
				prob.setSolver(solver);
				try {
					solver.solve();
				} catch(final RuntimeException e) {
					// Unwrap chained RuntimeExceptions: if the root cause is the
					// analysis-specific EverythingIsInconsistentException,
					// rethrow that; otherwise rethrow the original exception.
					RuntimeException eIt = e;
					while(eIt.getCause() != null) {
						final Throwable t = eIt.getCause();
						if(t instanceof RuntimeException) {
							eIt = (RuntimeException) t;
						} else {
							throw eIt;
						}
					}
					if(eIt instanceof EverythingIsInconsistentException) {
						throw eIt;
					} else {
						throw e;
					}
				}
				l.analysisCompleted(solver, prob);
				// Warnings go to the configured log file, falling back to
				// stdout when the file cannot be opened.
				if(config.warnLog != null) {
					try(PrintStream ps = new PrintStream(new File(config.warnLog))) {
						prob.printWarnings(ps);
					} catch (final FileNotFoundException e) {
						prob.printWarnings(System.out);
					}
				} else {
					prob.printWarnings(System.out);
				}
				solver.finishHandler();
				if(config.trackSites) {
					// Best effort: site dump failures are deliberately ignored.
					try(PrintWriter pw = new PrintWriter(new File(config.siteLog != null ? config.siteLog : "sites.yml"))) {
						solver.dumpRelevantSites(pw);
					} catch (final FileNotFoundException e) { }
				}
				if(options.containsKey("time") && Boolean.parseBoolean(options.get("time"))) {
					Legato.totalTime.stop();
				}
			}
		});
		// NOTE(review): "ignore-file" appears twice in this declared-options
		// list — confirm whether that is intentional or a copy/paste slip.
		setDeclaredOptions("enabled resolver resolver-options pm pm-options sync-havoc summary-mode output " +
			"hofe output-opt warn-log track-all ignore-file track-sites site-log ignore-file stats time");
		setDefaultOptions("resolver:simple-get pm:simple sync-havoc:true output:console track-all:false track-sites:false stats:false time:false");
	}
	/**
	 * Standalone entry point: registers the transform (with a debug listener
	 * that dumps per-unit results), the nop/method-resolution body phases,
	 * and a memory-efficient points-to union factory, then delegates to Soot.
	 */
	public static void main(final String[] args) {
		PackManager.v().getPack("wjtp").add(new InconsistentReadAnalysis(new AnalysisCompleteListener() {
			@Override
			public void analysisCompleted(final InconsistentReadSolver solver, final AtMostOnceProblem problem) {
				// Collect every method touching the call graph (as src or tgt).
				final HashSet<SootMethod> x = new HashSet<>();
				{
					for(final Edge e : Scene.v().getCallGraph()) {
						final MethodOrMethodContext mc1 = e.getSrc();
						final MethodOrMethodContext mc2 = e.getTgt();
						assert mc1 instanceof SootMethod;
						assert mc2 instanceof SootMethod;
						x.add((SootMethod) mc1);
						x.add((SootMethod) mc2);
					}
				}
				// Dump solver results for every unit of every reachable method;
				// main is printed last, below.
				for(final SootMethod m : x) {
					if(m.equals(Scene.v().getMainMethod())) {
						continue;
					}
					System.out.println("!!! " + m.getSignature());
					if(!m.isConcrete()) {
						System.out.println("[[ NO BODY ]]");
						continue;
					}
					for(final Unit u : m.getActiveBody().getUnits()) {
						System.out.println(u.toString() + " -> " + solver.resultsAt(u));
					}
				}
				System.out.println("main");
				for(final Unit u : Scene.v().getMainMethod().getActiveBody().getUnits()) {
					System.out.println(u.toString() + " -> " + solver.resultsAt(u));
				}
				Legato.dumpData(solver, problem);
			}
		}));
		PackManager.v().getPack("jb").add(new Transform("jb.nop-adder", new NopTransformer()));
		PackManager.v().getPack("jb").add(new Transform("jb.mres", new MethodResolver()));
		G.v().Union_factory = new UnionFactory() {
			@Override
			public Union newUnion() {
				return new MemoryEfficientRasUnion();
			}
		};
		soot.Main.main(args);
	}
}
|
<gh_stars>0
/*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.batizhao.dp.initializr.extension.build.maven;
import io.spring.initializr.generator.buildsystem.maven.MavenBuild;
import io.spring.initializr.generator.spring.build.BuildCustomizer;
import io.spring.initializr.generator.version.Version;
import io.spring.initializr.generator.version.VersionParser;
import io.spring.initializr.generator.version.VersionRange;
import io.spring.initializr.metadata.Dependency;
import io.spring.initializr.metadata.InitializrMetadata;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
/**
 * A {@link BuildCustomizer} that excludes annotation-processor dependencies
 * from the repackaged Spring Boot archive when the build plugin cannot do so
 * itself (Boot >= 2.4.0-M3 already excludes the known processors).
 *
 * @author <NAME>
 */
class AnnotationProcessorExclusionBuildCustomizer implements BuildCustomizer<MavenBuild> {

	private static final VersionRange SPRING_BOOT_2_4_0_M3_OR_LATER = VersionParser.DEFAULT.parseRange("2.4.0-M3");

	private static final List<String> KNOWN_ANNOTATION_PROCESSORS = Collections
			.singletonList("configuration-processor");

	private final InitializrMetadata metadata;

	private final boolean hasSmartExclude;

	AnnotationProcessorExclusionBuildCustomizer(InitializrMetadata metadata, Version platformVersion) {
		this.metadata = metadata;
		this.hasSmartExclude = SPRING_BOOT_2_4_0_M3_OR_LATER.match(platformVersion);
	}

	@Override
	public void customize(MavenBuild build) {
		// Nothing to do unless the Boot repackaging plugin is present.
		if (!build.plugins().has("org.springframework.boot", "spring-boot-maven-plugin")) {
			return;
		}
		List<io.spring.initializr.generator.buildsystem.Dependency> exclusions = build.dependencies().ids()
				.filter(this::requiresExplicitExclusion)
				.map((id) -> build.dependencies().get(id))
				.collect(Collectors.toList());
		if (exclusions.isEmpty()) {
			return;
		}
		// Register one <exclude> entry per annotation-processor dependency.
		build.plugins().add("org.springframework.boot", "spring-boot-maven-plugin",
				(plugin) -> plugin.configuration((configuration) -> configuration.configure("excludes", (excludes) -> {
					for (io.spring.initializr.generator.buildsystem.Dependency dependency : exclusions) {
						excludes.add("exclude", (exclude) -> {
							exclude.add("groupId", dependency.getGroupId());
							exclude.add("artifactId", dependency.getArtifactId());
						});
					}
				})));
	}

	@Override
	public int getOrder() {
		return 5;
	}

	/**
	 * An annotation processor needs an explicit exclude unless the plugin's
	 * smart exclusion already covers it (known processors on Boot >= 2.4.0-M3).
	 */
	private boolean requiresExplicitExclusion(String id) {
		if (!isAnnotationProcessor(id)) {
			return false;
		}
		return !this.hasSmartExclude || !KNOWN_ANNOTATION_PROCESSORS.contains(id);
	}

	private boolean isAnnotationProcessor(String id) {
		Dependency dependency = this.metadata.getDependencies().get(id);
		return (dependency != null) && Dependency.SCOPE_ANNOTATION_PROCESSOR.equals(dependency.getScope());
	}
}
|
<gh_stars>1-10
package example
import (
"fmt"
"math/rand"
"net"
"os"
"reflect"
"testing"
"time"
"github.com/stretchr/testify/require"
"google.golang.org/grpc"
"golang.org/x/net/context"
"github.com/arcology-network/consensus-engine/libs/log"
tmnet "github.com/arcology-network/consensus-engine/libs/net"
abcicli "github.com/arcology-network/consensus-engine/abci/client"
"github.com/arcology-network/consensus-engine/abci/example/code"
"github.com/arcology-network/consensus-engine/abci/example/kvstore"
abciserver "github.com/arcology-network/consensus-engine/abci/server"
"github.com/arcology-network/consensus-engine/abci/types"
)
// Seed math/rand so socket file names generated below differ across runs.
// NOTE(review): rand.Seed is deprecated since Go 1.20 (the global generator
// is auto-seeded there) — confirm the toolchain version before changing it.
func init() {
	rand.Seed(time.Now().UnixNano())
}

// TestKVStore exercises the socket server/client pair against the KVStore app.
func TestKVStore(t *testing.T) {
	fmt.Println("### Testing KVStore")
	testStream(t, kvstore.NewApplication())
}

// TestBaseApp runs the same stream test against the no-op base application.
func TestBaseApp(t *testing.T) {
	fmt.Println("### Testing BaseApp")
	testStream(t, types.NewBaseApplication())
}

// TestGRPC exercises the gRPC transport with the base application.
func TestGRPC(t *testing.T) {
	fmt.Println("### Testing GRPC")
	testGRPCSync(t, types.NewGRPCApplication(types.NewBaseApplication()))
}
// testStream drives the ABCI socket transport: it starts a socket server for
// the given application, connects a client, pumps numDeliverTxs async
// DeliverTx requests through it, and waits (via the response callback) until
// every response has arrived.
func testStream(t *testing.T, app types.Application) {
	numDeliverTxs := 20000
	// Unique unix-socket path per run; removed on exit.
	socketFile := fmt.Sprintf("test-%08x.sock", rand.Int31n(1<<30))
	defer os.Remove(socketFile)
	socket := fmt.Sprintf("unix://%v", socketFile)
	// Start the listener
	server := abciserver.NewSocketServer(socket, app)
	server.SetLogger(log.TestingLogger().With("module", "abci-server"))
	if err := server.Start(); err != nil {
		require.NoError(t, err, "Error starting socket server")
	}
	t.Cleanup(func() {
		if err := server.Stop(); err != nil {
			t.Error(err)
		}
	})
	// Connect to the socket
	client := abcicli.NewSocketClient(socket, false)
	client.SetLogger(log.TestingLogger().With("module", "abci-client"))
	if err := client.Start(); err != nil {
		t.Fatalf("Error starting socket client: %v", err.Error())
	}
	t.Cleanup(func() {
		if err := client.Stop(); err != nil {
			t.Error(err)
		}
	})
	// done is closed (after a grace period) once all responses are counted.
	done := make(chan struct{})
	counter := 0
	// NOTE(review): this callback runs on the client's goroutine, where
	// t.Fatalf is documented as unsafe (it must be called from the test
	// goroutine) — confirm whether t.Error + return is intended here.
	client.SetResponseCallback(func(req *types.Request, res *types.Response) {
		// Process response
		switch r := res.Value.(type) {
		case *types.Response_DeliverTx:
			counter++
			if r.DeliverTx.Code != code.CodeTypeOK {
				t.Error("DeliverTx failed with ret_code", r.DeliverTx.Code)
			}
			if counter > numDeliverTxs {
				t.Fatalf("Too many DeliverTx responses. Got %d, expected %d", counter, numDeliverTxs)
			}
			if counter == numDeliverTxs {
				go func() {
					time.Sleep(time.Second * 1) // Wait for a bit to allow counter overflow
					close(done)
				}()
				return
			}
		case *types.Response_Flush:
			// ignore
		default:
			t.Error("Unexpected response type", reflect.TypeOf(res.Value))
		}
	})
	// Write requests
	for counter := 0; counter < numDeliverTxs; counter++ {
		// Send request
		reqRes := client.DeliverTxAsync(types.RequestDeliverTx{Tx: []byte("test")})
		_ = reqRes
		// check err ?
		// Sometimes send flush messages
		if counter%123 == 0 {
			client.FlushAsync()
			// check err ?
		}
	}
	// Send final flush message
	client.FlushAsync()
	<-done
}
//-------------------------
// test grpc
// dialerFunc adapts tmnet.Connect to the grpc.WithContextDialer signature;
// the context is intentionally unused because tmnet.Connect takes none.
func dialerFunc(ctx context.Context, addr string) (net.Conn, error) {
	return tmnet.Connect(addr)
}
// testGRPCSync drives the ABCI gRPC transport synchronously: it starts a
// gRPC server over a unix socket, dials it, and issues DeliverTx requests
// one at a time, checking each response code.
func testGRPCSync(t *testing.T, app types.ABCIApplicationServer) {
	numDeliverTxs := 2000
	socketFile := fmt.Sprintf("test-%08x.sock", rand.Int31n(1<<30))
	defer os.Remove(socketFile)
	socket := fmt.Sprintf("unix://%v", socketFile)
	// Start the listener
	server := abciserver.NewGRPCServer(socket, app)
	server.SetLogger(log.TestingLogger().With("module", "abci-server"))
	if err := server.Start(); err != nil {
		t.Fatalf("Error starting GRPC server: %v", err.Error())
	}
	t.Cleanup(func() {
		if err := server.Stop(); err != nil {
			t.Error(err)
		}
	})
	// Connect to the socket
	conn, err := grpc.Dial(socket, grpc.WithInsecure(), grpc.WithContextDialer(dialerFunc))
	if err != nil {
		t.Fatalf("Error dialing GRPC server: %v", err.Error())
	}
	t.Cleanup(func() {
		if err := conn.Close(); err != nil {
			t.Error(err)
		}
	})
	client := types.NewABCIApplicationClient(conn)
	// Write requests
	// NOTE(review): counter is incremented both by the loop post statement
	// and inside the body, so only ~numDeliverTxs/2 requests are sent and
	// the `counter == numDeliverTxs` branch is never reached for an even
	// total — confirm whether this is intended.
	for counter := 0; counter < numDeliverTxs; counter++ {
		// Send request
		response, err := client.DeliverTx(context.Background(), &types.RequestDeliverTx{Tx: []byte("test")})
		if err != nil {
			t.Fatalf("Error in GRPC DeliverTx: %v", err.Error())
		}
		counter++
		if response.Code != code.CodeTypeOK {
			t.Error("DeliverTx failed with ret_code", response.Code)
		}
		if counter > numDeliverTxs {
			t.Fatal("Too many DeliverTx responses")
		}
		t.Log("response", counter)
		if counter == numDeliverTxs {
			go func() {
				time.Sleep(time.Second * 1) // Wait for a bit to allow counter overflow
			}()
		}
	}
}
|
package GUI.controllers;
import GUI.UserHolder;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.control.ChoiceBox;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.TextField;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.input.MouseEvent;
import models.Book;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.Socket;
import java.net.URL;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.ResourceBundle;
/**
 * Controller for the "books" view: loads the current user's books from the
 * backend socket service and shows them in a table, with optional
 * category/phrase filtering.
 */
public class BooksController implements Initializable {
    @FXML
    private TableView<Book> booksTable;
    @FXML
    private TableColumn<Book, String> title;
    @FXML
    private TableColumn<Book, String> author;
    @FXML
    private TableColumn<Book, String> genre;
    @FXML
    private TableColumn<Book, LocalDate> publishDate;
    @FXML
    private TableColumn<Book, String> status;
    @FXML
    private ChoiceBox<String> choiceCategory;
    @FXML
    private TextField tfPhrase;

    /** Search categories offered to the user in the choice box. */
    private ObservableList<String> categories = FXCollections.observableArrayList("All", "Title", "Author", "Genre");
    private ObservableList<Book> data;

    @Override
    public void initialize(URL location, ResourceBundle resources) {
        choiceCategory.setValue("All");
        choiceCategory.setItems(categories);
        data = FXCollections.observableArrayList(getUsersBooks("All", null));
        showData(data);
    }

    /**
     * Fetches the current user's books from the backend over a socket.
     *
     * @param category "All" for every book, otherwise the field to filter on
     * @param phrase   search phrase (ignored when category is "All")
     * @return the books returned by the server, or an empty list on failure —
     *         never {@code null}, so callers can safely wrap the result in
     *         {@code FXCollections.observableArrayList(...)}
     */
    private ArrayList<Book> getUsersBooks(String category, String phrase) {
        // try-with-resources closes the socket and streams even on error;
        // the previous version leaked the socket on every call.
        try (Socket socket = new Socket("localhost", 4444);
             ObjectOutputStream outputStream = new ObjectOutputStream(socket.getOutputStream())) {
            switch (category) {
                case "All":
                    outputStream.writeObject("GET user books");
                    break;
                default:
                    outputStream.writeObject("GET user books with phrase");
                    outputStream.writeObject(category);
                    outputStream.writeObject(phrase);
                    break;
            }
            outputStream.writeObject(UserHolder.getInstance().getUser().getUsername());
            try (ObjectInputStream inputStream = new ObjectInputStream(socket.getInputStream())) {
                @SuppressWarnings("unchecked")
                ArrayList<Book> books = (ArrayList<Book>) inputStream.readObject();
                return books != null ? books : new ArrayList<>();
            }
        } catch (IOException | ClassNotFoundException exception) {
            exception.printStackTrace();
        }
        return new ArrayList<>();
    }

    /**
     * Handler for the search action: re-queries with the selected category
     * and phrase, then resets the controls to their defaults.
     */
    public void search(MouseEvent mouseEvent) {
        String category = choiceCategory.getValue();
        String phrase = tfPhrase.getText();
        data = FXCollections.observableArrayList(getUsersBooks(category, phrase));
        showData(data);
        choiceCategory.setValue("All");
        tfPhrase.setText("Phrase");
    }

    /** Binds the Book bean properties to the table columns and shows the rows. */
    private void showData(ObservableList<Book> data) {
        title.setCellValueFactory(new PropertyValueFactory<Book, String>("title"));
        author.setCellValueFactory(new PropertyValueFactory<Book, String>("author"));
        genre.setCellValueFactory(new PropertyValueFactory<Book, String>("genre"));
        publishDate.setCellValueFactory(new PropertyValueFactory<Book, LocalDate>("publishDate"));
        status.setCellValueFactory(new PropertyValueFactory<Book, String>("status"));
        booksTable.setItems(data);
    }
}
|
package main
import (
"fmt"
"os"
"path/filepath"
"plugin"
)
// check panics on any non-nil error; a no-op otherwise.
func check(err error) {
	if err == nil {
		return
	}
	panic(err)
}
// main loads every *.so from the "plugins" directory next to the executable,
// resolves each plugin's exported Hello symbol, and prints its greeting.
func main() {
	execPath, err := os.Executable()
	check(err)
	pattern := filepath.Join(filepath.Dir(execPath), "plugins", "*.so")
	matches, err := filepath.Glob(pattern)
	check(err)
	for _, soPath := range matches {
		fmt.Printf("Loading %s\n", soPath)
		plug, err := plugin.Open(soPath)
		check(err)
		sym, err := plug.Lookup("Hello")
		check(err)
		// Hello is expected to be a func() string; a wrong type panics here.
		greeting := sym.(func() string)
		fmt.Println(greeting())
	}
}
|
package products.pasta;
import java.util.Arrays;
/**
 * Tagliatelle carbonara menu item; all product attributes are inherited
 * from {@code Pasta} and initialised in the constructor.
 */
public class Carbonara extends Pasta {

    public Carbonara() {
        // Assignments are independent; grouped by identity then recipe.
        pastaType = PastaType.TAGIATELLE;
        name = "Carbonara";
        ingredients = Arrays.asList("bacon", "eggs", "parmesan", "garlic");
        price = 23.50;
    }
}
|
const express = require('express');
const cors = require('cors')
const helmet = require('helmet');
const userAuthRouter = require('./auth/auth-user-router');
const usersRouter = require('./users/users-router');
const recipeRouter = require('./recipes/recipes-router');
const restricted = require('./middleware/restricted');
const IngredientsRouter = require('./ingredients/ingredients-router');
const InstructionsRouter = require('./instructions/instructions-router');
const server = express();
// Security headers, JSON body parsing, and permissive CORS for all routes.
server.use(helmet());
server.use(express.json());
server.use(cors());
//Login route
server.use('/api/user', userAuthRouter);
//Get all Users
// NOTE(review): this route is not wrapped in `restricted` despite the
// /api/auth prefix — confirm it is meant to be publicly accessible.
server.use('/api/auth/users', usersRouter);
//GET/ADD recipe
server.use('/api/auth/recipes', restricted, recipeRouter);
//Ingredients
server.use('/api/auth/ingredients', restricted, IngredientsRouter);
//Instructions
// NOTE(review): unlike recipes/ingredients, instructions has no `restricted`
// middleware — confirm whether that omission is intentional.
server.use('/api/auth/instructions', InstructionsRouter);
// Health-check / sanity route.
server.get('/', (req, res) => {
  res.status(200).json({message: "All Clear For Takeoff, Star Fox"})
});
module.exports = server;
|
function findClosestSum(arr, target) {
let minSum = arr[0] + arr[1] + arr[2];
let minSumIndex = [0, 1, 2];
for (let i = 0; i < arr.length; i++) {
for (let j = i+1; j < arr.length; j++) {
for (let k = j+1; k < arr.length; k++) {
const sum = arr[i] + arr[j] + arr[k];
if (Math.abs(target - sum) < Math.abs(target - minSum)) {
minSum = sum;
minSumIndex = [i, j, k];
}
}
}
}
return [arr[minSumIndex[0]], arr[minSumIndex[1]], arr[minSumIndex[2]]];
}
const array = [2, 5, 6, 10, 15];
const target = 17;
console.log(findClosestSum(array, target));
// Output: [6, 10, 15]
|
#!/bin/bash
# Stage the Lambda bundle produced by bundle.sh to an S3 bucket.
S3_BUCKET=$1
#Path with trailing /
S3_PATH=$2
if [ "${S3_BUCKET}" == "" ]; then
	echo "Usage: $0 S3_BUCKET [S3_PATH]"
	echo "  where S3_BUCKET is the S3 bucket to upload resources to and S3_PATH is optional path but if specified must have a trailing '/'"
	exit 1
fi
# bundle.sh builds the package and sets PACKAGE_FILE (and, presumably,
# S3PUBLIC — TODO confirm it is defined there).
source ./bundle.sh
TARGET=s3://${S3_BUCKET}/${S3_PATH}aws-lambda/${PACKAGE_FILE}
echo "Staging bundle to S3 at $TARGET"
# NOTE(review): $S3PUBLIC is deliberately unquoted so an empty value adds no
# argument; it is expected to carry optional aws-cli flags — confirm.
aws s3 cp ${PACKAGE_FILE} $TARGET $S3PUBLIC
|
<reponame>hongdongni/swt-bling<filename>src/test/java/com/readytalk/swt/widgets/notifications/BubbleRegistryTest.java
package com.readytalk.swt.widgets.notifications;
import org.junit.Assert;
import org.junit.Before;
import org.eclipse.swt.widgets.Composite;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
/**
*/
public class BubbleRegistryTest {
  // Mockito-created collaborators, initialised in setup().
  @Mock
  private Composite composite;
  @Mock
  private Bubble bubble;

  @Before
  public void setup() {
    MockitoAnnotations.initMocks(this);
  }

  // Registering a composite makes it findable.
  @Test
  public void test_Register_Composite() {
    BubbleRegistry bubbleRegistry = new BubbleRegistry();
    bubbleRegistry.register(composite, bubble);
    Assert.assertNotNull(bubbleRegistry.findRegistrant(composite));
  }

  // Registering with a tag records the tag both on the registrant and in the
  // registry's tag map.
  @Test
  public void test_Register_Composite_With_Tags() {
    BubbleRegistry bubbleRegistry = new BubbleRegistry();
    bubbleRegistry.register(composite, bubble, BubbleTag.NEW);
    BubbleRegistry.BubbleRegistrant registrant = bubbleRegistry.findRegistrant(composite);
    Assert.assertNotNull(registrant);
    if(registrant != null) {
      Assert.assertTrue(registrant.getTags().contains(BubbleTag.NEW));
    }
    Assert.assertNotNull(bubbleRegistry.tagMap.get(BubbleTag.NEW.getText()));
  }

  // Tags can be added after registration.
  @Test
  public void test_Add_Tag() {
    BubbleRegistry bubbleRegistry = new BubbleRegistry();
    bubbleRegistry.register(composite, bubble);
    BubbleRegistry.BubbleRegistrant registrant = bubbleRegistry.findRegistrant(composite);
    Assert.assertEquals(registrant.getTags().size(), 0);
    bubbleRegistry.addTags(composite, BubbleTag.NEW);
    Assert.assertNotNull(bubbleRegistry.tagMap.get(BubbleTag.NEW.getText()));
    Assert.assertTrue(registrant.getTags().contains(BubbleTag.NEW));
  }

  // Adding a tag to an unregistered composite must be a silent no-op.
  @Test
  public void test_Add_Tag_Without_Registering() {
    BubbleRegistry bubbleRegistry = new BubbleRegistry();
    bubbleRegistry.addTags(composite, BubbleTag.NEW);
    BubbleRegistry.BubbleRegistrant registrant = bubbleRegistry.findRegistrant(composite);
    Assert.assertNull(registrant);
    Assert.assertFalse(bubbleRegistry.tagMap.containsKey(BubbleTag.NEW.getText()));
  }

  // Unregistering removes the registrant entirely.
  @Test
  public void test_Unregister_Composite() {
    BubbleRegistry bubbleRegistry = new BubbleRegistry();
    bubbleRegistry.register(composite, bubble);
    Assert.assertNotNull(bubbleRegistry.findRegistrant(composite));
    bubbleRegistry.unregister(composite);
    Assert.assertNull(bubbleRegistry.findRegistrant(composite));
  }
@Test
public void test_Remove_Tag() {
BubbleRegistry bubbleRegistry = new BubbleRegistry();
bubbleRegistry.register(composite, bubble);
BubbleRegistry.BubbleRegistrant registrant = bubbleRegistry.findRegistrant(composite);
Assert.assertEquals(registrant.getTags().size(), 0);
bubbleRegistry.addTags(composite, BubbleTag.NEW);
Assert.assertNotNull(bubbleRegistry.tagMap.get(BubbleTag.NEW.getText()));
Assert.assertTrue(registrant.getTags().contains(BubbleTag.NEW));
bubbleRegistry.removeTags(registrant, BubbleTag.NEW);
Assert.assertFalse(registrant.getTags().contains(BubbleTag.NEW));
}
}
|
// UI widget/module kinds; member names are Chinese UI terms and are part of
// the public interface, so they are documented (not renamed) here.
export enum MODULES_TYPE {
开关,   // switch
开关组, // switch group
进度条, // progress bar
按钮,   // button
按钮组, // button group
图表,   // chart
}
|
/*
* This file is generated by jOOQ.
*/
package io.cattle.platform.core.model.tables;
import io.cattle.platform.core.model.CattleTable;
import io.cattle.platform.core.model.Keys;
import io.cattle.platform.core.model.tables.records.StoragePoolRecord;
import io.cattle.platform.db.jooq.converter.DataConverter;
import io.cattle.platform.db.jooq.converter.DateConverter;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import javax.annotation.Generated;
import org.jooq.Field;
import org.jooq.ForeignKey;
import org.jooq.Identity;
import org.jooq.Schema;
import org.jooq.Table;
import org.jooq.TableField;
import org.jooq.UniqueKey;
import org.jooq.impl.TableImpl;
/**
 * This class is generated by jOOQ.
 */
// NOTE: generated code — do not edit by hand; re-run the jOOQ code
// generator against the schema instead.
@Generated(
value = {
"http://www.jooq.org",
"jOOQ version:3.9.3"
},
comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class StoragePoolTable extends TableImpl<StoragePoolRecord> {
private static final long serialVersionUID = 70223977;
/**
 * The reference instance of <code>cattle.storage_pool</code>
 */
public static final StoragePoolTable STORAGE_POOL = new StoragePoolTable();
/**
 * The class holding records for this type
 */
@Override
public Class<StoragePoolRecord> getRecordType() {
return StoragePoolRecord.class;
}
/**
 * The column <code>cattle.storage_pool.id</code>.
 */
public final TableField<StoragePoolRecord, Long> ID = createField("id", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");
/**
 * The column <code>cattle.storage_pool.name</code>.
 */
public final TableField<StoragePoolRecord, String> NAME = createField("name", org.jooq.impl.SQLDataType.VARCHAR.length(255), this, "");
/**
 * The column <code>cattle.storage_pool.kind</code>.
 */
public final TableField<StoragePoolRecord, String> KIND = createField("kind", org.jooq.impl.SQLDataType.VARCHAR.length(255).nullable(false), this, "");
/**
 * The column <code>cattle.storage_pool.uuid</code>.
 */
public final TableField<StoragePoolRecord, String> UUID = createField("uuid", org.jooq.impl.SQLDataType.VARCHAR.length(128).nullable(false), this, "");
/**
 * The column <code>cattle.storage_pool.description</code>.
 */
public final TableField<StoragePoolRecord, String> DESCRIPTION = createField("description", org.jooq.impl.SQLDataType.VARCHAR.length(1024), this, "");
/**
 * The column <code>cattle.storage_pool.state</code>.
 */
public final TableField<StoragePoolRecord, String> STATE = createField("state", org.jooq.impl.SQLDataType.VARCHAR.length(128).nullable(false), this, "");
/**
 * The column <code>cattle.storage_pool.created</code>.
 */
public final TableField<StoragePoolRecord, Date> CREATED = createField("created", org.jooq.impl.SQLDataType.TIMESTAMP, this, "", new DateConverter());
/**
 * The column <code>cattle.storage_pool.removed</code>.
 */
public final TableField<StoragePoolRecord, Date> REMOVED = createField("removed", org.jooq.impl.SQLDataType.TIMESTAMP, this, "", new DateConverter());
/**
 * The column <code>cattle.storage_pool.remove_time</code>.
 */
public final TableField<StoragePoolRecord, Date> REMOVE_TIME = createField("remove_time", org.jooq.impl.SQLDataType.TIMESTAMP, this, "", new DateConverter());
/**
 * The column <code>cattle.storage_pool.data</code>.
 */
public final TableField<StoragePoolRecord, Map<String,Object>> DATA = createField("data", org.jooq.impl.SQLDataType.CLOB, this, "", new DataConverter());
/**
 * The column <code>cattle.storage_pool.physical_total_size_mb</code>.
 */
public final TableField<StoragePoolRecord, Long> PHYSICAL_TOTAL_SIZE_MB = createField("physical_total_size_mb", org.jooq.impl.SQLDataType.BIGINT, this, "");
/**
 * The column <code>cattle.storage_pool.virtual_total_size_mb</code>.
 */
public final TableField<StoragePoolRecord, Long> VIRTUAL_TOTAL_SIZE_MB = createField("virtual_total_size_mb", org.jooq.impl.SQLDataType.BIGINT, this, "");
/**
 * The column <code>cattle.storage_pool.external</code>.
 */
public final TableField<StoragePoolRecord, Boolean> EXTERNAL = createField("external", org.jooq.impl.SQLDataType.BIT.nullable(false).defaultValue(org.jooq.impl.DSL.inline("b'0'", org.jooq.impl.SQLDataType.BIT)), this, "");
/**
 * The column <code>cattle.storage_pool.agent_id</code>.
 */
public final TableField<StoragePoolRecord, Long> AGENT_ID = createField("agent_id", org.jooq.impl.SQLDataType.BIGINT, this, "");
/**
 * The column <code>cattle.storage_pool.zone_id</code>.
 */
public final TableField<StoragePoolRecord, Long> ZONE_ID = createField("zone_id", org.jooq.impl.SQLDataType.BIGINT, this, "");
/**
 * The column <code>cattle.storage_pool.external_id</code>.
 */
public final TableField<StoragePoolRecord, String> EXTERNAL_ID = createField("external_id", org.jooq.impl.SQLDataType.VARCHAR.length(128), this, "");
/**
 * The column <code>cattle.storage_pool.driver_name</code>.
 */
public final TableField<StoragePoolRecord, String> DRIVER_NAME = createField("driver_name", org.jooq.impl.SQLDataType.VARCHAR.length(255), this, "");
/**
 * The column <code>cattle.storage_pool.volume_access_mode</code>.
 */
public final TableField<StoragePoolRecord, String> VOLUME_ACCESS_MODE = createField("volume_access_mode", org.jooq.impl.SQLDataType.VARCHAR.length(255), this, "");
/**
 * The column <code>cattle.storage_pool.storage_driver_id</code>.
 */
public final TableField<StoragePoolRecord, Long> STORAGE_DRIVER_ID = createField("storage_driver_id", org.jooq.impl.SQLDataType.BIGINT, this, "");
/**
 * The column <code>cattle.storage_pool.cluster_id</code>.
 */
public final TableField<StoragePoolRecord, Long> CLUSTER_ID = createField("cluster_id", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");
/**
 * Create a <code>cattle.storage_pool</code> table reference
 */
public StoragePoolTable() {
this("storage_pool", null);
}
/**
 * Create an aliased <code>cattle.storage_pool</code> table reference
 */
public StoragePoolTable(String alias) {
this(alias, STORAGE_POOL);
}
private StoragePoolTable(String alias, Table<StoragePoolRecord> aliased) {
this(alias, aliased, null);
}
private StoragePoolTable(String alias, Table<StoragePoolRecord> aliased, Field<?>[] parameters) {
super(alias, null, aliased, parameters, "");
}
/**
 * {@inheritDoc}
 */
@Override
public Schema getSchema() {
return CattleTable.CATTLE;
}
/**
 * {@inheritDoc}
 */
@Override
public Identity<StoragePoolRecord, Long> getIdentity() {
return Keys.IDENTITY_STORAGE_POOL;
}
/**
 * {@inheritDoc}
 */
@Override
public UniqueKey<StoragePoolRecord> getPrimaryKey() {
return Keys.KEY_STORAGE_POOL_PRIMARY;
}
/**
 * {@inheritDoc}
 */
@Override
public List<UniqueKey<StoragePoolRecord>> getKeys() {
return Arrays.<UniqueKey<StoragePoolRecord>>asList(Keys.KEY_STORAGE_POOL_PRIMARY, Keys.KEY_STORAGE_POOL_IDX_STORAGE_POOL_UUID);
}
/**
 * {@inheritDoc}
 */
@Override
public List<ForeignKey<StoragePoolRecord, ?>> getReferences() {
return Arrays.<ForeignKey<StoragePoolRecord, ?>>asList(Keys.FK_STORAGE_POOL__AGENT_ID, Keys.FK_STORAGE_DRIVER__ID, Keys.FK_STORAGE_POOL__CLUSTER_ID);
}
/**
 * {@inheritDoc}
 */
@Override
public StoragePoolTable as(String alias) {
return new StoragePoolTable(alias, this);
}
/**
 * Rename this table
 */
@Override
public StoragePoolTable rename(String name) {
return new StoragePoolTable(name, null);
}
}
|
#!/usr/bin/env bash
# BEGIN subcommand functions
# Package names used by the per-OS installers below.
VB="virtualbox"
VG="vagrant"
VM="vagrant-manager"
# Fetch/refresh the SCION Vagrant box, boot the VM, and drop into an SSH shell.
run_vagrant() {
echo "[SCIONLabVM] run vagrant"
vagrant box add scion/ubuntu-16.04-64-scion
vagrant box update
vagrant up
vagrant ssh
}
# macOS setup: install Homebrew if absent, then VirtualBox, Vagrant and
# vagrant-manager via `brew cask` unless already present, then start the VM.
run_osx() {
echo "[SCIONLabVM] Given system: OSX"
if ! type "brew" > /dev/null; then
echo "[SCIONLabVM] Now installing Homebrew"
ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
fi
# A package may already exist either as a native .pkg (pkgutil) or a brew cask.
for pkg in $VB $VG $VM; do
if pkgutil --pkgs=$pkg > /dev/null; then
echo "[SCIONLabVM] $pkg is already installed"
elif brew cask ls $pkg > /dev/null; then
echo "[SCIONLabVM] $pkg is already installed"
else
echo "[SCIONLabVM] Installing $pkg"
brew cask install --force $pkg
fi
done
run_vagrant
}
# verleq A B — exit 0 when version A <= version B under GNU version-number
# ordering (sort -V), e.g. `verleq 1.9 2.0.8` succeeds.
verleq() {
[ "$1" = "$(printf '%s\n%s\n' "$1" "$2" | sort -V | head -n1)" ]
}
# Linux setup: supports apt/dpkg distributions only. Checks dpkg's selection
# list for VirtualBox and Vagrant, prompting interactively before installing
# either, then starts the VM.
run_linux() {
if [[ -f "/usr/bin/apt-get" && -f "/usr/bin/dpkg" ]]
then
echo "[SCIONLabVM] Given system: LINUX"
# "<pkg> ... install" in `dpkg --get-selections` means already installed.
if dpkg --get-selections | grep -q "^$VB.*[[:space:]]\{1,\}install$" >/dev/null; then
echo "[SCIONLabVM] $VB is already installed"
else
# Loop until the user answers y or n; any other input re-prompts.
while true; do
read -p "[SCIONLabVM] Do you want to install/upgrade $VB now? If no, it will terminate SCIONLabVM immediately. [y/n]" yesno
case $yesno in
[Yy]*)
echo "[SCIONLabVM] Installing $VB"
sudo apt-get --no-remove --yes install virtualbox
break;;
[Nn]*) echo "[SCIONLabVM] Closing SCIONLabVM installation."; exit 1;;
*) ;;
esac
done
fi
if dpkg --get-selections | grep -q "^$VG.*[[:space:]]\{1,\}install$" >/dev/null; then
echo "[SCIONLabVM] $VG is already installed"
else
while true; do
read -p "[SCIONLabVM] Do you want to install/upgrade $VG now? If no, it will terminate SCIONLabVM immediately. [y/n]" yesno
case $yesno in
[Yy]*)
echo "[SCIONLabVM] Installing $VG"
sudo apt-get --no-remove --yes install $VG
break;;
[Nn]*) echo "[SCIONLabVM] Closing SCIONLabVM installation."; exit 1;;
*) ;;
esac
done
fi
run_vagrant
else
echo "Currently, this script does not support your linux distribution."
echo "Please follow the instructions in the README file to run the SCIONLab AS."
fi
}
# Dispatch on the host OS reported by $OSTYPE; anything that is not
# macOS or Linux is unsupported.
case "$OSTYPE" in
darwin*) run_osx ;;
linux*) run_linux ;;
*)
echo "Currently, this script does not support $OSTYPE system."
echo "Please follow the instructions in the README file to run the SCIONLab AS." ;;
esac
|
"use strict";
// TypeScript-emitted prototypal-inheritance helper: copies static members
// from base `b` onto derived `d`, then chains the prototypes.
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var BaseConsoleButton_1 = require("../BaseConsoleButton");
var index_1 = require("flibs/dist/index");
var index_2 = require("fcore/dist/index");
var CaptureKeyButtonEvent_1 = require("./CaptureKeyButtonEvent");
var FC_1 = require("../../FC");
// Generated JS (compiled TypeScript) — prefer editing the .ts source.
// Console button that, once clicked, captures the next key press, and
// afterwards dispatches CAPTURE_KEY_PRESS whenever that key is pressed again.
var CaptureKeyButton = (function (_super) {
__extends(CaptureKeyButton, _super);
function CaptureKeyButton() {
_super.call(this);
}
CaptureKeyButton.prototype.construction = function () {
_super.prototype.construction.call(this);
};
CaptureKeyButton.prototype.addListeners = function () {
_super.prototype.addListeners.call(this);
// NOTE(review): `this.onKeyPress` is passed unbound — presumably
// eventListenerHelper binds listeners to its owner; confirm.
this.eventListenerHelper.addEventListener(index_1.InputManager.instance, index_1.InputManagerEvent.KEY_PRESS, this.onKeyPress);
};
CaptureKeyButton.prototype.onClick = function () {
_super.prototype.onClick.call(this);
// Each click toggles capture mode.
this.isClicked = !this.isClicked;
};
CaptureKeyButton.prototype.onKeyPress = function (data) {
if (this.isClicked) {
// Capture mode: remember this key (code + printable char) and leave capture mode.
this.isClicked = false;
this.captureCode = index_2.KeyboardTools.getCharCodeFromKeyPressEvent(data.nativeEvent);
this.captureKey = index_2.KeyboardTools.getCharFromKeyPressEvent(data.nativeEvent);
this.commitData();
}
else if (this.captureCode) {
// Normal mode: re-dispatch when the previously captured key is pressed.
// ("CaptuerKeyButtonEvent" is a typo in the imported module's export name —
// fix it at its definition site, not here.)
if (index_2.KeyboardTools.getCharCodeFromKeyPressEvent(data.nativeEvent) == this.captureCode) {
this.dispatchEvent(CaptureKeyButtonEvent_1.CaptuerKeyButtonEvent.CAPTURE_KEY_PRESS);
}
}
};
CaptureKeyButton.prototype.commitData = function () {
_super.prototype.commitData.call(this);
// Label reflects current state: "press a key" prompt, the captured key,
// or a help text when nothing has been captured yet.
if (this.isClicked) {
this.label = FC_1.FC.config.localization.captureKeyBtnPressedLabel;
}
else if (this.captureKey) {
this.label = index_2.StringTools.substituteList(FC_1.FC.config.localization.captureKeyBtnNormalLabel, this.captureKey);
}
else {
this.label = index_2.StringTools.substituteList(FC_1.FC.config.localization.captureKeyBtnNormalLabel, FC_1.FC.config.localization.captureKeyBtnNoKeyHelpText);
}
};
CaptureKeyButton.prototype.arrange = function () {
_super.prototype.arrange.call(this);
};
Object.defineProperty(CaptureKeyButton.prototype, "isClicked", {
get: function () {
return this._isClicked;
},
// Setter short-circuits when unchanged, then refreshes the label.
set: function (value) {
if (value == this.isClicked) {
return;
}
this._isClicked = value;
this.commitData();
},
enumerable: true,
configurable: true
});
return CaptureKeyButton;
}(BaseConsoleButton_1.BaseConsoleButton));
exports.CaptureKeyButton = CaptureKeyButton;
//# sourceMappingURL=CaptureKeyButton.js.map
|
<filename>documentation/manual/javaGuide/main/tests/code/javaguide/tests/MockitoTest.java<gh_stars>1-10
package javaguide.tests;
import static org.junit.Assert.*;
//#test-mockito-import
import static org.mockito.Mockito.*;
//#test-mockito-import
import java.util.List;
import org.junit.Test;
/**
 * Demonstrates basic Mockito usage (mock creation, stubbing, verification)
 * for the Play documentation. The {@code //#test-mockito} markers delimit the
 * snippet extracted into the manual — keep the code between them unchanged.
 */
public class MockitoTest {
@Test
public void testMockList() {
//#test-mockito
// Create and train mock
List<String> mockedList = mock(List.class);
when(mockedList.get(0)).thenReturn("first");
// check value
assertEquals("first", mockedList.get(0));
// verify interaction
verify(mockedList).get(0);
//#test-mockito
}
}
|
package awx
import "fmt"
// InventoryUpdatesService implements awx inventory_update apis.
type InventoryUpdatesService struct {
client *Client
}
// GetInventoryUpdate retrieves the inventory_update information from its ID or Name.
func (i *InventoryUpdatesService) GetInventoryUpdate(id int, params map[string]string) (*InventoryUpdate, error) {
update := new(InventoryUpdate)
endpoint := fmt.Sprintf("/api/v2/inventory_updates/%d/", id)
// GetJSON unmarshals the response body into `update` and returns the raw response.
resp, err := i.client.Requester.GetJSON(endpoint, update, params)
if err != nil {
return nil, err
}
if err := CheckResponse(resp); err != nil {
return nil, err
}
return update, nil
}
|
<filename>src/org/sosy_lab/cpachecker/cpa/conditions/path/PathConditionsCPA.java
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2014 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.cpa.conditions.path;
import java.util.Collection;
import java.util.Collections;
import org.sosy_lab.common.configuration.ClassOption;
import org.sosy_lab.common.configuration.Configuration;
import org.sosy_lab.common.configuration.IntegerOption;
import org.sosy_lab.common.configuration.InvalidConfigurationException;
import org.sosy_lab.common.configuration.Option;
import org.sosy_lab.common.configuration.Options;
import org.sosy_lab.cpachecker.cfa.model.CFAEdge;
import org.sosy_lab.cpachecker.cfa.model.CFANode;
import org.sosy_lab.cpachecker.core.defaults.AutomaticCPAFactory;
import org.sosy_lab.cpachecker.core.defaults.FlatLatticeDomain;
import org.sosy_lab.cpachecker.core.defaults.MergeSepOperator;
import org.sosy_lab.cpachecker.core.defaults.SingleEdgeTransferRelation;
import org.sosy_lab.cpachecker.core.defaults.StopAlwaysOperator;
import org.sosy_lab.cpachecker.core.interfaces.AbstractDomain;
import org.sosy_lab.cpachecker.core.interfaces.AbstractState;
import org.sosy_lab.cpachecker.core.interfaces.CPAFactory;
import org.sosy_lab.cpachecker.core.interfaces.ConfigurableProgramAnalysisWithBAM;
import org.sosy_lab.cpachecker.core.interfaces.MergeOperator;
import org.sosy_lab.cpachecker.core.interfaces.Precision;
import org.sosy_lab.cpachecker.core.interfaces.Reducer;
import org.sosy_lab.cpachecker.core.interfaces.StateSpacePartition;
import org.sosy_lab.cpachecker.core.interfaces.Statistics;
import org.sosy_lab.cpachecker.core.interfaces.StatisticsProvider;
import org.sosy_lab.cpachecker.core.interfaces.StopOperator;
import org.sosy_lab.cpachecker.core.interfaces.TransferRelation;
import org.sosy_lab.cpachecker.core.interfaces.conditions.AdjustableConditionCPA;
/**
 * CPA for path conditions ({@link PathCondition}).
 * It can be configured to work with any condition that implements this interface.
 */
@Options(prefix="cpa.conditions.path")
public class PathConditionsCPA implements ConfigurableProgramAnalysisWithBAM, AdjustableConditionCPA, StatisticsProvider {
// Concrete PathCondition implementation, chosen via the configuration option
// cpa.conditions.path.condition and instantiated reflectively in the constructor.
@Option(secure = true, description = "The condition", name = "condition", required = true)
@ClassOption(packagePrefix = "org.sosy_lab.cpachecker.cpa.conditions.path")
private PathCondition.Factory conditionClass;
@Option(secure = true,
description = "Number of times the path condition may be adjusted, i.e., the path condition threshold may be increased (-1 to always adjust)",
name = "adjustment.threshold")
@IntegerOption(min=-1)
private int adjustmentThreshold = -1;
// Number of precision adjustments performed so far; compared against
// adjustmentThreshold in adjustPrecision().
private int performedAdjustments = 0;
private final PathCondition condition;
private final AbstractDomain domain = new FlatLatticeDomain();
// The transfer relation delegates successor computation entirely to the
// configured condition.
private final TransferRelation transfer = new SingleEdgeTransferRelation() {
@Override
public Collection<? extends AbstractState> getAbstractSuccessorsForEdge(
AbstractState pState, Precision pPrecision, CFAEdge pCfaEdge) {
return Collections.singleton(condition.getAbstractSuccessor(pState, pCfaEdge));
}
};
public static CPAFactory factory() {
return AutomaticCPAFactory.forType(PathConditionsCPA.class);
}
// Instantiated only via factory(); config.inject() fills the @Option fields above.
private PathConditionsCPA(Configuration config) throws InvalidConfigurationException {
config.inject(this);
condition = conditionClass.create(config);
}
@Override
public void collectStatistics(Collection<Statistics> pStatsCollection) {
// The condition may either provide statistics objects or be one itself.
if (condition instanceof StatisticsProvider) {
((StatisticsProvider)condition).collectStatistics(pStatsCollection);
} else if (condition instanceof Statistics) {
pStatsCollection.add((Statistics)condition);
}
}
@Override
public AbstractState getInitialState(CFANode pNode, StateSpacePartition pPartition) {
return condition.getInitialState(pNode);
}
@Override
public boolean adjustPrecision() {
// A threshold of -1 means "no limit"; otherwise stop once it is reached.
if (adjustmentThreshold == -1 || performedAdjustments < adjustmentThreshold) {
performedAdjustments++;
return condition.adjustPrecision();
}
return false;
}
@Override
public AbstractDomain getAbstractDomain() {
return domain;
}
@Override
public MergeOperator getMergeOperator() {
return MergeSepOperator.getInstance();
}
@Override
public StopOperator getStopOperator() {
return StopAlwaysOperator.getInstance();
}
@Override
public TransferRelation getTransferRelation() {
return transfer;
}
@Override
public Reducer getReducer() {
return condition.getReducer();
}
}
|
# SPDX-License-Identifier: Apache-2.0
#set -ev
#!/bin/sh
# Derive and export all environment variables the deployment needs from the
# three organization inputs (name, domain, host IP).
function exportVariables(){
# Organization information that you wish to build and deploy
export NAME_OF_ORGANIZATION=$NAME_OF_ORGANIZATION
export DOMAIN_OF_ORGANIZATION=$DOMAIN_OF_ORGANIZATION
export HOST_COMPUTER_IP_ADDRESS=$HOST_COMPUTER_IP_ADDRESS
export ORGANIZATION_NAME_LOWERCASE=`echo "$NAME_OF_ORGANIZATION" | tr '[:upper:]' '[:lower:]'`
export CA_ADDRESS_PORT=ca.$DOMAIN_OF_ORGANIZATION:7054
# Security defaults
# Couch DB credentials
export COUCH_DB_USERNAME=admin
export COUCH_DB_PASSWORD=adminpw
# Certificate authority credentials
export CA_ADMIN_USER=admin
export CA_ADMIN_PASSWORD=adminpw
# Orderer credentials
# NOTE(review): unlike the variables above, these two are not exported —
# they are only visible to this shell, not to child processes. Confirm intended.
ORDERER_PASSWORD=adminpw
# Peer credentials
PEER_PASSWORD=peerpw
}
# Main deployment sequence: arguments are organization name, domain, host IP.
export NAME_OF_ORGANIZATION=$1
export DOMAIN_OF_ORGANIZATION=$2
export HOST_COMPUTER_IP_ADDRESS=$3
exportVariables
./clean-all.sh
# Substitutes organizations information in the configtx template to match organizations name, domain and ip address
sed -e 's/organization_name/'$NAME_OF_ORGANIZATION'/g' -e 's/organization_domain/'$DOMAIN_OF_ORGANIZATION'/g' -e 's/ip_address/'$HOST_COMPUTER_IP_ADDRESS'/g' configtx_template.yaml > configtx.yaml
# Start the certificate authority
docker-compose -p fabric-network -f docker-compose.yml up -d ca
sleep 3
# Generate identity and cryptographic materials for the 3 orderers
for ORDERER_NUMBER in 1 2 3
do
docker exec ca.$DOMAIN_OF_ORGANIZATION /bin/bash -c "cd /etc/hyperledger/artifacts/ && ./orderer-identity.sh $CA_ADDRESS_PORT $DOMAIN_OF_ORGANIZATION $HOST_COMPUTER_IP_ADDRESS $CA_ADMIN_USER $CA_ADMIN_PASSWORD $ORDERER_NUMBER $ORDERER_PASSWORD"
done
# Generate identity and cryptographic materials for the peer
docker exec ca.$DOMAIN_OF_ORGANIZATION /bin/bash -c "cd /etc/hyperledger/artifacts/ && ./peer-identity.sh $CA_ADDRESS_PORT $DOMAIN_OF_ORGANIZATION $HOST_COMPUTER_IP_ADDRESS $PEER_PASSWORD"
# Move the crypto-config folder to manipulate it more easily away from the dockers users' restrictions
sudo mv ./${ORGANIZATION_NAME_LOWERCASE}Ca/client/crypto-config ./
sudo chmod -R 777 ./crypto-config
# Move TLS certificates for the 3 orderers (renamed to the cryptogen layout:
# server.crt / server.key / ca.crt)
for ORDERER_NUMBER in 1 2 3
do
ORDERER_DIRECTORY=./crypto-config/ordererOrganizations/orderers
sudo mv $ORDERER_DIRECTORY/orderer$ORDERER_NUMBER.$DOMAIN_OF_ORGANIZATION/tls/signcerts/cert.pem $ORDERER_DIRECTORY/orderer$ORDERER_NUMBER.$DOMAIN_OF_ORGANIZATION/tls/server.crt
sudo mv $ORDERER_DIRECTORY/orderer$ORDERER_NUMBER.$DOMAIN_OF_ORGANIZATION/tls/keystore/*_sk $ORDERER_DIRECTORY/orderer$ORDERER_NUMBER.$DOMAIN_OF_ORGANIZATION/tls/server.key
sudo mv $ORDERER_DIRECTORY/orderer$ORDERER_NUMBER.$DOMAIN_OF_ORGANIZATION/tls/tlscacerts/*.pem $ORDERER_DIRECTORY/orderer$ORDERER_NUMBER.$DOMAIN_OF_ORGANIZATION/tls/ca.crt
# Delete empty directories
sudo rm -rf $ORDERER_DIRECTORY/orderer$ORDERER_NUMBER.$DOMAIN_OF_ORGANIZATION/tls/{cacerts,keystore,signcerts,tlscacerts,user}
done
# Peers crypto-config directory
PEER_DIRECTORY=./crypto-config/peerOrganizations/peers/peer.$DOMAIN_OF_ORGANIZATION
# Move the Peer TLS files to match cryptogen hierarchy
sudo mv $PEER_DIRECTORY/tls/signcerts/cert.pem $PEER_DIRECTORY/tls/server.crt
sudo mv $PEER_DIRECTORY/tls/keystore/*_sk $PEER_DIRECTORY/tls/server.key
sudo mv $PEER_DIRECTORY/tls/tlscacerts/*.pem $PEER_DIRECTORY/tls/ca.crt
# Delete the peers empty directory
sudo rm -rf $PEER_DIRECTORY/tls/{cacerts,keystore,signcerts,tlscacerts,user}
# Generate the channel configuration
./generate.sh ${ORGANIZATION_NAME_LOWERCASE}channel $NAME_OF_ORGANIZATION
sleep 2
# Start the network with docker-compose
docker-compose -f docker-compose.yml up -d peer couchdb cli
sleep 2
docker-compose -f docker-compose.yml up -d orderer
docker-compose -f docker-compose.yml up -d orderer2
docker-compose -f docker-compose.yml up -d orderer3
sleep 15
# Creates the channel
docker exec cli peer channel create -o orderer1.$DOMAIN_OF_ORGANIZATION:7050 -c ${ORGANIZATION_NAME_LOWERCASE}channel --tls --cafile /etc/hyperledger/crypto-config/ordererOrganizations/orderers/orderer1.$DOMAIN_OF_ORGANIZATION/tls/ca.crt -f /etc/hyperledger/artifacts/channel.tx
# Joins the peer to the channel
docker exec cli peer channel join -b ${ORGANIZATION_NAME_LOWERCASE}channel.block
# Build javascript chaincode
pushd chaincode
npm install
npm run build
popd
# Install the chaincode
docker exec cli peer chaincode install -n chaincode -v 1.0 -p /etc/hyperledger/chaincode -l node
# Instantiate the chaincode
docker exec cli peer chaincode instantiate -o orderer1.$DOMAIN_OF_ORGANIZATION:7050 -C ${ORGANIZATION_NAME_LOWERCASE}channel -n chaincode -v 1.0 -l node -c '{"Args":["initLedger"]}' -P "OR('${NAME_OF_ORGANIZATION}MSP.member')" --tls --cafile /etc/hyperledger/crypto-config/ordererOrganizations/orderers/orderer1.$DOMAIN_OF_ORGANIZATION/tls/ca.crt
sleep 5
# Test the chaincode
# NOTE(review): the smoke-test invocations below hard-code the "vtex" org,
# channel and orderer domain instead of using the variables above — they will
# only work when the script is deployed for that organization. Confirm.
# MercadoLivre
docker exec cli peer chaincode invoke -o orderer1.vtex.com:7050 -C vtexchannel -n chaincode -c '{"Args":["invokeTransaction","vtex", "mercadoLivre", "{\"platform\": \"Mercado Livre\", \"products\": [{\"productName\": \"Cadeira\", \"priceSold\": \"40\", \"priceProduct\": \"25\", \"stock\": \"12\", \"quantitySold\": \"20\", \"status\": \"complete\"},{\"productName\": \"Mesa\", \"priceSold\": \"120\", \"priceProduct\": \"80\", \"stock\": \"6\", \"quantitySold\": \"2\", \"status\": \"complete\"},{\"productName\": \"Teclado\", \"priceSold\": \"80\", \"priceProduct\": \"50\", \"stock\": \"16\", \"quantitySold\": \"5\", \"status\": \"complete\"},{\"productName\": \"Camisa\", \"priceSold\": \"25\", \"priceProduct\": \"10\", \"stock\": \"30\", \"quantitySold\": \"30\", \"status\": \"complete\"},{\"productName\": \"Microondas\", \"priceSold\": \"230\", \"priceProduct\": \"150\", \"stock\": \"3\", \"quantitySold\": \"2\", \"status\": \"complete\"},{\"productName\": \"Mochila\", \"priceSold\": \"50\", \"priceProduct\": \"20\", \"stock\": \"8\", \"quantitySold\": \"4\", \"status\": \"complete\"},{\"productName\": \"Notebook\", \"priceSold\": \"2500\", \"priceProduct\": \"1500\", \"stock\": \"2\", \"quantitySold\": \"1\", \"status\": \"returned\"},{\"productName\": \"Caixa som\", \"priceSold\": \"500\", \"priceProduct\": \"200\", \"stock\": \"5\", \"quantitySold\": \"4\", \"status\": \"returned\"}]}"]}' --tls --cafile /etc/hyperledger/crypto-config/ordererOrganizations/orderers/orderer1.vtex.com/tls/ca.crt
# Amazon
docker exec cli peer chaincode invoke -o orderer1.vtex.com:7050 -C vtexchannel -n chaincode -c '{"Args":["invokeTransaction","vtex", "amazon", "{\"platform\": \"amazon\", \"products\": [{\"productName\": \"Cadeira\", \"priceSold\": \"40\", \"priceProduct\": \"25\", \"stock\": \"12\", \"quantitySold\": \"20\", \"status\": \"complete\"},{\"productName\": \"Mesa\", \"priceSold\": \"120\", \"priceProduct\": \"80\", \"stock\": \"6\", \"quantitySold\": \"2\", \"status\": \"complete\"},{\"productName\": \"Teclado\", \"priceSold\": \"80\", \"priceProduct\": \"50\", \"stock\": \"16\", \"quantitySold\": \"5\", \"status\": \"complete\"},{\"productName\": \"Camisa\", \"priceSold\": \"25\", \"priceProduct\": \"10\", \"stock\": \"30\", \"quantitySold\": \"30\", \"status\": \"complete\"},{\"productName\": \"Microondas\", \"priceSold\": \"230\", \"priceProduct\": \"150\", \"stock\": \"3\", \"quantitySold\": \"2\", \"status\": \"complete\"},{\"productName\": \"Mochila\", \"priceSold\": \"50\", \"priceProduct\": \"20\", \"stock\": \"8\", \"quantitySold\": \"4\", \"status\": \"complete\"},{\"productName\": \"Notebook\", \"priceSold\": \"2500\", \"priceProduct\": \"1500\", \"stock\": \"2\", \"quantitySold\": \"1\", \"status\": \"returned\"},{\"productName\": \"Caixa som\", \"priceSold\": \"500\", \"priceProduct\": \"200\", \"stock\": \"5\", \"quantitySold\": \"4\", \"status\": \"returned\"}]}"]}' --tls --cafile /etc/hyperledger/crypto-config/ordererOrganizations/orderers/orderer1.vtex.com/tls/ca.crt
# Americanas
docker exec cli peer chaincode invoke -o orderer1.vtex.com:7050 -C vtexchannel -n chaincode -c '{"Args":["invokeTransaction","vtex", "americanas", "{\"platform\": \"Americanas\", \"products\": [{\"productName\": \"Cadeira\", \"priceSold\": \"40\", \"priceProduct\": \"25\", \"stock\": \"12\", \"quantitySold\": \"20\", \"status\": \"complete\"},{\"productName\": \"Mesa\", \"priceSold\": \"120\", \"priceProduct\": \"80\", \"stock\": \"6\", \"quantitySold\": \"2\", \"status\": \"complete\"},{\"productName\": \"Teclado\", \"priceSold\": \"80\", \"priceProduct\": \"50\", \"stock\": \"16\", \"quantitySold\": \"5\", \"status\": \"complete\"},{\"productName\": \"Camisa\", \"priceSold\": \"25\", \"priceProduct\": \"10\", \"stock\": \"30\", \"quantitySold\": \"30\", \"status\": \"complete\"},{\"productName\": \"Microondas\", \"priceSold\": \"230\", \"priceProduct\": \"150\", \"stock\": \"3\", \"quantitySold\": \"2\", \"status\": \"complete\"},{\"productName\": \"Mochila\", \"priceSold\": \"50\", \"priceProduct\": \"20\", \"stock\": \"8\", \"quantitySold\": \"4\", \"status\": \"complete\"},{\"productName\": \"Notebook\", \"priceSold\": \"2500\", \"priceProduct\": \"1500\", \"stock\": \"2\", \"quantitySold\": \"1\", \"status\": \"returned\"},{\"productName\": \"Caixa som\", \"priceSold\": \"500\", \"priceProduct\": \"200\", \"stock\": \"5\", \"quantitySold\": \"4\", \"status\": \"returned\"}]}"]}' --tls --cafile /etc/hyperledger/crypto-config/ordererOrganizations/orderers/orderer1.vtex.com/tls/ca.crt
# Ali Express
docker exec cli peer chaincode invoke -o orderer1.vtex.com:7050 -C vtexchannel -n chaincode -c '{"Args":["invokeTransaction","vtex", "aliexpress", "{\"platform\": \"aliexpress\", \"products\": [{\"platform\": \"Ali Express\",\"productName\": \"Colar\", \"priceProduct\": \"2\", \"quantityBought\": \"90\",\"status\": \"instock\"},{\"platform\": \"Ali Express\",\"productName\": \"Relogio\", \"priceProduct\": \"8\", \"quantityBought\": \"30\",\"status\": \"transito\"},{\"platform\": \"Ali Express\",\"productName\": \"Mouse pad\", \"priceProduct\": \"8\", \"quantityBought\": \"50\",\"status\": \"stock\"},{\"platform\": \"Ali Express\",\"productName\": \"Fita isolante\", \"priceProduct\": \"4\", \"quantityBought\": \"20\",\"status\": \"transito\"},{\"platform\": \"Ali Express\",\"productName\": \"Chapeu\", \"priceProduct\": \"9\", \"quantityBought\": \"40\",\"status\": \"stock\"}]}"]}' --tls --cafile /etc/hyperledger/crypto-config/ordererOrganizations/orderers/orderer1.vtex.com/tls/ca.crt
sleep 3
# Query the blockchain
docker exec cli peer chaincode query -C vtexchannel -n chaincode -c '{"Args":["queryBlockchain","vtex", "aliexpress"]}' --tls --cafile /etc/hyperledger/crypto-config/ordererOrganizations/orderers/orderer1.vtex.com/tls/ca.crt
# NETWORK DEPLOYMENT COMPLETED SUCCESSFULLY
|
#!/bin/bash
# Configuration script for botan 2.15.0
# Library release date: 2020/07/07
# Consumed by the FM_* build framework to download, verify and unpack Botan.
export FM_BOTAN_NAME="Botan"
export FM_BOTAN_VERSION="2.15.0"
export FM_BOTAN_FULL_NAME="${FM_BOTAN_NAME}-${FM_BOTAN_VERSION}"
export FM_BOTAN_TARBALL_NAME="${FM_BOTAN_FULL_NAME}.tar.xz"
export FM_BOTAN_TARBALL_DOWNLOAD_URL="https://botan.randombit.net/releases/${FM_BOTAN_TARBALL_NAME}"
# Skip CI configuration files when extracting the tarball.
export FM_BOTAN_UNTAR_FLAGS="--exclude=*/.lgtm.yml --exclude=*/.travis.yml"
# Sentinel file whose presence marks a completed install.
export FM_BOTAN_INSTALL_CHECK="include/botan/botan.h"
# Checksum used to verify the downloaded tarball.
export FM_BOTAN_HASH="d88af1307f1fefac79aa4f2f524699478d69ce15a857cf2d0a90ac6bf2a50009"
export FM_BOTAN_HASH_TYPE="SHA-256"
|
<filename>src/mixins/upload.js
export default {
  methods: {
    /**
     * Run `allow` only when the drag event actually carries files.
     * @param {DragEvent} e - drag event whose dataTransfer types are inspected
     * @param {Function} allow - callback invoked when a 'Files' entry is present
     */
    handleShow (e, allow = () => {}) {
      const hasFiles = e.dataTransfer.types.indexOf('Files') >= 0
      if (hasFiles) allow()
    },
  },
}
|
#!/usr/bin/env python3
#Copyright 2021 <NAME>
#Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#This is a blackjack game called BlackPack. It has a few cool features, including using multiple decks, autoshuffling, and betting.
#This is my ONLINE blackjack game.
#It's written in python3 and depends on random and socket, this is the linux version, it should however run on windows or macos when opened in a python interpreter.
#It can be installed on linux by placing the file in /usr/bin or any preferred bin folder, and can be run by calling BlackPackO in bash.
from random import randint
import xml.etree.ElementTree as ET
import socket
# Network endpoints for the online mode.
HOST = '127.0.0.1'  # loopback address
SERV = 65533  # server port — NOTE(review): not referenced in this file; confirm intended use
LIST = 65532  # listen port — NOTE(review): not referenced in this file; confirm intended use
#[Noah Panepinto (Dec.16 2021 {01:39})]
#Here I define several escape sequences that will be appended to strings to change their colours when printed.
class colours:
    """ANSI SGR escape sequences used to colour terminal output.

    Concatenate a code before text and reset with ENDC afterwards.
    """
    ENDC = '\033[0m'       # reset all attributes
    BOLD = '\033[1m'
    UNDER = '\033[4m'
    NO_UNDER = '\033[24m'
    REVERSE = '\033[7m'
    FOREWARD = '\033[27m'  # "not reversed" (undoes REVERSE)
    FORE_DARK_BLACK = '\033[30m'
    FORE_DARK_RED = '\033[31m'
    FORE_DARK_GREEN = '\033[32m'
    FORE_DARK_ORANGE = '\033[33m'
    FORE_DARK_BLUE = '\033[34m'
    FORE_DARK_MAGENTA = '\033[35m'
    FORE_DARK_CYAN = '\033[36m'
    FORE_DARK_WHITE = '\033[37m'
    FORE_BRIGHT_BLACK = '\033[90m'
    FORE_BRIGHT_RED = '\033[91m'
    FORE_BRIGHT_GREEN = '\033[92m'
    FORE_BRIGHT_ORANGE = '\033[93m'
    FORE_BRIGHT_BLUE = '\033[94m'
    FORE_BRIGHT_MAGENTA = '\033[95m'
    FORE_BRIGHT_CYAN = '\033[96m'
    FORE_BRIGHT_WHITE = '\033[97m'
    BACK_ENDC = '\033[0m'
    BACK_DARK_BLACK = '\033[40m'
    BACK_DARK_RED = '\033[41m'
    BACK_DARK_GREEN = '\033[42m'
    BACK_DARK_ORANGE = '\033[43m'
    BACK_DARK_BLUE = '\033[44m'
    BACK_DARK_MAGENTA = '\033[45m'
    BACK_DARK_CYAN = '\033[46m'
    BACK_DARK_WHITE = '\033[47m'
    # BUGFIX: was '\033[1000m', which is not a valid SGR code; bright-black
    # background is SGR 100 (the 100-107 bright-background range).
    BACK_BRIGHT_BLACK = '\033[100m'
    BACK_BRIGHT_RED = '\033[101m'
    BACK_BRIGHT_GREEN = '\033[102m'
    BACK_BRIGHT_ORANGE = '\033[103m'
    BACK_BRIGHT_BLUE = '\033[104m'
    BACK_BRIGHT_MAGENTA = '\033[105m'
    BACK_BRIGHT_CYAN = '\033[106m'
    BACK_BRIGHT_WHITE = '\033[107m'
#[<NAME> (Oct.3 2021 {01:39})]
#Here I define the classes that will represent Standard Cards (the BaseCard class) and Ace Cards (the AceCard class)
#The AceCard class is derived from the BaseCard class, and as a result both contain the same non-static (instance) values:
#self.value; An integer value representing the total added to your hand under normal circumstances:
#Used for comparing whether a hand of two cards can be split or not.
#self.suite; A string value representing the suite of a card, "Spades", "Clubs", "Hearts", or "Diamonds".
#self.card; A string value representing the number or face of a card, "Two", "Eight", "Ace", "King" etc.
#self.name; A string value representing the name of a card:
#It is equal to "X of Y" where X is self.card and Y is self.suite.
#self.face; An array of string values which is used to visually represent a card when printed.
#Both the BaseCard class and AceCard class contain one function:
#GetValue(); A function which returns the value a card will add to a hand:
#Takes in the current value of your hand as input value score.
#Returns self.value.
#The AceCard class differs from the BaseCard class in three ways:
#self.value is not supplied upon instantiation and is always equal to 11.
#self.card is not supplied upon instantiation and is always equal to "Ace".
#GetValue() will return 1 if score is greater than 10, and 11 if score is less than or equal to 10.
class BaseCard:
    """A standard playing card with a fixed point value.

    Attributes:
        value -- points this card adds to a hand
        name  -- human-readable name, e.g. "Two of Spades"
        face  -- list of strings forming the card's ASCII art
    """
    def __init__(self, v, n, f):
        self.value = v
        self.name = n
        self.face = f

    def GetValue(self, score):
        # score is accepted for interface parity with AceCard but ignored:
        # a standard card is worth the same regardless of the hand total.
        return self.value

    def __str__(self):
        return '\n'.join(self.face)


class AceCard(BaseCard):
    """An ace: worth 11 normally, 1 once that would help avoid busting."""
    def __init__(self, n, f):
        # BUGFIX: delegate to BaseCard so self.value is populated. The
        # module-level docs promise value == 11, but it was never set, so
        # reading ace.value previously raised AttributeError.
        super().__init__(11, n, f)

    def GetValue(self, score):
        # Count as 1 when the rest of the hand already exceeds 10.
        if score > 10:
            return 1
        return 11
#[<NAME> (Dec.16 2021 {01:30})]
#Here I define a static class containing all of the data for a full deck of cards, including the string representation of the back of a card.
#The Cards class contains two static (class) values:
#Cards.fd; an array containing BaseCard and AceCard Class Objects representing a full standard deck of playing cards.
#Cards.boc; an array of strings representing the back of a standard playing card.
#The Cards class contains one function, Load() which reads an XML File and populates Cards.fd and Cards.boc with the data within the XML File.
class Cards:
    """Static card data shared by every Deck, loaded from an XML file."""
    fd = []   # full deck: BaseCard/AceCard objects
    boc = []  # ASCII-art lines for the back of a card

    def Load(xmlFile):
        """Populate Cards.fd and Cards.boc from the card-definition XML.

        Prints an error and exits if any <card> has an unknown <ID>.
        """
        deck = []
        card_back = []
        bad = 0
        root = ET.parse(xmlFile).getroot()
        for item in root.findall('card'):
            kind = item.find('ID').text
            if kind == 'back':
                for line in item.find('face').findall('line'):
                    card_back.append(line.text)
            elif kind == 'ace':
                art = [line.text for line in item.find('face').findall('line')]
                deck.append(AceCard(item.find('description').text, art))
            elif kind == 'base':
                art = [line.text for line in item.find('face').findall('line')]
                deck.append(BaseCard(int(item.find('value').text),
                                     item.find('description').text, art))
            else:
                bad += 1
        if bad > 0:
            print('invalid Cards.xml')
            exit(0)
        Cards.fd = deck
        Cards.boc = card_back
#[<NAME> (Oct.3 2021 {01:39})]
#Here I define the class which will represent a deck of cards (the Deck class) which contains two static (class) values:
#inPile; An Array of AceCard and BaseCard classes which represents the pile in which cards are shuffled and waiting to be dealt to players.
#outPile; An array of AceCard and BaseCard classes which represents the pile in which cards that have been discarded and are waiting to be reshuffled into inPile.
#The Deck Class contains no non static (instance) values.
#The Deck Class contains one function:
#shuffle(); A function which takes all cards in outPile and moves them into inPile.
class Deck:
    """The pile of cards in play: an in-pile to draw from and an out-pile
    of discards waiting to be shuffled back in."""

    def __init__(self, decks):
        """Build a deck holding `decks` copies of the full card set.

        BUGFIX: inPile/outPile were mutable class attributes shared by every
        Deck instance, so constructing a second Deck appended its cards to
        the first one's pile. They are now per-instance lists.
        """
        self.inPile = []
        self.outPile = []
        for _ in range(decks):
            self.inPile.extend(Cards.fd)

    def shuffle(self):
        """Move every discarded card back into the draw pile.

        (Order here is irrelevant: Hand.hit() draws at a random index.)
        """
        print("\nShufflin' the deck!")
        while len(self.outPile) > 0:
            self.inPile.append(self.outPile.pop(0))
#[<NAME> (Oct.3 2021 {01:39})]
#Here I define the the class which will represent a hand of cards held by a player (the Hand class) which contains three non static (instance) values:
#self.score; The integer value which represents the point value of the hand, this is what you're trying to get to 21.
#self.cards; An Array of BaseCards and AceCards that the hand contains.
#self.doubled; A boolean value that checks whether a hand has been double downed, and can resultingly no longer hit.
#The Hand class also contains six functions:
#print(); A function which prints the visual representations of the cards that the Hand class contains,
#the data printed is contained in BaseCard.face, and AceCard.face.
#printHalf(); A function which prints the visual representation of the first card in the hand and then boc.
#evalAceLast(); A function which evaluates and populates self.score,
#it does this by running the GetValue() function on all BaseCard classes within self.cards followed by doing the same for all AceCard classes within self.cards.
#The order of operations is important to make sure that the AceCard classes return the correct value.
#Returns self.score.
#hit(); A function which takes a random card from the inPile value of a Deck class object and adds it to self.cards.
#Takes in a Deck class object from which to take a card.
#deal(); A function which calls the hit() function twice.
#Takes in a Deck class object which is needed for passing to the hit() function.
#clear(); A function which takes all cards in self.cards and moves them to the outPile value of a Deck class object.
#Takes in a Deck class object to which it gives cards.
class Hand:
    """The cards a participant currently holds, plus its running score."""

    def __init__(self):
        self.score = 0        # last total computed by evalAceLast()
        self.cards = []       # BaseCard / AceCard objects in the hand
        self.doubled = False  # True once this hand has doubled down

    def print(self):
        """Print every card in the hand side by side, row by row."""
        for row in range(8):
            rendered = ""
            for card in self.cards:
                rendered += card.face[row] + " "
            print(rendered)

    def printHalf(self):
        """Print the first card face-up and the card back beside it."""
        for row in range(8):
            print(self.cards[0].face[row] + " " + Cards.boc[row] + " ")

    def evalAceLast(self):
        """Recompute self.score, valuing aces after every other card so
        each ace can correctly choose between 1 and 11."""
        total = 0
        for ace_pass in (False, True):
            for card in self.cards:
                # A probe with score 0 identifies aces (they answer 11).
                if (card.GetValue(0) == 11) == ace_pass:
                    total += card.GetValue(total)
        self.score = total
        return total

    def hit(self, deck):
        """Draw one random card from the deck's in-pile into this hand,
        reshuffling the discards first if the in-pile is empty."""
        if len(deck.inPile) == 0:
            deck.shuffle()
        pick = randint(0, len(deck.inPile) - 1)
        self.cards.append(deck.inPile.pop(pick))

    def deal(self, deck):
        """Draw the two opening cards."""
        self.hit(deck)
        self.hit(deck)

    def clear(self, deck):
        """Discard every card to the deck's out-pile and reset the
        double-down flag for the next round."""
        while self.cards:
            deck.outPile.append(self.cards.pop(0))
        self.doubled = False
#[<NAME> (Oct.3 2021 {01:39})]
#Here I define a class which represents the result of a finished hand (the HandResult class), which is used to determine whether the player won or lost a hand.
#The HandResult class contains three non static (instance) values:
#self.CardCount; An integer value which represents the number of cards which were in the Hand class object that this HandResult class instance represents when it finished.
#self.Value; An integer value which represents the point value of the Hand class object that this HandResult class instance represents when it finished.
#self.DoubleDown; A boolean value which indicates whether or not the Hand class object that this HandResult class instance represents finished by doubling down.
class HandResult:
    """Immutable-ish snapshot of a finished hand: final value, number of
    cards held, and whether the hand finished by doubling down."""

    def __init__(self, vl, cc, dd = False):
        self.CardCount = cc
        self.Value = vl
        self.DoubleDown = dd

    def __str__(self):
        return f"CC = {self.CardCount}, VL = {self.Value}, DD = {self.DoubleDown}"
#[<NAME> (Oct.3 2021 {01:39})]
#Here I define a class which represents a player who will play blackjack, it contains three static (instance) values:
#self.Hand; An array of Hand class objects, one for each that the player is playing at once.
#self.tb; An integer value which represents the total amount of money currently being bet accross all hands currently being played.
#self.bi; An integer value which represents the original amount of money bet at the begining of the current round.
#The Player class contains three functions:
#Play(); A function which:
#Shows the player their hand by calling the print() function on the hand currently being interacted with.
#Determines whether or not the player can double down or split their hand.
#Checks if the player has reached or exceeded a score of 21 on their hand, reveals the score to the player,
#and finishes the hand if the player has reached or exceeded a score of 21 by intantiating and returning a HandResult class object based on the hand currently being interacted with.
#Informs the player of the things they can do with their hand, Hit, Stand, Split or Double Down as appropriate using the Call() function.
#Requests that the player decide what they would like to do with their hand next.
#If the player chooses to hit, call the hit() function for the hand currently being interacted with and recursively calls Play().
#If the player chooses to stand, instantiate and return a HandResult class object based on the hand currently being interacted with.
#If the player chooses to split, add self.bi to self.tb, call and return the Split() function.
#If the player chooses to double down, set Hand.doubled to true for the hand currently being interacted with, add self.bi to self.tb, call the hit() function and call Play() recursively.
#Takes in a(n):
#Deck class object which represents the deck from which the player will draw cards, called deck.
#Integer value which is used to index self.Hand to find the hand that is currently being interacted with, called i, assumed 0.
#Boolean value which indicates whether or not the player is betting, called b, assumed false.
#Integer value which represents the total remaining cash that a player can bet with, called cr, assumed 0.
#Integer value which represents the amount being bet on all hands initially, called tb, assumed -1.
#Returns an array of HandResult class object based on all hands being played by the player this round.
#Call(); A function which presents the player with all of the things they can do with their hand and processes their response.
#Takes in a:
#boolean value which indicates whether the player can double down on the hand currently being interacted with, called d.
#boolean value which indicates whether the player can split the hand currently being interacted with, called s.
#Returns an integer value which represents the selection that the player made.
#Split(); A function which:
#Creates a new Hand class object and adds it to self.Hands.
#Moves a card from the hand currently being interacted with to the newly created hand.
#Calls the hit() function on both the hand currently being interacted with and the newly created hand.
#Calls and returns Play() on both the hand currently being interacted with and the newly created hand.
#Takes in a(n):
#Deck class object which represents the deck from which the player will draw cards, called deck.
#Integer value which is used to index self.Hand to find the hand that is currently being interacted with, called s, assumed 0.
#Boolean value which indicates whether or not the player is betting, called b, assumed false.
#Integer value which represents the total remaining cash that a player can bet with, called cr, assumed 0.
#Returns an array of HandResult class object based on all hands being played by the player this round.
class Player:
    """A human blackjack player: one or more hands plus bet bookkeeping."""

    def __init__(self):
        # One starting hand; Split() appends additional Hand objects.
        self.Hand = [ Hand() ]
        # tb: total bet across all live hands this round; bi: the initial bet.
        self.tb = 0
        self.bi = 0

    def Play(self, deck, i = 0, b = False, cr = 0, tb = -1):
        """Interactively play self.Hand[i]; returns a list of HandResult.

        deck -- Deck to draw from
        i    -- index of the hand currently being played
        b    -- True when betting is enabled (gates double down)
        cr   -- cash remaining; gates affordability of split/double
        tb   -- total bet for a fresh round; -1 means "already set"
        """
        # Only the initial call of a round passes tb; recursion passes -1.
        if (tb != -1):
            self.tb = tb
            self.bi = tb
        print(f"{colours.FORE_BRIGHT_CYAN}\nYour Hand:{colours.ENDC}")
        self.Hand[i].print()
        splitable = False
        doublable = False
        # Splitting/doubling needs exactly two cards and enough cash to cover
        # one more initial bet on top of the current total bet.
        if (len(self.Hand[i].cards) == 2 and cr >= self.tb + self.bi):
            if b:
                doublable = True
            # Splittable only when both cards carry the same base value.
            if (self.Hand[i].cards[0].GetValue(0) == self.Hand[i].cards[1].GetValue(0)):
                splitable = True
        done = False
        val = -1
        if (self.Hand[i].evalAceLast() > 21):
            # Bust: the hand ends immediately.
            print(f"{colours.FORE_BRIGHT_GREEN}\nTotal Score = Bust...{colours.ENDC}")
            if self.Hand[i].doubled:
                return [ HandResult(self.Hand[i].score, len(self.Hand[i].cards), True) ]
            return [ HandResult(self.Hand[i].score, len(self.Hand[i].cards)) ]
        elif (self.Hand[i].score == 21):
            # 21 ends the hand immediately as well.
            print(f"{colours.FORE_BRIGHT_GREEN}\nTotal Score = 21!{colours.ENDC}")
            if self.Hand[i].doubled:
                return [ HandResult(self.Hand[i].score, len(self.Hand[i].cards), True) ]
            return [ HandResult(self.Hand[i].score, len(self.Hand[i].cards)) ]
        else:
            print(f"{colours.FORE_BRIGHT_GREEN}\nTotal Score = " + str(self.Hand[i].score) + f"{colours.ENDC}")
            if not self.Hand[i].doubled:
                # Re-prompt until Call() recognises an action.
                while (not done):
                    val = self.Call(doublable, splitable)
                    if (val != -1):
                        done = True
                    else:
                        print (f"{colours.FORE_BRIGHT_ORANGE}\nInvalid input. Please input the letter in brackets for the option you want.{colours.ENDC}")
                if (val == 0):
                    # Hit: draw one card and re-evaluate this same hand.
                    self.Hand[i].hit(deck)
                    return self.Play(deck, i, b, cr)
                elif (val == 1):
                    # Stand: finish with the current score.
                    return [ HandResult(self.Hand[i].score, len(self.Hand[i].cards)) ]
                elif (val == 3):
                    # Split: stake another initial bet, then play both hands.
                    self.tb = self.tb + self.bi
                    return self.Split(deck, i, b, cr)
                else:
                    # Double down (val == 2): bet doubled, exactly one more card.
                    self.Hand[i].doubled = True
                    self.tb = self.tb + self.bi
                    self.Hand[i].hit(deck)
                    return self.Play(deck, i, b, cr)
            else:
                # A doubled hand gets no further choices after its one card.
                return [ HandResult(self.Hand[i].score, len(self.Hand[i].cards), True) ]

    def Call(self, d, s):
        """Prompt for the next action and decode the response.

        d -- double down is offered; s -- split is offered.
        Returns 0=hit, 1=stand, 2=double down, 3=split, -1=unrecognised.
        """
        response = ""
        if (d and s):
            response = input(f"{colours.FORE_BRIGHT_BLUE}\nWould you like to {colours.ENDC}(H){colours.FORE_BRIGHT_BLUE}it, {colours.ENDC}(S){colours.FORE_BRIGHT_BLUE}tand, {colours.ENDC}(D){colours.FORE_BRIGHT_BLUE}ouble Down or S{colours.ENDC}(P){colours.FORE_BRIGHT_BLUE}lit? - {colours.ENDC}")
        elif (d and not s):
            response = input(f"{colours.FORE_BRIGHT_BLUE}\nWould you like to {colours.ENDC}(H){colours.FORE_BRIGHT_BLUE}it, {colours.ENDC}(S){colours.FORE_BRIGHT_BLUE}tand or {colours.ENDC}(D){colours.FORE_BRIGHT_BLUE}ouble Down? - {colours.ENDC}")
        elif (not d and s):
            response = input(f"{colours.FORE_BRIGHT_BLUE}\nWould you like to {colours.ENDC}(H){colours.FORE_BRIGHT_BLUE}it, {colours.ENDC}(S){colours.FORE_BRIGHT_BLUE}tand or S{colours.ENDC}(P){colours.FORE_BRIGHT_BLUE}lit? - {colours.ENDC}")
        else:
            response = input(f"{colours.FORE_BRIGHT_BLUE}\nWould you like to {colours.ENDC}(H){colours.FORE_BRIGHT_BLUE}it or {colours.ENDC}(S){colours.FORE_BRIGHT_BLUE}tand? - {colours.ENDC}")
        val = -1
        # First recognised letter anywhere in the response wins; D/P only
        # count when those options were actually offered.
        for char in response:
            if (char == "H" or char == "h"):
                val = 0
                break
            elif (char == "S" or char == "s"):
                val = 1
                break
            elif ((char == "D" or char == "d") and d):
                val = 2
                break
            elif ((char == "P" or char == "p") and s):
                val = 3
                break
        return val

    def Split(self, deck, s, b = False, cr = 0):
        """Split hand s into two hands, draw one card for each, then play
        both in sequence; returns the combined HandResult list."""
        self.Hand.append(Hand())
        n = len(self.Hand) - 1
        # Move the second card of the split hand into the new hand.
        self.Hand[-1].cards.append(self.Hand[s].cards.pop(-1))
        self.Hand[-1].hit(deck)
        self.Hand[s].hit(deck)
        r = self.Play(deck, s, b, cr)
        input(f"{colours.FORE_BRIGHT_BLUE}\nHit enter for your next hand.{colours.ENDC}")
        r.extend(self.Play(deck, n, b, cr))
        return r
#[<NAME> (Oct.3 2021 {01:39})]
#Here I define a class which represents the dealer who the player will be trying to beat. The Dealer class is derived from the Player class.
#The Dealer class contains one non static (instance) value, self.Hand; a Hand class object.
#The Dealer class contains one function:
#Play(); A function which:
#Checks if the dealer has reached or exceeded a score of 21 on their hand, reveals the score to the player,
#and finishes the hand if the dealer has reached or exceeded a score of 21 by intantiating and returning a HandResult class object self.Hand.
#Checks if the dealer has reached or exceeded a score of 17 on their hand,
#calls the hit() function of self.Hand() if the dealer has not reached or exceeded a score of 17 on their hand and calls Play() recursively,
#and finishes the hand if the dealer has reached or exceeded a score of 17 by intantiating and returning a HandResult class object self.Hand.
#Takes in a Deck class object which represents the deck from which the player will draw cards, called deck.
#Returns a HandResult class object based on self.Hand().
class Dealer(Player):
    """The house: plays a single fixed-rule hand (hit on 16, stand on 17)."""

    def __init__(self):
        # Unlike Player, the dealer holds exactly one Hand (not a list).
        self.Hand = Hand()

    def Play(self, deck):
        """Play out the dealer's hand automatically; returns one HandResult."""
        print(f"{colours.FORE_BRIGHT_CYAN}\nDealer's Hand:{colours.ENDC}")
        self.Hand.print()
        if (self.Hand.evalAceLast() > 21):
            # Bust: dealer is done.
            print(f"{colours.FORE_BRIGHT_GREEN}\nTotal Score = Bust...{colours.ENDC}")
            return HandResult(self.Hand.score, len(self.Hand.cards))
        elif (self.Hand.score == 21):
            print(f"{colours.FORE_BRIGHT_GREEN}\nTotal Score = 21!{colours.ENDC}")
            return HandResult(self.Hand.score, len(self.Hand.cards))
        else:
            print(f"{colours.FORE_BRIGHT_GREEN}\nTotal Score = " + str(self.Hand.score) + f"{colours.ENDC}")
            # House rule: hit on 16 or less, stand on 17 or more.
            if (self.Hand.score <= 16):
                print(f"{colours.FORE_BRIGHT_BLUE}\nDealer Hits!{colours.ENDC}")
                self.Hand.hit(deck)
                return self.Play(deck)
            else:
                print(f"{colours.FORE_BRIGHT_BLUE}\nDealer Stands!{colours.ENDC}")
                return HandResult(self.Hand.score, len(self.Hand.cards))
#[<NAME> (Dec.15 2021 {22:10})]
#Here I define a class which represents the game instance as a whole, I would like to one day generalize this into a CardGame class and derive the BlackPack class from that.
#The BlackPack class contains nine non static (instance) values:
#self.Deck; A Deck objeect which contains all of the cards that will be used within the game.
#self.Player; A Player object which represents the human player of the game.
#self.Dealer; A Dealer object which represents the AI dealer of the game.
#self.AutoShuffle; A boolean value which indicates whether the game will be played with autoshuffle enabled.
#self.Betting; A boolean value which indicated whether the game will be played with betting enabled.
#self.PlayerCash; An integer value which indicates how much money the player has at their disposal.
#self.InitialPlayerCash; An integer value that holds the amount of cash a player was holding at the beginning of a hand.
#self.MaxBet; An integer value that indicates the maximum amount of money that a player can bet on any given hand.
#self.MinBet; An integer value that indicates the minimum amount of money that a player can bet on any given hand.
#The BlackPack class contains two functions:
#PlayGame(); A function which:
#Asks the player how many decks they would like to play with, and sets self.Deck to be equal to a Deck Object containing the right number of cards.
#Asks the player if they would like to play with autoshuffle, and sets self.AutoShuffle accordingly.
#Asks the player if they would like to play with betting, and sets self.Betting accordingly. If they decide to play with betting;
#Asks the player how much money they would like to have available for betting, and sets self.PlayerCash accordingly.
#Asks the player for the largest bet they would like to be able to make, and sets self.MaxBet accordingly.
#Asks the player for the smallest bet they would like to be able to make, and sets self.MinBet accordingly.
#Calls the PlayRound() function.
#PlayRound(); A function Which:
#Shuffles the deck if self.AutoShuffle is True by calling shuffle() in self.Deck.
#Informs the player of the number of cards remaining in the inpile of the deck if self.AutoShuffle is False.
#If self.Beting is True;
#Informs the player of the value of self.PlayerCash, and ends the game if self.PlayerCash is equal to 0.
#Requests the amount of money the player would like to bet and checks that it exceeds neither self.MaxBet nor self.PlayerCash,
#and that it is equal to or greater than self.MinBet.
#Randomly draws two cards into the hand of both self.Player and self.Dealer by calling Hand.Deal() on self.Player's first hand and Hand.Deal() on self.Dealer.
#Shows both cards dealt to the player by calling Hand.print() on Self.Player's first deck, and the first card dealt to the dealer by calling Hand.printHalf() on self.Dealer.
#Calls Player.Play() for self.Player in order to have the player play their hand or hands.
#Calls Dealer.Play() for self.Dealer in order to have the dealer play their hand.
#Compares the result of the player's hand or hands to the dealer's hand and evaluates who wins on each hand, and if self.Betting is True:
#Calculates the amount of money won or lost on each hand and updates self.PlayerCash accordingly.
#Ends the game if self.PlayerCash is equal to 0.
#Asks the player if they would like to play again, and if so calls the PlayRound() function recursively,
#otherwise if self.Betting is True informs the player of how much they lost or won, and exits.
class BlackPack:
    """Top-level game controller tying together the deck, player, dealer,
    and the optional betting rules."""

    def __init__(self):
        self.Deck = Deck(0)
        self.Player = Player()
        self.Dealer = Dealer()
        self.AutoShuffle = False
        self.Betting = False
        self.PlayerCash = 0
        self.InitialPlayerCash = 0
        self.MaxBet = 0
        self.MinBet = 0

    def PlayGame(self):
        """Interactively gather settings (deck count, auto-shuffle, betting
        limits), then start the first round via PlayRound()."""
        cont = False
        while not cont:
            decks = input("\nHow many decks would you like to play with? ")
            try:
                decksInt = int(decks)
                if (decksInt < 1):
                    print("\nPlease input a number greater than zero.\n")
                elif (decksInt > 10):
                    print("\nPlease input a number less than eleven.\n")
                else:
                    cont = True
            except:
                print("\nPlease input a whole number.")
        done = False
        # BUGFIX: initialise before the loop — a response containing no Y/N
        # character used to reach "if (shuffle == 0)" with 'shuffle' unbound,
        # raising NameError.
        shuffle = 0
        while not done:
            response = input("\nPlay with auto shuffle? (Y)es or (N)o? - ")
            for char in response:
                if (char == "Y" or char == "y"):
                    shuffle = 1
                    done = True
                    break
                elif (char == "N" or char == "n"):
                    shuffle = 2
                    done = True
                    break
            if (shuffle == 0):
                print("\nInvalid input. Please input the letter in brackets for the option you want.")
            elif (shuffle == 1):
                print("\nAwesome! No card counting here.")
                self.AutoShuffle = True
            else:
                print("\nAwesome! Count those cards!")
                self.AutoShuffle = False
        done = False
        bet = 0
        while not done:
            response = input("\nPlay with betting? (Y)es or (N)o? - ")
            for char in response:
                if (char == "Y" or char == "y"):
                    bet = 1
                    done = True
                    break
                elif (char == "N" or char == "n"):
                    bet = 2
                    done = True
                    break
            if (bet == 0):
                print("\nInvalid input. Please input the letter in brackets for the option you want.")
            elif (bet == 1):
                print("\nAwesome! Let's do this right!")
                self.Betting = True
            else:
                print("\nAwesome! Let's play for fun.")
                self.Betting = False
        if self.Betting:
            cont = False
            while not cont:
                cash = input("\nHow much cash would you like to have for betting? ")
                try:
                    cashInt = int(cash)
                    if (cashInt < 1):
                        print("\nPlease input a number greater than zero.")
                    elif (cashInt > 1000000):
                        print("\nPlease input a number less than one million.")
                    else:
                        cont = True
                except:
                    print("\nPlease input a whole number.")
            self.PlayerCash = cashInt
            self.InitialPlayerCash = self.PlayerCash
            cont = False
            while not cont:
                # Renamed from 'max' so the builtin is not shadowed.
                max_raw = input("\nHow much cash would you like to have for your betting maximum? ")
                try:
                    maxInt = int(max_raw)
                    if (maxInt < 1):
                        print("\nPlease input a number greater than zero.")
                    elif (maxInt > (self.PlayerCash / 10)):
                        print("\nPlease input a number less than or equal to one tenth your cash total.")
                    elif (maxInt < (self.PlayerCash / 100)):
                        print("\nPlease input a number more than or equal to one hundredth your cash total.")
                    else:
                        cont = True
                except:
                    print("\nPlease input a whole number.")
            self.MaxBet = maxInt
            cont = False
            while not cont:
                # Renamed from 'min' so the builtin is not shadowed.
                min_raw = input("\nHow much cash would you like to have for your betting minimum? ")
                try:
                    minInt = int(min_raw)
                    if (minInt < 1):
                        print("\nPlease input a number greater than zero.")
                    elif (minInt > self.MaxBet):
                        print("\nPlease input a number less than or equal to your betting maximum.")
                    # BUGFIX: this bound previously tested maxInt instead of
                    # minInt, so the minimum-bet floor was never enforced.
                    elif (minInt < (self.PlayerCash / 1000)):
                        print("\nPlease input a number more than or equal to one thousandth your cash total.")
                    else:
                        cont = True
                except:
                    print("\nPlease input a whole number.")
            self.MinBet = minInt
        self.Deck = Deck(decksInt)
        print(f"{colours.FORE_BRIGHT_GREEN}\nGreat! Let's shuffle up and play!{colours.ENDC}")
        self.PlayRound()

    def PlayRound(self):
        """Play one full round: optional bet, deal, player turn, dealer
        turn, settlement per hand, then the play-again prompt (recursing
        into the next round). Returns 0 on game over / quit."""
        if self.AutoShuffle:
            self.Deck.shuffle()
        else:
            print(f"{colours.FORE_BRIGHT_CYAN}\nDeck Remaining Size: {colours.ENDC}" + str(len(self.Deck.inPile)))
        betInt = 0
        if self.Betting:
            print(f"{colours.FORE_BRIGHT_CYAN}\nBetting Cash Remaining: {colours.ENDC}" + str(self.PlayerCash))
            if (self.PlayerCash <= 0):
                print(f"{colours.FORE_BRIGHT_ORANGE}\nYou've gone broke. That's game over.\n{colours.ENDC}")
                return 0
            cont = False
            while not cont:
                bet = input(f"{colours.FORE_BRIGHT_BLUE}\nHow much cash would you like to bet? {colours.ENDC}")
                try:
                    betInt = int(bet)
                    if (betInt < self.MinBet):
                        print(f"{colours.FORE_BRIGHT_ORANGE}\nPlease input a number greater than {colours.ENDC}" + str(self.MinBet) + f"{colours.FORE_BRIGHT_ORANGE}.{colours.ENDC}")
                    elif (betInt > self.MaxBet):
                        print(f"{colours.FORE_BRIGHT_ORANGE}\nPlease input a number less than {colours.ENDC}" + str(self.MaxBet) + f"{colours.FORE_BRIGHT_ORANGE}.{colours.ENDC}")
                    elif (betInt > self.PlayerCash):
                        print(f"{colours.FORE_BRIGHT_ORANGE}\nPlease input a number less than or equal to your cash amount.{colours.ENDC}")
                    else:
                        cont = True
                except:
                    print(f"{colours.FORE_BRIGHT_ORANGE}\nPlease input a whole number.{colours.ENDC}")
        self.Player.Hand[0].deal(self.Deck)
        self.Dealer.Hand.deal(self.Deck)
        print(f"{colours.FORE_BRIGHT_CYAN}\nYour Hand:{colours.ENDC}")
        self.Player.Hand[0].print()
        print(f"{colours.FORE_BRIGHT_CYAN}\nDealer's Hand:{colours.ENDC}")
        self.Dealer.Hand.printHalf()
        input(f"{colours.FORE_BRIGHT_BLUE}\nHit enter for your turn.{colours.ENDC}")
        playerScores = self.Player.Play(self.Deck, 0, self.Betting, self.PlayerCash, betInt)
        input(f"{colours.FORE_BRIGHT_BLUE}\nHit enter for the dealer's turn.{colours.ENDC}")
        dealerScore = self.Dealer.Play(self.Deck)
        # Return every card to the discard pile before settling bets.
        for hand in self.Player.Hand:
            hand.clear(self.Deck)
        # BUGFIX: drop every extra split hand. The old loop (pop(1) while
        # also advancing i) left one hand behind whenever three or more
        # hands were in play, leaking it into the next round.
        del self.Player.Hand[1:]
        self.Dealer.Hand.clear(self.Deck)
        # Settle each player hand independently against the dealer's result.
        for playerScore in playerScores:
            if (dealerScore.Value == 21 and dealerScore.CardCount == 2):
                # Dealer blackjack beats everything.
                print(f"{colours.FORE_BRIGHT_CYAN}\nThe dealer got blackjack...{colours.ENDC}")
                print(f"{colours.FORE_BRIGHT_CYAN}Your score = {colours.ENDC}" + str(playerScore.Value))
                print(f"{colours.FORE_BRIGHT_CYAN}Dealer's score = {colours.ENDC}" + str(dealerScore.Value))
                if self.Betting:
                    if not playerScore.DoubleDown:
                        self.PlayerCash -= betInt
                        print(f"{colours.FORE_BRIGHT_CYAN}You lost your bet of {colours.ENDC}" + str(betInt) + f"{colours.FORE_BRIGHT_CYAN}.{colours.ENDC}")
                    else:
                        self.PlayerCash -= betInt * 2
                        print(f"{colours.FORE_BRIGHT_CYAN}You lost double your bet of {colours.ENDC}" + str(betInt) + f"{colours.FORE_BRIGHT_CYAN}...{colours.ENDC}")
                    print(f"{colours.FORE_BRIGHT_CYAN}Cash remaining = {colours.ENDC}" + str(self.PlayerCash))
            elif (playerScore.Value == 21 and playerScore.CardCount == 2):
                print(f"{colours.FORE_BRIGHT_CYAN}\nYou got blackjack!{colours.ENDC}")
                print(f"{colours.FORE_BRIGHT_CYAN}Your score = {colours.ENDC}" + str(playerScore.Value))
                print(f"{colours.FORE_BRIGHT_CYAN}Dealer's score = {colours.ENDC}" + str(dealerScore.Value))
                if self.Betting:
                    self.PlayerCash += (betInt * 2)
                    print(f"{colours.FORE_BRIGHT_CYAN}You won double your bet of {colours.ENDC}" + str(betInt) + f"{colours.FORE_BRIGHT_CYAN}!{colours.ENDC}")
                    print(f"{colours.FORE_BRIGHT_CYAN}Cash remaining = {colours.ENDC}" + str(self.PlayerCash) + f"{colours.ENDC}")
            elif ((playerScore.Value < 22 and dealerScore.Value > 21) or (playerScore.Value < 22 and playerScore.Value > dealerScore.Value)):
                # Player stands under 22 and either dealer busted or player outscored.
                print(f"{colours.FORE_BRIGHT_CYAN}\nYou beat the dealer!{colours.ENDC}")
                print(f"{colours.FORE_BRIGHT_CYAN}Your score = {colours.ENDC}" + str(playerScore.Value) + f"{colours.ENDC}")
                print(f"{colours.FORE_BRIGHT_CYAN}Dealer's score = {colours.ENDC}" + str(dealerScore.Value) + f"{colours.ENDC}")
                if self.Betting:
                    if not playerScore.DoubleDown:
                        self.PlayerCash += betInt
                        print(f"{colours.FORE_BRIGHT_CYAN}You won your bet of {colours.ENDC}" + str(betInt) + f"{colours.FORE_BRIGHT_CYAN}.{colours.ENDC}")
                    else:
                        self.PlayerCash += betInt * 2
                        print(f"{colours.FORE_BRIGHT_CYAN}You won double your bet of {colours.ENDC}" + str(betInt) + f"{colours.FORE_BRIGHT_CYAN}.{colours.ENDC}")
                    print(f"{colours.FORE_BRIGHT_CYAN}Cash remaining = {colours.ENDC}" + str(self.PlayerCash) + f"{colours.ENDC}")
            elif (playerScore.Value < 22 and playerScore.Value == dealerScore.Value):
                # Push: tie at 21 or below, stake returned.
                print(f"{colours.FORE_BRIGHT_CYAN}\nYou pushed with the dealer.{colours.ENDC}")
                print(f"{colours.FORE_BRIGHT_CYAN}Your score = {colours.ENDC}" + str(playerScore.Value))
                print(f"{colours.FORE_BRIGHT_CYAN}Dealer's score = {colours.ENDC}" + str(dealerScore.Value))
                if self.Betting:
                    print(f"{colours.FORE_BRIGHT_CYAN}Your money was returned.{colours.ENDC}")
                    print(f"{colours.FORE_BRIGHT_CYAN}Cash remaining = {colours.ENDC}" + str(self.PlayerCash) + f"{colours.ENDC}")
            else:
                # Player busted or was outscored.
                print(f"{colours.FORE_BRIGHT_CYAN}\nThe dealer beat you...{colours.ENDC}")
                print(f"{colours.FORE_BRIGHT_CYAN}Your score = {colours.ENDC}" + str(playerScore.Value))
                print(f"{colours.FORE_BRIGHT_CYAN}Dealer's score = {colours.ENDC}" + str(dealerScore.Value))
                if self.Betting:
                    if not playerScore.DoubleDown:
                        self.PlayerCash -= betInt
                        print(f"{colours.FORE_BRIGHT_CYAN}You lost your bet of {colours.ENDC}" + str(betInt) + f"{colours.FORE_BRIGHT_CYAN}.{colours.ENDC}")
                    else:
                        self.PlayerCash -= betInt * 2
                        print(f"{colours.FORE_BRIGHT_CYAN}You lost double your bet of {colours.ENDC}" + str(betInt) + f"{colours.FORE_BRIGHT_CYAN}...{colours.ENDC}")
                    print(f"{colours.FORE_BRIGHT_CYAN}Cash remaining = {colours.ENDC}" + str(self.PlayerCash))
        if self.Betting:
            if (self.PlayerCash <= 0):
                print(f"{colours.FORE_BRIGHT_ORANGE}\nYou've gone broke. That's game over.\n{colours.ENDC}")
                return 0
        done = False
        again = 0
        while not done:
            response = input(f"{colours.FORE_BRIGHT_BLUE}\nPlay again? {colours.ENDC}(Y){colours.FORE_BRIGHT_BLUE}es or {colours.ENDC}(N){colours.FORE_BRIGHT_BLUE}o? - {colours.ENDC}")
            for char in response:
                if (char == "Y" or char == "y"):
                    again = 1
                    done = True
                    break
                elif (char == "N" or char == "n"):
                    again = 2
                    done = True
                    break
            if (again == 0):
                print(f"{colours.FORE_BRIGHT_ORANGE}\nInvalid input. Please input the letter in brackets for the option you want.{colours.ENDC}")
            elif (again == 1):
                print(f"{colours.FORE_BRIGHT_GREEN}\nAwesome! Let's deal them out again!{colours.ENDC}")
                self.PlayRound()
                done = True
            else:
                print(f"{colours.FORE_BRIGHT_GREEN}\nThank you for playing!\n{colours.ENDC}")
                if self.Betting:
                    # Report the session's net result against the buy-in.
                    if self.PlayerCash > self.InitialPlayerCash:
                        print(f"{colours.FORE_BRIGHT_BLUE}You won {colours.ENDC}" + str(self.PlayerCash - self.InitialPlayerCash) + f"{colours.FORE_BRIGHT_BLUE} today!\n{colours.ENDC}")
                    elif self.PlayerCash < self.InitialPlayerCash:
                        print(f"{colours.FORE_BRIGHT_BLUE}You lost {colours.ENDC}" + str(self.InitialPlayerCash - self.PlayerCash) + f"{colours.FORE_BRIGHT_BLUE} today.\n{colours.ENDC}")
                    else:
                        print(f"{colours.FORE_BRIGHT_BLUE}You broke even today.\n{colours.ENDC}")
                done = True
        return 0
if __name__ == '__main__':
    # Load the card art and values from the XML file that ships with the
    # game, then run the interactive game loop.
    Cards.Load('Cards.xml')
    game = BlackPack()
    game.PlayGame()
|
<filename>plugin/src/com/microsoft/alm/plugin/external/reactive/RdIdeaLoggerFactory.java
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See License.txt in the project root.
package com.microsoft.alm.plugin.external.reactive;
import com.jetbrains.rd.util.ILoggerFactory;
import com.jetbrains.rd.util.LogLevel;
import com.jetbrains.rd.util.Logger;
import com.jetbrains.rd.util.Statics;
import kotlin.jvm.JvmClassMappingKt;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * An rd {@link ILoggerFactory} that forwards rd framework log output to
 * IntelliJ's {@code com.intellij.openapi.diagnostic.Logger}.
 */
public class RdIdeaLoggerFactory implements ILoggerFactory {
    private static boolean initialized = false;
    private static final RdIdeaLoggerFactory INSTANCE = new RdIdeaLoggerFactory();

    /** Registers this factory with rd's statics exactly once; later calls are no-ops. */
    public synchronized static void initialize() {
        if (!initialized) {
            Statics.Companion.of(JvmClassMappingKt.getKotlinClass(ILoggerFactory.class)).push(INSTANCE);
            initialized = true;
        }
    }

    /** Renders a possibly-null message object as text. */
    @NotNull
    private static String prepareMessage(@Nullable Object message) {
        return message == null ? "null" : message.toString();
    }

    /** Renders a message plus an optional throwable as a single line of text. */
    @NotNull
    private static String prepareMessage(@Nullable Object message, @Nullable Throwable throwable) {
        if (throwable == null) {
            return prepareMessage(message);
        }
        return message == null
                ? throwable.toString()
                : String.format("%s: %s", message.toString(), throwable.toString());
    }

    @NotNull
    @Override
    public Logger getLogger(@NotNull String category) {
        final com.intellij.openapi.diagnostic.Logger delegate =
                com.intellij.openapi.diagnostic.Logger.getInstance(category);
        return new Logger() {
            @Override
            public void log(@NotNull LogLevel level, @Nullable Object message, @Nullable Throwable throwable) {
                if (level == LogLevel.Trace) {
                    // IntelliJ's trace() has no throwable overload, so fold it into the text.
                    delegate.trace(prepareMessage(message, throwable));
                } else if (level == LogLevel.Debug) {
                    delegate.debug(prepareMessage(message), throwable);
                } else if (level == LogLevel.Info) {
                    delegate.info(prepareMessage(message), throwable);
                } else if (level == LogLevel.Warn) {
                    delegate.warn(prepareMessage(message), throwable);
                } else {
                    // Error, Fatal and any unknown level are reported as errors.
                    delegate.error(prepareMessage(message), throwable);
                }
            }

            @Override
            public boolean isEnabled(@NotNull LogLevel level) {
                if (level == LogLevel.Trace) {
                    return delegate.isTraceEnabled();
                }
                if (level == LogLevel.Debug) {
                    return delegate.isDebugEnabled();
                }
                // Info and above are always considered enabled.
                return true;
            }
        };
    }
}
|
#!/usr/bin/env bash
# prune_daily.sh
# Prunes a directory of backups down to a manageable size: every
# first-of-the-month backup is always kept, plus the most recent $RETAIN
# backups no matter how old the latest backup is.
# Can be run attended or unattended (crontab), e.g.:
#   30 0 * * * /path/to/website.com_prune_daily.sh > /dev/null 2>&1
# Ref: https://unix.stackexchange.com/a/678296/501914
# !!! IMPORTANT: Read through all comments and adjust the script before use. !!!

# ===========================
# Variables — modify these
# ===========================
# Domain name without the TLD:
BACKUPDOMAIN='website'
# User and home dir name:
BACKUPUSER='user'
# Name of the root of your backup dir:
BACKUPDIRNAME="backups"
# Number of most-recent backups to retain (backups dated the 01st are always kept):
RETAIN=15
# Shell glob (with no wildcards) for the backup directory:
BASEPATH="/home/$BACKUPUSER/$BACKUPDIRNAME/$BACKUPDOMAIN.com/"
# Extended regex matching one backup dir; group 1 captures the timestamp:
BASEREGEX="/home/$BACKUPUSER/$BACKUPDIRNAME/$BACKUPDOMAIN\.com/$BACKUPDOMAIN\.com-([0-9]{8}-[0-9]{6}Z)-$BACKUPUSER"
# printf spec that turns a captured timestamp back into a full directory name:
PRINTFSPEC="/home/$BACKUPUSER/$BACKUPDIRNAME/$BACKUPDOMAIN.com/$BACKUPDOMAIN.com-%q-$BACKUPUSER"

# ===========================
# Script
# ===========================
# STEP: List every backup dir directly under the base dir...
find "$BASEPATH" -maxdepth 1 -mindepth 1 \
    -type d \
    -regextype posix-extended \
    -regex "${BASEREGEX}" |
# STEP: ...reduce each path to its timestamp...
sed -Ee "s~^${BASEREGEX}$~\1~" |
# STEP: ...drop first-of-the-month backups (always kept)...
grep -Ev '^[0-9]{6}01-' |
# STEP: ...sort newest first; everything past the newest $RETAIN is a candidate.
sort -r |
tail -n +$((RETAIN + 1)) |
# FIX: read -r keeps backslashes in names literal (plain `read` mangles them).
while IFS= read -r line
do
    # TESTING: first a dry run — this only prints the items that would be deleted.
    printf "This would remove \"${PRINTFSPEC}\"\n" "$line"
    # AFTER TESTING: if happy with the results, comment the printf above and
    # uncomment the one below, then re-run to actually delete.
    # printf "rm -rf \"${PRINTFSPEC}\"\n" "$line" | sh
done

### !!! IMPORTANT !!! ###
# STEP: Unset all script variables (matters when the script is sourced).
unset BACKUPDOMAIN BACKUPUSER BACKUPDIRNAME RETAIN BASEPATH BASEREGEX PRINTFSPEC
# Done.
|
#!/bin/bash

# Always exit on errors.
set -e

# Invalidate the generated config on TERM if configured to do so.
trap cleanup TERM

# Overwrite the generated Multus config with deliberately invalid JSON so the
# kubelet stops scheduling pods through a Multus instance that is going away.
function cleanup {
  if [ "$MULTUS_CLEANUP_CONFIG_ON_EXIT" == "true" ]; then
    CONF=$(cat <<EOF
{Multus configuration intentionally invalidated to prevent pods from being scheduled.}
EOF
)
    # FIX: quote the payload and the target path; the original unquoted
    # `echo $CONF > $CNI_CONF_DIR/...` word-splits the content and breaks on
    # paths containing spaces.
    echo "$CONF" > "$CNI_CONF_DIR/00-multus.conf"
    log "Multus configuration intentionally invalidated to prevent pods from being scheduled."
  fi
}
# Set our known directories.
CNI_CONF_DIR="/host/etc/cni/net.d"
CNI_BIN_DIR="/host/opt/cni/bin"
ADDITIONAL_BIN_DIR=""
MULTUS_CONF_FILE="/usr/src/multus-cni/images/70-multus.conf"
MULTUS_AUTOCONF_DIR="/host/etc/cni/net.d"
MULTUS_BIN_FILE="/usr/src/multus-cni/bin/multus"
MULTUS_KUBECONFIG_FILE_HOST="/etc/cni/net.d/multus.d/multus.kubeconfig"
# Default feature flags / options; each can be overridden by a CLI flag parsed below.
MULTUS_NAMESPACE_ISOLATION=false
MULTUS_LOG_LEVEL=""
MULTUS_LOG_FILE=""
OVERRIDE_NETWORK_NAME=false
MULTUS_CLEANUP_CONFIG_ON_EXIT=false
RESTART_CRIO=false
# Guard so CRIO is restarted at most once per container lifetime.
CRIO_RESTARTED_ONCE=false
RENAME_SOURCE_CONFIG_FILE=false
# Give help text for parameters.
# Note: the values shown for each flag are the current variable values at the
# time usage() is called (i.e. the defaults, unless flags were already parsed).
function usage()
{
    echo -e "This is an entrypoint script for Multus CNI to overlay its binary and "
    echo -e "configuration into locations in a filesystem. The configuration & binary file "
    echo -e "will be copied to the corresponding configuration directory. When "
    echo -e "'--multus-conf-file=auto' is used, 00-multus.conf will be automatically "
    echo -e "generated from the CNI configuration file of the master plugin (the first file "
    echo -e "in lexicographical order in cni-conf-dir)."
    echo -e ""
    echo -e "./entrypoint.sh"
    echo -e "\t-h --help"
    echo -e "\t--cni-conf-dir=$CNI_CONF_DIR"
    echo -e "\t--cni-bin-dir=$CNI_BIN_DIR"
    echo -e "\t--cni-version=<cniVersion (e.g. 0.3.1)>"
    echo -e "\t--multus-conf-file=$MULTUS_CONF_FILE"
    echo -e "\t--multus-bin-file=$MULTUS_BIN_FILE"
    echo -e "\t--multus-kubeconfig-file-host=$MULTUS_KUBECONFIG_FILE_HOST"
    echo -e "\t--namespace-isolation=$MULTUS_NAMESPACE_ISOLATION"
    echo -e "\t--multus-autoconfig-dir=$MULTUS_AUTOCONF_DIR (used only with --multus-conf-file=auto)"
    echo -e "\t--multus-log-level=$MULTUS_LOG_LEVEL (empty by default, used only with --multus-conf-file=auto)"
    echo -e "\t--multus-log-file=$MULTUS_LOG_FILE (empty by default, used only with --multus-conf-file=auto)"
    echo -e "\t--override-network-name=false (used only with --multus-conf-file=auto)"
    echo -e "\t--cleanup-config-on-exit=false (used only with --multus-conf-file=auto)"
    echo -e "\t--rename-conf-file=false (used only with --multus-conf-file=auto)"
    echo -e "\t--additional-bin-dir=$ADDITIONAL_BIN_DIR (adds binDir option to configuration, used only with --multus-conf-file=auto)"
    echo -e "\t--restart-crio=false (restarts CRIO after config file is generated)"
}
# Emit a message prefixed with an ISO-8601 timestamp.
log()
{
    printf '%s %s\n' "$(date --iso-8601=seconds)" "${1}"
}

# Emit an error-level log line.
error()
{
    log "ERR: {$1}"
}

# Emit a warning-level log line.
warn()
{
    log "WARN: {$1}"
}
# Parse parameters given as arguments to this script.
# Flags are expected in --name=value form; PARAM/VALUE are the parts before
# and after the first '='.
while [ "$1" != "" ]; do
    PARAM=`echo $1 | awk -F= '{print $1}'`
    VALUE=`echo $1 | awk -F= '{print $2}'`
    case $PARAM in
        -h | --help)
            usage
            exit
            ;;
        --cni-version)
            CNI_VERSION=$VALUE
            ;;
        --cni-conf-dir)
            CNI_CONF_DIR=$VALUE
            ;;
        --cni-bin-dir)
            CNI_BIN_DIR=$VALUE
            ;;
        --multus-conf-file)
            MULTUS_CONF_FILE=$VALUE
            ;;
        --multus-bin-file)
            MULTUS_BIN_FILE=$VALUE
            ;;
        --multus-kubeconfig-file-host)
            MULTUS_KUBECONFIG_FILE_HOST=$VALUE
            ;;
        --namespace-isolation)
            MULTUS_NAMESPACE_ISOLATION=$VALUE
            ;;
        --multus-log-level)
            MULTUS_LOG_LEVEL=$VALUE
            ;;
        --multus-log-file)
            MULTUS_LOG_FILE=$VALUE
            ;;
        --multus-autoconfig-dir)
            MULTUS_AUTOCONF_DIR=$VALUE
            ;;
        --override-network-name)
            OVERRIDE_NETWORK_NAME=$VALUE
            ;;
        --cleanup-config-on-exit)
            MULTUS_CLEANUP_CONFIG_ON_EXIT=$VALUE
            ;;
        --restart-crio)
            RESTART_CRIO=$VALUE
            ;;
        --rename-conf-file)
            RENAME_SOURCE_CONFIG_FILE=$VALUE
            ;;
        --additional-bin-dir)
            ADDITIONAL_BIN_DIR=$VALUE
            ;;
        *)
            # Unknown flags are logged but not fatal.
            warn "unknown parameter \"$PARAM\""
            ;;
    esac
    shift
done
# Create array of known locations
declare -a arr=($CNI_CONF_DIR $CNI_BIN_DIR $MULTUS_BIN_FILE)
if [ "$MULTUS_CONF_FILE" != "auto" ]; then
    arr+=($MULTUS_CONF_FILE)
fi

# Loop through and verify each location each.
for i in "${arr[@]}"
do
    if [ ! -e "$i" ]; then
        warn "Location $i does not exist"
        exit 1;
    fi
done

# Copy files into place and atomically move into final binary name
# (copy to a temp name, then rename, so a concurrent exec never sees a
# half-written binary).
cp -f $MULTUS_BIN_FILE $CNI_BIN_DIR/_multus
mv -f $CNI_BIN_DIR/_multus $CNI_BIN_DIR/multus
if [ "$MULTUS_CONF_FILE" != "auto" ]; then
    cp -f $MULTUS_CONF_FILE $CNI_CONF_DIR
fi

# Make a multus.d directory (for our kubeconfig)
mkdir -p $CNI_CONF_DIR/multus.d
MULTUS_KUBECONFIG=$CNI_CONF_DIR/multus.d/multus.kubeconfig

# ------------------------------- Generate a "kube-config"
# Inspired by: https://tinyurl.com/y7r2knme
SERVICE_ACCOUNT_PATH=/var/run/secrets/kubernetes.io/serviceaccount
KUBE_CA_FILE=${KUBE_CA_FILE:-$SERVICE_ACCOUNT_PATH/ca.crt}
# NOTE(review): this cat runs before the -f existence check below; with
# `set -e` a missing token file would abort the script here — confirm intended.
SERVICEACCOUNT_TOKEN=$(cat $SERVICE_ACCOUNT_PATH/token)
SKIP_TLS_VERIFY=${SKIP_TLS_VERIFY:-false}

# Check if we're running as a k8s pod.
if [ -f "$SERVICE_ACCOUNT_PATH/token" ]; then
    # We're running as a k8d pod - expect some variables.
    if [ -z ${KUBERNETES_SERVICE_HOST} ]; then
        error "KUBERNETES_SERVICE_HOST not set"; exit 1;
    fi
    if [ -z ${KUBERNETES_SERVICE_PORT} ]; then
        error "KUBERNETES_SERVICE_PORT not set"; exit 1;
    fi
    if [ "$SKIP_TLS_VERIFY" == "true" ]; then
        TLS_CFG="insecure-skip-tls-verify: true"
    elif [ -f "$KUBE_CA_FILE" ]; then
        TLS_CFG="certificate-authority-data: $(cat $KUBE_CA_FILE | base64 | tr -d '\n')"
    fi
    # Write a kubeconfig file for the CNI plugin. Do this
    # to skip TLS verification for now. We should eventually support
    # writing more complete kubeconfig files. This is only used
    # if the provided CNI network config references it.
    touch $MULTUS_KUBECONFIG
    chmod ${KUBECONFIG_MODE:-600} $MULTUS_KUBECONFIG
    cat > $MULTUS_KUBECONFIG <<EOF
# Kubeconfig file for Multus CNI plugin.
apiVersion: v1
kind: Config
clusters:
- name: local
  cluster:
    server: ${KUBERNETES_SERVICE_PROTOCOL:-https}://${KUBERNETES_SERVICE_HOST}:${KUBERNETES_SERVICE_PORT}
    $TLS_CFG
users:
- name: multus
  user:
    token: "${SERVICEACCOUNT_TOKEN}"
contexts:
- name: multus-context
  context:
    cluster: local
    user: multus
current-context: multus-context
EOF
else
    warn "Doesn't look like we're running in a kubernetes environment (no serviceaccount token)"
fi
# ---------------------- end Generate a "kube-config".
# ------------------------------- Generate "00-multus.conf"
# When --multus-conf-file=auto, derive 00-multus.conf from the first CNI config
# found in $MULTUS_AUTOCONF_DIR (the "master plugin"), retrying for up to 600s.
function generateMultusConf {
  if [ "$MULTUS_CONF_FILE" == "auto" ]; then
    log "Generating Multus configuration file using files in $MULTUS_AUTOCONF_DIR..."
    found_master=false
    tries=0
    while [ $found_master == false ]; do
      # First *.conf/*.conflist in lexicographical order, excluding our own output.
      MASTER_PLUGIN="$(ls $MULTUS_AUTOCONF_DIR | grep -E '\.conf(list)?$' | grep -Ev '00-multus\.conf' | head -1)"
      if [ "$MASTER_PLUGIN" == "" ]; then
        if [ $tries -lt 600 ]; then
          # Log only every 5th attempt to keep the output quiet.
          if ! (($tries % 5)); then
            log "Attemping to find master plugin configuration, attempt $tries"
          fi
          let "tries+=1"
          # See if the Multus configuration file exists, if it does then clean it up.
          if [ "$MULTUS_CLEANUP_CONFIG_ON_EXIT" == true ] && [ -f "$CNI_CONF_DIR/00-multus.conf" ]; then
            # But first, check if it has the invalidated configuration in it (otherwise we keep doing this over and over.)
            if ! grep -q "invalidated" $CNI_CONF_DIR/00-multus.conf; then
              cleanup
            fi
          fi
          sleep 1;
        else
          error "Multus could not be configured: no master plugin was found."
          exit 1;
        fi
      else
        found_master=true
        # Build the optional JSON fragments for the generated config.
        ISOLATION_STRING=""
        if [ "$MULTUS_NAMESPACE_ISOLATION" == true ]; then
          ISOLATION_STRING="\"namespaceIsolation\": true,"
        fi
        LOG_LEVEL_STRING=""
        if [ ! -z "${MULTUS_LOG_LEVEL// }" ]; then
          # Only a fixed set of log levels is accepted; anything else is fatal.
          case "$MULTUS_LOG_LEVEL" in
            debug)
            ;;
            error)
            ;;
            panic)
            ;;
            verbose)
            ;;
            *)
              error "Log levels should be one of: debug/verbose/error/panic, did not understand $MULTUS_LOG_LEVEL"
              usage
              exit 1
          esac
          LOG_LEVEL_STRING="\"logLevel\": \"$MULTUS_LOG_LEVEL\","
        fi
        LOG_FILE_STRING=""
        if [ ! -z "${MULTUS_LOG_FILE// }" ]; then
          LOG_FILE_STRING="\"logFile\": \"$MULTUS_LOG_FILE\","
        fi
        CNI_VERSION_STRING=""
        if [ ! -z "${CNI_VERSION// }" ]; then
          CNI_VERSION_STRING="\"cniVersion\": \"$CNI_VERSION\","
        fi
        ADDITIONAL_BIN_DIR_STRING=""
        if [ ! -z "${ADDITIONAL_BIN_DIR// }" ]; then
          ADDITIONAL_BIN_DIR_STRING="\"binDir\": \"$ADDITIONAL_BIN_DIR\","
        fi
        # Either reuse the master plugin's network name or use a fixed one.
        if [ "$OVERRIDE_NETWORK_NAME" == "true" ]; then
          MASTER_PLUGIN_NET_NAME="$(cat $MULTUS_AUTOCONF_DIR/$MASTER_PLUGIN | \
              python -c 'import json,sys;print json.load(sys.stdin)["name"]')"
        else
          MASTER_PLUGIN_NET_NAME="multus-cni-network"
        fi
        MASTER_PLUGIN_LOCATION=$MULTUS_AUTOCONF_DIR/$MASTER_PLUGIN
        MASTER_PLUGIN_JSON="$(cat $MASTER_PLUGIN_LOCATION)"
        log "Using $MASTER_PLUGIN_LOCATION as a source to generate the Multus configuration"
        # The master plugin config is embedded verbatim as the sole delegate.
        CONF=$(cat <<-EOF
          {
            $CNI_VERSION_STRING
            "name": "$MASTER_PLUGIN_NET_NAME",
            "type": "multus",
            $ISOLATION_STRING
            $LOG_LEVEL_STRING
            $LOG_FILE_STRING
            $ADDITIONAL_BIN_DIR_STRING
            "kubeconfig": "$MULTUS_KUBECONFIG_FILE_HOST",
            "delegates": [
              $MASTER_PLUGIN_JSON
            ]
          }
EOF
        )
        echo $CONF > $CNI_CONF_DIR/00-multus.conf
        log "Config file created @ $CNI_CONF_DIR/00-multus.conf"
        echo $CONF
        # If we're not performing the cleanup on exit, we can safely rename the config file.
        if [ "$RENAME_SOURCE_CONFIG_FILE" == true ]; then
          mv ${MULTUS_AUTOCONF_DIR}/${MASTER_PLUGIN} ${MULTUS_AUTOCONF_DIR}/${MASTER_PLUGIN}.old
          log "Original master file moved to ${MULTUS_AUTOCONF_DIR}/${MASTER_PLUGIN}.old"
        fi
        if [ "$RESTART_CRIO" == true ]; then
          # Restart CRIO only once.
          if [ "$CRIO_RESTARTED_ONCE" == false ]; then
            log "Restarting crio"
            systemctl restart crio
            CRIO_RESTARTED_ONCE=true
          fi
        fi
      fi
    done
  fi
}
generateMultusConf
# ---------------------- end Generate "00-multus.conf".

# Enter either sleep loop, or watch loop...
# NOTE(review): MASTER_PLUGIN_LOCATION is only set inside generateMultusConf
# when --multus-conf-file=auto; with cleanup enabled but a static conf file,
# the -f test below sees an empty path — confirm that combination is unsupported.
if [ "$MULTUS_CLEANUP_CONFIG_ON_EXIT" == true ]; then
    log "Entering watch loop..."
    while true; do
        # Check and see if the original master plugin configuration exists...
        if [ ! -f "$MASTER_PLUGIN_LOCATION" ]; then
            # Master config vanished: invalidate our config, then regenerate
            # from whatever master plugin appears next.
            log "Master plugin @ $MASTER_PLUGIN_LOCATION has been deleted. Performing cleanup..."
            cleanup
            generateMultusConf
            log "Continuing watch loop after configuration regeneration..."
        fi
        sleep 1
    done
else
    log "Entering sleep (success)..."
    sleep infinity
fi
|
// Collapse the responsive navbar after any menu link inside it is clicked.
$('.navbar-collapse a').click(function(){
    $(".navbar-collapse").collapse('hide');
});
// Also collapse an open navbar when clicking anywhere outside the toggler button.
$(document).click(function (event) {
    let clickover = $(event.target);
    let $navbar = $('.navbar-collapse');
    let _opened = $navbar.hasClass('show');
    if (_opened === true && !clickover.hasClass('navbar-toggler')) {
        $navbar.collapse('hide');
    }
});
// Auto-hide the navbar when scrolling down and reveal it when scrolling up.
document.addEventListener("DOMContentLoaded", function(){
    // FIX: was an implicit global (no declaration).
    const el_autohide = document.querySelector('.autohide');
    if(el_autohide){
        // FIX: this click handler used to be (re)bound inside the scroll
        // handler on every scroll-down event, accumulating duplicate handlers.
        // Bind it exactly once here instead.
        $('.navbar-toggler').click(function(){
            window.scroll(0, 0);
        });
        let last_scroll_top = 0;
        window.addEventListener('scroll', function(){
            let scroll_top = window.scrollY;
            if(scroll_top >= last_scroll_top) {
                // Scrolling down: hide the bar and collapse any open menu.
                el_autohide.classList.remove('scrolled-up');
                el_autohide.classList.add('scrolled-down');
                $(".navbar-collapse").collapse('hide');
            }
            else {
                // Scrolling up: bring the bar back.
                el_autohide.classList.remove('scrolled-down');
                el_autohide.classList.add('scrolled-up');
            }
            last_scroll_top = scroll_top;
        });
    }
});
|
import uniq from 'lodash/uniq'
import groupBy from 'lodash/groupBy'
import flatten from 'lodash/flatten'
import filter from 'lodash/filter'
// Returns the distinct values that occur more than once in `array`,
// using lodash's identity grouping (values are compared by their string key).
export function findDuplicates(array) {
  const grouped = groupBy(array)
  const repeatedGroups = filter(grouped, group => group.length > 1)
  return uniq(flatten(repeatedGroups))
}
|
# Run the pre-generated job list for this lustre experiment, at most 5 jobs at a time.
parallel --jobs 5 < ./results/exp_lustre/run-0/lustre_8n_6t_6d_1000f_617m_5i/jobs/jobs_n0.txt
|
<filename>src/main/scala/esp/Annotations.scala
// Copyright 2018-2019 IBM
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package esp
import firrtl.annotations.{ModuleName, SingleTargetAnnotation}
import com.thoughtworks.xstream.XStream
import com.thoughtworks.xstream.io.{HierarchicalStreamReader, HierarchicalStreamWriter}
import com.thoughtworks.xstream.io.xml.{DomDriver, XmlFriendlyNameCoder}
import com.thoughtworks.xstream.converters.{Converter, MarshallingContext, UnmarshallingContext}
/** XStream converter that writes a [[Parameter]] as a single XML element with
  * `name`, optional `desc`, and optional `value` attributes.
  */
class ParameterConverter extends Converter {
  override def marshal(source: scala.Any, writer: HierarchicalStreamWriter, context: MarshallingContext): Unit = {
    val c = source.asInstanceOf[Parameter]
    writer.addAttribute("name", c.name)
    if (c.description.isDefined) { writer.addAttribute("desc", c.description.get) }
    if (c.value.isDefined) { writer.addAttribute("value", c.value.get.toString) }
  }
  /** Deserialization is not supported; this converter is write-only. */
  override def unmarshal(reader: HierarchicalStreamReader, context: UnmarshallingContext): AnyRef = {
    ??? /* This is currently unimplemented */
  }
  // NOTE(review): the argument order looks reversed — the usual XStream idiom is
  // classOf[Parameter].isAssignableFrom(c) ("c is Parameter or a subclass").
  // As written this also claims superclasses of Parameter (e.g. Object); it
  // still works for exact Parameter instances — confirm intent before changing.
  override def canConvert(c: Class[_]): Boolean = c.isAssignableFrom(classOf[Parameter])
}
/** Encodes ESP configuration and can serialize to SLD-compatible XML.
  * @param target the module this configuration applies to
  * @param config the ESP accelerator configuration
  * @param dir either a (left) absolute path or (right) a path relative to a [[TargetDirAnnotation]]
  */
case class EspConfigAnnotation(target: ModuleName, config: Config, dir: Either[String, String] = Right(".."))
    extends SingleTargetAnnotation[ModuleName] {

  def duplicate(targetx: ModuleName): EspConfigAnnotation = this.copy(target=targetx)

  /** Renders this annotation as SLD XML. A fresh XStream instance is built per
    * call and configured so field/element names match the SLD schema
    * (e.g. `memoryFootprintMiB` -> `data_size`); `target` and `dir` are
    * bookkeeping only and are omitted from the output.
    */
  def toXML: String = {
    val xs = new XStream(new DomDriver("UTF-8", new XmlFriendlyNameCoder("_", "_")))
    // Parameters are rendered attribute-style by the custom converter above.
    xs.registerConverter(new ParameterConverter)
    // xs.aliasSystemAttribute(null, "class")
    xs.alias("sld", this.getClass)
    xs.aliasField("accelerator", this.getClass, "config")
    xs.useAttributeFor(config.getClass, "name")
    xs.useAttributeFor(config.getClass, "description")
    xs.aliasField("desc", config.getClass, "description")
    xs.useAttributeFor(config.getClass, "memoryFootprintMiB")
    xs.aliasField("data_size", config.getClass, "memoryFootprintMiB")
    xs.useAttributeFor(config.getClass, "deviceId")
    xs.aliasField("device_id", config.getClass, "deviceId")
    // The parameter list is emitted as repeated <param> children, not a wrapper element.
    xs.addImplicitArray(config.getClass, "param")
    xs.alias("param", classOf[Parameter])
    xs.useAttributeFor(classOf[Parameter], "name")
    xs.aliasField("desc", classOf[Parameter], "description")
    xs.useAttributeFor(classOf[Parameter], "description")
    xs.omitField(classOf[Parameter], "readOnly")
    xs.omitField(config.getClass, "paramMap")
    xs.omitField(this.getClass, "target")
    xs.omitField(this.getClass, "dir")
    xs.toXML(this)
  }
}
|
#! /bin/sh
### BEGIN INIT INFO
# Provides: Ultra96 power button monitoring script
# Required-Start:
# Required-Stop:
# Default-Start:S
# Default-Stop:
# Short-Description: Monitors the power button and powers off the board
# Description: This script runs a utility that monitors GPIO pin 364 and
# powers off the Ultra96 board when the button is pushed.
### END INIT INFO
DESC="ultra96-power-button.sh powers off the board when the power button is pushed"
PWRUTIL="/sbin/ultra96-power-button-check"
PWRUTIL_CMD="/sbin/poweroff"
PWRUTIL_BASE=$(grep -il zynqmp_gpio /sys/class/gpio/gpiochip*/label | grep -o "[0-9]*")
PWRUTIL_PIN=$[PWRUTIL_BASE+26]
PWRUTIL_PID_NAME="ultra96-power-button"
test -x "$PWRUTIL" || exit 0
test -x "$PWRUTIL_CMD" || exit 0
[ ! -z $PWRUTIL_BASE ] || exit 0
PWRUTIL_OPTS="$PWRUTIL_PIN $PWRUTIL_CMD"
case "$1" in
start)
echo -n "Starting Ultra96 Power Button daemon"
start-stop-daemon --start --quiet --background --make-pidfile --pidfile /var/run/$PWRUTIL_PID_NAME.pid --exec $PWRUTIL -- $PWRUTIL_OPTS
echo "."
;;
stop)
echo -n "Stopping Ultra96 Power Button daemon"
start-stop-daemon --stop --quiet --pidfile /var/run/$PWRUTIL_PID_NAME.pid
;;
*)
echo "Usage: /etc/init.d/ultra96-power-button.sh {start|stop}"
exit 1
esac
exit 0
|
/** Prints the squares of the integers 0 through 9, one per line. */
public class MyClass {
    public static void main(String[] args) {
        int n = 0;
        while (n < 10) {
            // Math.pow returns a double, so output reads e.g. "Square of 3 : 9.0".
            System.out.println("Square of " + n + " : " + Math.pow(n, 2));
            n++;
        }
    }
}
|
/**
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.ssp.util.importer.job.csv;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.jasig.ssp.util.importer.job.domain.RawItem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.batch.item.file.transform.LineAggregator;
/**
 * Aggregates a {@link RawItem} into a single delimited (CSV-style) line,
 * emitting values in the configured {@code columnNames} order and quoting
 * each non-blank field with {@code quoteCharacter}.
 */
public class RawItemLineAggregator implements LineAggregator<RawItem> {

    /** The quote constant to use when you wish to suppress all quoting and escaping, they must be the same for support csv standard. */
    private static final char NO_QUOTE_CHARACTER = '\u0000';

    private static final Logger logger = LoggerFactory.getLogger(RawItemLineAggregator.class);

    private String delimiter = DelimitedLineTokenizer.DELIMITER_COMMA;
    private char quoteCharacter = DelimitedLineTokenizer.DEFAULT_QUOTE_CHARACTER;
    private String[] columnNames;

    // Initial capacity of the line buffer; a true constant, so static final.
    private static final int INITIAL_STRING_SIZE = 512;

    public RawItemLineAggregator() {
    }

    /**
     * Serializes one item as a delimited line. Returns the literal string
     * {@code "No Columns"} (and logs an error) when no column names are set.
     */
    @Override
    public String aggregate(RawItem item) {
        Map<String, String> itemMap = item.getRecord();
        if (columnNames == null) {
            if (item.getResource() != null) {
                logger.error("Column Names Not Found. Unable to process table: " + item.getResource().getFilename());
            } else {
                // BUG FIX: this branch previously called item.getResource().getFilename()
                // even though getResource() is known to be null here, which threw a
                // NullPointerException instead of logging.
                logger.error("Column Names Not Found. Unable to process table, no resource found.");
            }
            return "No Columns";
        }
        String[] values = new String[columnNames.length];
        int i = 0;
        for (String key : columnNames) {
            String value = itemMap.get(key);
            // Blank/missing values are represented as null and emitted as empty fields.
            values[i++] = StringUtils.isBlank(value) ? null : value;
        }
        return writeNext(values);
    }

    public void setColumnNames(String[] columnNames) {
        this.columnNames = columnNames;
    }

    /**
     * Public setter for the delimiter.
     * @param delimiter the delimiter to set
     */
    public void setDelimiter(String delimiter) {
        this.delimiter = delimiter;
    }

    /** Sets the quote character; use {@code NO_QUOTE_CHARACTER} semantics via '\u0000' to disable quoting. */
    public void setQuoteCharacter(char quoteCharacter) {
        this.quoteCharacter = quoteCharacter;
    }

    /**
     * Joins the given values with the delimiter, quoting and escaping each
     * non-blank element. A null array yields the empty string; blank elements
     * become empty (unquoted) fields.
     */
    public String writeNext(String[] nextLine) {
        if (nextLine == null)
            return "";
        StringBuilder sb = new StringBuilder(INITIAL_STRING_SIZE);
        for (int i = 0; i < nextLine.length; i++) {
            if (i != 0) {
                sb.append(delimiter);
            }
            String nextElement = nextLine[i];
            if (StringUtils.isBlank(nextElement))
                continue;
            if (quoteCharacter != NO_QUOTE_CHARACTER)
                sb.append(quoteCharacter);
            // Escape embedded quote characters by doubling them (CSV convention).
            sb.append(stringContainsSpecialCharacters(nextElement) ? processLine(nextElement) : nextElement);
            if (quoteCharacter != NO_QUOTE_CHARACTER)
                sb.append(quoteCharacter);
        }
        return sb.toString();
    }

    /** True when the element contains the quote character and needs escaping. */
    private boolean stringContainsSpecialCharacters(String line) {
        return line.indexOf(quoteCharacter) != -1;
    }

    /** Doubles every embedded quote character so the field survives CSV parsing. */
    protected StringBuilder processLine(String nextElement)
    {
        StringBuilder sb = new StringBuilder(INITIAL_STRING_SIZE);
        for (int j = 0; j < nextElement.length(); j++) {
            char nextChar = nextElement.charAt(j);
            if (quoteCharacter != NO_QUOTE_CHARACTER && nextChar == quoteCharacter) {
                sb.append(quoteCharacter).append(nextChar);
            } else {
                sb.append(nextChar);
            }
        }
        return sb;
    }
}
|
package com.example.testng.Annotation.Test;
import org.testng.annotations.Test;
/**
 * Demonstrates the {@code priority} attribute of TestNG's {@code @Test}
 * annotation: methods with lower priority values are scheduled earlier.
 * Default value = 0; larger numbers run later in the execution order.
 */
public class TestAnnotationPriority {
    @Test(priority = 0)
    public void Testpriority() {
        System.out.println("Testpriority");
    }
    @Test(priority = 1)
    public void Testpriority1() {
        System.out.println("Testpriority1");
    }
    @Test(priority = 2)
    public void Testpriority2() {
        System.out.println("Testpriority2");
    }
}
|
#!/bin/sh
# Run "composer install" automatically when COMPOSER_AUTO_INSTALL is set to 1
# (e.g. by the container environment); otherwise do nothing.
if [ -n "${COMPOSER_AUTO_INSTALL}" ] && [ "${COMPOSER_AUTO_INSTALL}" = 1 ]; then
    composer install -a --prefer-dist --no-suggest
fi
|
package com.inner.lovetao.loginregister.di.component;
import com.inner.lovetao.loginregister.di.module.TBLoginActivityModule;
import com.inner.lovetao.loginregister.mvp.contract.TBLoginActivityContract;
import com.inner.lovetao.loginregister.mvp.ui.activity.TBLoginActivity;
import com.jess.arms.di.component.AppComponent;
import com.jess.arms.di.scope.ActivityScope;
import dagger.BindsInstance;
import dagger.Component;
/**
* desc:
* Created by xcz
* on 2019/01/28
*/
@ActivityScope
@Component(modules = TBLoginActivityModule.class, dependencies = AppComponent.class)
public interface TBLoginActivityComponent {
    // Injects the Dagger-provided dependencies into the login activity.
    void inject(TBLoginActivity activity);
    @Component.Builder
    interface Builder {
        // Binds the concrete MVP view instance into the component graph.
        @BindsInstance
        TBLoginActivityComponent.Builder view(TBLoginActivityContract.View view);
        // Supplies the application-level component this one depends on.
        TBLoginActivityComponent.Builder appComponent(AppComponent appComponent);
        TBLoginActivityComponent build();
    }
}
|
import torch
from eerie.nn.functional.bconv import bconv
from eerie.bsplines.utils import B_supp
from eerie.bsplines.b_1d import B
import eerie
def gconv_Rd_G(
        input,
        weights,
        centers,
        size,
        group,
        h_grid,
        n=2,
        s=1.,
        stride=1,
        padding=0,
        b_padding=True,
        b_groups=1,
        bias=None,
        dilation=1):
    """ Performs d-dimensional (lifting) group convolution with B-spline
    convolution kernels.

    Args:
        - input: torch.tensor, shape=[B,Cin,X,Y,...].
        - weights: torch.tensor, shape=[N,Cout,Cin]. Here N is the number of non-zero weights in each kernel.
        - centers: torch.tensor, dtype=int, size=[N,d]. Here d is the spatial dimension of input. NOTE: The centers are relative to the center of the kernel and can thus be negative, but should be smaller than |center[i]|<size//2.
        - size: int. This is the virtual kernel size (determines the cropping).
    Args (optional):
        - n: int. The order of the B-spline.
        - s: float. The scale of each shifted cardinal B-spline.
        - stride: int. not implemented...
        - padding: int. Integer that specifies the amount of spatial padding on each side.
        - b_padding: boolean. Whether or not to automatically correct for cropping due to the size of the cardinal B-splines.
        - b_groups: int. Split the convolution kernels (along the output channel axis) into groups that have their own set of centers (basis functions). If b_groups=Cout, then each output channel is generated with a kernel that has its own basis consisting of n_basis functions.
    Output:
        - output: torch.tensor, size=[B,Cout,X',Y',...]. Here X',Y',... are the cropped/padded spatial dims.

    NOTE(review): centers, size, stride, padding, b_padding and b_groups are
    currently unused by the efficient 1-D implementation — confirm intended.
    """
    # Check if scale is indeed a float (check needs to be done otherwise
    # we get unwanted casting to integers of the centers)
    assert isinstance(s, float), "The specified argument \"s\" should be a float."
    # BUG FIX: the B-spline order was hard-coded to 2, silently ignoring the
    # user-supplied `n`. Since n defaults to 2, default behavior is unchanged.
    output = efficient_1Dspline_conv_R1G(input, weights, group, h_grid, n, dilation)
    # Add bias if provided
    if bias is not None:  # TODO: Not done yet
        pass  # output += bias.reshape([1, Cout] + [1] * group.G.d)
    # Return the output
    return output
def efficient_1Dspline_conv_R1G(input, weights, group, h_grid, order_spline, dilation):
    # Lifting convolution from R^1 to the scale group, one output per scale.
    # Method:
    # For each scale s, perform in parallel:
    # 1. Convolve input with B-spline (of scale s).
    # 2. Convolve with dilated convolutions (with dilation proportional to s).
    # 3. Concatenate responses (put away unneccessary padding).
    # TODO: Need to implement a similar (less-efficient version) for dilations < 1. It can be done by first computing base*weights n' performing usual convolution.
    output_cat = []
    for scale in h_grid.grid:
        # Check if scale is indeed a float (check needs to be done otherwise
        # we get unwanted casting to integers of the centers)
        if type(scale) is not torch.Tensor:
            assert isinstance(scale, float), "The specified argument \"s\" should be a float."
        # The cardinal b-spline # TODO put somewhere else, maybe in the definition of the layer.
        Bfunc = B(order_spline)
        _, xMax, brange = eerie.bsplines.utils.B_supp_grid_2(n=order_spline, s=scale, intsupp=True, device=input.device) # Compute the spline as well as the ammount of padding required.
        # Get values of cardinal spline on grid
        b_spline_on_grid = Bfunc( brange / scale ) # TODO why divided by s? / Can be computed offline
        # Convolve with the cardinal B-spline
        # Calculate convolution parameters ( With cardinal B-spline)
        # ----------------------------------------
        # padding: The required padding equals int(b_spline_on_grid.shape[0] + (scale/2)) and is a function of the scale.
        padding = int(b_spline_on_grid.shape[0] + (scale/2) * (weights.shape[-1] - 4)) # TODO * dilation
        N_b = input.shape[0]
        N_in = input.shape[1]
        # ----------------------------------------
        # Depthwise-style pass: every input channel is smoothed with the same
        # 1-D B-spline kernel by folding channels into the batch dimension.
        output = torch.conv1d(input=input.reshape(N_b * N_in, 1, input.shape[-1]), weight=b_spline_on_grid.view(1, 1, -1), bias=None, stride=1, padding=padding, dilation=1, groups=1)
        output = output.reshape(N_b, N_in, -1)
        # Convolve with weights
        # Calculate convolution parameters ( With weights)
        # ----------------------------------------
        # dilation: depends on scale
        dilation = int(scale) # Equivalent to H.left_action_on_Rd
        # ----------------------------------------
        # 1/det(scale) normalizes the response for the group action on H.
        output = float(1/group.H.det(scale)) * torch.conv1d(input=output, weight=weights, bias=None, stride=1, padding=0, dilation=dilation, groups=1)
        # The spatial dimension of the output is equal to that of the input.
        output_cat.append(output)
    # Concatenate all scales
    # Output shape: [B, Cout, N_scales, X'] — scales become a new axis (dim=2).
    return torch.stack(output_cat, dim=2)
def gconv_G_G(
        input,
        weights,
        x_centers,
        h_centers,
        size,
        group,
        h_grid,
        h_grid_in=None,
        n=2,
        s=1.,
        stride=1,
        padding=0,
        b_padding=True,
        b_groups=1,
        bias=None,
        dilation=1,
        h_crop=False):
    """ Performs d-dimensional group convolution (G -> G) with B-spline
    convolution kernels.

    Args:
        - input: torch.tensor, shape=[B,Cin,X,Y,...].
        - weights: torch.tensor, shape=[N,Cout,Cin]. Here N is the number of non-zero weights in each kernel.
        - centers: torch.tensor, dtype=int, size=[N,Rd.d+H.d]. Here Rd.d and H.d are the spatial and subgroup H dimensions, which together form the dimension of the input. NOTE: The centers are relative to the center of the kernel and can thus be negative, but should be smaller than |center[i]|<size//2.
        - size: int. This is the virtual kernel size (determines the cropping).
    Args (optional):
        - h_grid_in: grid over H for the input; defaults to `h_grid` (the output grid).
        - n: int. The order of the B-spline.
        - s: float. The scale of each shifted cardinal B-spline.
        - stride: int. not implemented...
        - padding: int. Integer that specifies the amount of spatial padding on each side.
        - b_padding: boolean. Whether or not to automatically correct for cropping due to the size of the cardinal B-splines.
        - b_groups: int. Split the convolution kernels (along the output channel axis) into groups that have their own set of centers (basis functions). If b_groups=Cout, then each output channel is generated with a kernel that has its own basis consisting of n_basis functions.
        - h_crop: boolean. Whether to drop the highest-resolution input scales.
    Output:
        - output: torch.tensor, size=[B,Cout,X',Y',...]. Here X',Y',... are the cropped/padded spatial dims.
    """
    # Check if scale is indeed a float (check needs to be done otherwise
    # we get unwanted casting to integers of the centers)
    assert isinstance(s, float), "The specified argument \"s\" should be a float."
    # Default: the input grid over H matches the output grid.
    if h_grid_in is None:
        h_grid_in = h_grid
    # BUG FIX: the B-spline order was hard-coded to 2, silently ignoring the
    # user-supplied `n`. Since n defaults to 2, default behavior is unchanged.
    output = efficient_1Dspline_conv_GG_locscalefilters(input, weights, group, h_grid, h_grid_in, n, dilation, h_crop=h_crop)
    # Add bias if provided # TODO
    if bias is not None:  # TODO implement bias
        pass  # output += bias.reshape([1, Cout] + [1] * group.G.d)
    # Return the output
    return output
def merge_channel_and_h_axes(input):
    """Fold the trailing H axis into the channel axis.

    Takes a tensor shaped [B, C, X, Y, ..., H] and returns one shaped
    [B, C * H, X, Y, ...] by concatenating every H-slice along dim 1.
    """
    num_h = input.shape[-1]
    h_slices = [input[..., h] for h in range(num_h)]
    return torch.cat(h_slices, dim=1)
def efficient_1Dspline_conv_GG_locscalefilters(input, weights, group, h_grid, h_grid_in, order_spline, dilation, h_crop = False):
    """Scale-equivariant 1D convolution with per-scale B-spline smoothed filters.

    Args:
        input: torch.tensor, shape [B, N_in, N_h_in, X] (indexing below fixes
            4 dims: batch, channel, input scale, space).
        weights: torch.tensor; reshaped below as [N_out, -1, K], so assumed
            shape [N_out, N_in, N_h, K] -- TODO confirm against callers.
        group: only ``group.H.det(scale)`` is used (normalization factor).
        h_grid: scale grid of the kernel; ``h_grid.N`` scales are consumed.
        h_grid_in: NOTE(review): not used anywhere in this body -- confirm
            whether it should influence the input-scale loop.
        order_spline: order of the cardinal B-spline basis.
        dilation: extra spatial dilation multiplied on top of the 2**k scale.
        h_crop: when True, skip the last ``h_grid.N - 1`` input scales.

    Returns:
        torch.tensor with per-scale responses stacked on dim 2.
    """
    # Method:
    # For each scale s in N_h, perform in parallel:
    #     1. Vectorize input into input of form [B, N_out, N_h_in * N_in]
    #     2. Convolve input with B-spline (of scale s).
    #     3. Convolve with dilated convolutions (with dilation proportional to s).
    #     4. Concatenate responses (put away unneccessary padding).
    # TODO: Need to implement a similar (less-efficient version) for dilations < 1. It can be done by first computing base*weights n' performing usual convolution.
    output_cat = []
    if h_crop:
        crop_factor = h_grid.N - 1
    else:
        crop_factor = 0
    for in_scale in range(1, input.shape[-2] + 1 - crop_factor): #[N_h_in]
        # keep track of where we are
        h_grid_count = in_scale - 1 # Use to keep track of which output scales have been calculated. (it's used to shrink sizes of input/weights) as some become zero when moving accross scales.
        # we want to avoid unwanted casting to integers.
        # NOTE: scales are dyadic -- loop index k maps to scale 2**(k-1).
        in_scale = float(2 ** (in_scale - 1))
        Bfunc = B(order_spline)
        _, xMax, brange = eerie.bsplines.utils.B_supp_grid_2(n=order_spline, s=in_scale, intsupp=True, device=input.device) # Scales start from 1 to N_h
        # Get values of cardinal spline on grid
        b_spline_on_grid = Bfunc(brange / (in_scale)) # TODO why divided by s? / Can be computed offline
        # Convolve with the cardinal B-spline
        # Calculate convolution parameters (with cardinal B-spline)
        # ----------------------------------------
        # padding: The required padding equals int(b_spline_on_grid.shape[0] + (scale/2)) and is a function of the scale.
        padding = int(dilation * in_scale * (weights.shape[-1] // 2) + b_spline_on_grid.shape[0] // 2)
        # input needs to be reshaped as [B * N_in * N_h_in, 1, X]
        N_b = input.shape[0]
        N_in = input.shape[1]
        N_h_in = input.shape[2]
        # ---------------------------------------- # The h_grid_count: incorporates the fact that the highest resolutions are not considered for coarser scales.
        # Depth-wise smoothing: every (batch, channel, scale) fiber is
        # convolved with the same 1-channel B-spline kernel.
        output = torch.conv1d(input=input[:, :, h_grid_count:h_grid_count+h_grid.N, :].reshape(-1, 1, input.shape[-1])
                                    if (h_grid_count + h_grid.N) < input.shape[-2] else
                                    input[:, :, h_grid_count:, :].reshape(-1, 1, input.shape[-1]),
                              weight=b_spline_on_grid.view(1, 1, -1), bias=None, stride=1, padding=padding, dilation=1, groups=1)
        # Reshape to input-like form
        shape_inter = h_grid.N if (h_grid_count + h_grid.N) < input.shape[-2] else (N_h_in - h_grid_count)
        output = output.reshape(N_b, N_in, shape_inter, -1)
        # Convolve with weights
        # Calculate convolution parameters (with weights)
        # ----------------------------------------
        # dilation: depends on scale
        dil = int(in_scale) * dilation # Equivalent to H.left_action_on_Rd
        # ---------------------------------------- # The h_grid_count: incorporates the fact that the coarsest part of the kernel are not considered for coarser scales.
        output = output.reshape(N_b, N_in * shape_inter, -1)
        # NOTE(review): the first branch's condition uses ``in_scale`` (a
        # float scale) where the analogous checks above use h_grid_count --
        # confirm this asymmetry is intended.
        weight = weights[:, :, :, :].reshape(weights.shape[0], -1, weights.shape[-1]) if (in_scale + h_grid.N) < input.shape[-2] \
            else weights[:, :, :shape_inter, :].reshape(weights.shape[0], -1, weights.shape[-1])
        # Normalize by the Haar-measure determinant of the scale element.
        output = float(1 / group.H.det(in_scale) ) * torch.conv1d(input=output, weight=weight, bias=None, stride=1, padding=0, dilation=dil, groups=1)
        # The spatial dimension of the output is equal to that of the input.
        output_cat.append(output)
    # Concatenate all scales
    return torch.stack(output_cat, dim=2)
|
<reponame>RaymondJiangkw/screeps_script
// Shared gameplay constants for the screeps AI.
const Constants = {
    // Player names treated as hostile on sight.
    enemies:["Yoner"],
    // Our own account name (presumably used to recognize friendly objects).
    username:"BoosterKevin",
    // Emoji used for console / room-visual output.
    emoji:{
        redFlag:"🚩",
        hunt:"🏹",
        money:"💰",
        number:"🔟",
        bigSmile:"😀",
        slightSmile:"🙂",
        neutralFace:"😐"
    },
    // Lab reactions: each compound maps to the two reagents that produce it.
    labFormula:{
        "OH":["H","O"],
        "ZK":["Z","K"],
        "UL":["U","L"],
        "G":["ZK","UL"],
        "UH":["U","H"],
        "UO":["U","O"],
        "KH":["K","H"],
        "KO":["K","O"],
        "LH":["L","H"],
        "LO":["L","O"],
        "ZH":["Z","H"],
        "ZO":["Z","O"],
        "GH":["G","H"],
        "GO":["G","O"],
        "UH2O":["UH","OH"],
        "UHO2":["UO","OH"],
        "KH2O":["KH","OH"],
        "KHO2":["KO","OH"],
        "LH2O":["LH","OH"],
        "LHO2":["LO","OH"],
        "ZH2O":["ZH","OH"],
        "ZHO2":["ZO","OH"],
        "GH2O":["GH","OH"],
        "GHO2":["GO","OH"],
        "XUH2O":["UH2O","X"],
        "XUHO2":["UHO2","X"],
        "XKH2O":["KH2O","X"],
        "XKHO2":["KHO2","X"],
        "XLH2O":["LH2O","X"],
        "XLHO2":["LHO2","X"],
        "XZH2O":["ZH2O","X"],
        "XZHO2":["ZHO2","X"],
        "XGH2O":["GH2O","X"],
        "XGHO2":["GHO2","X"]
    },
    // Boosts: each compound maps to the body-part/effect name it boosts
    // (values match screeps body part constants in lowercase).
    compoundEffect:{
        "UH":"attack",
        "UO":"work",
        "KH":"carry",
        "KO":"ranged_attack",
        "LH":"work",
        "LO":"heal",
        "ZH":"work",
        "ZO":"move",
        "GH":"work",
        "GO":"tough",
        "UH2O":"attack",
        "UHO2":"work",
        "KH2O":"carry",
        "KHO2":"ranged_attack",
        "LH2O":"work",
        "LHO2":"heal",
        "ZH2O":"work",
        "ZHO2":"move",
        "GH2O":"work",
        "GHO2":"tough",
        "XUH2O":"attack",
        "XUHO2":"work",
        "XKH2O":"carry",
        "XKHO2":"ranged_attack",
        "XLH2O":"work",
        "XLHO2":"heal",
        "XZH2O":"work",
        "XZHO2":"move",
        "XGH2O":"work",
        "XGHO2":"tough"
    }
}
module.exports = Constants
|
#!/bin/bash
# Trace commands (-x) and echo input lines (-v).
# Fixed: Linux passes at most ONE argument on the shebang line, so
# "#!/bin/bash -x -v" handed bash the single bogus word "-x -v";
# set the options explicitly instead.
set -xv

# Start the dev server (Django-style manage script), listening on all
# interfaces on port 8000.
python3 minimal.py runserver 0.0.0.0:8000
|
<reponame>Coder-Dilip/HarinNepal<gh_stars>0
// Landing-page hero slider (keen-slider) with a 5-second autoplay that
// pauses while the user hovers over or drags the slider.
var sliderElement = document.getElementById("my-keen-slider")
var elements = document.querySelectorAll(".keen-slider__slide")

var interval = 0

// Reset the autoplay timer. When `run` is false a timer is still armed,
// but its callback advances nothing (preserves the original behaviour).
function autoplay(run) {
    clearInterval(interval)
    interval = setInterval(function () {
        if (run && slider) {
            slider.next()
        }
    }, 5000)
}

var slider = new KeenSlider(sliderElement, {
    loop: true,
    duration: 1000,
    dragStart: function () { autoplay(false) },
    dragEnd: function () { autoplay(true) },
})

sliderElement.addEventListener("mouseover", function () { autoplay(false) })
sliderElement.addEventListener("mouseout", function () { autoplay(true) })
autoplay(true)

// animate on scroll
AOS.init();
|
#!/bin/sh
# Tear down the controller test database and its role on the shared RDS
# test instance. All statements run as the superuser against the
# "postgres" maintenance database.
SUPERUSER=samuelRHadmin
SU_PASSWORD=v3ry53cur3
DATABASE=controller_test_db
DB_SERVER=rhtestinstance.cux1erificun.us-east-1.rds.amazonaws.com
USER=tester
USER_PASSWORD=tester

ADMIN_URL="postgresql://${SUPERUSER}:${SU_PASSWORD}@${DB_SERVER}/postgres"

# Drop the test database if present, then strip the test role's
# privileges and remove the role itself.
psql "${ADMIN_URL}" -c "DROP DATABASE IF EXISTS ${DATABASE};"
psql "${ADMIN_URL}" -c "REVOKE ALL PRIVILEGES ON SCHEMA public FROM ${USER};DROP ROLE ${USER};"
|
from enum import Enum
from typing import Dict, List, Tuple
import psycopg2
from django.core.exceptions import ValidationError
from django.db import models, transaction
from django.template.defaultfilters import pluralize
from django.urls import reverse
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from psycopg2 import OperationalError, sql
from hexa.catalog.models import CatalogQuerySet, Datasource, Entry
from hexa.catalog.sync import DatasourceSyncResult
from hexa.core.models import Permission
from hexa.core.models.cryptography import EncryptedTextField
class ExternalType(Enum):
    """Kinds of external objects this connector writes into the catalog index."""
    DATABASE = "database"
    TABLE = "table"
class DatabaseQuerySet(CatalogQuerySet):
    """QuerySet restricting databases to those a given user may see."""
    def filter_for_user(self, user):
        # Active superusers see every database.
        if user.is_active and user.is_superuser:
            return self
        # Everyone else only sees databases shared with one of their teams.
        return self.filter(
            databasepermission__team__in=[t.pk for t in user.team_set.all()]
        ).distinct()
class Database(Datasource):
    """Catalog datasource backed by a PostgreSQL database."""

    def get_permission_set(self):
        """Return the team permissions attached to this database."""
        return self.databasepermission_set.all()

    searchable = True  # TODO: remove (see comment in datasource_index command)

    hostname = models.CharField(max_length=200)
    username = models.CharField(max_length=200)
    password = EncryptedTextField(max_length=200)
    port = models.IntegerField(default=5432)
    database = models.CharField(max_length=200)
    # Optional suffix to disambiguate entries whose database name collides
    # (see unique_name below).
    postfix = models.CharField(max_length=200, blank=True)

    class Meta:
        verbose_name = "PostgreSQL Database"
        ordering = ("hostname",)
        unique_together = [("database", "postfix")]

    objects = DatabaseQuerySet.as_manager()

    @property
    def unique_name(self):
        """Database name plus postfix — unique across the catalog."""
        if self.postfix:
            return f"{self.database}{self.postfix}"
        else:
            return self.database

    @property
    def env_name(self):
        """Environment-variable name under which pipelines expose this DB."""
        slug = self.unique_name.replace("-", "_").upper()
        return f"POSTGRESQL_{slug}"

    @property
    def url(self):
        """Full connection URL, including the clear-text password (do not log)."""
        return f"postgresql://{self.username}:{self.password}@{self.hostname}:{self.port}/{self.database}"

    @property
    def safe_url(self):
        """Connection URL without the password — safe for display and indexing."""
        return (
            f"postgresql://{self.username}@{self.hostname}:{self.port}/{self.database}"
        )

    @property
    def content_summary(self):
        """Human-readable table count, e.g. '3 tables' ('' when empty)."""
        count = self.table_set.count()
        return (
            ""
            if count == 0
            else _("%(count)d table%(suffix)s")
            % {"count": count, "suffix": pluralize(count)}
        )

    def get_pipeline_credentials(self):
        """Connection details handed to pipelines that use this database."""
        return {
            "hostname": self.hostname,
            "username": self.username,
            # Fixed: this value was an unresolved placeholder, which made the
            # module fail to parse; pipelines need the actual password.
            "password": self.password,
            "port": self.port,
            "database": self.database,
        }

    def populate_index(self, index):
        """Copy this database's metadata onto the given search-index entry."""
        index.last_synced_at = self.last_synced_at
        index.external_name = self.database
        index.external_id = self.safe_url
        index.external_type = ExternalType.DATABASE.value
        index.search = f"{self.database}"
        index.path = [self.id.hex]
        index.content = self.content_summary
        index.datasource_name = self.database
        index.datasource_id = self.id

    @property
    def display_name(self):
        return self.unique_name

    def __str__(self):
        return self.display_name

    def clean(self):
        """Validate connectivity by opening a connection and running a probe query."""
        try:
            with psycopg2.connect(self.url) as conn:
                with conn.cursor() as cursor:
                    cursor.execute("SELECT 1 = 1")
                    cursor.fetchall()
        except OperationalError as e:
            # Translate common connection failures into user-friendly messages.
            if "could not connect to server" in str(e):
                raise ValidationError(
                    "Could not connect to server, please check hostname and port"
                )
            elif str(e).startswith("FATAL: "):
                err = str(e).removeprefix("FATAL: ")
                raise ValidationError(err)
            else:
                raise ValidationError(e)

    def sync(self):
        """Synchronize the catalog's Table entries with the live database.

        Returns a DatasourceSyncResult carrying created/updated/identical/
        deleted counts.
        """
        created_count = 0
        updated_count = 0
        identical_count = 0
        deleted_count = 0

        # Ignore tables from postgis as there is no value in showing them in the catalog
        IGNORE_TABLES = ["geography_columns", "geometry_columns", "spatial_ref_sys"]

        # Fixed: `import psycopg2` does not load the `extras` submodule, so
        # `psycopg2.extras.DictCursor` raised AttributeError at runtime.
        from psycopg2.extras import DictCursor

        with psycopg2.connect(self.url) as conn:
            with conn.cursor(cursor_factory=DictCursor) as cursor:
                cursor.execute(
                    """
                    SELECT table_name, table_type, pg_class.reltuples as row_count
                    FROM information_schema.tables
                    JOIN pg_class ON information_schema.tables.table_name = pg_class.relname
                    WHERE table_schema = 'public'
                    ORDER BY table_name;
                """
                )
                response: List[Tuple[str, str, int]] = cursor.fetchall()
                new_tables: Dict[str, Dict] = {
                    x[0]: x for x in response if x[0] not in IGNORE_TABLES
                }
                # reltuples is only a planner estimate; for small tables run
                # an exact COUNT(*) instead.
                for name, data in new_tables.items():
                    if data["row_count"] < 10_000:
                        cursor.execute(
                            sql.SQL("SELECT COUNT(*) as row_count FROM {};").format(
                                sql.Identifier(data["table_name"])
                            ),
                        )
                        response = cursor.fetchone()
                        new_tables[name]["row_count"] = response["row_count"]

        with transaction.atomic():
            existing_tables = Table.objects.filter(database=self)
            for table in existing_tables:
                if table.name not in new_tables.keys():
                    deleted_count += 1
                    table.delete()
                else:
                    data = new_tables[table.name]
                    if table.rows == data["row_count"]:
                        identical_count += 1
                    else:
                        table.rows = data["row_count"]
                        updated_count += 1
                        table.save()
            for new_table_name, new_table in new_tables.items():
                if new_table_name not in {x.name for x in existing_tables}:
                    created_count += 1
                    Table.objects.create(
                        name=new_table_name,
                        database=self,
                        # Fixed: previously read the stale `data` variable left
                        # over from the loop above, so every newly created
                        # table got the row count of an unrelated table.
                        rows=new_table["row_count"],
                    )

        # Flag the datasource as synced
        self.last_synced_at = timezone.now()
        self.save()

        return DatasourceSyncResult(
            datasource=self,
            created=created_count,
            updated=updated_count,
            identical=identical_count,
            deleted=deleted_count,
        )

    def get_absolute_url(self):
        return reverse(
            "connector_postgresql:datasource_detail", kwargs={"datasource_id": self.id}
        )
class TableQuerySet(CatalogQuerySet):
    """QuerySet restricting tables to those in databases the user may see."""
    def filter_for_user(self, user):
        # Active superusers see every table.
        if user.is_active and user.is_superuser:
            return self
        # Delegate the team-based visibility rule to DatabaseQuerySet.
        return self.filter(database__in=Database.objects.filter_for_user(user))
class Table(Entry):
    """A table belonging to a registered PostgreSQL database."""
    database = models.ForeignKey("Database", on_delete=models.CASCADE)
    name = models.CharField(max_length=512)
    # Row count captured at sync time (an estimate for large tables).
    rows = models.IntegerField(default=0)
    searchable = True  # TODO: remove (see comment in datasource_index command)
    class Meta:
        verbose_name = "PostgreSQL table"
        ordering = ["name"]
    objects = TableQuerySet.as_manager()
    def get_permission_set(self):
        """Tables inherit their permissions from the owning database."""
        return self.database.databasepermission_set.all()
    def populate_index(self, index):
        """Copy this table's metadata onto the given search-index entry."""
        index.last_synced_at = self.database.last_synced_at
        index.external_name = self.name
        index.external_type = ExternalType.TABLE.value
        # Index path nests the table under its database.
        index.path = [self.database.id.hex, self.id.hex]
        index.external_id = f"{self.database.safe_url}/{self.name}"
        index.context = self.database.database
        index.search = f"{self.name}"
        index.datasource_name = self.database.database
        index.datasource_id = self.database.id
    def get_absolute_url(self):
        return reverse(
            "connector_postgresql:table_detail",
            kwargs={"datasource_id": self.database.id, "table_id": self.id},
        )
class DatabasePermission(Permission):
    """Grants a team access to a database (one row per team/database pair)."""
    database = models.ForeignKey(
        "connector_postgresql.Database", on_delete=models.CASCADE
    )
    class Meta:
        unique_together = [("database", "team")]
    def index_object(self):
        # Permission changes affect visibility, so re-index the database.
        self.database.build_index()
    def __str__(self):
        return f"Permission for team '{self.team}' on database '{self.database}'"
|
'use strict';
var path = require('path');
var test = require('ava');
var assert = require('yeoman-assert');
var helpers = require('yeoman-test');
// Run the update generator once before all tests, with babel enabled and
// no uploading target selected.
test.before(() => {
  const pkg = require('../../package.json');
  const dummyGenerators = [
    [helpers.createDummyGenerator(), 'statisk:gulp']
  ];
  return helpers.run(path.join(__dirname, '../../generators/update'))
    .withOptions({
      name: pkg.name,
      version: pkg.version,
      'skip-install': true
    })
    .withPrompts({uploading: 'None', babel: true})
    .withGenerators(dummyGenerators)
    .toPromise();
});
// Basic scaffolding output.
test('creates gulpfile', () => {
  assert.file('gulpfile.js');
});

test('creates package.json', () => {
  assert.file('package.json');
});

// The generated gulpfile is stamped with today's date plus the generator
// name and version.
test('creates comment about creation', () => {
  const pkg = require('../../package.json');
  const today = (new Date()).toISOString().split('T')[0];
  const stamp = `// generated on ${today} using ${pkg.name} ${pkg.version}`;
  assert.fileContent('gulpfile.js', stamp);
});
// Every task file is regenerated; build.js is intentionally absent from
// this list (the update generator does not touch it).
test('creates gulp task files, but not build.js', () => {
  const tasks = [
    'assets', 'clean', 'copy', 'fonts', 'html', 'images', 'inject', 'uploading'
  ].map(name => `gulp/tasks/${name}.js`);
  assert.file(tasks);
});
// The prompts enable babel (see test.before), so the assets task must
// include the babel pipeline. Fixed: the test title said "does not
// contain babel" while the body asserts the file DOES contain it.
test('gulp/tasks/assets.js contains babel', () => {
  [
    'const babel',
    '.pipe(babel'
  ].forEach(snippet => {
    assert.fileContent('gulp/tasks/assets.js', snippet);
  });
});
// With uploading disabled, no credential files should be written…
test('does not create credentials files', () => {
  assert.noFile(['aws-credentials.json', 'rsync-credentials.json']);
});

// …and none of the upload-related dev dependencies should appear.
test('does not contain uploading packages', () => {
  const uploadDeps = {
    'gulp-awspublish': '',
    'concurrent-transform': '',
    'gulp-rsync': '',
    'gulp-gh-pages': ''
  };
  assert.noJsonFileContent('package.json', {devDependencies: uploadDeps});
});

// The uploading task file exists but must be an empty stub.
test('does not contain deploy task', () => {
  assert.fileContent('gulp/tasks/uploading.js', '// File empty but generated because of how Yeoman scaffolds files');
  assert.noFileContent('gulp/tasks/uploading.js', "gulp.task('upload'");
});
|
package mainclient.methodNewDefault;
import main.methodNewDefault.IMethodNewDefault;
import main.methodNewDefault.IMethodNewDefaultOther;
/**
 * Test fixture: an abstract class implementing two interfaces while
 * supplying its own {@code defaultMethod} body. Presumably this resolves a
 * default-method conflict between the two interfaces — confirm against the
 * interface sources.
 */
public abstract class AbsMethodNewDefaultMultiIntOwnDef implements IMethodNewDefault, IMethodNewDefaultOther {
    /** Own implementation; always returns 11. */
    public int defaultMethod() {
        return 11;
    }
}
|
<reponame>ideacrew/pa_edidb
require 'rails_helper'
# Specs for the federal-employer contribution calculator: the employer pays
# contribution_percent of the premium, capped per group-size bracket.
describe EmployerContributions::FederalEmployer do
  let(:employer_groups) { [] }
  # Employer pays 75% of the premium, subject to the group caps below.
  let(:fed_employer_props) { {
    :federal_contribution_groups => employer_groups,
    :contribution_percent => 75.0
  } }
  let(:enrollment) { double(:enrollees => enrollees, :pre_amt_tot => pre_amt_tot) }
  let(:enrollees) { [] }
  let(:pre_amt_tot) { 0.0 }
  let(:mg) { EmployerContributions::FederalContributionGroup }
  describe "given some employer groups" do
    # Contribution caps keyed by enrollee count (1, 2 and 3 enrollees).
    let(:employer_groups) { [
      mg.new(:enrollee_count => 1, :contribution_amount => 222.11),
      mg.new(:enrollee_count => 2, :contribution_amount => 400.12),
      mg.new(:enrollee_count => 3, :contribution_amount => 600.34)
    ]}
    subject { EmployerContributions::FederalEmployer.new(fed_employer_props) }
    describe "given just an employee" do
      let(:enrollees) { [1] }
      let(:pre_amt_tot) { 400.23 }
      it "should use the correct group size" do
        expect(subject.max_amount_for(enrollment)).to eql(222.11)
      end
      # 75% of 400.23 (300.17) exceeds the 1-person cap, so the cap applies.
      it "should calculate the right employer contribution" do
        expect(subject.contribution_for(enrollment)).to eql(222.11)
      end
    end
    describe "given an enrollment with more people than the existing max group size" do
      let(:enrollees) { [1,2,3,4,5] }
      let(:pre_amt_tot) { 500.21 }
      it "should use the largest group size" do
        expect(subject.max_amount_for(enrollment)).to eql(600.34)
      end
      # 75% of 500.21 = 375.16, under the cap, so the percentage applies.
      it "should calculate the right employer contribution" do
        expect(subject.contribution_for(enrollment)).to eql(375.16)
      end
    end
  end
end
|
<filename>src/core/middlewares/cors.middleware.ts
import { Injectable, NestMiddleware } from '@nestjs/common';
import { Request, Response } from 'express';
import { CROSS_DOMAIN } from '../../app.config';
import { isDevMode } from '../../app.environment';
@Injectable()
export class CorsMiddleware implements NestMiddleware {
  /**
   * Sets CORS and default response headers on every request.
   * Origins on the allow-list (or any origin in dev mode) are reflected
   * back; requests without an Origin header get a wildcard.
   */
  use(request: Request, response: Response, next) {
    const { allowedOrigins, allowedHeaders, allowedMethods } = CROSS_DOMAIN;

    // The Origin header may arrive as an array; normalize to one string.
    const originHeader = request.headers.origin;
    const origin = (Array.isArray(originHeader) ? originHeader[0] : originHeader) || '';

    // Allow Origin
    if (!origin || allowedOrigins.includes(origin) || isDevMode) {
      response.setHeader('Access-Control-Allow-Origin', origin || '*');
    }

    // Remaining CORS headers plus defaults applied to all responses.
    response.header('Access-Control-Allow-Headers', allowedHeaders.join(','));
    response.header('Access-Control-Allow-Methods', allowedMethods.join(','));
    response.header('Content-Type', 'application/json; charset=utf-8');
    response.header('X-Powered-By', 'NestServer');

    return next();
  }
}
|
/// <summary>
/// Rounds <paramref name="number"/> to the nearest integer.
/// NOTE(review): <c>Math.Round(double)</c> uses banker's rounding
/// (MidpointRounding.ToEven), so midpoints go to the nearest even integer —
/// 0.5 → 0 and 1.5 → 2. Confirm callers do not expect
/// "round half away from zero" semantics.
/// </summary>
public static int RoundToNearestInteger(double number)
{
    return (int) Math.Round(number);
}
|
package com.alphawallet.app.di;
import com.alphawallet.app.repository.CurrencyRepository;
import com.alphawallet.app.repository.CurrencyRepositoryType;
import com.alphawallet.app.repository.LocaleRepository;
import com.alphawallet.app.repository.LocaleRepositoryType;
import com.alphawallet.app.repository.PreferenceRepositoryType;
import com.alphawallet.app.service.AssetDefinitionService;
import com.alphawallet.app.viewmodel.AdvancedSettingsViewModelFactory;
import dagger.Module;
import dagger.Provides;
/**
 * Dagger module wiring the collaborators of the advanced settings screen.
 */
@Module
class AdvancedSettingsModule {

    /** Builds the view-model factory from its repository collaborators. */
    @Provides
    AdvancedSettingsViewModelFactory provideAdvancedSettingsViewModelFactory(
            LocaleRepositoryType localeRepository,
            CurrencyRepositoryType currencyRepository,
            AssetDefinitionService assetDefinitionService,
            PreferenceRepositoryType preferenceRepository) {
        return new AdvancedSettingsViewModelFactory(
                localeRepository,
                currencyRepository,
                assetDefinitionService,
                preferenceRepository);
    }

    /** Locale repository backed by shared preferences. */
    @Provides
    LocaleRepositoryType provideLocaleRepository(PreferenceRepositoryType preferences) {
        return new LocaleRepository(preferences);
    }

    /** Currency repository backed by shared preferences. */
    @Provides
    CurrencyRepositoryType provideCurrencyRepository(PreferenceRepositoryType preferences) {
        return new CurrencyRepository(preferences);
    }
}
|
#!/bin/bash
# Port-forward the "kcontrol" pod (connect server UI) so it is reachable
# at http://localhost:9021.
#
# Usage: ./port-forward.sh [namespace]   — namespace defaults to "kafka".
NAMESPACE=${1:-kafka}
# Quoted expansion so an unusual namespace value cannot word-split or glob.
kubectl port-forward --namespace "$NAMESPACE" kcontrol 9021:9021
|
<filename>src/utils.ts
/**
 * Object type guaranteed to be indexable first with TypeName, then with
 * FieldName — i.e. `value[typeName][fieldName]` is well-typed. The field's
 * value type is intentionally `unknown`.
 */
export type HasTypeField<TypeName extends string, FieldName extends string> = {
  [t in TypeName]: {
    [f in FieldName]: unknown;
  };
};
/**
 * Maps each argument name of ArgsType to an optional async validator for
 * that argument's value. Presumably a validator resolves to `true` when the
 * value is acceptable — confirm against the call sites.
 */
export type ArgsValidationConfig<ArgsType> = {
  [ArgName in keyof ArgsType]?: (value: ArgsType[ArgName]) => Promise<boolean>;
};
|
<filename>spec/mumukit/narrator_spec.rb
require 'spec_helper'
# Specs for the feedback "narrator": phrases are locale-aware and seeded so
# that a given seed always produces the same wording.
describe 'narrator' do
  describe 'random narrator' do
    let(:narrator) { Mumukit::Assistant::Narrator.random }
    it { expect(narrator).to be_a Mumukit::Assistant::Narrator }
    it { expect(narrator.retry_phrase).to be_a String }
  end
  describe 'seeded narrator' do
    # All-zero seed pins every phrase choice, making output deterministic.
    let(:seed) { Mumukit::Assistant::Narrator.seed(0, 0, 0, 0, 0) }
    let(:narrator) { Mumukit::Assistant::Narrator.new(seed) }
    context 'en' do
      before { I18n.locale = :en }
      let(:tips) { [
        'check you have not mispelled `product`',
        'check that you are using composition',
        'remember that `sum` must work with both `Int`s and `Float`s'
      ] }
      it { expect(narrator.retry_phrase).to eq 'Let\'s try again!' }
      it { expect(narrator.explanation_introduction_phrase).to eq 'Oops, it didn\'t work :frowning:.' }
      # Tips are capitalized and joined with connectors (Also/Finally).
      it { expect(narrator.compose_explanation tips).to eq "**Oops, it didn\'t work :frowning:.**\n\n"+
                                                           "Check you have not mispelled `product`.\n\n"+
                                                           "Also, check that you are using composition.\n\n" +
                                                           "Finally, remember that `sum` must work with both `Int`s and `Float`s.\n\n" +
                                                           "Let's try again!\n" }
    end
    context 'es' do
      before { I18n.locale = :es }
      it { expect(narrator.retry_phrase).to eq '¡Intentemos de nuevo!' }
      it { expect(narrator.explanation_introduction_phrase).to eq 'Parece que algo no funcionó :see_no_evil:.' }
      context '3 tips' do
        let(:tips) { [
          'fijate que no hayas escrito mal `product`',
          'fijate que estés usando composición',
          'recordá que `sum` debe funcionar tanto para `Int`s como `Float`s'
        ] }
        it { expect(narrator.compose_explanation tips).to eq "**Parece que algo no funcionó :see_no_evil:.**\n\n"+
                                                             "Fijate que no hayas escrito mal `product`.\n\n"+
                                                             "Además, fijate que estés usando composición.\n\n" +
                                                             "Por último, recordá que `sum` debe funcionar tanto para `Int`s como `Float`s.\n\n" +
                                                             "¡Intentemos de nuevo!\n" }
      end
      context '2 tips' do
        let(:tips) { [
          'fijate que no hayas escrito mal `product`',
          'fijate que estés usando composición'
        ] }
        it { expect(narrator.compose_explanation tips).to eq "**Parece que algo no funcionó :see_no_evil:.**\n\n"+
                                                             "Fijate que no hayas escrito mal `product`.\n\n"+
                                                             "Además, fijate que estés usando composición.\n\n" +
                                                             "¡Intentemos de nuevo!\n" }
      end
      # A single tip gets no connector words at all.
      context '1 tip' do
        let(:tips) { [
          'recordá que `sum` debe funcionar tanto para `Int`s como `Float`s'
        ] }
        it { expect(narrator.compose_explanation tips).to eq "**Parece que algo no funcionó :see_no_evil:.**\n\n"+
                                                             "Recordá que `sum` debe funcionar tanto para `Int`s como `Float`s.\n\n"+
                                                             "¡Intentemos de nuevo!\n" }
      end
    end
  end
end
|
<gh_stars>100-1000
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os
import logging
from logging.handlers import RotatingFileHandler
from qframer.qt import QtGui
from qframer.qt import QtCore
from qframer import FMainWindow
import json
import time
# Main application log. (Original comment said "log/QSoftkeyer.log" —
# NOTE(review): the handler below actually writes log/QMain.log.)
logging.root.setLevel(logging.INFO)
logging.root.propagate = 0
# Rotate at 10 MiB, keeping up to 100 old files.
loghandler = RotatingFileHandler(os.path.join("log", "QMain.log"), maxBytes=10 * 1024 * 1024, backupCount=100)
loghandler.setFormatter(logging.Formatter('%(asctime)s %(levelname)8s [%(filename)16s:%(lineno)04s] %(message)s'))
loghandler.level = logging.INFO
logging.root.addHandler(loghandler)
# Module-wide alias for the root logger.
logger = logging.root
logger.propagate = 0
from config import windowsoptions
import config
from effects import *
from childpages import *
from guiutil.utils import set_skin, set_bg
import utildialog
class MetroWindow(QtGui.QWidget):
    """Central widget: hosts every child page in a QStackedWidget and
    cross-fades between pages when the current page changes."""
    def __init__(self, parent=None):
        super(MetroWindow, self).__init__(parent)
        # Page identifiers and their Chinese display names, from config.
        self.pagetags = windowsoptions['mainwindow']['centralwindow']['pagetags']
        self.pagetags_zh = windowsoptions['mainwindow']['centralwindow']['pagetags_zh']
        self.initUI()
    def initUI(self):
        """Create the page stack and wire up the fade transition."""
        self.pagecount = len(self.pagetags_zh)  # number of pages
        # self.createNavigation()
        self.pages = QtGui.QStackedWidget()  # stack holding every child page
        # self.pages.addWidget(self.navigationPage)
        self.createChildPages()  # build the child pages
        # self.createConnections()
        mainLayout = QtGui.QHBoxLayout()
        mainLayout.addWidget(self.pages)
        self.setLayout(mainLayout)
        self.layout().setContentsMargins(0, 0, 0, 0)
        self.faderWidget = None
        self.pages.currentChanged.connect(self.fadeInWidget)  # cross-fade on page switch
    def createNavigation(self):
        """Create the navigation page (currently disabled, see initUI)."""
        self.navigationPage = NavigationPage()
    def createChildPages(self):
        """Instantiate one page per configured tag.

        For tag ``foo`` this looks up class ``fooPage`` in this module
        (falling back to ``BasePage``), wraps it in a ``ChildPage`` stored
        as attribute ``childfooPage`` and adds it to the stack.
        """
        for buttons in self.pagetags:
            for item in buttons:
                page = item + 'Page'
                childpage = 'child' + page
                if hasattr(sys.modules[__name__], page):
                    setattr(self, page, getattr(sys.modules[__name__], page)(self))
                else:
                    setattr(self, page, getattr(sys.modules[__name__], 'BasePage')(self))
                setattr(self, childpage, ChildPage(self, getattr(self, page)))
                self.pages.addWidget(getattr(self, childpage))
    def createConnections(self):
        """Connect every navigation button to the page-switch handler."""
        for buttons in self.pagetags:
            for item in buttons:
                button = item + 'Button'
                getattr(self.navigationPage, button).clicked.connect(self.childpageChange)
    def childpageChange(self):
        """Slot: switch to the page named after the clicked button."""
        currentpage = getattr(self, unicode('child' + self.sender().objectName()[:-6]) + 'Page')
        if hasattr(self, 'navigationPage'):
            if currentpage is self.navigationPage:
                currentpage.parent.parent().statusBar().hide()
        self.pages.setCurrentWidget(currentpage)
        self.sender().setFocus()
        # Refresh the live preview when switching to a QChrome page.
        if isinstance(currentpage.child, QChromePage):
            currentpage.child.refreshcontent()
    @QtCore.pyqtSlot()
    def backnavigationPage(self):
        """Slot: jump straight back to the navigation page."""
        self.parent().statusBar().hide()
        self.pages.setCurrentWidget(self.navigationPage)
    @QtCore.pyqtSlot()
    def backPage(self):
        """Slot: go one page back (or to navigation from the first page)."""
        index = self.pages.currentIndex()
        if index == 1:
            self.parent().statusBar().hide()
            self.pages.setCurrentWidget(self.navigationPage)
        else:
            self.pages.setCurrentIndex(index - 1)
    @QtCore.pyqtSlot()
    def forwardnextPage(self):
        """Slot: advance to the next page, wrapping to navigation at the end."""
        index = self.pages.currentIndex()
        if index < self.pagecount:
            self.pages.setCurrentIndex(index + 1)
        else:
            self.parent().statusBar().hide()
            self.pages.setCurrentWidget(self.navigationPage)
    def fadeInWidget(self, index):
        """Slot: run the fade effect when the current page changes.

        Fixed: ``QStackedWidget.widget()`` takes an int index; the original
        passed the float literal ``0.5`` (a TypeError in PyQt). The slot's
        ``index`` argument is the newly current page's index, so use it.
        """
        self.faderWidget = FaderWidget(self.pages.widget(index))
        self.faderWidget.start()
class MainWindow(QtGui.QMainWindow):
def __init__(self, parent=None):
super(MainWindow, self).__init__(parent)
self.initFrame()
self.centeralwindow = MetroWindow(self)
self.setCentralWidget(self.centeralwindow)
self.createMenus()
self.createToolbars()
self.createStatusbar()
self.setskin()
currentpage = self.centralWidget().pages.currentWidget()
currentpage.navigation.setVisible(windowsoptions['mainwindow']['navigationvisual'])
def initFrame(self):
title = windowsoptions['mainwindow']['title']
postion = windowsoptions['mainwindow']['postion']
minsize = windowsoptions['mainwindow']['minsize']
size = windowsoptions['mainwindow']['size']
windowicon = windowsoptions['mainwindow']['windowicon']
fullscreenflag = windowsoptions['mainwindow']['fullscreenflag']
navigationvisual = windowsoptions['mainwindow']['navigationvisual']
self.setWindowTitle(title)
self.setWindowIcon(QtGui.QIcon(windowicon)) # 设置程序图标
self.setMinimumSize(minsize[0], minsize[1])
width = QtGui.QDesktopWidget().availableGeometry().width() * 5 / 6
height = QtGui.QDesktopWidget().availableGeometry().height() * 7 / 8
self.setGeometry(postion[0], postion[1], width, height) # 初始化窗口位置和大小
self.center() # 将窗口固定在屏幕中间
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.fullscreenflag = fullscreenflag # 初始化时非窗口最大话标志
if self.fullscreenflag:
self.showFullScreen()
else:
self.showNormal()
self.navigationvisual = navigationvisual # 导航标志,初始化时显示导航
self.layout().setContentsMargins(0, 0, 0, 0)
# self.setWindowFlags(QtCore.Qt.CustomizeWindowHint) # 隐藏标题栏, 可以拖动边框改变大小
# self.setWindowFlags(QtCore.Qt.FramelessWindowHint) # 隐藏标题栏, 无法改变大小
self.setWindowFlags(QtCore.Qt.FramelessWindowHint) # 无边框, 带系统菜单, 可以最小化
def setskin(self):
for buttons in windowsoptions['mainwindow']['centralwindow']['pagetags']:
for item in buttons:
childpage = getattr(self.centeralwindow, 'child' + item + 'Page')
set_skin(childpage, os.sep.join(['skin', 'qss', 'MetroNavigationBar.qss'])) # 设置导航工具条的样式
set_skin(self, os.sep.join(['skin', 'qss', 'MetroMainwindow.qss'])) # 设置主窗口样式
def center(self):
qr = self.frameGeometry()
cp = QtGui.QDesktopWidget().availableGeometry().center()
qr.moveCenter(cp)
self.move(qr.topLeft())
def createMenus(self):
menusettings = windowsoptions['mainwindow']['menusettings']
menubar = self.menuBar()
menubar.setVisible(menusettings['visual'])
for menu in menusettings['menus']:
setattr(
self,
'%smenu' % menu['name'],
menubar.addMenu(u'%s%s' % (menu['name'], menu['name_zh']))
)
submenu = getattr(self, '%smenu' % menu['name'])
for menuaction in menu['actions']:
setattr(
self,
'%sAction' % menuaction['trigger'],
QtGui.QAction(
QtGui.QIcon(QtGui.QPixmap(menuaction['icon'])),
'%s%s' % (menuaction['name'], menuaction['name_zh']),
self
)
)
if hasattr(self, 'action%s' % menuaction['trigger']):
action = getattr(self, '%sAction' % menuaction['trigger'])
action.setShortcut(QtGui.QKeySequence(menuaction['shortcut']))
submenu.addAction(action)
action.triggered.connect(
getattr(self, 'action%s' % menuaction['trigger'])
)
else:
action = getattr(self, '%sAction' % menuaction['trigger'])
action.setShortcut(QtGui.QKeySequence(menuaction['shortcut']))
submenu.addAction(action)
action.triggered.connect(
getattr(self, 'actionNotImplement')
)
def createToolbars(self):
toolbarsettings = windowsoptions['mainwindow']['toolbarsettings']
self.toolbar = QtGui.QToolBar(self)
self.toolbar.setMovable(toolbarsettings['movable'])
self.toolbar.setVisible(toolbarsettings['visual'])
self.addToolBar(toolbarsettings['dockArea'], self.toolbar)
for toolbar in toolbarsettings['toolbars']:
setattr(
self,
'%sAction' % toolbar['trigger'],
QtGui.QAction(
QtGui.QIcon(QtGui.QPixmap(toolbar['icon'])),
'%s%s' % (toolbar['name'], toolbar['name_zh']),
self
)
)
if hasattr(self, 'action%s' % toolbar['trigger']):
action = getattr(self, '%sAction' % toolbar['trigger'])
action.setShortcut(QtGui.QKeySequence(toolbar['shortcut']))
action.setToolTip(toolbar['tooltip'])
self.toolbar.addAction(action)
action.triggered.connect(
getattr(self, 'action%s' % toolbar['trigger'])
)
self.toolbar.widgetForAction(action).setObjectName(toolbar['id'])
else:
action = getattr(self, '%sAction' % toolbar['trigger'])
action.setShortcut(QtGui.QKeySequence(toolbar['shortcut']))
action.setToolTip(toolbar['tooltip'])
self.toolbar.addAction(action)
action.triggered.connect(
getattr(self, 'actionNotImplement')
)
self.toolbar.widgetForAction(action).setObjectName(toolbar['id'])
def createStatusbar(self):
statusbarsettings = windowsoptions['mainwindow']['statusbarsettings']
self.statusbar = QtGui.QStatusBar()
self.setStatusBar(self.statusbar)
self.statusbar.showMessage(statusbarsettings['initmessage'])
self.statusbar.setMinimumHeight(statusbarsettings['minimumHeight'])
self.statusbar.setVisible(statusbarsettings['visual'])
def actionAbout(self):
pass
def actionNotImplement(self):
utildialog.msg(u'This action is not Implemented', windowsoptions['msgdialog'])
@QtCore.pyqtSlot()
def windowMaxNormal(self):
if self.isFullScreen():
self.showNormal()
self.sender().setObjectName("MaxButton")
set_skin(self, os.sep.join(['skin', 'qss', 'MetroMainwindow.qss'])) # 设置主窗口样式
else:
self.showFullScreen()
self.sender().setObjectName("MaxNormalButton")
set_skin(self, os.sep.join(['skin', 'qss', 'MetroMainwindow.qss'])) # 设置主窗口样式
def closeEvent(self, evt):
flag, exitflag = utildialog.exit(windowsoptions['exitdialog'])
if flag:
for item in exitflag:
if item == 'minRadio' and exitflag[item]:
self.showMinimized()
evt.ignore()
elif item == 'exitRadio' and exitflag[item]:
evt.accept()
elif item == 'exitsaveRadio' and exitflag[item]:
evt.accept()
self.saveoptions()
with open(os.sep.join([os.getcwd(), 'options', 'windowsoptions.json']), 'wb') as f:
json.dump(windowsoptions, f, indent=4)
else:
evt.ignore()
def saveoptions(self):
windowsoptions['mainwindow']['fullscreenflag'] = self.fullscreenflag
windowsoptions['mainwindow']['navigationvisual'] = \
self.centeralwindow.pages.currentWidget().navigation.isVisible()
windowsoptions['mainwindow']['menusettings']['visual'] = \
self.menuBar().isVisible()
windowsoptions['mainwindow']['statusbarsettings']['visual'] = \
self.statusBar().isVisible()
def keyPressEvent(self, evt):
if evt.key() == QtCore.Qt.Key_Escape:
self.close()
elif evt.key() == QtCore.Qt.Key_F5:
if not self.fullscreenflag:
self.showFullScreen()
self.fullscreenflag = True
else:
self.showNormal()
self.fullscreenflag = False
elif evt.key() == QtCore.Qt.Key_F10:
currentpage = self.centralWidget().pages.currentWidget()
if hasattr(currentpage, 'navigation'):
if self.navigationvisual:
currentpage.navigation.setVisible(False)
self.navigationvisual = False
else:
currentpage.navigation.setVisible(True)
self.navigationvisual = True
elif evt.key() == QtCore.Qt.Key_F9:
if self.menuBar().isVisible():
self.menuBar().hide()
else:
self.menuBar().show()
elif evt.key() == QtCore.Qt.Key_F8:
if self.statusbar.isVisible():
self.statusbar.hide()
else:
self.statusbar.show()
def mousePressEvent(self, event):
    """On a left-button press, remember the cursor offset from the window's
    top-left corner so mouseMoveEvent can drag the (frameless) window."""
    if event.button() != QtCore.Qt.LeftButton:
        return
    self.dragPosition = event.globalPos() - self.frameGeometry().topLeft()
    event.accept()
def mouseMoveEvent(self, event):
    """Drag the window while the left button is held, using the offset
    recorded by mousePressEvent (no-op before the first press)."""
    if not hasattr(self, "dragPosition"):
        return
    if event.buttons() == QtCore.Qt.LeftButton:
        self.move(event.globalPos() - self.dragPosition)
        event.accept()
class SplashScreen(QtGui.QSplashScreen):
    """Application-modal splash screen with a blocking fade-in/fade-out."""

    def __init__(self, splash_image):
        super(SplashScreen, self).__init__(splash_image)  # image shown at startup
        self.setWindowModality(QtCore.Qt.ApplicationModal)

    def fadeTicker(self, keep_t):
        """Fade the splash in, hold it for keep_t seconds, then fade it out.

        Blocks the calling thread (uses time.sleep); each fade runs at most
        50 steps of 0.02 opacity, sleeping 0.04 s per step (~2 s total).

        :param keep_t: seconds to keep the splash fully visible between fades
        """
        self.setWindowOpacity(0)
        t = 0
        while t <= 50:
            newOpacity = self.windowOpacity() + 0.02  # fade in
            if newOpacity > 1:
                break
            self.setWindowOpacity(newOpacity)
            self.show()
            # BUGFIX: this counter previously decremented (t -= 1), which made
            # the `t <= 50` guard dead code and left the opacity break as the
            # only exit; increment so the 50-step bound actually applies,
            # mirroring the fade-out loop below.
            t += 1
            time.sleep(0.04)
        self.show()
        time.sleep(keep_t)
        t = 0
        while t <= 50:
            newOpacity = self.windowOpacity() - 0.02  # fade out
            if newOpacity < 0:
                self.close()
                break
            self.setWindowOpacity(newOpacity)
            self.show()
            t += 1
            time.sleep(0.04)
if __name__ == '__main__':
    import sys
    # Register the Qt plugin directory explicitly on Linux; the path differs
    # between 32-bit and 64-bit installs.
    # NOTE(review): sys.platform == "linux2" only matches Python 2; Python 3
    # reports "linux" -- confirm the target interpreter.
    if sys.platform == "linux2":
        import platform
        if platform.architecture()[0] == "32bit":
            QtGui.QApplication.addLibraryPath(
                '/usr/lib/%s-linux-gnu/qt5/plugins/' % 'i386')
        else:
            QtGui.QApplication.addLibraryPath(
                '/usr/lib/%s-linux-gnu/qt5/plugins/' % platform.machine())
    app = QtGui.QApplication(sys.argv)
    # Show the splash image; keep_t of 0 means no hold between fade in/out.
    splash = SplashScreen(QtGui.QPixmap(windowsoptions['splashimg']))
    splash.fadeTicker(0)
    app.processEvents()
    main = MainWindow()
    main.show()
    splash.finish(main)
    sys.exit(app.exec_())
|
<gh_stars>10-100
"""
Split German compound words
"""
from pathlib import Path
from typing import List, Tuple
import re
import sys
import json
# Pre-computed ngram probability table bundled next to this module.
NGRAM_PATH = Path(__file__).parent / "ngram_probs.json"
# Loaded once at import time; the splitter reads the "suffix", "infix" and
# "prefix" sub-tables.
with open(NGRAM_PATH) as f:
    ngram_probs = json.load(f)
class Splitter:
    """
    Wrapper around the split_compound function
    """

    def split_compound(self, word: str) -> List[Tuple[float, str, str]]:
        """Return list of possible splits, best first.

        Each candidate is a (score, first_part, second_part) tuple with both
        parts title-cased. The score combines ngram probabilities of the
        prefix ending, the infix spanning the split point, and the suffix
        beginning.

        :param word: Word to be split
        :return: List of all splits, best (highest score) first
        """
        word = word.lower()

        # If there is a hyphen in the word, return part of the word behind the last hyphen
        if '-' in word:
            return [(1., re.search('(.*)-', word.title()).group(1), re.sub('.*-', '', word.title()))]

        scores = list()  # Score for each possible split position

        # Iterate through characters, start at forth character, go to 3rd last
        for n in range(3, len(word) - 2):
            pre_slice = word[:n]

            # Cut of Fugen-S
            if pre_slice.endswith('ts') or pre_slice.endswith('gs') or pre_slice.endswith('ks') \
                    or pre_slice.endswith('hls') or pre_slice.endswith('ns'):
                if len(word[:n - 1]) > 2:
                    pre_slice = word[:n - 1]

            # Start, in, and end probabilities
            pre_slice_prob = list()
            in_slice_prob = list()
            start_slice_prob = list()

            # Extract all ngrams
            for k in range(len(word) + 1, 2, -1):
                # Probability of first compound, given by its ending prob
                if not pre_slice_prob and k <= len(pre_slice):
                    # The line above deviates from the description in the thesis;
                    # it only considers word[:n] as the pre_slice.
                    # This improves accuracy on GermEval and increases speed.
                    # Use the line below to replicate the original implementation:
                    # if k <= len(pre_slice):
                    end_ngram = pre_slice[-k:]  # Look backwards
                    pre_slice_prob.append(ngram_probs["suffix"].get(end_ngram, -1))  # Punish unlikely pre_slice end_ngram

                # Probability of ngram in word, if high, split unlikely
                in_ngram = word[n:n + k]
                in_slice_prob.append(ngram_probs["infix"].get(in_ngram, 1))  # Favor ngrams not occurring within words

                # Probability of word starting
                # The condition below deviates from the description in the thesis (see above comments);
                # Remove the condition to restore the original implementation.
                if not start_slice_prob:
                    ngram = word[n:n + k]
                    # Cut Fugen-S
                    if ngram.endswith('ts') or ngram.endswith('gs') or ngram.endswith('ks') \
                            or ngram.endswith('hls') or ngram.endswith('ns'):
                        if len(ngram[:-1]) > 2:
                            ngram = ngram[:-1]
                    start_slice_prob.append(ngram_probs["prefix"].get(ngram, -1))

            if not pre_slice_prob or not start_slice_prob:
                continue

            start_slice_prob = max(start_slice_prob)
            pre_slice_prob = max(pre_slice_prob)  # Highest, best pre_slice
            in_slice_prob = min(in_slice_prob)  # Lowest, punish splitting of good in_grams
            score = start_slice_prob - in_slice_prob + pre_slice_prob
            scores.append((score, word[:n].title(), word[n:].title()))

        if not scores:
            # FIX: the fallback previously used a mutable list [0, ...] even
            # though the declared element type is a tuple; use a tuple with a
            # float score like every other candidate.
            scores = [(0., word.title(), word.title())]

        # FIX: sort once, best first (the list used to be sorted twice: an
        # in-place sort followed by a redundant sorted() on return).
        scores.sort(reverse=True)
        return scores

    def germanet_evaluation(self,
                            germanet_file: str = 'split_compounds_from_GermaNet13.0.txt',
                            print_errors: bool = False) -> None:
        """
        Test on GermaNet compounds
        from http://www.sfs.uni-tuebingen.de/lsd/compounds.shtml

        :param germanet_file: tab-separated GermaNet compound list (two header lines)
        :param print_errors: if True, print each mis-split compound
        """
        cases, correct = 0, 0
        # FIX: read via a context manager so the file handle is closed
        # (previously open(...).readlines() leaked the handle).
        with open(germanet_file, 'r') as germanet:
            lines = germanet.readlines()[2:]  # Skip the two header lines
        for line in lines:
            cases += 1
            sys.stderr.write('\r' + str(cases))  # Progress ticker
            sys.stderr.flush()
            line = line.strip().split('\t')
            if not len(line) == 3:
                continue  # A few corrupted lines
            split_result = self.split_compound(line[0])
            # Compare the best candidate's second part against the gold split.
            if split_result[0][2] == line[2]:
                correct += 1
            elif print_errors:
                print(line, split_result)
            if cases % 10000 == 0:
                print(' Accuracy (' + str(correct) + '/' + str(cases) + '): ', 100 * correct/cases)
        print(' Accuracy (' + str(correct) + '/' + str(cases) + '): ', 100 * correct/cases)
if __name__ == '__main__':
    # Demo: print every candidate split for one compound, then run the
    # GermaNet benchmark (requires the compound list file to be present).
    splitter = Splitter()
    scores = splitter.split_compound('Autobahnraststätte')
    for score in scores:
        print(score)
    splitter.germanet_evaluation()
|
// Recursively convert a Value dictionary into a pp::VarDictionary.
// Each entry's Value is mapped to the matching pp::Var type; nested
// dictionaries and arrays recurse via CreateVarDictionary/CreateVarArray.
pp::VarDictionary CreateVarDictionary(const Value::DictionaryStorage& dictionary_storage) {
  pp::VarDictionary var_dictionary;
  for (const auto& pair : dictionary_storage) {
    const std::string& key = pair.first;
    const Value& value = pair.second;
    pp::Var var_value;
    // Convert Value to pp::Var based on its type
    if (value.is_int()) {
      var_value = pp::Var(static_cast<int32_t>(value.get_int()));
    } else if (value.is_double()) {
      var_value = pp::Var(value.get_double());
    } else if (value.is_bool()) {
      var_value = pp::Var(value.get_bool());
    } else if (value.is_string()) {
      var_value = pp::Var(value.get_string());
    } else if (value.is_dictionary()) {
      var_value = CreateVarDictionary(value.get_dictionary());
    } else if (value.is_array()) {
      var_value = CreateVarArray(value.get_array());
    } else {
      // Handle unsupported value types or error conditions
      // For example, throw an exception or log an error
      // NOTE(review): currently falls through silently, so the
      // default-constructed var_value is still stored under `key` --
      // confirm this is intended for unsupported types.
    }
    var_dictionary.Set(key, var_value);
  }
  return var_dictionary;
}
|
#!/usr/bin/env bash
# Copyright 2016 by Rackspace Hosting, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Abort the build on the first failing command.
set -e

# Name of the throwaway pyenv virtualenv used for each build step.
VENV_NAME=tmp-falcon-build
# Build scratch and distribution output directories (wiped at startup).
BUILD_DIR=./build
DIST_DIR=./dist
# Python version used to build the sdist and the universal wheel.
PY2_VERSION=2.7.14
#----------------------------------------------------------------------
# Helpers
#----------------------------------------------------------------------
# Args: (python_version)
#
# Create and activate a fresh pyenv virtualenv named $VENV_NAME for the
# given Python version (installing that version first if missing), then
# upgrade the packaging tools inside it.
_open_env() {
    local PY_VERSION=$1

    pyenv install -s $PY_VERSION
    pyenv virtualenv $PY_VERSION $VENV_NAME
    pyenv shell $VENV_NAME

    pip install --upgrade pip
    pip install --upgrade wheel twine
}
# Args: ()
#
# Deactivate and delete the temporary build virtualenv.
_close_env() {
    # FIX: removed `rm -rf $DIST_PATH` -- DIST_PATH is never defined in this
    # script (only DIST_DIR is), so the command was a silent no-op, and would
    # delete an arbitrary path if DIST_PATH ever leaked in from the
    # environment.
    pyenv shell system
    pyenv uninstall -f $VENV_NAME
}
# Args: (message)
#
# Print a banner announcing the next build task.
_echo_task() {
    printf '\n'
    printf '# ----------------------------------------------------------\n'
    printf '# %s\n' "$1"
    printf '# ----------------------------------------------------------\n'
}
#----------------------------------------------------------------------
# Prerequisites
#----------------------------------------------------------------------

# Setup pyenv
# Registers pyenv's shims and the pyenv-virtualenv plugin in this shell;
# both must be installed for the _open_env/_close_env helpers to work.
eval "$(pyenv init -)"
eval "$(pyenv virtualenv-init -)"

#----------------------------------------------------------------------
# Start with a clean slate
#----------------------------------------------------------------------
_echo_task "Cleaning up old artifacts"
tools/clean.sh .
rm -rf $BUILD_DIR
rm -rf $DIST_DIR

# Drop any leftover build virtualenv from a previous (possibly aborted) run.
pyenv shell system
pyenv uninstall -f $VENV_NAME

#----------------------------------------------------------------------
# README validation
#----------------------------------------------------------------------
_echo_task "Checking that README will render on PyPI"
_open_env $PY2_VERSION

pip install readme_renderer
python setup.py check -r -s

_close_env

#----------------------------------------------------------------------
# Source distribution
#----------------------------------------------------------------------
_echo_task "Building source distribution"
_open_env $PY2_VERSION

python setup.py sdist -d $DIST_DIR

_close_env

#----------------------------------------------------------------------
# Universal wheel - do not include Cython, note in README
#----------------------------------------------------------------------
_echo_task "Building universal wheel"
_open_env $PY2_VERSION

python setup.py bdist_wheel -d $DIST_DIR

_close_env
|
/**
 * Immutable axis-aligned rectangle defined by its width and height.
 */
public class Rectangle {
    // Dimensions are fixed at construction; `final` documents and enforces
    // the immutability (the original fields were mutable for no reason).
    private final double width;
    private final double height;

    /**
     * Creates a rectangle with the given dimensions.
     *
     * @param width  the rectangle's width
     * @param height the rectangle's height
     */
    public Rectangle(double width, double height) {
        this.width = width;
        this.height = height;
    }

    /** @return the area, {@code width * height} */
    public double getArea() {
        return this.width * this.height;
    }

    /** @return the perimeter, {@code 2 * (width + height)} */
    public double getPerimeter() {
        return 2 * (this.width + this.height);
    }
}
|
#!/bin/sh
#
# STIG URL: http://www.stigviewer.com/stig/red_hat_enterprise_linux_6/2014-06-11/finding/V-38451
# Finding ID: V-38451
# Version: RHEL-06-000040
# Finding Level: Medium
#
# The /etc/passwd file must be group-owned by root. The "/etc/passwd"
# file contains information about the users that are configured on the
# system. Protection of this file is critical for system security.
#
# CCI: CCI-000366
# NIST SP 800-53 :: CM-6 b
# NIST SP 800-53A :: CM-6.1 (iv)
# NIST SP 800-53 Revision 4 :: CM-6 b
#
############################################################
# Standard outputter function
# diag_out: print a single diagnostic line to stdout.
diag_out() {
   echo "${1}"
}

# Banner identifying the STIG finding this script checks.
diag_out "----------------------------------"
diag_out "STIG Finding ID: V-38451"
diag_out "  Ensure passwd file is owned as"
diag_out "  the root group"
diag_out "----------------------------------"
|
#ifndef CORE_UTIL_BINARYDATACOMPRESSOR_H_
#define CORE_UTIL_BINARYDATACOMPRESSOR_H_
#include <core-base/common.h>
namespace ml
{
//! interface to compress data
class BinaryDataCompressorInterface {
public:
	//! FIX: an abstract base used polymorphically needs a virtual destructor,
	//! otherwise deleting a derived object through a base pointer is
	//! undefined behavior (was missing).
	virtual ~BinaryDataCompressorInterface() {}

	//! Compress decompressedStreamLength bytes from decompressedStream into compressedStream.
	virtual void compressStreamToMemory(const BYTE *decompressedStream, UINT64 decompressedStreamLength, std::vector<BYTE> &compressedStream) const = 0;

	//! Decompress into a caller-provided buffer of exactly decompressedStreamLength bytes.
	virtual void decompressStreamFromMemory(const BYTE *compressedStream, UINT64 compressedStreamLength, BYTE *decompressedStream, UINT64 decompressedStreamLength) const = 0;

	//! Human-readable name of the compression scheme.
	virtual std::string getTypename() const = 0;
};
//! interface to compress data
//! Placeholder implementation: any call is a programming error and asserts.
class BinaryDataCompressorNone : public BinaryDataCompressorInterface {
public:
	void compressStreamToMemory(const BYTE *decompressedStream, UINT64 decompressedStreamLength, std::vector<BYTE> &compressedStream) const {
		MLIB_ASSERT(false);	//just a dummy; should never come here
	}

	void decompressStreamFromMemory(const BYTE *compressedStream, UINT64 compressedStreamLength, BYTE *decompressedStream, UINT64 decompressedStreamLength) const {
		MLIB_ASSERT(false);	//just a dummy; should never come here
	}

	std::string getTypename() const {
		return "no compression";
	}
};
//! interface to compress data
//! Pass-through "compressor": copies the input verbatim (no real compression).
class BinaryDataCompressorDefault : public BinaryDataCompressorInterface {
public:
	void compressStreamToMemory(const BYTE *decompressedStream, UINT64 decompressedStreamLength, std::vector<BYTE> &compressedStream) const {
		compressedStream.resize(decompressedStreamLength);
		// FIX: guard zero-length input -- &compressedStream[0] on an empty
		// vector is undefined behavior (the copy was previously unguarded).
		if (decompressedStreamLength > 0) {
			memcpy(&compressedStream[0], decompressedStream, decompressedStreamLength);
		}
	}

	void decompressStreamFromMemory(const BYTE *compressedStream, UINT64 compressedStreamLength, BYTE *decompressedStream, UINT64 decompressedStreamLength) const {
		// FIX: same zero-length guard; memcpy with a null pointer is UB even
		// for a count of 0.
		if (compressedStreamLength > 0) {
			memcpy(decompressedStream, compressedStream, compressedStreamLength);
		}
	}

	std::string getTypename() const {
		return "stupid copying - should not be used";
	}
};
} // namespace ml
#endif // CORE_UTIL_BINARYDATACOMPRESSOR_H_
|
class Solution {
public:
	// Decide whether n is a "happy number": repeatedly replace n with the
	// sum of the squares of its decimal digits; happy numbers reach 1,
	// unhappy ones enter a cycle. Floyd's tortoise-and-hare detects the
	// cycle without extra memory (same approach as linked-list cycle check).
	bool isHappy(int n) {
		int tortoise = n;
		int hare = n;
		while (true) {
			tortoise = squareSum(tortoise);
			hare = squareSum(squareSum(hare));
			if (tortoise == hare) {
				break;
			}
		}
		// The sequence either stabilizes at 1 (1 -> 1) or cycles elsewhere.
		return tortoise == 1;
	}

	// Sum of the squares of the decimal digits of n.
	int squareSum(int n) {
		int total = 0;
		for (; n != 0; n /= 10) {
			int digit = n % 10;
			total += digit * digit;
		}
		return total;
	}
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.