text
stringlengths 1
1.05M
|
|---|
import { JSX, render } from "preact";
import { isString } from "ts-type-guards";
import { log } from "userscripter";
import { truncate } from "~src/utilities";
/**
 * A function assumed to insert `placeholder` in `parent`.
 */
type InsertIn<Parent extends Element> = (
/*
This type has an object parameter instead of two plain parameters because otherwise it would be easy to accidentally pass something that's not an actual insertion function.
In particular, one could accidentally pass `insertBefore` instead of `insertBefore(reference)` and get no type error.
It's still possible to pass something stupid like `console.log` or `() => {}`, but that's hard to prevent and not very likely anyway.
We could have parameterized over the type of `placeholder` and required that the return type be that type, but then one might be tempted to pass something like `({ placeholder }) => placeholder`.
Using `void` makes it obvious that the function should have at least some side effect.
*/
(_: { parent: Parent, placeholder: Text }) => void
)
/**
 * An insertion function specialized to any `Element` parent.
 */
type Insert = InsertIn<Element>
/**
* Renders a virtual node at a specific position in a parent element.
* @param parent The element to render inside.
* @param insert A function describing where in the parent element the rendered element should be inserted. It will be given `{ parent, placeholder }` and will be assumed to actually insert `placeholder` directly under `parent`.
* @param vnode The virtual node to render.
*/
/**
 * Renders a virtual node at a specific position in a parent element.
 * @param parent The element to render inside.
 * @param insert A function describing where in the parent element the rendered element should be inserted. It will be given `{ parent, placeholder }` and will be assumed to actually insert `placeholder` directly under `parent`.
 * @param vnode The virtual node to render.
 */
export function renderIn<Parent extends Element>(
    parent: Parent,
    insert: InsertIn<Parent>,
    vnode: JSX.Element,
): void {
    // Any node type works as a marker, because Preact replaces it with the rendered element.
    const marker = document.createTextNode("");
    insert({ parent, placeholder: marker });
    // "If the optional replaceNode parameter is provided, it must be a child of containerNode."
    // https://preactjs.com/guide/v10/api-reference/
    if (marker.parentNode !== parent) {
        const nodeType = isString(vnode.type) ? ` "${vnode.type}"` : ""; // Cluttered message if it's not a string.
        const nodeProps = truncate(200, JSON.stringify(vnode.props)); // Cluttered message if it's too long.
        log.error(`Placeholder for virtual${nodeType} node with properties\n\n    ${nodeProps}\n\nwas not inserted directly under the intended parent element.`);
        return;
    }
    render(vnode, parent, marker);
}
/** Inserts the placeholder as the last child of the parent. */
export const insertAtTheEnd: Insert = ({ parent, placeholder }) => {
    parent.append(placeholder);
};
/** Inserts the placeholder as the first child of the parent. */
export const insertAtTheBeginning: Insert = ({ parent, placeholder }) => {
    parent.prepend(placeholder);
};
/** Creates an insertion function that places the placeholder immediately before `reference`. */
export const insertBefore: (reference: Node) => Insert = function (reference) {
    return ({ parent, placeholder }) => {
        parent.insertBefore(placeholder, reference);
    };
};
/** Creates an insertion function that places the placeholder immediately after `reference`. */
export const insertAfter: (reference: Node) => Insert = function (reference) {
    return ({ parent, placeholder }) => {
        // When reference.nextSibling is null, reference is the last child and
        // insertBefore(node, null) appends — i.e. inserts after reference.
        parent.insertBefore(placeholder, reference.nextSibling);
    };
};
|
<reponame>Minwasko/SmartSnake<gh_stars>0
/**
 * Game-state and DOM driver for the snake's head. Positions are kept in
 * pixels and mirrored onto the inline style of the element with id "snake0".
 */
export class Snake {
    /** Horizontal position in pixels (CSS `left` of #snake0). */
    public x: number;
    /** Vertical position in pixels (CSS `top` of #snake0). */
    public y: number;
    /** Current snake length (number of body segments eaten so far). */
    public length: number;

    /** Size of one movement step, in pixels. */
    private static readonly STEP = 20;

    constructor() {
        this.x = 15;
        this.y = 65;
        this.length = 0;
    }

    public goUp(): void {
        this.y -= Snake.STEP;
        this.applyStyle('top', this.y);
    }

    public goDown(): void {
        this.y += Snake.STEP;
        this.applyStyle('top', this.y);
    }

    public goLeft(): void {
        this.x -= Snake.STEP;
        this.applyStyle('left', this.x);
    }

    public goRight(): void {
        this.x += Snake.STEP;
        this.applyStyle('left', this.x);
    }

    /**
     * Writes a pixel value to the head element's inline style.
     * Previously duplicated in all four movement methods and crashed with an
     * unhelpful TypeError when the element was missing; now fails explicitly.
     */
    private applyStyle(property: 'top' | 'left', value: number): void {
        const head = document.getElementById('snake0');
        if (head === null) {
            throw new Error('Snake element "snake0" not found in the document.');
        }
        head.style[property] = `${value}px`;
    }
}
|
import pandas as pd
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.svm import SVC
# Build a small labelled dataset.
# NOTE: sklearn's SVC requires at least two distinct classes to fit; the
# original single-row (single-class) dataset raised
# "ValueError: The number of classes has to be greater than one".
data = [
    ["The car is a Ford Mustang", "Ford Mustang"],
    ["The car is a Chevrolet Camaro", "Chevrolet Camaro"],
]
df = pd.DataFrame(data, columns=['sentence', 'class'])
# Vectorize the sentences into TF-IDF features.
vectorizer = TfidfVectorizer()
X = vectorizer.fit_transform(df['sentence'])
# Train the SVM classifier on the vectorized sentences.
classifier = SVC()
classifier.fit(X, df['class'])
|
<reponame>mizukai/sample<gh_stars>10-100
// <slider> element directive: a titled range input two-way bound to `model`.
angular.module('audioVizApp')
  .directive('slider', function () {
    var sliderTemplate =
      '<div class="slider"><div class="title">{{name}}: {{model}}</div>' +
      '<div class="input"><input ng-model="model" type="range" min="{{from}}" max="{{to}}" step="{{step}}">' +
      '</div></div>';
    return {
      template: sliderTemplate,
      replace: true,
      restrict: 'E',
      // '@' bindings are one-way strings; 'model' is a two-way binding.
      scope: { from: '@', to: '@', step: '@', model: '=', name: '@' }
    };
  });
|
#!/bin/bash
set -e
# Run from the directory containing this script, regardless of the caller's cwd.
cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1
# NOTE(review): machine-specific NDK path — adjust per host.
NDK=/home/heaven7/study/android/android-ndk-r21d
HOST_TAG=linux-x86_64
# Per-arch artifacts are installed under ../build/<triple>/install.
INSTALL_DIR="$(pwd)/../build"
cd "../libs/libvips"
# need install gobject-introspection (for autogen.sh)
# Cross-compiles libvips for one Android ABI.
#   $1 = target triple (e.g. aarch64-linux-android)
#   $2 = minimum Android API level (appended to the clang wrapper name)
#   $3 = binutils tool prefix for ar/as/ld/ranlib/strip
# Skips the build entirely when build/$1 already exists.
function build_for_arch() {
if ! test -e build/${1}; then
#export CFLAGS="$CFLAGS -fPIC -O2 -flto"
export CFLAGS="$CFLAGS -fPIC -O2"
export TOOLCHAIN=$NDK/toolchains/llvm/prebuilt/$HOST_TAG
export AR=$TOOLCHAIN/bin/${3}-ar
export AS=$TOOLCHAIN/bin/${3}-as
export CC=$TOOLCHAIN/bin/${1}${2}-clang
export CXX=$TOOLCHAIN/bin/${1}${2}-clang++
#export LD="$TOOLCHAIN/bin/${3}-ld -flto"
export LD="$TOOLCHAIN/bin/${3}-ld"
export RANLIB=$TOOLCHAIN/bin/${3}-ranlib
export STRIP=$TOOLCHAIN/bin/${3}-strip
fake_sysroot=${INSTALL_DIR}/${1}/install
# Point pkg-config exclusively at the fake sysroot so host libraries never leak in.
export PKG_CONFIG_DIR=
export PKG_CONFIG_LIBDIR=${fake_sysroot}/usr/lib/pkgconfig:${fake_sysroot}/usr/share/pkgconfig
export PKG_CONFIG_SYSROOT_DIR=${fake_sysroot}
export PKG_CONFIG_PATH=${fake_sysroot}/usr/local/lib/pkgconfig
#--disable-shared \
#--enable-static \
echo ${1}
./autogen.sh --host ${1}
./configure \
"--prefix=${fake_sysroot}/usr/local" \
--host ${1} \
"--with-sysroot=${NDK}/sysroot" \
"--with-expat=${fake_sysroot}/usr/local" \
"--with-zlib=${NDK}/sysroot" \
"--with-png-includes=${fake_sysroot}/usr/local/include" \
"--with-png-libraries=${fake_sysroot}/usr/local/libs" \
--without-gsf \
--without-fftw \
--without-magick \
--without-orc \
--without-lcms \
--without-OpenEXR \
--without-nifti \
--without-heif \
--without-pdfium \
--without-poppler \
--without-rsvg \
--without-openslide \
--without-matio \
--without-ppm \
--without-analyze \
--without-radiance \
--without-cfitsio \
--without-libwebp \
--without-pangoft2 \
--without-tiff \
--without-giflib \
--without-imagequant
# BUG FIX: the line above previously ended with a trailing backslash, which made
# "make -j4" become arguments of ./configure instead of a separate command.
make -j4
mkdir -p build/${1}
make install
#sleep 1s
make distclean # may report errors
fi
}
# Build only the arm64 variant by default; uncomment other ABIs as needed.
#build_for_arch armv7a-linux-androideabi 16 arm-linux-androideabi
build_for_arch aarch64-linux-android 21 aarch64-linux-android
#build_for_arch i686-linux-android 16 i686-linux-android
#build_for_arch x86_64-linux-android 21 x86_64-linux-android
|
package korrektur;
import java.util.Random;
// utf8: "Köpfchen in das Wasser, Schwänzchen in die Höh." -CIA-Verhörmethode
/**
 * Second-marker thread: repeatedly takes an exam from the left buffer,
 * produces a second correction for every task, and hands the exam on to the
 * right buffer. Terminates when the left buffer yields null.
 */
public class Uebungsleitung extends Thread {
    private Buffer<Klausur> left;
    private Buffer<Klausur> right;
    private Random random = new Random();

    public Uebungsleitung(Buffer<Klausur> left, Buffer<Klausur> right) {
        this.left = left;
        this.right = right;
    }

    @Override
    public void run() {
        int processed = 0;
        System.out.printf("started Übungsleitung %s\n", getName());
        while (true) {
            try {
                Klausur exam = left.get();
                if (exam == null) {
                    break;
                }
                for (int task = 0; task < exam.getPunkte().length; task++) {
                    int firstScore = exam.getPunkte()[task];
                    int secondScore = firstScore;
                    // With probability 1/10, nudge the score by one point
                    // (randomly up or down), clamped to [0, maxPoints].
                    if (random.nextInt(10) == 9) {
                        if (random.nextBoolean()) {
                            secondScore = Math.min(Korrekturschema.maxPoints(task), firstScore + 1);
                        } else {
                            secondScore = Math.max(0, firstScore - 1);
                        }
                    }
                    exam.setZweitkorrektur(task, secondScore);
                }
                right.add(exam);
                processed++;
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        System.out.printf("stopped Übungsleitung %s at %d\n", getName(), processed);
    }
}
|
<gh_stars>0
package com.example.security.security.service;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.example.security.domain.SecurityUser;
import com.example.security.service.SecurityUserService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessException;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
/**
* 一个自定义的service用来和数据库进行操作. 即以后我们要通过数据库保存权限.则需要我们继承UserDetailsService
*
* @author jiang
*
*/
@Component
public class CustomUserDetailsService implements UserDetailsService {
protected static Logger logger = LoggerFactory.getLogger("CustomUserDetailsService");
@Autowired
private SecurityUserService securityUserService;
private GrantedAuthority DEFAULT_ROLE = new SimpleGrantedAuthority("ROLE_USER");
@Override
public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException, DataAccessException {
if (!StringUtils.hasLength(username)) {
throw new BadCredentialsException("用户名不能为空");
}
UserDetails userdetails = null;
try {
// 1.根据用户名从数据库查询用户信息,根据自己的业务规则去写
SecurityUser serurityUser = securityUserService.selectByUserName(username);
if (serurityUser == null) {
throw new BadCredentialsException("用户名不存在");
}
// 2. 设置角色
List<GrantedAuthority> grantedAuthorities = new ArrayList<>();
String dbRole = serurityUser.getRole();
if (!StringUtils.hasLength(dbRole)) {
grantedAuthorities.add(DEFAULT_ROLE);
} else {
String[] roles = dbRole.split(",");
for (String role : roles) {
GrantedAuthority grantedAuthority = new SimpleGrantedAuthority(role);
grantedAuthorities.add(grantedAuthority);
}
}
userdetails= new org.springframework.security.core.userdetails.User(username, serurityUser.getPassword(),
true, // 激活状态,true:yes;false:no
true, // 账号过期状态,true:no;false;yes
true, // 证书过期状态,true:no;false;yes
true, // 账号锁定状态,true:no;false;yes
// AuthorityUtils.createAuthorityList("ROLE_USER")
grantedAuthorities);
} catch (Exception e) {
logger.error("Error in retrieving user");
throw new UsernameNotFoundException("Error in retrieving user");
}
return userdetails;
}
}
|
package service
import (
"context"
"encoding/json"
"fmt"
"io/ioutil"
"os"
"os/signal"
"path/filepath"
"strings"
"syscall"
"time"
"github.com/davecgh/go-spew/spew"
dhclient "github.com/digineo/go-dhclient"
"github.com/plunder-app/kube-vip/pkg/cluster"
"github.com/plunder-app/kube-vip/pkg/kubevip"
log "github.com/sirupsen/logrus"
v1 "k8s.io/api/core/v1"
apierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/leaderelection/resourcelock"
watchtools "k8s.io/client-go/tools/watch"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/tools/cache"
"k8s.io/client-go/tools/clientcmd"
"k8s.io/client-go/tools/leaderelection"
)
// plunderLock is the name of the Lease object used for leader election.
const plunderLock = "plunder-lock"
// OutSideCluster allows the controller to be started using a local kubeConfig for testing
var OutSideCluster bool
// EnableArp - determines the use of ARP broadcasts
var EnableArp bool
// Interface - determines the interface that all Loadbalancers/macvlans will bind to
var Interface string
// plndrServices is the JSON document stored in the watched ConfigMap under
// the "plndr-services" key.
type plndrServices struct {
Services []service `json:"services"`
}
// dhcpService holds the DHCP client state for a service whose VIP is leased.
type dhcpService struct {
// dhcpClient (used DHCP for the vip)
dhcpClient *dhclient.Client
dhcpInterface string
}
// serviceInstance ties one Kubernetes service to its VIP configuration and
// running cluster instance.
type serviceInstance struct {
// Virtual IP / Load Balancer configuration
vipConfig kubevip.Config
// Kubernetes service mapping
service service
// cluster instance
cluster cluster.Cluster
// Custom settings
dhcp *dhcpService
}
// TODO - call from a package (duplicated struct in the cloud-provider code)
type service struct {
Vip string `json:"vip"`
Port int `json:"port"`
UID string `json:"uid"`
Type string `json:"type"`
ServiceName string `json:"serviceName"`
}
// Manager defines the manager of the load-balancing services
type Manager struct {
clientSet *kubernetes.Clientset
configMap string
// Keeps track of all running instances
serviceInstances []serviceInstance
}
// NewManager will create a new managing object
func NewManager(configMap string) (*Manager, error) {
var clientset *kubernetes.Clientset
if OutSideCluster == false {
// This will attempt to load the configuration when running within a POD
cfg, err := rest.InClusterConfig()
if err != nil {
return nil, fmt.Errorf("error creating kubernetes client config: %s", err.Error())
}
clientset, err = kubernetes.NewForConfig(cfg)
if err != nil {
return nil, fmt.Errorf("error creating kubernetes client: %s", err.Error())
}
// use the current context in kubeconfig
} else {
config, err := clientcmd.BuildConfigFromFlags("", filepath.Join(os.Getenv("HOME"), ".kube", "config"))
if err != nil {
panic(err.Error())
}
clientset, err = kubernetes.NewForConfig(config)
if err != nil {
return nil, fmt.Errorf("error creating kubernetes client: %s", err.Error())
}
}
return &Manager{
clientSet: clientset,
configMap: configMap,
}, nil
}
// Start will begin the ConfigMap watcher
func (sm *Manager) Start() error {
ns, err := returnNameSpace()
if err != nil {
return err
}
id, err := os.Hostname()
if err != nil {
return err
}
// Build a options structure to defined what we're looking for
listOptions := metav1.ListOptions{
FieldSelector: fmt.Sprintf("metadata.name=%s", sm.configMap),
}
log.Infof("Beginning cluster membership, namespace [%s], lock name [%s], id [%s]", ns, plunderLock, id)
// we use the Lease lock type since edits to Leases are less common
// and fewer objects in the cluster watch "all Leases".
lock := &resourcelock.LeaseLock{
LeaseMeta: metav1.ObjectMeta{
Name: plunderLock,
Namespace: ns,
},
Client: sm.clientSet.CoordinationV1(),
LockConfig: resourcelock.ResourceLockConfig{
Identity: id,
},
}
// use a Go context so we can tell the leaderelection code when we
// want to step down
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
// listen for interrupts or the Linux SIGTERM signal and cancel
// our context, which the leader election code will observe and
// step down
signalChan := make(chan os.Signal, 1)
// Add Notification for Userland interrupt
signal.Notify(signalChan, syscall.SIGINT)
// Add Notification for SIGTERM (sent from Kubernetes)
signal.Notify(signalChan, syscall.SIGTERM)
// Add Notification for SIGKILL (sent from Kubernetes)
signal.Notify(signalChan, syscall.SIGKILL)
go func() {
<-signalChan
log.Info("Received termination, signaling shutdown")
// Cancel the context, which will in turn cancel the leadership
cancel()
}()
// start the leader election code loop
leaderelection.RunOrDie(ctx, leaderelection.LeaderElectionConfig{
Lock: lock,
// IMPORTANT: you MUST ensure that any code you have that
// is protected by the lease must terminate **before**
// you call cancel. Otherwise, you could have a background
// loop still running and another process could
// get elected before your background loop finished, violating
// the stated goal of the lease.
ReleaseOnCancel: true,
LeaseDuration: 10 * time.Second,
RenewDeadline: 5 * time.Second,
RetryPeriod: 1 * time.Second,
Callbacks: leaderelection.LeaderCallbacks{
OnStartedLeading: func(ctx context.Context) {
// we're notified when we start
// Watch function
// Use a restartable watcher, as this should help in the event of etcd or timeout issues
rw, err := watchtools.NewRetryWatcher("1", &cache.ListWatch{
WatchFunc: func(options metav1.ListOptions) (watch.Interface, error) {
return sm.clientSet.CoreV1().ConfigMaps(ns).Watch(context.TODO(), listOptions)
},
})
if err != nil {
log.Errorf("error creating watcher: %s", err.Error())
ctx.Done()
}
ch := rw.ResultChan()
defer rw.Stop()
log.Infof("Beginning watching Kubernetes configMap [%s]", sm.configMap)
var svcs plndrServices
//signalChan := make(chan os.Signal, 1)
//signal.Notify(signalChan, os.Interrupt)
go func() {
for event := range ch {
// We need to inspect the event and get ResourceVersion out of it
switch event.Type {
case watch.Added, watch.Modified:
log.Debugf("ConfigMap [%s] has been Created or modified", sm.configMap)
cm, ok := event.Object.(*v1.ConfigMap)
if !ok {
log.Errorf("Unable to parse ConfigMap from watcher")
break
}
data := cm.Data["plndr-services"]
json.Unmarshal([]byte(data), &svcs)
log.Debugf("Found %d services defined in ConfigMap", len(svcs.Services))
err = sm.syncServices(&svcs)
if err != nil {
log.Errorf("%v", err)
}
case watch.Deleted:
log.Debugf("ConfigMap [%s] has been Deleted", sm.configMap)
case watch.Bookmark:
// Un-used
case watch.Error:
log.Infoln("err")
// This round trip allows us to handle unstructured status
errObject := apierrors.FromObject(event.Object)
statusErr, ok := errObject.(*apierrors.StatusError)
if !ok {
log.Fatalf(spew.Sprintf("Received an error which is not *metav1.Status but %#+v", event.Object))
// Retry unknown errors
//return false, 0
}
status := statusErr.ErrStatus
log.Errorf("%v", status)
default:
}
}
}()
<-signalChan
},
OnStoppedLeading: func() {
// we can do cleanup here
log.Infof("leader lost: %s", id)
for x := range sm.serviceInstances {
sm.serviceInstances[x].cluster.Stop()
}
},
OnNewLeader: func(identity string) {
// we're notified when new leader elected
if identity == id {
// I just got the lock
return
}
log.Infof("new leader elected: %s", identity)
},
},
})
//<-signalChan
log.Infof("Shutting down Kube-Vip")
return nil
}
// returnNameSpace reads the namespace this pod runs in from the standard
// service-account mount. FIX: previously an existing-but-empty namespace file
// returned ("", nil) — a nil error on failure; all failure paths now return
// the explicit error.
func returnNameSpace() (string, error) {
	if data, err := ioutil.ReadFile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"); err == nil {
		if ns := strings.TrimSpace(string(data)); len(ns) > 0 {
			return ns, nil
		}
	}
	return "", fmt.Errorf("Unable to find Namespace")
}
|
package controllers
import java.util.UUID
import models.EventType
import play.api.mvc.Call
import scala.util.Try
import scala.util.matching.Regex
// Validates request paths against a whitelist of application routes,
// used e.g. to sanitize redirect targets taken from links in emails.
object PathValidator {
// We put in the whitelist paths used in emails and
// paths that might be used as bookmarks.
//
// Note that we cannot use Play's router to validate an url, hence the regexes.
private val pathWhitelist: List[Regex] = {
val placeholder = "00000000-0000-0000-0000-000000000000"
val placeholderUUID = UUID.fromString(placeholder)
val calls: List[Call] = List(
routes.HomeController.index,
routes.HomeController.help,
routes.HomeController.welcome,
routes.ApplicationController.create,
routes.ApplicationController.myApplications,
routes.ApplicationController.show(placeholderUUID),
routes.MandatController.mandat(placeholderUUID),
routes.ApplicationController.stats,
routes.UserController.showEditProfile,
routes.UserController.home,
routes.UserController.editUser(placeholderUUID),
routes.GroupController.showEditMyGroups,
routes.GroupController.editGroup(placeholderUUID),
routes.UserController.add(placeholderUUID),
routes.UserController.showValidateAccount,
routes.AreaController.all,
routes.AreaController.deploymentDashboard,
routes.AreaController.franceServiceDeploymentDashboard,
routes.ApplicationController.applicationsAdmin,
routes.UserController.all(placeholderUUID),
)
// Capturing group matching one UUID path segment; each placeholder UUID in a
// route's rendered path is replaced by this group below.
val uuidRegex = "([a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12})"
calls.map(call =>
// this compiles the regex
new Regex("^" + call.path().replace(placeholder, uuidRegex) + "$")
)
}
// True when `path` matches a whitelisted route AND every captured UUID
// segment parses as a valid java.util.UUID.
def isValidPath(path: String): Boolean =
pathWhitelist.exists { pathRegex =>
path match {
case pathRegex(uuids @ _*) =>
uuids.forall(uuid => Try(UUID.fromString(uuid)).isSuccess)
case _ => false
}
}
}
|
<filename>lessons/js-arrays/nested-arrays.js
// sc: https://ru.hexlet.io/courses/js-arrays/lessons/nested-arrays/exercise_unit
// superseries.js
// Реализуйте и экспортируйте по умолчанию функцию, которая находит команду победителя для
// конкретной суперсерии. Победитель определяется как команда, у которой больше побед в конкретной
// серии. Функция принимает на вход массив, в котором каждый элемент — это массив описывающий счет в
// конкретной игре. Результат функции – название страны: 'canada', 'ussr'. Если суперсерия
// закончилась в ничью, то нужно вернуть null.
// Determines the winner of a super series: the team with more game wins takes
// the series. Each game is a pair of goal counts [first, second], where the
// first score belongs to 'canada' and the second to 'ussr'.
// Returns 'canada', 'ussr', or null when the series is tied.
const getSuperSeriesWinner = (series) => {
  let balance = 0;
  for (const [canadaGoals, ussrGoals] of series) {
    balance += Math.sign(canadaGoals - ussrGoals);
  }
  if (balance > 0) {
    return 'canada';
  }
  if (balance < 0) {
    return 'ussr';
  }
  return null;
};
// export default getSuperSeriesWinner;
// Demo data: each entry is one game's final score [first team, second team].
const scores = [
[3, 2],
[4, 1],
[5, 8],
[5, 5],
[2, 2],
[2, 4],
[4, 2],
[2, 3],
];
console.log(getSuperSeriesWinner(scores)); // 3 wins each + 2 draws => null (tie)
|
var _ = require('lodash');
var channels = require('./lib/database').logChannels;
var redis = require('./lib/redis');
var schedule = require('node-schedule');
// Yesterday's date: this job migrates the previous day's counters.
var date = new Date()
date.setDate(date.getDate() - 1);
var lastDay = date;
// Redis keys look like "cnt:<dayOfMonth>:<channelUuid>".
var keyPrefix = "cnt:" + lastDay.getDate() + ":";
// Moves one Redis hash (a day's counters for one channel) into MongoDB and
// deletes the Redis key once the insert succeeds.
var _redis2Mongo = function(key){
    // BUG FIX: _.trimLeft(key, keyPrefix) treats the second argument as a
    // *character set*, so leading UUID characters that also occur in the
    // prefix ('c','n','t',':' and digits) were stripped too. Slice off the
    // exact prefix length instead.
    var channelUuid = key.slice(keyPrefix.length);
    redis.hgetall(key, function(error, log){
        // FIX: on a Redis error `log` is null and the assignments below threw.
        if(error || !log){
            return
        }
        log.uuid = channelUuid;
        log.timestamp = lastDay;
        console.log(log);
        channels.insert(log, function(error){
            if(error){
                return
            }
            // Only remove the source data after a successful insert.
            redis.del(key);
        });
    })
}
// Scans Redis for all of yesterday's counter keys and migrates each one.
var moveCnt = function () {
console.log('start');
redis.keys(keyPrefix + "*", function(error, keyArray){
_.forEach(keyArray, _redis2Mongo );
});
};
// Run the migration daily at 01:18:00 (cron fields: sec min hour dom mon dow).
var j = schedule.scheduleJob('0 18 1 * * *', moveCnt);
|
<filename>src/components/forms/Login/index.js
/**
* @module React
*/
import React from 'react'
import Input from 'components/input/Input'
import TextButton from 'components/buttons/TextButton'
import CtaLink from 'components/links/CtaLink'
import Checkbox from 'components/input/Checkbox'
import SocialButton from 'components/socialmedia/SocialButton'
import classNames from 'utils/classnames'
import omit from 'utils/objectutils/omit'
import Smallloader from 'components/gui/Loaders/Smallloader'
import {
FACEBOOK_LOGIN,
TWITTER_LOGIN,
LINKEDIN_LOGIN
} from 'texts/forms'
/**
* @module Form, Field
*/
import { Form, Field, Submit } from 'components/forms/BaseForm'
/**
 * Presentational login form: email/password fields, submit button with a
 * loading spinner, "keep me logged in" checkbox, a forgotten-details link and
 * social login buttons. Submission is delegated to the `submitForm` prop.
 *
 * Props: submitForm (fn), values (object), className, modifier,
 * toggleLoggedIn (fn), isSubmitting (bool).
 */
const LoginForm = (props) => {
const {
submitForm,
values,
className,
modifier,
toggleLoggedIn,
isSubmitting
} = props
// BEM-style class list combining base class with caller-supplied variants.
const modifiedClassNames = classNames('login-form', className, modifier)
// Forward everything else to the Form; drop presentation-only props.
const formProps = omit(props, ['className', 'modifier', 'handleSubmit'])
return (
<div className={modifiedClassNames}>
<Form
className='login-form__form'
handleSubmit={() => { submitForm(values) }}
{...formProps}>
<div className='form__group'>
<Field
component={Input}
type='email'
placeholder='EMAIL ADDRESS'
validate={[]}
name='email' />
</div>
<div className='form__group'>
<Field
component={Input}
type='password'
placeholder='PASSWORD'
validate={[]}
name='password' />
</div>
<div className='row-sm'>
<div className='col-xs-12 col-sm-6 align-middle form__group text-center'>
<Submit
disabled={isSubmitting}
component={TextButton}
modifier={['fluid']}
className='login-form__submit'
text={isSubmitting ? (<Smallloader isVisible modifier='sm' />) : 'log in'} />
</div>
<div className='col-xs-12 col-sm-6 align-middle form__group text-center'>
<CtaLink modifier={['italic']}>
<h6>Forgotten details</h6>
</CtaLink>
</div>
</div>
<div className='form__group'>
<Checkbox
value={values.keepLoggedIn}
handleChange={toggleLoggedIn}
label='Keep me logged in'
name='keepLoggedIn' />
</div>
<div className='form__group'>
<SocialButton
modifier='facebook'
text={FACEBOOK_LOGIN} />
</div>
<div className='form__group'>
<SocialButton
modifier='twitter'
text={TWITTER_LOGIN} />
</div>
<div className='form__group'>
<SocialButton
text={LINKEDIN_LOGIN}
modifier='linked-in' />
</div>
</Form>
</div>
)
}
export default LoginForm
|
#!/bin/bash
# Notifies a deployment webhook about the current commit and mirrors the
# webhook's verdict in this script's exit code.
set -eu
# FIX: quote "${WEBHOOK_URL}" so URLs containing '&', '?' or spaces are not
# word-split or glob-expanded; -sS silences the progress meter while keeping
# curl's own error messages visible.
RESPONSE=$(curl -sS -X POST -H "Content-Type: application/json" --data "{ \"commit\": \"${GITHUB_SHA}\", \"ref\": \"${GITHUB_REF}\", \"default_branch\": \"master\" }" "${WEBHOOK_URL}")
if [[ ${RESPONSE} == "Okay" ]]; then
  echo "exit 0, ${RESPONSE}"
  exit 0
else
  echo "exit 1, ${RESPONSE}"
  exit 1
fi
|
<gh_stars>1000+
#encoding: utf-8
# Helper declaring every custom-field input type available in the admin UI,
# plus registration of extra models that can carry custom fields.
module CamaleonCms::Admin::CustomFieldsHelper
# Returns the hash of all custom-field element definitions, keyed by field
# type. Each entry describes the field's key, translated label, per-field
# options (required/multiple/translate/default_value/show_frontend, ...) and
# optional extra configuration inputs (extra_fields). Memoized per request;
# plugins can extend the set via the "extra_custom_fields" hook.
def cama_custom_field_elements
return @_cama_custom_field_elements if @_cama_custom_field_elements.present?
items = {}
items[:text_box] = {
key: 'text_box',
label: t('camaleon_cms.admin.custom_field.fields.text_box'),
options: {
required: true,
multiple: true,
translate: true,
default_value: '',
show_frontend: true
}
}
items[:text_area] = {
key: 'text_area',
label: t('camaleon_cms.admin.custom_field.fields.text_area'),
options: {
required: true,
multiple: true,
translate: true,
default_value: '',
show_frontend: true
}
}
items[:select] = {
key: 'select',
label: t('camaleon_cms.admin.custom_field.fields.select'),
options: {
required: true,
multiple: true,
translate: true,
multiple_options: {
label: t('camaleon_cms.admin.settings.options_select'),
default: 'radio'
},
show_frontend: true
}
}
items[:radio] = {
key: 'radio',
label: t('camaleon_cms.admin.custom_field.fields.radio', default: 'Radio'),
options: {
required: true,
multiple: false,
multiple_options: {
label: t('camaleon_cms.admin.settings.options_select'),
default: 'radio',
use_not_default: true
},
show_frontend: true
}
}
items[:checkbox] = {
key: 'checkbox',
label: t('camaleon_cms.admin.custom_field.fields.checkbox', default: 'Checkbox'),
options: {
required: true,
multiple: false,
default_value: '1',
show_frontend: true
}
}
items[:checkboxes] = {
key: 'checkboxes',
label: t('camaleon_cms.admin.custom_field.fields.checkboxes', default: 'Checkboxes'),
options: {
required: false,
multiple: false,
multiple_options: {
label: 'Checkboxes',
default: 'checkbox'
},
show_frontend: true
}
}
items[:audio] = {
key: 'audio',
label: t('camaleon_cms.admin.custom_field.fields.audio', default: 'Audio'),
options: {
required: true,
multiple: true,
default_value: '',
show_frontend: true
}
}
items[:colorpicker] = {
key: 'colorpicker',
label: t('camaleon_cms.admin.custom_field.fields.colorpicker'),
extra_fields:[
{
type: 'select',
key: 'color_format',
label: 'Color Format',
values: [
{
value: 'hex',
label: 'hex'
},
{
value: 'rgb',
label: 'rgb'
},
{
value: 'rgba',
label: 'rgba'
}
]
}
],
options: {
required: true,
multiple: true,
default_value: '',
show_frontend: true
}
}
items[:date] = {
key: 'date',
label: t('camaleon_cms.admin.custom_field.fields.date'),
extra_fields:[
{
type: 'radio',
key: 'type_date',
values: [
{
value: '0',
label: t('camaleon_cms.admin.settings.input_only_date')
},
{
value: '1',
label: t('camaleon_cms.admin.settings.input_date_time')
}
]
}
],
options: {
required: true,
multiple: true,
default_value: '',
show_frontend: true
}
}
items[:editor] = {
key: 'editor',
label: t('camaleon_cms.admin.custom_field.fields.editor', default: 'Editor'),
options: {
required: false,
multiple: true,
translate: true,
default_value: '',
show_frontend: true
}
}
items[:email] = {
key: 'email',
label: t('camaleon_cms.admin.custom_field.fields.email'),
options: {
required: true,
multiple: true,
default_value: '',
show_frontend: true
}
}
items[:file] = {
key: 'file',
label: t('camaleon_cms.admin.custom_field.fields.file'),
options: {
required: true,
multiple: true,
default_value: '',
show_frontend: true,
},
extra_fields:[
{
type: 'text_box',
key: 'formats',
label: 'File Formats (image,video,audio)'
}
]
}
items[:image] = {
key: 'image',
label: t('camaleon_cms.admin.custom_field.fields.image'),
options: {
required: true,
multiple: true,
default_value: '',
show_frontend: true
},
extra_fields:[
{
type: 'text_box',
key: 'dimension',
label: cama_t('camaleon_cms.admin.custom_field.fields.image_dimension'),
description: cama_t('camaleon_cms.admin.custom_field.fields.image_dimension_descr'),
},
{
type: 'text_box',
key: 'versions',
label: cama_t('camaleon_cms.admin.custom_field.fields.image_versions'),
description: cama_t('camaleon_cms.admin.custom_field.fields.image_versions_descr')
}
]
}
items[:numeric] = {
key: 'numeric',
label: t('camaleon_cms.admin.custom_field.fields.numeric'),
options: {
required: true,
multiple: true,
default_value: '',
show_frontend: true
}
}
items[:phone] = {
key: 'phone',
label: t('camaleon_cms.admin.custom_field.fields.phone'),
options: {
required: true,
multiple: true,
default_value: '',
show_frontend: true
}
}
items[:url] = {
key: 'url',
label: t('camaleon_cms.admin.custom_field.fields.url', default: 'URL'),
options: {
required: true,
multiple: true,
translate: true,
default_value: '',
show_frontend: true
}
}
items[:video] = {
key: 'video',
label: t('camaleon_cms.admin.custom_field.fields.video', default: 'Video'),
options: {
required: true,
multiple: true,
default_value: '',
show_frontend: true
}
}
items[:users] = {
key: 'users',
label: t('camaleon_cms.admin.custom_field.fields.users'),
options: {
required: true,
multiple: true,
show_frontend: true
}
}
items[:posts] = {
key: 'posts',
label: t('camaleon_cms.admin.custom_field.fields.posts'),
options: {
required: true,
multiple: true,
show_frontend: true
},
extra_fields:[
{
type: 'checkbox',
key: 'post_types',
label: 'Post types',
values: current_site.post_types.pluck(:id, :name).map{|pt| {value: pt.first, label: pt.last}}.unshift({value: "all", label: "--- All Post Types ---"})
}
]
}
items[:post_types] = {
key: 'post_types',
label: t('camaleon_cms.admin.post_type.post_types'),
options: {
required: true,
multiple: true
}
}
items[:categories] = {
key: 'categories',
label: t('camaleon_cms.admin.table.categories'),
options: {
required: true,
multiple: true
}
}
# evaluate the content of command value on listing
# sample command: options_from_collection_for_select(current_site.the_posts("commerce").decorate, :id, :the_title)
# NOTE(review): select_eval executes admin-supplied code; it is admin-only
# (show_frontend: false) but still worth auditing.
items[:select_eval] = {
key: 'select_eval',
label: t('camaleon_cms.admin.custom_field.fields.select_eval'),
options: {
required: true,
multiple: false,
default_value: '',
show_frontend: false
},
extra_fields:[
{
type: 'text_area',
key: 'command',
label: 'Command to Eval'
}
]
}
items[:field_attrs] = {
key: 'field_attrs',
label: t('camaleon_cms.admin.custom_field.fields.field_attrs'),
options: {
required: false,
multiple: true,
show_frontend: true,
translate: true
}
}
items[:private_file] = {
key: 'private_file',
label: t('camaleon_cms.admin.custom_field.fields.private_file', default: 'Private File'),
options: {
required: true,
multiple: true,
default_value: ''
},
extra_fields:[
{
type: 'text_box',
key: 'formats',
label: 'File Formats (image,video,audio)'
}
]
}
# Let plugins register additional field types, then memoize the result.
r = {fields: items}; hooks_run("extra_custom_fields", r)
@_cama_custom_field_elements = r[:fields]
end
# add your model class into custom fields editor
# Note: to use custom fields on your model, you need the following:
# - add: belongs_to :site (in your model) //don't forget multi site support, i.e.: you need site_id attribute in your table
# - add: include CamaleonCms::CustomFieldsRead (in your model)
# ==> With this, you can manage your model like a plugin. Check api -> custom fields section into docs)
# model_class: class name (Product)
def cf_add_model(model_class)
@_extra_models_for_fields << model_class
end
end
|
def calculate_synapses(second_layer_neurons, output_layer_neurons):
    """Return the number of synapses between two fully connected layers.

    Every neuron in the second layer connects to every output neuron,
    so the count is simply the product of the two layer sizes.
    """
    return second_layer_neurons * output_layer_neurons


# Demonstration: a 14-neuron layer fully connected to 3 output neurons.
hidden_count = 14
output_count = 3
print(calculate_synapses(hidden_count, output_count))  # Output: 42
|
#!/bin/bash
#
# Copyright 2021 ZUP IT SERVICOS EM TECNOLOGIA E INOVACAO SA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# https://github.com/kt3k/deno_license_checker
# Please install deno at https://github.com/denoland/deno to run the command below
# You can run this file from the root of the repository with: bash fastlane/check-license.sh $(pwd)
echo "Running check licenses for folder: $1"
# Pinned checker version; --unstable and read access are required by the tool.
deno run --unstable --allow-read https://deno.land/x/license_checker@v3.1.4/main.ts "$1"
# Propagate the checker's exit status as this script's status.
exit $?
|
#!/bin/bash
# Provisions an Ubuntu host: upgrades packages, installs the Azure CNI plugin,
# and sets up an Apache test page that shows host information.
# FIX: stop on the first failed command instead of continuing blindly.
set -e
# FIX: refresh package lists first; `apt upgrade` alone acts on stale indexes.
sudo apt update
sudo apt upgrade -y
echo "Installing CNI plugin"
sudo wget https://raw.githubusercontent.com/Azure/azure-container-networking/v1.1.7/scripts/install-cni-plugin.sh
sudo chmod +x install-cni-plugin.sh
sudo ./install-cni-plugin.sh v1.1.7 v0.8.7
echo "Setting up a webserver for testing"
sudo apt install apache2 -y
sudo ufw allow 'Apache'
cd /var/www/html
# $(...) replaces the deprecated backtick command substitution.
echo "<html><h1>Hello AWS Study - Welcome To My Webpage</h1><body>$(hostnamectl)</body></html>" | sudo tee index.html
|
<filename>src/archive/archive.module.ts
import { Module } from '@nestjs/common';
import { ArchiveService } from './archive.service';
import { ArchiveController } from './archive.controller';
// Feature module wiring the archive HTTP controller to its service.
@Module({
  controllers: [ArchiveController],
  providers: [ArchiveService]
})
export class ArchiveModule {}
|
def selectMultiplesOf3(nums):
    """Return the elements of `nums` divisible by 3, preserving order."""
    return [num for num in nums if num % 3 == 0]

if __name__ == '__main__':
    # The original referenced an undefined name `nums` here (NameError);
    # define a concrete example input instead.
    nums = [1, 3, 6, 7, 9, 12]
    print(selectMultiplesOf3(nums))
|
/*
* Gray: A Ray Tracing-based Monte Carlo Simulator for PET
*
* Copyright (c) 2018, <NAME>, <NAME>, <NAME>, <NAME>
*
* This software is distributed under the terms of the MIT License unless
* otherwise noted. See LICENSE for further details.
*
*/
#include "Gray/Sources/PointSource.h"
// A point source: construction just forwards the position and activity to the
// Source base class.
PointSource::PointSource(const VectorR3 &p, double act) :
    Source(p, act)
{
}

// Every decay originates exactly at the stored position (presumably the
// member set by the Source base constructor -- confirm in Source).
VectorR3 PointSource::Decay() const {
    return(position);
}

// A point has zero volume, so no location counts as "inside" this source.
bool PointSource::Inside(const VectorR3&) const {
    return false;
}
|
import random
# Function to generate random string
def get_random_string(length):
    """Return a random alphanumeric string of the given length.

    Note: this uses the `random` module, which is NOT cryptographically
    secure; use `secrets.choice` for tokens or passwords.
    """
    letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
    # str.join over a generator is the idiomatic (and O(n)) way to build the
    # string, instead of repeated += concatenation.
    return "".join(random.choice(letters) for _ in range(length))

# Usage example
random_string = get_random_string(8)
print("Random String: " + random_string)
|
<gh_stars>0
/**
 * Formats a millisecond duration as "m:ss".
 * The original compared toFixed()'s *string* result with `== 60` and
 * `< 10`, relying on implicit coercion; this version rounds numerically
 * and pads explicitly, with identical output.
 */
export function millisToMinutesAndSeconds(millis) {
  const minutes = Math.floor(millis / 60000)
  const seconds = Math.round((millis % 60000) / 1000)
  // 59.5s and above rounds to 60, which must roll over into the next minute.
  return seconds === 60
    ? `${minutes + 1}:00`
    : `${minutes}:${String(seconds).padStart(2, '0')}`
}
|
<gh_stars>0
import { ExternalLink } from 'react-external-link';
const InfoSection = () => {
return (
<>
<section id="homepage">
<div className="row">
<div className="col-md-12 col-lg-8">
<h3 id="bannerTitle" className="text-left">
Full Stack Web Developer
</h3>
<p id="mainBannerP" className="text-left">
Hi, I'm Lee. I'm a Full Stack Web Developer with a Certificate
from Monash University and a background in Education. Expertise in
communication, leadership and time management. I’m inspired by
Software Development as it fuels my love of learning.
</p>
<p id="mainBannerP" className="text-left">
I enjoy sharing knowledge and contributing to challenging projects
in agile environments. I recently incorporated user authentication
on a members page using bcrypt and passport npm packages. My drive
and resilience motivates me to persevere and create solutions.
</p>
<p id="mainBannerP" className="text-left">
I build software using responsive design, clean code, creativity
and effective communication. I’m excited to join an innovative
company to offer my skills and develop them further.
</p>
<br />
<p id="quoteP" className="left">
“Once you stop learning you start dying”
</p>
<p Id="referenceP" className="text-left">
<NAME>
</p>
</div>
{/* <!-- contact details --> */}
<div className="col-md-12 col-lg-4">
<img
src={`${process.env.PUBLIC_URL}/assets/resize/head1.jpg`}
id="imgHeadshot"
alt="headshot"
/>
<aside id="list">
<div id="contactList" className="vl">
<h5>
<a
href="https://github.com/LeeKiri"
target="_blank"
rel="noreferrer"
title="Go to profile"
>
<i className="fab fa-github"></i> GitHub Profile
</a >
</h5>
<h5>
<ExternalLink
href="https://linkedin.com/in/lee-k-templeton"
target="_blank"
rel="noreferrer"
title="Go to profile">
<i className="fab fa-linkedin"></i> Linkedin
</ExternalLink>
</h5>
<h5>
<a
href={`${process.env.PUBLIC_URL}/assets/LeeTempleton-Go.pdf`}
target="_blank"
rel="noreferrer"
title="Go to profile"
>
<i className="fas fa-file"></i> Resume
</a >
</h5>
<h5>
<a
href="portfolio.html"
target="_blank"
rel="noreferrer"
title="Go to profile"
>
<i className="fas fa-code-branch"></i> Projects
</a >
</h5>
<h5>
<i className="fas fa-envelope"></i>
<a href="mailto:<EMAIL>">
<EMAIL>
</a >
</h5>
<h5>
<i className="fas fa-phone-square-alt"></i>
<a href="tel:+61423201885"> +61 423201885</a >
</h5>
</div>
</aside>
</div>
</div>
</section>
</>
);
};
export default InfoSection;
|
#!/bin/bash
# Clone and build the Metis fork from ${GITHUB_USER}'s GitHub account.
# Abort on the first failing step: without this, a failed `cd ext` would run
# the clone/configure/make cascade in the wrong directory.
set -e
GITHUB_USER="ms705"
cd ext
git clone https://github.com/${GITHUB_USER}/Metis.git metis
cd metis
./configure --enable-debug --enable-profile
make
|
class BankAccount:
    """A minimal bank account: a holder name plus a running balance."""

    def __init__(self, account_holder):
        self.account_holder = account_holder
        self.balance = 0

    def deposit(self, amount):
        """Add a positive amount to the balance and report the outcome."""
        if amount <= 0:
            return "Invalid deposit amount"
        self.balance += amount
        return f"Deposit of {amount} successful. Current balance: {self.balance}"

    def withdraw(self, amount):
        """Remove a positive amount if covered by the balance; report the outcome."""
        if amount <= 0:
            return "Invalid withdrawal amount"
        if amount > self.balance:
            return "Insufficient funds"
        self.balance -= amount
        return f"Withdrawal of {amount} successful. Current balance: {self.balance}"

    def check_balance(self):
        """Report the current balance."""
        return f"Current balance: {self.balance}"
# Simple user interface
def main():
    """Interactive console loop driving a single BankAccount."""
    account = BankAccount(input("Enter account holder's name: "))
    while True:
        print("\n1. Deposit\n2. Withdraw\n3. Check Balance\n4. Exit")
        choice = input("Enter your choice: ")
        if choice == "4":
            print("Exiting...")
            break
        if choice == "1":
            print(account.deposit(float(input("Enter amount to deposit: "))))
        elif choice == "2":
            print(account.withdraw(float(input("Enter amount to withdraw: "))))
        elif choice == "3":
            print(account.check_balance())
        else:
            print("Invalid choice")

if __name__ == "__main__":
    main()
|
"""
Create a generative recurrent neural network (GRU) to generate text.

NOTE(review): the training call at the bottom references X_train / y_train,
which are never defined in this script -- it raises NameError as written.
The data pipeline presumably lives elsewhere; confirm before running.
"""
import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Embedding, GRU
# Parameters
vocab_size = 81  # Size of our vocabulary
embedding_dim = 32  # Number of embedding dimensions
num_layers = 2  # Number of GRU layers
batch_size = 16  # Batch size
num_epochs = 100  # Number of epochs
seq_length = 30  # Sequence length
# Create the model
model = Sequential()
model.add(Embedding(vocab_size, embedding_dim, input_length=seq_length))
model.add(GRU(seq_length, return_sequences=True))
# NOTE(review): each stacked GRU here lacks return_sequences=True, so only
# num_layers == 2 stacks cleanly; with more layers the intermediate GRUs
# would need sequence outputs -- confirm intended depth.
for _ in range(num_layers-1):
    model.add(GRU(seq_length))
model.add(Dense(vocab_size, activation='softmax'))
# Compile the model
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# Train the model
model.fit(X_train, y_train, batch_size=batch_size, epochs=num_epochs)
|
/**
*
* RaisedButton
*
*/
// NOTE(review): importing PropTypes from 'react' was removed in React >= 15.5
// (it moved to the standalone 'prop-types' package) -- confirm the React
// version pinned by this project before upgrading.
import React, { Component, PropTypes } from 'react';
import {RaisedButton as MUIButton} from 'material-ui';

// Thin wrapper around material-ui's RaisedButton; currently renders a fixed
// "Default" label and ignores its state/props (scaffold placeholders).
class RaisedButton extends Component { // eslint-disable-line react/prefer-stateless-function
  constructor(props) {
    super(props);
    this.state = {
      exampleValue: '',
    };
  }
  render() {
    const {exampleValue} = this.state; // eslint-disable-line
    const {exampleProp} = this.props; // eslint-disable-line
    return (
      <MUIButton label="Default" />
    ); // eslint-disable-line
  }
}
RaisedButton.propTypes = {
  exampleProp: PropTypes.string,
};
RaisedButton.defaultProps = {
  exampleProp: '',
};
export default RaisedButton;
|
import { List } from './List';
/**
 * Remove the first item out of a [[List]]
 * @param L the input list; empty and non-tuple lists are returned unchanged
 * @returns [[List]]
 * @example
 * ```ts
 * type T0 = Tail<[1, 2, 3]> // [2, 3]
 * type T1 = Tail<[]>        // []
 * ```
 */
export declare type Tail<L extends List> = L extends readonly [] ? L : L extends readonly [any?, ...infer LTail] ? LTail : L;
|
//@ts-nocheck
// Minimal Express + Postgres REST API for a "users" resource.
// Middleware registration order below is load-bearing: body parsing and the
// CORS headers must be installed before the route handlers.
const express = require('express')
const bodyParser = require('body-parser')
const db = require('./queries')
const app = express()
const port = 8080
app.use(bodyParser.json())
app.use(
  bodyParser.urlencoded({
    extended: true,
  })
)
// Hand-rolled CORS middleware (no cors package dependency).
app.use((req, res, next) => {
  // Website you wish to allow to connect
  res.setHeader('Access-Control-Allow-Origin', 'http://localhost:2003');
  // Request methods you wish to allow
  res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS, PUT, PATCH, DELETE');
  // Request headers you wish to allow
  res.setHeader('Access-Control-Allow-Headers', 'X-Requested-With,content-type');
  // Set to true if you need the website to include cookies in the requests sent
  // to the API (e.g. in case you use sessions)
  // NOTE(review): a boolean value is passed here; browsers only honor the
  // literal string 'true' for this header, so as-is it has no CORS effect --
  // confirm whether it can simply be dropped.
  res.setHeader('Access-Control-Allow-Credentials', false);
  // Pass to next layer of middleware
  next();
});
// Health-check / landing route.
app.get('/', (req, resp) => {
  resp.json({ info: 'Node.js, Express, and postgres Api'})
})
// CRUD routes delegate to ./queries.
app.get('/users', db.getUsers)
app.get('/users/:id', db.getUserById)
app.post('/users', db.createUser)
app.put('/users/:id', db.updateUser)
app.delete('/users/:id', db.deleteUser)
app.listen(port, () => {
  console.log(`App running on port ${port}.`)
})
|
# Dashboard pages listing a user's repos, optionally filtered by visibility.
# Fix: the original ended with a bare `:protected` -- a symbol literal that
# evaluates to itself and does nothing -- so `login` and
# `determine_auth_scope` were left as public controller actions. Replaced
# with the actual `protected` access modifier.
class DashboardController < ApplicationController
  before_action :login, only: :private
  before_action :determine_auth_scope

  # Own dashboard: requires the read:org scope, otherwise redirects to welcome.
  def index
    github_authenticate!(:default) unless github_authenticated?(:default)
    return redirect_to(welcome_path) unless logged_in? && current_user.has_scope?('read:org')
    @private = nil
    @user = gh.users(current_user.login)
    @repos = huboard.all_repos
  end

  # Another user's dashboard; own repos when viewing yourself.
  def user
    user = gh.users(params[:user]).raw
    not_found unless user.status == 200
    if logged_in? && current_user.login == params[:user]
      @repos = huboard.repos
    else
      @repos = huboard.repos_by_user(params[:user])
    end
    @user = user.body
    @private = nil
    render :index
  end

  # Public repos only.
  def public
    user = gh.users(params[:user]).raw
    not_found unless user.status == 200
    @private = 0
    @user = user.body
    @repos = huboard.repos_by_user(params[:user]).select { |r| !r['private'] }
    render :index
  end

  # Private repos only; non-owners see just repos owned by the requested user.
  def private
    user = gh.users(params[:user]).raw
    not_found unless user.status == 200
    @private = 1
    @user = user.body
    if logged_in? && current_user.login == params[:user]
      @repos = huboard.all_repos.select { |r| r['private'] }
    else
      @repos = huboard.all_repos.select { |r| r['private'] && r['owner']['login'].casecmp(params[:user]) == 0 }
    end
    render :index
  end

  protected

  def login
    github_authenticate! :private
  end

  def determine_auth_scope
    @auth_scope_private = logged_in? && current_user.has_scope?(:repo)
  end
end
|
def resource_schedule_optimization(tasks, resources):
    """Greedily assign each task to the first resource with spare capacity.

    Tasks are processed in order of their "start_time"; each task consumes
    "demand" units of the chosen resource's capacity.

    Fix: the original used the task dicts themselves as dictionary keys
    (``{task: None for task in tasks}``), which raises TypeError because
    dicts are unhashable -- the function could never run. The schedule is
    now keyed by each task's index in the input ``tasks`` list.

    :param tasks: list of dicts with at least "start_time" and "demand".
    :param resources: dict mapping resource name -> {"capacity": int}.
    :returns: dict mapping task index -> assigned resource name (or None
        when no resource had enough remaining capacity).
    """
    # Track remaining capacity per resource without mutating the input.
    resource_state = {
        name: {"capacity": info["capacity"], "available": info["capacity"]}
        for name, info in resources.items()
    }
    # Every task starts unassigned.
    schedule = {index: None for index in range(len(tasks))}
    # Visit tasks by ascending start time (original order breaks ties).
    order = sorted(range(len(tasks)), key=lambda i: tasks[i]["start_time"])
    for index in order:
        task = tasks[index]
        for name in resources:
            if resource_state[name]["available"] >= task["demand"]:
                schedule[index] = name
                resource_state[name]["available"] -= task["demand"]
                break
    return schedule
|
#!/bin/bash
# Runs the test suite under coverage inside a Python 3 virtualenv, then
# uploads coverage results to codecov.
SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
pushd "${SCRIPT_DIR}/.." > /dev/null
set -e
COVERAGE_THRESHOLD=90
echo "Create Virtualenv for Python deps ..."
# Require at least Python 3.6 (delegated to a helper script).
check_python_version() {
    python3 tools/check_python_version.py 3 6
}
function prepare_venv() {
    VIRTUALENV="$(which virtualenv)"
    # $? here reflects the `which` inside the command substitution.
    if [ $? -eq 1 ]
    then
        # python34 which is in CentOS does not have virtualenv binary
        echo "Trying to find virtualenv-3"
        VIRTUALENV="$(which virtualenv-3)"
    fi
    # NOTE(review): when the branch above is NOT taken, this $? tests the
    # previous `[` command (always 0 then), not a fresh lookup -- the
    # python3.4 fallback can only trigger via the first branch. Confirm.
    if [ $? -eq 1 ]
    then
        # still don't have virtual environment -> use python3.4 directly
        echo "Fallback to Python 3.4"
        python3.4 -m venv venv && source venv/bin/activate
    else
        echo "Using ${VIRTUALENV}"
        ${VIRTUALENV} -p python3 venv && source venv/bin/activate
    fi
    if [ $? -ne 0 ]
    then
        printf "%sPython virtual environment can't be initialized%s\n" "${RED}" "${NORMAL}"
        exit 1
    fi
    pip install -U pip
    python3 "$(which pip3)" install -r requirements.txt
}
# NOVENV=1 skips virtualenv creation entirely (e.g. inside CI containers).
[ "$NOVENV" == "1" ] || prepare_venv || exit 1
check_python_version
$(which pip3) install pytest
$(which pip3) install pytest-cov
$(which pip3) install codecov
PYTHONDONTWRITEBYTECODE=1 PYTHONPATH=`pwd` python3 "$(which pytest)" --cov=victimsdb_lib/ --cov-report term-missing --cov-fail-under=$COVERAGE_THRESHOLD -vv -s tests/
printf "%stests passed%s\n\n" "${GREEN}" "${NORMAL}"
# SECURITY(review): hard-coded codecov upload token committed to the repo --
# rotate it and read it from CI secrets / the environment instead.
codecov --token=0c52dc5d-f4a6-438a-8683-fc430f10d434
popd > /dev/null
|
<gh_stars>1-10
package org.opentaps.base.constants;
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
// DO NOT EDIT THIS FILE! THIS IS AUTO GENERATED AND WILL GET WRITTEN OVER PERIODICALLY WHEN THE DATA CHANGE
/**
* AgreementItemType constant values.
*/
/**
 * AgreementItemType constant values.
 * Auto-generated ID strings for the AgreementItemType entity; each constant's
 * value equals its name, so they can be used directly as lookup keys.
 */
public final class AgreementItemTypeConstants {
    /** Non-instantiable constants holder. */
    private AgreementItemTypeConstants() { }
    /** Commission Rate. */
    public static final String AGREEMENT_COMMISSION = "AGREEMENT_COMMISSION";
    /** Credit Terms. */
    public static final String AGREEMENT_CREDIT = "AGREEMENT_CREDIT";
    /** Exhibit. */
    public static final String AGREEMENT_EXHIBIT = "AGREEMENT_EXHIBIT";
    /** Payment Terms. */
    public static final String AGREEMENT_PAYMENT = "AGREEMENT_PAYMENT";
    /** Pricing. */
    public static final String AGREEMENT_PRICING_PR = "AGREEMENT_PRICING_PR";
    /** Section. */
    public static final String AGREEMENT_SECTION = "AGREEMENT_SECTION";
    /** Supplier Descriptive Terms. */
    public static final String AGREEMENT_SUPPLIER = "AGREEMENT_SUPPLIER";
    /** Commission Customers. */
    public static final String COMM_CUSTOMERS = "COMM_CUSTOMERS";
    /** Payment terms. */
    public static final String COMM_PAYMENT = "COMM_PAYMENT";
    /** Commission Rate Schedule. */
    public static final String COMM_RATES = "COMM_RATES";
    /** Timing of Commissions. */
    public static final String COMM_TIMING = "COMM_TIMING";
    /** Partner-Customer Payment Terms. */
    public static final String PARTNER_CUST_PAY = "PARTNER_CUST_PAY";
    /** Partner Terms Of Service. */
    public static final String PARTNER_TOS = "PARTNER_TOS";
    /** Sub-Agreement. */
    public static final String SUBAGREEMENT = "SUBAGREEMENT";
}
|
package com.thinkaurelius.titan.util.datastructures;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import java.util.Collection;
import java.util.Iterator;
/** Small helpers over {@link Iterable}, using Collection fast paths when possible. */
public class IterablesUtil {

    /** Returns an Iterable whose iterator is always empty. */
    public static <O> Iterable<O> emptyIterable() {
        return new Iterable<O>() {
            @Override
            public Iterator<O> iterator() {
                return Iterators.emptyIterator();
            }
        };
    }

    /**
     * Returns the number of elements in {@code i}, using
     * {@code Collection.size()} in O(1) when available.
     */
    public static int size(Iterable<?> i) {
        if (i instanceof Collection) return ((Collection<?>) i).size();
        return Iterables.size(i);
    }

    /**
     * Returns true iff {@code i} contains at least {@code limit} elements,
     * stopping early instead of counting the whole iterable.
     * Fix: for {@code limit <= 0} the iterator path previously returned
     * false on an empty iterable while the Collection path returned true;
     * any iterable trivially has at least zero elements.
     */
    public static boolean sizeLargerOrEqualThan(Iterable<?> i, int limit) {
        if (limit <= 0) return true;
        if (i instanceof Collection) return ((Collection<?>) i).size() >= limit;
        int count = 0;
        for (Object ignored : i) {
            if (++count >= limit) return true;
        }
        return false;
    }
}
|
// Entry point: once the DOM is ready, wire the "new row" button to
// FuncionNuevaFila. The commented-out fragments below are earlier
// experiments (static fill / product listing) kept for reference.
$(document).ready(funcionPrincipal);
function funcionPrincipal()
{
    $("#btnnuevafila").on('click',FuncionNuevaFila);
    /*$.get("productillos", function (response, opciones) {
        console.log(response);*/
    // $("#btnnuevafila").on('click', llenar);
    // ListarProductos();
    // });
}
/*
$.get("productillos", function (response, producto) {
console.log(response);
for (i = 0; i < response.length; i++) {
$("#productillos").append("<option value='" + response[i].id + "'>" + response[i].nombre + "</option>");
}
});
*/
/*function llenar(){
//este carga valores estaticos
$('.opciones').empty();
for (i = 0; i < 10; i++) {
$(".opciones").append("<option value='5'>" + "opciones" + "</option>");
}
}*/
/*function Llenar() {
$('#btnnuevafila').change('click', function (event) {
$.get("productillos", function (response, producto) {
console.log(response);
for (i = 0; i < response.length; i++) {
$("#productillos").append("<option value='" + response[i].id + "'>" + response[i].nombre + "</option>");
}
});
});*/
// Fetches the product list via AJAX and logs the server response.
// NOTE(review): '<NAME>' / '<PASSWORD>' are template placeholders, and
// credentials should not travel as plain GET parameters -- confirm what this
// endpoint actually expects before shipping.
var ListarProductos = function () {
    $.ajax({
        type: "GET",
        url: "Clases/compra/productillos",
        data: {
            id: 1,
            name: '<NAME>',
            pass: '<PASSWORD>'
        },
        success: function (data) {
            console.log("First Method Data Saved: ", data);
        }
    });
    // alert('acabo de correr la funcion ajax')
}
// Appends a new product row to #tablaproductos (a <select> for the product
// plus placeholder cells: detail, quantity, price input, subtotal, action
// link), then triggers the AJAX product fetch. The commented tail is an
// older attempt at populating the <select> options.
function FuncionNuevaFila()
{
    $("#tablaproductos")
        .append(
            $('<tr>')
                .append(
                    $('<td>')
                        .append(
                            // Product selector; options are expected to be
                            // filled elsewhere via the 'opciones' class.
                            $('<select>').addClass('opciones')
                        )
                )
                .append(
                    $('<td>')
                        .append(
                            $('<p>').text('detalle')
                        )
                )
                .append(
                    $('<td>')
                        .append(
                            $('<p>').text('2')
                        )
                )
                .append(
                    $('<td>')
                        .append(
                            $('<input>').attr('type','text').attr('value','25')
                        )
                )
                .append(
                    $('<td>')
                        .append(
                            $('<p>').text('10')
                        )
                )
                .append(
                    $('<td>')
                        .append(
                            $('<a>').text('dfsds')
                        )
                )
        )
    ListarProductos();
    //alert('estoy pasando por aqui');
    /*$.get(route('litar'), function (res) {
        $(res).each(function (key, value) {
            $('.opciones').append("<option>" + value.nombnre + "</option>");
        });
    }); */
}
|
<reponame>wangsiwei12138/supermall<filename>src/common/mixin.js
import { debounce } from "./utils";
import BackTop from "components/content/backTop/BackTop";
// import {BACK_POSITION} from "common/const"
// Mixin: refresh the scroll component (debounced) whenever an item image
// finishes loading.
export const itemListenerMixin = {
  mounted () {
    // 1. Debounce the scroll refresh so bursts of image loads coalesce.
    const refresh = debounce(this.$refs.scroll.refresh, 50);
    // 3. Listen for item images finishing loading.
    this.$bus.$on("itemImgLoad", () => {
      refresh();
    });
    // NOTE(review): this registers a SECOND handler for the same event, and
    // itemImageListener is not defined in this mixin -- presumably supplied
    // by the consuming component (e.g. for later $off cleanup). Confirm the
    // double registration is intentional.
    this.$bus.$on("itemImgLoad",this.itemImageListener)
  }
}
// Mixin: shared "back to top" button state and scroll-to-top behavior.
export const backTopMixin = {
  components: {
    BackTop
  },
  data () {
    return {
      // Controls BackTop visibility; toggled by the consuming component.
      isShowBackTop: false,
    }
  },
  methods: {
    // Scroll the wrapped scroll component back to the origin.
    backClick() {
      this.$refs.scroll.scrollTo(0 , 0);
    },
  }
}
|
console.log('Hello, TypeScript');

/** Returns the sum of two numbers. */
const add = (a: number, b: number): number => a + b;

const sum = add(2, 3);
|
db.collection.find({name: "John"}, {name: 1, age: 1, country: 1})
|
import request from '@/utils/request'
// Query the room-information list
// NOTE(review): the '/Ower/...' path segment looks like a typo for '/Owner/'
// but must match the backend routes -- do not change unilaterally.
export function listMyform(query) {
  return request({
    url: '/Ower/myform/list',
    method: 'get',
    params: query
  })
}
// Query room-information details by id
export function getMyform(id) {
  return request({
    url: '/Ower/myform/' + id,
    method: 'get'
  })
}
// Create a room-information record
export function addMyform(data) {
  return request({
    url: '/Ower/myform',
    method: 'post',
    data: data
  })
}
// Update a room-information record
export function updateMyform(data) {
  return request({
    url: '/Ower/myform',
    method: 'put',
    data: data
  })
}
// Delete a room-information record by id
export function delMyform(id) {
  return request({
    url: '/Ower/myform/' + id,
    method: 'delete'
  })
}
// Export room information matching the query
export function exportMyform(query) {
  return request({
    url: '/Ower/myform/export',
    method: 'get',
    params: query
  })
}
|
from collections import Counter
def most_frequent(arr):
    """Return the element of ``arr`` with the highest occurrence count.

    Ties resolve to whichever element Counter ranks first (first
    encountered among equally common elements).
    """
    (value, _count), = Counter(arr).most_common(1)
    return value

arr = [1, 3, 3, 3, 5, 4, 4, 6]
print(most_frequent(arr))
# Output: 3
|
import React from 'react';
import { useDispatch } from 'react-redux';
import PropTypes from 'prop-types';
import { removeBookAction } from '../../redux/books/books';
// Presentational card for a single book: category/title block with
// comment/remove/edit actions, a decorative progress dial, and a static
// "current chapter" panel. Only Remove is wired to the store.
const Book = ({ title, category, id }) => {
  const dispatch = useDispatch();
  // Dispatch removal; note the parameter shadows the `id` prop above.
  const removeBookfromStore = (id) => {
    dispatch(removeBookAction(id));
  };
  return (
    <div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-7 gap-4 sm:gap-8 lg:gap-8">
      <div className="description col-span-1 lg:col-span-3">
        <p className="category text-title opacity-50 font-mont font-bold md:text-sm capitalize">{category}</p>
        <p className="title text-title font-robo font-bold text-xl md:text-2xl">{title}</p>
        <p className="text-link font-robo font-light md:text-sm mb-4">Author</p>
        <button type="button" className="text-link font-robo font-light text-sm pr-2 lg:pr-4 hover:text-blue-900">Comments</button>
        <button type="button" className="text-link font-robo font-light text-sm pr-2 pl-2 lg:pr-4 lg:pl-4 border-r border-l border-gray-200 hover:text-blue-900" onClick={() => removeBookfromStore(id)}>Remove</button>
        <button type="button" className="text-link font-robo font-light text-sm pl-2 lg:pl-4 hover:text-blue-900">Edit</button>
      </div>
      {/* Progress dial is currently hard-coded to 75% (visual only). */}
      <div className="hidden lg:flex progress col-span-1 lg:col-span-2 lg:pr-6 xl:p-0 lg:border-r border-gray-200 justify-center items-center">
        <div className="wrapper">
          <div className="arc arc_start" />
          <div className="arc arc_end" />
        </div>
        <div>
          <p className="text-title md:text-3xl font-mont font-normal">75%</p>
          <p className="text-title md:text-sm opacity-50 font-mont font-normal">Completed</p>
        </div>
      </div>
      <div className="col-span-1 lg:col-span-2">
        <p className="text-title opacity-50 font-robo font-light md:text-sm mb-2">CURRENT CHAPTER</p>
        <p className="text-title font-robo font-light md:text-base mb-4 lg:mb-7">Chapter 1</p>
        <button type="button" className="bg-azure text-white py-2 px-8 font-robo font-light text-xs md:text-sm rounded hover:bg-blue-600">UPDATE PROGRESS</button>
      </div>
    </div>
  );
};
Book.propTypes = {
  title: PropTypes.string.isRequired,
  category: PropTypes.string.isRequired,
  id: PropTypes.string.isRequired,
};
export default Book;
|
<filename>veriloggen/thread/stream.py
from __future__ import absolute_import
from __future__ import print_function
import math
import functools
import ast
import inspect
import textwrap
from collections import OrderedDict
import veriloggen.core.vtypes as vtypes
from veriloggen.seq.seq import make_condition
from veriloggen.fsm.fsm import FSM
from veriloggen.seq.seq import Seq
from veriloggen.stream.stream import Stream as BaseStream
from veriloggen.stream.stypes import Substream as BaseSubstream
from . import compiler
from . import thread
# 3-bit encodings for a source/sink port's operating mode (see the comments
# in Stream.source()/Stream.sink()): 3'b000 = idle/empty, 3'b001 = normal
# offset/size/stride transfer, 3'b010 = pattern, 3'b100 = multipattern.
mode_width = 3
mode_idle = vtypes.Int(0, mode_width, base=2)
mode_normal = vtypes.Int(1, mode_width, base=2)
mode_pattern = vtypes.Int(2, mode_width, base=2)
mode_multipattern = vtypes.Int(4, mode_width, base=2)
def TmpStream(m, clk, rst,
              datawidth=32, addrwidth=32,
              max_pattern_length=4, ram_sel_width=8,
              fsm_as_module=False):
    """Create a Stream with an auto-generated unique name.

    Fixes two argument-forwarding bugs in the original: passed positionally,
    ``ram_sel_width`` landed in Stream's ``max_multipattern_length`` slot,
    and ``fsm_as_module`` was hard-coded to False instead of forwarded.
    """
    name = compiler._tmp_name('_tmp_stream')
    return Stream(m, name, clk, rst,
                  datawidth=datawidth, addrwidth=addrwidth,
                  max_pattern_length=max_pattern_length,
                  ram_sel_width=ram_sel_width,
                  fsm_as_module=fsm_as_module)
class Stream(BaseStream):
__intrinsics__ = ('set_source', 'set_source_pattern', 'set_source_multidim',
'set_source_multipattern', 'set_source_empty',
'set_sink', 'set_sink_pattern', 'set_sink_multidim',
'set_sink_multipattern',
'set_sink_empty', 'set_constant',
'run', 'join', 'done',
'source_join', 'source_done',
'sink_join', 'sink_done')
ram_delay = 4
    def __init__(self, m, name, clk, rst,
                 datawidth=32, addrwidth=32,
                 max_pattern_length=4, max_multipattern_length=2,
                 ram_sel_width=8, fsm_as_module=False):
        """Hardware stream with RAM-backed sources/sinks driven by an FSM.

        Creates the control FSM plus the shared start/end/busy registers and
        initializes the bookkeeping tables that source()/sink()/constant()
        fill in before synthesis.
        """
        BaseStream.__init__(self, module=m, clock=clk, reset=rst,
                            no_hook=True)
        self.name = name
        self.datawidth = datawidth
        self.addrwidth = addrwidth
        self.max_pattern_length = max_pattern_length
        self.max_multipattern_length = max_multipattern_length
        self.ram_sel_width = ram_sel_width
        self.fsm_as_module = fsm_as_module
        # Set once _implement_stream / FSM synthesis has run; mutations are
        # rejected afterwards.
        self.stream_synthesized = False
        self.fsm_synthesized = False
        self.fsm = FSM(self.module, '_%s_fsm' %
                       self.name, self.clock, self.reset,
                       as_module=self.fsm_as_module)
        # Control handshake signals shared by all sources/sinks.
        self.start_flag = self.module.Wire(
            '_'.join(['', self.name, 'start_flag']))
        self.start = self.module.Reg(
            '_'.join(['', self.name, 'start']), initval=0)
        self.end_flag = self.module.Reg(
            '_'.join(['', self.name, 'end_flag']), initval=0)
        self.source_busy = self.module.Reg(
            '_'.join(['', self.name, 'source_busy']), initval=0)
        self.sink_busy = self.module.Reg(
            '_'.join(['', self.name, 'sink_busy']), initval=0)
        self.sink_wait_count = None
        self.reduce_reset = None
        self.reduce_reset_var = None
        # Variable registries, keyed both by user-visible name and numeric id.
        self.sources = OrderedDict()
        self.sinks = OrderedDict()
        self.constants = OrderedDict()
        self.substreams = []
        self.var_name_map = OrderedDict()
        self.var_id_map = OrderedDict()
        self.var_id_name_map = OrderedDict()
        self.var_name_id_map = OrderedDict()
        self.var_id_count = 0
        self.source_idle_map = OrderedDict()
        self.sink_when_map = OrderedDict()
        self.ram_id_count = 1  # '0' is reserved for idle
        self.ram_id_map = OrderedDict()  # key: ram._id(), value: count
        self.fsm_id_count = 0
    def source(self, name=None, datawidth=None, point=0, signed=True):
        """Declare a source (RAM -> stream) variable and its control registers.

        :param name: optional user-visible name; defaults to 'source_<id>'.
        :param datawidth: bit width; defaults to the stream's datawidth.
        :param point: fixed-point position of the variable.
        :param signed: whether the variable is signed.
        :returns: the created stream variable, decorated with the source-side
            mode/offset/size/stride/count registers and RAM read port signals.
        :raises ValueError: if the stream was already synthesized or the name
            is already taken.
        """
        if self.stream_synthesized:
            raise ValueError(
                'cannot modify the stream because already synthesized')
        _id = self.var_id_count
        if name is None:
            name = 'source_%d' % _id
        if name in self.var_name_map:
            raise ValueError("'%s' is already defined in stream '%s'" %
                             (name, self.name))
        prefix = self._prefix(name)
        self.var_id_count += 1
        if datawidth is None:
            datawidth = self.datawidth
        var = self.Variable(self._dataname(name), datawidth, point, signed)
        # Register the variable under both its name and its numeric id.
        self.sources[name] = var
        self.var_id_map[_id] = var
        self.var_name_map[name] = var
        self.var_id_name_map[_id] = name
        self.var_name_id_map[name] = _id
        # Per-mode FSMs are created lazily by the set_source_* methods.
        var.source_fsm = None
        var.source_pat_fsm = None
        var.source_multipat_fsm = None
        var.source_idle = self.module.Reg('_%s_idle' % prefix, initval=1)
        self.source_idle_map[name] = var.source_idle
        # 3'b000: set_source_empty, 3'b001: set_source,
        # 3'b010: set_source_pattern, 3'b100: set_source_multipattern
        var.source_mode = self.module.Reg('_%s_source_mode' % prefix, mode_width,
                                          initval=mode_idle)
        var.source_offset = self.module.Reg('_%s_source_offset' % prefix,
                                            self.addrwidth, initval=0)
        var.source_size = self.module.Reg('_%s_source_size' % prefix,
                                          self.addrwidth + 1, initval=0)
        var.source_stride = self.module.Reg('_%s_source_stride' % prefix,
                                            self.addrwidth, initval=0)
        var.source_count = self.module.Reg('_%s_source_count' % prefix,
                                           self.addrwidth + 1, initval=0)
        # Pattern/multipattern register banks are allocated on demand by
        # _make_source_pattern_vars / _make_source_multipattern_vars.
        var.source_pat_cur_offsets = None
        var.source_pat_sizes = None
        var.source_pat_strides = None
        var.source_pat_counts = None
        var.source_multipat_num_patterns = None
        var.source_multipat_offsets = None
        var.source_multipat_cur_offsets = None
        var.source_multipat_sizes = None
        var.source_multipat_strides = None
        # RAM read-port interface for this source.
        var.source_ram_id_map = OrderedDict()
        var.source_ram_sel = self.module.Reg('_%s_source_ram_sel' % prefix,
                                             self.ram_sel_width, initval=0)
        var.source_ram_raddr = self.module.Reg('_%s_source_ram_raddr' % prefix,
                                               self.addrwidth, initval=0)
        var.source_ram_renable = self.module.Reg('_%s_source_ram_renable' % prefix,
                                                 initval=0)
        var.source_ram_rdata = self.module.Wire('_%s_source_ram_rdata' % prefix,
                                                datawidth)
        var.source_ram_rvalid = self.module.Reg('_%s_source_ram_rvalid' % prefix,
                                                initval=0)
        var.has_source_empty = False
        var.source_empty_data = self.module.Reg('_%s_source_empty_data' % prefix,
                                                datawidth, initval=0)
        # Defaults applied every cycle: hold the idle flag, clear rvalid.
        self.seq(
            var.source_idle(var.source_idle),
            var.source_ram_rvalid(0)
        )
        return var
    def sink(self, data, name=None, when=None, when_name=None):
        """Declare a sink (stream -> RAM) for the given stream expression.

        :param data: stream expression whose values are written to a RAM.
        :param name: optional user-visible name; defaults to 'sink_<id>'.
        :param when: optional stream condition; it is registered as an
            additional sink (recursively) and recorded in sink_when_map.
        :param when_name: name used for the recursively registered `when`.
        :raises ValueError: if the stream was already synthesized or the name
            is already taken.
        """
        if self.stream_synthesized:
            raise ValueError(
                'cannot modify the stream because already synthesized')
        _id = self.var_id_count
        if name is None:
            name = 'sink_%d' % _id
        if name in self.var_name_map:
            raise ValueError("'%s' is already defined in stream '%s'" %
                             (name, self.name))
        else:
            data.output(self._dataname(name))
        prefix = self._prefix(name)
        self.var_id_count += 1
        self.sinks[name] = data
        self.var_id_map[_id] = data
        self.var_name_map[name] = data
        self.var_id_name_map[_id] = name
        self.var_name_id_map[name] = _id
        # Per-mode FSMs are created lazily by the set_sink_* methods.
        data.sink_fsm = None
        data.sink_pat_fsm = None
        data.sink_multipat_fsm = None
        # 3'b001: set_sink, 3'b010: set_sink_pattern, 3'b100: set_sink_multipattern
        data.sink_mode = self.module.Reg('_%s_sink_mode' % prefix, mode_width,
                                         initval=mode_idle)
        data.sink_offset = self.module.Reg('_%s_sink_offset' % prefix,
                                           self.addrwidth, initval=0)
        data.sink_size = self.module.Reg('_%s_sink_size' % prefix,
                                         self.addrwidth + 1, initval=0)
        data.sink_stride = self.module.Reg('_%s_sink_stride' % prefix,
                                           self.addrwidth, initval=0)
        data.sink_count = self.module.Reg('_%s_sink_count' % prefix,
                                          self.addrwidth + 1, initval=0)
        # Pattern/multipattern register banks are allocated on demand.
        data.sink_pat_cur_offsets = None
        data.sink_pat_sizes = None
        data.sink_pat_strides = None
        data.sink_pat_counts = None
        data.sink_multipat_num_patterns = None
        data.sink_multipat_offsets = None
        data.sink_multipat_cur_offsets = None
        data.sink_multipat_sizes = None
        data.sink_multipat_strides = None
        # RAM write-port interface for this sink.
        data.sink_ram_id_map = OrderedDict()
        data.sink_ram_sel = self.module.Reg('_%s_sink_ram_sel' % prefix,
                                            self.ram_sel_width, initval=0)
        data.sink_ram_waddr = self.module.Reg('_%s_sink_waddr' % prefix,
                                              self.addrwidth, initval=0)
        data.sink_ram_wenable = self.module.Reg('_%s_sink_wenable' % prefix,
                                                initval=0)
        data.sink_ram_wdata = self.module.Reg('_%s_sink_wdata' % prefix,
                                              data.width, initval=0)
        # default value
        self.seq(
            data.sink_ram_wenable(0)
        )
        if when is not None:
            self.sink(when, when_name)
            self.sink_when_map[name] = when
    def constant(self, name=None, datawidth=None, point=0, signed=True):
        """Declare a runtime-settable constant (parameter) variable.

        :param name: optional user-visible name; defaults to 'constant_<id>'.
        :param datawidth: bit width; defaults to the stream's datawidth.
        :param point: fixed-point position of the variable.
        :param signed: whether the variable is signed.
        :returns: the created parameter variable with its staging register.
        :raises ValueError: if the stream was already synthesized or the name
            is already taken.
        """
        if self.stream_synthesized:
            raise ValueError(
                'cannot modify the stream because already synthesized')
        _id = self.var_id_count
        if name is None:
            name = 'constant_%d' % _id
        if name in self.var_name_map:
            raise ValueError("'%s' is already defined in stream '%s'" %
                             (name, self.name))
        prefix = self._prefix(name)
        self.var_id_count += 1
        if datawidth is None:
            datawidth = self.datawidth
        var = self.ParameterVariable(self._dataname(name), datawidth,
                                     point, signed)
        self.constants[name] = var
        self.var_id_map[_id] = var
        self.var_name_map[name] = var
        self.var_id_name_map[_id] = name
        self.var_name_id_map[name] = _id
        # Staging register for the next constant value; has_constant_data
        # flips when a value is supplied via set_constant.
        var.next_constant_data = self.module.Reg('_%s_next_constant_data' % prefix,
                                                 datawidth, initval=0)
        var.has_constant_data = False
        return var
    def substream(self, substrm):
        """Wrap an existing stream as a substream of this one and register it."""
        sub = Substream(self.module, self.clock, self.reset, substrm, self)
        self.substreams.append(sub)
        return sub
    def set_source(self, fsm, name, ram, offset, size, stride=1, port=0):
        """ intrinsic method to assign RAM property to a source stream

        Configures normal (offset/size/stride) mode for the named source,
        binds the RAM's read port, and advances the caller's FSM one state.
        `name` may be a str / vtypes.Str (name lookup) or an
        int / vtypes.Int (variable-id lookup).
        """
        if not self.stream_synthesized:
            self._implement_stream()
        if isinstance(name, str):
            var = self.var_name_map[name]
        elif isinstance(name, vtypes.Str):
            name = name.value
            var = self.var_name_map[name]
        elif isinstance(name, int):
            var = self.var_id_map[name]
        elif isinstance(name, vtypes.Int):
            name = name.value
            var = self.var_id_map[name]
        else:
            raise TypeError('Unsupported index name')
        # NOTE(review): when an int id is passed, `name` remains an int here,
        # so this membership test against the name-keyed `sources` dict will
        # always fail -- confirm whether id-based lookup is actually used.
        if name not in self.sources:
            raise NameError("No such stream '%s'" % name)
        set_cond = self._set_flag(fsm)
        # Latch the transfer parameters when the set condition fires.
        self.seq.If(set_cond)(
            var.source_mode(mode_normal),
            var.source_offset(offset),
            var.source_size(size),
            var.source_stride(stride)
        )
        port = vtypes.to_int(port)
        self._setup_source_ram(ram, var, port, set_cond)
        self._synthesize_set_source(var, name)
        fsm.goto_next()
    def set_source_pattern(self, fsm, name, ram, offset, pattern, port=0):
        """ intrinsic method to assign RAM property to a source stream

        Configures pattern mode: `pattern` is a (size, stride) pair or a
        list of such pairs (up to max_pattern_length); unused slots are
        padded with (1, 0). Binds the RAM read port and advances the FSM.
        """
        if not self.stream_synthesized:
            self._implement_stream()
        if isinstance(name, str):
            var = self.var_name_map[name]
        elif isinstance(name, vtypes.Str):
            name = name.value
            var = self.var_name_map[name]
        elif isinstance(name, int):
            var = self.var_id_map[name]
        elif isinstance(name, vtypes.Int):
            name = name.value
            var = self.var_id_map[name]
        else:
            raise TypeError('Unsupported index name')
        if name not in self.sources:
            raise NameError("No such stream '%s'" % name)
        if not isinstance(pattern, (tuple, list)):
            raise TypeError('pattern must be list or tuple.')
        if not pattern:
            raise ValueError(
                'pattern must have one (size, stride) pair at least.')
        # Promote a single (size, stride) pair to a one-element pattern list.
        if not isinstance(pattern[0], (tuple, list)):
            pattern = (pattern,)
        pattern = tuple(pattern)
        if len(pattern) > self.max_pattern_length:
            raise ValueError(
                "'pattern' length exceeds maximum pattern length.")
        self._make_source_pattern_vars(var, name)
        set_cond = self._set_flag(fsm)
        self.seq.If(set_cond)(
            var.source_mode(mode_pattern),
            var.source_offset(offset)
        )
        # Fill unused pattern slots with the neutral (size=1, stride=0).
        pad = tuple([(1, 0)
                     for _ in range(self.max_pattern_length - len(pattern))])
        for (source_pat_size, source_pat_stride,
             (size, stride)) in zip(var.source_pat_sizes, var.source_pat_strides,
                                    pattern + pad):
            self.seq.If(set_cond)(
                source_pat_size(size),
                source_pat_stride(stride)
            )
        port = vtypes.to_int(port)
        self._setup_source_ram(ram, var, port, set_cond)
        self._synthesize_set_source_pattern(var, name)
        fsm.goto_next()
    def set_source_multidim(self, fsm, name, ram, offset, shape, order=None, port=0):
        """ intrinsic method to assign RAM property to a source stream

        Convenience wrapper over set_source_pattern: converts a multi-dim
        `shape` (with optional axis `order`; default is reversed axis order,
        i.e. the last dimension varies fastest) into a stride pattern.
        """
        if order is None:
            order = list(reversed(range(len(shape))))
        pattern = self._to_pattern(shape, order)
        return self.set_source_pattern(fsm, name, ram, offset, pattern, port)
def set_source_multipattern(self, fsm, name, ram, offsets, patterns, port=0):
    """ intrinsic method to assign multiple patterns to a RAM """

    if not self.stream_synthesized:
        self._implement_stream()

    # Resolve the stream variable from either a name (str) or an ID (int);
    # vtypes.Str/Int wrappers are unwrapped to their raw values first.
    if isinstance(name, str):
        var = self.var_name_map[name]
    elif isinstance(name, vtypes.Str):
        name = name.value
        var = self.var_name_map[name]
    elif isinstance(name, int):
        var = self.var_id_map[name]
    elif isinstance(name, vtypes.Int):
        name = name.value
        var = self.var_id_map[name]
    else:
        raise TypeError('Unsupported index name')

    if name not in self.sources:
        raise NameError("No such stream '%s'" % name)

    if not isinstance(patterns, (tuple, list)):
        raise TypeError('patterns must be list or tuple.')

    if not patterns:
        raise ValueError(
            'patterns must have one [(size, stride)] list at least.')

    # A scalar offset is replicated once per pattern.
    if not isinstance(offsets, (tuple, list)):
        offsets = [offsets] * len(patterns)

    if not offsets:
        raise ValueError('offsets must have one offset value at least.')

    offsets = tuple(offsets)
    patterns = tuple(patterns)

    if len(offsets) != len(patterns):
        raise ValueError(
            "number of offsets must be 1 or equal to the number of patterns.")

    if len(offsets) > self.max_multipattern_length:
        raise ValueError(
            "'offsets' length exceeds maximum multipattern length.")

    if len(patterns) > self.max_multipattern_length:
        raise ValueError(
            "'patterns' length exceeds maximum multipattern length.")

    for pattern in patterns:
        if len(pattern) > self.max_pattern_length:
            raise ValueError(
                "'pattern' length exceeds maximum pattern length.")

    self._make_source_multipattern_vars(var, name)

    set_cond = self._set_flag(fsm)

    self.seq.If(set_cond)(
        var.source_mode(mode_multipattern),
        var.source_multipat_num_patterns(len(patterns))
    )

    # Unused multipattern slots get offset 0.
    offsets_pad = tuple(
        [0 for _ in range(self.max_multipattern_length - len(patterns))])
    for offset, multipat_offset in zip(offsets + offsets_pad,
                                       var.source_multipat_offsets):
        self.seq.If(set_cond)(
            multipat_offset(offset)
        )

    # Load each pattern's (size, stride) pairs; unused per-dimension slots
    # are padded with neutral (1, 0) pairs.
    for multipat_sizes, multipat_strides, pattern in zip(
            var.source_multipat_sizes, var.source_multipat_strides, patterns):
        pad = tuple([(1, 0)
                     for _ in range(self.max_pattern_length - len(pattern))])
        for (multipat_size, multipat_stride,
             (size, stride)) in zip(multipat_sizes, multipat_strides,
                                    pattern + pad):
            self.seq.If(set_cond)(
                multipat_size(size),
                multipat_stride(stride)
            )

    port = vtypes.to_int(port)
    self._setup_source_ram(ram, var, port, set_cond)
    self._synthesize_set_source_multipattern(var, name)
    fsm.goto_next()
def set_source_empty(self, fsm, name, value=0):
    """ intrinsic method to feed a constant 'value' into a source stream
    (instead of RAM data) """

    if not self.stream_synthesized:
        self._implement_stream()

    # Resolve the stream variable from either a name (str) or an ID (int).
    if isinstance(name, str):
        var = self.var_name_map[name]
    elif isinstance(name, vtypes.Str):
        name = name.value
        var = self.var_name_map[name]
    elif isinstance(name, int):
        var = self.var_id_map[name]
    elif isinstance(name, vtypes.Int):
        name = name.value
        var = self.var_id_map[name]
    else:
        raise TypeError('Unsupported index name')

    if name not in self.sources:
        raise NameError("No such stream '%s'" % name)

    set_cond = self._set_flag(fsm)

    self.seq.If(set_cond)(
        var.source_mode(mode_idle),
        var.source_empty_data(value)
    )

    # The constant-feed logic below is generated only once per variable.
    if var.has_source_empty:
        fsm.goto_next()
        return

    # NOTE(review): the start condition is Not(Uor(And(source_mode, mode_idle)));
    # verify this reduction against the mode-constant encoding.
    source_start = vtypes.Ands(self.start,
                               vtypes.Not(vtypes.Uor(vtypes.And(var.source_mode, mode_idle))))

    self.seq.If(source_start)(
        var.source_idle(1)
    )

    wdata = var.source_empty_data
    wenable = source_start
    var.write(wdata, wenable)

    var.has_source_empty = True
    fsm.goto_next()
def set_sink(self, fsm, name, ram, offset, size, stride=1, port=0):
    """ intrinsic method to assign RAM property to a sink stream """

    if not self.stream_synthesized:
        self._implement_stream()

    # Resolve the stream variable from either a name (str) or an ID (int).
    if isinstance(name, str):
        var = self.var_name_map[name]
    elif isinstance(name, vtypes.Str):
        name = name.value
        var = self.var_name_map[name]
    elif isinstance(name, int):
        var = self.var_id_map[name]
    elif isinstance(name, vtypes.Int):
        name = name.value
        var = self.var_id_map[name]
    else:
        raise TypeError('Unsupported index name')

    if name not in self.sinks:
        raise NameError("No such stream '%s'" % name)

    set_cond = self._set_flag(fsm)

    # Sink control is delayed by the stream's write latency so that it
    # lines up with data emerging from the pipeline output.
    start_delay = self._write_delay() - 1
    self.seq.If(set_cond).Delay(start_delay).EagerVal()(
        var.sink_mode(mode_normal),
        var.sink_offset(offset),
        var.sink_size(size),
        var.sink_stride(stride)
    )

    # Shift the RAM-setup condition by the same delay.
    set_cond = self.seq.Prev(set_cond, start_delay)
    port = vtypes.to_int(port)
    self._setup_sink_ram(ram, var, port, set_cond)
    self._synthesize_set_sink(var, name)
    fsm.goto_next()
def set_sink_pattern(self, fsm, name, ram, offset, pattern, port=0):
    """ intrinsic method to assign RAM property to a sink stream """

    if not self.stream_synthesized:
        self._implement_stream()

    # Resolve the stream variable from either a name (str) or an ID (int).
    if isinstance(name, str):
        var = self.var_name_map[name]
    elif isinstance(name, vtypes.Str):
        name = name.value
        var = self.var_name_map[name]
    elif isinstance(name, int):
        var = self.var_id_map[name]
    elif isinstance(name, vtypes.Int):
        name = name.value
        var = self.var_id_map[name]
    else:
        raise TypeError('Unsupported index name')

    if name not in self.sinks:
        raise NameError("No such stream '%s'" % name)

    # Normalize 'pattern' to a tuple of (size, stride) pairs.
    if not isinstance(pattern, (tuple, list)):
        raise TypeError('pattern must be list or tuple.')

    if not pattern:
        raise ValueError(
            'pattern must have one (size, stride) pair at least.')

    if not isinstance(pattern[0], (tuple, list)):
        pattern = (pattern,)

    pattern = tuple(pattern)

    if len(pattern) > self.max_pattern_length:
        raise ValueError(
            "'pattern' length exceeds maximum pattern length.")

    self._make_sink_pattern_vars(var, name)

    set_cond = self._set_flag(fsm)

    # Sink control is delayed by the stream's write latency so that it
    # lines up with data emerging from the pipeline output.
    start_delay = self._write_delay() - 1
    self.seq.If(set_cond).Delay(start_delay).EagerVal()(
        var.sink_mode(mode_pattern),
        var.sink_offset(offset)
    )

    # Unused pattern slots are padded with neutral (size=1, stride=0) pairs.
    pad = tuple([(1, 0)
                 for _ in range(self.max_pattern_length - len(pattern))])
    for (sink_pat_size, sink_pat_stride,
         (size, stride)) in zip(var.sink_pat_sizes, var.sink_pat_strides,
                                pattern + pad):
        self.seq.If(set_cond).Delay(start_delay).EagerVal()(
            sink_pat_size(size),
            sink_pat_stride(stride)
        )

    set_cond = self.seq.Prev(set_cond, start_delay)
    port = vtypes.to_int(port)
    self._setup_sink_ram(ram, var, port, set_cond)
    self._synthesize_set_sink_pattern(var, name)
    fsm.goto_next()
def set_sink_multidim(self, fsm, name, ram, offset, shape, order=None, port=0):
    """ intrinsic method to assign RAM property to a sink stream """
    # Default axis order: innermost (last) dimension first.
    if order is None:
        order = list(range(len(shape) - 1, -1, -1))
    access_pattern = self._to_pattern(shape, order)
    return self.set_sink_pattern(fsm, name, ram, offset, access_pattern, port)
def set_sink_multipattern(self, fsm, name, ram, offsets, patterns, port=0):
    """ intrinsic method to assign multiple patterns to a RAM """

    if not self.stream_synthesized:
        self._implement_stream()

    # Resolve the stream variable from either a name (str) or an ID (int).
    if isinstance(name, str):
        var = self.var_name_map[name]
    elif isinstance(name, vtypes.Str):
        name = name.value
        var = self.var_name_map[name]
    elif isinstance(name, int):
        var = self.var_id_map[name]
    elif isinstance(name, vtypes.Int):
        name = name.value
        var = self.var_id_map[name]
    else:
        raise TypeError('Unsupported index name')

    if name not in self.sinks:
        raise NameError("No such stream '%s'" % name)

    if not isinstance(patterns, (tuple, list)):
        raise TypeError('patterns must be list or tuple.')

    if not patterns:
        raise ValueError(
            'patterns must have one [(size, stride)] list at least.')

    # A scalar offset is replicated once per pattern.
    if not isinstance(offsets, (tuple, list)):
        offsets = [offsets] * len(patterns)

    if not offsets:
        raise ValueError('offsets must have one offset value at least.')

    offsets = tuple(offsets)
    patterns = tuple(patterns)

    if len(offsets) != len(patterns):
        raise ValueError(
            "number of offsets must be 1 or equal to the number of patterns.")

    if len(offsets) > self.max_multipattern_length:
        raise ValueError(
            "'offsets' length exceeds maximum multipattern length.")

    if len(patterns) > self.max_multipattern_length:
        raise ValueError(
            "'patterns' length exceeds maximum multipattern length.")

    for pattern in patterns:
        if len(pattern) > self.max_pattern_length:
            raise ValueError(
                "'pattern' length exceeds maximum pattern length.")

    self._make_sink_multipattern_vars(var, name)

    set_cond = self._set_flag(fsm)

    # Sink control is delayed by the stream's write latency so that it
    # lines up with data emerging from the pipeline output.
    start_delay = self._write_delay() - 1
    self.seq.If(set_cond).Delay(start_delay).EagerVal()(
        var.sink_mode(mode_multipattern),
        var.sink_multipat_num_patterns(len(patterns))
    )

    # Unused multipattern slots get offset 0.
    offsets_pad = tuple(
        [0 for _ in range(self.max_multipattern_length - len(patterns))])
    for offset, multipat_offset in zip(offsets + offsets_pad,
                                       var.sink_multipat_offsets):
        self.seq.If(set_cond).Delay(start_delay).EagerVal()(
            multipat_offset(offset)
        )

    # Load each pattern's (size, stride) pairs; unused per-dimension slots
    # are padded with neutral (1, 0) pairs.
    for multipat_sizes, multipat_strides, pattern in zip(
            var.sink_multipat_sizes, var.sink_multipat_strides, patterns):
        pad = tuple([(1, 0)
                     for _ in range(self.max_pattern_length - len(pattern))])
        for (multipat_size, multipat_stride,
             (size, stride)) in zip(multipat_sizes, multipat_strides,
                                    pattern + pad):
            self.seq.If(set_cond).Delay(start_delay).EagerVal()(
                multipat_size(size),
                multipat_stride(stride)
            )

    set_cond = self.seq.Prev(set_cond, start_delay)
    port = vtypes.to_int(port)
    self._setup_sink_ram(ram, var, port, set_cond)
    self._synthesize_set_sink_multipattern(var, name)
    fsm.goto_next()
def set_sink_empty(self, fsm, name):
    """ intrinsic method to assign RAM property to a sink stream """

    if not self.stream_synthesized:
        self._implement_stream()

    # Resolve the stream variable from either a name (str) or an ID (int).
    if isinstance(name, str):
        var = self.var_name_map[name]
    elif isinstance(name, vtypes.Str):
        name = name.value
        var = self.var_name_map[name]
    elif isinstance(name, int):
        var = self.var_id_map[name]
    elif isinstance(name, vtypes.Int):
        name = name.value
        var = self.var_id_map[name]
    else:
        raise TypeError('Unsupported index name')

    if name not in self.sinks:
        raise NameError("No such stream '%s'" % name)

    set_cond = self._set_flag(fsm)

    # Align with the write-latency-delayed control like the other set_sink_* methods.
    start_delay = self._write_delay() - 1
    set_cond = self.seq.Prev(set_cond, start_delay)

    # Detach the sink from any RAM by selecting the reserved empty slot.
    ram_sel = var.sink_ram_sel

    self.seq.If(set_cond)(
        ram_sel(0)  # '0' is reserved for empty
    )

    fsm.goto_next()
def set_constant(self, fsm, name, value):
    """ intrinsic method to assign constant value to a constant stream """

    if not self.stream_synthesized:
        self._implement_stream()

    # Resolve the stream variable from either a name (str) or an ID (int).
    if isinstance(name, str):
        var = self.var_name_map[name]
    elif isinstance(name, vtypes.Str):
        name = name.value
        var = self.var_name_map[name]
    elif isinstance(name, int):
        var = self.var_id_map[name]
    elif isinstance(name, vtypes.Int):
        name = name.value
        var = self.var_id_map[name]
    else:
        raise TypeError('Unsupported index name')

    if name not in self.constants:
        raise NameError("No such stream '%s'" % name)

    set_cond = self._set_flag(fsm)

    self.seq.If(set_cond)(
        var.next_constant_data(value)
    )

    # Generate the constant-feed write only once per variable.
    if not var.has_constant_data:
        var.write(var.next_constant_data, self.start)
        var.has_constant_data = True

    fsm.goto_next()
def run(self, fsm):
    """ intrinsic method to start the stream; synthesizes the stream control
    FSM on first call, then advances the caller's FSM through two states """

    # entry point
    self.fsm._set_index(0)

    cond = self._set_flag(fsm)
    add_mux(self.start_flag, cond, 1)

    # after started
    if self.fsm_synthesized:
        # Control FSM already generated: just walk the caller's FSM through
        # the same two states as the first-time path below.
        fsm.goto_next()
        fsm.goto_next()
        return

    self.fsm_synthesized = True

    start_cond = vtypes.Ands(self.fsm.here, self.start_flag)

    # start pulse
    self.fsm.seq(
        self.start(0)
    )
    self.fsm.If(self.start_flag)(
        self.start(1),
        self.source_busy(1)
    )

    # Release the accumulate-pipeline reset after the RAM read latency.
    if self.reduce_reset is not None:
        reset_delay = self.ram_delay + 1
        self.fsm.seq.If(self.seq.Prev(start_cond, reset_delay))(
            self.reduce_reset(0)
        )

    substreams = self._collect_substreams()

    # Propagate the start to every substream with its own delays.
    for sub in substreams:
        sub.substrm.fsm.seq.If(start_cond)(
            sub.substrm.source_busy(1)
        )
        start_stage = sub.start_stage  # NOTE(review): currently unused here
        reset_delay = self.ram_delay + 1 + sub.reset_delay
        # cond_delay = self.ram_delay + 1 + sub.reset_delay - 1
        # increased for constant
        cond_delay = self.ram_delay + 1 + sub.reset_delay - 2
        sub_fsm = sub.substrm.fsm
        sub_fsm._set_index(0)
        if sub.substrm.reduce_reset is not None:
            sub_fsm.seq.If(self.seq.Prev(start_cond, reset_delay))(
                sub.substrm.reduce_reset(0)
            )
        for cond in sub.conds.values():
            sub_fsm.seq.If(self.seq.Prev(start_cond, cond_delay))(
                cond(1)
            )

    self.fsm.If(self.start_flag).goto_next()
    self.fsm.goto_next()

    # Wait until every source becomes idle; iterate in a fixed key order
    # so the generated RTL is deterministic.
    done_cond = None
    for key, source_idle in sorted(self.source_idle_map.items(),
                                   key=lambda x: x[0]):
        done_cond = make_condition(done_cond, source_idle)

    done = self.module.Wire('_%s_done' % self.name)
    done.assign(done_cond)

    self.fsm.If(done).goto_next()

    self.fsm(
        self.source_busy(0)
    )

    end_cond = self.fsm.here

    # reset accumulate pipelines
    if self.reduce_reset is not None:
        reset_delay = 1
        self.fsm.seq.If(self.seq.Prev(end_cond, reset_delay))(
            self.reduce_reset(1)
        )

    # Propagate the end to every substream, and count outstanding sink
    # writes so sink_busy deasserts only after the write pipeline drains.
    for sub in substreams:
        sub.substrm.fsm.seq.If(end_cond)(
            sub.substrm.source_busy(0)
        )
        reset_delay = 1 + sub.reset_delay
        cond_delay = 1 + sub.reset_delay - 1
        sub_fsm = sub.substrm.fsm
        sub_fsm._set_index(0)
        if sub.substrm.reduce_reset is not None:
            sub_fsm.seq.If(self.seq.Prev(end_cond, reset_delay))(
                sub.substrm.reduce_reset(1)
            )
        for cond in sub.conds.values():
            sub_fsm.seq.If(self.seq.Prev(end_cond, cond_delay))(
                cond(0)
            )

        num_wdelay = sub.substrm._write_delay()
        if sub.substrm.sink_wait_count is None:
            sub.substrm.sink_wait_count = sub.substrm.module.Reg(
                '_'.join(['', sub.substrm.name, 'sink_wait_count']),
                int(math.ceil(math.log(num_wdelay, 2))), initval=0)

        sub.substrm.fsm.seq.If(sub.substrm.sink_wait_count == 1,
                               vtypes.Not(start_cond),
                               sub.substrm.seq.Prev(end_cond, num_wdelay))(
            sub.substrm.sink_busy(0)
        )
        sub.substrm.fsm.seq.If(start_cond)(
            sub.substrm.sink_busy(1)
        )
        sub.substrm.fsm.seq.If(vtypes.Not(start_cond),
                               sub.substrm.seq.Prev(end_cond, num_wdelay))(
            sub.substrm.sink_wait_count.dec()
        )
        sub.substrm.fsm.seq.If(start_cond,
                               vtypes.Not(sub.substrm.seq.Prev(end_cond, num_wdelay)))(
            sub.substrm.sink_wait_count.inc()
        )

    # Same outstanding-write bookkeeping for this stream itself.
    num_wdelay = self._write_delay()
    if self.sink_wait_count is None:
        self.sink_wait_count = self.module.Reg(
            '_'.join(['', self.name, 'sink_wait_count']),
            int(math.ceil(math.log(num_wdelay, 2))), initval=0)

    self.fsm.seq.If(self.sink_wait_count == 1,
                    vtypes.Not(start_cond),
                    self.seq.Prev(end_cond, num_wdelay))(
        self.sink_busy(0)
    )
    self.fsm.seq.If(start_cond)(
        self.sink_busy(1)
    )
    self.fsm.seq.If(vtypes.Not(start_cond),
                    self.seq.Prev(end_cond, num_wdelay))(
        self.sink_wait_count.dec()
    )
    self.fsm.seq.If(start_cond,
                    vtypes.Not(self.seq.Prev(end_cond, num_wdelay)))(
        self.sink_wait_count.inc()
    )

    # end_flag pulses after the final writes have drained.
    self.fsm.seq(
        self.end_flag(0)
    )
    self.fsm.seq.If(self.seq.Prev(end_cond, num_wdelay))(
        self.end_flag(1)
    )

    self.fsm.goto_init()

    fsm.goto_next()
    fsm.goto_next()

    return 0
def join(self, fsm):
    """ intrinsic method that waits until both source and sink become idle """
    source_done = vtypes.Not(self.source_busy)
    sink_done = vtypes.Not(self.sink_busy)
    fsm.If(source_done, sink_done).goto_next()
    return 0
def done(self, fsm):
    """ returns an expression asserted when both source and sink are idle """
    source_idle = vtypes.Not(self.source_busy)
    sink_idle = vtypes.Not(self.sink_busy)
    return vtypes.Ands(source_idle, sink_idle)
def source_join(self, fsm):
    """ intrinsic method that waits until the source side becomes idle """
    source_idle = vtypes.Not(self.source_busy)
    fsm.If(source_idle).goto_next()
    return 0
def source_done(self, fsm):
    """ returns an expression asserted when the source side is idle """
    return vtypes.Not(self.source_busy)
def sink_join(self, fsm):
    """ intrinsic method that waits until the sink side becomes idle """
    sink_idle = vtypes.Not(self.sink_busy)
    fsm.If(sink_idle).goto_next()
    return 0
def sink_done(self, fsm):
    """ returns an expression asserted when the sink side is idle """
    return vtypes.Not(self.sink_busy)
def _setup_source_ram(self, ram, var, port, set_cond):
    """ connect 'ram' (read side) to source variable 'var' via a RAM-select mux """

    if ram._id() in var.source_ram_id_map:
        # This RAM is already wired to this source: just select it.
        ram_id = var.source_ram_id_map[ram._id()]
        self.seq.If(set_cond)(
            var.source_ram_sel(ram_id)
        )
        return

    # Allocate a stream-wide ID for the RAM on first use.
    if ram._id() not in self.ram_id_map:
        ram_id = self.ram_id_count
        self.ram_id_count += 1
        self.ram_id_map[ram._id()] = ram_id
    else:
        ram_id = self.ram_id_map[ram._id()]

    var.source_ram_id_map[ram._id()] = ram_id

    self.seq.If(set_cond)(
        var.source_ram_sel(ram_id)
    )

    # Drive the RAM read port only while this RAM is the selected source.
    ram_cond = (var.source_ram_sel == ram_id)
    renable = vtypes.Ands(var.source_ram_renable, ram_cond)
    d, v = ram.read_rtl(var.source_ram_raddr, port=port, cond=renable)
    add_mux(var.source_ram_rdata, ram_cond, d)

    # Read data becomes valid one cycle after the read enable.
    self.seq.If(self.seq.Prev(renable, 1))(
        var.source_ram_rvalid(1)
    )
def _synthesize_set_source(self, var, name):
    """ generate the per-variable FSM that streams RAM data into 'var'
    (normal mode); idempotent per variable """

    if var.source_fsm is not None:
        return

    # RAM read data becomes the stream input when the read is valid.
    wdata = var.source_ram_rdata
    wenable = var.source_ram_rvalid
    var.write(wdata, wenable)

    source_start = vtypes.Ands(self.start,
                               vtypes.And(var.source_mode, mode_normal))

    self.seq.If(source_start)(
        var.source_idle(0)
    )

    fsm_id = self.fsm_id_count
    self.fsm_id_count += 1

    prefix = self._prefix(name)
    fsm_name = '_%s_source_fsm_%d' % (prefix, fsm_id)
    var.source_fsm = FSM(self.module, fsm_name, self.clock, self.reset,
                         as_module=self.fsm_as_module)

    var.source_fsm.If(source_start).goto_next()

    # State 1: issue the first read at the configured offset.
    self.seq.If(var.source_fsm.here)(
        var.source_ram_raddr(var.source_offset),
        var.source_ram_renable(1),
        var.source_count(var.source_size)
    )

    var.source_fsm.goto_next()

    # State 2: advance the address by the stride until the count runs out.
    self.seq.If(var.source_fsm.here)(
        var.source_ram_raddr.add(var.source_stride),
        var.source_ram_renable(1),
        var.source_count.dec()
    )

    self.seq.If(var.source_fsm.here, var.source_count == 1)(
        var.source_ram_renable(0),
        var.source_idle(1)
    )

    var.source_fsm.If(var.source_count == 1).goto_init()
def _make_source_pattern_vars(self, var, name):
    """ lazily create the per-variable registers used by pattern-mode sources """

    if var.source_pat_cur_offsets is not None:
        return

    prefix = self._prefix(name)

    # Per-dimension running offsets, sizes, strides, and down-counters.
    var.source_pat_cur_offsets = [
        self.module.Reg('_source_%s_pat_cur_offset_%d' % (prefix, i),
                        self.addrwidth, initval=0)
        for i in range(self.max_pattern_length)]
    var.source_pat_sizes = [self.module.Reg('_source_%s_pat_size_%d' % (prefix, i),
                                            self.addrwidth + 1, initval=0)
                            for i in range(self.max_pattern_length)]
    var.source_pat_strides = [self.module.Reg('_source_%s_pat_stride_%d' % (prefix, i),
                                              self.addrwidth, initval=0)
                              for i in range(self.max_pattern_length)]
    var.source_pat_counts = [self.module.Reg('_source_%s_pat_count_%d' % (prefix, i),
                                             self.addrwidth + 1, initval=0)
                             for i in range(self.max_pattern_length)]
def _synthesize_set_source_pattern(self, var, name):
    """ generate the per-variable FSM that streams RAM data into 'var'
    (pattern mode); idempotent per variable """

    if var.source_pat_fsm is not None:
        return

    # RAM read data becomes the stream input when the read is valid.
    wdata = var.source_ram_rdata
    wenable = var.source_ram_rvalid
    var.write(wdata, wenable)

    source_start = vtypes.Ands(self.start,
                               vtypes.And(var.source_mode, mode_pattern))

    self.seq.If(source_start)(
        var.source_idle(0)
    )

    # Initialize per-dimension offsets and down-counters at start.
    for source_pat_cur_offset in var.source_pat_cur_offsets:
        self.seq.If(source_start)(
            source_pat_cur_offset(0)
        )

    for (source_pat_size, source_pat_count) in zip(
            var.source_pat_sizes, var.source_pat_counts):
        self.seq.If(source_start)(
            source_pat_count(source_pat_size - 1)
        )

    fsm_id = self.fsm_id_count
    self.fsm_id_count += 1

    prefix = self._prefix(name)
    fsm_name = '_%s_source_pat_fsm_%d' % (prefix, fsm_id)
    var.source_pat_fsm = FSM(self.module, fsm_name,
                             self.clock, self.reset,
                             as_module=self.fsm_as_module)

    var.source_pat_fsm.If(source_start).goto_next()

    # Read address = base offset + sum of all per-dimension offsets.
    source_all_offset = self.module.Wire('_%s_source_pat_all_offset' % prefix,
                                         self.addrwidth)
    source_all_offset_val = var.source_offset
    for source_pat_cur_offset in var.source_pat_cur_offsets:
        source_all_offset_val += source_pat_cur_offset
    source_all_offset.assign(source_all_offset_val)

    self.seq.If(var.source_pat_fsm.here)(
        var.source_ram_raddr(source_all_offset),
        var.source_ram_renable(1)
    )

    # Nested-counter update: a dimension advances only when every inner
    # dimension has wrapped; the carry is propagated through 'upcond'.
    upcond = None
    for (source_pat_cur_offset, source_pat_size,
         source_pat_stride, source_pat_count) in zip(
            var.source_pat_cur_offsets, var.source_pat_sizes,
            var.source_pat_strides, var.source_pat_counts):
        self.seq.If(var.source_pat_fsm.here, upcond)(
            source_pat_cur_offset.add(source_pat_stride),
            source_pat_count.dec()
        )
        reset_cond = source_pat_count == 0
        self.seq.If(var.source_pat_fsm.here, upcond, reset_cond)(
            source_pat_cur_offset(0),
            source_pat_count(source_pat_size - 1)
        )
        upcond = make_condition(upcond, reset_cond)

    # All dimensions wrapped: the whole pattern is finished.
    fin_cond = upcond

    var.source_pat_fsm.If(fin_cond).goto_next()

    self.seq.If(var.source_pat_fsm.here)(
        var.source_ram_renable(0),
        var.source_idle(1)
    )

    var.source_pat_fsm.goto_init()
def _make_source_multipattern_vars(self, var, name):
    """ lazily create the per-variable registers used by multipattern-mode sources """

    if var.source_multipat_cur_offsets is not None:
        return

    prefix = self._prefix(name)

    # Number of remaining patterns to execute.
    var.source_multipat_num_patterns = self.module.Reg(
        '_source_%s_multipat_num_patterns' % prefix,
        int(math.ceil(math.log(self.max_multipattern_length, 2))), initval=0)
    # Base offset per pattern.
    var.source_multipat_offsets = [
        self.module.Reg('_source_%s_multipat_%d_offset' % (prefix, j),
                        self.addrwidth, initval=0)
        for j in range(self.max_multipattern_length)]
    # Per-dimension running offsets of the currently active pattern.
    var.source_multipat_cur_offsets = [
        self.module.Reg('_source_%s_multipat_%d_cur_offset' % (prefix, i),
                        self.addrwidth, initval=0)
        for i in range(self.max_pattern_length)]
    # Per-pattern, per-dimension sizes, strides, and down-counters.
    var.source_multipat_sizes = [[self.module.Reg('_source_%s_multipat_%d_size_%d' %
                                                  (prefix, j, i),
                                                  self.addrwidth + 1, initval=0)
                                  for i in range(self.max_pattern_length)]
                                 for j in range(self.max_multipattern_length)]
    var.source_multipat_strides = [[self.module.Reg('_source_%s_multipat_%d_stride_%d' %
                                                    (prefix, j, i),
                                                    self.addrwidth, initval=0)
                                    for i in range(self.max_pattern_length)]
                                   for j in range(self.max_multipattern_length)]
    var.source_multipat_counts = [[self.module.Reg('_source_%s_multipat_%d_count_%d' %
                                                   (prefix, j, i),
                                                   self.addrwidth + 1, initval=0)
                                   for i in range(self.max_pattern_length)]
                                  for j in range(self.max_multipattern_length)]
def _synthesize_set_source_multipattern(self, var, name):
    """ generate the per-variable FSM that streams RAM data into 'var'
    (multipattern mode); idempotent per variable.

    Bug fix: the idempotency guard previously tested 'var.source_pat_fsm'
    (the pattern-mode FSM) instead of 'var.source_multipat_fsm', so the
    multipattern FSM could be generated twice, or wrongly skipped when
    pattern mode had already been synthesized for the same variable.
    The sink-side counterpart guards on 'sink_multipat_fsm', and this
    method assigns 'source_multipat_fsm' below.
    """

    if var.source_multipat_fsm is not None:
        return

    # RAM read data becomes the stream input when the read is valid.
    wdata = var.source_ram_rdata
    wenable = var.source_ram_rvalid
    var.write(wdata, wenable)

    source_start = vtypes.Ands(self.start,
                               vtypes.And(var.source_mode, mode_multipattern))

    self.seq.If(source_start)(
        var.source_idle(0)
    )

    # num_patterns counts the remaining patterns after the active one.
    self.seq.If(source_start)(
        var.source_multipat_num_patterns.dec()
    )

    # Initialize per-dimension offsets and down-counters of pattern 0.
    for source_multipat_cur_offset in var.source_multipat_cur_offsets:
        self.seq.If(source_start)(
            source_multipat_cur_offset(0)
        )

    for (source_multipat_size, source_multipat_count) in zip(
            var.source_multipat_sizes[0], var.source_multipat_counts[0]):
        self.seq.If(source_start)(
            source_multipat_count(source_multipat_size - 1)
        )

    fsm_id = self.fsm_id_count
    self.fsm_id_count += 1

    prefix = self._prefix(name)
    fsm_name = '_%s_source_multipat_fsm_%d' % (prefix, fsm_id)
    var.source_multipat_fsm = FSM(self.module, fsm_name,
                                  self.clock, self.reset,
                                  as_module=self.fsm_as_module)

    var.source_multipat_fsm.If(source_start).goto_next()

    # Read address = active pattern's base offset + per-dimension offsets.
    source_all_offset = self.module.Wire('_%s_source_multipat_all_offset' % prefix,
                                         self.addrwidth)
    source_all_offset_val = var.source_multipat_offsets[0]
    for source_multipat_cur_offset in var.source_multipat_cur_offsets:
        source_all_offset_val += source_multipat_cur_offset
    source_all_offset.assign(source_all_offset_val)

    self.seq.If(var.source_multipat_fsm.here)(
        var.source_ram_raddr(source_all_offset),
        var.source_ram_renable(1)
    )

    # Nested-counter update of the active pattern (slot 0); a dimension
    # advances only when every inner dimension has wrapped ('upcond').
    upcond = None
    for (source_multipat_cur_offset, source_multipat_size,
         source_multipat_stride, source_multipat_count) in zip(
            var.source_multipat_cur_offsets, var.source_multipat_sizes[0],
            var.source_multipat_strides[0], var.source_multipat_counts[0]):
        self.seq.If(var.source_multipat_fsm.here, upcond)(
            source_multipat_cur_offset.add(source_multipat_stride),
            source_multipat_count.dec()
        )
        reset_cond = source_multipat_count == 0
        self.seq.If(var.source_multipat_fsm.here, upcond, reset_cond)(
            source_multipat_cur_offset(0),
            source_multipat_count(source_multipat_size - 1)
        )
        upcond = make_condition(upcond, reset_cond)

    fin_cond = upcond

    # When the active pattern finishes, shift the queued patterns
    # (offsets, sizes, strides) one slot toward slot 0.
    prev_offset = var.source_multipat_offsets[0]
    for multipat_offset in var.source_multipat_offsets[1:]:
        self.seq.If(fin_cond, var.source_multipat_fsm.here)(
            prev_offset(multipat_offset)
        )
        prev_offset = multipat_offset

    prev_sizes = var.source_multipat_sizes[0]
    for multipat_sizes in var.source_multipat_sizes[1:]:
        for prev_size, size in zip(prev_sizes, multipat_sizes):
            self.seq.If(fin_cond, var.source_multipat_fsm.here)(
                prev_size(size)
            )
        prev_sizes = multipat_sizes

    prev_strides = var.source_multipat_strides[0]
    for multipat_strides in var.source_multipat_strides[1:]:
        for prev_stride, stride in zip(prev_strides, multipat_strides):
            self.seq.If(fin_cond, var.source_multipat_fsm.here)(
                prev_stride(stride)
            )
        prev_strides = multipat_strides

    self.seq.If(fin_cond, var.source_multipat_fsm.here)(
        var.source_multipat_num_patterns.dec()
    )

    var.source_multipat_fsm.If(fin_cond,
                               var.source_multipat_num_patterns == 0).goto_next()

    self.seq.If(var.source_multipat_fsm.here)(
        var.source_ram_renable(0),
        var.source_idle(1)
    )

    var.source_multipat_fsm.goto_init()
def _setup_sink_ram(self, ram, var, port, set_cond):
    """ connect 'ram' (write side) to sink variable 'var' via a RAM-select mux """

    if ram._id() in var.sink_ram_id_map:
        # This RAM is already wired to this sink: just select it.
        ram_id = var.sink_ram_id_map[ram._id()]
        self.seq.If(set_cond)(
            var.sink_ram_sel(ram_id)
        )
        return

    # Allocate a stream-wide ID for the RAM on first use.
    if ram._id() not in self.ram_id_map:
        ram_id = self.ram_id_count
        self.ram_id_count += 1
        self.ram_id_map[ram._id()] = ram_id
    else:
        ram_id = self.ram_id_map[ram._id()]

    var.sink_ram_id_map[ram._id()] = ram_id

    self.seq.If(set_cond)(
        var.sink_ram_sel(ram_id)
    )

    # Drive the RAM write port only while this RAM is the selected sink.
    ram_cond = (var.sink_ram_sel == ram_id)
    wenable = vtypes.Ands(var.sink_ram_wenable, ram_cond)
    ram.write_rtl(var.sink_ram_waddr, var.sink_ram_wdata,
                  port=port, cond=wenable)
def _synthesize_set_sink(self, var, name):
    """ generate the per-variable FSM that writes stream output to RAM
    (normal mode); idempotent per variable """

    if var.sink_fsm is not None:
        return

    # Align sink control with data emerging from the stream pipeline.
    start_delay = self._write_delay()
    start = self.seq.Prev(self.start, start_delay)
    sink_start = vtypes.Ands(start,
                             vtypes.And(var.sink_mode, mode_normal))

    fsm_id = self.fsm_id_count
    self.fsm_id_count += 1

    prefix = self._prefix(name)
    fsm_name = '_%s_sink_fsm_%d' % (prefix, fsm_id)
    var.sink_fsm = FSM(self.module, fsm_name, self.clock, self.reset,
                       as_module=self.fsm_as_module)

    # Pre-decrement the address so the first conditional '.add(stride)'
    # below lands exactly on sink_offset.
    self.seq.If(sink_start)(
        var.sink_ram_waddr(var.sink_offset - var.sink_stride),
        var.sink_count(var.sink_size)
    )

    var.sink_fsm.If(sink_start).goto_next()

    # Optional write qualifier registered via the sink-when mapping.
    if name in self.sink_when_map:
        when = self.sink_when_map[name]
        wcond = when.read()
    else:
        wcond = None

    rdata = var.read()

    self.seq.If(var.sink_fsm.here, wcond)(
        var.sink_ram_waddr.add(var.sink_stride),
        var.sink_ram_wdata(rdata),
        var.sink_ram_wenable(1),
        var.sink_count.dec()
    )

    var.sink_fsm.If(wcond, var.sink_count == 1).goto_init()
def _make_sink_pattern_vars(self, var, name):
    """ lazily create the per-variable registers used by pattern-mode sinks """

    if var.sink_pat_cur_offsets is not None:
        return

    prefix = self._prefix(name)

    # Per-dimension running offsets, sizes, strides, and down-counters.
    var.sink_pat_cur_offsets = [self.module.Reg('_sink_%s_pat_cur_offset_%d' % (prefix, i),
                                                self.addrwidth, initval=0)
                                for i in range(self.max_pattern_length)]
    var.sink_pat_sizes = [self.module.Reg('_sink_%s_pat_size_%d' % (prefix, i),
                                          self.addrwidth + 1, initval=0)
                          for i in range(self.max_pattern_length)]
    var.sink_pat_strides = [self.module.Reg('_sink_%s_pat_stride_%d' % (prefix, i),
                                            self.addrwidth, initval=0)
                            for i in range(self.max_pattern_length)]
    var.sink_pat_counts = [self.module.Reg('_sink_%s_pat_count_%d' % (prefix, i),
                                           self.addrwidth + 1, initval=0)
                           for i in range(self.max_pattern_length)]
def _synthesize_set_sink_pattern(self, var, name):
    """ generate the per-variable FSM that writes stream output to RAM
    (pattern mode); idempotent per variable """

    if var.sink_pat_fsm is not None:
        return

    # Align sink control with data emerging from the stream pipeline.
    start_delay = self._write_delay()
    start = self.seq.Prev(self.start, start_delay)
    sink_start = vtypes.Ands(start,
                             vtypes.And(var.sink_mode, mode_pattern))

    fsm_id = self.fsm_id_count
    self.fsm_id_count += 1

    prefix = self._prefix(name)
    fsm_name = '_%s_sink_pat_fsm_%d' % (prefix, fsm_id)
    var.sink_pat_fsm = FSM(self.module, fsm_name,
                           self.clock, self.reset,
                           as_module=self.fsm_as_module)

    # Initialize per-dimension offsets and down-counters at start.
    for sink_pat_cur_offset in var.sink_pat_cur_offsets:
        self.seq.If(sink_start)(
            sink_pat_cur_offset(0)
        )

    for (sink_pat_size, sink_pat_count) in zip(
            var.sink_pat_sizes, var.sink_pat_counts):
        self.seq.If(sink_start)(
            sink_pat_count(sink_pat_size - 1)
        )

    var.sink_pat_fsm.If(sink_start).goto_next()

    # Optional write qualifier registered via the sink-when mapping.
    if name in self.sink_when_map:
        when = self.sink_when_map[name]
        wcond = when.read()
    else:
        wcond = None

    # Write address = base offset + sum of all per-dimension offsets.
    sink_all_offset = self.module.Wire('_%s_sink_pat_all_offset' % prefix,
                                       self.addrwidth)
    sink_all_offset_val = var.sink_offset
    for sink_pat_cur_offset in var.sink_pat_cur_offsets:
        sink_all_offset_val += sink_pat_cur_offset
    sink_all_offset.assign(sink_all_offset_val)

    # NOTE(review): wcond is recomputed here identically to the block
    # above; redundant but harmless.
    if name in self.sink_when_map:
        when = self.sink_when_map[name]
        wcond = when.read()
    else:
        wcond = None

    rdata = var.read()

    self.seq.If(var.sink_pat_fsm.here, wcond)(
        var.sink_ram_waddr(sink_all_offset),
        var.sink_ram_wdata(rdata),
        var.sink_ram_wenable(1)
    )

    # Nested-counter update: a dimension advances only when every inner
    # dimension has wrapped; the carry is propagated through 'upcond'.
    upcond = None
    for (sink_pat_cur_offset, sink_pat_size,
         sink_pat_stride, sink_pat_count) in zip(
            var.sink_pat_cur_offsets, var.sink_pat_sizes,
            var.sink_pat_strides, var.sink_pat_counts):
        self.seq.If(var.sink_pat_fsm.here, upcond)(
            sink_pat_cur_offset.add(sink_pat_stride),
            sink_pat_count.dec()
        )
        reset_cond = sink_pat_count == 0
        self.seq.If(var.sink_pat_fsm.here, upcond, reset_cond)(
            sink_pat_cur_offset(0),
            sink_pat_count(sink_pat_size - 1)
        )
        upcond = make_condition(upcond, reset_cond)

    # All dimensions wrapped: the whole pattern is finished.
    fin_cond = upcond

    var.sink_pat_fsm.If(fin_cond).goto_init()
def _make_sink_multipattern_vars(self, var, name):
    """ lazily create the per-variable registers used by multipattern-mode sinks """

    if var.sink_multipat_cur_offsets is not None:
        return

    prefix = self._prefix(name)

    # Number of remaining patterns to execute.
    var.sink_multipat_num_patterns = self.module.Reg(
        '_sink_%s_multipat_num_patterns' % prefix,
        int(math.ceil(math.log(self.max_multipattern_length, 2))), initval=0)
    # Base offset per pattern.
    var.sink_multipat_offsets = [
        self.module.Reg('_sink_%s_multipat_%d_offset' % (prefix, j),
                        self.addrwidth, initval=0)
        for j in range(self.max_multipattern_length)]
    # Per-dimension running offsets of the currently active pattern.
    var.sink_multipat_cur_offsets = [
        self.module.Reg('_sink_%s_multipat_%d_cur_offset' % (prefix, i),
                        self.addrwidth, initval=0)
        for i in range(self.max_pattern_length)]
    # Per-pattern, per-dimension sizes, strides, and down-counters.
    var.sink_multipat_sizes = [[self.module.Reg('_sink_%s_multipat_%d_size_%d' %
                                                (prefix, j, i),
                                                self.addrwidth + 1, initval=0)
                                for i in range(self.max_pattern_length)]
                               for j in range(self.max_multipattern_length)]
    var.sink_multipat_strides = [[self.module.Reg('_sink_%s_multipat_%d_stride_%d' %
                                                  (prefix, j, i),
                                                  self.addrwidth, initval=0)
                                  for i in range(self.max_pattern_length)]
                                 for j in range(self.max_multipattern_length)]
    var.sink_multipat_counts = [[self.module.Reg('_sink_%s_multipat_%d_count_%d' %
                                                 (prefix, j, i),
                                                 self.addrwidth + 1, initval=0)
                                 for i in range(self.max_pattern_length)]
                                for j in range(self.max_multipattern_length)]
def _synthesize_set_sink_multipattern(self, var, name):
    """ generate the per-variable FSM that writes stream output to RAM
    (multipattern mode); idempotent per variable """

    if var.sink_multipat_fsm is not None:
        return

    # Align sink control with data emerging from the stream pipeline.
    start_delay = self._write_delay()
    start = self.seq.Prev(self.start, start_delay)
    sink_start = vtypes.Ands(start,
                             vtypes.And(var.sink_mode, mode_multipattern))

    fsm_id = self.fsm_id_count
    self.fsm_id_count += 1

    prefix = self._prefix(name)
    fsm_name = '_%s_sink_multipat_fsm_%d' % (prefix, fsm_id)
    var.sink_multipat_fsm = FSM(self.module, fsm_name,
                                self.clock, self.reset,
                                as_module=self.fsm_as_module)

    # num_patterns counts the remaining patterns after the active one.
    self.seq.If(sink_start)(
        var.sink_multipat_num_patterns.dec()
    )

    # Initialize per-dimension offsets and down-counters of pattern 0.
    for sink_multipat_cur_offset in var.sink_multipat_cur_offsets:
        self.seq.If(sink_start)(
            sink_multipat_cur_offset(0)
        )

    for (sink_multipat_size, sink_multipat_count) in zip(
            var.sink_multipat_sizes[0], var.sink_multipat_counts[0]):
        self.seq.If(sink_start)(
            sink_multipat_count(sink_multipat_size - 1)
        )

    var.sink_multipat_fsm.If(sink_start).goto_next()

    # Optional write qualifier registered via the sink-when mapping.
    if name in self.sink_when_map:
        when = self.sink_when_map[name]
        wcond = when.read()
    else:
        wcond = None

    # Write address = active pattern's base offset + per-dimension offsets.
    sink_all_offset = self.module.Wire('_%s_sink_multipat_all_offset' % prefix,
                                       self.addrwidth)
    sink_all_offset_val = var.sink_multipat_offsets[0]
    for sink_multipat_cur_offset in var.sink_multipat_cur_offsets:
        sink_all_offset_val += sink_multipat_cur_offset
    sink_all_offset.assign(sink_all_offset_val)

    # NOTE(review): wcond is recomputed here identically to the block
    # above; redundant but harmless.
    if name in self.sink_when_map:
        when = self.sink_when_map[name]
        wcond = when.read()
    else:
        wcond = None

    rdata = var.read()

    # Default-deassert the write enable; overridden below while active.
    self.seq.If(var.sink_multipat_fsm.here)(
        var.sink_ram_wenable(0)
    )

    self.seq.If(var.sink_multipat_fsm.here, wcond)(
        var.sink_ram_waddr(sink_all_offset),
        var.sink_ram_wdata(rdata),
        var.sink_ram_wenable(1)
    )

    # Nested-counter update of the active pattern (slot 0); a dimension
    # advances only when every inner dimension has wrapped ('upcond').
    upcond = None
    for (sink_multipat_cur_offset, sink_multipat_size,
         sink_multipat_stride, sink_multipat_count) in zip(
            var.sink_multipat_cur_offsets, var.sink_multipat_sizes[0],
            var.sink_multipat_strides[0], var.sink_multipat_counts[0]):
        self.seq.If(var.sink_multipat_fsm.here, upcond)(
            sink_multipat_cur_offset.add(sink_multipat_stride),
            sink_multipat_count.dec()
        )
        reset_cond = sink_multipat_count == 0
        self.seq.If(var.sink_multipat_fsm.here, upcond, reset_cond)(
            sink_multipat_cur_offset(0),
            sink_multipat_count(sink_multipat_size - 1)
        )
        upcond = make_condition(upcond, reset_cond)

    fin_cond = upcond

    # When the active pattern finishes, shift the queued patterns
    # (offsets, sizes, strides) one slot toward slot 0.
    prev_offset = var.sink_multipat_offsets[0]
    for multipat_offset in var.sink_multipat_offsets[1:]:
        self.seq.If(fin_cond, var.sink_multipat_fsm.here)(
            prev_offset(multipat_offset)
        )
        prev_offset = multipat_offset

    prev_sizes = var.sink_multipat_sizes[0]
    for multipat_sizes in var.sink_multipat_sizes[1:]:
        for prev_size, size in zip(prev_sizes, multipat_sizes):
            self.seq.If(fin_cond, var.sink_multipat_fsm.here)(
                prev_size(size)
            )
        prev_sizes = multipat_sizes

    prev_strides = var.sink_multipat_strides[0]
    for multipat_strides in var.sink_multipat_strides[1:]:
        for prev_stride, stride in zip(prev_strides, multipat_strides):
            self.seq.If(fin_cond, var.sink_multipat_fsm.here)(
                prev_stride(stride)
            )
        prev_strides = multipat_strides

    self.seq.If(fin_cond, var.sink_multipat_fsm.here)(
        var.sink_multipat_num_patterns.dec()
    )

    var.sink_multipat_fsm.If(fin_cond,
                             var.sink_multipat_num_patterns == 0).goto_init()
def _set_flag(self, fsm, prefix='_set_flag'):
flag = self.module.TmpReg(initval=0, prefix=prefix)
cond = fsm.here
self.seq(
flag(0)
)
self.seq.If(cond)(
flag(1)
)
return flag
def _implement_stream(self):
    """Run hardware synthesis for this stream and record that it has been done."""
    self.implement()
    self.stream_synthesized = True
def _write_delay(self):
depth = self.pipeline_depth()
return depth + self.ram_delay
def _to_pattern(self, shape, order):
pattern = []
for p in order:
if not isinstance(p, int):
raise TypeError(
"Values of 'order' must be 'int', not %s" % str(type(p)))
size = shape[p]
basevalue = 1 if isinstance(size, int) else vtypes.Int(1)
stride = functools.reduce(lambda x, y: x * y,
shape[p + 1:], basevalue)
pattern.append((size, stride))
return tuple(pattern)
def _prefix(self, name):
return '%s_%s' % (self.name, name)
def _dataname(self, name):
return '%s_data' % self._prefix(name)
def _collect_substreams(self):
ret = []
for sub in self.substreams:
ret.extend(sub._collect_substreams())
return ret
def __getattr__(self, attr):
    """Resolve stream operators through BaseStream, auto-wiring a shared reset.

    Stateful operators (Reduce*, Counter*, Pulse*) are returned pre-bound to a
    stream-wide `reduce_reset` signal so all of them reset together.
    """
    f = BaseStream.__getattr__(self, attr)
    if (callable(f) and
        (f.__name__.startswith('Reduce') or
         f.__name__.startswith('Counter') or
         f.__name__.startswith('Pulse'))):
        # Lazily create the shared reset register (asserted at power-up,
        # initval=1) and its stream-level variable on first use.
        if self.reduce_reset is None:
            self.reduce_reset = self.module.Reg(
                '_'.join(['', self.name, 'reduce_reset']), initval=1)
            self.reduce_reset_var = self.Variable(
                self.reduce_reset, width=1)
        # Bind the shared reset as the operator's `reset` keyword argument.
        return functools.partial(f, reset=self.reduce_reset_var)
    return f
class Substream(BaseSubstream):
    """A stream instantiated as a component inside another stream.

    Wraps `substrm` so that the parent stream `strm` can drive its source and
    constant ports and read back its sink ports.
    """

    def __init__(self, module, clock, reset, substrm, strm=None):
        self.module = module
        self.clock = clock
        self.reset = reset
        self.reset_delay = 0
        BaseSubstream.__init__(self, substrm, strm)

    def _write_port(self, dataname, data):
        # Shared body of to_source()/to_constant(): allocate a write-condition
        # register for the port and forward the write to BaseSubstream.
        cond = self.module.Reg(compiler._tmp_name(self.name('%s_cond' % dataname)),
                               initval=0)
        BaseSubstream.write(self, dataname, data, cond)

    def to_source(self, name, data):
        """Drive `data` into the sub-stream's source port `name`."""
        self._write_port(self.substrm._dataname(name), data)

    def to_constant(self, name, data):
        """Drive `data` into the sub-stream's constant port `name`."""
        self._write_port(self.substrm._dataname(name), data)

    def from_sink(self, name):
        """Read the sub-stream's sink port `name`."""
        return BaseSubstream.read(self, self.substrm._dataname(name))

    def _collect_substreams(self):
        """Collect this substream plus all nested ones, accumulating reset delays.

        Every collected entry (including self) has its reset_delay increased by
        one cycle plus this substream's start stage.
        """
        self.reset_delay = 0
        collected = [self]
        collected.extend(self.substrm._collect_substreams())
        for sub in collected:
            sub.reset_delay += 1 + self.start_stage
        return collected
def add_mux(targ, cond, value):
    """Attach (or extend) a priority multiplexer on the assignment of `targ`.

    The first call assigns Mux(cond, value, 0); later calls nest the previous
    right-hand side as the new mux's default, so the newest condition has the
    highest priority.
    """
    prev_assign = targ._get_assign()
    if prev_assign:
        prev_value = prev_assign.statement.right
        prev_assign.overwrite_right(
            vtypes.Mux(cond, value, prev_value))
        # Re-append so the rewritten assignment is emitted in module order.
        targ.module.remove(prev_assign)
        targ.module.append(prev_assign)
    else:
        targ.assign(vtypes.Mux(cond, value, 0))
|
def is_value_in_dict(key, my_dict):
    """Return True if `key` is present as a key in `my_dict`.

    NOTE(review): despite the name, this tests KEY membership, not values —
    confirm with callers before renaming.
    """
    return key in my_dict.keys()
|
<reponame>thevetdoctor/obainstaclone
import React from 'react';
import { SafeAreaView, ScrollView } from 'react-native';
import Header from './Header';
import FooterIcons from './FooterIcons';
import Post from './Post';
import posts from './posts';
import Stories from './Stories';
import styles from './styles';
function HomeScreen({navigation}) {
return (
<SafeAreaView style={styles.container}>
<Header navigation={navigation} />
<Stories />
<ScrollView>
{posts.map((item, idx) => (
<Post post={item} key={idx}/>
))}
</ScrollView>
{/* <FooterIcons /> */}
</SafeAreaView>
)
}
export default HomeScreen;
|
<gh_stars>1-10
destructuring_arrays: {
input: {
{const [aa, bb] = cc;}
{const [aa, [bb, cc]] = dd;}
{let [aa, bb] = cc;}
{let [aa, [bb, cc]] = dd;}
var [aa, bb] = cc;
var [aa, [bb, cc]] = dd;
var [,[,,,,,],,,zz,] = xx; // Trailing comma
var [,,zzz,,] = xxx; // Trailing comma after hole
}
expect: {
{const [aa, bb] = cc;}
{const [aa, [bb, cc]] = dd;}
{let [aa, bb] = cc;}
{let [aa, [bb, cc]] = dd;}
var [aa, bb] = cc;
var [aa, [bb, cc]] = dd;
var [,[,,,,,],,,zz] = xx;
var [,,zzz,,] = xxx;
}
}
destructuring_arrays_holes: {
input: {
var [,,,,] = a;
var [,,b,] = c;
var [d,,] = e;
}
expect_exact: "var[,,,,]=a;var[,,b]=c;var[d,,]=e;"
}
destructuring_objects: {
input: {
{const {aa, bb} = {aa:1, bb:2};}
{const {aa, bb: {cc, dd}} = {aa:1, bb: {cc:2, dd: 3}};}
{let {aa, bb} = {aa:1, bb:2};}
{let {aa, bb: {cc, dd}} = {aa:1, bb: {cc:2, dd: 3}};}
var {aa, bb} = {aa:1, bb:2};
var {aa, bb: {cc, dd}} = {aa:1, bb: {cc:2, dd: 3}};
}
expect: {
{const {aa, bb} = {aa:1, bb:2};}
{const {aa, bb: {cc, dd}} = {aa:1, bb: {cc:2, dd: 3}};}
{let {aa, bb} = {aa:1, bb:2};}
{let {aa, bb: {cc, dd}} = {aa:1, bb: {cc:2, dd: 3}};}
var {aa, bb} = {aa:1, bb:2};
var {aa, bb: {cc, dd}} = {aa:1, bb: {cc:2, dd: 3}};
}
}
destructuring_objects_trailing_elision: {
beautify = {
ecma: 6
}
input: {
var {cc,} = foo;
}
expect_exact: "var{cc}=foo;"
}
nested_destructuring_objects: {
beautify = {
ecma: 6
}
input: {
const [{a},b] = c;
let [{d},e] = f;
var [{g},h] = i;
}
expect_exact: 'const[{a},b]=c;let[{d},e]=f;var[{g},h]=i;';
}
destructuring_constdef_in_loops: {
beautify = {
ecma: 6
}
input: {
for (const [x,y] in pairs);
for (const [a] = 0;;);
for (const {c} of cees);
}
expect_exact: "for(const[x,y]in pairs);for(const[a]=0;;);for(const{c}of cees);"
}
destructuring_letdef_in_loops: {
beautify = {
ecma: 6
}
input: {
for (let [x,y] in pairs);
for (let [a] = 0;;);
for (let {c} of cees);
}
expect_exact: "for(let[x,y]in pairs);for(let[a]=0;;);for(let{c}of cees);"
}
destructuring_vardef_in_loops: {
beautify = {
ecma: 6
}
input: {
for (var [x,y] in pairs);
for (var [a] = 0;;);
for (var {c} of cees);
}
expect_exact: "for(var[x,y]in pairs);for(var[a]=0;;);for(var{c}of cees);"
}
destructuring_expressions: {
beautify = {
ecma: 6
}
input: {
({a, b});
[{a}];
f({x});
}
expect_exact: "({a,b});[{a}];f({x});"
}
destructuring_remove_unused_1: {
options = {
unused: true
}
input: {
function a() {
var unused = "foo";
var a = [1];
var [b] = a;
f(b);
}
function b() {
var unused = "foo";
var a = {b: 1};
var {b} = a;
f(b);
}
function c() {
var unused = "foo";
var a = [[1]];
var [[b]] = a;
f(b);
}
function d() {
var unused = "foo";
var a = {b: {b:1}};
var {b:{b}} = a;
f(b);
}
function e() {
var unused = "foo";
var a = [1, 2, 3, 4, 5];
var x = [[1, 2, 3]];
var y = {h: 1};
var [b, ...c] = a;
var [...[e, f]] = x;
var [...{g: h}] = y;
f(b, c, e, f, g);
}
}
expect: {
function a() {
var a = [1];
var [b] = a;
f(b);
}
function b() {
var a = {b: 1};
var {b} = a;
f(b);
}
function c() {
var a = [[1]];
var [[b]] = a;
f(b);
}
function d() {
var a = {b: {b:1}};
var {b:{b}} = a;
f(b);
}
function e() {
var a = [1, 2, 3, 4, 5];
var x = [[1, 2, 3]];
var y = {h: 1};
var [b, ...c] = a;
var [...[e, f]] = x;
var [...{g: h}] = y;
f(b, c, e, f, g);
}
}
}
destructuring_remove_unused_2: {
options = {
unused: true
}
input: {
function a() {
var unused = "foo";
var a = [,,1];
var [b] = a;
f(b);
}
function b() {
var unused = "foo";
var a = [{a: [1]}];
var [{b: a}] = a;
f(b);
}
}
expect: {
function a() {
var a = [,,1];
var [b] = a;
f(b);
}
function b() {
var a = [{a: [1]}];
var [{b: a}] = a;
f(b);
}
}
}
object_destructuring_may_need_parentheses: {
beautify = {
ecma: 6
}
input: {
({a, b} = {a: 1, b: 2});
}
expect_exact: "({a,b}={a:1,b:2});"
}
destructuring_with_undefined_as_default_assignment: {
options = {
evaluate: true
}
input: {
[foo = undefined] = bar;
[foo = void 0] = bar;
}
expect: {
[foo] = bar;
[foo] = bar;
}
}
destructuring_dont_evaluate_with_undefined_as_default_assignment: {
options = {
evaluate: false
}
input: {
[foo = undefined] = bar;
}
expect: {
[foo = void 0] = bar;
}
}
reduce_vars: {
options = {
reduce_funcs: true,
reduce_vars: true,
}
input: {
{const [aa, [bb, cc]] = dd;}
{let [aa, [bb, cc]] = dd;}
var [aa, [bb, cc]] = dd;
[aa, [bb, cc]] = dd;
{const {aa, bb: {cc, dd}} = {aa:1, bb: {cc:2, dd: 3}};}
{let {aa, bb: {cc, dd}} = {aa:1, bb: {cc:2, dd: 3}};}
var {aa, bb: {cc, dd}} = {aa:1, bb: {cc:2, dd: 3}};
({aa, bb: {cc, dd}} = {aa:1, bb: {cc:2, dd: 3}});
const [{a},b] = c;
let [{d},e] = f;
var [{g},h] = i;
[{a},b] = c;
for (const [x,y] in pairs);
for (let [x,y] in pairs);
for (var [x,y] in pairs);
for ([x,y] in pairs);
}
expect: {
{const [aa, [bb, cc]] = dd;}
{let [aa, [bb, cc]] = dd;}
var [aa, [bb, cc]] = dd;
[aa, [bb, cc]] = dd;
{const {aa, bb: {cc, dd}} = {aa:1, bb: {cc:2, dd: 3}};}
{let {aa, bb: {cc, dd}} = {aa:1, bb: {cc:2, dd: 3}};}
var {aa, bb: {cc, dd}} = {aa:1, bb: {cc:2, dd: 3}};
({aa, bb: {cc, dd}} = {aa:1, bb: {cc:2, dd: 3}});
const [{a},b] = c;
let [{d},e] = f;
var [{g},h] = i;
[{a},b] = c;
for (const [x,y] in pairs);
for (let [x,y] in pairs);
for (var [x,y] in pairs);
for ([x,y] in pairs);
}
}
unused: {
options = {
unused: true,
}
input: {
let { foo: [, , ...a] } = { foo: [1, 2, 3, 4], bar: 5 };
console.log(a);
}
expect: {
let { foo: [, , ...a] } = { foo: [1, 2, 3, 4], bar: 5 };
console.log(a);
}
}
issue_1886: {
options = {
collapse_vars: true,
}
input: {
let [a] = [1];
console.log(a);
}
expect: {
let [a] = [1];
console.log(a);
}
}
destructuring_decl_of_numeric_key: {
options = {
evaluate: true,
unused: true,
}
input: {
let { 3: x } = { [1 + 2]: 42 };
console.log(x);
}
expect: {
let { 3: x } = { [3]: 42 };
console.log(x);
}
expect_stdout: "42"
node_version: ">=6"
}
destructuring_decl_of_computed_key: {
options = {
evaluate: true,
unused: true,
}
input: {
let four = 4;
let { [7 - four]: x } = { [1 + 2]: 42 };
console.log(x);
}
expect: {
let four = 4;
let { [7 - four]: x } = { [3]: 42 };
console.log(x);
}
expect_stdout: "42"
node_version: ">=6"
}
destructuring_assign_of_numeric_key: {
options = {
evaluate: true,
unused: true,
}
input: {
let x;
({ 3: x } = { [1 + 2]: 42 });
console.log(x);
}
expect: {
let x;
({ 3: x } = { [3]: 42 });
console.log(x);
}
expect_stdout: "42"
node_version: ">=6"
}
destructuring_assign_of_computed_key: {
options = {
evaluate: true,
unused: true,
}
input: {
let x;
let four = 4;
({ [(5 + 2) - four]: x } = { [1 + 2]: 42 });
console.log(x);
}
expect: {
let x;
let four = 4;
({ [7 - four]: x } = { [3]: 42 });
console.log(x);
}
expect_stdout: "42"
node_version: ">=6"
}
mangle_destructuring_decl: {
options = {
evaluate: true,
unused: true,
}
mangle = {
}
input: {
function test(opts) {
let a = opts.a || { e: 7, n: 8 };
let { t, e, n, s = 5 + 4, o, r } = a;
console.log(t, e, n, s, o, r);
}
test({a: { t: 1, e: 2, n: 3, s: 4, o: 5, r: 6 }});
test({});
}
expect: {
function test(t) {
let e = t.a || { e: 7, n: 8 };
let {t: n, e: o, n: s, s: l = 9, o: a, r: c} = e;
console.log(n, o, s, l, a, c);
}
test({ a: { t: 1, e: 2, n: 3, s: 4, o: 5, r: 6 } });
test({});
}
expect_stdout: [
"1 2 3 4 5 6",
"undefined 7 8 9 undefined undefined",
]
node_version: ">=6"
}
mangle_destructuring_decl_collapse_vars: {
options = {
collapse_vars: true,
evaluate: true,
unused: true,
}
mangle = {
}
input: {
function test(opts) {
let a = opts.a || { e: 7, n: 8 };
let { t, e, n, s = 5 + 4, o, r } = a;
console.log(t, e, n, s, o, r);
}
test({a: { t: 1, e: 2, n: 3, s: 4, o: 5, r: 6 }});
test({});
}
expect: {
function test(t) {
let e = t.a || { e: 7, n: 8 };
let {t: n, e: o, n: s, s: l = 9, o: a, r: c} = e;
console.log(n, o, s, l, a, c);
}
test({ a: { t: 1, e: 2, n: 3, s: 4, o: 5, r: 6 } });
test({});
}
expect_stdout: [
"1 2 3 4 5 6",
"undefined 7 8 9 undefined undefined",
]
node_version: ">=6"
}
mangle_destructuring_assign_toplevel_true: {
options = {
toplevel: true,
evaluate: true,
unused: true,
}
mangle = {
toplevel: true,
}
beautify = {
ecma: 6
}
input: {
function test(opts) {
let s, o, r;
let a = opts.a || { e: 7, n: 8 };
({ t, e, n, s = 5 + 4, o, r } = a);
console.log(t, e, n, s, o, r);
}
let t, e, n;
test({a: { t: 1, e: 2, n: 3, s: 4, o: 5, r: 6 }});
test({});
}
expect: {
function e(e) {
let l, s, a;
let c = e.a || { e: 7, n: 8 };
({t: n, e: o, n: t, s: l = 9, o: s, r: a} = c);
console.log(n, o, t, l, s, a);
}
let n, o, t;
e({ a: { t: 1, e: 2, n: 3, s: 4, o: 5, r: 6 } });
e({});
}
expect_stdout: [
"1 2 3 4 5 6",
"undefined 7 8 9 undefined undefined",
]
node_version: ">=6"
}
mangle_destructuring_assign_toplevel_false: {
options = {
toplevel: false,
evaluate: true,
unused: true,
}
mangle = {
toplevel: false,
}
beautify = {
ecma: 6
}
input: {
function test(opts) {
let s, o, r;
let a = opts.a || { e: 7, n: 8 };
({ t, e, n, s = 9, o, r } = a);
console.log(t, e, n, s, o, r);
}
let t, e, n;
test({a: { t: 1, e: 2, n: 3, s: 4, o: 5, r: 6 }});
test({});
}
expect: {
function test(o) {
let s, l, a;
let c = o.a || { e: 7, n: 8 };
({t, e, n, s = 9, o: l, r: a} = c);
console.log(t, e, n, s, l, a);
}
let t, e, n;
test({ a: { t: 1, e: 2, n: 3, s: 4, o: 5, r: 6 } });
test({});
}
expect_stdout: [
"1 2 3 4 5 6",
"undefined 7 8 9 undefined undefined",
]
node_version: ">=6"
}
mangle_destructuring_decl_array: {
options = {
evaluate: true,
unused: true,
toplevel: true,
}
mangle = {
toplevel: true,
}
beautify = {
ecma: 6
}
input: {
var [, t, e, n, s, o = 2, r = [ 1 + 2 ]] = [ 9, 8, 7, 6 ];
console.log(t, e, n, s, o, r);
}
expect: {
var [, o, l, a, c, e = 2, g = [ 3 ]] = [ 9, 8, 7, 6 ];
console.log(o, l, a, c, e, g);
}
expect_stdout: "8 7 6 undefined 2 [ 3 ]"
node_version: ">=6"
}
anon_func_with_destructuring_args: {
options = {
evaluate: true,
unused: true,
toplevel: true,
}
mangle = {
toplevel: true,
}
beautify = {
ecma: 5,
}
input: {
(function({foo = 1 + 0, bar = 2}, [car = 3, far = 4]) {
console.log(foo, bar, car, far);
})({bar: 5 - 0}, [, 6]);
}
expect: {
(function({foo: o = 1, bar: n = 2}, [a = 3, b = 4]) {
console.log(o, n, a, b);
})({bar: 5}, [, 6]);
}
expect_stdout: "1 5 3 6"
node_version: ">=6"
}
arrow_func_with_destructuring_args: {
options = {
evaluate: true,
unused: true,
toplevel: true,
}
mangle = {
toplevel: true,
}
beautify = {
ecma: 5,
}
input: {
(({foo = 1 + 0, bar = 2}, [car = 3, far = 4]) => {
console.log(foo, bar, car, far);
})({bar: 5 - 0}, [, 6]);
}
expect: {
(({foo: o = 1, bar: a = 2}, [b = 3, l = 4]) => {
console.log(o, a, b, l);
})({bar: 5}, [, 6]);
}
expect_stdout: "1 5 3 6"
node_version: ">=6"
}
issue_2044_ecma_5: {
beautify = {
beautify: false,
ecma: 5,
}
input: {
({x : a = 1, y = 2 + b, z = 3 - c} = obj);
}
expect_exact: "({x:a=1,y:y=2+b,z:z=3-c}=obj);"
}
issue_2044_ecma_6: {
beautify = {
beautify: false,
ecma: 6,
}
input: {
({x : a = 1, y = 2 + b, z = 3 - c} = obj);
}
expect_exact: "({x:a=1,y=2+b,z=3-c}=obj);"
}
issue_2044_ecma_5_beautify: {
beautify = {
beautify: true,
ecma: 5,
}
input: {
({x : a = 1, y = 2 + b, z = 3 - c} = obj);
}
expect_exact: "({x: a = 1, y: y = 2 + b, z: z = 3 - c} = obj);"
}
issue_2044_ecma_6_beautify: {
beautify = {
beautify: true,
ecma: 6,
}
input: {
({x : a = 1, y = 2 + b, z = 3 - c} = obj);
}
expect_exact: "({x: a = 1, y = 2 + b, z = 3 - c} = obj);"
}
issue_2140: {
options = {
unused: true,
}
input: {
!function() {
var t = {};
console.log(([t.a] = [42])[0]);
}();
}
expect: {
!function() {
var t = {};
console.log(([t.a] = [42])[0]);
}();
}
expect_stdout: "42"
node_version: ">=6"
}
issue_3205_1: {
options = {
inline: 3,
reduce_vars: true,
side_effects: true,
unused: true,
}
input: {
function f(a) {
function g() {
var {b, c} = a;
console.log(b, c);
}
g();
}
f({ b: 2, c: 3 });
}
expect: {
function f(a) {
(function() {
var {b: b, c: c} = a;
console.log(b, c);
})();
}
f({ b: 2, c: 3 });
}
expect_stdout: "2 3"
node_version: ">=6"
}
issue_3205_2: {
options = {
inline: 3,
side_effects: true,
unused: true,
}
input: {
(function() {
function f() {
var o = { a: "PASS" }, {a: x} = o;
console.log(x);
}
f();
})();
}
expect: {
(function() {
function f() {
var o = { a: "PASS" }, {a: x} = o;
console.log(x);
}
f();
})();
}
expect_stdout: "PASS"
node_version: ">=6"
}
issue_3205_3: {
options = {
inline: 3,
side_effects: true,
unused: true,
}
input: {
(function() {
function f(o, {a: x} = o) {
console.log(x);
}
f({ a: "PASS" });
})();
}
expect: {
(function() {
function f(o, {a: x} = o) {
console.log(x);
}
f({ a: "PASS" });
})();
}
expect_stdout: "PASS"
node_version: ">=6"
}
issue_3205_4: {
options = {
inline: 3,
side_effects: true,
unused: true,
}
input: {
(function() {
function f(o) {
var {a: x} = o;
console.log(x);
}
f({ a: "PASS" });
})();
}
expect: {
(function() {
function f(o) {
var {a: x} = o;
console.log(x);
}
f({ a: "PASS" });
})();
}
expect_stdout: "PASS"
node_version: ">=6"
}
issue_3205_5: {
options = {
inline: 3,
passes: 4,
reduce_vars: true,
side_effects: true,
unused: true,
}
input: {
(function() {
function f(g) {
var o = g, {a: x} = o;
console.log(x);
}
f({ a: "PASS" });
})();
}
expect: {
!function(g) {
var {a: x} = {
a: "PASS"
};
console.log(x);
}();
}
expect_stdout: "PASS"
node_version: ">=6"
}
unused_destructuring_decl_1: {
options = {
pure_getters: true,
toplevel: true,
unused: true,
}
input: {
let { x: L, y } = { x: 2 };
var { U: u, V } = { V: 3 };
const { C, D } = { C: 1, D: 4 };
console.log(L, V);
}
expect: {
let { x: L } = { x: 2 };
var { V } = { V: 3 };
console.log(L, V);
}
expect_stdout: "2 3"
node_version: ">=6"
}
unused_destructuring_decl_2: {
options = {
pure_getters: true,
toplevel: false,
unused: true,
}
input: {
const { a, b: c, d = new Object(1) } = { b: 7 };
let { e, f: g, h = new Object(2) } = { e: 8 };
var { w, x: y, z = new Object(3) } = { w: 4, x: 5, y: 6 };
console.log(c, e, z + 0);
}
expect: {
const { a, b: c, d = new Object(1) } = { b: 7 };
let { e, f: g, h = new Object(2) } = { e: 8 };
var { w, x: y, z = new Object(3) } = { w: 4, x: 5, y: 6 };
console.log(c, e, z + 0);
}
expect_stdout: "7 8 3"
node_version: ">=6"
}
unused_destructuring_decl_3: {
options = {
pure_getters: false,
toplevel: true,
unused: true,
}
input: {
const { a, b: c, d = new Object(1) } = { b: 7 };
let { e, f: g, h = new Object(2) } = { e: 8 };
var { w, x: y, z = new Object(3) } = { w: 4, x: 5, y: 6 };
console.log(c, e, z + 0);
}
expect: {
const { a, b: c, d = new Object(1) } = { b: 7 };
let { e, f: g, h = new Object(2) } = { e: 8 };
var { w, x: y, z = new Object(3) } = { w: 4, x: 5, y: 6 };
console.log(c, e, z + 0);
}
expect_stdout: "7 8 3"
node_version: ">=6"
}
unused_destructuring_decl_4: {
options = {
pure_getters: true,
toplevel: true,
unused: false,
}
input: {
const { a, b: c, d = new Object(1) } = { b: 7 };
let { e, f: g, h = new Object(2) } = { e: 8 };
var { w, x: y, z = new Object(3) } = { w: 4, x: 5, y: 6 };
console.log(c, e, z + 0);
}
expect: {
const { a, b: c, d = new Object(1) } = { b: 7 };
let { e, f: g, h = new Object(2) } = { e: 8 };
var { w, x: y, z = new Object(3) } = { w: 4, x: 5, y: 6 };
console.log(c, e, z + 0);
}
expect_stdout: "7 8 3"
node_version: ">=6"
}
unused_destructuring_decl_5: {
options = {
pure_getters: true,
toplevel: true,
top_retain: [ "a", "e", "w" ],
unused: true,
}
input: {
const { a, b: c, d = new Object(1) } = { b: 7 };
let { e, f: g, h = new Object(2) } = { e: 8 };
var { w, x: y, z = new Object(3) } = { w: 4, x: 5, y: 6 };
console.log(c, e, z + 0);
}
expect: {
const { a, b: c, d = new Object(1) } = { b: 7 };
let { e, h = new Object(2) } = { e: 8 };
var { w, z = new Object(3) } = { w: 4, x: 5, y: 6 };
console.log(c, e, z + 0);
}
expect_stdout: "7 8 3"
node_version: ">=6"
}
unused_destructuring_function_param: {
options = {
pure_getters: true,
unused: true,
}
input: {
function foo({w = console.log("side effect"), x, y: z}) {
console.log(x);
}
foo({x: 1, y: 2, z: 3});
}
expect: {
function foo({w = console.log("side effect"), x}) {
console.log(x);
}
foo({x: 1, y: 2, z: 3});
}
expect_stdout: [
"side effect",
"1",
]
node_version: ">=6"
}
unused_destructuring_arrow_param: {
options = {
pure_getters: true,
unused: true,
}
input: {
let bar = ({w = console.log("side effect"), x, y: z}) => {
console.log(x);
};
bar({x: 4, y: 5, z: 6});
}
expect: {
let bar = ({w = console.log("side effect"), x}) => {
console.log(x);
};
bar({x: 4, y: 5, z: 6});
}
expect_stdout: [
"side effect",
"4",
]
node_version: ">=6"
}
unused_destructuring_object_method_param: {
options = {
pure_getters: true,
unused: true,
}
input: {
({
baz({w = console.log("side effect"), x, y: z}) {
console.log(x);
}
}).baz({x: 7, y: 8, z: 9});
}
expect: {
({
baz({w = console.log("side effect"), x}) {
console.log(x);
}
}).baz({x: 7, y: 8, z: 9});
}
expect_stdout: [
"side effect",
"7",
]
node_version: ">=6"
}
unused_destructuring_class_method_param: {
options = {
pure_getters: true,
unused: true,
}
input: {
(new class {
baz({w = console.log("side effect"), x, y: z}) {
console.log(x);
}
}).baz({x: 7, y: 8, z: 9});
}
expect: {
(new class {
baz({w = console.log("side effect"), x}) {
console.log(x);
}
}).baz({x: 7, y: 8, z: 9});
}
expect_stdout: [
"side effect",
"7",
]
node_version: ">=6"
}
unused_destructuring_getter_side_effect_1: {
options = {
pure_getters: false,
unused: true,
}
input: {
function extract(obj) {
const { a, b } = obj;
console.log(b);
}
extract({a: 1, b: 2});
extract({
get a() {
var s = "side effect";
console.log(s);
return s;
},
b: 4,
});
}
expect: {
function extract(obj) {
const { a, b } = obj;
console.log(b);
}
extract({a: 1, b: 2});
extract({
get a() {
var s = "side effect";
console.log(s);
return s;
},
b: 4,
});
}
expect_stdout: [
"2",
"side effect",
"4",
]
node_version: ">=6"
}
unused_destructuring_getter_side_effect_2: {
options = {
pure_getters: true,
unused: true,
}
input: {
function extract(obj) {
const { a, b } = obj;
console.log(b);
}
extract({a: 1, b: 2});
extract({
get a() {
var s = "side effect";
console.log(s);
return s;
},
b: 4,
});
}
expect: {
function extract(obj) {
const { b } = obj;
console.log(b);
}
extract({a: 1, b: 2});
extract({
get a() {
var s = "side effect";
console.log(s);
return s;
},
b: 4,
});
}
// No `expect_stdout` clause here because `pure_getters`
// drops the getter side effect as expected and produces
// different output than the original `input` code.
}
unused_destructuring_assign_1: {
options = {
pure_getters: true,
unused: true,
}
input: {
function extract(obj) {
var a;
let b;
({ a, b } = obj);
console.log(b);
}
extract({a: 1, b: 2});
extract({b: 4});
}
expect: {
function extract(obj) {
var a;
let b;
({ a, b } = obj); // TODO: future optimization opportunity
console.log(b);
}
extract({a: 1, b: 2});
extract({b: 4});
}
expect_stdout: [
"2",
"4",
]
node_version: ">=6"
}
unused_destructuring_assign_2: {
options = {
pure_getters: false,
unused: true,
}
input: {
function extract(obj) {
var a;
let b;
({ a, b } = obj);
console.log(b);
}
extract({a: 1, b: 2});
extract({
get a() {
var s = "side effect";
console.log(s);
return s;
},
b: 4,
});
}
expect: {
function extract(obj) {
var a;
let b;
({ a, b } = obj);
console.log(b);
}
extract({a: 1, b: 2});
extract({
get a() {
var s = "side effect";
console.log(s);
return s;
},
b: 4,
});
}
expect_stdout: [
"2",
"side effect",
"4",
]
node_version: ">=6"
}
export_unreferenced_declarations_1: {
options = {
module: true,
pure_getters: true,
unused: true,
}
beautify = {
beautify: false,
ecma: 6,
}
input: {
export const { keys } = Object;
export let { L, M } = Object;
export var { V, W } = Object;
}
expect_exact: "export const{keys}=Object;export let{L,M}=Object;export var{V,W}=Object;"
}
export_unreferenced_declarations_2: {
options = {
module: true,
pure_getters: true,
unused: true,
}
input: {
var {unused} = obj;
export const [{a, b = 1}] = obj;
export let [[{c, d = 2}]] = obj;
export var [, [{e, f = 3}]] = obj;
}
expect: {
obj;
export const [{a, b = 1}] = obj;
export let [[{c, d = 2}]] = obj;
export var [, [{e, f = 3}]] = obj;
}
}
export_function_containing_destructuring_decl: {
options = {
module: true,
pure_getters: true,
unused: true,
}
input: {
export function f() {
let [{x, y, z}] = [{x: 1, y: 2}];
return x;
}
}
expect: {
export function f() {
let [{x}] = [{x: 1, y: 2}];
return x;
}
}
}
unused_destructuring_declaration_complex_1: {
options = {
toplevel: true,
pure_getters: true,
unused: true,
}
input: {
const [, w, , x, {y, z}] = [1, 2, 3, 4, {z: 5}];
console.log(x, z);
}
expect: {
// TODO: unused destructuring array declarations not optimized
const [, w, , x, {z}] = [1, 2, 3, 4, {z: 5}];
console.log(x, z);
}
expect_stdout: "4 5"
node_version: ">=6"
}
unused_destructuring_declaration_complex_2: {
options = {
toplevel: true,
pure_getters: false,
unused: true,
}
input: {
const [, w, , x, {y, z}] = [1, 2, 3, 4, {z: 5}];
console.log(x, z);
}
expect: {
const [, w, , x, {y, z}] = [1, 2, 3, 4, {z: 5}];
console.log(x, z);
}
expect_stdout: "4 5"
node_version: ">=6"
}
unused_destructuring_multipass: {
options = {
conditionals: true,
evaluate: true,
toplevel: true,
passes: 2,
pure_getters: true,
side_effects: true,
unused: true,
}
input: {
let { w, x: y, z } = { x: 1, y: 2, z: 3 };
console.log(y);
if (0) {
console.log(z);
}
}
expect: {
let { x: y } = { x: 1, y: 2, z: 3 };
console.log(y);
}
expect_stdout: "1"
node_version: ">=6"
}
issue_t111_1: {
options = {
toplevel: true,
unused: true,
}
input: {
var p = x => (console.log(x), x), unused = p(1), {} = p(2);
}
expect: {
var p = x => (console.log(x), x), {} = (p(1), p(2));
}
expect_stdout: [
"1",
"2",
]
node_version: ">=6"
}
issue_t111_2a: {
options = {
toplevel: true,
unused: true,
}
input: {
var p = x => (console.log(x), x), a = p(1), {} = p(2), c = p(3), d = p(4);
}
expect: {
var p = x => (console.log(x), x), {} = (p(1), p(2));
p(3), p(4);
}
expect_stdout: [
"1",
"2",
"3",
"4",
]
node_version: ">=6"
}
issue_t111_2b: {
options = {
toplevel: true,
unused: true,
}
input: {
let p = x => (console.log(x), x), a = p(1), {} = p(2), c = p(3), d = p(4);
}
expect: {
let p = x => (console.log(x), x), {} = (p(1), p(2));
p(3), p(4);
}
expect_stdout: [
"1",
"2",
"3",
"4",
]
node_version: ">=6"
}
issue_t111_2c: {
options = {
toplevel: true,
unused: true,
}
input: {
const p = x => (console.log(x), x), a = p(1), {} = p(2), c = p(3), d = p(4);
}
expect: {
const p = x => (console.log(x), x), {} = (p(1), p(2));
p(3), p(4);
}
expect_stdout: [
"1",
"2",
"3",
"4",
]
node_version: ">=6"
}
issue_t111_3: {
options = {
toplevel: true,
unused: true,
}
input: {
let p = x => (console.log(x), x), a = p(1), {} = p(2), c = p(3), {} = p(4);
}
expect: {
let p = x => (console.log(x), x), {} = (p(1), p(2)), {} = (p(3), p(4));
}
expect_stdout: [
"1",
"2",
"3",
"4",
]
node_version: ">=6"
}
issue_t111_4: {
options = {
toplevel: true,
unused: true,
}
input: {
let p = x => (console.log(x), x), a = 1, {length} = [0], c = 3, {x} = {x: 2};
p(`${length} ${x}`);
}
expect: {
let p = x => (console.log(x), x), {length} = [0], {x} = {x: 2};
p(`${length} ${x}`);
}
expect_stdout: "1 2"
node_version: ">=6"
}
empty_object_destructuring_1: {
options = {
pure_getters: false,
toplevel: true,
unused: true,
}
input: {
var {} = Object;
let {L} = Object, L2 = "foo";
const bar = "bar", {prop: C1, C2 = console.log("side effect"), C3} = Object;
}
expect: {
var {} = Object;
let {L: L} = Object;
const {prop: C1, C2: C2 = console.log("side effect"), C3: C3} = Object;
}
expect_stdout: "side effect"
node_version: ">=6"
}
empty_object_destructuring_2: {
options = {
pure_getters: "strict",
toplevel: true,
unused: true,
}
input: {
var {} = Object;
let {L} = Object, L2 = "foo";
const bar = "bar", {prop: C1, C2 = console.log("side effect"), C3} = Object;
}
expect: {
var {} = Object;
let {L: L} = Object;
const {prop: C1, C2: C2 = console.log("side effect"), C3: C3} = Object;
}
expect_stdout: "side effect"
node_version: ">=6"
}
empty_object_destructuring_3: {
options = {
pure_getters: true,
toplevel: true,
unused: true,
}
input: {
var {} = Object;
let {L} = Object, L2 = "foo";
const bar = "bar", {prop: C1, C2 = console.log("side effect"), C3} = Object;
}
expect: {
Object;
Object;
const {C2: C2 = console.log("side effect")} = Object;
}
expect_stdout: "side effect"
node_version: ">=6"
}
empty_object_destructuring_4: {
options = {
pure_getters: true,
toplevel: true,
unsafe: true,
unused: true,
}
input: {
var {} = Object;
let {L} = Object, L2 = "foo";
const bar = "bar", {prop: C1, C2 = console.log("side effect"), C3} = Object;
}
expect: {
const {C2: C2 = console.log("side effect")} = Object;
}
expect_stdout: "side effect"
node_version: ">=6"
}
empty_object_destructuring_misc: {
options = {
pure_getters: true,
toplevel: true,
unsafe: true,
unused: true,
}
input: {
let out = [],
foo = (out.push(0), 1),
{} = {k: 9},
bar = out.push(2),
{unused} = (out.push(3), {unused: 7}),
{a: b, prop, w, x: y, z} = {prop: 8},
baz = (out.push(4), 5);
console.log(`${foo} ${prop} ${baz} ${JSON.stringify(out)}`);
}
expect: {
let out = [],
foo = (out.push(0), 1),
{prop: prop} = (out.push(2), out.push(3), {prop: 8}),
baz = (out.push(4), 5);
console.log(`${foo} ${prop} ${baz} ${JSON.stringify(out)}`);
}
expect_stdout: "1 8 5 [0,2,3,4]"
node_version: ">=6"
}
destructure_empty_array_1: {
options = {
pure_getters: false,
toplevel: true,
unsafe: true,
unused: true,
}
input: {
let {} = Object, [] = {}, unused = console.log("not reached");
}
expect: {
let {} = Object, [] = {};
console.log("not reached");
}
expect_stdout: true // TypeError: {} is not iterable
node_version: ">=6"
}
destructure_empty_array_2: {
options = {
pure_getters: "strict",
toplevel: true,
unsafe: true,
unused: true,
}
input: {
let {} = Object, [] = {}, unused = console.log("not reached");
}
expect: {
let {} = Object, [] = {};
console.log("not reached");
}
expect_stdout: true // TypeError: {} is not iterable
node_version: ">=6"
}
destructure_empty_array_3: {
options = {
pure_getters: true,
toplevel: true,
unsafe: true,
unused: true,
}
input: {
let {} = Object, [] = {}, unused = console.log("not reached");
}
expect: {
let [] = {};
console.log("not reached");
}
expect_stdout: true // TypeError: {} is not iterable
node_version: ">=6"
}
|
/*
* Copyright 2014-2021 Real Logic Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.agrona.concurrent;
import org.agrona.ErrorHandler;
import org.agrona.concurrent.status.AtomicCounter;
import java.nio.channels.ClosedByInterruptException;
import java.util.Objects;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
/**
* Agent runner containing an {@link Agent} which is run on a {@link Thread}.
* <p>
* <b>Note:</b> An instance should only be started once and then discarded, it should not be reused.
*/
public class AgentRunner implements Runnable, AutoCloseable
{
    /**
     * Indicates that the runner is being closed.
     */
    @SuppressWarnings("InstantiatingAThreadWithDefaultRunMethod")
    public static final Thread TOMBSTONE = new Thread();

    /**
     * Default retry timeout for closing.
     */
    public static final int RETRY_CLOSE_TIMEOUT_MS = 5000;

    private volatile boolean isRunning = true;   // cleared to stop the duty cycle
    private volatile boolean isClosed = false;   // set once run() has fully exited

    private final AtomicCounter errorCounter;    // may be null (see constructor)
    private final ErrorHandler errorHandler;
    private final IdleStrategy idleStrategy;
    private final Agent agent;
    // null = not started; TOMBSTONE = closing; otherwise the thread running the agent.
    private final AtomicReference<Thread> thread = new AtomicReference<>();
    /**
     * Create an agent runner and initialise it.
     *
     * @param idleStrategy to use for the Agent run loop.
     * @param errorHandler to be called if a {@link Throwable} is encountered.
     * @param errorCounter to be incremented each time an exception is encountered. May be null.
     * @param agent        to be run in this thread.
     */
    public AgentRunner(
        final IdleStrategy idleStrategy,
        final ErrorHandler errorHandler,
        final AtomicCounter errorCounter,
        final Agent agent)
    {
        Objects.requireNonNull(idleStrategy, "idleStrategy");
        Objects.requireNonNull(errorHandler, "errorHandler");
        Objects.requireNonNull(agent, "agent");

        this.idleStrategy = idleStrategy;
        this.errorHandler = errorHandler;
        this.errorCounter = errorCounter;
        this.agent = agent;
    }
    /**
     * Start the given agent runner on a new thread created with the default
     * {@link Thread} constructor.
     *
     * @param runner the agent runner to start.
     * @return the new thread that has been started.
     */
    public static Thread startOnThread(final AgentRunner runner)
    {
        return startOnThread(runner, Thread::new);
    }
/**
* Start the given agent runner on a new thread.
*
* @param runner the agent runner to start.
* @param threadFactory the factory to use to create the thread.
* @return the new thread that has been started.
*/
public static Thread startOnThread(final AgentRunner runner, final ThreadFactory threadFactory)
{
final Thread thread = threadFactory.newThread(runner);
thread.setName(runner.agent().roleName());
thread.start();
return thread;
}
/**
* The {@link Agent} which is contained.
*
* @return {@link Agent} being contained.
*/
public Agent agent()
{
return agent;
}
/**
* Has the {@link Agent} been closed?
*
* @return has the {@link Agent} been closed?
*/
public boolean isClosed()
{
return isClosed;
}
/**
* Get the thread which is running that {@link Agent}.
* <p>
* If null then the runner has not been started. If {@link #TOMBSTONE} then the runner is being closed.
*
* @return the thread running the {@link Agent}.
*/
public Thread thread()
{
return thread.get();
}
/**
* Run an {@link Agent}.
* <p>
* This method does not return until the run loop is stopped via {@link #close()}.
*/
public void run()
{
try
{
if (!thread.compareAndSet(null, Thread.currentThread()))
{
return;
}
final IdleStrategy idleStrategy = this.idleStrategy;
final Agent agent = this.agent;
try
{
agent.onStart();
}
catch (final Throwable throwable)
{
errorHandler.onError(throwable);
isRunning = false;
}
while (isRunning)
{
doWork(idleStrategy, agent);
}
try
{
agent.onClose();
}
catch (final Throwable throwable)
{
errorHandler.onError(throwable);
}
}
finally
{
isClosed = true;
}
}
/**
* Stop the running Agent and cleanup.
* <p>
* This is equivalent to calling {@link AgentRunner#close(int, Consumer)}
* using the default {@link AgentRunner#RETRY_CLOSE_TIMEOUT_MS} value and a
* null action.
*/
public final void close()
{
close(RETRY_CLOSE_TIMEOUT_MS, null);
}
/**
* Stop the running Agent and cleanup.
* <p>
* This will wait for the work loop to exit. The close timeout parameter
* controls how long we should wait before retrying to stop the agent by
* interrupting the thread. If the calling thread has its interrupt flag
* set then this method can return early before waiting for the running
* agent to close.
* <p>
* An optional action can be invoked whenever we time out while waiting
* which accepts the agent runner thread as the parameter (e.g. to obtain
* and log a stack trace from the thread). If the action is null, a message
* is written to stderr. Please note that a retry close timeout of zero
* waits indefinitely, in which case the fail action is only called on interrupt.
*
* @param retryCloseTimeoutMs how long to wait before retrying.
* @param closeFailAction function to invoke before retrying after close timeout.
*/
public final void close(final int retryCloseTimeoutMs, final Consumer<Thread> closeFailAction)
{
isRunning = false;
final Thread thread = this.thread.getAndSet(TOMBSTONE);
if (null == thread)
{
try
{
agent.onClose();
}
catch (final Throwable throwable)
{
errorHandler.onError(throwable);
}
finally
{
isClosed = true;
}
}
else if (TOMBSTONE != thread)
{
while (true)
{
try
{
if (isClosed)
{
return;
}
thread.join(retryCloseTimeoutMs);
if (!thread.isAlive() || isClosed)
{
return;
}
failAction(closeFailAction, thread, "timeout, retrying...");
if (!thread.isInterrupted())
{
thread.interrupt();
}
}
catch (final InterruptedException ignore)
{
Thread.currentThread().interrupt();
failAction(closeFailAction, thread, "thread interrupt");
if (!isClosed && !thread.isInterrupted())
{
thread.interrupt();
Thread.yield();
}
return;
}
}
}
}
private void failAction(final Consumer<Thread> closeFailAction, final Thread thread, final String message)
{
if (null == closeFailAction)
{
System.err.println(agent.roleName() + " failed to close due to " + message);
}
else
{
closeFailAction.accept(thread);
}
}
private void doWork(final IdleStrategy idleStrategy, final Agent agent)
{
try
{
final int workCount = agent.doWork();
idleStrategy.idle(workCount);
if (0 >= workCount)
{
if (Thread.currentThread().isInterrupted())
{
isRunning = false;
}
}
}
catch (final InterruptedException | ClosedByInterruptException ignore)
{
isRunning = false;
Thread.currentThread().interrupt();
}
catch (final AgentTerminationException ex)
{
isRunning = false;
handleError(ex);
}
catch (final Throwable throwable)
{
handleError(throwable);
if (Thread.currentThread().isInterrupted())
{
isRunning = false;
}
}
}
private void handleError(final Throwable throwable)
{
if (null != errorCounter && isRunning && !errorCounter.isClosed())
{
errorCounter.increment();
}
errorHandler.onError(throwable);
}
}
|
const express = require('express');
const router = express.Router();
const AbortController = require('abort-controller');
const fetch = require('node-fetch');
const os = require('os');
const fs = require('fs');
const { exec } = require('child_process');
// Cache of already-converted PDFs, keyed by source URL (query string stripped).
const USE_CACHED = {};
// Preview endpoint (预览): download the source file, convert it to PDF with
// LibreOffice, and stream the result back. Conversions are cached in USE_CACHED.
router.get('/preview', async function (req, res) {
  const src = req.query.src;
  const attname = req.query.attname;
  // Guard: a missing `src` previously crashed below on src.split(...).
  if (!src) {
    res.status(400).send(JSON.stringify('Missing required parameter: src'));
    return;
  }
  // Stream `file` back, either inline as a PDF or as a named attachment.
  function _preview(file) {
    res.sendFile(file, {
      headers: {
        'Content-Type': attname
          ? 'application/octet-stream'
          : 'application/pdf',
        'Content-Disposition': attname
          ? `attachment; filename=${encodeURIComponent(attname)}`
          : 'inline',
      },
    });
  }
  const srcClear = src.split('?')[0]; // No queries
  const c = USE_CACHED[srcClear];
  if (c) {
    const exists = fs.existsSync(c);
    if (exists) {
      console.log('Use cached :', c);
      _preview(c);
      return;
    } else {
      // Cached file vanished from disk — drop the stale entry and re-convert.
      delete USE_CACHED[srcClear];
    }
  }
  await fileDownload(src, (err, file) => {
    if (err) {
      res.send(JSON.stringify(err));
    } else {
      convertPdf(file, (err2, file2) => {
        if (err2) {
          res.send(JSON.stringify(err2));
        } else {
          USE_CACHED[srcClear] = file2;
          _preview(file2);
        }
      });
    }
  });
});
// Download the remote file `src` into the dated work directory.
// On success calls back with (err, destPathWithoutExtension); the bytes are
// written to `${dest}.download`. On failure calls back with an error-message
// string as the first argument.
async function fileDownload(src, callback) {
  console.debug('Download file ...', src);
  // Abort the fetch after RBDP_FILE_DOWNLOAD_TIMEOUT seconds (default 30).
  const controller = new AbortController();
  const timeout = setTimeout(
    () => controller.abort(),
    (process.env.RBDP_FILE_DOWNLOAD_TIMEOUT || 30) * 1000
  );
  let response;
  try {
    response = await fetch(src, { signal: controller.signal });
  } catch (err) {
    // (1001): network failure or abort while fetching the source.
    if (err) console.error('>>>>>>>>>>', err);
    typeof callback === 'function' &&
      callback(`无法读取源文件 (1001) : ${src}`);
    return;
  } finally {
    clearTimeout(timeout);
  }
  if (!response.ok) {
    // (1002): the server answered with a non-2xx status.
    typeof callback === 'function' &&
      callback(`无法读取源文件 (1002) : ${src}`);
    return;
  }
  const dest = `${getUseDir()}/file-${new Date().getTime()}`;
  const buffer = await response.buffer();
  // Note: the callback receives `dest` WITHOUT the '.download' suffix;
  // convertPdf() appends it when building the LibreOffice command.
  fs.writeFile(`${dest}.download`, buffer, {}, err => {
    if (err) console.error('>>>>>>>>>>', err);
    typeof callback === 'function' && callback(err, dest);
  });
}
// Convert `${file}.download` to PDF via LibreOffice headless; calls back
// with (err, `${file}.pdf`). LibreOffice replaces the '.download' extension
// with '.pdf' when writing into --outdir.
function convertPdf(file, callback) {
  // Quote the input and output paths so a work directory containing spaces
  // (e.g. a custom RBDP_WORKDIR) does not break the shell command.
  const cmd = `${
    process.env.RBDP_LIBREOFFICE_BIN || 'libreoffice'
  } --headless --convert-to pdf "${file}.download" --outdir "${getUseDir()}"`;
  console.debug('Exec convert ...', cmd);
  // eslint-disable-next-line no-unused-vars
  exec(cmd, (err, stdout, stderr) => {
    if (err) console.error('>>>>>>>>>>', err);
    typeof callback === 'function' && callback(err, `${file}.pdf`);
  });
}
// Return (creating it if necessary) the dated working directory used for
// downloads and conversions, e.g. "<workdir>/20210129".
function getUseDir() {
  const dir = `${process.env.RBDP_WORKDIR || os.tmpdir()}/${dateFormat(
    'YYYYmmdd',
    new Date()
  )}`;
  // { recursive: true } creates missing parent directories (a custom
  // RBDP_WORKDIR may not exist yet), does not throw when the directory
  // already exists, and removes the existsSync/mkdir TOCTOU race.
  fs.mkdirSync(dir, { recursive: true });
  return dir;
}
// Format `date` according to `fmt`: the first run of each of Y/m/d/H/M/S is
// replaced with the corresponding date field, zero-padded to the run's width
// (a single-letter run is left unpadded).
function dateFormat(fmt, date) {
  const fields = [
    ['Y+', String(date.getFullYear())],
    ['m+', String(date.getMonth() + 1)],
    ['d+', String(date.getDate())],
    ['H+', String(date.getHours())],
    ['M+', String(date.getMinutes())],
    ['S+', String(date.getSeconds())],
  ];
  for (const [pattern, value] of fields) {
    const match = new RegExp(`(${pattern})`).exec(fmt);
    if (match) {
      const token = match[1];
      const replacement =
        token.length === 1 ? value : value.padStart(token.length, '0');
      fmt = fmt.replace(token, replacement);
    }
  }
  return fmt;
}
module.exports = router;
|
#include <climits>
#include <iostream>
#include <math.h>
#include <string>
#include <vector>
using namespace std;
// Kadane's algorithm: largest sum over contiguous subarrays of the first
// `size` elements of `a`. Returns 0 when every element is negative (the
// empty subarray is effectively allowed).
int maxSubArraySum(int a[], int size)
{
    int best = 0;
    int current = 0;
    for (int i = 0; i < size; i++)
    {
        current += a[i];
        if (current < 0)
        {
            // A negative running sum can never start a better subarray.
            current = 0;
        }
        else if (current > best)
        {
            best = current;
        }
    }
    return best;
}
int main() {
int n, m, r, c;
cin >> n >> m >> r >> c;
int tile[r][c];
int tileCost=0;
int rowSum[r]={0}, columnSum[c]={0};
for (int i=0 ; i<r ; i++) {
for (int j=0 ; j<c ; j++) {
cin >> tile[i][j];
tileCost += tile[i][j];
rowSum[i] += tile[i][j];
}
}
for (int i=0 ; i<c ; i++)
for (int j=0 ; j<r ; j++)
columnSum[i] += tile[j][i];
int width = m / c;
int height = n / r;
int base_cost = width*height*tileCost;
int width_l = m%c;
int x = c - width_l;
int height_l = n%r;
int y = r - height_l;
int rest_cost = height*(tileCost - maxSubArraySum(columnSum, x)) +
width*(tileCost - maxSubArraySum(rowSum, y));
//cout << tileCost - maxSubArraySum(columnSum, x) <<endl;
//cout << tileCost - maxSubArraySum(rowSum, y) << endl;
cout << base_cost + rest_cost << endl;
return 0;
}
|
#!/bin/bash
# Decode the valid and test splits with a trained fairseq end2end-SLU model.
# Usage: $0 [CHECKPOINT [SERIALIZED_CORPUS_PREFIX [SUBTASK]]]
source ${HOME}/work/tools/venv_python3.7.2_torch1.4_decore0/bin/activate
export FAIRSEQ_PATH=${HOME}/work/tools/venv_python3.7.2_torch1.4_decore0/bin/
echo " ---"
echo " * Using python: `which python`"
echo " * Using fairseq-train: `which fairseq-train`"; echo
# -----------------
FEATURES_TYPE='FlowBERT' # Use 'spectro' or 'W2V'
FEATURES_LANG='Fr' # Use 'Fr' or 'En' ('En' only with 'W2V')
NORMFLAG='Normalized'
# -----------------
DATA_PATH=${HOME}/work/data/MEDIA-Original/semantic_speech_aligned_corpus/DialogMachineAndUser_SemTextAndWav_FixedChannel/
SERIALIZED_CORPUS=MEDIA.user+machine.${FEATURES_TYPE}-7Klt-${FEATURES_LANG}-${NORMFLAG}.data
SUBTASK=token
# NOTE(review): the subtask override is only honoured when EXACTLY three
# arguments are given (-eq 3), unlike the -ge checks below — confirm intended.
if [[ $# -eq 3 ]]; then
	SUBTASK=$3
fi
CRITERION='cross_entropy'
CHECKPOINT=checkpoints/checkpoint_best.pt
if [[ $# -ge 1 ]]; then
	CHECKPOINT=$1
fi
if [[ $# -ge 2 ]]; then
	SERIALIZED_CORPUS=$2
	# Sanity check: at least one serialized-data file must exist for the prefix.
	if [[ `ls -l ${SERIALIZED_CORPUS}.* | wc -l` -eq 0 ]]; then
		echo "$0 ERROR: no serialized data found with prefix ${SERIALIZED_CORPUS}"; exit 1
	fi
fi
echo " * Using serialized data prefix: ${SERIALIZED_CORPUS}"
echo " ---"
BASIC_OPTIONS="--beam 1 --iter-decode-max-iter 1 --prefix-size 0 --match-source-len"
GENERATE_OPTIONS="--beam 1 --iter-decode-max-iter 1 --max-len-a 1.0 --max-len-b 0 --prefix-size 0" # Average max-len-a for MEDIA (computed on train): 0.123
CHECKPOINT_DIR=`dirname ${CHECKPOINT}`
# Generate on the validation split; output is also tee'd next to the checkpoint.
PYTHONPATH=${PYTHONPATH}:${HOME}/work/tools/fairseq/ ${FAIRSEQ_PATH}/fairseq-generate ${DATA_PATH} \
	--path ${CHECKPOINT} ${GENERATE_OPTIONS} --max-sentences 1 --num-workers=0 \
	--task end2end_slu --criterion ${CRITERION} --padded-reference \
	--serialized-data ${SERIALIZED_CORPUS} --slu-subtask ${SUBTASK} --user-only \
	--gen-subset valid --results-path ${CHECKPOINT_DIR} \
	| tee ${CHECKPOINT_DIR}/generate-valid.txt
# Same decode on the test split.
PYTHONPATH=${PYTHONPATH}:${HOME}/work/tools/fairseq/ ${FAIRSEQ_PATH}/fairseq-generate ${DATA_PATH} \
	--path ${CHECKPOINT} ${GENERATE_OPTIONS} --max-sentences 1 --num-workers=0 \
	--task end2end_slu --criterion ${CRITERION} --padded-reference \
	--serialized-data ${SERIALIZED_CORPUS} --slu-subtask ${SUBTASK} --user-only \
	--gen-subset test --results-path ${CHECKPOINT_DIR} \
	| tee ${CHECKPOINT_DIR}/generate-test.txt
deactivate
|
#!/bin/bash
# Generate the offline files/images download lists by extracting URL and image
# templates from the kubespray download role and expanding them with ansible.
set -eo pipefail

CURRENT_DIR=$(cd $(dirname $0); pwd)
TEMP_DIR="${CURRENT_DIR}/temp"
REPO_ROOT_DIR="${CURRENT_DIR%/contrib/offline}"
: ${DOWNLOAD_YML:="roles/download/defaults/main.yml"}
mkdir -p "${TEMP_DIR}"
# generate all download files url template
grep 'download_url:' "${REPO_ROOT_DIR}/${DOWNLOAD_YML}" \
    | sed 's/^.*_url: //g;s/\"//g' > "${TEMP_DIR}/files.list.template"
# generate all images list template
sed -n '/^downloads:/,/download_defaults:/p' "${REPO_ROOT_DIR}/${DOWNLOAD_YML}" \
    | sed -n "s/repo: //p;s/tag: //p" | tr -d ' ' \
    | sed 'N;s#\n# #g' | tr ' ' ':' | sed 's/\"//g' > "${TEMP_DIR}/images.list.template"
# add kube-* images to images list template
# Those container images are downloaded by kubeadm, then roles/download/defaults/main.yml
# doesn't contain those images. That is reason why here needs to put those images into the
# list separately.
KUBE_IMAGES="kube-apiserver kube-controller-manager kube-scheduler kube-proxy"
for i in $KUBE_IMAGES; do
    echo "{{ kube_image_repo }}/$i:{{ kube_version }}" >> "${TEMP_DIR}/images.list.template"
done
# run ansible to expand templates
/bin/cp "${CURRENT_DIR}/generate_list.yml" "${REPO_ROOT_DIR}"
# "$@" (not $*) preserves the caller's argument quoting when forwarding extra
# options to ansible-playbook (e.g. -e "foo=a b").
(cd "${REPO_ROOT_DIR}" && ansible-playbook "$@" generate_list.yml && /bin/rm generate_list.yml) || exit 1
|
#! /bin/sh
# macOS build helper: renames .mm sources to .cpp, runs qmake, copies
# resources, patches the Xcode project via ruby helpers, then opens it.
# Fail fast: a broken rename or qmake run should not continue into later steps.
set -e
echo '1. 文件重命名 .mm --> .cpp'
cd basePro
mv src/mainwindow.mm src/mainwindow.cpp
echo 'src/mainwindow.mm --> mainwindow.cpp'
mv src/widgets/common/LoginWidget.mm src/widgets/common/LoginWidget.cpp
echo 'src/widgets/common/LoginWidget.mm --> LoginWidget.cpp'
mv src/widgets/common/RoomEntryWidget.mm src/widgets/common/RoomEntryWidget.cpp
# Fixed typo in the progress message: it used to read 'ssrc/...'.
echo 'src/widgets/common/RoomEntryWidget.mm --> RoomEntryWidget.cpp'
mv src/widgets/player/Player.mm src/widgets/player/Player.cpp
echo 'src/widgets/player/Player.mm --> Player.cpp'
echo '-------------------------------------------------------------------------'
pwd
echo '2. 执行qmake '
qmake -spec macx-xcode basePro.pro
echo '-------------------------------------------------------------------------'
pwd
echo '3.拷贝资源文件 '
cd ../run_script
cp run_info.plist ../basePro/info.plist
echo '拷贝 info.plist'
rm -rf ../basePro/icon.iconset
cp -r run_icon.iconset ../basePro/icon.iconset
echo '拷贝 icon.iconset'
# NOTE(review): these pairs copy basePro/<dir> into sources/ and then copy it
# straight back — presumably a merge of tracked sources with generated files;
# confirm the round-trip is intentional.
cp -rf ../basePro/Object-C sources
cp -rf sources/Object-C ../basePro
echo '拷贝 Object-C'
cp -rf ../basePro/OtherFile sources
cp -rf sources/OtherFile ../basePro
echo '拷贝 OtherFile'
echo '-------------------------------------------------------------------------'
echo '4. 修改XcodeProj文件 '
ruby run_amend_project_file.rb
echo '-------------------------------------------------------------------------'
echo '5. 新增文件至功能 部分.cpp --> .mm'
ruby run_file_assistant.rb
echo '-------------------------------------------------------------------------'
echo '6. 新增framework/Lib'
ruby run_make_file.rb
echo '-------------------------------------------------------------------------'
echo '7. 打开Xcode工程'
cd ../basePro
open OunaClass.xcodeproj
echo '-------------------------------------------------------------------------'
echo 'Enjoy yourself!'
|
package oauth;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.nostalgia.PasswordRepository;
import com.nostalgia.UserRepository;
import com.nostalgia.persistence.model.User;
import lombok.extern.slf4j.Slf4j;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.Consumes;
import javax.ws.rs.FormParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.xml.ws.soap.AddressingFeature.Responses;
@Slf4j
@Path("/oauth2/token")
@Produces(MediaType.APPLICATION_JSON)
public class OAuth2Resource {
    // All collaborators are assigned once in the constructor, so they are
    // declared final (they were previously mutable for no reason).
    private final ImmutableList<String> allowedGrantTypes;
    private final AccessTokenRepository accessTokenDAO;
    private final UserRepository userDAO;
    private final PasswordRepository passRepo;
    // NOTE(review): @Slf4j already generates a `log` field; this explicit
    // logger is redundant — consider dropping one of the two.
    private static final Logger logger = LoggerFactory.getLogger(OAuth2Resource.class.getName());

    /**
     * @param allowedGrantTypes OAuth2 grant types this token endpoint accepts.
     * @param accessTokenDAO    repository for access tokens.
     * @param userDAO           repository for user lookup.
     * @param passRepo          repository for password verification.
     */
    public OAuth2Resource(ImmutableList<String> allowedGrantTypes, AccessTokenRepository accessTokenDAO, UserRepository userDAO, PasswordRepository passRepo) {
        this.allowedGrantTypes = allowedGrantTypes;
        this.accessTokenDAO = accessTokenDAO;
        this.passRepo = passRepo;
        this.userDAO = userDAO;
        logger.info("Constructed OAuth2Resource with grant types: " + allowedGrantTypes.toString());
    }
}
|
package main;
import java.util.Arrays;
import java.util.Scanner;
/**
* @author <NAME>
*
*/
public class FindingGenes
{
    /**
     * Prompts until a valid genome string (letters A, C, T, G only) is entered,
     * then prints each gene found in it, or "no gene is found".
     */
    public static void main(String[] args)
    {
        Scanner input = new Scanner(System.in);
        String genome = "";
        // Re-prompt until the input contains only A/C/T/G characters.
        while (!genome.matches("[ACTG]+"))
        {
            System.out.print("Enter a genome string: ");
            genome = input.next().toUpperCase();
        }
        input.close();
        String[] genes = findGenes(genome);
        if (genes.length == 0)
        {
            System.out.println("no gene is found");
        }
        else
        {
            for (String gene: genes)
            {
                System.out.println(gene);
            }
        }
    }
    /**
     * Returns the array of gene strings given the genome string.
     * <ul>
     * <li>
     * If the given string is not a valid genome string, the array of gene strings will default to being empty.
     * </li>
     * </ul>
     *
     * @param genome the genome string
     * @return the array of gene strings
     */
    public static String[] findGenes(String genome)
    {
        String genomeUppercase = genome.toUpperCase();
        // Genes are accumulated space-separated and split at the end.
        StringBuilder genes = new StringBuilder();
        if (genomeUppercase.matches("[ACTG]+"))
        {
            int potentialGenePrefixIndex = genomeUppercase.indexOf("ATG");
            // Scan while a start codon exists and enough characters remain
            // (9 = start codon + at least one triplet + stop codon).
            while ((potentialGenePrefixIndex != -1) && (genomeUppercase.length() >= 9))
            {
                // Drop everything up to and including the current start codon,
                // then locate the NEXT start codon and all three stop codons.
                genomeUppercase = genomeUppercase.substring(potentialGenePrefixIndex + 3);
                potentialGenePrefixIndex = genomeUppercase.indexOf("ATG");
                int[] potentialValidGeneSuffixesIndices = {genomeUppercase.indexOf("TAG"), genomeUppercase.indexOf("TAA"),
                        genomeUppercase.indexOf("TGA")};
                // Sorted ascending, so any -1 ("not found") entries come first.
                Arrays.sort(potentialValidGeneSuffixesIndices);
                // Largest entry -1 means no stop codon remains at all.
                if (potentialValidGeneSuffixesIndices[potentialValidGeneSuffixesIndices.length - 1] == -1)
                {
                    break;
                }
                // Pick the earliest stop codon that was actually found.
                int validGeneSuffix = potentialValidGeneSuffixesIndices[0];
                int currentSuffixesIndex = 1;
                while ((validGeneSuffix == -1) && (currentSuffixesIndex < potentialValidGeneSuffixesIndices.length))
                {
                    validGeneSuffix = potentialValidGeneSuffixesIndices[currentSuffixesIndex];
                    currentSuffixesIndex++;
                }
                // Accept only if the stop codon comes before the next start codon
                // (or there is no further start codon).
                if ((potentialGenePrefixIndex == -1) || ((potentialGenePrefixIndex != -1) && (validGeneSuffix < potentialGenePrefixIndex)))
                {
                    String potentialGene = genomeUppercase.substring(0, validGeneSuffix);
                    // A gene body must be a whole number of codons (triplets).
                    if (potentialGene.length() % 3 == 0)
                    {
                        genomeUppercase = genomeUppercase.substring(validGeneSuffix + 3);
                        potentialGenePrefixIndex = genomeUppercase.indexOf("ATG");
                        genes.append(potentialGene + " ");
                    }
                }
            }
        }
        return (genes.length() > 0) ? genes.toString().split(" ") : new String[0];
    }
}
|
/**
* @author ooooo
* @date 2021/1/29 20:47
*/
#ifndef CPP_1678__SOLUTION1_H_
#define CPP_1678__SOLUTION1_H_
#include <iostream>
#include <vector>
#include <unordered_map>
#include <unordered_set>
#include <stack>
#include <numeric>
#include <queue>
using namespace std;
// LeetCode 1678 "Goal Parser Interpretation":
// "G" -> "G", "()" -> "o", "(al)" -> "al".
class Solution {
 public:
  std::string interpret(std::string command) {
    std::string result;
    for (size_t i = 0; i < command.size(); ++i) {
      switch (command[i]) {
        case 'G':
          result.push_back('G');
          break;
        case ')':
          // A ')' closes either "()" (-> 'o') or "(al)" (-> "al");
          // the character just before it distinguishes the two cases.
          result += (command[i - 1] == '(') ? "o" : "al";
          break;
        default:
          // '(', 'a', 'l' are consumed when their closing ')' is reached.
          break;
      }
    }
    return result;
  }
};
#endif //CPP_1678__SOLUTION1_H_
|
// NOTE(review): this is a pseudocode skeleton, not compilable code — the
// /* condition ... */ placeholders must be replaced with real boolean
// expressions, and `conditions` and `found` must be declared by the
// enclosing scope before this can build.
for (int i = 0; i < conditions.length; i++) {
    if (/* condition for overlap */) {
        // Handle overlap condition
    } else if (/* condition for offset */) {
        // Handle offset condition
    } else {
        // No overlap, offset OK
        found = true;
        break;
    }
}
|
import authDirectiveTransformer from "./auth";

/**
 * Applies the "auth" schema-directive transformer to the given GraphQL schema
 * and returns the transformed schema.
 */
function applyDirectives(schema) {
  return authDirectiveTransformer(schema, "auth");
}

export default applyDirectives;
|
def alternate_case(string):
    """Return ``string`` reversed, with characters at even indexes of the
    reversed string upper-cased and characters at odd indexes lower-cased.
    """
    # Fixed: the original read ``defalternate_case`` (missing space), which
    # is a syntax error.
    reversed_string = string[::-1]
    new_string = ""
    for index, char in enumerate(reversed_string):
        if index % 2 == 0:
            new_string += char.upper()
        else:
            new_string += char.lower()
    return new_string


# Fixed: the original assert called an undefined name (``alternating_case``)
# and its expected value did not match what this implementation produces.
assert alternate_case("Hello world") == "DlRoW OlLeH"
|
<!DOCTYPE html>
<html>
  <head>
    <title>Days Between Dates</title>
  </head>
  <body>
    <h1>Days Between Dates</h1>
    <form action="">
      <input type="text" id="date1" name="date1" placeholder="Enter date 1"/>
      <input type="text" id="date2" name="date2" placeholder="Enter date 2"/>
      <!-- type="button" is required: the default type inside a form is
           "submit", which reloaded the page and wiped the result. -->
      <button type="button" onclick="calculateDaysBetweenDates()">Calculate</button>
      <div id="daysBetweenDates"></div>
    </form>
    <script>
      // Parse both inputs as dates and show the whole-day difference.
      const calculateDaysBetweenDates = () => {
        const date1 = document.getElementById('date1').value;
        const date2 = document.getElementById('date2').value;
        const startDate = new Date(date1);
        const endDate = new Date(date2);
        const timeDifference = Math.abs(endDate.getTime() - startDate.getTime());
        const daysBetweenDates = Math.ceil(timeDifference / (1000 * 3600 * 24));
        document.getElementById('daysBetweenDates').innerHTML = `There are ${daysBetweenDates} days between ${date1} and ${date2}`;
      }
    </script>
  </body>
</html>
|
<reponame>cyber-itl/citl-static-analysis
#pragma once
#include <cstdint>
#include <tuple>
#include <vector>
#include "capstone/capstone.h"
// Map an internal target-triple identifier to the matching Capstone
// architecture and mode pair. (Triple encoding defined at the call site.)
std::tuple<cs_arch, cs_mode> map_triple_cs(uint32_t triple);
// Extract immediate values from `insn`; `base_reg`/`reg_val` let callers
// resolve register-relative operands — presumably PC/base-relative
// addressing; confirm in the implementation.
std::vector<uint64_t> get_imm_vals(const cs_insn &insn, cs_arch arch, uint32_t base_reg, uint64_t reg_val);
// True if the instruction is an (architecture-specific) no-op.
bool is_nop(cs_arch arch, cs_insn *insn);
// ARM helpers: whether PC (resp. LR) appears in the instruction's operand list.
bool is_pc_in_arm_ops(cs_arm arm_details);
bool is_lr_in_arm_ops(cs_arm arm_details);
// Rotate a 32-bit value right by `amt` bits.
unsigned rotr32(unsigned val, unsigned amt);
|
import dj_database_url
import os
import sys
from datetime import timedelta
from pathlib import Path
from django.db.backends.mysql.base import DatabaseWrapper
# Force MySQL DateTimeField columns to plain 'datetime' — presumably to match
# an existing schema without fractional-second precision; TODO confirm.
DatabaseWrapper.data_types['DateTimeField'] = 'datetime'
BASE_DIR = Path(__file__).resolve().parent.parent
# NOTE(review): os.getenv returns None when SECRET_KEY is unset, which makes
# Django fail at startup; consider failing fast with a clear error instead.
SECRET_KEY = os.getenv('SECRET_KEY')
# Host/debug switch: locked down only when ENV == 'PROD'; any other value
# (including unset) runs with DEBUG on and all hosts allowed.
if os.getenv('ENV') in ['PROD']:
    ALLOWED_HOSTS = [os.getenv('HOST')]
    DEBUG = False
else:
    ALLOWED_HOSTS = ["*"]
    DEBUG = True
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'drf_yasg',
    'rest_framework',
    'core.apps.CoreConfig'
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    # WhiteNoise serves the collected static files directly from the app.
    'whitenoise.middleware.WhiteNoiseMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'todo_api.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'staticfiles', 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'todo_api.wsgi.application'
# MySQL connection parsed from the CLEARDB_DATABASE_URL environment variable.
db_from_env = dj_database_url.config(
    engine='django.db.backends.mysql',
    env='CLEARDB_DATABASE_URL',
    conn_max_age=60
)
DATABASES = {'default': db_from_env}
# Unit tests run against a local SQLite database instead of MySQL.
if 'test' in sys.argv:
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': BASE_DIR / 'db.sqlite3',
        }
    }
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
# djangorestframework-simplejwt: short-lived access tokens signed with SECRET_KEY.
SIMPLE_JWT = {
    'ACCESS_TOKEN_LIFETIME': timedelta(days=1),
    'REFRESH_TOKEN_LIFETIME': timedelta(days=2),
    'ROTATE_REFRESH_TOKENS': False,
    'BLACKLIST_AFTER_ROTATION': True,
    'UPDATE_LAST_LOGIN': False,
    'ALGORITHM': 'HS256',
    'SIGNING_KEY': SECRET_KEY,
    'VERIFYING_KEY': None,
    'AUDIENCE': None,
    'ISSUER': None,
    'AUTH_HEADER_TYPES': ('Bearer',),
    'AUTH_HEADER_NAME': 'HTTP_AUTHORIZATION',
    'USER_ID_FIELD': 'id',
    'USER_ID_CLAIM': 'user_id',
    'AUTH_TOKEN_CLASSES': ('rest_framework_simplejwt.tokens.AccessToken',),
    'TOKEN_TYPE_CLAIM': 'token_type',
    'JTI_CLAIM': 'jti'
}
REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': [
        'rest_framework_simplejwt.authentication.JWTAuthentication',
    ],
    # Throttle anonymous clients only (100 requests/day).
    'DEFAULT_THROTTLE_CLASSES': [
        'rest_framework.throttling.AnonRateThrottle',
    ],
    'DEFAULT_THROTTLE_RATES': {
        'anon': '100/day',
    }
}
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
# Let swagger UI send the JWT via an Authorization header instead of a session.
SWAGGER_SETTINGS = {
    'SECURITY_DEFINITIONS': {
        'Bearer': {
            'type': 'apiKey',
            'name': 'Authorization',
            'in': 'header'
        },
    },
    "USE_SESSION_AUTH": False
}
|
// Checkout widget that displays/edits the customer's card on file and
// tokenises new card details through Stripe before saving them server-side.
var AuthnetPaymentMethodController = function(params) { this.init(params); };
AuthnetPaymentMethodController.prototype = {
    container: 'payment_method_container',
    customer_profile_id: false,
    payment_profile_id: false,
    card_brand: false,
    card_last4: false,
    card_name: false,
    card_zip: false,
    // Copy every entry of `params` onto this instance.
    init: function(params)
    {
        var that = this;
        for (var i in params)
            that[i] = params[i];
    },
    // Reload the stored card summary from the server; `after` runs on success.
    refresh: function(after)
    {
        var that = this;
        $.ajax({
            url: '/checkout/authnet/json',
            type: 'get',
            success: function(resp) {
                that.stripe_key = resp.stripe_key;
                that.customer_id = resp.customer_id;
                that.card_brand = resp.card_brand;
                that.card_last4 = resp.card_last4;
                that.cc.print_ready_message();
                if (after) after();
            }
        });
    },
    // Render the "<brand> ending in <last4>" summary plus an Edit link.
    print: function()
    {
        var that = this;
        //if (!that.stripe_key)
        //{
        //    that.refresh(function() { that.print(); });
        //    return;
        //}
        var msg = that.card_brand && that.card_last4 ? that.card_brand + ' ending in ' + that.card_last4 : 'You have no card on file.';
        var div = $('<div/>')
            .append($('<h3/>').html('Payment Method'))
            .append($('<p/>')
                .append(msg).append(' ')
                .append($('<a/>').attr('href', '#').html('Edit').click(function(e) {
                    e.preventDefault();
                    that.edit();
                })
            ));
        $('#'+that.container).empty().append(div);
    },
    edit: function()
    {
        caboose_modal_url('/checkout/authnet');
    },
    // Validate the card form, tokenise it with Stripe, then persist the token.
    update: function()
    {
        var that = this;
        var info = {
            number: $('#card_number').val(),
            exp: $('#card_exp').val(),
            cvc: $('#card_cvc').val(),
            // Fixed: selectors were missing '#' ($('card_name')/$('card_zip')
            // match nothing, so name/zip were always undefined).
            name: $('#card_name').val(),
            address_zip: $('#card_zip').val()
        };
        var exp = info.exp.split('/');
        var m = exp.length > 0 ? exp[0] : '';
        var y = exp.length > 1 ? exp[1] : '';
        var error = false;
        if (!$.payment.validateCardNumber(info.number)) error = "Invalid card number.";
        if (!$.payment.validateCardExpiry(m, y)) error = "Invalid expiration date.";
        if (!$.payment.validateCardCVC(info.cvc)) error = "Invalid CVC.";
        if (error) { $('#payment_message').html("<p class='note error'>" + error + "</p>"); return; }
        // Fixed: use .prop() for the boolean `disabled` property —
        // .attr('disabled', 'false') does NOT re-enable a button.
        $('#save_payment_btn').prop('disabled', true).val('Saving card...');
        Stripe.setPublishableKey(that.stripe_key);
        Stripe.card.createToken(info, function(status, resp) {
            if (resp.error)
            {
                $('#save_payment_btn').prop('disabled', false).val('Save Payment Method');
                $('#payment_message').html("<p class='note error'>" + resp.error.message + "</p>");
            }
            else
            {
                that.card_brand = resp.card.brand;
                that.card_last4 = resp.card.last4;
                $.ajax({
                    url: '/checkout/stripe-details',
                    type: 'put',
                    data: { token: resp.id, card: resp.card },
                    success: function(resp2) {
                        if (resp2.success)
                        {
                            that.customer_id = resp.customer_id;
                            that.print();
                        }
                        if (resp2.error) $('#payment_message').html("<p class='note error'>" + resp2.error + "</p>");
                    }
                });
            }
        });
    },
    // True once a customer id and card summary are all present.
    ready: function()
    {
        var that = this;
        if (!that.customer_id ) return false;
        if (!that.card_brand  ) return false;
        if (!that.card_last4  ) return false;
        return true;
    }
};
|
<filename>learn_uwsgi/models/type.py
# coding: utf-8
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from .base_model_ import Model
from ..serialization import deserialize_model
class Type(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
    Do not edit the class manually.
    """
    """
    allowed enum values
    """
    # Allowed values — apparently the Linux virtual link/device type names
    # (as in `ip link add ... type <name>`); TODO confirm against the API spec.
    VLAN = "vlan"
    VETH = "veth"
    VCAN = "vcan"
    VXCAN = "vxcan"
    DUMMY = "dummy"
    IFB = "ifb"
    MACVLAN = "macvlan"
    MACVTAP = "macvtap"
    BRIDGE = "bridge"
    BOND = "bond"
    TEAM = "team"
    IPOIB = "ipoib"
    IP6TNL = "ip6tnl"
    IPIP = "ipip"
    SIT = "sit"
    VXLAN = "vxlan"
    GRE = "gre"
    GRETAP = "gretap"
    ERSPAN = "erspan"
    IP6GRE = "ip6gre"
    IP6GRETAP = "ip6gretap"
    IP6ERSPAN = "ip6erspan"
    VTI = "vti"
    NLMON = "nlmon"
    TEAM_SLAVE = "team_slave"
    BOND_SLAVE = "bond_slave"
    IPVLAN = "ipvlan"
    GENEVE = "geneve"
    BRIDGE_SLAVE = "bridge_slave"
    VRF = "vrf"
    MACSEC = "macsec"
    def __init__(self): # noqa: E501
        """Type - a model defined in OpenAPI
        """
        # No properties: this model is a bare enum wrapper, so both maps are empty.
        self.openapi_types = {
        }
        self.attribute_map = {
        }
    @classmethod
    def from_dict(cls, dikt) -> 'Type':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The Type of this Type. # noqa: E501
        :rtype: Type
        """
        return deserialize_model(dikt, cls)
|
import { prisma } from '@infra/prisma/client'
import { TemplateMapper } from '@modules/broadcasting/mappers/TemplateMapper'
import { Template } from '../../domain/template/template'
import {
ITemplatesRepository,
TemplatesSearchParams,
TemplatesSearchResult,
} from '../ITemplatesRepository'
export class PrismaTemplatesRepository implements ITemplatesRepository {
  /** Lists every template, mapped to the domain entity. */
  async findAll(): Promise<Template[]> {
    const templates = await prisma.template.findMany()
    return templates.map(template => TemplateMapper.toDomain(template))
  }

  /**
   * Loads a template by id.
   * NOTE(review): `findUnique` resolves to null for an unknown id, and that
   * null is passed straight into `TemplateMapper.toDomain` — confirm the
   * mapper tolerates it (findDefaultTemplate below guards, this method does not).
   */
  async findById(id: string): Promise<Template> {
    const template = await prisma.template.findUnique({ where: { id } })
    return TemplateMapper.toDomain(template)
  }

  /** Returns the template flagged `is_default`, or null when none exists. */
  async findDefaultTemplate(): Promise<Template> {
    const template = await prisma.template.findFirst({
      where: { is_default: true },
    })
    return template ? TemplateMapper.toDomain(template) : null
  }

  /** Persists changes to an existing template (matched by id). */
  async save(template: Template): Promise<void> {
    const data = TemplateMapper.toPersistence(template)
    await prisma.template.update({
      where: {
        id: template.id,
      },
      data,
    })
  }

  /** Inserts a new template. */
  async create(template: Template): Promise<void> {
    const data = TemplateMapper.toPersistence(template)
    await prisma.template.create({ data })
  }

  /**
   * Paginated, title-sorted search; `query` filters by case-insensitive
   * title substring. `totalCount` comes from a separate aggregate with the
   * same `where` clause.
   */
  async search({
    query,
    page,
    perPage,
  }: TemplatesSearchParams): Promise<TemplatesSearchResult> {
    const queryPayload = {
      take: perPage,
      skip: (page - 1) * perPage,
      where: {},
    }

    if (query) {
      queryPayload.where = {
        title: { contains: query, mode: 'insensitive' },
      }
    }

    const templates = await prisma.template.findMany({
      ...queryPayload,
      orderBy: {
        title: 'asc',
      },
    })

    const estimatedCount = await prisma.template.aggregate({
      _count: true,
      where: queryPayload.where,
    })

    return {
      data: templates.map(template => TemplateMapper.toDomain(template)),
      totalCount: estimatedCount._count,
    }
  }
}
|
#!/bin/bash
# Apply all Kubernetes manifests under experiment_roles/.
# Fail fast so a kubectl error is not silently swallowed and the script
# no longer reports success when the apply failed.
set -e
echo "Setting up experiment roles"
kubectl apply -f experiment_roles/
|
// Use bracket notation to find the second-to-last character in the lastName string.
// Hint
// Try looking at the thirdToLastLetterOfFirstName variable declaration if you get stuck.
// Example
var firstName = "Ada";
var thirdToLastLetterOfFirstName = firstName[firstName.length - 3]; // "A"
// Setup
var lastName = "Lovelace";
// Only change code below this line
var secondToLastLetterOfLastName = lastName[lastName.length - 2]; // "c"
|
<reponame>kiya69/terminalstore
var gulp = require('gulp');
var run = require('gulp-run');
var connect = require('gulp-connect');
var path = require('path');
var stylus = require('gulp-stylus');
var nib = require('nib');
// NOTE(review): array-form task dependencies (gulp.task(name, [deps], fn))
// are gulp 3 API — presumably this project pins gulp@3; confirm before upgrading.
// Local dev server on port 8880.
gulp.task('connect', function() {
    connect.server({
        port: 8880
    });
});
// Compile the Stylus entry point (with nib mixins) back into ./css.
gulp.task('stylus', function() {
    gulp.src('./css/style.styl')
        .pipe(stylus({
            use: [nib()]
        }))
        .pipe(gulp.dest('./css'));
})
// Recompile Stylus whenever any .styl file changes.
gulp.task('watch', function() {
    gulp.watch('./css/*.styl', ['stylus']);
});
gulp.task('default', ['connect', 'watch']);
// Files copied verbatim into ./build by the 'build' task.
var build_files = [
    'css/**/*.*',
    'img/**/*.*',
    'js/**/*.*',
    'index.html',
    'noWebGL.html'
];
gulp.task('build', function() {
    gulp.src(build_files, {
        base: './'
    })
        .pipe(gulp.dest('build'));
});
// Sync the build directory to the S3 bucket via the aws CLI.
gulp.task('upload', function() {
    run('aws s3 sync build s3://labs.floored.com/buildings').exec();
});
gulp.task('publish', ['build', 'upload']);
|
# Migration creating the users table.
class UsersTable < ActiveRecord::Migration[5.1]
  def change
    create_table :users do |t|
      # NOTE(review): ':usernmae' looks like a typo for ':username'. If this
      # migration has already run anywhere, correct it with a follow-up
      # rename_column migration rather than by editing this file.
      t.string :usernmae
      t.text :email
      t.string :password_digest
    end
  end
end
|
<!DOCTYPE html>
<html lang="en">
<head>
  <!-- DOCTYPE and charset added so browsers render in standards mode. -->
  <meta charset="utf-8">
  <title>My Webpage</title>
</head>
<body>
  <h1>My Webpage</h1>
  <!-- Main content -->
  <div>
    <p>This is the main content of the webpage.</p>
  </div>
  <!-- Sidebar -->
  <div>
    <p>This is the sidebar content of the webpage.</p>
  </div>
  <!-- Footer -->
  <div>
    <p>This is the footer content of the webpage.</p>
  </div>
</body>
</html>
|
#!/bin/sh
##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2015 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
#######################################################################
# Copyright [2014] [Cisco Systems, Inc.]
#
# Licensed under the Apache License, Version 2.0 (the \"License\");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an \"AS IS\" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#######################################################################
source /etc/utopia/service.d/ulog_functions.sh

#------------------------------------------------------------------
# This script is used to restart syseventd daemon and to
# bring its internal state to as before by re-registring all apps
# This script is typically called by process monitor if & when
# it detects syseventd daemon has died
# This script doesn't take any parameter
#------------------------------------------------------------------

# Restart the sysevent daemon, replay all client registrations, and kick the
# lan/wan services so their runtime state is rebuilt.
do_restart() {
    ulog system "Restarting sysevent subsystem"
    /sbin/syseventd
    # Give the daemon a moment to come up before re-registering clients.
    sleep 2
    apply_system_defaults
    INIT_DIR=/etc/utopia/registration.d
    # run all executables in the sysevent registration directory
    execute_dir $INIT_DIR
    ulog system "Restarting lan and wan"
    sysevent set lan-start
    sysevent set wan-start
}
#------------------------------------------------------------------
do_restart
|
#!/bin/sh
#
# Script for building and packaging the Hyperloop iOS package
#
CWD=`pwd`
CURVERSION=`grep "^version:" manifest`
VERSION=`grep "^version:" manifest | cut -c 10-`
METABASE_VERSION=`grep "\"version\":" ../packages/hyperloop-ios-metabase/package.json | cut -d \" -f 4`

export TITANIUM_SDK="`node ../tools/tiver.js`"
echo "Titanium SDK version: "
echo $TITANIUM_SDK

# Install xcpretty if it is not available.
# FIX: the original tested $? right after the CHECK="..." assignment, so the
# test always saw success and xcpretty was never installed when missing.
if ! which xcpretty >/dev/null 2>&1;
then
	gem install xcpretty
fi
CHECK="✓ "

rm -rf build

# Inject the TITANIUM_SDK value into titanium.xcconfig explicitly, just exporting the value doesn't override it, it seems
sed -i.bak 's@TITANIUM_SDK = .*@TITANIUM_SDK = '"$TITANIUM_SDK"'@g' ./titanium.xcconfig

if [ "${CI}" = "1" ];
then
	echo "Testing ..."
	xcodebuild clean >/dev/null
	xcodebuild -sdk iphonesimulator -destination 'platform=iOS Simulator,name=iPhone 12' -scheme hyperloop -target Tests -configuration Debug GCC_PREPROCESSOR_DEFINITIONS='USE_JSCORE_FRAMEWORK=1' test | xcpretty -r junit
	# FIX: capture the pipeline status before testing it; the original ran
	# `exit $?` after the [ ] test, which always exited with status 0.
	status=$?
	if [ $status -ne 0 ];
	then
		exit $status
	fi
fi

# Build the native module
echo "\nBuilding module ..."
mkdir -p build/zip
ti build -p ios --build-only
cd dist
unzip hyperloop-iphone-$VERSION.zip -d ../build/zip
cd ..

# Package the metabase into the .zip
echo "Packaging iOS metabase..."
cd ../packages/hyperloop-ios-metabase
rm *.tgz
npm pack >/dev/null 2>&1
cd $CWD

# Install dependencies
echo "Installing npm dependencies..."
cd build/zip/modules/iphone/hyperloop/$VERSION/hooks
npm i --production
npm i $CWD/../packages/hyperloop-ios-metabase/hyperloop-metabase-$METABASE_VERSION.tgz
rm -rf node_modules/findit/test
rm -rf package-lock.json
cd $CWD

cd $CWD/build/zip
rm -rf $CWD/hyperloop-iphone-$VERSION.zip
zip -q -r $CWD/hyperloop-iphone-$VERSION.zip * --exclude=*test* --exclude=*.DS_Store* --exclude=*.git* --exclude *.travis.yml* --exclude *.gitignore* --exclude *.npmignore* --exclude *CHANGELOG* --exclude *.jshintrc*
unset TITANIUM_SDK
echo "$CHECK Done packaging iOS module!\n"
exit 0
|
#!/bin/bash
# Install all Perl scripts from the build directory into the package prefix.
# FIX: quote $PREFIX so paths containing spaces are handled correctly.
mkdir -p "$PREFIX/bin"
cp *.pl "$PREFIX/bin"
|
#*******************************************************************************
# Copyright 2019 Fabrizio Pastore, Leonardo Mariani
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#*******************************************************************************
# Run a component-level "training" analysis over test.csv with the configured
# transformers and preprocessing rules (see klfaCsvAnalysis.sh for usage).
klfaCsvAnalysis.sh componentLevel training transformersConfig.txt preprocessingRules.txt test.csv
|
#!/usr/bin/env bash
# Some helpful functions
yell() { echo -e "${RED}FAILED> $* ${NC}" >&2; }      # print an error to stderr
die() { yell "$*"; exit 1; }                          # print an error and abort
try() { "$@" || die "failed executing: $*"; }         # run a command, abort on failure
log() { echo -e "--> $*"; }                           # informational message

# Colors for colorizing
RED='\033[0;31m'
GREEN='\033[0;32m'
PURPLE='\033[0;35m'
BLUE='\033[0;34m'
YELLOW='\033[0;33m'
NC='\033[0m'

INSTALL_PATH=${INSTALL_PATH:-"/usr/local/bin"}
NEED_SUDO=0

# Run a command, prefixing it with sudo when NEED_SUDO was set to 1 by the
# write-permission probe later in the script.
function maybe_sudo() {
    if [[ "$NEED_SUDO" == '1' ]]; then
        sudo "$@"
    else
        "$@"
    fi
}
# check for curl
# FIX: test which's output instead of comparing $? to exactly "1" — `which`
# may return other non-zero codes on some systems.
hasCurl=$(which curl)
if [ -z "$hasCurl" ]; then
    die "You need to install curl to use this script."
fi

log "Getting latest version..."
# adapted from https://github.com/openfaas/faas-cli/blob/master/get.sh
version=$(curl -s -f -H 'Content-Type: text/plain' https://releases.hasura.io/graphql-engine?agent=cli-get.sh)

# FIX: quote the variable so an empty/whitespace response is detected reliably.
if [ -z "$version" ]; then
    log "${YELLOW}"
    log "Failed while attempting to install hasura graphql-engine cli. Please manually install:"
    log ""
    # FIX: the first manual step was numbered "2." in the original.
    log "1. Open your web browser and go to https://github.com/hasura/graphql-engine/releases"
    log "2. Download the cli from latest release for your platform. Name it 'hasura'."
    log "3. chmod +x ./hasura"
    log "4. mv ./hasura /usr/local/bin"
    log "${NC}"
    die "exiting..."
fi

log "Latest version is $version"

# check for existing hasura installation
hasCli=$(which hasura)
if [ -n "$hasCli" ]; then
    log ""
    log "${GREEN}You already have the hasura cli at '${hasCli}'${NC}"
    export n=3
    log "${YELLOW}Downloading again in $n seconds... Press Ctrl+C to cancel.${NC}"
    log ""
    sleep $n
fi
# get platform and arch
platform='unknown'
unamestr=`uname`
if [[ "$unamestr" == 'Linux' ]]; then
    platform='linux'
elif [[ "$unamestr" == 'Darwin' ]]; then
    platform='darwin'
fi

if [[ "$platform" == 'unknown' ]]; then
    die "Unknown OS platform"
fi

arch='unknown'
archstr=`uname -m`
if [[ "$archstr" == 'x86_64' ]]; then
    arch='amd64'
else
    # NOTE(review): everything that is not x86_64 (including ARM) falls back
    # to the 32-bit x86 build — confirm this is intended for newer platforms.
    arch='386'
fi

# some variables
suffix="-${platform}-${arch}"
targetFile="/tmp/cli-hasura$suffix"

# Remove any stale download left over from a previous run.
if [ -e $targetFile ]; then
    rm $targetFile
fi

log "${PURPLE}Downloading hasura for $platform-$arch to ${targetFile}${NC}"
url=https://github.com/hasura/graphql-engine/releases/download/$version/cli-hasura$suffix
try curl -L# -f -o $targetFile "$url"
try chmod +x $targetFile
log "${GREEN}Download complete!${NC}"

# check for sudo
# Probe whether INSTALL_PATH is writable by touching a scratch file; when the
# touch fails the final move must be done via sudo (see maybe_sudo above).
needSudo=$(touch ${INSTALL_PATH}/.hasurainstall &> /dev/null)
if [[ "$?" == "1" ]]; then
    NEED_SUDO=1
fi
rm ${INSTALL_PATH}/.hasurainstall &> /dev/null

if [[ "$NEED_SUDO" == '1' ]]; then
    log
    log "${YELLOW}Path '$INSTALL_PATH' requires root access to write."
    log "${YELLOW}This script will attempt to execute the move command with sudo.${NC}"
    log "${YELLOW}Are you ok with that? (y/N)${NC}"
    read a
    if [[ $a == "Y" || $a == "y" || $a = "" ]]; then
        log
    else
        log
        log " ${BLUE}sudo mv $targetFile ${INSTALL_PATH}/hasura${NC}"
        log
        die "Please move the binary manually using the command above."
    fi
fi

log "Moving cli from $targetFile to ${INSTALL_PATH}"
try maybe_sudo mv $targetFile ${INSTALL_PATH}/hasura

log
log "${GREEN}hasura cli installed to ${INSTALL_PATH}${NC}"
log

# Clean up the temp download if the move left it behind.
if [ -e $targetFile ]; then
    rm $targetFile
fi

hasura version

# Warn when the install dir is not on PATH.
if ! $(echo "$PATH" | grep -q "$INSTALL_PATH"); then
    log
    log "${YELLOW}$INSTALL_PATH not found in \$PATH, you might need to add it${NC}"
    log
fi
|
import pandas as pd
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import LinearRegression

# Read the blog post data
data = pd.read_csv('blog_posts.csv')

# Feature extraction: TF-IDF vectors over the post text.
vectorizer = TfidfVectorizer()
posts_features = vectorizer.fit_transform(data['text'])

# Split data into train and test sets (80/20, in file order).
# Compute the split index once instead of repeating int(0.8*len(data)) four times.
split = int(0.8 * len(data))
train_features = posts_features[:split]
test_features = posts_features[split:]
train_popularity = data['popularity'][:split]
test_popularity = data['popularity'][split:]

# Fit model and evaluate it
model = LinearRegression()
model.fit(train_features, train_popularity)
predictions = model.predict(test_features)
# FIX: LinearRegression.score returns the R^2 coefficient of determination,
# not a classification accuracy — label the printed metric correctly.
print("R^2 score:", model.score(test_features, test_popularity))
|
package com.infamous.framework.sensitive.core;

import static org.junit.jupiter.api.Assertions.*;

import org.junit.jupiter.api.Test;

/**
 * Unit tests for {@code DefaultSensitiveObject#toString()}: the rendered
 * string must be non-null and must never echo the wrapped raw value.
 */
class DefaultSensitiveObjectTest {

    @Test
    public void testToString() {
        DefaultSensitiveObject obj = new DefaultSensitiveObject("123");
        // toString() must produce something, but never leak the raw "123".
        assertNotNull(obj.toString());
        assertNotEquals("123", obj.toString());
    }

    @Test
    public void testToString_Null() {
        DefaultSensitiveObject obj = new DefaultSensitiveObject(null);
        // Even with a null payload, toString() must be non-null.
        assertNotNull(obj.toString());
        // NOTE(review): comparing against "123" looks copy-pasted from the
        // test above — for a null payload this assertion is vacuous.
        assertNotEquals("123", obj.toString());
    }
}
|
#!/usr/bin/env bash
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# NOTE(review): generated Gradle wrapper script — prefer regenerating it via
# `gradle wrapper` over hand-editing.
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

warn ( ) {
    echo "$*"
}

die ( ) {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
esac

# For Cygwin, ensure paths are in UNIX format before anything is touched.
if $cygwin ; then
    [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
fi

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
# Remember where we were, resolve the script's directory, then return.
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >&-
APP_HOME="`pwd -P`"
cd "$SAVED" >&-

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
    JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"

exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
|
<gh_stars>0
module.exports={
mongoURI:' mongodb+srv://yasir:yasir123@cluster-bi6kh.mongodb.net/test?retryWrites=true&w=majority'
};
|
<gh_stars>0
// Copyright 2010 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.services;
import java.io.IOException;
/**
* Filter contributed into the {@link ComponentRequestHandler} pipeline to set the
* {@link RequestGlobals#getActivePageName() activePageName property}.
*
* @since 5.2.0
*/
public class InitializeActivePageName implements ComponentRequestFilter
{
    private final RequestGlobals globals;

    public InitializeActivePageName(RequestGlobals globals)
    {
        this.globals = globals;
    }

    /**
     * Records the active page name for a component event request, then
     * delegates to the rest of the pipeline.
     */
    public void handleComponentEvent(ComponentEventRequestParameters parameters,
            ComponentRequestHandler handler) throws IOException
    {
        globals.storeActivePageName(parameters.getActivePageName());
        handler.handleComponentEvent(parameters);
    }

    /**
     * Records the logical page name for a page render request, then
     * delegates to the rest of the pipeline.
     */
    public void handlePageRender(PageRenderRequestParameters parameters,
            ComponentRequestHandler handler) throws IOException
    {
        globals.storeActivePageName(parameters.getLogicalPageName());
        handler.handlePageRender(parameters);
    }
}
|
# This file was generated on 2019-07-08T14:21:39+01:00 from the rspec-dev repo.
# DO NOT modify it by hand as your changes will get lost the next time it is generated.
# Taken from:
# https://github.com/travis-ci/travis-build/blob/e9314616e182a23e6a280199cd9070bfc7cae548/lib/travis/build/script/templates/header.sh#L34-L53
# Retry a command up to 3 times, pausing 1s between attempts. Prints a notice
# before each retry and a final failure message when every attempt fails;
# returns the exit status of the last attempt.
travis_retry() {
  local result=0
  local count=1
  while [ $count -le 3 ]; do
    [ $result -ne 0 ] && {
      echo -e "\n\033[33;1mThe command \"$@\" failed. Retrying, $count of 3.\033[0m\n" >&2
    }
    "$@"
    result=$?
    [ $result -eq 0 ] && break
    count=$(($count + 1))
    sleep 1
  done
  # FIX: the original tested `count -eq 3`, but after three failed attempts
  # the counter is 4, so the message never fired; test the last result
  # instead. Also use `echo -e` so the ANSI escapes are interpreted, matching
  # the retry message above.
  [ $result -ne 0 ] && {
    echo -e "\n\033[33;1mThe command \"$@\" failed 3 times.\033[0m\n" >&2
  }
  return $result
}
# Taken from https://github.com/vcr/vcr/commit/fa96819c92b783ec0c794f788183e170e4f684b2
# and https://github.com/vcr/vcr/commit/040aaac5370c68cd13c847c076749cd547a6f9b1

# Prefer GNU date (gdate) when installed; fall back to the system date.
nano_cmd="$(type -p gdate date | head -1)"
nano_format="+%s%N"
# BSD/macOS date has no %N (nanoseconds); pad with zeros instead.
[ "$(uname -s)" != "Darwin" ] || nano_format="${nano_format/%N/000000000}"

# Emit the travis_time:start marker and remember the start timestamp.
travis_time_start() {
  travis_timer_id=$(printf %08x $(( RANDOM * RANDOM )))
  travis_start_time=$($nano_cmd -u "$nano_format")
  printf "travis_time:start:%s\r\e[0m" $travis_timer_id
}

# Emit the travis_time:end marker with the measured duration.
travis_time_finish() {
  local travis_end_time=$($nano_cmd -u "$nano_format")
  local duration=$(($travis_end_time-$travis_start_time))
  printf "travis_time:end:%s:start=%s,finish=%s,duration=%s\r\e[0m" \
    $travis_timer_id $travis_start_time $travis_end_time $duration
}
# Run a command inside a Travis log fold so its output can be collapsed in
# the CI log; outside Travis, print plain start/end banners instead.
fold() {
  local name="$1"
  local status=0
  shift 1
  if [ -n "$TRAVIS" ]; then
    printf "travis_fold:start:%s\r\e[0m" "$name"
    travis_time_start
  else
    echo "============= Starting $name ==============="
  fi
  "$@"
  status=$?
  [ -z "$TRAVIS" ] || travis_time_finish
  if [ "$status" -eq 0 ]; then
    if [ -n "$TRAVIS" ]; then
      printf "travis_fold:end:%s\r\e[0m" "$name"
    else
      echo "============= Ending $name ==============="
    fi
  else
    # Record the failure for the caller; the fold is deliberately left open
    # so the failing output stays visible in the log.
    STATUS="$status"
  fi
  return $status
}
|
#!/bin/bash
# Install the Python/Fortran build prerequisites for this project and fetch
# the example sentiment word lists.
OS_NAME="$(uname -s)"
if [ "$OS_NAME" == 'Darwin' ]; then
  brew install gfortran
  # NOTE(review): easy_install is long deprecated — consider `python -m ensurepip`
  # or the get-pip.py bootstrap instead.
  sudo easy_install pip
else
  # assuming using ubuntu
  sudo apt-get update
  sudo apt-get -y install python-pip python-dev libopenblas-dev liblapack-dev gfortran
fi
sudo pip install -r requirements.txt
# Download the sentiment word lists into the current directory.
curl http://sigopt-public.s3-website-us-west-2.amazonaws.com/NEGATIVE_list.json -O
curl http://sigopt-public.s3-website-us-west-2.amazonaws.com/POSITIVE_list.json -O
|
<reponame>zilbuc/gatsby-tutorial<filename>src/templates/bike-template.js
import React from 'react';
import Layout from '../components/layout';
import { Link, graphql } from 'gatsby';
import Img from 'gatsby-image';

// Template page for a single Contentful bike entry; Gatsby instantiates it
// once per bike id via the page query below.
const BikeTemplate = ({data}) => {
  const { title, price } = data.bikeQuery;
  // Contentful long-text fields are nested: description.description holds the text.
  const { description } = data.bikeQuery.description;
  const { fluid } = data.bikeQuery.image;
  return (
    <Layout>
      <h2>Bike</h2>
      <Img fluid={fluid} alt='bike'/>
      <h3 style={{ marginBottom: '5px'}}>
        {title}{' '}
        <span style={{ color: 'grey', marginLeft: '15px'}}>
          ${price}
        </span>
      </h3>
      <p>{description}</p>
      <Link to='/bikes/'>Back to Bikes</Link>
    </Layout>
  );
}

// Page query: `$id` is supplied by gatsby-node when pages are created.
export const query = graphql`
  query($id:String!) {
    bikeQuery: contentfulBike (id: {eq: $id}) {
      title
      price
      description {
        description
      }
      image {
        fluid {
          ...GatsbyContentfulFluid_tracedSVG
        }
      }
    }
  }
`

export default BikeTemplate;
|
/**
 * Sets the maximum number of items the scan request may return.
 *
 * Invalid values (zero, negative, or non-integer) are logged via Log.Error
 * and ignored, leaving any previously configured limit untouched.
 *
 * @param limit Desired item limit; must be a positive integer.
 * @returns This query builder (as the concrete subtype T) for chaining.
 */
public setLimit(limit: number): T {
    if (limit <= 0 || !Number.isInteger(limit)) {
        Log.Error(AbstractScanQuery.name, 'setLimit', 'Limit parameter must be a positive integer', [{name: 'Given limit', value: limit}]);
    } else {
        this.request.Limit = limit;
    }
    return this as unknown as T;
}
|
<gh_stars>1000+
module Chewy
  # Subscribes to Chewy's ActiveSupport instrumentation events and writes
  # one debug line per import / search / delete-by-query operation.
  class LogSubscriber < ActiveSupport::LogSubscriber
    def logger
      Chewy.logger
    end

    def import_objects(event)
      render_action('Import', event) { |payload| payload[:import] }
    end

    def search_query(event)
      render_action('Search', event) { |payload| payload[:request] }
    end

    def delete_query(event)
      render_action('Delete by Query', event) { |payload| payload[:request] }
    end

    # Formats and debug-logs a single event; the block extracts the
    # description from the event payload. Skips logging when the description
    # is blank.
    def render_action(action, event)
      payload = event.payload
      description = yield(payload)
      return if description.blank?
      subject = payload[:type].presence || payload[:index]
      action = "#{subject} #{action} (#{event.duration.round(1)}ms)"
      action = color(action, GREEN, true)
      debug(" #{action} #{description}")
    end
  end
end

Chewy::LogSubscriber.attach_to :chewy
|
<!DOCTYPE html>
<html lang="en">
<head>
  <!-- DOCTYPE and charset added so browsers render in standards mode. -->
  <meta charset="utf-8">
  <title>My Webpage</title>
</head>
<body>
  <h1>Heading</h1>
  <p>This is a paragraph.</p>
  <button>Click Me!</button>
</body>
</html>
|
package apps;

import org.jooby.Jooby;

/**
 * Jooby application entry point that installs the {@code Route1096d} routes.
 */
public class App1096d extends Jooby {
  {
    // Register the routes defined by Route1096d with this application.
    use(Route1096d.class);
  }
}
|
# Build a new list containing every word except "World", then display it.
l = ["Hello", "World", "Test"]
result = list(filter(lambda word: word != "World", l))
print(result)
|
#!/bin/sh
# base16-shell (https://github.com/chriskempson/base16-shell)
# Base16 Shell template by Chris Kempson (http://chriskempson.com)
# Default Dark scheme by Chris Kempson (http://chriskempson.com)
# NOTE(review): vendored Base16 template output — regenerate from the scheme
# file rather than hand-editing the palette values below.

# Scheme palette as "rr/gg/bb" hex components.
base00="18/18/18"
base01="28/28/28"
base02="38/38/38"
base03="58/58/58"
base04="b8/b8/b8"
base05="d8/d8/d8"
base06="e8/e8/e8"
base07="f8/f8/f8"
base08="ab/46/42"
base09="dc/96/56"
base0A="f7/ca/88"
base0B="a1/b5/6c"
base0C="86/c1/b9"
base0D="7c/af/c2"
base0E="ba/8b/af"
base0F="a1/69/46"

color_foreground=$base05
color_background=$base00

# Pick the escape-sequence wrapper appropriate for the current terminal.
if [ -n "$TMUX" ]; then
  # Tell tmux to pass the escape sequences through
  # (Source: http://permalink.gmane.org/gmane.comp.terminal-emulators.tmux.user/1324)
  put_template() { printf '\033Ptmux;\033\033]4;%d;rgb:%s\033\033\\\033\\' $@; }
  put_template_var() { printf '\033Ptmux;\033\033]%d;rgb:%s\033\033\\\033\\' $@; }
  put_template_custom() { printf '\033Ptmux;\033\033]%s%s\033\033\\\033\\' $@; }
elif [ "${TERM%%[-.]*}" = "screen" ]; then
  # GNU screen (screen, screen-256color, screen-256color-bce)
  put_template() { printf '\033P\033]4;%d;rgb:%s\007\033\\' $@; }
  put_template_var() { printf '\033P\033]%d;rgb:%s\007\033\\' $@; }
  put_template_custom() { printf '\033P\033]%s%s\007\033\\' $@; }
elif [ "${TERM%%-*}" = "linux" ]; then
  # Linux console: only the first 16 colors can be set, via \e]P.
  put_template() { [ $1 -lt 16 ] && printf "\e]P%x%s" $1 $(echo $2 | sed 's/\///g'); }
  put_template_var() { true; }
  put_template_custom() { true; }
else
  put_template() { printf '\033]4;%d;rgb:%s\033\\' $@; }
  put_template_var() { printf '\033]%d;rgb:%s\033\\' $@; }
  put_template_custom() { printf '\033]%s%s\033\\' $@; }
fi

# 16 color space
put_template 0 $base00 # Black
put_template 1 $base08 # Red
put_template 2 $base0B # Green
put_template 3 $base0A # Yellow
put_template 4 $base0D # Blue
put_template 5 $base0E # Magenta
put_template 6 $base0C # Cyan
put_template 7 $base05 # White
put_template 8 $base03 # Bright Black
put_template 9 $base08 # Bright Red
put_template 10 $base0B # Bright Green
put_template 11 $base0A # Bright Yellow
put_template 12 $base0D # Bright Blue
put_template 13 $base0E # Bright Magenta
put_template 14 $base0C # Bright Cyan
put_template 15 $base07 # Bright White

# 256 color space
put_template 234 $base00
put_template 235 $base01
put_template 237 $base02
put_template 240 $base03
put_template 250 $base04
put_template 188 $base05
put_template 254 $base06
put_template 231 $base07
put_template 131 $base08
put_template 215 $base09
put_template 222 $base0A
put_template 107 $base0B
put_template 116 $base0C
put_template 74 $base0D
put_template 139 $base0E
put_template 130 $base0F

# foreground / background / cursor color
if [ -n "$ITERM_SESSION_ID" ]; then
  # iTerm2 proprietary escape codes
  put_template_custom Pg d8d8d8 # foreground
  put_template_custom Ph 181818 # background
  put_template_custom Pi d8d8d8 # bold color
  put_template_custom Pj 383838 # selection color
  put_template_custom Pk d8d8d8 # selected text color
  put_template_custom Pl d8d8d8 # cursor
  put_template_custom Pm 181818 # cursor text
else
  put_template_var 10 $color_foreground
  if [ "$BASE16_SHELL_SET_BACKGROUND" != false ]; then
    put_template_var 11 $color_background
    if [ "${TERM%%-*}" = "rxvt" ]; then
      put_template_var 708 $color_background # internal border (rxvt)
    fi
  fi
  put_template_custom 12 ";7" # cursor (reverse video)
fi

# clean up
unset -f put_template
unset -f put_template_var
unset -f put_template_custom
unset base00
unset base01
unset base02
unset base03
unset base04
unset base05
unset base06
unset base07
unset base08
unset base09
unset base0A
unset base0B
unset base0C
unset base0D
unset base0E
unset base0F
unset color_foreground
unset color_background
|
<filename>core/src/main/java/demo/java/v2c10/WebService1/WarehouseClient.java
package demo.java.v2c10.WebService1;

import java.rmi.*;
import javax.naming.*;

//import com.horstmann.corejava.server.*;

/**
 * The client for the warehouse program.
 * @version 1.0 2007-10-09
 * @author <NAME>
 */
public class WarehouseClient
{
    // NOTE(review): the body is intentionally commented out — the example
    // only works once the generated WarehouseService web-service stubs are
    // available on the classpath.
    public static void main(String[] args) throws NamingException, RemoteException
    {/*
        WarehouseService service = new WarehouseService();
        Warehouse port = service.getPort(Warehouse.class);
        String descr = "Blackwell Toaster";
        double price = port.getPrice(descr);
        System.out.println(descr + ": " + price);
    */}
}
|
// BOJ 5426: Secret Letter
// 2019.10.10
// Straightforward simulation / implementation problem.
#include<iostream>
#include<cmath>
#include<string>
using namespace std;

// The encoded message is written into a size x size grid row by row; decoding
// reads it back out column by column starting from the rightmost column.
// (Assumes problem constraints keep size <= 100, matching the 101x101 board.)
char board[101][101];

int main()
{
    int t;
    cin >> t;
    while (t-- > 0)
    {
        string s;
        cin >> s;
        // The message length is a perfect square; size is the grid dimension.
        int size = sqrt(s.size());
        int cnt = 0;
        // Fill the board row by row with the encoded string.
        for (int i = 0; i < size; i++)
        {
            for (int j = 0; j < size; j++)
            {
                board[i][j] = s[cnt++];
            }
        }
        // Decode: print each column top-to-bottom, from the last column to the first.
        for (int i = size - 1; i >= 0; i--)
        {
            for (int j = 0; j < size; j++)
            {
                cout << board[j][i];
            }
        }
        cout << "\n";
    }
    return 0;
}
|
<filename>test/attr.test.js
require("./chai.helper");

var domHelper = require("./dom.helper");

// Tests for the library's $().attr() getter/setter: plain attributes, the
// object batch form, pseudo attributes holding non-string values, and
// removal via a null value.
describe("attr", function () {

    beforeEach(function () {
        // Two .red divs with distinct ids and data-spong values.
        domHelper(
            "<div id=\"single1\" class=\"red\" data-spong=\"bang\"></div>" +
            "<div id=\"single2\" class=\"red\" data-spong=\"bloing\"></div>"
        );
    });

    it("should set an attribute on a collection of elements", function () {
        var divs = $("div");
        divs.attr("data-foo", "bar");
        var rawDivs = document.querySelectorAll("div");
        for (var i = 0; i < rawDivs.length; i += 1) {
            rawDivs[i].getAttribute("data-foo").should.equal("bar");
        }
    });

    it("should set attributes from an object on a collection of elements", function () {
        var divs = $("div");
        divs.attr({
            "data-foo": "bar",
            "data-ex": "why",
            "data-alpha": "zed"
        });
        var rawDivs = document.querySelectorAll("div");
        for (var i = 0; i < rawDivs.length; i += 1) {
            rawDivs[i].getAttribute("data-foo").should.equal("bar");
            rawDivs[i].getAttribute("data-ex").should.equal("why");
            rawDivs[i].getAttribute("data-alpha").should.equal("zed");
        }
    });

    it("should set a (pseudo) attribute to a function, object or array", function () {
        // function
        var fn = function () {
            return "bar";
        };
        $(".red").attr("data-foo", fn);
        $(document.getElementById("single1")).attr("data-foo").should.equal(fn);
        $(document.getElementById("single2")).attr("data-foo").should.equal(fn);
        // object
        var obj = {
            a: 1,
            b: 2
        };
        $(".red").attr("data-foo", obj);
        $(document.getElementById("single1")).attr("data-foo").should.equal(obj);
        $(document.getElementById("single2")).attr("data-foo").should.equal(obj);
        // array
        var arr = ["a", "b", "c"];
        $(".red").attr("data-foo", arr);
        $(document.getElementById("single1")).attr("data-foo").should.equal(arr);
        $(document.getElementById("single2")).attr("data-foo").should.equal(arr);
    });

    it("should remove an attribute if value is set to null", function () {
        $(".red").attr("data-greet", "hello");
        $(".red").attr("data-greet", null);
        var elt1 = document.getElementById("single1");
        var value1 = elt1.getAttribute("data-greet");
        (value1 === null).should.be.true
        var elt2 = document.getElementById("single2");
        var value2 = elt2.getAttribute("data-greet");
        (value2 === null).should.be.true
    });

    it("should remove a (pseudo) attribute if value is set to null", function () {
        $(".red").attr("data-greet", {a: 1, b: 2});
        $(".red").attr("data-greet", null);
        var elt1 = document.getElementById("single1");
        var value1 = elt1.getAttribute("data-greet");
        (value1 === null).should.be.true
        var elt2 = document.getElementById("single2");
        var value2 = elt2.getAttribute("data-greet");
        (value2 === null).should.be.true
    });

    it("should get an attribute's value from a single element", function () {
        var actual = $("#single1").attr("data-spong");
        actual.should.equal("bang");
    });

    it("should get the first element's attribute value if called on an af collection", function () {
        var actual = $(".red").attr("data-spong");
        actual.should.equal("bang");
    });
});
|
#include "catch.hpp"

#include "eval.hpp"

// Exercises the calculator's multiplication operator: zero/identity cases,
// chained and parenthesized associativity, and sign handling.
SCENARIO("The evaluator shall produce correct multiplication results", "[calc][op][mul]") {
    CHECK(eval("0 * 0") == 0);
    CHECK(eval("0 * 1") == 0);
    CHECK(eval("1 * 0") == 0);
    CHECK(eval("1 * 1") == 1);
    CHECK(eval("1 * 2") == 2);
    CHECK(eval("2 * 1") == 2);
    CHECK(eval("2 * 3 * 4") == 24);
    CHECK(eval("(2 * 3) * 4") == 24);
    CHECK(eval("2 * (3 * 4)") == 24);
    CHECK(eval("-1 * 1") == -1);
    CHECK(eval("1 * -1") == -1);
    CHECK(eval("-1 * -1") == 1);
}
|
#!/bin/sh -l
# Run the mzap binary on the action's first argument and expose its output
# to the GitHub Actions workflow.
# FIX: quote "$1" so arguments containing spaces survive word splitting.
out=$(/app/mzap "$1")
# NOTE(review): the `set-output` workflow command is deprecated in favour of
# appending to "$GITHUB_OUTPUT"; kept as-is to preserve existing behaviour
# on older runners.
echo "::set-output name=output::$out"
|
// Print the tail of the string starting at index 2 ("ve").
const milena = "love";
console.log(milena.substring(2));
|
# Abort immediately if any dump or upload step fails.
set -e

# Dump both StopStalk databases to the local log directory...
/usr/bin/mysqldump -u root stopstalkdb > /root/stopstalk-logs/stopstalkdb.sql
sleep 10
/usr/bin/mysqldump -u root uvajudge > /root/stopstalk-logs/uvajudge.sql
sleep 10
# ...then copy only the .sql files to the S3 backup bucket.
/usr/local/bin/aws s3 cp /root/stopstalk-logs/ s3://stopstalk-db-dumps/mysql/ --recursive --exclude "*" --include "*.sql"
|
# Create a drop-down list (combobox) to select an item.
# FIX: the original used `Select(...)`, which is not a Tkinter widget — the
# standard drop-down is ttk.Combobox. `root` is assumed to be an existing
# Tk() instance created elsewhere in the program.
from tkinter import StringVar
from tkinter.ttk import Combobox

# Variable that tracks the current selection.
selected_item = StringVar()

# Combobox wired to the variable; `values` lists the selectable items.
select_box = Combobox(root, values=["Item 1", "Item 2", "Item 3"],
                      textvariable=selected_item)
|
<reponame>mintygargle/crow-concepts-site
// NOTE(review): Babel-compiled build artifact (see the sourceMappingURL at
// the bottom) — edit the original SecondaryButton.styled source instead of
// this file.
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports["default"] = void 0;

var _styledComponents = _interopRequireDefault(require("styled-components"));

var _SecondaryButton = _interopRequireDefault(require("./SecondaryButton"));

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }

function _templateObject() {
  var data = _taggedTemplateLiteral(["\n background-color: ", ";\n color: ", ";\n border-radius: ", ";\n box-shadow: none;\n height: fit-content;\n\n a {\n color: ", ";\n }\n\n &:hover {\n filter: brightness(110%);\n }\n\n &:active {\n filter: brightness(85%);\n }\n"]);

  _templateObject = function _templateObject() {
    return data;
  };

  return data;
}

function _taggedTemplateLiteral(strings, raw) { if (!raw) { raw = strings.slice(0); } return Object.freeze(Object.defineProperties(strings, { raw: { value: Object.freeze(raw) } })); }

// Re-styles the base SecondaryButton with theme colors and hover/active filters.
var SecondaryButton = (0, _styledComponents["default"])(_SecondaryButton["default"])(_templateObject(), function (props) {
  return props.theme.white;
}, function (props) {
  return props.theme.primary;
}, function (props) {
  return props.theme.border_radius;
}, function (props) {
  return props.theme.primary;
});
var _default = SecondaryButton;
exports["default"] = _default;
//# sourceMappingURL=SecondaryButton.styled.js.map
|
#!/usr/bin/env bash
set -euo pipefail

# Provisions an example EKS cluster (VPC, cluster and two node groups) via
# CloudFormation, then installs helm and the kiam IAM proxy on top of it.
# Required environment variables: AWS_DEFAULT_REGION, KEY_NAME (EC2 key pair).
echo "Setting up EKS cluster with cloudformation, helm and kiam..."
echo "AWS region: $AWS_DEFAULT_REGION"
echo "EC2 ssh key name: $KEY_NAME"
echo "Checking helm install"
helm version --client
echo "Checking aws install"
aws --version
# Check the key pair exists (fails fast under `set -e` if it does not)
aws ec2 describe-key-pairs --key-name "$KEY_NAME"
CLUSTER_NAME="example-kiam-cluster"
STACK_NAME=$CLUSTER_NAME
ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text)
# This command creates the full environment for the kiam example to run.
# Since it necessarily creates roles, the `--capabilities CAPABILITY_NAMED_IAM` flag
# is required.
echo "Creating cloudformation stack..."
aws cloudformation create-stack \
    --capabilities CAPABILITY_NAMED_IAM \
    --stack-name "$CLUSTER_NAME" \
    --parameters ParameterKey=EKSClusterName,ParameterValue="$CLUSTER_NAME" ParameterKey=KeyName,ParameterValue="$KEY_NAME" \
    --template-body file://cloudformation-vpc-eks-kiam.yaml
echo "Waiting for the $STACK_NAME stack to finish creating. The generation of an EKS cluster can take some time (~15 minutes). You can watch the progress in the AWS console, or why not go and get a coffee?"
aws cloudformation wait stack-create-complete --stack-name "$STACK_NAME"
echo "Retrieve the connection details for the new cluster..."
aws eks update-kubeconfig --name "$CLUSTER_NAME"
# Added new context arn:aws:eks:eu-west-1:905282256883:cluster/eks-cloudformation-helm-kiam to /home/username/.kube/config
# Output the roles of the two node groups
NODE_INSTANCE_ROLE=$(aws cloudformation describe-stacks --stack-name "$STACK_NAME" --query 'Stacks[0].Outputs[?OutputKey==`NodeInstanceRole`].OutputValue' --output text)
KIAM_SERVER_NODE_INSTANCE_ROLE=$(aws cloudformation describe-stacks --stack-name "$STACK_NAME" --query 'Stacks[0].Outputs[?OutputKey==`KiamServerNodeInstanceRole`].OutputValue' --output text)
echo "Found node instance role: $NODE_INSTANCE_ROLE"
echo "Found kiam server node instance role: $KIAM_SERVER_NODE_INSTANCE_ROLE"
echo "Ensure that the nodes from the worker groups can join the cluster."
# Note, the file must contain the above node instance role so we insert it before applying the template.
cp templates/aws-auth-cm.yaml /tmp/aws-auth-cm-temp.yaml
# Replace the longer KIAM_SERVER_... placeholder first so the NODE_INSTANCE_ROLE
# substitution cannot corrupt it (the former contains the latter as a substring).
sed -i "s@KIAM_SERVER_NODE_INSTANCE_ROLE@$KIAM_SERVER_NODE_INSTANCE_ROLE@g" /tmp/aws-auth-cm-temp.yaml
sed -i "s@NODE_INSTANCE_ROLE@$NODE_INSTANCE_ROLE@g" /tmp/aws-auth-cm-temp.yaml
kubectl apply -f /tmp/aws-auth-cm-temp.yaml
rm /tmp/aws-auth-cm-temp.yaml
echo "Wait for the nodes to become visible and Ready."
while [ "$(kubectl get nodes | grep -c ' Ready ')" != 2 ]; do echo "$(date): Looking for running nodes..." && sleep 2 ; done
echo "Nodes all found to be Ready."
kubectl get nodes
echo "Allowing add-ons to administer themselves..."
kubectl create clusterrolebinding add-on-cluster-admin --clusterrole=cluster-admin --serviceaccount=kube-system:default
echo "Installing helm..."
curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3
chmod 700 get_helm.sh
# BUG FIX: the installer was previously run twice with a stray `cp helm /usr/sbin`
# in between; that `helm` file never exists in the working directory, so the cp
# aborted the whole script under `set -e`. The get-helm-3 script installs the
# binary onto the PATH itself, so a single run is all that is needed.
./get_helm.sh
echo "Preparing kiam-secrets..."
helm upgrade --install kiam-secrets kiam-secrets --namespace kube-system
echo "Installing kiam..."
helm repo add bitnami https://charts.bitnami.com/bitnami
helm repo update
helm install kiam bitnami/kiam --namespace kube-system --values values.yaml
#helm upgrade --install kiam stable/kiam --namespace kube-system --values values.yaml
echo "Creating the example namespace and business logic pods..."
kubectl apply -f templates/example-namespace.yaml
helm upgrade --install example-business-logic example-business-logic --namespace example
echo "**********************************************************"
echo "Complete!"
echo "You can now ssh onto the example business logic pod to check the kiam installation."
echo "See the blog post for more details at https://bambooengineering.io/2019/06/14/kiam-on-eks-with-helm.html"
|
<gh_stars>1-10
/*
*
*/
package net.community.chest.eclipse.wst;
import java.io.IOException;
import java.io.StreamCorruptedException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import javax.xml.transform.TransformerException;
import net.community.chest.dom.DOMUtils;
import net.community.chest.eclipse.AbstractEclipseFileTransformer;
import net.community.chest.io.FileUtil;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
* <P>Copyright GPLv2</P>
*
* @author <NAME>.
* @since Apr 26, 2009 2:24:30 PM
*/
public class WstComponentsFileTransformer extends AbstractEclipseFileTransformer {
// Stateless beyond the base transformer; default construction is sufficient.
public WstComponentsFileTransformer ()
{
super();
}
// NOTE !!! does not generate a "closed" entry
// Writes the opening project-modules root element (without its closing tag).
// Throws TransformerException when root/writer is null, or - when element
// name validation is enabled - when the root tag is not the expected
// project-modules element name.
public Writer writeRootElement (Element root, final Writer w) throws TransformerException, IOException
{
if ((null == root) || (null == w))
throw new TransformerException("transformRootElement() no " + Element.class.getSimpleName() + "/" + Writer.class.getSimpleName() + " instance(s)");
if (isValidatingElementName())
{
final String tagName=root.getTagName();
if (!WstUtils.PROJ_MODULES_ELEM_NAME.equals(tagName))
throw new TransformerException("transformRootElement(" + tagName + ") bad " + Element.class.getSimpleName() + " name - expected " + WstUtils.PROJ_MODULES_ELEM_NAME);
}
return writeElementData(root, w, false, false);
}
/*
 * @see net.community.chest.io.dom.AbstractTransformer#writeElementData(org.w3c.dom.Element, java.io.Writer, boolean, boolean, java.lang.CharSequence)
 */
@Override
public Writer writeElementData (Element elem, Writer org, boolean followChildren, boolean closeIt, CharSequence indent)
throws IOException
{
final String tagName=elem.getTagName();
// special handling because its children have a value string
if (WstUtils.DEPMODULE_ELEM_NAME.equalsIgnoreCase(tagName))
{
// Write the dependent-module open tag only, then recurse over its child
// elements explicitly so their text values get the handling below.
Writer w=
super.writeElementData(elem, org, false, false, indent);
final Collection<? extends Element> chList=
DOMUtils.extractAllNodes(Element.class, elem, Node.ELEMENT_NODE);
if ((chList != null) && (chList.size() > 0))
{
for (final Element se : chList)
w = writeElementData(se, w, true, true, indent);
}
// Honor the caller's closeIt flag since we suppressed the automatic close above.
if (closeIt)
return FileUtil.writeln(w, indent + "</" + tagName + ">");
else
return w;
}
if (WstUtils.DEPTYPE_ELEM_NAME.equalsIgnoreCase(tagName))
{
// dependency-type carries its payload as element text; emit it as a
// single inline <tag>value</tag> line. A missing value means the input
// document is corrupt.
final String val=DOMUtils.getElementStringValue(elem);
if ((null == val) || (val.length() <= 0))
throw new StreamCorruptedException("writeElementData(" + DOMUtils.toString(elem) + ") missing value");
return FileUtil.writeln(org, indent + "\t<" + tagName + ">" + val + "</" + tagName + ">");
}
// Everything else falls through to the default base-class serialization.
return super.writeElementData(elem, org, followChildren, closeIt, indent);
}
// Serializes one module entry (including its closing tag) at two-tab depth.
// Throws TransformerException when either argument is null.
public Writer transformModuleEntry (Element elem, final Writer w) throws TransformerException, IOException
{
if ((null == elem) || (null == w))
throw new TransformerException("transformModuleEntry() no " + Element.class.getSimpleName() + "/" + Writer.class.getSimpleName() + " instance(s)");
return writeElementData(elem, w, true, true, "\t\t");
}
// Sorts the collected dependent-module elements (ascending, in place) and
// writes each one. A null/empty list is a no-op that returns the writer as-is.
public Writer transformDependencyModules (final List<Element> el, final Writer org) throws TransformerException, IOException
{
final int numElems=(null == el) ? 0 : el.size();
if (numElems <= 0)
return org;
if (numElems > 1)
Collections.sort(el, WstDepModuleEntryComparator.ASCENDING);
Writer w=org;
for (final Element elem : el)
w = transformModuleEntry(elem, w);
return w;
}
// Walks the child node list: non-dependent-module elements are written
// immediately in document order, while dependent-module elements are
// collected and emitted sorted at the end (via the List overload above).
public Writer transformDependencyModules (NodeList nodes, Writer org) throws TransformerException, IOException
{
final int numNodes=(null == nodes) /* OK */ ? 0 : nodes.getLength();
if (numNodes <= 0)
return org;
Writer w=org;
List<Element> ml=null;
for (int nIndex=0; nIndex < numNodes; nIndex++)
{
final Node n=nodes.item(nIndex);
if ((null == n) || (n.getNodeType() != Node.ELEMENT_NODE))
continue;
final Element elem=(Element) n;
final String tagName=elem.getTagName();
if (WstUtils.DEPMODULE_ELEM_NAME.equalsIgnoreCase(tagName))
{
// Defer dependent-module entries; they are sorted before being written.
if (null == ml)
ml = new ArrayList<Element>(numNodes);
ml.add(elem);
continue;
}
w = transformModuleEntry(elem, w);
}
return transformDependencyModules(ml, w);
}
// Serializes the whole document from the root: root open tag, each wb-module
// section (its dependency modules sorted), then the root closing tag.
public Writer transformDependencyModules (Element root, Writer org)
throws TransformerException, IOException
{
// write root entry
Writer w=writeRootElement(root, org);
final Collection<? extends Element> el=DOMUtils.extractAllNodes(Element.class, root, Node.ELEMENT_NODE);
if ((el != null) && (el.size() > 0))
{
for (final Element elem : el)
{
final String tagName=(null == elem) ? null : elem.getTagName();
if (WstUtils.WEB_MODULE_ELEM_NAME.equalsIgnoreCase(tagName))
{
// Open the wb-module tag, write its (sorted) children, close it manually.
w = writeElementData(elem, w, false, false /* don't close it */, "\t");
w = transformDependencyModules(elem.getChildNodes(), w);
w = FileUtil.writeln(w, "\t</" + tagName + ">");
}
}
}
// write closing entry
final String tagName=root.getTagName();
w = FileUtil.writeln(w, "</" + tagName + ">");
return w;
}
/*
 * @see net.community.chest.io.dom.AbstractTransformer#transformRootElement(org.w3c.dom.Element, java.io.Writer)
 */
@Override
public Writer transformRootElement (Element root, Writer org)
throws TransformerException, IOException
{
// Sorted output uses the custom traversal above; otherwise delegate to the
// default depth-first serialization.
if (isSortedOutputEntries())
return transformDependencyModules(root, org);
else
return writeElementData(root, org, true, true, "\t");
}
// Shared, stateless default instance.
public static final WstComponentsFileTransformer DEFAULT=new WstComponentsFileTransformer();
}
|
<gh_stars>0
package com.opalfire.foodorder.activities;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.Toast;
import com.braintreepayments.cardform.view.CardForm;
import com.opalfire.foodorder.HomeActivity;
import com.opalfire.foodorder.R;
import com.opalfire.foodorder.build.api.ApiClient;
import com.opalfire.foodorder.build.api.ApiInterface;
import com.opalfire.foodorder.build.configure.BuildConfigure;
import com.opalfire.foodorder.helper.CustomDialog;
import com.opalfire.foodorder.models.Message;
import com.opalfire.foodorder.utils.Utils;
import com.stripe.android.Stripe;
import com.stripe.android.TokenCallback;
import com.stripe.android.model.Card;
import com.stripe.android.model.Token;
import org.json.JSONObject;
import java.util.regex.Pattern;
import butterknife.BindView;
import butterknife.ButterKnife;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
import uk.co.chrisjenx.calligraphy.CalligraphyContextWrapper;
/**
 * Screen that collects card details via a Braintree CardForm, tokenizes them
 * with Stripe, and registers the resulting token against the user's account
 * through the REST API.
 */
public class AddCardActivity extends AppCompatActivity {
    // Matches progressively-typed card numbers as dash-separated 4-digit groups.
    static final Pattern CODE_PATTERN = Pattern.compile("([0-9]{0,4})|([0-9]{4}-)+|([0-9]{4}-[0-9]{0,4})+");
    // Stripe token id produced by a successful tokenization.
    String Card_Token = "";
    Activity activity;
    ApiInterface apiInterface = ApiClient.getRetrofit().create(ApiInterface.class);
    Context context;
    // Progress dialog shown while tokenizing / calling the API.
    CustomDialog customDialog;
    @BindView(R.id.toolbar)
    Toolbar toolbar;
    @BindView(R.id.card_form)
    CardForm cardForm;
    @BindView(R.id.addCard)
    Button addCard;
    @BindView(R.id.activity_add_card)
    LinearLayout activityAddCard;

    @Override
    protected void onCreate(Bundle bundle) {
        super.onCreate(bundle);
        setContentView(R.layout.activity_add_card);
        ButterKnife.bind(this);
        context = this;
        activity = this;
        setSupportActionBar(toolbar);
        toolbar.setNavigationIcon((int) R.drawable.ic_back);
        toolbar.setNavigationOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                onBackPressed();
            }
        });
        // Number, expiry and CVV are mandatory; postal code and phone are not.
        cardForm.cardRequired(true)
                .expirationRequired(true)
                .cvvRequired(true)
                .postalCodeRequired(false)
                .mobileNumberRequired(false)
                .actionLabel("Add CardDetails")
                .setup(activity);
        addCard.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                customDialog = new CustomDialog(context);
                customDialog.setCancelable(false);
                customDialog.show();
                // Validate that every field is present and non-empty before tokenizing.
                if ((cardForm.getCardNumber() != null) && (cardForm.getExpirationMonth() != null) && (cardForm.getExpirationYear() != null) && (cardForm.getCvv() != null)) {
                    if ((!cardForm.getCardNumber().equals("")) && (!cardForm.getExpirationMonth().equals("")) && (!cardForm.getExpirationYear().equals("")) && (!cardForm.getCvv().equals(""))) {
                        int i = Integer.parseInt(cardForm.getExpirationMonth());
                        int j = Integer.parseInt(cardForm.getExpirationYear());
                        String str = cardForm.getCvv();
                        String localStringBuilder = "CardDetails Number: " +
                                cardForm.getCardNumber() +
                                "Month: " +
                                i +
                                " Year: " +
                                j;
                        Utils.print("MyTest", localStringBuilder);
                        Card card = new Card(cardForm.getCardNumber(), i, j, str);
                        try {
                            // Tokenize with Stripe; the token (not the raw card) is sent to our backend.
                            new Stripe(AddCardActivity.this, BuildConfigure.STRIPE_PK).createToken(card, new TokenCallback() {
                                @Override
                                public void onError(Exception paramAnonymous2Exception) {
                                    displayMessage(context.getResources().getString(R.string.enter_card_details));
                                    if ((customDialog != null) && (customDialog.isShowing())) {
                                        customDialog.dismiss();
                                    }
                                }

                                @Override
                                public void onSuccess(Token token) {
                                    Utils.print("CardToken:", token.getId());
                                    Utils.print("CardToken:", token.getCard().getLast4());
                                    Card_Token = token.getId();
                                    addCardToAccount(Card_Token);
                                }
                            });
                            return;
                        } catch (Exception e) {
                            e.printStackTrace();
                            if (customDialog == null) {
                                return;
                            }
                        }
                        // Reached only when createToken threw synchronously: hide the spinner.
                        if (customDialog.isShowing()) {
                            customDialog.dismiss();
                        }
                    } else {
                        if ((customDialog != null) && (customDialog.isShowing())) {
                            customDialog.dismiss();
                        }
                        displayMessage(context.getResources().getString(R.string.enter_card_details));
                    }
                } else {
                    if ((customDialog != null) && (customDialog.isShowing())) {
                        customDialog.dismiss();
                    }
                    displayMessage(context.getResources().getString(R.string.enter_card_details));
                }
            }
        });
    }

    /**
     * Sends the Stripe token to the backend; on success shows the server
     * message and closes the screen, otherwise surfaces the server error.
     */
    public void addCardToAccount(String str) {
        Log.e("stripe_token", str);
        apiInterface.addCard(str).enqueue(new Callback<Message>() {
            @Override
            public void onResponse(Call<Message> call, Response<Message> response) {
                customDialog.dismiss();
                if (response.isSuccessful()) {
                    Toast.makeText(context, response.body().getMessage(), Toast.LENGTH_SHORT).show();
                    finish();
                    return;
                }
                try {
                    Toast.makeText(context, new JSONObject(response.errorBody().string()).optString("error"), Toast.LENGTH_SHORT).show();
                } catch (Exception e) {
                    // FIX: previously swallowed silently; keep the toast best-effort
                    // but record the parse failure for debugging.
                    e.printStackTrace();
                }
            }

            @Override
            public void onFailure(Call<Message> call, Throwable t) {
                t.printStackTrace();
            }
        });
    }

    /** Shows {@code str} in a short Snackbar anchored to a non-null view. */
    public void displayMessage(String str) {
        // FIX: getCurrentFocus() can be null (no focused field), which made
        // Snackbar.make() throw NPE; fall back to the bound root layout.
        // Snackbar.LENGTH_SHORT is the named constant for the previous -1.
        View anchor = getCurrentFocus();
        if (anchor == null) {
            anchor = activityAddCard;
        }
        Snackbar.make(anchor, str, Snackbar.LENGTH_SHORT).setAction("Action", null).show();
    }

    /** Clears the back stack and returns to the home screen. */
    public void GoToBeginActivity() {
        Intent intent = new Intent(activity, HomeActivity.class);
        // FIX: FLAG_ACTIVITY_CLEAR_TASK is only valid together with
        // FLAG_ACTIVITY_NEW_TASK; without it startActivity() throws.
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK);
        startActivity(intent);
        activity.finish();
    }

    @Override
    public void onBackPressed() {
        super.onBackPressed();
    }

    @Override
    protected void attachBaseContext(Context context) {
        // Wrap the base context so Calligraphy can inject custom fonts.
        super.attachBaseContext(CalligraphyContextWrapper.wrap(context));
    }
}
|
/**
 * Computes and prints the total and percentage for a fixed set of five
 * subject marks (each out of 100).
 */
public class MarksProcessor {
    public static void main(String[] args) {
        // Subject scores in order: English, Chemistry, Maths, CS, Physics.
        int[] marks = { 90, 95, 99, 97, 85 };

        int totalMarks = 0;
        for (int mark : marks) {
            totalMarks += mark;
        }

        // Five subjects at 100 each -> maximum total of 500; use a double
        // literal so the division is floating-point.
        double percentage = (totalMarks / 500.0) * 100;

        System.out.println("Total Marks : " + totalMarks);
        System.out.println("Percentage : " + percentage + "%");
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.