text stringlengths 1 1.05M |
|---|
#!/bin/bash
#SBATCH --job-name=fastMRI
#SBATCH --ntasks=2
#SBATCH --cpus-per-task=2
#SBATCH --mem=24000
#SBATCH --gres=gpu:4
#SBATCH -o /home/%u/%j-%x-on-%N.out
#SBATCH -e /home/%u/%j-%x-on-%N.err
#SBATCH --mail-type=ALL
#Timelimit format: "hours:minutes:seconds" -- max is 24h
#SBATCH --time=24:00:00
# Tell's pipenv to install the virtualenvs in the cluster folder
export WORKON_HOME=/cluster/$(whoami)/.cache
export XDG_CACHE_DIR=/cluster/$(whoami)/.cache
export PYTHONUSERBASE=/cluster/$(whoami)/.python_packages
export PATH=/cluster/folle/miniconda/bin:$PATH
echo "Your job is running on" $(hostname)
conda init bash
source /home/folle/.bashrc
conda activate sr
# BUG FIX: "pip install dev-requirements.txt" tries to install a PyPI package
# literally named "dev-requirements.txt"; -r installs FROM the requirements file.
pip install -r dev-requirements.txt
python train_varnet_demo.py
# Run all experiment scripts sequentially.
# Experiment 1: scenarios 1a/1b (random graph, preferential attachment) and 2a/2b (k-/d-uniform).
julia exp1/scenario_1a_randomgraph.jl
julia exp1/scenario_2a_kuniform.jl
julia exp1/scenario_1b_preferential_attachment.jl
julia exp1/scenario_2b_duniform.jl
# Experiment 2: scenarios 3a-3d (same four graph models).
julia exp2/scenario_3a_random.jl
julia exp2/scenario_3c_kuniform.jl
julia exp2/scenario_3b_preferential_attachment.jl
julia exp2/scenario_3d_duniform.jl
# Game-of-Thrones dataset experiments.
julia got/got_p_experiment.jl
julia got/got_experiment_random.jl
|
# Compile and link the solution. FIX: the header listADTArr.h was passed to g++ as
# a translation unit (g++ would try to precompile it); headers are pulled in via
# #include and must not appear on the compile/link command line.
g++ UnionIntersection.cpp listADTArr.cpp -o solution
|
<reponame>keller35/ssh2-sftp-client
'use strict';
const chai = require('chai');
const expect = chai.expect;
const chaiSubset = require('chai-subset');
const chaiAsPromised = require('chai-as-promised');
const utils = require('../src/utils');
chai.use(chaiSubset);
chai.use(chaiAsPromised);
// Unit tests for utils.fmtError(): the Error objects it builds (message
// formatting, default name/code, retry-count suffix, chaining of custom
// errors, and special-cased socket error codes).
describe('fmtError() tests', function () {
it('fmtError returns Error object', function () {
return expect(utils.fmtError('test msg', 'test', 'error code')).to.be.an(
'error'
);
});
// Message is prefixed with the caller-supplied name; code is passed through.
it('fmtError has expected values', function () {
return expect(
utils.fmtError('test msg', 'name', 'error code')
).to.containSubset({
message: 'name: test msg',
code: 'error code'
});
});
// A numeric 4th argument is reported as "... after N attempts".
it('fmtError has retry count', function () {
return expect(
utils.fmtError('test msg', 'name', 'error code', 4)
).to.containSubset({
message: 'name: test msg after 4 attempts',
code: 'error code'
});
});
it('fmtError has default error code', function () {
return expect(utils.fmtError('test msg', 'nme').code).to.equal(
'ERR_GENERIC_CLIENT'
);
});
// When no name is given, errors are attributed to 'sftp'.
it('fmtError has default name', function () {
return expect(utils.fmtError('test msg').message).to.equal(
'sftp: test msg'
);
});
it('fmtError handles null error', function () {
return expect(utils.fmtError(undefined)).to.containSubset({
message: 'sftp: Undefined error - probably a bug!',
code: 'ERR_GENERIC_CLIENT'
});
});
// Wrapping an already-formatted error chains the names with '->'.
it('fmtError handles custom error 1', function () {
return expect(
utils.fmtError(utils.fmtError('Original Error', 'someMethod'), 'top')
).to.containSubset({
message: 'top->someMethod: Original Error',
code: 'ERR_GENERIC_CLIENT',
custom: true
});
});
it('fmtError custom errors', function () {
let e1 = utils.fmtError('Original Error', 'somefunc');
return expect(utils.fmtError(e1, 'top')).to.containSubset({
message: 'top->somefunc: Original Error',
code: 'ERR_GENERIC_CLIENT',
custom: true
});
});
// The following cases verify per-code message rewriting for common
// low-level network errors (DNS failure, refused and reset connections).
it('fmtError error code ENOTFOUND', function () {
let e = new Error('Not Found');
e.code = 'ENOTFOUND';
e.level = 'Client';
e.hostname = 'bogus.com';
return expect(utils.fmtError(e, 'func')).to.containSubset({
message: 'func: Client error. Address lookup failed for host bogus.com',
code: 'ENOTFOUND'
});
});
it('fmtError error code ECONNREFUSED', function () {
let e = new Error('Connection refused');
e.code = 'ECONNREFUSED';
e.level = 'Server';
e.address = '1.1.1.1';
return expect(utils.fmtError(e, 'func')).to.containSubset({
message: 'func: Server error. Remote host at 1.1.1.1 refused connection',
code: 'ECONNREFUSED'
});
});
it('fmtError error code ECONNRESET', function () {
let e = new Error('Connection reset');
e.code = 'ECONNRESET';
return expect(utils.fmtError(e, 'func')).to.containSubset({
message: 'func: Remote host has reset the connection: Connection reset',
code: 'ECONNRESET'
});
});
});
// Tests for utils.errorListener(): with a reject callback the listener rejects
// the pending promise; without one it throws the formatted error instead.
describe('errorListener', function () {
// Minimal stand-in for the client object the listener logs through / flags on.
let client = {
debugMsg: (msg) => {
//console.log(msg);
null;
},
errorHandled: false,
endCalled: false
};
beforeEach(function () {
client.errorHandled = false;
client.endCalled = false;
});
it('error is rejected', function () {
let p = new Promise((resolve, reject) => {
let handler = utils.errorListener(client, 'Test1', reject);
let e = new Error('A plain error');
e.code = 'GENERIC ERROR';
handler(e);
});
return expect(p).to.be.rejectedWith(/Test1: A plain error/);
});
it('error is thrown', function () {
let handler = utils.errorListener(client, 'Test2');
let e = utils.fmtError('A thrown error');
e.code = 'GENERIC ERROR';
let fn = () => {
handler(e);
};
return expect(fn).to.throw(/Test2->sftp: A thrown error/);
});
});
// Tests for utils.endListener(): an unexpected 'end' event either rejects the
// pending promise (when a reject callback is supplied) or throws.
describe('endListener', function () {
// Minimal stand-in for the client object the listener logs through / flags on.
let client = {
debugMsg: (msg) => {
//console.log(msg);
null;
},
errorHandled: false,
endCalled: false
};
beforeEach(function () {
client.errorHandled = false;
client.endCalled = false;
});
it('end rejected', function () {
let p = new Promise((resolve, reject) => {
let handler = utils.endListener(client, 'Test3', reject);
handler();
});
return expect(p).to.be.rejectedWith(/Test3: Unexpected end event raised/);
});
it('end raises error', function () {
let handler = utils.endListener(client, 'Test4');
return expect(handler).to.throw(/Test4: Unexpected end event raised/);
});
});
// Tests for utils.closeListener(): mirrors the endListener contract for the
// 'close' event — reject when a callback is given, otherwise throw.
describe('closeListener', function () {
// Minimal stand-in for the client object the listener logs through / flags on.
let client = {
debugMsg: (msg) => {
//console.log(msg);
null;
},
errorHandled: false,
endCalled: false
};
beforeEach(function () {
client.errorHandled = false;
client.endCalled = false;
});
it('close rejected', function () {
let p = new Promise((resolve, reject) => {
let handler = utils.closeListener(client, 'Test5', reject);
handler();
});
return expect(p).to.be.rejectedWith(/Test5: Unexpected close event raised/);
});
it('close throws error', function () {
let handler = utils.closeListener(client, 'Test6');
return expect(handler).to.throw(/Test6: Unexpected close event raised/);
});
});
|
// Minimal module export carrying only a display name.
// NOTE(review): "<NAME>" looks like an unfilled template placeholder — confirm
// the intended value before release.
export default {
name: "<NAME>"
}
|
package com.davixdevelop.terracustomtreegen.baker;
import com.davixdevelop.terracustomtreegen.PopulatorEventHandler;
import com.davixdevelop.terracustomtreegen.repo.CustomTreeRepository;
import com.davixdevelop.terracustomtreegen.repo.TreeBiome;
import com.davixdevelop.terracustomtreegen.repo.TreeData;
import net.buildtheearth.terraplusplus.dataset.IScalarDataset;
import net.buildtheearth.terraplusplus.generator.CachedChunkData;
import net.buildtheearth.terraplusplus.generator.GeneratorDatasets;
import net.buildtheearth.terraplusplus.generator.data.IEarthDataBaker;
import net.buildtheearth.terraplusplus.projection.OutOfProjectionBoundsException;
import net.buildtheearth.terraplusplus.util.CornerBoundingBox2d;
import net.buildtheearth.terraplusplus.util.bvh.Bounds2d;
import net.minecraft.util.math.ChunkPos;
import java.util.ArrayList;
import java.util.concurrent.CompletableFuture;
public class TreeMapBaker implements IEarthDataBaker<TreeMapBaker.Data> {
    /** Repository holding the metadata (climate/continent sets) for every known custom tree. */
    public static CustomTreeRepository TREE_REPO = new CustomTreeRepository();
    /** Key under which the baked {@link TreeData} is stored in the chunk's custom data. */
    public static final String KEY_CUSTOM_TREE_REPO_TREE_MAP = "davixdevelop_terratreerepo_tree_gens";

    /**
     * Asynchronously fetches the continent and Köppen-climate rasters (16x16 samples each)
     * covering the chunk's bounding box and bundles them into a {@link Data} carrier.
     */
    @Override
    public CompletableFuture<TreeMapBaker.Data> requestData(ChunkPos chunkPos, GeneratorDatasets generatorDatasets, Bounds2d bounds2d, CornerBoundingBox2d cornerBoundingBox2d) throws OutOfProjectionBoundsException {
        CompletableFuture<double[]> continents = generatorDatasets.<IScalarDataset>getCustom(PopulatorEventHandler.KEY_CONTINENT).getAsync(cornerBoundingBox2d, 16, 16);
        CompletableFuture<double[]> climate = generatorDatasets.<IScalarDataset>getCustom(PopulatorEventHandler.KEY_KOPPEN).getAsync(cornerBoundingBox2d, 16, 16);
        return CompletableFuture.allOf(continents, climate)
                .thenApply(unused -> new Data(continents.join(), climate.join()));
    }

    /**
     * Selects the indexes of all trees whose climate/continent sets match this chunk's
     * sampled values and stores the result (possibly an empty TreeData) in the builder.
     */
    @Override
    public void bake(ChunkPos chunkPos, CachedChunkData.Builder builder, TreeMapBaker.Data data) {
        TreeData treeData = new TreeData();
        if (data != null) {
            // Sample index 128 of the 256-entry (16x16) raster — presumably intended as a
            // representative point of the chunk; TODO confirm this is the desired sample.
            double climate = data.climates[128];
            double continent = data.continents[128];
            if (climate != 0 && continent != 0) {
                treeData.treeIndexes = new ArrayList<>();
                for (int t = 0; t < TreeMapBaker.TREE_REPO.getTreeMeta().size(); t++) {
                    TreeBiome treeBiome = TreeMapBaker.TREE_REPO.getTreeMeta().get(t);
                    if (treeBiome.climate.contains((int) climate) && treeBiome.continents.contains((int) continent))
                        treeData.treeIndexes.add(t);
                }
            }
        }
        builder.putCustom(KEY_CUSTOM_TREE_REPO_TREE_MAP, treeData);
    }

    /**
     * Carrier for the raw continent and climate raster samples of one chunk.
     * FIX: declared {@code static} — the previous non-static inner class captured a
     * reference to the enclosing TreeMapBaker instance it never used, keeping it
     * alive for as long as the future's result is retained.
     */
    public static class Data {
        public Data(double[] cont, double[] climti) {
            continents = cont;
            climates = climti;
        }
        public double[] continents;
        public double[] climates;
    }
}
|
package com.hapramp.datastore.callbacks;
import com.hapramp.steem.models.Feed;
/**
 * Callback for an asynchronous single-post fetch: exactly one of the two
 * methods is expected to be invoked per request.
 */
public interface SinglePostCallback {
void onPostFetched(Feed feed);
void onPostFetchError(String err);
}
|
const express = require('express');
const bodyParser = require('body-parser');
const mongoose = require('mongoose');
const Schema = mongoose.Schema;

// Schema/model for a submitted contact form.
const FormSchema = new Schema({
  name: String,
  email: String
});
const FormModel = mongoose.model('FormData', FormSchema);

// BUG FIX: mongoose was never connected, so every save() would buffer and
// eventually time out. The URI comes from the environment with a local
// fallback — NOTE(review): confirm the default matches the deployment.
mongoose
  .connect(process.env.MONGODB_URI || 'mongodb://localhost:27017/formdata')
  .catch(err => {
    console.error(`Failed to connect to MongoDB: ${err.message}`);
    process.exit(1);
  });

const app = express();
app.use(bodyParser.urlencoded({ extended: true }));

// POST /form — persist one submission and echo the stored document (201),
// or report the failure (500).
app.post('/form', (req, res) => {
  const formData = new FormModel({
    name: req.body.name,
    email: req.body.email
  });
  formData
    .save()
    .then(savedFormData => res.status(201).send(savedFormData))
    .catch(err => res.status(500).send({ message: err.message }));
});

const PORT = process.env.PORT || 3000;
app.listen(PORT, () => {
  console.log(`Form data service is running at http://localhost:${PORT}`);
});
<reponame>KennethBWSong/TeamsFx
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
"use strict";
import { OptionItem } from "./qm";
import { Platform, Stage, VsCodeEnv } from "./constants";
export type Json = Record<string, unknown>;
export type ConfigValue = any;
export type PluginIdentity = string;
export type PluginConfig = ConfigMap;
export type ReadonlyPluginConfig = ReadonlyMap<string, ConfigValue>;
export type SolutionConfig = Map<PluginIdentity, PluginConfig>;
export type ReadonlySolutionConfig = ReadonlyMap<PluginIdentity, ReadonlyPluginConfig>;
export class ConfigMap extends Map<string, ConfigValue> {
  /**
   * Look up `k` and coerce the stored value to a string.
   * @param k config key
   * @param defaultValue returned only when the key is absent (or stored as null/undefined)
   */
  getString(k: string, defaultValue?: string): string | undefined {
    const v = super.get(k);
    // BUG FIX: the previous `if (!v)` treated legitimately stored falsy values
    // ("", 0, false) as missing and returned the default. Fall back only when
    // the key is truly absent (Map.get yields undefined) or explicitly null.
    if (v === undefined || v === null) return defaultValue;
    return String(v);
  }
  /** Like getString, but coerces to boolean; a stored `false` is returned, not the default. */
  getBoolean(k: string, defaultValue?: boolean): boolean | undefined {
    const v = super.get(k);
    if (v === undefined || v === null) return defaultValue;
    return Boolean(v);
  }
  /** Like getString, but coerces to number; a stored `0` is returned, not the default. */
  getNumber(k: string, defaultValue?: number): number | undefined {
    const v = super.get(k);
    if (v === undefined || v === null) return defaultValue;
    return Number(v);
  }
  /** Returns the stored value cast to string[] (no element-wise validation is performed). */
  getStringArray(k: string, defaultValue?: string[]): string[] | undefined {
    const v = super.get(k);
    if (v === undefined || v === null) return defaultValue;
    return v as string[];
  }
  /** Returns the stored value cast to number[] (no element-wise validation is performed). */
  getNumberArray(k: string, defaultValue?: number[]): number[] | undefined {
    const v = super.get(k);
    if (v === undefined || v === null) return defaultValue;
    return v as number[];
  }
  /** Returns the stored value cast to boolean[] (no element-wise validation is performed). */
  getBooleanArray(k: string, defaultValue?: boolean[]): boolean[] | undefined {
    const v = super.get(k);
    if (v === undefined || v === null) return defaultValue;
    return v as boolean[];
  }
  /** Returns the stored value cast to OptionItem. */
  getOptionItem(k: string, defaultValue?: OptionItem): OptionItem | undefined {
    const v = super.get(k);
    if (v === undefined || v === null) return defaultValue;
    return v as OptionItem;
  }
  /** Returns the stored value cast to OptionItem[]. */
  getOptionItemArray(k: string, defaultValue?: OptionItem[]): OptionItem[] | undefined {
    const v = super.get(k);
    if (v === undefined || v === null) return defaultValue;
    return v as OptionItem[];
  }
  /** Serializes all entries into a plain object (shallow copy of values). */
  toJSON(): Dict<unknown> {
    const out: Dict<unknown> = {};
    for (const entry of super.entries()) {
      out[entry[0]] = entry[1];
    }
    return out;
  }
  /** Rebuilds a ConfigMap from a plain object; undefined input yields undefined. */
  public static fromJSON(obj?: Dict<unknown>): ConfigMap | undefined {
    if (!obj) return undefined;
    const map = new ConfigMap();
    for (const entry of Object.entries(obj)) {
      map.set(entry[0], entry[1]);
    }
    return map;
  }
  constructor(entries?: readonly (readonly [string, ConfigValue])[] | null) {
    super(entries);
    // Required when compiling to ES5: restore the prototype chain after
    // subclassing a built-in (Map), otherwise instance methods are lost.
    Object.setPrototypeOf(this, ConfigMap.prototype);
  }
}
// eslint-disable-next-line @typescript-eslint/ban-types
export type Void = {};
export const Void = {};
/** String-keyed dictionary whose values may be absent. */
export interface Dict<T> {
[key: string]: T | undefined;
}
/** Raw key/value template describing one resource's configuration. */
export type ResourceTemplate = Dict<ConfigValue>;
/** Map of resource name to its template. */
export type ResourceTemplates = {
[k: string]: ResourceTemplate | undefined;
};
// Configs are structurally identical to templates; the aliases exist for intent.
export type ResourceConfig = ResourceTemplate;
export type ResourceConfigs = ResourceTemplates;
export type ReadonlyResourceConfig = Readonly<ResourceConfig>;
export type ReadonlyResourceConfigs = Readonly<{
[k: string]: ReadonlyResourceConfig | undefined;
}>;
/**
* environment meta data
*/
export interface EnvMeta {
name: string;
local: boolean;
sideloading: boolean;
}
/** Flat string key/value pairs for one environment. */
export type EnvConfig = Dict<string>;
/**
* project static settings
*/
export interface ProjectSettings {
appName: string;
projectId: string;
solutionSettings?: SolutionSettings;
}
/**
* solution settings
*/
export interface SolutionSettings extends Dict<ConfigValue> {
name: string;
version: string;
}
/** Azure-specific solution settings: hosting model plus enabled resources/plugins. */
export interface AzureSolutionSettings extends SolutionSettings {
hostType: string;
capabilities: string[];
azureResources: string[];
activeResourcePlugins: string[];
}
/**
* project dynamic states
*/
export interface ProjectStates {
solution: Dict<ConfigValue>;
resources: {
[k: string]: Dict<ConfigValue>;
};
}
/** User/driver inputs for one operation; open-ended (extends Json) by design. */
export interface Inputs extends Json {
projectPath?: string;
targetEnvName?: string;
platform: Platform;
stage?: Stage;
vscodeEnv?: VsCodeEnv;
ignoreLock?: boolean;
ignoreTypeCheck?: boolean;
ignoreConfigPersist?: boolean;
correlationId?: string;
}
/** Bundle of a project's static settings and its solution config. */
export interface ProjectConfig {
settings?: ProjectSettings;
config?: SolutionConfig;
}
|
<filename>src/components/common/Footer.js
import React from 'react'
function Footer() {
const date = new Date().getFullYear();
return (
<div id="footer" className='z-50 bg-gradient-to-r from-custom-purple-400 to-custom-cyan text-white flex justify-center items-center h-24 w-full'>
Copyright © {date}
<br />
</div>
)
}
export default Footer
|
package pkg
import (
"github.com/stripe/stripe-go"
"github.com/stripe/stripe-go/client"
"net/http"
"time"
)
// Environment describes one Stripe API target: a label, the secret key to
// authenticate with, and the base URL requests are sent to.
type Environment struct {
Name string
Key string
URL string
}
// NewMockedEnvironment builds an Environment pointing at a stripe-mock server,
// using a fixed dummy test key.
func NewMockedEnvironment(name, stripeMockURL string) *Environment {
return &Environment{
Name: name,
Key: "sk_test_123",
URL: stripeMockURL,
}
}
// NewStripeEnvironment builds an Environment pointing at the real Stripe API.
func NewStripeEnvironment(name, key string) *Environment {
return &Environment{
Name: name,
Key: key,
URL: stripe.APIURL,
}
}
// GetClient initializes a stripe-go client for this environment with a
// 30-second HTTP timeout.
// NOTE(review): the BackendConfiguration literal uses positional (unkeyed)
// fields, which breaks silently if the struct layout changes between
// stripe-go versions — consider keyed fields; confirm names for the pinned version.
func (e *Environment) GetClient() *client.API {
stripeClient := client.API{}
c := http.Client{Timeout: 30 * time.Second}
stripeClient.Init(e.Key, &stripe.Backends{
API: &stripe.BackendConfiguration{
stripe.APIBackend,
e.URL,
&c,
},
})
return &stripeClient
}
|
# Emit " <branch>" (bold blue) when the cwd is inside a git repo; silent otherwise.
# Relies on a `git current-branch` alias/subcommand being defined.
git_prompt_info() {
current_branch=$(git current-branch 2> /dev/null)
if [[ -n $current_branch ]]; then
echo " %{$fg_bold[blue]%}$current_branch%{$reset_color%}"
fi
}
# Fixed red airplane glyph used as the prompt terminator.
emoji_prompt() {
echo "%{$fg_bold[red]%}✈ %{$reset_color%}"
}
# promptsubst lets the $(...) calls in PS1 re-evaluate on every prompt draw.
setopt promptsubst
# Allow exported PS1 variable to override default prompt.
if ! env | grep -q '^PS1='; then
PS1='${SSH_CONNECTION+"%{$fg_bold[blue]%}%n@%m:"}%{$fg_bold[yellow]%}%c%{$reset_color%}$(git_prompt_info) $(emoji_prompt)'
fi
|
// We define a harmonious array is an array where the difference between its maximum value and its minimum value is exactly 1.
//
// Now, given an integer array, you need to find the length of its longest harmonious subsequence among all its possible subsequences.
//
// Example 1:
//
// Input: [1,3,2,2,5,2,3,7]
// Output: 5
// Explanation: The longest harmonious subsequence is [3,2,2,2,3].
//
//
//
// Note:
// The length of the input array will not exceed 20,000.
/**
 * Length of the longest harmonious subsequence (max - min exactly 1).
 *
 * Counts occurrences of each value, then for every value v present alongside
 * v+1 considers count(v) + count(v+1); any such pair of values forms a valid
 * harmonious subsequence. O(n) time, O(n) space.
 *
 * @param {number[]} nums
 * @return {number} longest harmonious subsequence length (0 if none exists)
 */
var findLHS = function(nums) {
  // Frequency table: value -> number of occurrences.
  const counts = new Map();
  for (const num of nums) {
    counts.set(num, (counts.get(num) ?? 0) + 1);
  }
  let max = 0;
  // A harmonious subsequence must use exactly the values v and v+1,
  // so only adjacent-value pairs need checking.
  for (const [value, count] of counts) {
    const neighborCount = counts.get(value + 1);
    if (neighborCount !== undefined) {
      max = Math.max(max, count + neighborCount);
    }
  }
  return max;
};
|
#!/usr/bin/env bash
#
# The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
# (the "License"). You may not use this work except in compliance with the License, which is
# available at www.apache.org/licenses/LICENSE-2.0
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied, as more fully set forth in the License.
#
# See the NOTICE file distributed with this work for information regarding copyright ownership.
#
# Helm release name used by every `helm template` invocation below.
readonly RELEASE_NAME='alluxio'
# Print CLI usage: MODE selects the journal topology; UFS only applies to single-ufs.
function printUsage {
echo "Usage: MODE [UFS]"
echo
echo "MODE is one of:"
echo -e " single-ufs \t Generate Alluxio YAML templates for a single-master environment using UFS journal."
echo -e " multi-embedded \t Generate Alluxio YAML templates with multiple masters using embedded journal."
echo -e " all \t Generate Alluxio YAML templates for all combinations."
echo
echo "UFS is only for single-ufs mode. It should be one of:"
echo -e " local \t Use a local destination for UFS journal."
echo -e " hdfs \t Use HDFS for UFS journal"
}
# Generate the full set of YAML templates into $dir.
# Relies on the globals $dir (target directory) and $defaultConfig (fallback
# config.yaml contents) being set by the caller before invocation.
function generateTemplates {
echo "Generating templates into $dir"
# Prepare target directories
if [[ ! -d "${dir}/master" ]]; then
mkdir -p ${dir}/master
fi
if [[ ! -d "${dir}/worker" ]]; then
mkdir -p ${dir}/worker
fi
if [[ ! -d "${dir}/logserver" ]]; then
mkdir -p ${dir}/logserver
fi
if [[ ! -d "${dir}/csi" ]]; then
mkdir -p ${dir}/csi
fi
# Seed a default config.yaml when the user has not provided one.
config=./$dir/config.yaml
if [[ ! -f "$config" ]]; then
echo "A config file $config is needed in $dir!"
echo "See https://docs.alluxio.io/os/user/edge/en/deploy/Running-Alluxio-On-Kubernetes.html#example-hdfs-as-the-under-store"
echo "for the format of config.yaml."
touch $config
echo "Using default config"
echo "${defaultConfig}"
cat << EOF >> $config
${defaultConfig}
EOF
fi
generateConfigTemplates
generateMasterTemplates
generateWorkerTemplates
generateFuseTemplates
generateLoggingTemplates
generateCsiTemplates
}
# Render the Alluxio configmap template from the chart using $dir/config.yaml.
function generateConfigTemplates {
echo "Generating configmap templates into $dir"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --show-only templates/config/alluxio-conf.yaml -f $dir/config.yaml > "$dir/alluxio-configmap.yaml.template"
}
# Render master statefulset + service templates into $dir/master.
function generateMasterTemplates {
echo "Generating master templates into $dir"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --show-only templates/master/statefulset.yaml -f $dir/config.yaml > "$dir/master/alluxio-master-statefulset.yaml.template"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --show-only templates/master/service.yaml -f $dir/config.yaml > "$dir/master/alluxio-master-service.yaml.template"
}
# Render worker daemonset + domain-socket PVC templates into $dir/worker.
function generateWorkerTemplates {
echo "Generating worker templates into $dir"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --show-only templates/worker/daemonset.yaml -f $dir/config.yaml > "$dir/worker/alluxio-worker-daemonset.yaml.template"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --show-only templates/worker/domain-socket-pvc.yaml -f $dir/config.yaml > "$dir/worker/alluxio-worker-pvc.yaml.template"
}
# Render fuse daemonset templates with fuse enabled.
# NOTE(review): unlike the other generators these write to the current working
# directory rather than $dir — confirm whether that is intentional.
function generateFuseTemplates {
echo "Generating fuse templates"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --set fuse.enabled=true --show-only templates/fuse/daemonset.yaml -f $dir/config.yaml > "alluxio-fuse.yaml.template"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --set fuse.clientEnabled=true --show-only templates/fuse/client-daemonset.yaml -f $dir/config.yaml > "alluxio-fuse-client.yaml.template"
}
# Render remote logserver deployment/service/PVC templates into $dir/logserver.
function generateLoggingTemplates {
echo "Generating remote logserver templates"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --show-only templates/logserver/deployment.yaml -f $dir/config.yaml > "$dir/logserver/alluxio-logserver-deployment.yaml.template"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --show-only templates/logserver/service.yaml -f $dir/config.yaml > "$dir/logserver/alluxio-logserver-service.yaml.template"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --show-only templates/logserver/log-pvc.yaml -f $dir/config.yaml > "$dir/logserver/alluxio-logserver-pvc.yaml.template"
}
# NOTE(review): this function is not invoked by generateTemplates or main —
# confirm whether it is dead code or called externally.
function generateMasterServiceTemplates {
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --show-only templates/master/service.yaml -f $dir/config.yaml > "$dir/alluxio-master-service.yaml.template"
}
# Render all CSI-related templates (controller, driver, nodeplugin, storage
# class, PVCs/PV and sample nginx pod) into $dir/csi.
function generateCsiTemplates {
# FIX: corrected typo in the progress message ("Genertating" -> "Generating").
echo "Generating csi templates"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --set csi.enabled=true --show-only templates/csi/controller-rbac.yaml -f $dir/config.yaml > "$dir/csi/alluxio-csi-controller-rbac.yaml.template"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --set csi.enabled=true --show-only templates/csi/controller.yaml -f $dir/config.yaml > "$dir/csi/alluxio-csi-controller.yaml.template"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --set csi.enabled=true --show-only templates/csi/driver.yaml -f $dir/config.yaml > "$dir/csi/alluxio-csi-driver.yaml.template"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --set csi.enabled=true --show-only templates/csi/nodeplugin.yaml -f $dir/config.yaml > "$dir/csi/alluxio-csi-nodeplugin.yaml.template"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --set csi.clientEnabled=true --show-only templates/csi/storage-class.yaml -f $dir/config.yaml > "$dir/csi/alluxio-storage-class.yaml.template"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --set csi.clientEnabled=true --show-only templates/csi/pvc.yaml -f $dir/config.yaml > "$dir/csi/alluxio-pvc.yaml.template"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --set csi.clientEnabled=true --show-only templates/csi/pvc-static.yaml -f $dir/config.yaml > "$dir/csi/alluxio-pvc-static.yaml.template"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --set csi.clientEnabled=true --show-only templates/csi/pv.yaml -f $dir/config.yaml > "$dir/csi/alluxio-pv.yaml.template"
helm template --name-template ${RELEASE_NAME} helm-chart/alluxio/ --set csi.clientEnabled=true --show-only templates/csi/nginx-pod.yaml -f $dir/config.yaml > "$dir/csi/alluxio-nginx-pod.yaml.template"
}
# Generate templates for a single-master deployment using a UFS journal.
# $1: journal UFS type ("local" or "hdfs"). Sets the globals $dir and
# $defaultConfig consumed by generateTemplates.
function generateSingleUfsTemplates {
echo "Target FS $1"
targetFs=$1
case $targetFs in
"local")
echo "Using local journal"
dir="singleMaster-localJournal"
read -r -d '' defaultConfig << 'EOM'
master:
count: 1 # For multiMaster mode increase this to >1
journal:
type: "UFS"
ufsType: "local"
folder: "/journal"
EOM
generateTemplates
;;
"hdfs")
# BUG FIX: the messages below previously referenced $ufs, which is only set
# when this function is reached via "single-ufs" mode in main;
# generateAllTemplates calls this function directly, leaving $ufs empty.
# Use the function-local $targetFs instead.
echo "Journal UFS $targetFs"
dir="singleMaster-hdfsJournal"
read -r -d '' defaultConfig << 'EOM'
master:
count: 1
journal:
type: "UFS"
ufsType: "HDFS"
folder: "hdfs://{$hostname}:{$hostport}/journal"
properties:
alluxio.master.mount.table.root.ufs: "hdfs://{$hostname}:{$hostport}/{$underFSStorage}"
alluxio.master.journal.ufs.option.alluxio.underfs.hdfs.configuration: "/secrets/hdfsConfig/core-site.xml:/secrets/hdfsConfig/hdfs-site.xml"
secrets:
master:
alluxio-hdfs-config: hdfsConfig
worker:
alluxio-hdfs-config: hdfsConfig
EOM
generateTemplates
;;
*)
echo "Unknown Journal UFS type $targetFs"
printUsage
exit 1
esac
}
# Generate templates for a 3-master deployment using the embedded (Raft) journal.
# Sets $dir and $defaultConfig, then delegates to generateTemplates.
function generateMultiEmbeddedTemplates {
dir="multiMaster-embeddedJournal"
read -r -d '' defaultConfig << 'EOM'
master:
count: 3
journal:
type: "EMBEDDED"
ufsType: "local" # This field will not be looked at
folder: "/journal"
EOM
generateTemplates
}
# Generate every supported combination (local UFS, HDFS UFS, multi-embedded).
function generateAllTemplates {
generateSingleUfsTemplates "local"
generateSingleUfsTemplates "hdfs"
generateMultiEmbeddedTemplates
}
# Entry point: dispatch on MODE. "single-ufs" additionally requires the UFS
# argument; unknown modes print usage and exit non-zero.
function main {
mode=$1
case $mode in
"single-ufs")
echo "Generating templates for $mode"
if ! [ $# -eq 2 ]; then
printUsage
exit 1
fi
ufs=$2
generateSingleUfsTemplates "$ufs"
;;
"multi-embedded")
echo "Generating templates for $mode"
generateMultiEmbeddedTemplates
;;
"all")
echo "Generating templates for all combinations"
generateAllTemplates
;;
*)
echo "Unknown mode $mode"
printUsage
exit 1
esac
}
# Require 1 or 2 positional arguments before dispatching.
if [ $# -lt 1 ] || [ $# -gt 2 ]; then
printUsage
exit 1
fi
main "$@"
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package dao;
import conexao.Call;
import conexao.Conexao;
import java.io.Serializable;
import java.sql.CallableStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import modelo.Agencia;
import modelo.Banco;
import modelo.Cheque;
import modelo.Movimento;
import modelo.SequenciaCheque;
import modelo.Utilizador;
import sessao.SessionUtil;
import validacao.OperacaoData;
/**
*
* @author AhmedJorge
*/
public class AdministracaoDao implements Serializable{
private String sql;
private CallableStatement cs;
private String resultado;
private SimpleDateFormat sdfIngles= new SimpleDateFormat("yyyy-MM-dd");
private SimpleDateFormat sdfPT= new SimpleDateFormat("dd-MM-yyyy");
// FUNC_REG_BANCO
// Lists all banks from the VER_BANCO view (legacy column set Nome/SIGlA/SALDO/ID).
// NOTE(review): unlike registrarBanco, the connection is never released here
// (no conexao.desCon()) — confirm whether Conexao manages its own lifecycle
// or this leaks connections.
public ArrayList<Banco> listaTodosBanco()
{
@SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
ArrayList<Banco> al= new ArrayList<>();
@SuppressWarnings("UnusedAssignment")
ResultSet rs = null;
sql="select * FROM VER_BANCO";
Conexao conexao = new Conexao();
if(conexao.getCon()!=null)
{
try
{
cs = conexao.getCon().prepareCall(sql);
cs.execute();
rs=cs.executeQuery();
if (rs!=null)
{
while (rs.next())
{
al.add(new Banco(rs.getString("Nome"), rs.getString("SIGlA"), rs.getString("SALDO"), rs.getString("ID")));
}
}
rs.close();
}
catch (SQLException ex)
{
Logger.getLogger(CreditoDao.class.getName()).log(Level.SEVERE, null, ex);
System.out.println("Erro a obter fontes rendimentos "+ex.getMessage());
}
}
return al;
}
// Variant of listaTodosBanco that additionally reads the SALDOSF column.
// NOTE(review): largely duplicated from listaTodosBanco — candidate for merging.
public ArrayList<Banco> listaTodosBanco2()
{
@SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
ArrayList<Banco> al= new ArrayList<>();
@SuppressWarnings("UnusedAssignment")
ResultSet rs = null;
sql="select * FROM VER_BANCO";
Conexao conexao = new Conexao();
if(conexao.getCon()!=null)
{
try
{
cs = conexao.getCon().prepareCall(sql);
cs.execute();
rs=cs.executeQuery();
if (rs!=null)
{
while (rs.next())
{
al.add(new Banco(rs.getString("NOME"), rs.getString("SIGLA"), rs.getString("SALDO"), rs.getString("ID"), rs.getString("SALDOSF")));
}
}
rs.close();
}
catch (SQLException ex)
{
Logger.getLogger(CreditoDao.class.getName()).log(Level.SEVERE, null, ex);
System.out.println("Erro a listar bancos"+ex.getMessage());
}
}
return al;
}
// Registers a new bank via the FUNC_REG_BANCO stored function, passing the
// logged-in user's NIF and agency. Returns the function's VARCHAR result
// (null if no connection could be obtained).
public String registrarBanco(Banco b)
{
sql="{?=call FUNC_REG_BANCO(?,?,?,?)}";
Conexao conexao = new Conexao();
if(conexao.getCon()!=null)
{
try
{
cs = conexao.getCon().prepareCall(sql);
cs.registerOutParameter(1,Types.VARCHAR);
cs.setString(2, SessionUtil.getUserlogado().getNif());
cs.setString(3, b.getSigla());
cs.setString(4, b.getNome());
cs.setInt(5, SessionUtil.getUserlogado().getIdAgencia());
cs.execute();
resultado = cs.getString(1);
System.err.println("banco "+ resultado);
conexao.desCon();
}
catch (SQLException ex)
{
Logger.getLogger(ClienteDao.class.getName()).log(Level.SEVERE, null, ex);
System.out.println("Erro ao Registar Credito"+ex.getMessage());
}
}
return resultado;
}
// Loads a bank's movements for a given date via FUNCT_LOAD_MOVIMENTOBANCO,
// scoped to the logged-in user's agency.
public ArrayList<Movimento> MovimentacaoBanco(Banco b,Date data)
{
ArrayList<Movimento> al = new ArrayList<>();
try {
ResultSet rs = Call.callTableFunction("FUNCT_LOAD_MOVIMENTOBANCO", "*", b.getId(), data, SessionUtil.getUserlogado().getIdAgencia());
if (rs != null) {
while (rs.next()) {
al.add(new Movimento(null, rs.getString("DATA"), rs.getString("CREDITO"), rs.getString(4), rs.getString("DEBITO")));
}
}
} catch (SQLException ex) {
Logger.getLogger(ClienteDao.class.getName()).log(Level.SEVERE, null, ex);
System.out.println("listar Movimeto do Banco" + ex.getMessage());
}
return al;
}
// Loads company cheque sequences for one bank (1-arg overload of the
// FUNCT_LOAD_CHEQUEEMPRESA table function).
// NOTE(review): prints `resultado` set by an earlier, unrelated call — looks
// like leftover debug output; confirm before removal.
public ArrayList<Cheque> getSeguenciaBanco2(String idBanco)
{
ArrayList<Cheque> al= new ArrayList<>();
sql="SELECT * from table(FUNCT_LOAD_CHEQUEEMPRESA(?))";
Conexao conexao = new Conexao();
if(conexao.getCon()!=null)
{
try
{
cs = conexao.getCon().prepareCall(sql);
cs.setString(1, idBanco);
cs.execute();
ResultSet rs = cs.getResultSet();
if(rs!=null)
{
while (rs.next())
{
al.add( new Cheque(rs.getString("DATA"), rs.getString("INICIO"),rs.getString("FIM"), rs.getString(5),rs.getString("AGENCIA"), rs.getString("ESTADO")));
}
}
System.err.println("banco "+ resultado);
conexao.desCon();
}
catch (SQLException ex)
{
Logger.getLogger(ClienteDao.class.getName()).log(Level.SEVERE, null, ex);
System.out.println("listar Movimeto do Banco"+ex.getMessage());
}
}
return al;
}
// Same query restricted to the logged-in user's agency (2-arg overload).
public ArrayList<Cheque> getSeguenciaBanco(Banco b)
{
ArrayList<Cheque> al= new ArrayList<>();
sql="SELECT * from table(FUNCT_LOAD_CHEQUEEMPRESA(?,?))";
Conexao conexao = new Conexao();
if(conexao.getCon()!=null)
{
try
{
cs = conexao.getCon().prepareCall(sql);
cs.setString(1, b.getId());
cs.setInt(2, SessionUtil.getUserlogado().getIdAgencia());
cs.execute();
ResultSet rs = cs.getResultSet();
if(rs!=null)
{
while (rs.next())
{
al.add( new Cheque(rs.getString("DATA"), rs.getString("INICIO"),rs.getString("FIM"), rs.getString(5),rs.getString("AGENCIA"), rs.getString("ESTADO")));
}
}
System.err.println("banco "+ resultado);
conexao.desCon();
}
catch (SQLException ex)
{
Logger.getLogger(ClienteDao.class.getName()).log(Level.SEVERE, null, ex);
System.out.println("listar Movimeto do Banco"+ex.getMessage());
}
}
return al;
}
// Same query with an explicit bank id and optional agency filter
// (empty/null agency is passed as SQL NULL).
public ArrayList<Cheque> getSeguenciaBancoCheque(int b, String agencia)
{
ArrayList<Cheque> al= new ArrayList<>();
sql="SELECT * from table(FUNCT_LOAD_CHEQUEEMPRESA(?,?))";
Conexao conexao = new Conexao();
if(conexao.getCon()!=null)
{
try
{
cs = conexao.getCon().prepareCall(sql);
cs.setInt(1, b);
cs.setObject(2, ((agencia == null || agencia.equals("")) ? null : agencia));
cs.execute();
ResultSet rs = cs.getResultSet();
if(rs!=null)
{
while (rs.next())
{
al.add( new Cheque(rs.getString("DATA"), rs.getString("INICIO"),rs.getString("FIM"), rs.getString(5),rs.getString("AGENCIA"), rs.getString("ESTADO")));
}
}
System.err.println("banco "+ resultado);
conexao.desCon();
}
catch (SQLException ex)
{
Logger.getLogger(ClienteDao.class.getName()).log(Level.SEVERE, null, ex);
System.out.println("listar Movimeto do Banco"+ex.getMessage());
}
}
return al;
}
public List<Banco> cobrancaDepositos(int idBanco, Date dataInicial, Date dataFinal, String agencia)
{
List<Banco> info = new ArrayList<>();
Object agency = (agencia == null || agencia.equals("")) ? null : agencia;
String functionName = "FUNCT_LOAD_COBRANCABANCO";
ResultSet rs = Call.callTableFunction(functionName, "*",idBanco,OperacaoData.toSQLDate(dataInicial),OperacaoData.toSQLDate(dataFinal), agency);
if(rs != null)
{
try {
while(rs.next())
{
Banco b = new Banco();
b.setData(rs.getString("DATA"));
b.setCredito(rs.getString("CREDITO"));
b.setDebito(rs.getString("DEBITO"));
b.setSigla(rs.getString("SIGLA"));
b.setNome(rs.getString("BANCO"));
info.add(b);
}
rs.close();
} catch (SQLException ex) {
Logger.getLogger(AdministracaoDao.class.getName()).log(Level.SEVERE, null, ex);
}
}
System.out.println("tamanho da lista "+info.size());
return info;
}
public List<Banco> saldosDiarios(String idBanco, Date dia, String agencia)
{
List<Banco> info = new ArrayList<>();
Object numAgencia = ((agencia == null || agencia.equals("")) ? null : agencia);
String functionName = "FUNCT_LOAD_MOVIMENTOBANCO";
ResultSet rs = Call.callTableFunction(functionName,"*", idBanco,((dia == null)? null : OperacaoData.toSQLDate(dia)),agencia);
if(rs != null)
{
try {
while(rs.next())
{
Banco banco = new Banco();
banco.setCredito(rs.getString("CREDITO"));
banco.setDebito(rs.getString("DEBITO"));
banco.setNome(rs.getString("DESIGNAÇÃO OPERAÇÃO"));
banco.setData(rs.getString("DATA"));
info.add(banco);
}
rs.close();
} catch (SQLException ex) {
Logger.getLogger(AdministracaoDao.class.getName()).log(Level.SEVERE, null, ex);
}
}
return info;
}
    /**
     * Registers a new agency via the FUNC_REG_AGENCIA DB function, passing the
     * logged-in user's NIF and agency as context.
     *
     * @param agencia agency to register (its localidade id and name are sent)
     * @return the function's VARCHAR result converted to String
     *         NOTE(review): throws NullPointerException if the function
     *         returns SQL NULL - confirm FUNC_REG_AGENCIA never returns NULL.
     */
    public String regAgencia(Agencia agencia)
    {
        String functionName = "FUNC_REG_AGENCIA";
        Object resp = Call.callSampleFunction(functionName, Types.VARCHAR,
                Integer.valueOf(SessionUtil.getUserlogado().getNif()),
                Integer.valueOf(agencia.getLocalidade()),
                SessionUtil.getUserlogado().getIdAgencia(),
                agencia.getNome());
        return resp.toString();
    }
    /**
     * Lists available cheque sequences per agency/bank, read from the
     * VER_CHEQUES_DIPONIVEIS view (view and column names kept exactly as
     * spelled in the database, including "DIPONIVEIS"/"DESTRIBUIDA").
     *
     * @return list of SequenciaCheque rows (agency, bank, total leaves,
     *         total distributed); empty on SQL error or empty view
     */
    public List<SequenciaCheque> listaChequesDisponiveis()
    {
        List<SequenciaCheque> cheques = new ArrayList<>();
        ResultSet rs = Call.selectFrom("VER_CHEQUES_DIPONIVEIS", "*");
        if(rs != null)
        {
            try {
                while(rs.next())
                {
                    SequenciaCheque c = new SequenciaCheque();
                    c.setAgencia(rs.getString("AGENCIA"));
                    c.setBanco(rs.getString("BANCO"));
                    c.setTotalfolhas(rs.getString("TOTAL"));
                    c.setTotalDestribuido(rs.getString("DESTRIBUIDA"));
                    cheques.add(c);
                }
                rs.close();
            } catch (SQLException ex) {
                Logger.getLogger(AdministracaoDao.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        return cheques;
    }
} |
import React, { useState } from "react";
// Root to-do component: owns the task list state and hands CRUD callbacks
// down to the child components.
// NOTE(review): CreateTask, UpdateTask and DeleteTask are referenced in the
// JSX below but are neither defined nor imported in this file - confirm they
// are provided elsewhere, otherwise rendering will throw.
function App() {
  const [tasks, setTasks] = useState([]);

  // Append a new task (caller supplies the full task object, including id).
  const handleCreateTask = (task) => {
    const newTasks = [...tasks, task];
    setTasks(newTasks);
  }

  // Shallow-merge updatedTask into the task whose id matches.
  const handleUpdateTask = (id, updatedTask) => {
    const newTasks = tasks.map(task => {
      if (task.id === id) {
        return { ...task, ...updatedTask }
      }
      return task;
    });
    setTasks(newTasks);
  }

  // Remove the task whose id matches.
  const handleDeleteTask = (id) => {
    const newTasks = tasks.filter(task => task.id !== id);
    setTasks(newTasks);
  }

  // Render one <li> per task, keyed by id.
  const listTasks = () => {
    return tasks.map(task => (
      <li key={task.id}>{task.description}</li>
    ));
  }

  return (
    <div>
      <h1>To-Do List</h1>
      <CreateTask handleCreateTask={handleCreateTask} />
      <UpdateTask listOfTasks={tasks} handleUpdateTask={handleUpdateTask} />
      <DeleteTask listOfTasks={tasks} handleDeleteTask={handleDeleteTask} />
      {listTasks()}
    </div>
  );
}
export default App; |
#!/bin/bash
# This is for a VM for multiple parties and bridged networking.
# On distribution it should fetch an IP address from the router.
# Problem: When changing the MAC address, the interface name changes
# which then is not configured for DHCP.
# Solution: One "eth0" to rule them all.
# WARNING! This only works for VMs with a single interface!

# Drop any previously generated persistent-net rules so the rule written
# below is the only one udev sees.
find /etc/udev/rules.d/ -name "*-net.rules" -exec rm {} \;

# Quoted heredoc delimiter ('EOF') keeps the udev match expressions literal.
cat <<'EOF' > /etc/udev/rules.d/70-persistent-net.rules
SUBSYSTEM=="net", ACTION=="add", DRIVERS=="?*", ATTR{dev_id}=="0x0", ATTR{type}=="1", KERNEL=="eth*", NAME="eth0"
EOF
|
import { NodeInitializer } from 'node-red';
import { MyStromSwitchStatusNode, MyStromSwitchStatusNodeDef } from './modules/types';
import { MyStromSwitchConfigNode } from '../mystrom-switch-config/modules/types';
import { MyStromSwitch } from '../../modules/mystrom-switch';
// Node-RED initializer for "mystrom-switch-status": registers a node that
// queries a myStrom switch for its current status on every input message.
const nodeInit: NodeInitializer = (RED): void => {
  function MyStromSwitchStatusNodeConstructor(this: MyStromSwitchStatusNode, config: MyStromSwitchStatusNodeDef): void {
    RED.nodes.createNode(this, config);
    // Resolve the referenced config node that holds the switch name/address.
    this.switch = RED.nodes.getNode(config.switch) as MyStromSwitchConfigNode;
    this.on('input', (msg, send, done) => {
      // Show progress in the editor while the request is in flight.
      this.status({ fill: 'blue', shape: 'dot', text: 'running' });
      new MyStromSwitch(this.switch.address)
        .getSwitchStatus()
        .then((response) => {
          this.status({ fill: 'green', shape: 'dot', text: 'successful' });
          // Replace the payload with a snapshot of the switch state.
          msg.payload = {
            name: this.switch.name,
            address: this.switch.address,
            power: response.power,
            relay: response.relay,
            temperature: response.temperature,
          };
          send(msg);
          done();
        })
        .catch((error) => {
          this.status({ fill: 'red', shape: 'dot', text: 'failed' });
          // Surface the failure to Node-RED's error handling.
          done(error);
        });
    });
  }
  RED.nodes.registerType('mystrom-switch-status', MyStromSwitchStatusNodeConstructor);
};
export = nodeInit;
|
import * as React from 'react';
import SVGDefsContext, { SVGDefsContextProps } from './SVGDefsContext';
import { SVGDefsSetter } from './SVGDefsSetter';
// Public props of SVGDefs: a stable id for the contribution plus the def
// content itself.
interface SVGDefsProps {
  id: string;
  children: React.ReactNode;
}
// Props handed to SVGDefsSetter: the context's add/remove callbacks merged
// with the public SVGDefs props.
export type SVGDefsSetterProps = SVGDefsContextProps & SVGDefsProps;
/**
 * Contributes `children` to the parent SVG `<defs>` element.
 * A contribution is assumed to be static in nature in that the children will never change
 * for a given ID. This is because there may be multiple children referencing the same defs contribution.
 * The assumption must be that there is not a single owner but many owners and therefore each
 * owner must be contributing the same def.
 */
export default class SVGDefs extends React.Component<SVGDefsProps> {
  // Contributions are static by contract (see class doc), so this component
  // never needs to re-render.
  shouldComponentUpdate() {
    return false;
  }
  render() {
    // Forward the context's add/remove callbacks to the setter, which does
    // the actual registration/unregistration of the def.
    return (
      <SVGDefsContext.Consumer>
        {({ addDef, removeDef }) => <SVGDefsSetter {...this.props} addDef={addDef} removeDef={removeDef} />}
      </SVGDefsContext.Consumer>
    );
  }
}
|
# Factory for a Cfror::Integer record: builds the associated :field factory
# and sets body to 1.
# NOTE(review): uses the pre-rename FactoryGirl API (now FactoryBot) with the
# implicit-value DSL ("body 1") - keep consistent with the suite's gem version.
FactoryGirl.define do
  factory :cfror_integer, :class => 'Cfror::Integer' do
    field
    body 1
  end
end
|
<reponame>rbg001/WxJava
package me.chanjar.weixin.mp.bean.card;
import com.google.gson.annotations.SerializedName;
import lombok.Data;
import me.chanjar.weixin.mp.util.json.WxMpGsonBuilder;
import java.io.Serializable;
/**
 * Card coupon payload object: the sections serialized when creating a card
 * (base info plus the coupon-specific advanced info).
 *
 * @author leeis
 * @Date 2018/12/29
 */
@Data
public class Card implements Serializable {
    private static final long serialVersionUID = -3697110761983756780L;

    /**
     * Base info section (common card fields).
     */
    @SerializedName("base_info")
    private BaseInfo baseInfo;

    /**
     * Advanced fields specific to creating a coupon.
     */
    @SerializedName("advanced_info")
    private AdvancedInfo advancedInfo;
}
|
<filename>src/main/java/com/meida/model/base/BaseUser.java
package com.meida.model.base;
import com.jfinal.plugin.activerecord.Model;
import com.jfinal.plugin.activerecord.IBean;
/**
* Generated by JFinal, do not modify this file.
*/
@SuppressWarnings("serial")
public abstract class BaseUser<M extends BaseUser<M>> extends Model<M> implements IBean {

	// NOTE(review): this class is generated by JFinal from the table schema;
	// regeneration will drop these comments. The column keys below mix
	// snake_case ("work_address") and camelCase ("openId", "companyAddress",
	// "deleteFlag", "createTime", ...) - they must match the actual table
	// columns, so do not "normalize" them.

	public void setId(Long id) {
		set("id", id);
	}

	public Long getId() {
		return get("id");
	}

	public void setName(String name) {
		set("name", name);
	}

	public String getName() {
		return get("name");
	}

	public void setEmail(String email) {
		set("email", email);
	}

	public String getEmail() {
		return get("email");
	}

	public void setPassword(String password) {
		set("password", password);
	}

	public String getPassword() {
		return get("password");
	}

	public void setPhone(String phone) {
		set("phone", phone);
	}

	public String getPhone() {
		return get("phone");
	}

	public void setNick(String nick) {
		set("nick", nick);
	}

	public String getNick() {
		return get("nick");
	}

	public void setWechat(String wechat) {
		set("wechat", wechat);
	}

	public String getWechat() {
		return get("wechat");
	}

	// Column name is snake_case in the table, unlike most other columns.
	public void setWorkAddress(String workAddress) {
		set("work_address", workAddress);
	}

	public String getWorkAddress() {
		return get("work_address");
	}

	public void setStatus(Integer status) {
		set("status", status);
	}

	public Integer getStatus() {
		return get("status");
	}

	public void setOpenId(String openId) {
		set("openId", openId);
	}

	public String getOpenId() {
		return get("openId");
	}

	public void setCompany(String company) {
		set("company", company);
	}

	public String getCompany() {
		return get("company");
	}

	public void setCompanyAddress(String companyAddress) {
		set("companyAddress", companyAddress);
	}

	public String getCompanyAddress() {
		return get("companyAddress");
	}

	public void setDeleteFlag(Boolean deleteFlag) {
		set("deleteFlag", deleteFlag);
	}

	public Boolean getDeleteFlag() {
		return get("deleteFlag");
	}

	public void setCreateTime(java.util.Date createTime) {
		set("createTime", createTime);
	}

	public java.util.Date getCreateTime() {
		return get("createTime");
	}

	public void setCreater(Long creater) {
		set("creater", creater);
	}

	public Long getCreater() {
		return get("creater");
	}

	public void setUpdateTime(java.util.Date updateTime) {
		set("updateTime", updateTime);
	}

	public java.util.Date getUpdateTime() {
		return get("updateTime");
	}

	public void setUpdater(Long updater) {
		set("updater", updater);
	}

	public Long getUpdater() {
		return get("updater");
	}
}
|
#!/bin/bash
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Builds OpenSSL (static, no-shared) and installs it into the prefix in $1.
set -e
set -x
set -o pipefail

PREFIX="$1"

case $OSTYPE in
  darwin*)
    TARGET=darwin64-x86_64-cc
    ;;
  *)
    echo IDKWTF
    exit 1
esac

# Fix: 'nproc' is a GNU coreutils tool that does not exist on macOS - the
# only platform this script supports (see the case above) - so
# 'make -j $(nproc)' would fail or run an unbounded -j. Detect the core
# count portably instead.
if command -v nproc >/dev/null 2>&1; then
  JOBS="$(nproc)"
else
  JOBS="$(sysctl -n hw.ncpu)"
fi

perl Configure --prefix="$PREFIX" no-shared $ARGS "$TARGET"
make -j "$JOBS"
make install_sw
|
#!/usr/bin/env bash
# Run Susceptible-Recovered-Infected (SIR) model experiments and generate plots
trap "echo Stopping; exit;" SIGINT SIGTERM
# Grid of observation noise standard deviations to run experiments for
# -1 value indicates unknown observation noise standard deviation as variable to infer
OBS_NOISE_STD_GRID=(0.3162 1 3.162 10 -1)
# Number of independent chains to run per experiment
NUM_CHAIN=4
# Number of iterations in adaptive warm-up stage of chains
NUM_WARM_UP_ITER=500
# Number of iterations in main stage of chains
NUM_MAIN_ITER=2500
# Values of random seeds to run experiments for
SEED_VALS=(20200710)
# Values of Hamiltonian splittings to run experiments for (CHMC only)
SPLITTING_VALS=("standard" "gaussian")
# Values of metric (mass matrix) types to run experiments for (HMC only)
METRIC_TYPE_VALS=("identity" "diagonal")
# Resolve this script's directory so experiment/figure paths work regardless
# of the caller's working directory.
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
EXPERIMENT_DIR="$(dirname ${SCRIPT_DIR})/experiments"
FIGURE_DIR="$(dirname ${SCRIPT_DIR})/figures"
# NOTE(review): the path/parameter expansions below are unquoted; fine as
# long as the checkout path contains no spaces - quote them otherwise.
echo "================================================================================"
echo "Running Susceptible-Infected-Recovered (SIR) model experiments"
echo "Saving results to ${EXPERIMENT_DIR}"
echo "Saving figures to ${FIGURE_DIR}"
echo "--------------------------------------------------------------------------------"
echo "Varying observation noise standard deviation with CHMC"
echo "--------------------------------------------------------------------------------"
for SEED in ${SEED_VALS[@]}; do
    for SPLITTING in ${SPLITTING_VALS[@]}; do
        for OBS_NOISE_STD in ${OBS_NOISE_STD_GRID[@]}; do
            echo "Seed ${SEED}, ${SPLITTING} splitting, obs. noise std ${OBS_NOISE_STD}"
            python ${SCRIPT_DIR}/sir_model_chmc_experiment.py \
                --observation-noise-std ${OBS_NOISE_STD} \
                --splitting ${SPLITTING} \
                --seed ${SEED} \
                --output-root-dir ${EXPERIMENT_DIR} \
                --num-chain ${NUM_CHAIN} \
                --num-warm-up-iter ${NUM_WARM_UP_ITER} \
                --num-main-iter ${NUM_MAIN_ITER}
        done
    done
done
echo "--------------------------------------------------------------------------------"
echo "Varying observation noise standard deviation with HMC"
echo "--------------------------------------------------------------------------------"
for SEED in ${SEED_VALS[@]}; do
    for METRIC_TYPE in ${METRIC_TYPE_VALS[@]}; do
        for OBS_NOISE_STD in ${OBS_NOISE_STD_GRID[@]}; do
            echo "Seed ${SEED}, ${METRIC_TYPE} metric, obs. noise std ${OBS_NOISE_STD}"
            python ${SCRIPT_DIR}/sir_model_hmc_experiment.py \
                --observation-noise-std ${OBS_NOISE_STD} \
                --metric-type ${METRIC_TYPE} \
                --seed ${SEED} \
                --output-root-dir ${EXPERIMENT_DIR} \
                --num-chain ${NUM_CHAIN} \
                --num-warm-up-iter ${NUM_WARM_UP_ITER} \
                --num-main-iter ${NUM_MAIN_ITER}
        done
    done
done
echo "--------------------------------------------------------------------------------"
echo "Generating plots"
echo "--------------------------------------------------------------------------------"
python ${SCRIPT_DIR}/sir_model_generate_plots.py \
    --experiment-dir ${EXPERIMENT_DIR} \
    --output-dir ${FIGURE_DIR} \
    --obs-noise-std-grid ${OBS_NOISE_STD_GRID[@]}
|
<gh_stars>1-10
package crawler
// Board identifies a bulletin board by its display title and URL.
type Board struct {
	Title string
	URL string
}

// Thread is one thread on a Board: its title, parent board, URL and the
// current response count.
type Thread struct {
	Title string
	Board *Board
	URL string
	ResCount int
}

// ThreadData is a single post within a thread: the poster's handle and
// mail-to field, the date and comment text, any remaining raw text (Other),
// and the post number (No) within the thread.
type ThreadData struct {
	Board *Board
	Thread *Thread
	URL string
	Handle string
	MailTo string
	Date string
	Comment string
	Other string
	No int
}
|
#!/bin/bash
# As an alternative to shell script loops like this, see bevelbar-time.
# Demo feed for bevelbar. Per the inline notes below, each iteration writes
# one block per monitor: a line selecting the monitor, content lines whose
# first character selects a style, 'e' ending the monitor's input, and a
# final 'f' flushing the update.
while sleep 0.25
do
    echo 0 # Monitor 0
    echo 0"$(date +%s)" # Select style 0
    echo 1I am pressed # Select style 1
    echo 2This is another style
    echo - # Request an empty segment
    echo "0<-- that's an empty segment"
    echo e # End input for selected monitor
    echo 1
    echo 0"$(date)"
    echo e
    echo 2
    echo 0I am on monitor number 2
    echo e
    echo f
done | ./bevelbar \
    -h left -v top \
    -H 10 -V 5 \
    -b 3 -B 2 \
    -m 2 -e 20 \
    -f 'Terminus:pixelsize=10' \
    -p '#a3a3a3' \
    -o '#e1e1e1' -O '#262626' \
    -s 0 -c '#bebebe' -c '#000000' -c '#e1e1e1' -c '#747474' \
    -s 1 -c '#bebebe' -c '#000000' -c '#747474' -c '#e1e1e1' \
    -s 2 -c '#a11212' -c '#ffffff' -c '#e61919' -c '#570A0A'
# Note that all arguments to bevelbar are optional.
|
class CLI:
    """Minimal command registry: maps command names to zero-argument callables."""

    def __init__(self):
        # Maps command name -> zero-argument callable.
        self.commands = {}

    def add_command(self, command_name, functionality):
        """Register `functionality` (a zero-argument callable) under `command_name`."""
        self.commands[command_name] = functionality

    def list_commands(self):
        """Return the registered command names in insertion order."""
        return list(self.commands.keys())

    def execute_command(self, command_name):
        """Run the named command and return its result.

        Fix: the result of a found command was previously discarded (the
        method returned None on success but a message string on failure);
        the command's return value is now propagated. Unknown names still
        return the same "not found" message.
        """
        if command_name in self.commands:
            return self.commands[command_name]()
        return f"Command '{command_name}' not found."
# Usage demonstration: register three zero-argument commands and run them.
def set_translation_start():
    print("Executing set_translation_start command")

def update_attribute():
    print("Executing update_attribute command")

def update_dbxref():
    print("Executing update_dbxref command")

cli = CLI()
cli.add_command("set_translation_start", set_translation_start)
cli.add_command("update_attribute", update_attribute)
cli.add_command("update_dbxref", update_dbxref)
print(cli.list_commands())  # Output: ['set_translation_start', 'update_attribute', 'update_dbxref']
cli.execute_command("set_translation_start")  # Output: Executing set_translation_start command
cli.execute_command("unknown_command")  # Returns (does NOT print) "Command 'unknown_command' not found."
#!/bin/bash
# Copyright (c) 2020-2029 ASTRID Project (https://www.astrid-project.eu)
# author: Alex Carrega <alessandro.carrega@cnit.it>

# Export only the dev dependencies from Pipfile.lock as a pip requirements
# file. NOTE(review): 'pipenv lock -r' was removed in newer pipenv releases
# in favor of 'pipenv requirements' - confirm the pinned pipenv version.
pipenv lock -r --dev-only > dev/requirements.txt
|
/**
 * Simple file-entry model wrapping a plain data object
 * (name, type, size, path, owner, group, modifiedTime, permissions).
 *
 * Fix: the prototype methods were previously defined INSIDE the constructor
 * body, so they were reassigned on every construction and did not exist at
 * all until the first File was created (and never, if the first call
 * returned early on non-object input). They are now defined once, outside
 * the constructor. Also added the radix to parseInt.
 *
 * @param {Object} data - raw file attributes; non-object input yields an
 *   uninitialized instance (original behavior preserved).
 */
var File = function (data) {
  if (typeof data !== 'object') return;
  this.name = data.name || '';
  this.type = data.type || 'file';
  this.size = parseInt(data.size, 10) || 0;
  this.path = data.path || '';
  // NOTE(review): owner/group fall back to '' (a string) when non-numeric -
  // the mixed number/string typing is kept for backward compatibility.
  this.owner = parseInt(data.owner, 10) || '';
  this.group = parseInt(data.group, 10) || '';
  this.modifiedTime = data.modifiedTime || '';
  this.permissions = data.permissions || '';
};

/**
 * Render the byte size as a human-readable string ('0 Byte', '2 KB', ...).
 * @returns {string}
 */
File.prototype.bytesToSize = function () {
  if (this.size === 0) return '0 Byte';
  const sizes = {
    Bytes: 1,
    KB: Math.pow(1024, 1),
    MB: Math.pow(1024, 2),
    GB: Math.pow(1024, 3),
    TB: Math.pow(1024, 4),
  };
  // Pick the largest unit whose threshold the size reaches.
  var unit = null;
  for (var prop in sizes) {
    if (this.size >= sizes[prop]) {
      unit = prop;
    }
  }
  return (this.size / sizes[unit]).toFixed(0) + ' ' + unit;
};

/** @returns {boolean} true when the entry is a regular file. */
File.prototype.isFile = function () {
  return this.type === 'file';
};

/** @returns {boolean} true when the entry is a directory. */
File.prototype.isDir = function () {
  return this.type === 'dir';
};

export default File;
<gh_stars>0
from __future__ import absolute_import
from __future__ import print_function
import sys
import os
import veriloggen.core.vtypes as vtypes
import pyverilog.dataflow.reorder as reorder
import pyverilog.dataflow.optimizer as voptimizer
from pyverilog.dataflow.dataflow import *
def try_optimize(node, width=32):
    """Optimize *node*, falling back to the unmodified node on any failure.

    Fixes: the bare ``except`` (which also swallowed KeyboardInterrupt and
    SystemExit) is narrowed to ``Exception``, and the unreachable trailing
    ``return node`` after the try/except was removed.
    """
    try:
        return optimize(node, width)
    except Exception:
        # optimize() raises for node types it does not support; treat any
        # failure as "leave the expression as-is" rather than propagating.
        return node
def optimize(node, width=32):
    """Optimize a veriloggen AST expression via pyverilog's dataflow optimizer.

    The node is converted to a pyverilog dataflow tree, constant-folded by
    VerilogOptimizer (using *width* as the default bit width), and converted
    back into a veriloggen AST node.
    """
    df_tree = makeDFTree(node)
    opt = voptimizer.VerilogOptimizer({}, default_width=width)
    opt_dfnode = opt.optimize(df_tree)
    opt_node = makeASTTree(opt_dfnode)
    return opt_node
def makeDFTree(node):
    """Convert a veriloggen AST node into a pyverilog dataflow (DF*) tree.

    Variables keep a reference to the source AST node (in ``.original``) so
    makeASTTree() can restore them after optimization. Raises TypeError for
    unsupported node types.
    """
    if isinstance(node, vtypes._Variable):
        name = node.name
        v = DFTerminal(name)
        # Remember the source AST node for the reverse conversion.
        v.original = node
        return v
    if isinstance(node, bool):
        # bool must be checked before int (bool is an int subclass).
        v = 1 if node else 0
        return DFIntConst(str(v))
    if isinstance(node, int):
        return DFIntConst(str(node))
    if isinstance(node, float):
        return DFFloatConst(str(node))
    if isinstance(node, str):
        return DFStringConst(node)
    if isinstance(node, vtypes.Int):
        return DFIntConst(str(node.value))
    if isinstance(node, vtypes.Float):
        return DFFloatConst(str(node.value))
    if isinstance(node, vtypes.Str):
        return DFStringConst(node.value)
    if isinstance(node, vtypes.Cond):
        true_df = makeDFTree(node.true_value)
        false_df = makeDFTree(node.false_value)
        cond_df = makeDFTree(node.cond)
        # When the condition itself is a branch, merge via the reorder helper.
        if isinstance(cond_df, DFBranch):
            return reorder.insertCond(cond_df, true_df, false_df)
        return DFBranch(cond_df, true_df, false_df)
    if isinstance(node, vtypes._UnaryOperator):
        right_df = makeDFTree(node.right)
        if isinstance(right_df, DFBranch):
            return reorder.insertUnaryOp(right_df, node.__class__.__name__)
        return DFOperator((right_df,), node.__class__.__name__)
    if isinstance(node, vtypes._BinaryOperator):
        left_df = makeDFTree(node.left)
        right_df = makeDFTree(node.right)
        if isinstance(left_df, DFBranch) or isinstance(right_df, DFBranch):
            return reorder.insertOp(left_df, right_df, node.__class__.__name__)
        return DFOperator((left_df, right_df,), node.__class__.__name__)
    if isinstance(node, vtypes.SystemTask):
        return DFSyscall(node.cmd, tuple([makeDFTree(n) for n in node.args]))
    raise TypeError("unsupported type: %s %s" % (str(type(node)), str(node)))
# Mapping from pyverilog dataflow operator names to veriloggen AST operator
# classes (used by getOp/makeASTTree below).
# Fix: the original dict listed 'Divide', 'Eq' and 'NotEq' twice; the
# duplicate (identical) entries were removed. Behavior is unchanged because
# a later duplicate key simply overwrote the earlier, equal value.
operators = {
    'Plus': vtypes.Plus,
    'Minus': vtypes.Minus,
    'Times': vtypes.Times,
    'Divide': vtypes.Divide,
    'Mod': vtypes.Mod,
    'Power': vtypes.Power,
    'Sll': vtypes.Sll,
    'Srl': vtypes.Srl,
    'Sra': vtypes.Sra,
    'Or': vtypes.Or,
    'Xor': vtypes.Xor,
    'And': vtypes.And,
    'Land': vtypes.Land,
    'Lor': vtypes.Lor,
    'Unot': vtypes.Unot,
    'Ulnot': vtypes.Ulnot,
    'Uplus': vtypes.Uplus,
    'Uminus': vtypes.Uminus,
    'Eq': vtypes.Eq,
    'NotEq': vtypes.NotEq,
    'LessThan': vtypes.LessThan,
    'LessEq': vtypes.LessEq,
    'GreaterThan': vtypes.GreaterThan,
    'GreaterEq': vtypes.GreaterEq,
}
def getOp(op):
    # Resolve a pyverilog dataflow operator name to its veriloggen AST class;
    # raises KeyError for unknown operator names.
    return operators[op]
def makeASTTree(node):
    """Convert a pyverilog dataflow (DF*) tree back into a veriloggen AST.

    Inverse of makeDFTree(): DFTerminal nodes are restored from the
    ``.original`` reference attached during the forward conversion. Raises
    TypeError for unsupported node types.
    """
    if isinstance(node, DFBranch):
        # NOTE(review): 'Cond' is unqualified here - confirm it resolves
        # (via the star import above) or should be vtypes.Cond.
        return Cond(makeASTTree(node.condnode),
                    makeASTTree(node.truenode),
                    makeASTTree(node.falsenode))
    if isinstance(node, DFIntConst):
        return vtypes.Int(int(node.value))
    if isinstance(node, DFFloatConst):
        return vtypes.Float(float(node.value))
    if isinstance(node, DFStringConst):
        return vtypes.Str(node.value)
    if isinstance(node, DFEvalValue):
        if isinstance(node.value, int):
            return vtypes.Int(node.value)
        if isinstance(node.value, float):
            return vtypes.Float(node.value)
        # Fix: this previously tested isinstance(node.value, DFStringConst),
        # which can never be true here - DFEvalValue wraps plain Python
        # constants (see the int/float branches above) - so evaluated string
        # constants always fell through to the TypeError below.
        if isinstance(node.value, str):
            return vtypes.Str(node.value)
        raise TypeError('Unknown constant')
    if isinstance(node, DFTerminal):
        return node.original
    if isinstance(node, DFUndefined):
        return vtypes.IntX()
    if isinstance(node, DFHighImpedance):
        return vtypes.IntZ()
    if isinstance(node, DFOperator):
        if len(node.nextnodes) == 1:
            return getOp(node.operator)(makeASTTree(node.nextnodes[0]))
        return getOp(node.operator)(makeASTTree(node.nextnodes[0]), makeASTTree(node.nextnodes[1]))
    if isinstance(node, DFSyscall):
        return vtypes.SystemTask(node.syscall, tuple([makeASTTree(n) for n in node.nextnodes]))
    raise TypeError("Unsupported DFNode %s" % type(node))
|
#!/bin/bash
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Run the Flutter customer tests: fetch a fresh checkout of the
# flutter/tests registry into the cache, then run every registered test.
set -e
rm -rf bin/cache/pkg/tests
git clone https://github.com/flutter/tests.git bin/cache/pkg/tests
dart --enable-asserts dev/customer_testing/run_tests.dart --skip-on-fetch-failure --skip-template bin/cache/pkg/tests/registry/*.test
|
import cv2
import os
import sys
sys.path.append("/home/syc/py-faster-rcnn/caffe-fast-rcnn/python")
import caffe
import numpy as np
import random
import time
from math import *
import dproc
from utils import *
# --- Tracking demo configuration -------------------------------------------
# Input sequence (OTB-2013 style folder of jpgs) or video file location.
video_dir = '/media/syc/My Passport/_dataset/tracking2013/'
video_name = "Shaking/img"
video_transpose = False
video_resize = (960, 540)
# bbox = 198,214,34,81
# Initial target bounding box as (x, y, w, h) in pixels.
bbox = 225,135,61,71
(x,y,w,h) = bbox
batch_size = 4
# True: read frames from a folder of images; False: open video_name as a video.
from_seq = True
scale = 1

# Caffe model/solver locations.
model_dir = 'model/'
data_dir = 'data/'
proto_solver = model_dir + 'solver.prototxt'
proto_feat = model_dir + 'feat.prototxt'
model_feat = model_dir + "ZF_faster_rcnn_final.caffemodel"
# mean_file = model_dir + 'ilsvrc_2012_mean.npy'
# Only referenced from commented-out rescaling code below - TODO confirm
# whether target_size is still needed.
target_size = 127.0
class DeepTracker:
    """CNN-based single-object tracker (Python 2 / Caffe).

    Uses a Faster-RCNN (ZF) feature network to extract conv features around
    the target, and a small solver net fine-tuned online to predict a
    target-location score map and a segmentation map.
    """

    def __init__(self):
        # All computation on GPU 0.
        caffe.set_device(0)
        caffe.set_mode_gpu()
        self.inited = False
        # Last upsampled score map produced by track() (for visualization).
        self.prob = None
        self.bbox = (0,0,0,0)
        # Per-channel means subtracted from input crops.
        self.mean = np.array([102.9801, 115.9465, 122.7717])
        self.idx = 0

    def getFeat(self, frame, box_large):
        """Extract conv features for the crop box_large=(x,y,w,h) of frame."""
        (x,y,w,h) = box_large
        data = dproc.transpose(frame, self.mean, box_large)
        self.featnet.blobs['data'].reshape(1,3,h,w)
        self.featnet.blobs['data'].data[0] = data
        self.featnet.forward()
        feat = self.featnet.blobs['feat'].data[0]
        return feat

    def update(self, frame, bbox ,step = 16):
        """Fine-tune the online solver on the current frame.

        Builds location and segmentation label maps for bbox inside a padded
        search window and runs `step` SGD iterations.
        """
        t1 = time.clock()
        (x,y,w,h) = box_large = padding(bbox, 1.0, 60)
        feat = self.getFeat(frame, box_large)
        (c_sm, h_sm, w_sm) = feat.shape
        labels = dproc.makeLabels(bbox, box_large, w_sm, h_sm)
        labels_seg = dproc.makeLabelsSeg(bbox, box_large, w_sm, h_sm)
        self.solver.net.blobs['data'].reshape(1,c_sm,h_sm,w_sm)
        self.solver.net.blobs['data'].data[0] = feat
        self.solver.net.blobs['labels'].reshape(1,1,h_sm, w_sm)
        self.solver.net.blobs['labels'].data[0] = labels
        self.solver.net.blobs['labels_seg'].reshape(1,1,h_sm, w_sm)
        self.solver.net.blobs['labels_seg'].data[0] = labels_seg
        self.solver.step(step)
        t2 = time.clock()
        print 'update takes %f seconds.' % (1.0*(t2-t1))

    def init(self, frame, bbox):
        """Create the nets and train heavily (2048 steps) on the first bbox."""
        self.solver = caffe.SGDSolver(proto_solver)
        self.featnet = caffe.Net(proto_feat,model_feat,caffe.TEST)
        self.bbox = bbox
        # (_x,_y,_w,_h) = bbox
        # scale = target_size / max(_w,_h)
        # (_x,_y,_w,_h) = bbox = scaleBox(bbox, scale)
        frame = cv2.resize(frame,(0,0),fx=scale,fy=scale)
        self.update(frame, bbox, 2048)
        self.inited = True
        self.prob = np.zeros((frame.shape[0],frame.shape[1]))

    def UpdateSize(self, seg_big, bbox, box_large, smin = 0.8, smax = 1.2, step = 0.04, lamda = 0.4):
        """Scale search in [smin, smax): pick the box size maximizing
        (segmentation mass inside the box) - lamda * area, keeping the
        center of bbox fixed. Currently only called from commented-out code.
        """
        (_x,_y,_w,_h) = bbox
        (x,y,w,h) = box_large
        cx = _x + 0.5*_w
        cy = _y + 0.5*_h
        w_new = h_new = 0
        score_size = -9e5
        s = smin
        while s < smax:
            hh = _h*s
            ww = _w*s
            score_temp = seg_big[rdint(cy-y-0.5*hh):rdint(cy-y+0.5*hh),rdint(cx-x-0.5*ww):rdint(cx-x+0.5*ww)].sum() - lamda*ww*hh
            if score_temp > score_size:
                score_size = score_temp
                w_new = ww
                h_new = hh
            s = s + step
        _x = rdint(cx-0.5*w_new)
        _y = rdint(cy-0.5*h_new)
        _w = rdint(w_new)
        _h = rdint(h_new)
        return (_x, _y, _w, _h)

    def track(self, frame):
        """Locate the target in frame.

        Forwards the solver net on a padded search window, takes the argmax
        of the upsampled score map as the new center, then fine-tunes on the
        new location. Returns the updated (x, y, w, h).
        """
        (_x,_y,_w,_h) = bbox = self.bbox
        # scale = target_size / max(_w,_h)
        # (_x,_y,_w,_h) = bbox = scaleBox(bbox, scale)
        # frame = cv2.resize(frame,(0,0),fx=scale,fy=scale)
        (x,y,w,h) = box_large = padding(bbox, 0.7, 35)
        feat = self.getFeat(frame, box_large)
        (c_sm, h_sm, w_sm) = feat.shape
        self.solver.net.blobs['data'].reshape(1,c_sm,h_sm,w_sm)
        self.solver.net.blobs['labels'].reshape(1,1,h_sm, w_sm)
        self.solver.net.blobs['labels_seg'].reshape(1,1,h_sm, w_sm)
        self.solver.net.blobs['data'].data[0] = feat
        self.solver.net.forward()
        score = dproc.softmax(self.solver.net.blobs['score'].data[0])
        seg = dproc.softmax(self.solver.net.blobs['score_seg'].data[0])
        # color = dproc.softmaxColor(self.solver.net.blobs['score'].data[0])
        # color = cv2.resize(color, (w_sm*4,h_sm*4))
        # Upsample score/seg maps 4x back toward search-window resolution.
        score_big = cv2.resize(score, (w_sm*4,h_sm*4))
        seg_big = cv2.resize(seg, (w_sm*4,h_sm*4))
        self.prob = score_big
        if score_big.max() > 0.00:
            # argmax of the flattened map -> (cx, cy) inside the search window.
            cx = score_big.argmax() % (4*w_sm)
            cy = score_big.argmax() // (4*w_sm)
            _x = rdint(x + cx - 0.5*_w)
            _y = rdint(y + cy - 0.5*_h)
            bbox = (_x,_y,_w,_h)
            # bbox = self.UpdateSize(seg_big, bbox, box_large)
        self.bbox = bbox = scaleBox(bbox, 1/scale)
        self.update(frame, bbox)
        return self.bbox
if __name__ == "__main__":
    dt = DeepTracker()
    success, frame = True, None
    seq = []
    idx = 0
    # Load either an image-sequence directory or a video file.
    if from_seq:
        for filename in os.listdir(os.path.join(video_dir,video_name)):
            if '.jpg' in filename:
                seq.append(os.path.join(video_dir,video_name,filename))
        seq.sort()
        frame = cv2.imread(seq[idx])
        idx += 1
    else:
        cap = cv2.VideoCapture(video_dir+video_name)
        success, frame = cap.read()
    while success :
        t1 = time.clock()
        if dt.inited:
            bbox = dt.track(frame)
            cv2.imshow('prob', dt.prob)
        # Draw the current box (module-level bbox until the tracker is inited).
        (x,y,w,h) = bbox
        result = frame.copy()
        cv2.rectangle(result, (x,y), (x+w,y+h), (0, 255, 255), 2)
        cv2.imshow(video_name, result)
        key = cv2.waitKey(3)
        if key == 27:
            # ESC quits.
            break
        elif key == 112 or from_seq and not dt.inited:
            # 'p' (112) re-initializes; in sequence mode the first frame
            # auto-initializes. NOTE: operator precedence makes this
            # key == 112 or (from_seq and not dt.inited).
            cv2.waitKey(10)
            dt.init(frame, bbox)
        # Advance to the next frame.
        if from_seq:
            if idx >= len(seq):
                break
            else:
                frame = cv2.imread(seq[idx])
                idx += 1
        else:
            success, frame = cap.read()
        t2 = time.clock()
        print "total speed: %ffps."% (1.0/(t2-t1))
<filename>Include/ZoomList.hpp
#ifndef ZOOM_LIST_HPP
#define ZOOM_LIST_HPP
#include <vector>
#include <utility>
#include "Zoom.hpp"
/**
 * Records the sequence of Zoom operations applied to a fixed-size image and
 * exposes doZoom() to map pixel coordinates through the accumulated
 * center/scale state (member functions are defined in ZoomList.cpp).
 */
class ZoomList {
private:
    int m_width{0};
    int m_height{0};
    std::vector<Zoom> m_zooms;
    // Fix: these members are doubles, so initialize them with double
    // literals instead of the float literals (0.0f / 1.0f) used before -
    // same values, no implicit float-to-double conversion.
    double m_xCenter{0.0};
    double m_yCenter{0.0};
    double m_scale{1.0};

public:
    ZoomList(int width, int height);

    // Append a zoom step to the list.
    void add(const Zoom& zoom);

    // Map pixel (x, y) to a coordinate pair under the current zoom state.
    std::pair<double, double> doZoom(int x, int y);
};
#endif // ZOOM_LIST_HPP |
#! /bin/bash
# Restore the PostgreSQL dump into the evemonk_postgresql container.
# Fix: 'gzcat' is a BSD/macOS name that is absent on most Linux
# distributions; 'gunzip -c' behaves identically and exists everywhere
# gzip is installed.
gunzip -c backup-2020-01-21.sql.gz | docker exec -i evemonk_postgresql psql -U postgres
|
def calculateLetterFrequency(inputString):
    """Count occurrences of each alphabetic character, case-insensitively.

    Non-alphabetic characters (digits, punctuation, whitespace) are ignored.
    Returns a dict mapping lowercase character -> count; empty input yields
    an empty dict.
    """
    frequency = {}
    for character in inputString.lower():
        if not character.isalpha():
            continue
        frequency[character] = frequency.get(character, 0) + 1
    return frequency
#!/bin/bash
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -euo pipefail

source util.sh

# Track how many times each HTTP status code was received, keyed by code.
# Fix: this declaration was commented out even though the script reads and
# writes ${codes[...]} below; with 'set -u', referencing an undeclared array
# aborts the script in older bash versions.
declare -A codes

# generate_traffic will print a status update every UPDATE_FREQUENCY messages.
UPDATE_FREQUENCY=25

main() {
  # Get our working project, or exit if it's not set.
  local project_id=$(get_project_id)
  if [[ -z "$project_id" ]]; then
    exit 1
  fi

  export url="https://${project_id}.appspot.com/airportName?iataCode=${IATA_CODE}&key=${API_KEY}"
  echo "This command will exit automatically in $TIMEOUT_SECONDS seconds."
  echo "Generating traffic to ${url}..."
  echo "Press Ctrl-C to stop."

  local endtime=$(($(date +%s) + $TIMEOUT_SECONDS))
  local request_count=0
  # Send queries repeatedly until TIMEOUT_SECONDS seconds have elapsed.
  while [[ $(date +%s) -lt $endtime ]]; do
    request_count=$(( request_count + 1))
    if [[ $((request_count % UPDATE_FREQUENCY)) == 0 ]]; then
      echo "Served ${request_count} requests."
    fi
    # Make the HTTP request and count its status code.
    # Fix: the previous first-seen check matched "$http_status" as a
    # substring of the joined key list; incrementing with a :-0 default is
    # simpler, correct, and safe under 'set -u'.
    http_status=$(curl -so /dev/null -w "%{http_code}" "$url")
    codes["$http_status"]=$(( ${codes[$http_status]:-0} + 1 ))
  done
}

print_status() {
  echo ""
  echo "HTTP status codes received from ${url}:"
  for code in "${!codes[@]}"; do
    echo "${code}: ${codes[$code]}"
  done
}

# Defaults.
IATA_CODE="SFO"
TIMEOUT_SECONDS=$((5 * 60)) # Timeout after 5 minutes.

if [[ "$#" == 1 ]]; then
  API_KEY="$1"
elif [[ "$#" == 2 ]]; then
  API_KEY="$1"
  IATA_CODE="$2"
else
  echo "Wrong number of arguments specified."
  echo "Usage: generate_traffic_with_key.sh api-key [iata-code]"
  exit 1
fi

# Print the received codes when we exit.
trap print_status EXIT
main "$@"
|
<reponame>srishti77/OpenWhisk_SGAK
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';

// NOTE(review): 'vscode' is required here but never referenced in this
// module - confirm whether it can be removed (kept, since file-level imports
// should not be dropped from a partial view).
var vscode = require('vscode');

// Shared output channel; must be injected via setLog() before any of the
// append*/print* helpers below are used.
let log;
/**
 * Right-pad a value with spaces to at least `length` characters.
 * null/undefined input is treated as the empty string; any value whose
 * `.length` is not below `length` (including non-strings) is returned as-is.
 */
function pad(input, length) {
    let padded = input == undefined ? '' : input;
    while (padded.length < length) {
        padded = padded + ' ';
    }
    return padded;
}
// Write a section heading followed by a horizontal rule to the output channel.
function appendHeading(name) {
    log.appendLine('\n'+name+'\n---------------------------------------------------------------------------------');
}
// Write one formatted list line for an entry: the (optionally
// namespace-qualified) name padded to a fixed column, the publish state
// ('public'/'private'), and a ' binding' suffix for bindings.
function appendEntry(entry, qualified) {
    var qualifiedName = formatQualifiedName(entry, qualified);
    var suffix = ''
    if ( entry.hasOwnProperty('binding') && entry.binding ) {
        suffix = ' binding';
    }
    log.appendLine( pad(qualifiedName, 66) + (entry.publish ? 'public':'private') + suffix);
}
// Write one formatted activation line: the activation id padded to a fixed
// column, followed by the entry name.
// Fix: removed dead code - qualifiedName and suffix were computed but never
// used in the output. The (still ignored) 'qualified' parameter is kept so
// the exported signature is unchanged.
function appendActivation(entry, qualified) {
    log.appendLine( pad(entry.activationId, 45) + entry.name);
}
/**
 * Format an entry's name, optionally prefixed with its namespace.
 * `qualified` defaults to true when null/undefined; any other falsy value
 * suppresses the namespace prefix.
 */
function formatQualifiedName(entry, qualified) {
    const includeNamespace = qualified == undefined ? true : qualified;
    return includeNamespace ? entry.namespace + '/' + entry.name : '' + entry.name;
}
/**
 * Split a 'namespace/name' string at the first '/'.
 * Input without a '/' yields an empty namespace and the whole string as the
 * name (matching substring behavior with index -1).
 * @returns {{name: string, namespace: string}}
 */
function parseQualifiedName(name) {
    const text = name.toString();
    const separator = text.indexOf('/');
    return {
        name: text.substring(separator + 1),
        namespace: text.substring(0, separator)
    };
}
// Inject the output channel used by all append*/print* helpers in this module.
function setLog(_log) {
    log = _log;
}
// Write an OpenWhisk error to the output channel: the error itself, the
// activation id when present, then either the activation's log lines or the
// nested error message.
function printOpenWhiskError(error) {
    log.appendLine('\nERROR: '+error.toString());
    if (error.error.activationId) {
        log.appendLine('activationId: '+error.error.activationId);
    }
    if (error.error.logs && (error.error.logs.length > 0)) {
        for (var x=0; x<error.error.logs.length; x++) {
            log.appendLine(error.error.logs[x]);
        }
    }
    else if (error.error.error) {
        log.appendLine(error.error.error);
    }
}
/**
 * Parse a CLI-style parameter string of the form
 * '-p key1 value1 -p key2 value2' into a { key: value } map.
 * Tokens without a space (no value) are skipped; keys and values are trimmed.
 * Fix: removed the stray `console.log(params)` debug leftover.
 *
 * @param {string} parameterString - raw '-p ...' parameter text
 * @returns {Object} map of parameter name to value
 */
function parseParametersString(parameterString) {
    var params = {};
    var tokens = parameterString.split('-p ');
    for (var x=0; x<tokens.length; x++) {
        var token = tokens[x]
        var firstSpace = token.indexOf(' ');
        if (token.length >0 && firstSpace >= 0) {
            var key = token.substring(0, firstSpace).trim();
            var value = token.substring(firstSpace+1).trim();
            params[key] = value;
        }
    }
    return params;
}
// Public surface of this helper module.
module.exports = {
    pad:pad,
    appendHeading:appendHeading,
    appendEntry:appendEntry,
    formatQualifiedName:formatQualifiedName,
    setLog:setLog,
    printOpenWhiskError: printOpenWhiskError,
    parseParametersString: parseParametersString,
    parseQualifiedName:parseQualifiedName,
    appendActivation:appendActivation
}
|
#!/bin/sh
# Clean-build the Release configuration of IconScanner, then reveal the
# build products folder in Finder.
xcodebuild -parallelizeTargets \
    -project "IconScanner.xcodeproj" \
    -target "IconScanner" \
    -configuration "Release" \
    clean \
    build

open build/Release
|
const express = require('express');
const logger = require('morgan');
const cookieParser = require('cookie-parser');
const bodyParser = require('body-parser');
const compress = require('compression');
const nunjucks = require('nunjucks');
const router = require('./routes');
// Configure the express app: environment locals, nunjucks view engine,
// common middleware, static assets, routes, then 404 and error handlers.
module.exports = (app, config) => {
  const env = process.env.NODE_ENV || 'development';
  /* eslint-disable no-param-reassign */
  app.locals.ENV = env;
  app.locals.ENV_DEVELOPMENT = env === 'development';
  /* eslint-enable no-param-reassign */

  app.set('views', `${config.root}/app/views`);
  app.set('view engine', 'nunjucks');
  nunjucks.configure(`${config.root}/app/views`, {
    autoescape: true,
    express: app,
    watch: true,
  });

  app.use(logger('dev'));
  app.use(bodyParser.json());
  app.use(bodyParser.urlencoded({
    extended: true,
  }));
  app.use(cookieParser());
  app.use(compress());
  app.use(express.static(`${config.root}/public`));

  app.use('/', router);

  // Anything not matched by the router becomes a 404 handed to the error
  // handler below.
  app.use((req, res, next) => {
    const err = new Error('Not Found');
    err.statusCode = 404;
    next(err);
  });

  // eslint-disable-next-line no-unused-vars
  app.use((err, req, res, next) => {
    console.log(err);
    res.status(err.statusCode || 500);
    // NOTE(review): 'message' receives the whole Error object rather than
    // err.message - confirm the error template expects that; the stack is
    // exposed only in development via 'error'.
    res.render('error', {
      message: err,
      error: app.get('env') === 'development' ? err : {},
      title: 'error',
    });
  });
};
|
var test = require('tape');
// Async helper: logs "beep" then invokes cb after ~1 second.
var foo = function(cb) {
    setTimeout(function() {
        console.log("beep");
        cb();
    },1000);
}

// Verifies that queue.series runs the async steps strictly in order and that
// values passed to lib.done({...}) in earlier steps are readable via
// lib.get() in later steps.
test('testWaterfallAsync',function(t) {
    t.plan(1);
    var Q = require('../');
    var queue = new Q;
    queue.series([
        function(lib) {
            console.log("before 1");
            foo(function() {
                lib.done({one:1})
            })
            console.log("after 1");
        },
        function(lib) {
            console.log("before 42");
            foo(function() {
                lib.done({life:42})
            });
            console.log("after 42");
        },
        function(lib) {
            // 1 + 42 collected from the two earlier steps.
            var x = lib.get('one') + lib.get('life');
            console.log(x);
            t.equal(43,x);
            lib.done();
        }
    ]);
});
|
#!/bin/sh
# Copyright © 2012 Bart Massey
# [This program is licensed under the "MIT License"]
# Please see the file COPYING in the source
# distribution of this software for license terms.
# Publishes the Inform 7 release output (and Figures, if present) from the
# working tree to the gh-pages branch, then returns to master.
# Per-process scratch directory; removed on exit or signal via the trap below.
TMP="/tmp/i7gh-release.$$"
trap "rm -rf $TMP" 0 1 2 3 15
# Stage the release materials before switching branches.
cp -a *.materials/Release $TMP
[ -d *.materials/Figures ] && cp -a *.materials/Figures $TMP
git checkout gh-pages || exit 1
# Drop untracked files left over from the master checkout.
git clean -df
#rm -rf *.inform *.materials
# Copy the staged release into the gh-pages working tree and commit it,
# recording which master commit it was built from.
cp -a $TMP/. .
git add .
MASTER=`git show-ref heads/master | awk '{print $1;}'`
MSG="web version of $MASTER from master"
git commit -am "$MSG"
git push
git checkout master
git clean -df
#rm -rf interpreter
|
#!/bin/bash
# Access granted under MIT Open Source License: https://en.wikipedia.org/wiki/MIT_License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions
# of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
# TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#######################################################
# Deploys all necessary azure resources and stores
# configuration information in an .ENV file
#
# Prerequisites:
# - User is logged in to the azure cli
# - Correct Azure subscription is selected
#######################################################
set -o errexit
set -o pipefail
set -o nounset
# set -o xtrace # For debugging
###################
# REQUIRED ENV VARIABLES:
#
# ENV_NAME
# RESOURCE_GROUP_NAME
# RESOURCE_GROUP_LOCATION
# AZURE_SUBSCRIPTION_ID
# AZURESQL_SERVER_PASSWORD
#####################
# NOTE(review): DEPLOYMENT_ID and TIMESTAMP are also referenced below but are
# not in the list above — with 'set -o nounset' the script aborts if they are
# unset. Confirm the caller exports them.
# DEPLOY ARM TEMPLATE
# Set account to where ARM template will be deployed to
echo "Deploying to Subscription: $AZURE_SUBSCRIPTION_ID"
az account set --subscription $AZURE_SUBSCRIPTION_ID
# Create resource group
echo "Creating resource group: $RESOURCE_GROUP_NAME"
az group create --name "$RESOURCE_GROUP_NAME" --location "$RESOURCE_GROUP_LOCATION" --tags Environment=$ENV_NAME
# By default, set all KeyVault permission to deployer
# Retrieve KeyVault User Id
# NOTE(review): newer Azure CLI versions return '.id' (not '.objectId') from
# 'az ad signed-in-user show' — verify against the CLI version in use.
kv_owner_object_id=$(az ad signed-in-user show --output json | jq -r '.objectId')
# Deploy arm template
echo "Deploying resources into $RESOURCE_GROUP_NAME"
arm_output=$(az deployment group create \
--resource-group "$RESOURCE_GROUP_NAME" \
--template-file "./infrastructure/azuredeploy.json" \
--parameters @"./infrastructure/azuredeploy.parameters.${ENV_NAME}.json" \
--parameters keyvault_owner_object_id=${kv_owner_object_id} deployment_id=${DEPLOYMENT_ID} sqlServerPassword=${AZURESQL_SERVER_PASSWORD} \
--output json)
if [[ -z $arm_output ]]; then
echo >&2 "ARM deployment failed."
exit 1
fi
#########################
# CREATE AND CONFIGURE SERVICE PRINCIPAL FOR ADLA GEN2
# Retrieve storage account name
export AZURE_STORAGE_ACCOUNT=$(echo $arm_output | jq -r '.properties.outputs.storage_account_name.value')
# Retrieve storage account (ADLS Gen2) key
export AZURE_STORAGE_KEY=$(az storage account keys list \
--account-name $AZURE_STORAGE_ACCOUNT \
--resource-group $RESOURCE_GROUP_NAME \
--output json |
jq -r '.[0].value')
# Retrieve full storage account azure id
stor_id=$(az storage account show \
--name "$AZURE_STORAGE_ACCOUNT" \
--resource-group "$RESOURCE_GROUP_NAME" \
--output json |
jq -r '.id')
# Add file system storage account
# The 'az storage' commands below authenticate implicitly via the exported
# AZURE_STORAGE_ACCOUNT / AZURE_STORAGE_KEY environment variables.
storage_file_system=datalake
echo "Creating ADLS Gen2 File system: $storage_file_system"
az storage container create --name $storage_file_system
echo "Creating folders within the file system."
# Create folders for databricks libs
az storage fs directory create -n '/sys/databricks/libs' -f $storage_file_system
# Create folders for SQL external tables
az storage fs directory create -n '/data/dw/fact_parking' -f $storage_file_system
az storage fs directory create -n '/data/dw/dim_st_marker' -f $storage_file_system
az storage fs directory create -n '/data/dw/dim_parking_bay' -f $storage_file_system
az storage fs directory create -n '/data/dw/dim_location' -f $storage_file_system
echo "Uploading seed data to data/seed"
az storage blob upload --container-name $storage_file_system\
--file data/seed/dim_date.csv --name "data/seed/dim_date/dim_date.csv"
az storage blob upload --container-name $storage_file_system \
--file data/seed/dim_time.csv --name "data/seed/dim_time/dim_time.csv"
# Create SP and grant correct rights to storage account
sp_stor_name=$(echo $arm_output | jq -r '.properties.outputs.service_principal_storage_name.value')
echo "Creating Service Principal (SP) for access to ADLA Gen2: '$sp_stor_name'"
sp_stor_out=$(az ad sp create-for-rbac \
--role "Storage Blob Data Contributor" \
--scopes "$stor_id" \
--name $sp_stor_name \
--output json)
export SP_STOR_ID=$(echo $sp_stor_out | jq -r '.appId')
export SP_STOR_PASS=$(echo $sp_stor_out | jq -r '.password')
export SP_STOR_TENANT=$(echo $sp_stor_out | jq -r '.tenant')
###################
# SQL
echo "Retrieving SQL Server information from the deployment."
# Retrieve SQL creds
export SQL_SERVER_NAME=$(echo $arm_output | jq -r '.properties.outputs.sql_server_name.value')
export SQL_SERVER_USERNAME=$(echo $arm_output | jq -r '.properties.outputs.sql_server_username.value')
export SQL_SERVER_PASSWORD=$(echo $arm_output | jq -r '.properties.outputs.sql_server_password.value')
export SQL_DW_DATABASE_NAME=$(echo $arm_output | jq -r '.properties.outputs.sql_dw_database_name.value')
# SQL Connection String
# The CLI returns a template containing literal <username>/<password>
# placeholders; the two substitutions below fill them in.
sql_dw_connstr_nocred=$(az sql db show-connection-string --client ado.net \
--name $SQL_DW_DATABASE_NAME --server $SQL_SERVER_NAME --output json |
jq -r .)
sql_dw_connstr_uname=${sql_dw_connstr_nocred/<username>/$SQL_SERVER_USERNAME}
sql_dw_connstr_uname_pass=${sql_dw_connstr_uname/<password>/$SQL_SERVER_PASSWORD}
####################
# APPLICATION INSIGHTS
echo "Retrieving ApplicationInsights information from the deployment."
appinsights_name=$(echo $arm_output | jq -r '.properties.outputs.appinsights_name.value')
# NOTE(review): 'az monitor app-insights' lives in the application-insights
# CLI extension — confirm it is installed in the deploy environment.
export APPINSIGHTS_KEY=$(az monitor app-insights component show \
--app "$appinsights_name" \
--resource-group "$RESOURCE_GROUP_NAME" \
--output json |
jq -r '.instrumentationKey')
# ###########################
# # RETRIEVE DATABRICKS INFORMATION AND CONFIGURE WORKSPACE
#
echo "Retrieving Databricks information from the deployment."
databricks_location=$(echo $arm_output | jq -r '.properties.outputs.databricks_location.value')
databricks_workspace_name=$(echo $arm_output | jq -r '.properties.outputs.databricks_workspace_name.value')
databricks_workspace_id=$(echo $arm_output | jq -r '.properties.outputs.databricks_workspace_id.value')
export DATABRICKS_HOST=https://${databricks_location}.azuredatabricks.net
# Retrieve databricks PAT token
echo "Generating a Databricks PAT token."
databricks_global_token=$(az account get-access-token --resource 2ff814a6-3304-4ab8-85cb-cd0e6f879c1d --output json | jq -r .accessToken) # Databricks app global id
azure_api_token=$(az account get-access-token --resource https://management.core.windows.net/ --output json | jq -r .accessToken)
api_response=$(curl -sf $DATABRICKS_HOST/api/2.0/token/create \
-H "Authorization: Bearer $databricks_global_token" \
-H "X-Databricks-Azure-SP-Management-Token:$azure_api_token" \
-H "X-Databricks-Azure-Workspace-Resource-Id:$databricks_workspace_id" \
-d '{ "comment": "For deployment" }')
databricks_token=$(echo $api_response | jq -r '.token_value')
export DATABRICKS_TOKEN=$databricks_token
echo "Waiting for Databricks workspace to be ready..."
sleep 3m # It takes a while for a databricks workspace to be ready for new clusters.
# Configure databricks
# Sourced (not executed) so exported variables above are visible to it.
. ./scripts/configure_databricks.sh
####################
# DATA FACTORY
# Retrieve KeyVault details
echo "Retrieving KeyVault information from the deployment."
kv_name=$(echo $arm_output | jq -r '.properties.outputs.keyvault_name.value')
export KV_URL=https://$kv_name.vault.azure.net/
echo "Updating Data Factory LinkedService to point to newly deployed resources (KeyVault, Databricks and DataLake)."
# Create a copy of the ADF dir into a .tmp/ folder.
adfTempDir=.tmp/adf
mkdir -p $adfTempDir && cp -a adf/ .tmp/
# Update LinkedServices to point to newly deployed Datalake, Databricks and KeyVault
# (jq cannot edit in place, hence the write-to-tmpfile-then-mv pattern)
tmpfile=.tmpfile
adfLsDir=$adfTempDir/linkedService
jq --arg kvurl "$KV_URL" '.properties.typeProperties.baseUrl = $kvurl' $adfLsDir/Ls_KeyVault_01.json > "$tmpfile" && mv "$tmpfile" $adfLsDir/Ls_KeyVault_01.json
jq --arg databricksDomain "$DATABRICKS_HOST" '.properties.typeProperties.domain = $databricksDomain' $adfLsDir/Ls_AzureDatabricks_01.json > "$tmpfile" && mv "$tmpfile" $adfLsDir/Ls_AzureDatabricks_01.json
jq --arg datalakeUrl "https://$AZURE_STORAGE_ACCOUNT.dfs.core.windows.net" '.properties.typeProperties.url = $datalakeUrl' $adfLsDir/Ls_AdlsGen2_01.json > "$tmpfile" && mv "$tmpfile" $adfLsDir/Ls_AdlsGen2_01.json
# Deploy ADF artifacts
export DATAFACTORY_NAME=$(echo $arm_output | jq -r '.properties.outputs.datafactory_name.value')
export ADF_DIR=$adfTempDir
. ./scripts/deploy_adf_artifacts.sh
# SP for integration tests
sp_adf_name=$(echo $arm_output | jq -r '.properties.outputs.service_principal_datafactory_name.value')
sp_adf_out=$(az ad sp create-for-rbac \
--role "Data Factory contributor" \
--scopes "/subscriptions/$AZURE_SUBSCRIPTION_ID/resourceGroups/$RESOURCE_GROUP_NAME/providers/Microsoft.DataFactory/factories/$DATAFACTORY_NAME" \
--name "$sp_adf_name" \
--output json)
export SP_ADF_ID=$(echo $sp_adf_out | jq -r '.appId')
export SP_ADF_PASS=$(echo $sp_adf_out | jq -r '.password')
export SP_ADF_TENANT=$(echo $sp_adf_out | jq -r '.tenant')
####################
# SAVE RELEVANT SECRETS IN KEYVAULT
echo "Storing secrets in KeyVault."
az keyvault secret set --vault-name $kv_name --name "subscriptionId" --value "$AZURE_SUBSCRIPTION_ID"
az keyvault secret set --vault-name $kv_name --name "kvUrl" --value "$KV_URL"
az keyvault secret set --vault-name $kv_name --name "sqlsrvrName" --value "$SQL_SERVER_NAME"
az keyvault secret set --vault-name $kv_name --name "sqlsrvUsername" --value "$SQL_SERVER_USERNAME"
az keyvault secret set --vault-name $kv_name --name "sqlsrvrPassword" --value "$SQL_SERVER_PASSWORD"
az keyvault secret set --vault-name $kv_name --name "sqldwDatabaseName" --value "$SQL_DW_DATABASE_NAME"
az keyvault secret set --vault-name $kv_name --name "sqldwConnectionString" --value "$sql_dw_connstr_uname_pass"
az keyvault secret set --vault-name $kv_name --name "datalakeAccountName" --value "$AZURE_STORAGE_ACCOUNT"
az keyvault secret set --vault-name $kv_name --name "datalakeKey" --value "$AZURE_STORAGE_KEY"
az keyvault secret set --vault-name $kv_name --name "spStorName" --value "$sp_stor_name"
az keyvault secret set --vault-name $kv_name --name "spStorId" --value "$SP_STOR_ID"
az keyvault secret set --vault-name $kv_name --name "spStorPass" --value "$SP_STOR_PASS"
az keyvault secret set --vault-name $kv_name --name "spStorTenantId" --value "$SP_STOR_TENANT"
az keyvault secret set --vault-name $kv_name --name "databricksDomain" --value "$DATABRICKS_HOST"
az keyvault secret set --vault-name $kv_name --name "databricksToken" --value "$DATABRICKS_TOKEN"
az keyvault secret set --vault-name $kv_name --name "applicationInsightsKey" --value "$APPINSIGHTS_KEY"
az keyvault secret set --vault-name $kv_name --name "adfName" --value "$DATAFACTORY_NAME"
az keyvault secret set --vault-name $kv_name --name "spAdfName" --value "$sp_adf_name"
az keyvault secret set --vault-name $kv_name --name "spAdfId" --value "$SP_ADF_ID"
az keyvault secret set --vault-name $kv_name --name "spAdfPass" --value "$SP_ADF_PASS"
az keyvault secret set --vault-name $kv_name --name "spAdfTenantId" --value "$SP_ADF_TENANT"
####################
# AZDO Azure Service Connection and Variables Groups
. ./scripts/deploy_azdo_service_connections_azure.sh
. ./scripts/deploy_azdo_variables.sh
####################
# BUILD ENV FILE FROM CONFIG INFORMATION
# The heredoc below is appended verbatim to the env file; do not add
# comments inside it.
env_file=".env.${ENV_NAME}"
echo "Appending configuration to .env file."
cat << EOF >> $env_file
# ------ Configuration from deployment on ${TIMESTAMP} -----------
RESOURCE_GROUP_NAME=${RESOURCE_GROUP_NAME}
RESOURCE_GROUP_LOCATION=${RESOURCE_GROUP_LOCATION}
SQL_SERVER_NAME=${SQL_SERVER_NAME}
SQL_SERVER_USERNAME=${SQL_SERVER_USERNAME}
SQL_SERVER_PASSWORD=${SQL_SERVER_PASSWORD}
SQL_DW_DATABASE_NAME=${SQL_DW_DATABASE_NAME}
AZURE_STORAGE_ACCOUNT=${AZURE_STORAGE_ACCOUNT}
AZURE_STORAGE_KEY=${AZURE_STORAGE_KEY}
SP_STOR_NAME=${sp_stor_name}
SP_STOR_ID=${SP_STOR_ID}
SP_STOR_PASS=${SP_STOR_PASS}
SP_STOR_TENANT=${SP_STOR_TENANT}
DATABRICKS_HOST=${DATABRICKS_HOST}
DATABRICKS_TOKEN=${DATABRICKS_TOKEN}
DATAFACTORY_NAME=${DATAFACTORY_NAME}
APPINSIGHTS_KEY=${APPINSIGHTS_KEY}
KV_URL=${KV_URL}
EOF
echo "Completed deploying Azure resources $RESOURCE_GROUP_NAME ($ENV_NAME)"
|
#!/bin/sh
# CocoaPods-generated script: copies vendored frameworks into the app bundle.
# NOTE(review): uses bash-only features (the 'function' keyword, the ERR
# trap, arrays) under a /bin/sh shebang — Xcode normally runs this with
# bash, but confirm for this project.
set -e
set -u
set -o pipefail
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# Copies and strips a vendored framework
# $1: framework path (absolute, or relative to BUILT_PRODUCTS_DIR).
# Copies it into the app's Frameworks folder, strips architectures not being
# built, re-signs it, and (Xcode < 7) embeds Swift runtime dylibs.
install_framework()
{
  # Resolve the source: prefer the built-products copy, fall back to the
  # basename inside BUILT_PRODUCTS_DIR, then the literal path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  # Locate the framework's main binary (may itself be a symlink).
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# Copies and strips a vendored dSYM
# $1: path to a .framework.dSYM bundle. Copies it into DERIVED_FILES_DIR,
# strips unneeded architectures, then moves it to DWARF_DSYM_FOLDER_PATH.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi
    # STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 means the binary
    # was processed, 0 means no matching architectures were found.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Copies the bcsymbolmap files of a vendored framework
# Copies the bcsymbolmap files of a vendored framework
# $1: path to a .bcsymbolmap file; copied into BUILT_PRODUCTS_DIR.
install_bcsymbolmap() {
  local bcsymbolmap_path="$1"
  local destination="${BUILT_PRODUCTS_DIR}"
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
# Signs a framework with the provided identity
# $1: path to the bundle/binary to sign. No-op unless an expanded identity
# is set and code signing is both required and allowed by build settings.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    # When parallel signing is enabled, background the job; the epilogue
    # 'wait' collects all of them.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# Strip invalid architectures
# $1: Mach-O binary. Removes (via lipo) every architecture slice not in the
# current build's ARCHS. Communicates its outcome through the global
# STRIP_BINARY_RETVAL (1 = processed, 0 = no matching archs found).
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Install the pod frameworks for the active configuration (both branches
# currently install the same framework; CocoaPods emits one branch per
# configuration).
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/LXCategory/LXCategory.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/LXCategory/LXCategory.framework"
fi
# Collect any backgrounded parallel code-sign jobs before exiting.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
/**************************************************************************
*
* Copyright (c) 2016-2017 <NAME>, All Rights Reserved.
*
**************************************************************************/
"use strict";

/**
 * Utility functions for working with traits.
 */
define(function (require) {
  var R = require("ramda");

  /**
   * Extends an existing object with additional methods implemented
   * by the trait.
   *
   * If the object already has a method (own or inherited) with the same
   * name as a trait method, then that method in the object WILL NOT be
   * overriden by the trait method.
   *
   * @param object The object that will receive additional methods.
   *
   * @param traitObject The object that implements the methods that
   * will be added to "object".
   *
   * @param traitMethodList List of the names of methods (String) of
   * "traitObject" that will be added to "object".
   */
  function extendWith(object, traitObject, traitMethodList) {
    // We WILL NOT override methods on the object that have the
    // same name as methods in the trait object. That means the
    // given object methods always override the trait methods.
    var isMethodOverridable = R.compose(R.not, R.hasIn(R.__, object));
    R.filter(isMethodOverridable, traitMethodList)
      .forEach(function (methodName) {
        var traitMethod = traitObject[methodName];
        // Proxy so the trait method always runs with the trait
        // object as its receiver, regardless of the call site.
        var proxyMethod = function () {
          return traitMethod.apply(traitObject, arguments);
        };
        object[methodName] = proxyMethod;
      });
  }

  /**
   * Extends an existing object with additional methods implemented
   * by the trait. All the methods in the trait object which match
   * the given regular expression are added to the object.
   *
   * @param object The object that will receive additional methods.
   *
   * @param traitObject The object that implements the methods that
   * will be added to "object".
   *
   * @param traitMethodRegex Regular expression for methods names of
   * "traitObject" that will be added to "object".
   */
  function extendWithSome(object, traitObject, traitMethodRegex) {
    var predicate = traitMethodRegex.test.bind(traitMethodRegex);
    // BUG FIX: plain objects have no .keys() method — the original called
    // traitObject.keys(), which throws a TypeError at runtime. Enumerate
    // own keys with Object.keys() instead.
    var traitMethodList = Object.keys(traitObject).filter(predicate);
    extendWith(object, traitObject, traitMethodList);
  }

  var Traits = {
    extendWith: extendWith,
    extendWithSome: extendWithSome,
  };

  return Traits;
});
|
import Modifier from '@mafia/structures/Modifier';
import type Role from '@mafia/structures/Role';
import { Attack } from '../managers/NightActionsManager';
// Roles eligible for the Strongman modifier.
const VALID_ROLES = new Set(['Vigilante', 'Goon', 'Godfather', 'Serial Killer']);

/**
 * Modifier that upgrades the role's attack strength to Powerful.
 */
export default class StrongmanModifier extends Modifier {
	/** Sets the modified role's attack to Powerful. */
	public patch(role: Role) {
		role.modifiers.attack = Attack.Powerful;
	}

	/** Only the killing roles listed in VALID_ROLES can take this modifier. */
	public canPatch(role: Role) {
		return VALID_ROLES.has(role.name);
	}
}
|
#!/bin/sh
#
# This script starts the application container. If
# needed, it will seed the Docker repository with the firmware's built-in
# container image.
# Manifest of built-in images; read below as "<name> <version> <tag> ..." per line.
MANIFEST="/usr/share/container-images/images.manifest"
info() {
    # Mirror the message to stdout and to syslog under a common tag.
    local msg="${*}"
    echo "container-image: ${msg}"
    logger -t "container-image" "${msg}"
}
# Log a fatal message (stdout + syslog via info) and abort the script.
die() {
    info "Fatal: ${*}"
    exit 1
}
# Start a container.
# $1: image reference ("name:version"), $2: container name.
# Dies if a container with this image is already running.
start_container() {
    local status
    local docker_name docker_image
    docker_image="${1}"
    docker_name="${2}"
    info "Starting container ${docker_image}..."
    # NOTE(review): this inspects ${docker_image} (the image reference), not
    # ${docker_name} (the container). '.State.Status' is a container field,
    # so this presumably only yields a status when a container happens to be
    # addressable by that reference — confirm intent. If inspect fails,
    # ${status} is empty and the default branch does a fresh 'docker run'.
    status=$(docker inspect "${docker_image}" --format='{{.State.Status}}')
    case "${status}" in
        exited)
            # This case requires a different startup method
            docker start "${docker_name}"
            ;;
        running)
            die "${docker_image} already running..."
            ;;
        *)
            # DOCKER_OPTS is intentionally unquoted so multiple options split.
            docker run ${DOCKER_OPTS} --name "${docker_name}" -d "${docker_image}"
            ;;
    esac
}
# Stop a running container by name; abort the whole script on failure.
stop_container() {
    local docker_name="${1}"
    info "Stopping container ${docker_name}..."
    docker stop "${docker_name}" || die "Error stopping ${docker_name}"
}
# Remove a stopped container by name; abort the whole script on failure.
remove_container() {
    local docker_name="${1}"
    info "Removing container ${docker_name}..."
    docker rm "${docker_name}" || die "Error removing ${docker_name}"
}
########################### MAIN SCRIPT ###########################
DOCKER_OPTS=""
# Each manifest line supplies: <name> <version> <tag> (extra fields ignored).
case "$1" in
    start)
        info "Starting containers from ${MANIFEST}"
        while read -r name version tag _; do
            start_container "${name}:${version}" "${tag}"|| die "Error starting the container ${name}"
            info "Success starting the container ${name}:${version}"
        done < "${MANIFEST}"
        ;;
    stop)
        # Stop and remove every container listed in the manifest.
        while read -r name version tag _; do
            stop_container "${tag}"
            remove_container "${tag}"
            info "Stopping the container ${tag}"
        done < "${MANIFEST}"
        ;;
    *)
        die "Usage: $0 {start|stop}"
        ;;
esac
|
import TableTemplate from '@/assets/img/table.png'
import DialogTemplate from '@/assets/img/dialog.png'
// Template gallery entries: preview image, display label, and route path.
const tableEntry = { id: 0, img: TableTemplate, label: '搜索表格', path: '/template/basicData' };
const dialogEntry = { id: 1, img: DialogTemplate, label: '弹窗', path: '/template/dialog' };

const templates = [tableEntry, dialogEntry];

export { templates };
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2021/7/15 2:47 下午
# @File : my_ppo.py
# @Author: johnson
# @Desc : 使用stable-baseline3 的PPO算法
import os
import sys
import time
import gfootball.env as football_env
import argparse
from stable_baselines3 import PPO
from stable_baselines3.common.callbacks import CheckpointCallback
#所有可用的训练模式
# All available training scenarios (gfootball levels).
levels = [
    '11_vs_11_competition', '11_vs_11_easy_stochastic',
    '11_vs_11_hard_stochastic', '11_vs_11_kaggle', '11_vs_11_stochastic',
    '1_vs_1_easy', '5_vs_5', 'academy_3_vs_1_with_keeper', 'academy_corner',
    'academy_counterattack_easy', 'academy_counterattack_hard',
    'academy_empty_goal', 'academy_empty_goal_close',
    'academy_pass_and_shoot_with_keeper',
    'academy_run_pass_and_shoot_with_keeper', 'academy_run_to_score',
    'academy_run_to_score_with_keeper', 'academy_single_goal_versus_lazy',
]


def model_config(parser):
    """Register model/scenario command-line options on ``parser`` and return it."""
    add = parser.add_argument
    add('--level', default='5_vs_5', type=str, choices=levels,
        help='定义要解决的问题,要使用的游戏场景,一共11种')
    add('--state', default='extracted_stacked', type=str,
        help='extracted 或者extracted_stacked')
    add('--reward_experiment', default='scoring,checkpoints', type=str,
        help='奖励的方式,"scoring" 或者 "scoring,checkpoints, 注意奖励方式,如果踢全场,最好用2种结合"')
    add('--num_timesteps', default=20000000, type=int,
        help='训练的时间步数,一般可以200万个step')
    add('--nsteps', default=128, type=int, help='batch size 是 nsteps')
    add('--output_path', default='output', type=str,
        help='模型保存的路径,模型名称根据时间自动命名,默认为output')
    add('--model_save_prefix', default='ppo_model', type=str,
        help='模型保存的名称的前缀')
    add('--model_save_frequency', default=100000, type=int,
        help='每所少个step保存一次模型,默认为100000')
    return parser
def data_config(parser):
    """Register logging/data command-line options on ``parser`` and return it."""
    add = parser.add_argument
    add('--log_dir', default='logs', help='日志目录')
    add('--tensorboard', action='store_true')
    return parser
def train_config(parser):
    """Register train/eval command-line options on ``parser`` and return it."""
    add = parser.add_argument
    add('--do_train', action='store_true', help="训练并测试模型")
    add('--do_eval', action='store_true',
        help="只测试模型,需要给出要加载的模型checkpoint")
    add('--load_checkpoint', default='output/ppo_model_20000000_steps.zip',
        type=str, help="只测试模型,需要给出要加载的模型checkpoint")
    add('--initial_checkpoint', default='', type=str,
        help="训练时,使用哪个模型继续训练,默认为空")
    add('--dump_scores', action='store_true', default=True,
        help="保存记录分数的样本轨迹。")
    add('--dump_full_episodes', action='store_true', default=True,
        help="记录每个整个episode的轨迹。")
    add('--render', action='store_true', default=False, help="是否显示动画")
    add('--debug', action='store_true', help="print debug info")
    return parser
if __name__ == '__main__':
    # Assemble CLI options from the three config groups and parse them.
    parser = argparse.ArgumentParser()
    parser = data_config(parser)
    parser = model_config(parser)
    parser = train_config(parser)
    args = parser.parse_args()
    if args.do_eval:
        # In evaluation mode run in real time so the rendered football
        # animation is not sped up and remains watchable.
        other_config_options = {'real_time':True}
    else:
        other_config_options = {}
    # Build the gfootball environment from the parsed options.
    env = football_env.create_environment(
        env_name=args.level, stacked=('stacked' in args.state),
        rewards=args.reward_experiment,
        logdir=args.log_dir,
        write_goal_dumps=args.dump_scores,
        write_full_episode_dumps=args.dump_full_episodes,
        render=args.render,
        dump_frequency=50,
        other_config_options=other_config_options,)
    # Model configuration
    model = PPO("MlpPolicy", env, verbose=1)
    if args.initial_checkpoint:
        model.load(args.initial_checkpoint)
    if args.do_train:
        print(f"开始训练,会耗时较长, 即将训练{args.num_timesteps}个step,模型保存频率为{args.model_save_frequency}")
        # Periodically checkpoint the model during training.
        checkpoint_callback = CheckpointCallback(save_freq=args.model_save_frequency, save_path=args.output_path,
                                                 name_prefix=args.model_save_prefix)
        model.learn(total_timesteps=args.num_timesteps, callback=checkpoint_callback)
        # Save the final trained model.
        # Example save location: output/ppo_model_final.zip
        save_path = os.path.join(args.output_path, args.model_save_prefix + '_final.zip')
        model.save(save_path)
    elif args.do_eval:
        print(f"评估模式,直接加载模型")
        model.load(args.load_checkpoint)
    else:
        print(f"请选择需要训练还是测试评估, --do_train, --do_eval")
        sys.exit(0)
    # Reset the environment before evaluating the model.
    obs = env.reset()
    # Evaluate the model for up to 1000 steps, rendering each one.
    print(f"开始测试模型效果:")
    step = 0
    for i in range(1000):
        step += 1
        print(f"循环第{i}次,开始进行第{step}个step操作")
        action, _states = model.predict(obs, deterministic=True)
        obs, reward, done, info = env.step(action)
        env.render()
        if done:
            # Episode finished: reset the counter and the environment.
            print(f"这一个episode足球结束,开始下一个step测试")
            step = 0
            obs = env.reset()
    env.close()
|
<gh_stars>100-1000
//
// mulle_objc_object_convenience.h
// mulle-objc-runtime
//
// Created by Nat! on 11.03.15.
// Copyright (c) 2015 Nat! - <NAME>.
// Copyright (c) 2015 Codeon GmbH.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// Neither the name of <NAME> nor the names of its contributors
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
#ifndef mulle_objc_object_convenience_h__
#define mulle_objc_object_convenience_h__
#include "mulle-objc-object.h"
#include "mulle-objc-class.h"
#include "mulle-objc-class-search.h"
#include "mulle-objc-class-convenience.h"
#include "mulle-objc-call.h"
// methods on class shortcuts
// Convenience: return the name of obj's isa class.
static inline char *_mulle_objc_object_get_isa_name( void *obj)
{
   return( _mulle_objc_class_get_name( _mulle_objc_object_get_isa( obj)));
}

// Look up methodid's implementation on obj's class using only the method
// cache (no class search; see _mulle_objc_class_lookup_implementation_cacheonly).
static inline mulle_objc_implementation_t
   _mulle_objc_object_cacheonlylookup_implementation( void *obj,
                                                      mulle_objc_methodid_t methodid)
{
   struct _mulle_objc_class   *cls;

   cls = _mulle_objc_object_get_isa( obj);
   return( _mulle_objc_class_lookup_implementation_cacheonly( cls, methodid));
}

// Standard implementation lookup on obj's class for methodid.
static inline mulle_objc_implementation_t
   _mulle_objc_object_lookup_implementation( void *obj,
                                             mulle_objc_methodid_t methodid)
{
   struct _mulle_objc_class   *cls;

   cls = _mulle_objc_object_get_isa( obj);
   return( _mulle_objc_class_lookup_implementation( cls, methodid));
}

// Implementation lookup that skips the forward mechanism
// (see _mulle_objc_class_lookup_implementation_noforward).
static inline mulle_objc_implementation_t
   _mulle_objc_object_lookup_implementation_no_forward( void *obj,
                                                        mulle_objc_methodid_t methodid)
{
   struct _mulle_objc_class   *cls;

   cls = _mulle_objc_object_get_isa( obj);
   return( _mulle_objc_class_lookup_implementation_noforward( cls, methodid));
}

// Search obj's class for the method (not just its implementation) using the
// default search (see mulle_objc_class_defaultsearch_method).
static inline struct _mulle_objc_method *
   _mulle_objc_object_defaultsearch_method( void *obj,
                                            mulle_objc_methodid_t methodid)
{
   struct _mulle_objc_class   *cls;

   cls = _mulle_objc_object_get_isa( obj);
   return( mulle_objc_class_defaultsearch_method( cls, methodid));
}

// Return the allocator configured on the instance's infraclass.
// Only valid for instances (isa must convert to an infraclass).
MULLE_C_NONNULL_RETURN static inline struct mulle_allocator *
   _mulle_objc_instance_get_allocator( void *obj)
{
   struct _mulle_objc_class        *cls;
   struct _mulle_objc_infraclass   *infra;

   cls   = _mulle_objc_object_get_isa( obj);
   infra = _mulle_objc_class_as_infraclass( cls);
   return( _mulle_objc_infraclass_get_allocator( infra));
}
#pragma mark - instance deletion

// Hook invoked just before an instance's memory is released.
// In DEBUG builds it emits a trace when universe->debug.trace.instance is set;
// in release builds it is a no-op.
static inline void __mulle_objc_instance_will_free( struct _mulle_objc_object *obj)
{
   // too slow for non debug
#if DEBUG
   {
      struct _mulle_objc_universe   *universe;

      universe = _mulle_objc_object_get_universe( obj);
      if( universe->debug.trace.instance)
      {
         // forward declaration kept local so release builds don't need it
         void  _mulle_objc_instance_trace_free( void *obj);

         _mulle_objc_instance_trace_free( obj);
      }
   }
#endif
}

// Release the instance's backing allocation: step back from the object
// pointer over the object header (plus the class's headerextrasize) to the
// start of the original allocation, then free that via the given allocator.
static inline void __mulle_objc_instance_free( void *obj,
                                               struct _mulle_objc_class *cls,
                                               struct mulle_allocator *allocator)
{
   struct _mulle_objc_objectheader   *header;
   void                              *alloc;

   __mulle_objc_instance_will_free( obj);

   header = _mulle_objc_object_get_objectheader( obj);
   alloc  = _mulle_objc_objectheader_get_alloc( header, cls->headerextrasize);
   mulle_allocator_free( allocator, alloc);
}
// Free obj with an explicitly supplied allocator. The isa is still queried
// because __mulle_objc_instance_free needs cls->headerextrasize to locate
// the start of the allocation.
// (Fix: removed the unused local "void *alloc;" present in the original.)
static inline void _mulle_objc_instance_free_allocator( void *obj,
                                                        struct mulle_allocator *allocator)
{
   struct _mulle_objc_class   *cls;

   cls = _mulle_objc_object_get_isa( obj);
   __mulle_objc_instance_free( obj, cls, allocator);
}
// Free obj using the allocator configured on its infraclass
// (the common path; only valid for instances).
static inline void _mulle_objc_instance_free( void *obj)
{
   struct mulle_allocator          *allocator;
   struct _mulle_objc_class        *cls;
   struct _mulle_objc_infraclass   *infra;

   cls       = _mulle_objc_object_get_isa( obj);
   infra     = _mulle_objc_class_as_infraclass( cls);
   allocator = _mulle_objc_infraclass_get_allocator( infra);
   __mulle_objc_instance_free( obj, cls, allocator);
}

// NULL-tolerant wrapper around _mulle_objc_instance_free.
static inline void mulle_objc_instance_free( void *obj)
{
   if( ! obj)
      return;
   _mulle_objc_instance_free( obj);
}
#endif
|
/**
 * Inserts a dash between every pair of adjacent odd digits in a digit string.
 * Example: "454793" -> "4547-9-3".
 *
 * Fixes vs. original: returns '' for an empty input (the original returned
 * undefined via inputStr[0]); uses Number() instead of parseInt without a radix.
 *
 * @param {string} inputStr - string of decimal digits
 * @returns {string} the same digits with '-' inserted between consecutive odd digits
 */
function insertDashes(inputStr) {
  if (inputStr.length === 0) {
    return '';
  }
  let result = inputStr[0];
  for (let i = 1; i < inputStr.length; i++) {
    const prevOdd = Number(inputStr[i - 1]) % 2 !== 0;
    const currOdd = Number(inputStr[i]) % 2 !== 0;
    result += prevOdd && currOdd ? `-${inputStr[i]}` : inputStr[i];
  }
  return result;
}
<filename>src/data/transaction-history-data.js
// Mock CO2 emission-factor records backing the transaction-history view.
// Fields: id, date (MM-DD-YYYY), factor (Dutch label of the energy source),
// waarde (current value), default (reference value), status, eenheid (unit),
// total (placeholder amount carried over from the original orders mock).
// (A large block of commented-out legacy "orders" mock data was removed.)
export default () => [
  {
    "id": 1,
    "date": "01-01-2020",
    "factor": "Grijze stroom",
    "waarde": 649,
    "default": 104,
    "status": "Complete",
    "eenheid": "g CO2 / kWh",
    "total": "$6541.92"
  },
  {
    "id": 2,
    "date": "01-07-2020",
    "factor": "Ingekochte duurzame elektriciteit ('groen'): Windkracht",
    "waarde": 0,
    "default": 0,
    "status": "Complete",
    "eenheid": "g CO2 / kWh",
    "total": "$6541.92"
  },
  {
    "id": 3,
    "date": "01-24-2020",
    "factor": "Ingekochte duurzame elektriciteit ('groen'): Waterkracht",
    "waarde": 0,
    "default": 0,
    "status": "Complete",
    "eenheid": "g CO2 / kWh",
    "total": "$6541.92"
  },
  {
    "id": 4,
    "date": "02-28-2020",
    "factor": "Ingekochte duurzame elektriciteit ('groen'): Zonne-energie",
    "waarde": 0,
    "default": 0,
    "status": "Complete",
    "eenheid": "g CO2 / kWh",
    "total": "$6541.92"
  },
  {
    "id": 5,
    "date": "03-16-2020",
    "factor": "Ingekochte duurzame elektriciteit ('groen'): Stortgas",
    "waarde": 80,
    "default": 80,
    "status": "Pending",
    "eenheid": "g CO2 / kWh",
    "total": "$6541.92"
  },
  {
    "id": 6,
    "date": "03-18-2020",
    "factor": "'Ingekochte duurzame elektriciteit ('groen'): Biomassa",
    "waarde": 75,
    "default": 75,
    "status": "Cancelled",
    "eenheid": "g CO2 / kWh",
    "total": "$6541.92"
  },
  {
    "id": 7,
    "date": "04-12-2020",
    "factor": "Ingekochte duurzame elektriciteit ('groen'): Overig",
    "waarde": 0,
    "default": 0,
    "status": "Cancelled",
    "eenheid": "g CO2 / kWh",
    "total": "$6541.92"
  },
];
|
package training.string;
import org.junit.jupiter.api.Test;
import java.util.function.Function;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * 151. Reverse Words in a String: https://leetcode-cn.com/problems/reverse-words-in-a-string/
 *
 * Given a string, reverse the order of its words.
 *
 * Rules:
 * - A "word" is a maximal run of non-space characters.
 * - Leading/trailing spaces in the input must not appear in the output.
 * - Multiple spaces between words are collapsed to a single separating space.
 *
 * Example 1: "the sky is blue" -> "blue is sky the"
 * Example 2: " hello world! " -> "world! hello"
 * Example 3: "a good example" -> "example good a"
 * Example 4: " Bob Loves Alice " -> "Alice Loves Bob"
 * Example 5: "Alice does not even like bob" -> "bob like even not does Alice"
 *
 * Constraints:
 * 1 <= s.length <= 10**4
 * s consists of upper/lowercase English letters, digits and spaces ' '
 * s contains at least one word
 */
public class E151_Medium_ReverseWordsInString {

    /** Shared assertions exercised against any implementation of the problem. */
    static void test(Function<String, String> method) {
        assertEquals("blue is sky the", method.apply("the sky is blue"));
        assertEquals("world! hello", method.apply(" hello world! "));
        assertEquals("example good a", method.apply("a good example"));
        assertEquals("<NAME> Bob", method.apply(" Bob Loves Alice "));
        assertEquals("bob like even not does Alice",
                method.apply("Alice does not even like bob"));
        assertEquals("b", method.apply("b"));
        assertEquals("a b c d", method.apply(" d c b a "));
    }

    /**
     * Scans words left-to-right while writing them into a char buffer from the
     * right, which reverses word order and collapses runs of spaces in one pass.
     *
     * LeetCode runtime: 3 ms - 96.08%; memory: 38.4 MB - 82.47%
     */
    public String reverseWords(String s) {
        int lo = 0, hi = s.length() - 1;
        // skip leading and trailing spaces
        while (s.charAt(lo) == ' ')
            lo++;
        while (s.charAt(hi) == ' ')
            hi--;
        char[] result = new char[hi - lo + 1];
        int idx = result.length - 1;
        while (lo <= hi) {
            int end = lo;
            // find the extent [lo, end) of the next word
            while (++end <= hi && s.charAt(end) != ' ');
            // copy the word into the tail of result (order inside the word preserved)
            for (int i = end - 1; i >= lo; i--, idx--)
                result[idx] = s.charAt(i);
            if ((lo = end) > hi)
                break;
            // emit a single separating space
            result[idx--] = ' ';
            // skip any run of spaces before the next word
            while (++lo <= hi && s.charAt(lo) == ' ');
        }
        // slots below idx+1 are unused (freed up by collapsed spaces) — trim them
        return new String(result, idx + 1, result.length - idx - 1);
    }

    @Test
    void testReverseWords() {
        test(this::reverseWords);
    }
}
|
#!/usr/bin/env bash

# CONFIG set — fork identity and upstream repository URLs (filled in per fork)
FORK_NAME="Yatopia"
API_REPO=""
SERVER_REPO=""
PAPER_API_REPO=""
PAPER_SERVER_REPO=""
MCDEV_REPO=""

# Added Multithreading to builds
# By JosephWorks
# -T 1.5C: Maven builds with 1.5 threads per available CPU core
mvncmd="mvn -T 1.5C"
# git with GPG signing / path quoting disabled and UTF-8 forced, so scripted
# commits behave identically regardless of the user's git config
gitcmd="git -c commit.gpgsign=false -c core.quotepath=false -c core.safecrlf=false -c i18n.commit.encoding=UTF-8 -c i18n.logoutputencoding=UTF-8"

# DIR configure
# resolve shell-specifics: choose the rc file and the way this script's own
# path is obtained, depending on whether the user runs zsh or bash
case "$(echo "$SHELL" | sed -E 's|/usr(/local)?||g')" in
"/bin/zsh")
    RCPATH="$HOME/.zshrc"
    SOURCE="${BASH_SOURCE[0]:-${(%):-%N}}"
;;
*)
    RCPATH="$HOME/.bashrc"
    if [[ -f "$HOME/.bash_aliases" ]]; then
        RCPATH="$HOME/.bash_aliases"
    fi
    SOURCE="${BASH_SOURCE[0]}"
;;
esac
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
    DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
    SOURCE="$(readlink "$SOURCE")"
    [[ "$SOURCE" != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
# absolute path of this script, its directory, and the repository root
SOURCE=$([[ "$SOURCE" = /* ]] && echo "$SOURCE" || echo "$PWD/${SOURCE#./}")
scriptdir=$(dirname "$SOURCE")
basedir=$(dirname "$scriptdir")
# cd to the repository root
function basedir {
    cd "$basedir"
}
# cd to the Tuinity checkout inside the repository
function paperdir {
    cd "$basedir/Tuinity"
}
# run git with the hardened flags from the $gitcmd variable
gitcmd() {
    $gitcmd "$@"
}
# COLOUR functions
# color CODE [CODE2] — emit an ANSI escape sequence (e.g. "1" bold, "31" red)
color() {
    if [ $2 ]; then
        echo -e "\e[$1;$2m"
    else
        echo -e "\e[$1m"
    fi
}
# reset terminal colours
colorend() {
    echo -e "\e[m"
}
# NOTE(review): bashcolor/bashcolorend duplicate color/colorend verbatim;
# both names are kept since callers elsewhere may rely on either.
function bashcolor {
    if [ $2 ]; then
        echo -e "\e[$1;$2m"
    else
        echo -e "\e[$1m"
    fi
}
function bashcolorend {
    echo -e "\e[m"
}
# GIT functions
# stash local changes, remembering git's output so gitunstash can tell
# whether anything was actually stashed
gitstash() {
    STASHED=$($gitcmd stash 2>/dev/null|| return 0) # errors are ok
}
# pop the stash created by gitstash, but only if it stashed something
gitunstash() {
    if [[ "$STASHED" != "No local changes to save" ]] ; then
        $gitcmd stash pop 2>/dev/null|| return 0 # errors are ok
    fi
}
# print the current HEAD commit of the repository root
function gethead {
    basedir
    git log -1 --oneline
}
# gitpush DESC REMOTE REF — force-push REF to REMOTE through a throwaway
# "script-push" remote, gated on the minecraft.push-<fork> git config flag
function gitpush {
    if [ "$(git config minecraft.push-${FORK_NAME})" == "1" ]; then
        echo "Push - $1 ($3) to $2"
        (
            basedir
            git remote rm script-push > /dev/null 2>&1
            git remote add script-push $2 >/dev/null 2>&1
            git push script-push $3 -f
        )
    fi
}
# PATCH functions
# cleanupPatches DIR — revert staged changes to patch files whose only staged
# difference is churn (hashes, dates, trailing git-version line), keeping the
# working tree free of meaningless patch rewrites
function cleanupPatches {
    cd "$1"
    for patch in *.patch; do
        # trailing "git version" line of the patch file
        gitver=$(tail -n 2 $patch | grep -ve "^$" | tail -n 1)
        # staged +/- lines, excluding headers that always churn between regenerations
        diffs=$(git diff --staged $patch | grep -E "^(\+|\-)" | grep -Ev "(From [a-z0-9]{32,}|\-\-\- a|\+\+\+ b|.index|Date\: )")
        # if the last remaining diff line is just the git-version line, drop it too
        testver=$(echo "$diffs" | tail -n 2 | grep -ve "^$" | tail -n 1 | grep "$gitver")
        if [ "x$testver" != "x" ]; then
            diffs=$(echo "$diffs" | tail -n +3)
        fi
        # nothing meaningful changed: unstage and restore the patch file
        if [ "x$diffs" == "x" ] ; then
            git reset HEAD $patch >/dev/null
            git checkout -- $patch >/dev/null
        fi
    done
}
# containsElement NEEDLE ELEM... — return 0 iff NEEDLE equals one of ELEM...
function containsElement {
    local e
    for e in "${@:2}"; do
        [[ "$e" == "$1" ]] && return 0;
    done
    return 1
}
|
from rasa_core.channels import HttpInputChannel
from rasa_core.agent import Agent
from rasa_core.interpreter import RegexInterpreter

# initializing the agent from the trained dialogue model
agent = Agent.load('models/dialogue', interpreter=RegexInterpreter())

# setting input channel (HTTP webhook on port 5004)
input_channel = HttpInputChannel(5004, '/webhooks/rasa/webhook')
# NOTE(review): serve_forever=True blocks here, so the action functions below
# are never defined while the server runs — confirm this module is only
# imported for its definitions, or move this call below them.
agent.handle_channels([input_channel], 5004, serve_forever=True)
# implement stories and responses
def booking_movies(dispatcher, tracker, domain):
    """Greet the user and ask for a movie title; reset the movietitle slot.

    NOTE(review): ``Event`` and ``SlotSet`` are not imported anywhere in this
    file — presumably they come from ``rasa_core.events``; confirm and import.
    """
    tracker.events.append(Event("action_listen"))
    response = "Welcome to Movie Bot!\
What movie would you like to watch?"
    dispatcher.utter_message(response)
    return [SlotSet('movietitle',None)]
def booking_dates(dispatcher, tracker, domain):
    """Ask for the viewing date for the previously chosen movie; reset moviedate.

    NOTE(review): ``SlotSet`` is not imported in this file — confirm source.
    """
    movie_title = tracker.get_slot('movietitle')
    response = "Alright! Please enter the date when you would like to watch the "+ movie_title +" ?"
    dispatcher.utter_message(response)
    return [SlotSet('moviedate',None)]
def booking_times(dispatcher, tracker, domain):
    """Confirm the chosen date and ask for a showtime; reset the movietime slot.

    NOTE(review): ``SlotSet`` is not imported in this file — confirm source.
    """
    movie_date = tracker.get_slot('moviedate')
    response = "Okay, you have selected "+movie_date+" as your date. What time would you like to watch the movie?"
    dispatcher.utter_message(response)
    return [SlotSet('movietime',None)]
def booking_confirmation(dispatcher, tracker, domain):
    """Confirm the chosen showtime and request payment details; no slot changes."""
    movie_time = tracker.get_slot('movietime')
    response = "Great! You have selected "+movie_time+" as your time for the movie. Please enter your payment details and we'll book the tickets for you."
    dispatcher.utter_message(response)
    return[]
import pandas as pd
import numpy as np

# Read in dataset
df = pd.read_csv('dataset.csv')

# Separate input (X) and output (y) variables; 'target' is the label column
X = df.drop('target', axis=1)
y = df['target']

# Split the dataset into training and test set (80/20, fixed seed for reproducibility)
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=1)

# Create a machine learning model
from sklearn.linear_model import LinearRegression
model = LinearRegression(fit_intercept = True) # create linear regression model
model.fit(X_train, y_train) # fit the model

# Make predictions on the held-out test set
y_pred = model.predict(X_test)
<reponame>zsh2401/cat-paw<filename>src/view/pages/IndexPage/Left/index.tsx<gh_stars>1-10
import React from 'react'
import BCard from './BCard'
import { Divider } from 'rsuite'
/**
 * Left column of the index page: two Divider-separated groups of
 * navigation cards (internal routes and external links).
 */
export default function () {
    return <div>
        <Divider>去哪咧?</Divider>
        {/* internal routes */}
        <div className="row">
            <BCard to="/flow" name="-Flow-" description="我的心流...你是否感知到?" />
            <BCard to="/read/tea" name="茶" description="随便说点啥" />
            <BCard to="/fragment/loveclock" name="TimeCounter!" description={"\"时间是表征物质运动的最基本物理量,是人类文明发展中的一个重要组成部分\""} />
            <BCard to="/read/loveletters" name="嘿嘿,情书" description={"虽然我语文不好!但我就是要写!不服咬我!"} />
        </div>
        <Divider>Anywhere but here</Divider>
        {/* restricted / external destinations */}
        <div className="row">
            <BCard onClick={()=>alert("抱歉,您不可以访问树屋")} name="树屋" description={"Hey!我们的树屋已经长蜘蛛网了!回去康康吧~"} />
            <BCard external to="https://zsh2401.top" name="滑滑的博客" description={"\"山不在高,有仙则名。水不在深,有龙则灵。斯是陋室,惟吾德馨。\""} />
        </div>
    </div>
}
use std::convert::Infallible;
use hyper::{Body, Request, Response, Server};
use hyper::service::{make_service_fn, service_fn};
/// Health-check endpoint: always replies 200 with body "Pong".
async fn ping_handler(_req: Request<Body>) -> Result<Response<Body>, Infallible> {
    Ok(Response::new("Pong".into()))
}

/// Placeholder data-retrieval endpoint: replies 200 "Retrieving data".
async fn retrieve_handler(_req: Request<Body>) -> Result<Response<Body>, Infallible> {
    Ok(Response::new("Retrieving data".into()))
}

/// Placeholder command endpoint: replies 200 "Executing command".
async fn command_handler(_req: Request<Body>) -> Result<Response<Body>, Infallible> {
    Ok(Response::new("Executing command".into()))
}
/// Entry point: serves a minimal HTTP router on 0.0.0.0:8080.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // One service per connection; routing is a plain match on the request path.
    let make_svc = make_service_fn(|_conn| {
        async {
            Ok::<_, Infallible>(service_fn(|req| async {
                match req.uri().path() {
                    "/ping" => ping_handler(req).await,
                    "/retrieve" => retrieve_handler(req).await,
                    "/command" => command_handler(req).await,
                    // any other path: empty 404
                    _ => Ok(Response::builder()
                        .status(404)
                        .body(Body::empty())
                        .unwrap()),
                }
            }))
        }
    });

    let addr = ([0, 0, 0, 0], 8080).into();
    let server = Server::bind(&addr).serve(make_svc);

    // Runs until the server fails or the process is terminated.
    if let Err(e) = server.await {
        eprintln!("server error: {}", e);
    }
    Ok(())
}
// Shallow-merges newState into state, returning a fresh object (later keys win).
export const reducer = (state, newState) => Object.assign({}, state, newState);
/**
 * Curries fn: collects arguments across successive calls and invokes fn once
 * at least fn.length arguments have been supplied.
 */
const curry = (fn) =>
  function curried(...collected) {
    return collected.length >= fn.length
      ? fn(...collected)
      : (...next) => curried(...collected, ...next);
  };
// Depth-first reduction over a tree (or forest) of nodes with an optional
// `children` array: folds each node into the accumulator, then recurses.
// NOTE(review): when currItem is an array (a forest), the accumulator is
// restarted at [] and the supplied initialValue is ignored — confirm that
// this asymmetry is intentional before relying on it.
export const traverseTree = curry((reducerFn, initialValue, currItem) => {
  if (Array.isArray(currItem)) {
    return currItem.reduce(traverseTree(reducerFn), [])
  }
  // fold the current node, then thread the result through its children
  const acc = reducerFn(initialValue, currItem);
  if (!currItem.children || !currItem.children.length) {
    return acc;
  }
  return currItem.children.reduce(traverseTree(reducerFn), acc);
});
|
# Homebrew — macOS package manager
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"

# Oh My ZSH — make zsh the login shell, then install the framework
chsh -s "$(which zsh)"
sh -c "$(curl -fsSL https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh)"

# Powerline Fonts (needed by the agnoster theme); clone, install, clean up
git clone https://github.com/powerline/fonts.git --depth=1
cd fonts
./install.sh
cd ..
rm -rf fonts

# Environment — copy shell config and theme into place
cp zsh/.zshrc ~/.zshrc
cp zsh/agnoster.zsh-theme ~/.oh-my-zsh/themes/agnoster.zsh-theme

# Node Setup (COC dependency) — manual download step, waits for confirmation
./scripts/uninstall-node.sh
echo "Please visit https://nodejs.org/en/download/ and download Node"
echo "Confirm when done by pressing Enter Key"
read

# Simple Tooling
brew install htop direnv wget tmux yarn

# Pip 3 setup — jedi for Python completion
pip3 install jedi --user

# Autojump — directory-jumping helper
git clone git://github.com/wting/autojump.git
cd autojump
./install.sh
cd ~

# Vim configuration
cp vim/.vimrc ~/.vimrc

# Vundle — vim plugin manager, then install all plugins headlessly
git clone https://github.com/VundleVim/Vundle.vim.git ~/.vim/bundle/Vundle.vim
vim +PluginInstall +qall

# COC Setup — completion engine plus language extensions
vim -c ':call coc#util#install()'
vim -c 'CocInstall coc-python coc-rust-analyzer coc-json coc-markdownlint'
cp vim/coc-settings.json ~/.vim/coc-settings.json

# Rust Analyzer binary into ~/.local/bin
curl -L https://github.com/rust-analyzer/rust-analyzer/releases/latest/download/rust-analyzer-linux -o ~/.local/bin/rust-analyzer
chmod +x ~/.local/bin/rust-analyzer

# Git identity, credential caching, and SSH key generation
git config --global user.name "sslavov93"
git config --global user.email "sslavov93@gmail.com"
git config --global credential.helper cache
git config --global credential.helper 'cache --timeout=3600'
ssh-keygen -t rsa -b 4096 -C "sslavov93@gmail.com"
eval "$(ssh-agent -s)"
ssh-add ~/.ssh/id_rsa
cat ~/.ssh/id_rsa.pub
|
<gh_stars>100-1000
# test basic async for execution
# example taken from PEP0492
class AsyncIteratorWrapper:
    """Wrap a plain iterable so it can be consumed with ``async for``.

    Each protocol hook prints its name so the test output records the call
    sequence. NOTE(review): ``__aiter__`` is declared ``async`` (pre-3.5.2
    style, as in PEP 492's original example); modern CPython expects a plain
    ``def __aiter__`` — this file looks like a runtime conformance test, so
    confirm the target interpreter before changing it.
    """

    def __init__(self, obj):
        print('init')
        # underlying synchronous iterator being adapted
        self._it = iter(obj)

    async def __aiter__(self):
        print('aiter')
        return self

    async def __anext__(self):
        print('anext')
        try:
            value = next(self._it)
        except StopIteration:
            # translate sync exhaustion into the async-iteration stop signal
            raise StopAsyncIteration
        return value
async def coro():
    # Consume the wrapped iterable with async-for, printing each letter.
    async for letter in AsyncIteratorWrapper('abc'):
        print(letter)

# Drive the coroutine by hand: it never awaits anything external, so a single
# send(None) runs it to completion, which surfaces as StopIteration.
o = coro()
try:
    o.send(None)
except StopIteration:
    print('finished')
|
package cbedoy.cblibrary.services;
import java.util.ArrayList;
import java.util.HashMap;
/**
* Created by <NAME> on 28/12/2014.
*
* Mobile App Developer
* CBLibrary
*
* E-mail: <EMAIL>
* Facebook: https://www.facebook.com/carlos.bedoy
* Github: https://github.com/cbedoy
*/
/**
 * Simple in-process publish/subscribe hub: listeners register for an integer
 * notification type and receive varargs payloads posted for that type.
 */
public class NotificationCenter {

    /** Process-wide singleton (lazily created). */
    private static NotificationCenter mInstance;
    /** Listener lists keyed by notification type; guarded by synchronized blocks. */
    private final HashMap<Integer, ArrayList<NotificationListener>> mListeners;

    private NotificationCenter() {
        mListeners = new HashMap<Integer, ArrayList<NotificationListener>>();
    }

    /**
     * Returns the singleton, creating it on first use.
     * NOTE(review): creation itself is not synchronized — confirm first access
     * happens on a single thread (e.g. the UI thread) before relying on this.
     */
    public static NotificationCenter getInstance() {
        if (mInstance == null) {
            mInstance = new NotificationCenter();
        }
        return mInstance;
    }

    /** Drops the singleton so the next getInstance() builds a fresh, empty center. */
    public static void clearInstance() {
        mInstance = null;
    }

    /** Registers listener for the given type; duplicate registrations are ignored. */
    public void addListener(Integer type, NotificationListener listener) {
        synchronized (mListeners) {
            ArrayList<NotificationListener> listeners = mListeners.get(type);
            if (listeners == null) {
                listeners = new ArrayList<NotificationListener>();
            }
            if (!listeners.contains(listener)) {
                listeners.add(listener);
            }
            mListeners.put(type, listeners);
        }
    }

    /** Unregisters listener for the given type, removing the type entry when empty. */
    public void removeListener(Integer type, NotificationListener listener) {
        synchronized (mListeners) {
            ArrayList<NotificationListener> listeners = mListeners.get(type);
            if (listeners != null && listeners.contains(listener)) {
                listeners.remove(listener);
            }
            if (listeners != null && listeners.size() > 0) {
                mListeners.put(type, listeners);
            } else {
                mListeners.remove(type);
            }
        }
    }

    /**
     * Delivers notification args to every listener registered for type,
     * synchronously on the calling thread (holding the listeners lock).
     */
    public void postNotification(Integer type, Object... notification) {
        synchronized (mListeners) {
            ArrayList<NotificationListener> listeners = mListeners.get(type);
            if (listeners != null && listeners.size() > 0) {
                for (NotificationListener listener : listeners) {
                    listener.didReceivedNotification(type, notification);
                }
            }
        }
    }

    /** Callback contract for notification consumers. */
    public interface NotificationListener {
        public abstract void didReceivedNotification(Integer type, Object... args);
    }
}
|
<filename>src/components/pages/dept/GovtDept.js
import React, { Component } from 'react';
import GovtService from '../../../services/GovtService'
import './govtDept.css'
class GovtDept extends Component {
constructor(props) {
super(props)
this.state = {
name: '',
rewards: '',
currentName: ''
}
this.fetchDetails = this.fetchDetails.bind(this);
this.handleChange = this.handleChange.bind(this);
this.submitNameChange = this.submitNameChange.bind(this);
}
componentWillMount() {
this.govtService = new GovtService()
this.govtService.init(this.props.match.params.id).then(() => {
this.fetchDetails()
})
console.log(this.props.match.params.id)
}
handleChange(e) {
this.state[e.target.name] = e.target.value;
console.log(this.state)
}
fetchDetails() {
let self = this;
this.govtService.getDetails().then(res => {
if(res.name === '') {
res.name = 'No Name assigned'
}
self.setState({
name: res.name,
rewards: Number(res.reward)
})
})
}
submitNameChange(e) {
e.preventDefault()
this.govtService.registerName(this.state.name)
}
render() {
return(
<section className="container">
<ul className="nav nav-tabs" id="myTab" role="tablist">
<li className="nav-item">
<a className="nav-link active" id="home-tab" data-toggle="tab" href="#home" role="tab" aria-controls="home" aria-selected="true">Claim your Identity</a>
</li>
<li className="nav-item">
<a className="nav-link" id="profile-tab" data-toggle="tab" href="#profile" role="tab" aria-controls="profile" aria-selected="false">View Your reward points</a>
</li>
</ul>
<div className="tab-content" id="myTabContent">
<div className="tab-pane fade show active claim" id="home" role="tabpanel" aria-labelledby="home-tab">
<h3 className="current-name">Your current Name: {this.state.name}</h3>
<form onSubmit={this.submitNameChange}>
<div className="form-group row">
<label htmlFor="name" className="col-sm-2 col-form-label">Entity/Person Name</label>
<div className="col-sm-10">
<input type="text" className="form-control" id="name" name="name" placeholder="MCD" onChange={this.handleChange}/>
<small id="password<PASSWORD>" className="form-text text-muted">
This is the name of the organisation or any person.
</small>
</div>
</div>
<div className="form-group row">
<div className="col-sm-2"></div>
<div className="col-sm-10">
<button type="submit" className="btn btn-success btn-lg btn-block">Update</button>
</div>
</div>
</form>
</div>
<div className="tab-pane fade" id="profile" role="tabpanel" aria-labelledby="profile-tab">
<h4>Your current reward points are:</h4>
<h2>{this.state.rewards}</h2>
</div>
</div>
</section>
)
}
}
export default GovtDept;
|
class MongoModel:
    """Minimal base class bridging MongoDB documents and Python objects.

    Mongo stores the primary key under ``_id``; construction accepts it as the
    ``id`` keyword and stores it on the ``_id`` attribute.
    """

    def __init__(self, **kwargs):
        # Primary key; None when the document has no _id yet.
        self._id = kwargs.get('id', None)
        # Initialize other fields based on kwargs

    @classmethod
    def from_mongo(cls, data):
        """Build an instance from a raw Mongo document, mapping ``_id`` -> ``id``.

        Note: pops ``_id`` from the caller's dict (original behavior kept).
        """
        doc_id = data.pop('_id', None)  # renamed: ``id`` shadowed the builtin
        return cls(**dict(data, id=doc_id))

    def mongo(self, **kwargs):
        """Serialize the instance to a Mongo-ready dict.

        Keyword args:
            exclude_unset (default True): drop attributes whose value is None
                (note: this is a None filter, not true unset tracking).
            by_alias (default True): rename keys through the optional class
                attribute ``aliases`` ({attr_name: mongo_field_name}).

        Fixes vs. original: a missing ``aliases`` attribute now behaves like an
        empty mapping instead of raising AttributeError, and the returned dict
        is always a copy so callers cannot mutate instance state through it.
        """
        exclude_unset = kwargs.pop('exclude_unset', True)
        by_alias = kwargs.pop('by_alias', True)
        if exclude_unset:
            data = {k: v for k, v in self.__dict__.items() if v is not None}
        else:
            # copy: never hand out the live __dict__
            data = dict(self.__dict__)
        if by_alias:
            aliases = getattr(self, 'aliases', {})
            data = {aliases.get(k, k): v for k, v in data.items()}
        return data
<filename>SCRIPT/Element/Notify.js
const _notifyDurations = { "three": 3, "five": 5, "eight": 8, "fourteen": 14, "twenty": 20, "permanent": -99 };
/**
 * Extracts notification settings from an element whose class list contains a
 * "notify-<type>-<position>-<duration>" token (the last such token wins).
 *
 * Fixes vs. original: returns {} instead of throwing when the element has no
 * notify-* class (val was undefined) or no class attribute at all
 * (getAttribute returns null).
 *
 * @param {Element} elm - element carrying the notify-* class
 * @returns {{type?: string, position?: string, duration?: string, element?: Element}}
 */
const parseElement = (elm) => {
    const classAttr = elm.getAttribute("class") || "";
    let val = "";
    let result = {};
    for (const str of classAttr.split(" ")) {
        if (str.startsWith("notify-")) {
            val = str;
        }
    }
    if (val.length > 0) {
        const vals = val.split("-");
        if (vals.length === 4) {
            result = {
                "type": vals[1],
                "position": vals[2],
                "duration": vals[3],
                "element": elm
            };
        }
    }
    return result;
};
// Reveal a notification element: after an 800ms lead-in, add the "show" class,
// and — unless the duration token is "permanent" — remove it again after the
// number of seconds mapped in _notifyDurations.
const showElement = (elm) => {
    let notify = parseElement(elm);
    setTimeout(() => {
        notify.element.classList.add("show");
        if (notify.duration !== "permanent") {
            setTimeout(() => {
                notify.element.classList.remove("show");
            }, _notifyDurations[notify.duration] * 1000);
        }
    }, 800);
};
// Wire every notify-* element: show it on a custom "cmd:open" event, and
// fire that event once immediately so notifications present at load appear.
document.querySelectorAll("[class^=notify-]").forEach((elm) => {
    elm.addEventListener("cmd:open", (e) => {
        showElement(elm);
    });
    elm.dispatchEvent(new Event("cmd:open"));
});
|
#include <stdio.h>
#include <string.h>

/*
 * Concatenates its two command-line arguments with a single space and prints
 * the result, e.g. "./a.out hello world" -> "hello world".
 *
 * Fixes vs. original: the usage message wrongly asked for "a positive
 * integer"; usage errors now return nonzero; snprintf replaces the unbounded
 * strcpy/strcat into fixed 256-byte buffers (a classic overflow).
 */
int main(int argc, char* argv[]){
    if(argc != 3){
        printf("ERROR: please type in two strings\n");
        return 1;   /* signal failure to the shell */
    }
    char joined[513];   /* 256 + ' ' + 256 + NUL, matching the old capacity */
    /* snprintf bounds the write and truncates oversized input safely */
    if (snprintf(joined, sizeof joined, "%s %s", argv[1], argv[2]) < 0) {
        return 1;
    }
    puts(joined);
    return 0;
}
|
<reponame>guidiaalo/sdk-js
/**
 * Make the async function wait
 * @internal
 * @param timerMS Time to wait, in milliseconds
 * @returns Promise that resolves (with no value) after timerMS milliseconds
 */
const sleep = (timerMS: number) => new Promise((resolve) => setTimeout(resolve, timerMS));
export default sleep;
|
<gh_stars>1-10
package com.home.demo.service.impl;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.flowable.engine.IdentityService;
import org.flowable.engine.RepositoryService;
import org.flowable.engine.RuntimeService;
import org.flowable.engine.TaskService;
import org.flowable.engine.repository.Deployment;
import org.flowable.engine.repository.ProcessDefinition;
import org.flowable.task.api.Task;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.home.demo.bean.UserTask;
import com.home.demo.bean.Vacation;
import com.home.demo.service.HolidayService;
import com.home.demo.util.Result;
import com.home.demo.util.ResultGenerator;
@Service
public class HolidayServiceImpl implements HolidayService{

	@Autowired
	private TaskService taskService;
	@Autowired
	private RepositoryService repositoryService;
	@Autowired
	private RuntimeService runtimeService;
	@Autowired
	private IdentityService identityService;

	/**
	 * Converts Flowable tasks into {@link UserTask} DTOs, logging each one to
	 * stdout. Shared by findAll/findByGroupName/findByAssignee, which
	 * previously duplicated this loop verbatim.
	 */
	private List<UserTask> toUserTasks(List<Task> tasks) {
		System.out.println("You have " + tasks.size() + " tasks:");
		List<UserTask> userTasks = new ArrayList<UserTask>();
		for (int i = 0; i < tasks.size(); i++) {
			Task task = tasks.get(i);
			Map<String, Object> processVariables = taskService.getVariables(task.getId());
			// "employeeName" is put into the process variables when the
			// vacation request is started (see save()).
			String taskFrom = (String) processVariables.get("employeeName");
			userTasks.add(new UserTask(task.getId(), task.getName(), taskFrom, "the management group"));
			System.out.println((i + 1) + ") " + task.getName());
		}
		return userTasks;
	}

	/** Returns every open task in the engine (no candidate-group filter). */
	@Override
	public Result findAll() {
		try {
			List<Task> tasks = taskService.createTaskQuery().list();
			return ResultGenerator.getSuccessResult(toUserTasks(tasks));
		} catch (Exception e) {
			e.printStackTrace();
			return ResultGenerator.getFailResult("获取请假列表失败!");
		}
	}

	/**
	 * Returns the process variables of one "management" candidate task
	 * (plus its id and name) so an administrator can review it.
	 */
	@Override
	public Result findById(String taskId) {
		try {
			System.out.println("Which task would you like to complete?");
			List<Task> tasks = taskService.createTaskQuery().taskCandidateGroup("management")
					.taskId(taskId).list();
			if (tasks.size() == 0) {
				return ResultGenerator.getSuccessResult("No tasks!");
			}
			Task task = tasks.get(0);
			Map<String, Object> processVariables = taskService.getVariables(taskId);
			processVariables.put("taskId", taskId);
			processVariables.put("taskName", task.getName());
			return ResultGenerator.getSuccessResult(processVariables);
		} catch (Exception e) {
			e.printStackTrace();
			return ResultGenerator.getFailResult("获取请假信息失败!");
		}
	}

	/** Completes a task with the approval decision and reviewer comments. */
	@Override
	public Result modify(String taskId, boolean isApproved, String comments) {
		try {
			Map<String, Object> variables = new HashMap<String, Object>();
			variables.put("vacationApproved", isApproved);
			variables.put("comments", comments);
			taskService.complete(taskId, variables);
			variables.put("taskId", taskId);
			return ResultGenerator.getSuccessResult(variables);
		} catch (Exception e) {
			e.printStackTrace();
			return ResultGenerator.getFailResult("处理请假信息失败!");
		}
	}

	/** Starts a new "vacationRequest" process instance for the employee. */
	@Override
	public Result save(Vacation vacation) {
		try {
			Map<String, Object> variables = new HashMap<String, Object>();
			variables.put("employeeName", vacation.getEmployeeName());
			variables.put("startDate", vacation.getStartDate());
			variables.put("numberOfDays", vacation.getNumberOfDays());
			variables.put("reason", vacation.getReason());
			// setAuthenticatedUserId stores the user in a thread-local; the
			// original never reset it, leaking the identity to the next
			// request served by the same pooled thread. Clear it in finally.
			identityService.setAuthenticatedUserId(vacation.getEmployeeName());
			try {
				runtimeService.startProcessInstanceByKey("vacationRequest", variables);
			} finally {
				identityService.setAuthenticatedUserId(null);
			}
			return ResultGenerator.getSuccessResult(variables);
		} catch (Exception e) {
			e.printStackTrace();
			return ResultGenerator.getFailResult("假期申请提交失败!");
		}
	}

	/**
	 * Deploys the vacation-request BPMN definition and reports the
	 * deployment / process-definition identifiers.
	 * NOTE(review): unlike the other methods this one lets exceptions
	 * propagate instead of returning a fail Result — confirm intent.
	 */
	@Override
	public Result deployProcess() {
		Deployment deployment = repositoryService.createDeployment()
				.addClasspathResource("processes/vacation-request.bpmn20.xml")
				.deploy();
		ProcessDefinition processDefinition = repositoryService.createProcessDefinitionQuery()
				.deploymentId(deployment.getId())
				.singleResult();
		Map<String, Object> result = new HashMap<String, Object>();
		result.put("deploymentId", deployment.getId());
		result.put("deploymentName", deployment.getName());
		result.put("processDefinitionId", processDefinition.getId());
		result.put("processDefinitionName", processDefinition.getName());
		return ResultGenerator.getSuccessResult(result);
	}

	/** Returns open tasks whose candidate group is {@code groupName}. */
	@Override
	public Result findByGroupName(String groupName) {
		try {
			List<Task> tasks = taskService.createTaskQuery().taskCandidateGroup(groupName).list();
			return ResultGenerator.getSuccessResult(toUserTasks(tasks));
		} catch (Exception e) {
			e.printStackTrace();
			return ResultGenerator.getFailResult("获取请假信息失败!");
		}
	}

	/** Returns the "Modify vacation request" tasks assigned to {@code assignee}. */
	@Override
	public Result findByAssignee(String assignee) {
		try {
			List<Task> tasks = taskService.createTaskQuery().taskAssignee(assignee)
					.taskName("Modify vacation request").list();
			return ResultGenerator.getSuccessResult(toUserTasks(tasks));
		} catch (Exception e) {
			e.printStackTrace();
			return ResultGenerator.getFailResult("获取请假信息失败!");
		}
	}
}
|
// Prints the first `rows` rows of Pascal's triangle to stdout,
// one row per line, values separated by single spaces.
//
// Fixes vs. the original:
//  * `int arr[rows][rows]` is a variable-length array, which is not
//    standard C++ (GCC extension only); replaced by a single
//    heap-allocated row updated in place, which also cuts memory
//    from O(rows^2) to O(rows).
//  * Guards against rows <= 0 (the VLA would have had UB there).
void pascalTriangle(int rows)
{
    if (rows <= 0)
        return;

    // row[i] holds C(line, i) for the row currently being printed.
    int* row = new int[rows];

    for (int line = 0; line < rows; line++)
    {
        // Extend the row: the new last entry is 1, and interior entries
        // are updated right-to-left so each addition still reads the
        // previous row's values (row[0] stays 1 throughout).
        row[line] = 1;
        for (int i = line - 1; i > 0; i--)
            row[i] = row[i] + row[i - 1];

        for (int i = 0; i <= line; i++)
            cout << row[i] << " ";
        cout << endl;
    }

    delete[] row;
}
# Evaluate a fine-tuned GPT-style LM on the WikiText-103 validation set with a
# shuffled/augmented input pipeline (keep nouns+verbs in the first two thirds)
# and score only the last sixth of each sequence.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-shuffled/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-shuffled/7-1024+0+512-shuffled-N-VB-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_remove_all_but_nouns_and_verbs_first_two_thirds_sixth --eval_function last_sixth_eval
/*
*
* Copyright (c) 2021
* <NAME>
*
* Use, modification and distribution are subject to the
* Boost Software License, Version 1.0. (See accompanying file
* LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
*
*/
#if defined(_WIN32) && !defined(BOOST_REGEX_NO_W32)
//
// Make sure our forward declarations match those in windows.h:
//
// NO_STRICT relaxes windows.h's STRICT handle typedefs so they stay
// compatible with the forward declarations Boost.Regex makes before
// windows.h is seen. Including <boost/regex.hpp> *before* <windows.h>
// is deliberate — that ordering is exactly what this probe checks.
#define NO_STRICT
#include <boost/regex.hpp>
#include <windows.h>
// Minimal use of the library so the compiler instantiates the relevant
// declarations. This is a build-time compatibility probe; the function
// is never meant to do useful work at runtime.
void test_proc()
{
   std::string text, re;
   boost::regex exp(re);
   regex_match(text, exp);
}
#endif
|
<reponame>UNIZAR-30245-2018/modelo-control<filename>src/modelo/datos/DAO/UsuarioDAO.java
/**
*
*/
package modelo.datos.DAO;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.LocalDate;
import java.util.ArrayList;
import modelo.datos.VO.JuegoVO;
import modelo.datos.VO.SeguidorVO;
import modelo.datos.VO.UsuarioVO;
/**
* @author <NAME>
*
*/
public class UsuarioDAO {

	// -------------------------------------------------------------------
	// Internal helpers (shared JDBC plumbing). The public API below is
	// unchanged; these only remove the copy-paste duplication.
	// -------------------------------------------------------------------

	/** Builds a UsuarioVO from the current row of a SELECT * FROM usuario. */
	private UsuarioVO usuarioDesdeFila(ResultSet rs) throws SQLException {
		return new UsuarioVO(rs.getString(1), rs.getString(2), rs.getString(3), rs.getString(4),
				rs.getString(5), rs.getInt(6), rs.getInt(7));
	}

	/**
	 * Runs {@code query} with one String parameter and returns column 1 of
	 * the first row as a String; logs and returns "Error" on failure.
	 */
	private String consultaString(String query, String parametro, String mensajeError, Connection conexion) {
		String retVal = "Error";
		try {
			PreparedStatement ps = conexion.prepareStatement(query);
			ps.setString(1, parametro);
			ResultSet rs = ps.executeQuery();
			if (!rs.first()) {
				throw new SQLException(mensajeError);
			} else {
				retVal = rs.getString(1);
			}
		} catch (Exception e) {
			e.printStackTrace(System.err);
		}
		return retVal;
	}

	/** Same as {@link #consultaString} but for an int column; -1 on failure. */
	private int consultaInt(String query, String parametro, String mensajeError, Connection conexion) {
		int retVal = -1;
		try {
			PreparedStatement ps = conexion.prepareStatement(query);
			ps.setString(1, parametro);
			ResultSet rs = ps.executeQuery();
			if (!rs.first()) {
				throw new SQLException(mensajeError);
			} else {
				retVal = rs.getInt(1);
			}
		} catch (Exception e) {
			e.printStackTrace(System.err);
		}
		return retVal;
	}

	/**
	 * Returns true when {@code query}, with the given String parameters
	 * bound in order, yields at least one row. Mirrors the original
	 * existe* methods: a null first parameter short-circuits to false.
	 */
	private boolean existeFila(String query, Connection conexion, String... parametros) {
		boolean retVal = false;
		try {
			if (parametros.length > 0 && parametros[0] != null) {
				PreparedStatement ps = conexion.prepareStatement(query);
				for (int i = 0; i < parametros.length; i++) {
					ps.setString(i + 1, parametros[i]);
				}
				ResultSet rs = ps.executeQuery();
				if (rs.first()) {
					retVal = true;
				}
			}
		} catch (Exception e) {
			e.printStackTrace(System.err);
		}
		return retVal;
	}

	/**
	 * Returns the games referenced by {@code tabla} (a trusted internal
	 * table name, never user input) for the given user.
	 *
	 * BUG FIX: the original methods called rs.first() and then iterated
	 * with {@code while (rs.next())}, which skipped the first row of the
	 * result set. A do-while (the same pattern getAllUsuario already
	 * used) processes the row the cursor is on as well.
	 */
	private ArrayList<JuegoVO> getJuegosDeTabla(String tabla, String user, String mensajeError,
			Connection conexion) {
		ArrayList<JuegoVO> retVal = new ArrayList<>();
		JuegoDAO juegoDAO = new JuegoDAO();
		try {
			String query = "SELECT * FROM " + tabla + " WHERE usuario = ?";
			PreparedStatement ps = conexion.prepareStatement(query);
			ps.setString(1, user);
			ResultSet rs = ps.executeQuery();
			if (!rs.first()) {
				throw new SQLException(mensajeError);
			} else {
				do {
					retVal.add(juegoDAO.getJuego(rs.getInt("id_juego"), conexion));
				} while (rs.next());
			}
		} catch (Exception e) {
			e.printStackTrace(System.err);
		}
		return retVal;
	}

	/** Inserts a (usuario, id_juego) pair into {@code tabla} (trusted name). */
	private void insertarJuegoEn(String tabla, UsuarioVO usuario, String id, Connection conexion) {
		try {
			String query = "INSERT INTO " + tabla + " (usuario,id_juego) VALUES (?,?)";
			PreparedStatement ps = conexion.prepareStatement(query);
			ps.setString(1, usuario.getSeudonimo());
			ps.setString(2, id);
			if (ps.executeUpdate() != 1) {
				throw new SQLException("Ha habido problemas a la hora de insertar el usuario");
			}
		} catch (Exception e) {
			e.printStackTrace(System.err);
		}
	}

	// -------------------------------------------------------------------
	// Public API
	// -------------------------------------------------------------------

	/**
	 * @param seudonimo
	 * @param conexion
	 * @return UsuarioVO vacio si no existe ningun usuario con el seudonimo
	 *         especificado <br>
	 *         UsuarioVO con el seudonimo especificado
	 */
	public UsuarioVO getUsuario(String seudonimo, Connection conexion) {
		UsuarioVO retVal = new UsuarioVO();
		try {
			String query = "SELECT * FROM usuario WHERE seudonimo = ?";
			PreparedStatement ps = conexion.prepareStatement(query);
			ps.setString(1, seudonimo);
			ResultSet rs = ps.executeQuery();
			if (!rs.first()) {
				throw new SQLException("Error: No se ha encontrado ningun usuario con el seudonimo de " + seudonimo);
			} else {
				retVal = usuarioDesdeFila(rs);
			}
		} catch (Exception e) {
			e.printStackTrace(System.err);
		}
		return retVal;
	}

	/** Looks a user up by e-mail; empty UsuarioVO when not found. */
	public UsuarioVO getUsuarioEmail(String email, Connection conexion) {
		UsuarioVO retVal = new UsuarioVO();
		try {
			String query = "SELECT * FROM usuario WHERE email = ?";
			PreparedStatement ps = conexion.prepareStatement(query);
			ps.setString(1, email);
			ResultSet rs = ps.executeQuery();
			if (!rs.first()) {
				throw new SQLException("Error: No se ha encontrado ningun usuario con el email " + email);
			} else {
				retVal = usuarioDesdeFila(rs);
			}
		} catch (Exception e) {
			e.printStackTrace(System.err);
		}
		return retVal;
	}

	/** Returns every user in the table; empty list (after logging) if none. */
	public ArrayList<UsuarioVO> getAllUsuario(Connection conexion) {
		ArrayList<UsuarioVO> retVal = new ArrayList<UsuarioVO>();
		try {
			String query = "SELECT * from usuario";
			PreparedStatement ps = conexion.prepareStatement(query);
			ResultSet rs = ps.executeQuery();
			if (!rs.first()) {
				throw new SQLException("Error: No hay ningun usuario en la tabla usuario");
			} else {
				do {
					retVal.add(usuarioDesdeFila(rs));
				} while (rs.next());
			}
		} catch (Exception e) {
			e.printStackTrace(System.err);
		}
		return retVal;
	}

	/** Nickname for the given e-mail, or "Error". */
	public String getSeudonimo(String email, Connection conexion) {
		return consultaString("SELECT seudonimo FROM usuario WHERE email = ?", email,
				"Error: No se ha encontrado ningun usuario con el email de " + email, conexion);
	}

	/** Display name for the given nickname, or "Error". */
	public String getNombre(String seudonimo, Connection conexion) {
		return consultaString("SELECT nombre FROM usuario WHERE seudonimo = ?", seudonimo,
				"Error: No se ha encontrado ningun usuario con el seudonimo de " + seudonimo, conexion);
	}

	/** E-mail for the given nickname, or "Error". */
	public String getEmail(String seudonimo, Connection conexion) {
		return consultaString("SELECT email FROM usuario WHERE seudonimo = ?", seudonimo,
				"Error: No se ha encontrado ningun usuario con el seudonimo de " + seudonimo, conexion);
	}

	/** Stored password for the given nickname, or "Error". */
	public String getPassword(String seudonimo, Connection conexion) {
		return consultaString("SELECT password FROM usuario WHERE seudonimo = ?", seudonimo,
				"Error: No se ha encontrado ningun usuario con el seudonimo de " + seudonimo, conexion);
	}

	/** Avatar/image reference for the given nickname, or "Error". */
	public String getImagen(String seudonimo, Connection conexion) {
		return consultaString("SELECT imagen FROM usuario WHERE seudonimo = ?", seudonimo,
				"Error: No se ha encontrado ningun usuario con el seudonimo de " + seudonimo, conexion);
	}

	/** Level for the given nickname, or -1. */
	public int getNivel(String seudonimo, Connection conexion) {
		return consultaInt("SELECT nivel FROM usuario WHERE seudonimo = ?", seudonimo,
				"Error: No se ha encontrado ningun usuario con el seudonimo de " + seudonimo, conexion);
	}

	/** Experience points for the given nickname, or -1. */
	public int getExperiencia(String seudonimo, Connection conexion) {
		return consultaInt("SELECT experiencia FROM usuario WHERE seudonimo = ?", seudonimo,
				"Error: No se ha encontrado ningun usuario con el seudonimo de " + seudonimo, conexion);
	}

	/**
	 * @param seudonimo
	 * @param password
	 * @param conexion
	 * @return true si encuentra un usuario con igual nombre y contraseña, o email y
	 *         contraseña
	 */
	public boolean existeUsuarioSeudonimo(String seudonimo, String password, Connection conexion) {
		return existeFila("SELECT * FROM usuario WHERE seudonimo = ? AND password = ?",
				conexion, seudonimo, password);
	}

	/** True when an (email, password) pair matches a user. */
	public boolean existeUsuarioEmail(String email, String password, Connection conexion) {
		return existeFila("SELECT * FROM usuario WHERE email = ? AND password = ?",
				conexion, email, password);
	}

	/** True when some user already has this e-mail. */
	public boolean existeEmail(String email, Connection conexion) {
		return existeFila("SELECT * FROM usuario WHERE email = ?", conexion, email);
	}

	/** True when some user already has this nickname. */
	public boolean existeSeudonimo(String seudonimo, Connection conexion) {
		return existeFila("SELECT * FROM usuario WHERE seudonimo = ?", conexion, seudonimo);
	}

	/** Inserts a new user row (seudonimo, nombre, email, password). */
	public void insertarUsuario(UsuarioVO usuario, Connection conexion) {
		try {
			String query = "INSERT INTO usuario (seudonimo, nombre, email, password) VALUES (?,?,?,?)";
			PreparedStatement ps = conexion.prepareStatement(query);
			ps.setString(1, usuario.getSeudonimo());
			ps.setString(2, usuario.getNombre());
			ps.setString(3, usuario.getEmail());
			ps.setString(4, usuario.getPassword());
			if (ps.executeUpdate() != 1) {
				throw new SQLException("Ha habido problemas a la hora de insertar el usuario");
			}
		} catch (Exception e) {
			e.printStackTrace(System.err);
		}
	}

	/* Método que saca todos los juegos en curso de un usuario */
	public ArrayList<JuegoVO> getEnCursoByUser(String user, Connection conexion) {
		return getJuegosDeTabla("juegoEnCurso", user,
				"Error: No se ha encontrado ningún juego en curso al usuario " + user, conexion);
	}

	/* Método que saca todos los juegos completados de un usuario */
	public ArrayList<JuegoVO> getCompletosByUser(String user, Connection conexion) {
		return getJuegosDeTabla("juegoCompletado", user,
				"Error: No se ha encontrado ningún juego completado al usuario " + user, conexion);
	}

	/* Método que saca todos los juegos pendientes de un usuario */
	public ArrayList<JuegoVO> getPendientesByUser(String user, Connection conexion) {
		return getJuegosDeTabla("juegoPendiente", user,
				"Error: No se ha encontrado ningún juego pendiente al usuario " + user, conexion);
	}

	/** Marks a game as in progress for the user. */
	public void insertarJuegoEnCurso(UsuarioVO usuario, String id, Connection conexion) {
		insertarJuegoEn("juegoEnCurso", usuario, id, conexion);
	}

	/** Marks a game as pending for the user. */
	public void insertarJuegoPendiente(UsuarioVO usuario, String id, Connection conexion) {
		insertarJuegoEn("juegoPendiente", usuario, id, conexion);
	}

	/** Marks a game as completed for the user. */
	public void insertarJuegoCompletado(UsuarioVO usuario, String id, Connection conexion) {
		insertarJuegoEn("juegoCompletado", usuario, id, conexion);
	}

	/** True when {@code usuario} follows {@code sigue}. */
	public boolean sonSeguidores(UsuarioVO usuario, UsuarioVO sigue, Connection conexion) {
		boolean res = false;
		try {
			String query = "SELECT * FROM seguidor WHERE usuario = ? AND usuario_seguido = ?";
			PreparedStatement ps = conexion.prepareStatement(query);
			ps.setString(1, usuario.getSeudonimo());
			ps.setString(2, sigue.getSeudonimo());
			ResultSet rs = ps.executeQuery();
			if (rs.first()) {
				res = true;
			}
		} catch (Exception e) {
			e.printStackTrace(System.err);
		}
		return res;
	}

	/** Returns the follow relations where {@code usuario} is the follower. */
	public ArrayList<SeguidorVO> getSeguidos(String usuario, Connection conexion) {
		ArrayList<SeguidorVO> retVal = new ArrayList<>();
		try {
			String query = "SELECT * FROM seguidor WHERE usuario = ?";
			PreparedStatement ps = conexion.prepareStatement(query);
			ps.setString(1, usuario);
			ResultSet rs = ps.executeQuery();
			if (rs.first()) {
				do {
					String user = rs.getString("usuario");
					String userSeguido = rs.getString("usuario_seguido");
					LocalDate fecha = rs.getDate("fecha").toLocalDate();
					retVal.add(new SeguidorVO(user, userSeguido, fecha));
				} while (rs.next());
			} else {
				System.out.println("No se ha encontrado ningun seguidoo");
			}
		} catch (SQLException e) {
			e.printStackTrace();
		}
		return retVal;
	}
}
|
package com.yoga.core.mybatis;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedJdbcTypes;
import org.apache.ibatis.type.MappedTypes;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
@MappedTypes({List.class})
@MappedJdbcTypes({JdbcType.VARCHAR})
public class IntListTypeHandler extends BaseTypeHandler<List<Integer>> {

    /** Serialises the list as a comma-separated string, dropping null elements. */
    @Override
    public void setNonNullParameter(PreparedStatement preparedStatement, int i, List<Integer> integers, JdbcType jdbcType) throws SQLException {
        String str = Joiner.on(",").skipNulls().join(integers);
        preparedStatement.setString(i, str);
    }

    @Override
    public List<Integer> getNullableResult(ResultSet rs, String columnName) throws SQLException {
        return this.stringToList(rs.getString(columnName));
    }

    @Override
    public List<Integer> getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
        return this.stringToList(rs.getString(columnIndex));
    }

    @Override
    public List<Integer> getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
        return this.stringToList(cs.getString(columnIndex));
    }

    /**
     * Parses a comma-separated column value back into a list of integers.
     *
     * Robustness fix: the original fed every raw token to Integer::valueOf,
     * so a value like "1, 2" or a trailing comma ("1,2,") threw
     * NumberFormatException. Tokens are now trimmed and blank tokens are
     * skipped, which is a strict superset of the previously accepted input.
     */
    private List<Integer> stringToList(String str) {
        if (Strings.isNullOrEmpty(str)) {
            return new ArrayList<>();
        }
        return Arrays.stream(str.split(","))
                .map(String::trim)
                .filter(token -> !token.isEmpty())
                .map(Integer::valueOf)
                .collect(Collectors.toList());
    }
}
|
import java.util.Scanner;
/**
 * Reads a test-case count from stdin, then for each candidate username
 * prints VALID when it matches the expected shape — one '_' or '.',
 * one or more digits, optional letters, optional trailing '_' — and
 * INVALID otherwise.
 */
public class AlienUsername
{
    public static void main(String[] args)
    {
        Scanner scanner = new Scanner(System.in);
        int remaining = scanner.nextInt();
        while (remaining-- > 0)
        {
            String candidate = scanner.next();
            // String.matches anchors the pattern to the whole input.
            boolean valid = candidate.matches("[_\\.]\\d+[a-zA-Z]*_?");
            System.out.println(valid ? "VALID" : "INVALID");
        }
        scanner.close();
    }
}
|
import React from 'react';
import { CodeDemo, Api } from '../CommonDispalyComponents';
import './progress.example.scss';
import CodeDemo1 from './progress.codeDemo1';
const code1 = require('!!raw-loader!./progress.codeDemo1.tsx');
import CodeDemo2 from './progress.codeDemo2';
const code2 = require('!!raw-loader!./progress.codeDemo2.tsx');
import CodeDemo3 from './progress.codeDemo3';
const code3 = require('!!raw-loader!./progress.codeDemo3.tsx');
import CodeDemo4 from './progress.codeDemo4';
const code4 = require('!!raw-loader!./progress.codeDemo4.tsx');
import CodeDemo5 from './progress.codeDemo5';
const code5 = require('!!raw-loader!./progress.codeDemo5.tsx');
import CodeDemo6 from './progress.codeDemo6';
const code6 = require('!!raw-loader!./progress.codeDemo6.tsx');
import CodeDemo7 from './progress.codeDemo7';
const code7 = require('!!raw-loader!./progress.codeDemo7.tsx');
import CodeDemo8 from './progress.codeDemo8';
const code8 = require('!!raw-loader!./progress.codeDemo8.tsx');
// Documentation page for the Progress component: title/intro sections,
// eight live code demos laid out in two columns, and an API table.
const ProgressExample: React.FunctionComponent = () => {
  return (
    <div className="progress-example-page">
      <section>
        <h1>Progress 进度条</h1>
        <p>展示操作的当前进度。</p>
      </section>
      <section>
        <h2>何时使用</h2>
        <p>在操作需要较长时间才能完成时,为用户显示该操作的当前进度和状态。</p>
      </section>
      <section>
        <h2>代码演示</h2>
        {/* Demos are split into two fixed columns: odd-numbered demos on
            the left, even-numbered on the right. */}
        <div className="code-demonstration">
          <div className="code-demo-column">
            <CodeDemo
              title="进度条"
              content={<p>普通进度条。</p>}
              code={code1.default}
            >
              <CodeDemo1 />
            </CodeDemo>
            <CodeDemo
              title="仪表盘"
              content={<p>仪表盘样式的圆形进度条。</p>}
              code={code3.default}
            >
              <CodeDemo3 />
            </CodeDemo>
            <CodeDemo
              title="自定义文案"
              content={
                <p>
                  可自定义进度条的文案。(默认的文案只展示整数进度,需要小数或其他展示文案的请使用自定义文案。)
                </p>
              }
              code={code5.default}
            >
              <CodeDemo5 />
            </CodeDemo>
            {/* NOTE(review): this card's title and content duplicate the
                "自定义文案" card above (CodeDemo5); CodeDemo7 presumably
                showcases something different — confirm the intended
                title/description. */}
            <CodeDemo
              title="自定义文案"
              content={
                <p>
                  可自定义进度条的文案。(默认的文案只展示整数进度,需要小数或其他展示文案的请使用自定义文案。)
                </p>
              }
              code={code7.default}
            >
              <CodeDemo7 />
            </CodeDemo>
          </div>
          <div className="code-demo-column">
            <CodeDemo
              title="进度圈"
              content={<p>圆形的进度条。</p>}
              code={code2.default}
            >
              <CodeDemo2 />
            </CodeDemo>
            <CodeDemo
              title="动态化"
              content={<p>进度条变化动画。</p>}
              code={code4.default}
            >
              <CodeDemo4 />
            </CodeDemo>
            <CodeDemo
              title="圆角/方角边缘"
              content={
                <p>
                  通过设置<code>strokeLinecap="square|round"</code>
                  可以调整进度条边缘的形状,默认为<code>round</code>
                </p>
              }
              code={code6.default}
            >
              <CodeDemo6 />
            </CodeDemo>
            <CodeDemo
              title="自定义颜色"
              content={<p>可自定义背景色和进度条颜色</p>}
              code={code8.default}
            >
              <CodeDemo8 />
            </CodeDemo>
          </div>
        </div>
      </section>
      <section>
        <h2>API</h2>
        {/* Each row: [prop name, description, type, default]. */}
        <Api
          data={[
            ['type', '进度条的类型', 'normal | circle | dashboard', 'normal'],
            ['percent', '进度条的百分比', 'number', '-'],
            ['showInfo', '是否展示进度条数值或图标', 'boolean', 'true'],
            ['value', '自定义展示的文案', 'string', '-'],
            ['status', '进度条的状态', 'normal | success | fail', 'normal'],
            ['strokeLinecap', '进度条的边缘样式', 'square | round', 'round'],
            ['backgroundColor', '进度条的背景色', 'string', '#E5E5E5'],
            [
              'strokeColor',
              <>
                自定义进度条的颜色,优先级比<code>status</code>默认的颜色高
              </>,
              'string',
              '-'
            ]
          ]}
        />
      </section>
    </div>
  );
};
export default ProgressExample;
|
#! /bin/bash -ex
# Deploy the demo "cafe" application and its ingress, then layer the
# session-cookie persistence example on top.
# -e: stop on the first failing kubectl command; -x: echo each command.
kubectl apply -f ../hello/cafe.yaml
kubectl apply -f ../hello/cafe-ingress.yaml
kubectl apply -f pass-on-session-cookie.yaml
|
package com.aditiva.ISO8583.interfaces;
/**
 * Marker interface for ISO 8583 network-management handling.
 *
 * <p>NOTE(review): the interface declares no methods; the type parameter
 * {@code T} is currently unused here — presumably it identifies the
 * message/payload type handled by implementations. Confirm against the
 * implementing classes before relying on it.</p>
 *
 * @param <T> type handled by implementations (unused in this declaration)
 * @author aditiva
 */
public interface NetworkManagement<T> {
}
|
from collections import namedtuple
import requests
import logging
import time
from twitch.FuncCache import FuncCache
# Lightweight immutable holder for an OAuth app token. Note that `expires`
# is an absolute unix timestamp (it is compared against time.time() in
# TwitchApi._refresh_app_token), not a remaining-lifetime duration.
AppToken = namedtuple("AppToken", ["data", "expires"])
AppToken.__doc__ = """App token used to authorize API access.
data : str
The token itself, stored as a string.
expires : int
The time in seconds (unix timestamp) at which this token expires.
"""
class TwitchApi(object):
    """
    Twitch API class.

    Provides access to various Twitch APIs, and manages
    app tokens automatically. API call results are
    cached by default.

    Attributes
    ----------
    client_id : str
        Twitch client ID, from dev.twitch.tv.
    client_secret : str
        Twitch client secret, from dev.twitch.tv.

    Methods
    -------
    is_follower(from_id : str, to_id : str) -> bool
        Check whether from_id is a follower of to_id.
    get_user_id(target_user : str) -> str
        Return the numerical user ID of the given username.
    """

    def __init__(self, client_id, client_secret):
        self.client_id = client_id
        self.client_secret = client_secret
        # Sentinel token that is already expired, so the first API call
        # forces a refresh.
        self.app_token = AppToken(None, 0)
        self.log = logging.getLogger(__name__)

    def _refresh_app_token(self):
        """Refreshes the Twitch app token when necessary.

        This method requests a new app token when the existing one is
        near or at expiration. A refresh failure is tolerated (warning
        only) while the current token is still valid; once the token has
        expired, the failure is re-raised.

        See: https://dev.twitch.tv/docs/authentication
             /getting-tokens-oauth/#oauth-client-credentials-flow
        """
        # Do nothing if the token still has more than five minutes left.
        if self.app_token.expires > (time.time() + 5*60):
            self.log.debug("Twitch API token is still fresh.")
            return
        if self.app_token.expires > time.time():
            self.log.debug("Twitch API token expires in %d seconds."
                           " Getting new token."
                           % (self.app_token.expires - time.time()))
        else:
            self.log.debug("Twitch API token expired %d seconds ago."
                           " Getting new token."
                           % (time.time() - self.app_token.expires))
        # Client-credentials request for a fresh app token.
        params = {
            "client_id": self.client_id,
            "client_secret": self.client_secret,
            "grant_type": "client_credentials"
        }
        try:
            # raise_for_status() turns HTTP error codes into exceptions.
            response = requests.post("https://id.twitch.tv/oauth2/token",
                                     params=params)
            response.raise_for_status()
        # BUG FIX: the original caught the bare name `RequestException`,
        # which is never imported (NameError at the worst possible time);
        # it must be qualified via the requests package.
        except requests.RequestException as e:
            # BUG FIX: the original had these branches inverted relative
            # to its own comment — it warned when the token was already
            # expired and raised while it was still usable. If the token
            # has expired we cannot continue, so re-raise; otherwise keep
            # using the still-valid token and just log a warning.
            if time.time() > self.app_token.expires:
                raise
            self.log.warning("Failed to refresh Twitch API token: %s"
                             % str(e))
        else:
            data = response.json()
            # BUG FIX: "expires_in" is a *duration* in seconds, while
            # AppToken.expires is compared against time.time() as an
            # absolute timestamp above — anchor it to now, otherwise the
            # token is considered permanently expired and every call
            # triggers a refresh.
            self.app_token = AppToken(data["access_token"],
                                      time.time() + data["expires_in"])

    def _call_api(self, action, endpoint, params):
        """Helper method that makes the actual Twitch API call.

        Automatically refreshes the app token and sends it as a Bearer
        header with the request.

        Parameters
        ----------
        action : str
            HTTP action verb (e.g. "GET").
        endpoint : str
            Portion of the Twitch API URL after /helix/
        params : dict
            Query parameters as required for the request.

        Returns
        -------
        dict
            Dictionary containing the JSON response from Twitch.
        """
        # Try to refresh the app token first.
        self._refresh_app_token()
        self.log.debug("Calling Twitch API: %s %s(%s)"
                       % (action, endpoint, str(params)))
        headers = {
            "Authorization": "Bearer %s" % self.app_token.data
        }
        # BUG FIX: the original ignored `action` and always issued a GET;
        # requests.request() honours the caller-supplied verb (both
        # current call sites pass "GET", so behavior is unchanged there).
        response = requests.request(action,
                                    "https://api.twitch.tv/helix/%s"
                                    % endpoint.strip("/"),
                                    params=params,
                                    headers=headers)
        response.raise_for_status()
        return response.json()

    @FuncCache(size = 1000, expiry = 5.0)
    def is_follower(self, from_id, to_id):
        """Checks whether from_id is a follower of to_id.

        See: https://dev.twitch.tv/docs/api
             /reference/#get-users-follows

        Parameters
        ----------
        from_id : str
            Numerical user ID of the following user.
        to_id : str
            Numerical user ID of the followed user.

        Returns
        -------
        bool
            True if user from_id is following user to_id.
        """
        self.log.info("Calling Twitch API: Does user ID %s follow user ID %s?"
                      % (from_id, to_id))
        response = self._call_api("GET", "/users/follows",
                                  {"from_id": from_id, "to_id": to_id})
        self.log.info("Twitch API response: %s"
                      % ("Yes" if response["total"] == 1 else "No"))
        # A matching follow relation yields exactly one record.
        return response["total"] == 1

    @FuncCache(size = 1000, expiry = 5.0)
    def get_user_id(self, login):
        """Returns the numerical user ID of the given username.

        See: https://dev.twitch.tv/docs/api
             /reference/#get-users

        Parameters
        ----------
        login : str
            The user's username.

        Returns
        -------
        str
            Numerical user ID.
        """
        self.log.info("Calling Twitch API: What is %s's user ID?"
                      % login)
        response = self._call_api("GET", "users",
                                  {"login": login})
        self.log.info("Twitch API response: %s's user ID is %s"
                      % (login, response["data"][0]["id"]))
        return response["data"][0]["id"]
|
#!/usr/bin/env bash
set -o errtrace
set -o errexit
set -o nounset
# ignore tput errors for terms that do not
# support colors (colors will be blank strings)
set +e
RED=$(tput setaf 1)
GREEN=$(tput setaf 2)
NORMAL=$(tput sgr0)
BOLD=$(tput bold)
set -e
log() { printf "%b\n" "$*"; }
fail() { printf "${RED}" >&2; log "\nERROR: $*\n" >&2; printf "${NORMAL}" >&2; exit 1; }
pass() { printf "${GREEN}"; log "$*"; printf "${NORMAL}"; }
os=$(uname -s | tr '[:upper:]' '[:lower:]')
hw=$(uname -m | tr '[:upper:]' '[:lower:]')
case "$hw" in
amd64)
hw="x86_64"
;;
aarch64)
hw="arm64"
;;
*)
fail "Unsupported architecture ${hw} detected"
;;
esac
cua_version=""
pkg_arch="${os}_${hw}"
pkg_ext=".tar.gz"
pkg_cmd="tar"
pkg_args="xf"
pkg_file=""
pkg_url=""
cua_api_key=""
cua_api_app=""
cua_conf_file="/opt/circonus/unified-agent/etc/circonus-unified-agent.conf"
cua_bin_file="/opt/circonus/unified-agent/sbin/circonus-unified-agentd"
cua_service_file="/etc/rc.d/circonus-unified-agentd"
usage() {
printf "%b" "Circonus Unified Agent Install Help
Usage
${GREEN}install.sh --key <apikey>${NORMAL}
Options
--key Circonus API key/token **${BOLD}REQUIRED${NORMAL}**
[--app] Circonus API app name (authorized w/key) Default: circonus-unified-agent
[--help] This message
Note: Provide an authorized app for the key or ensure api
key/token has adequate privileges (default app state:allow)
"
}
__parse_parameters() {
local token=""
log "Parsing command line parameters"
while (( $# > 0 )) ; do
token="$1"
shift
case "$token" in
(--key)
if [[ -n "${1:-}" ]]; then
cua_api_key="$1"
shift
else
fail "--key must be followed by an api key."
fi
;;
(--app)
if [[ -n "${1:-}" ]]; then
cua_api_app="$1"
shift
else
fail "--app must be followed by an api app."
fi
;;
esac
done
}
# Verify required external commands exist, parse CLI arguments, and require
# that an API key was supplied.
__cua_init() {
    set +o errexit
    # trigger error if needed commands are not found...
    local cmd_list="cat curl sed uname mkdir basename tar"
    local cmd
    for cmd in $cmd_list; do
        type -P $cmd >/dev/null 2>&1 || fail "Unable to find '${cmd}' command. Ensure it is available in PATH '${PATH}' before continuing."
    done
    [[ -n "${pkg_cmd:-}" ]] || fail "Unable to find a package install command ($cmd_list)"
    set -o errexit
    __parse_parameters "$@"
    [[ -n "${cua_api_key:-}" ]] || fail "Circonus API key is *required*."
}
# Create the agent install directory (if missing) and cd into it.
# NOTE(review): with `set -o errexit` active, a failing mkdir/cd already
# aborts the script, so the explicit $? checks are belt-and-braces.
__make_circonus_dir() {
    local circ_dir="/opt/circonus/unified-agent"
    log "Creating Circonus base directory: ${circ_dir}"
    if [[ ! -d $circ_dir ]]; then
        # backslash bypasses any user alias for mkdir
        \mkdir -p $circ_dir
        [[ $? -eq 0 ]] || fail "unable to create ${circ_dir}"
    fi
    log "Changing to ${circ_dir}"
    \cd $circ_dir
    [[ $? -eq 0 ]] || fail "unable to change to ${circ_dir}"
}
# Download the release tarball (unless already present in the current dir),
# extract it in place, then remove the archive.
__get_cua_package() {
    local pkg="${pkg_file}${pkg_ext}"
    local url="${pkg_url}${pkg}"
    if [[ ! -f $pkg ]]; then
        log "Downloading agent package: ${url}"
        # temporarily disable errexit so we can report curl's exit code
        set +o errexit
        \curl -sLO "$url"
        curl_err=$?
        set -o errexit
        [[ $curl_err -eq 0 ]] || fail "unable to download ${url} ($curl_err)"
    fi
    [[ -f $pkg ]] || fail "unable to find ${pkg} in current dir"
    log "Installing: ${pkg_cmd} ${pkg_args} ${pkg}"
    $pkg_cmd $pkg_args $pkg
    [[ $? -eq 0 ]] || fail "installing ${pkg_cmd} ${pkg_args} ${pkg}"
    \rm $pkg
}
# Copy the example config into place, inject the API key (and optional app
# name) via sed, then start the service.
__configure_agent() {
    log "Updating configuration: ${cua_conf_file}"
    \cp /opt/circonus/unified-agent/etc/example-circonus-unified-agent.conf ${cua_conf_file}
    [[ -f $cua_conf_file ]] || fail "config file (${cua_conf_file}) not found"
    log "\tSetting Circonus API key in configuration"
    \sed -i -e "s/ api_token = \"\"/ api_token = \"${cua_api_key}\"/" $cua_conf_file
    [[ $? -eq 0 ]] || fail "updating ${cua_conf_file} with api key"
    if [[ -n "${cua_api_app}" ]]; then
        log "\tSetting Circonus API app name in configuration"
        \sed -i -e "s/ api_app = \"\"/ api_app = \"${cua_api_app}\"/" $cua_conf_file
        [[ $? -eq 0 ]] || fail "updating ${cua_conf_file} with api app"
    fi
    log "Starting circonus-unified-agent service"
    ${cua_service_file} start
    # bug fix: the failure message previously said "enable" (copy-paste from
    # __configure_service) even though this is the start call
    [[ $? -eq 0 ]] || fail "${cua_service_file} start"
}
# Install the FreeBSD rc.d service script and enable the service.
__configure_service() {
    log "Configuring FreeBSD Service"
    \cp /opt/circonus/unified-agent/service/circonus-unified-agent.freebsd ${cua_service_file}
    [[ -f $cua_service_file ]] || fail "Service file (${cua_service_file}) not found"
    ${cua_service_file} enable
    log "Created circonus-unified-agent service"
}
# Echo the latest release tag (e.g. v1.2.3) scraped from the GitHub API.
__get_latest_release() {
    local url="https://api.github.com/repos/circonus-labs/circonus-unified-agent/releases/latest"
    set +o errexit
    # -s suppresses curl's progress meter so only the tag reaches stdout
    \curl -s $url | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/'
    # bug fix: $? after a pipeline is the LAST command's status (sed), which
    # masked curl failures; PIPESTATUS[0] is curl's own exit code
    curl_err=${PIPESTATUS[0]}
    set -o errexit
    [[ $curl_err -eq 0 ]] || fail "unable to get latest release (${curl_err})"
}
# Top-level install flow: resolve the latest version, refuse to overwrite an
# existing install, then download, install the service, and configure.
cua_install() {
    log "Getting latest release version from repository"
    tag=$(__get_latest_release)
    # strip leading "v" from the tag to get the bare version number
    cua_version=${tag#v}
    pkg_file="circonus-unified-agent_${cua_version}_${pkg_arch}"
    pkg_url="https://github.com/circonus-labs/circonus-unified-agent/releases/download/v${cua_version}/"
    log "Installing Circonus Unified Agent v${cua_version} for ${pkg_arch}"
    cua_dir="/opt/circonus/unified-agent"
    [[ -d $cua_dir ]] && fail "${cua_dir} previous installation directory found."
    __cua_init "$@"
    __make_circonus_dir
    __get_cua_package
    __configure_service
    __configure_agent
    echo
    echo
    pass "Circonus Unified Agent v${cua_version} installed"
    echo
    log "Make any additional customization to configuration:"
    log "  ${cua_conf_file}"
    log "and restart agent for changes to take effect."
    echo
    echo
}
#
# no arguments are passed
#
if [[ $# -eq 0 ]]; then
    usage
    exit 0
fi

# short-circuit for help
if [[ "$*" == *--help* ]]; then
    usage
    exit 0
fi

#
# NOTE Ensure sufficient rights to do the install
#
# Installing into /opt and /etc/rc.d requires root.
(( UID != 0 )) && {
    printf "\n%b\n\n" "${RED}Must run as root[sudo] -- installing software requires certain permissions.${NORMAL}"
    exit 1
}

cua_install "$@"
# END
<filename>Lib/zDogPy/vector.py
'''Vector'''
from importlib import reload
import zDogPy.boilerplate
reload(zDogPy.boilerplate)
import math
from zDogPy.boilerplate import TAU, lerp
class Vector:
    """Mutable 3D vector used by zDogPy shapes (port of Zdog's Vector).

    Coordinates default to 0. Most mutating methods return ``self`` so
    calls can be chained.
    """

    # Class-level defaults; set() shadows these with instance attributes.
    x = 0
    y = 0
    z = 0

    def __init__(self, position=None, **kwargs):
        self.set(position, **kwargs)

    def __repr__(self):
        return f'<zDogPy Vector {self.x} {self.y} {self.z}>'

    def set(self, pos, **kwargs):
        """Set coordinates from a Vector-like object, keyword args, or a scalar.

        A bare truthy scalar is broadcast to all three axes; falsy values
        (None, 0) leave the corresponding coordinate unchanged.
        """
        if pos and hasattr(pos, 'x'):
            self.x = pos.x
        elif 'x' in kwargs:
            self.x = kwargs['x']
        elif pos:
            self.x = pos
        if pos and hasattr(pos, 'y'):
            self.y = pos.y
        elif 'y' in kwargs:
            self.y = kwargs['y']
        elif pos:
            self.y = pos
        if pos and hasattr(pos, 'z'):
            self.z = pos.z
        elif 'z' in kwargs:
            self.z = kwargs['z']
        elif pos:
            self.z = pos
        return self

    def write(self, **kwargs):
        """Set coordinates without sanitizing; write(y=2) only sets y.

        Bug fix: previously assigned the bare (undefined) names x/y/z,
        raising NameError; the values must come from kwargs.
        """
        if 'x' in kwargs:
            self.x = kwargs['x']
        if 'y' in kwargs:
            self.y = kwargs['y']
        if 'z' in kwargs:
            self.z = kwargs['z']
        return self

    def scale(self, scale):
        """Multiply in place by a Vector or scalar; no-op for falsy scale."""
        if not scale:
            return
        result = self.multiply(scale)
        self.x = result.x
        self.y = result.y
        self.z = result.z
        return self

    def translate(self, translation):
        """Add a translation in place; no-op for falsy translation."""
        if not translation:
            return
        result = self.add(translation)
        self.x = result.x
        self.y = result.y
        self.z = result.z
        return self

    def rotate(self, rotation):
        """Apply Euler rotation (Z, then Y, then X), accepting a dict or Vector."""
        if rotation is None:
            return
        if isinstance(rotation, dict):
            rotation = Vector(**rotation)
        self.rotateZ(rotation.z)
        self.rotateY(rotation.y)
        self.rotateX(rotation.x)
        return self

    def rotateZ(self, angle):
        self.rotateProperty(self, angle, 'x', 'y')

    def rotateX(self, angle):
        self.rotateProperty(self, angle, 'y', 'z')

    def rotateY(self, angle):
        self.rotateProperty(self, angle, 'x', 'z')

    def rotateProperty(self, vec, angle, propA, propB):
        """Rotate the (propA, propB) plane of vec by angle radians in place."""
        # full turns (multiples of TAU) and zero are no-ops
        if not angle or angle % TAU == 0:
            return
        cos = math.cos(angle)
        sin = math.sin(angle)
        a = getattr(vec, propA)
        b = getattr(vec, propB)
        setattr(vec, propA, a * cos - b * sin)
        setattr(vec, propB, b * cos + a * sin)

    def add(self, pos):
        """Add another Vector-like object in place; scalars are ignored."""
        if pos is None:
            return
        if hasattr(pos, 'x'):
            self.x += pos.x
        if hasattr(pos, 'y'):
            self.y += pos.y
        if hasattr(pos, 'z'):
            self.z += pos.z
        return self

    def subtract(self, pos):
        """Subtract another Vector in place; non-Vector values subtract 0."""
        if pos is None:
            return
        self.x -= pos.x if isinstance(pos, Vector) else 0
        self.y -= pos.y if isinstance(pos, Vector) else 0
        self.z -= pos.z if isinstance(pos, Vector) else 0
        return self

    def multiply(self, pos):
        """Multiply in place by a Vector (component-wise) or scalar."""
        if pos is None:
            return
        if isinstance(pos, Vector):
            self.x *= pos.x
            self.y *= pos.y
            self.z *= pos.z
        elif isinstance(pos, float) or isinstance(pos, int):
            self.x *= pos
            self.y *= pos
            self.z *= pos
        return self

    def transform(self, translation, rotation, scale):
        """Apply scale, then rotation, then translation (model transform order)."""
        self.multiply(scale)
        self.rotate(rotation)
        self.add(translation)
        return self

    def lerp(self, pos, t):
        """Linearly interpolate each coordinate toward pos by factor t."""
        self.x = lerp(self.x, pos.x or 0, t)
        self.y = lerp(self.y, pos.y or 0, t)
        self.z = lerp(self.z, pos.z or 0, t)
        return self

    def magnitude(self):
        """Euclidean length of the vector."""
        _sum = self.x * self.x + self.y * self.y + self.z * self.z
        return self.getMagnitudeSqrt(_sum)

    def getMagnitudeSqrt(self, sum_):
        # PERF: skip sqrt when the squared magnitude is ~1 (unit vector)
        if (abs(sum_ - 1) < 0.00000001):
            return 1
        return math.sqrt(sum_)

    def magnitude2d(self):
        """Euclidean length of the (x, y) projection."""
        _sum = self.x * self.x + self.y * self.y
        return self.getMagnitudeSqrt(_sum)

    def copy(self):
        """Return an independent copy of this vector."""
        return Vector(self)
|
// Doxygen-generated navigation data: member anchors for _unsupported_8cpp.xhtml
// (two BOOST_FIXTURE_TEST_CASE entries). Regenerated by the docs build —
// do not edit by hand.
var _unsupported_8cpp =
[
    [ "BOOST_FIXTURE_TEST_CASE", "_unsupported_8cpp.xhtml#aae7fa857598a8590c2efe13832774dc9", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "_unsupported_8cpp.xhtml#aa79dc79c1094831a55a21fbb8d4c12f5", null ]
];
const path = require('path');
const webpack = require('webpack');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const CopyWebpackPlugin = require('copy-webpack-plugin');
const SWPrecacheWebpackPlugin = require('sw-precache-webpack-plugin');
const TSLintWebpackPlugin = require('tslint-webpack-plugin');
const StyleLintWebpackPlugin = require('stylelint-webpack-plugin');
const CompressionWebpackPlugin = require('compression-webpack-plugin');
const ImageminWebpackPlugin = require('imagemin-webpack-plugin').default;
const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin;
const appconfig = require('./app.config.default');
module.exports = (environment = 'dev', mode = null) => {
const devEnv = environment === 'dev';
const audit = mode === 'audit';
return {
entry: getEntry(),
output: getOutput(),
resolve: getResolve(),
module: getModule(devEnv),
plugins: getPlugins(devEnv, audit),
stats: getStats(),
mode: getMode(devEnv),
};
};
// Bundle entry points: the app itself plus a pinned vendor chunk.
const getEntry = () => ({
  main: ['./src/index.tsx'],
  vendor: [
    'axios',
    'babel-polyfill',
    'react',
    'react-dom',
    'react-router-dom',
  ],
});
// Emitted bundle locations; [hash] in filenames busts caches on each build.
const getOutput = () => ({
  filename: 'scripts/[name].[hash].js',
  chunkFilename: 'scripts/[name].[hash].js',
  path: path.resolve('dist'),
  publicPath: '/',
});
// Module resolution: imports resolve from src/ as well as node_modules,
// trying TypeScript extensions before plain JS.
const getResolve = () => {
  return {
    modules: ['src', 'node_modules'],
    extensions: ['.ts', '.tsx', '.js'],
  };
};
/**
 * Module rules: TypeScript, CSS, fonts, and images.
 * In production builds, TypeScript additionally runs through babel-loader
 * (listed first, so it executes last in webpack's right-to-left order).
 * @param {boolean} devEnv true for the development build.
 */
const getModule = (devEnv) => {
  const typeScriptLoaders = [];
  if (!devEnv) {
    typeScriptLoaders.push('babel-loader');
  }
  typeScriptLoaders.push(
    {
      loader: 'awesome-typescript-loader',
      query: {
        configFileName: 'tsconfig.json',
        silent: true
      }
    },
  );
  const styleLoaders = [
    'css-loader',
  ];
  const fontLoaders = [{
    loader: 'file-loader',
    options: {
      name: 'fonts/[name].[ext]?[hash]',
    },
  }];
  const imageLoaders = [{
    loader: 'file-loader',
    options: {
      name: 'images/[name].[ext]?[hash]',
    },
  }];
  return {
    rules: [
      {
        test: /\.tsx?$/,
        exclude: /node_modules/,
        // was the deprecated `loaders` alias — `use` is the supported key
        use: typeScriptLoaders,
      },
      {
        test: /\.css$/,
        use: styleLoaders,
      },
      {
        test: /\.(eot|svg|ttf|woff|woff2)$/,
        // bug fix: `loader` was handed an array of loader objects; that form
        // belongs under `use` (Rule.loader only accepts a loader string)
        use: fontLoaders,
      }, {
        test: /\.(gif|ico|jpe?g|png|svg)$/,
        use: imageLoaders,
      },
    ],
  };
};
/**
 * Assemble the plugin list.
 * Dev builds add HMR plus TS/style linters; production builds add the
 * service-worker precache, gzip compression and image minification.
 * @param {boolean} devEnv true for the development build.
 * @param {boolean} audit when true, also emit a static bundle-analyzer report.
 */
const getPlugins = (devEnv, audit) => {
  let staticStyles = [
    '/styles/normalize.css',
    '/scripts/highlight/styles/dracula.css',
  ];
  let staticScripts = [
    '/scripts/highlight/highlight.pack.js',
  ];
  if (!devEnv) {
    // Service worker, comments and analytics are production-only includes.
    staticScripts.push(
      '/sw.js',
      '/scripts/disqus.js',
      '/scripts/googleAnalytics.js',
    );
  }
  let plugins = [
    new webpack.DefinePlugin({
      'process.env':{
        'NODE_ENV': JSON.stringify(devEnv ? 'development' : 'production')
      }
    }),
    new CopyWebpackPlugin([
      { from: './assets/', to: './' },
      { from: './content/static/', to: './' },
      { from: './content/generated/', to: './' },
    ]),
    new HtmlWebpackPlugin({
      inject: false,
      mobile: true,
      template: './src/index.ejs',
      favicon: './assets/favicon.png',
      title: appconfig.title,
      meta: appconfig.meta,
      icons: appconfig.icons,
      links: staticStyles,
      scripts: staticScripts,
      // full HTML minification in production only
      minify: devEnv ? false : {
        collapseBooleanAttributes: true,
        collapseInlineTagWhitespace: true,
        collapseWhitespace: true,
        keepClosingSlash: true,
        minifyCSS: true,
        minifyJS: true,
        minifyURLs: true,
        removeComments: true,
      },
    }),
  ];
  if (devEnv) {
    plugins.push(
      new webpack.NamedModulesPlugin(),
      new webpack.HotModuleReplacementPlugin(),
      new TSLintWebpackPlugin({
        files: ['./src/**/*.{ts,tsx}'],
        config: './tslintconfig.json',
      }),
      new StyleLintWebpackPlugin(({
        files: ['./src/**/*'],
        configFile: './.stylelintrc',
      })),
    );
  } else {
    plugins.push(
      new SWPrecacheWebpackPlugin({
        // timestamped cache id forces clients onto the newest build
        cacheId: `soofka-pl-${new Date().getTime()}`,
        filename: 'sw.js',
        minify: !devEnv,
        stripPrefix: 'dist',
        staticFileGlobs: [
          'dist/**.*',
          'dist/styles/**/*',
          'dist/scripts/**/*',
        ],
        staticFileGlobsIgnorePatterns: [
          /\.gz$/,
          /sw.js$/,
        ],
      }),
      new webpack.LoaderOptionsPlugin({
        minimize: true,
        debug: false
      }),
      new CompressionWebpackPlugin({
        asset: '[path].gz[query]',
        algorithm: 'gzip',
        test: /\.js$|\.css$|\.html$/,
        threshold: 10240,
        minRatio: 0.8,
      }),
      new ImageminWebpackPlugin({
        test: /\.(gif|ico|jpe?g|png|svg)$/,
      }),
    );
  }
  if (audit) {
    plugins.push(
      new BundleAnalyzerPlugin({
        analyzerMode: 'static',
        reportFilename: '../stats/report.html'
      })
    );
  }
  return plugins;
};
// Console reporting: quiet output except colors, timings, reasons,
// errors and warnings.
const getStats = () => {
  const stats = {
    colors: true,
    timings: true,
    reasons: true,
    errors: true,
    warnings: true,
  };
  const silenced = [
    'hash', 'version', 'assets', 'chunks', 'modules',
    'children', 'source', 'errorDetails', 'publicPath',
  ];
  for (const key of silenced) {
    stats[key] = false;
  }
  return stats;
};
const getMode = (devEnv) => devEnv ? 'development' : 'production';
|
//Import controllers
const userController = require("./controllers/user");
const ratingController = require("./controllers/rating");
// Route table: each entry wires an HTTP method and URL pattern to its
// controller handler. The server registers these at startup.
const route = (method, url, handler) => ({ method, url, handler });

const routes = [
  /************
  Users
  **********/
  route("GET", "/api/users", userController.getUsers),
  route("POST", "/api/login", userController.userLogin),
  route("GET", "/api/userExists", userController.userExists),
  route("GET", "/api/user/:id", userController.getSingleUserByID),
  route("POST", "/api/user", userController.addUser),
  route("DELETE", "/api/user/:id", userController.deleteUser),
  /************
  Ratings
  **********/
  route("POST", "/api/ratings", ratingController.queryRatings),
  route("POST", "/api/rating", ratingController.addRating),
  route("DELETE", "/api/rating", ratingController.deleteRating),
];

module.exports = routes;
|
<reponame>benstepp/d3sim
var legendaryData = require('../../data/legendary');

/**
 * Collect, per equipment slot, the legendary items that are craft-only for
 * the given class: Kadala weight of zero (never gambled) and the class
 * present in the item's smartLoot list.
 */
var getCraftable = function(dClass) {
    var craftable = {};
    Object.keys(legendaryData).forEach(function(slot) {
        craftable[slot] = legendaryData[slot].filter(function(item) {
            return item.weight === 0 && item.smartLoot.indexOf(dClass) > -1;
        });
    });
    return craftable;
};

module.exports = getCraftable;
<gh_stars>0
import {http} from "./config";

// Category API wrapper around the shared axios instance.
export default {
  // GET /category/list — returns the axios response promise.
  listar() {
    return http.get("/category/list");
  },
};
<filename>src/kamil09875/bfparser/syntax/BFISet.java<gh_stars>0
package kamil09875.bfparser.syntax;
import java.io.IOException;
import kamil09875.bfparser.BFMemory;
import kamil09875.bfparser.Translator;
/**
 * The eight-minus-two core Brainfuck instructions (loops handled elsewhere).
 * Each constant executes against a {@link BFMemory} and can translate itself
 * to another language via a {@link Translator}. The repeated per-instruction
 * debug tracing is factored into the traceBefore/traceAfter helpers.
 */
public enum BFISet implements BFInstruction{
    /** {@code <} — move the memory pointer left. */
    LEFT{
        @Override
        public void execute(final BFMemory memory, final boolean debug){
            traceBefore(memory, debug, "Moving left");
            memory.left();
            traceAfter(memory, debug);
        }
        @Override
        public String translate(final Translator translator){
            return translator.left();
        }
    },
    /** {@code >} — move the memory pointer right. */
    RIGHT{
        @Override
        public void execute(final BFMemory memory, final boolean debug){
            traceBefore(memory, debug, "Moving right");
            memory.right();
            traceAfter(memory, debug);
        }
        @Override
        public String translate(final Translator translator){
            return translator.right();
        }
    },
    /** {@code +} — increment the current cell. */
    ADD{
        @Override
        public void execute(final BFMemory memory, final boolean debug){
            traceBefore(memory, debug, "Increasing current value");
            memory.add();
            traceAfter(memory, debug);
        }
        @Override
        public String translate(final Translator translator){
            return translator.add();
        }
    },
    /** {@code -} — decrement the current cell. */
    SUB{
        @Override
        public void execute(final BFMemory memory, final boolean debug){
            traceBefore(memory, debug, "Decreasing current value");
            memory.sub();
            traceAfter(memory, debug);
        }
        @Override
        public String translate(final Translator translator){
            return translator.sub();
        }
    },
    /** {@code .} — write the current cell to stdout as a character. */
    OUTPUT{
        @Override
        public void execute(final BFMemory memory, final boolean debug){
            traceBefore(memory, debug, "Outputting current value");
            if(debug){
                System.out.print("Output: " + memory.get() + " = ");
            }
            System.out.print((char)memory.get());
            traceAfter(memory, debug);
        }
        @Override
        public String translate(final Translator translator){
            return translator.output();
        }
    },
    /** {@code ,} — read one byte from stdin into the current cell (-1 on IO error). */
    INPUT{
        @Override
        public void execute(final BFMemory memory, final boolean debug){
            traceBefore(memory, debug, "Setting current value to user input");
            try{
                memory.set(System.in.read());
            }catch(IOException e){
                memory.set(-1);
            }
            traceAfter(memory, debug);
        }
        @Override
        public String translate(final Translator translator){
            return translator.input();
        }
    };

    /** When debugging, announce the operation and dump memory beforehand. */
    private static void traceBefore(final BFMemory memory, final boolean debug, final String message){
        if(debug){
            System.out.println(message);
            memory.dump();
        }
    }

    /** When debugging, dump memory after the operation and pause for the user. */
    private static void traceAfter(final BFMemory memory, final boolean debug){
        if(debug){
            System.out.println(" vvv");
            memory.dump();
            System.out.println();
            BFMemory.waitForUser();
        }
    }
}
|
#!/usr/bin/env bash
# bug fix: the script uses bashisms ([[ -s ... ]], echo -e) but declared an
# sh shebang; under a POSIX sh (e.g. dash) those constructs fail.
#--------------------------------------------------------------------------------------------
# Expecting the following arguments in order -
# <host> = hostname/ip-address of HBase Master.
# This is made available as a macro in host configuration.
# <port> = Port # on which the hbase master metrics are available (default = 16010)
# This is made available as a macro in host configuration.
# <name_in_zabbix> = Name by which the HBase Master is configured in Zabbix.
# This is made available as a macro in host configuration.
#--------------------------------------------------------------------------------------------
COMMAND_LINE="$0 $*"
export SCRIPT_NAME="$0"

usage() {
    echo "Usage: $SCRIPT_NAME <host> <port> <name_in_zabbix>"
}

if [ $# -ne 3 ]
then
    usage ;
    # bug fix: bare `exit` returned the status of usage (0); wrong arguments
    # should signal failure
    exit 1 ;
fi

#--------------------------------------------------------------------------------------------
# First 2 parameters are required for connecting to HBase Master
# The 3th parameter NAME_IN_ZABBIX is required to be sent back to Zabbix to identify the
# Zabbix host/entity for which these metrics are destined.
#--------------------------------------------------------------------------------------------
export HBASE_HOST=$1
export METRICS_PORT=$2
export NAME_IN_ZABBIX=$3

#--------------------------------------------------------------------------------------------
# Set the data output file and the log fle from zabbix_sender
#--------------------------------------------------------------------------------------------
export DATA_FILE="/tmp/${NAME_IN_ZABBIX}_HBaseMaster.txt"
export BAK_DATA_FILE="/tmp/${NAME_IN_ZABBIX}_HBaseMaster_bak.txt"
export LOG_FILE="/tmp/${NAME_IN_ZABBIX}.log"

#--------------------------------------------------------------------------------------------
# Use python to get the metrics data from HBase Master and use screen-scraping to extract
# metrics.
# The final result of screen scraping is a file containing data in the following format -
# <NAME_IN_ZABBIX> <METRIC_NAME> <METRIC_VALUE>
#--------------------------------------------------------------------------------------------
python `dirname $0`/zabbix-HMaster.py $HBASE_HOST $METRICS_PORT $DATA_FILE $NAME_IN_ZABBIX

#--------------------------------------------------------------------------------------------
# Check the size of $DATA_FILE. If it is not empty, use zabbix_sender to send data to Zabbix.
#--------------------------------------------------------------------------------------------
if [[ -s $DATA_FILE ]]
then
    zabbix_sender -vv -z 127.0.0.1 -i $DATA_FILE 2>>$LOG_FILE 1>>$LOG_FILE
    echo -e "Successfully executed $COMMAND_LINE" >>$LOG_FILE
    mv $DATA_FILE $BAK_DATA_FILE
    echo "OK"
else
    echo "Error in executing $COMMAND_LINE" >> $LOG_FILE
    echo "ERROR"
fi
<reponame>laurence-myers/tcat<filename>src/generator/walker.ts<gh_stars>1-10
import {
ArrayIterationNode,
AssignmentNode,
GeneratorAstNode,
ObjectIterationNode,
TemplateRootNode,
ScopedBlockNode, ParameterNode, IfStatementNode
} from "./ast";
import { assertNever, last } from "../core";
import {ExpressionScopeRectifier} from "../ngExpression/expressionWalker";
import {ProgramNode} from "../ngExpression/ast";
/**
 * Base visitor over the generator AST. Subclasses implement one walk method
 * per node type; dispatch() routes each node to the right method via its
 * `type` discriminant.
 */
export abstract class BaseWalker {
    protected abstract walkArrayIterationNode(node : ArrayIterationNode) : void;
    protected abstract walkAssignmentNode(node : AssignmentNode) : void;
    protected abstract walkIfStatementNode(node : IfStatementNode) : void;
    protected abstract walkObjectIterationNode(node : ObjectIterationNode) : void;
    protected abstract walkParameterNode(node : ParameterNode) : void;
    protected abstract walkScopedBlockNode(node : ScopedBlockNode) : void;
    protected abstract walkTemplateRootNode(node : TemplateRootNode) : void;

    // Dispatch a list of sibling nodes in order.
    protected dispatchAll(nodes : GeneratorAstNode[]) : void {
        return nodes.forEach((node) => this.dispatch(node));
    }

    protected dispatch(node : GeneratorAstNode) : void {
        switch (node.type) {
            case 'ArrayIterationNode':
                return this.walkArrayIterationNode(node);
            case 'AssignmentNode':
                return this.walkAssignmentNode(node);
            case 'IfStatementNode':
                return this.walkIfStatementNode(node);
            case 'ObjectIterationNode':
                return this.walkObjectIterationNode(node);
            case 'ParameterNode':
                return this.walkParameterNode(node);
            case 'ScopedBlockNode':
                return this.walkScopedBlockNode(node);
            case 'TemplateRootNode':
                return this.walkTemplateRootNode(node);
            default:
                // Exhaustiveness check: compile-time error if a node type is unhandled.
                assertNever(node);
                break;
        }
    }
}
/**
 * Walker that does nothing at each node except recurse into children —
 * a convenient base class for walkers that only care about some node types.
 */
export class SkippingWalker extends BaseWalker {
    protected walkArrayIterationNode(node : ArrayIterationNode) : void {
        this.dispatchAll(node.children);
    }

    // Leaf node: nothing to recurse into.
    protected walkAssignmentNode(_node : AssignmentNode) {
    }

    protected walkIfStatementNode(node : IfStatementNode) : void {
        this.dispatchAll(node.children);
    }

    protected walkObjectIterationNode(node : ObjectIterationNode) : void {
        this.dispatchAll(node.children);
    }

    // Leaf node: nothing to recurse into.
    protected walkParameterNode(_node : ParameterNode) : void {
    }

    protected walkScopedBlockNode(node : ScopedBlockNode) {
        this.dispatchAll(node.children);
    }

    protected walkTemplateRootNode(node : TemplateRootNode) : void {
        this.dispatchAll(node.children);
    }
}
/**
 * Walks a generator AST and emits TypeScript source text.
 * Tracks output indentation, a stack of per-scope local names (so AngularJS
 * expressions can be rectified against locals vs. scope members), and the
 * stack of active `_scope_N` parameter numbers.
 */
export class TypeScriptGenerator extends SkippingWalker {
    // Monotonic counters used to mint unique identifiers in the output.
    protected counters = {
        scopes: 0,
        expressions: 0,
        blocks: 0
    };
    protected output = '';
    protected indentLevel = 0;
    protected indentString = ' ';
    // One Set of local names per nesting level; consulted when rectifying expressions.
    protected localsStack : Set<string>[] = [];
    // Numbers of the `_scope_N` parameters currently in scope (innermost last).
    protected scopeNumberStack : number[] = [];

    // Append one indented line to the output buffer.
    protected writeLine(value : string) : void {
        for (let i = 0; i < this.indentLevel; i++) {
            this.output += this.indentString;
        }
        this.output += value;
        this.output += '\n';
    }

    protected pushLocalsScope() : void {
        this.localsStack.push(new Set<string>());
    }

    protected popLocalsScope() : void {
        this.localsStack.pop();
    }

    // Register a name in the innermost locals scope (creating one if needed).
    protected addLocal(name : string) : void {
        if (this.localsStack.length === 0) {
            this.pushLocalsScope();
        }
        const locals = last(this.localsStack);
        locals!.add(name);
    }

    // Rewrite an AngularJS expression into TypeScript, qualifying scope
    // references against the current `_scope_N` and known locals.
    protected formatExpression(expression : ProgramNode) : string {
        const expressionWalker = new ExpressionScopeRectifier(last(this.scopeNumberStack) || 0, this.localsStack);
        return `(${ expressionWalker.walk(expression) })`;
    }

    // ng-repeat over an array: emits a for...of loop with the value name as a local.
    protected walkArrayIterationNode(node : ArrayIterationNode) : void {
        this.writeLine(`for (const ${ node.valueName } of ${ this.formatExpression(node.iterable) }) {`);
        this.pushLocalsScope();
        this.addLocal(node.valueName);
        this.indentLevel++;
        super.walkArrayIterationNode(node);
        this.popLocalsScope();
        this.indentLevel--;
        this.writeLine(`}`);
    }

    // Emits a variable declaration; unnamed assignments get a generated `_expr_N` name.
    protected walkAssignmentNode(node : AssignmentNode) : void {
        const name = node.name || '_expr_' + ++this.counters.expressions;
        this.addLocal(name);
        const typeAnnotation =
            node.typeAnnotation
                ? ' : ' + node.typeAnnotation
                : '';
        const expression =
            node.expressionType === 'AngularJS'
                ? this.formatExpression(node.expression)
                : node.expression;
        this.writeLine(`${ node.variableType } ${ name }${ typeAnnotation } = ${ expression };`);
    }

    protected walkIfStatementNode(node : IfStatementNode) : void {
        this.writeLine(`if (${ this.formatExpression(node.expression) }) {`);
        this.indentLevel++;
        super.walkIfStatementNode(node);
        this.indentLevel--;
        this.writeLine(`}`);
    }

    // ng-repeat over an object: for...in loop, with key and looked-up value as locals.
    protected walkObjectIterationNode(node : ObjectIterationNode) : void {
        this.writeLine(`for (const ${ node.keyName } in ${ this.formatExpression(node.iterable) }) {`);
        this.pushLocalsScope();
        this.addLocal(node.keyName);
        this.indentLevel++;
        this.writeLine(`const ${ node.valueName } = ${ this.formatExpression(node.iterable) }[${ node.keyName }];`);
        this.addLocal(node.valueName);
        super.walkObjectIterationNode(node);
        this.popLocalsScope();
        this.indentLevel--;
        this.writeLine(`}`);
    }

    protected walkParameterNode(node : ParameterNode) : void {
        this.writeLine(`${ node.name } : ${ node.typeAnnotation },`);
        this.addLocal(node.name);
    }

    // Emits `const _block_N = function (...params) { ... };`, pushing a
    // `_scope_N` parameter (and its number) when the block has a scope interface.
    protected walkScopedBlockNode(node : ScopedBlockNode) : void {
        this.pushLocalsScope();
        const blockStart = `const _block_${ ++this.counters.blocks } = function (`;
        const blockStartSuffix = `) {`;
        if (node.parameters.length > 0 || node.scopeInterface) {
            this.writeLine(blockStart);
            this.indentLevel++;
            if (node.scopeInterface) {
                this.writeLine(`_scope_${ ++this.counters.scopes } : ${ node.scopeInterface },`);
                this.scopeNumberStack.push(this.counters.scopes);
            }
            this.dispatchAll(node.parameters);
            this.indentLevel--;
            this.writeLine(blockStartSuffix);
        } else {
            this.writeLine(blockStart + blockStartSuffix);
        }
        this.indentLevel++;
        super.walkScopedBlockNode(node);
        this.popLocalsScope();
        this.indentLevel--;
        if (node.scopeInterface) {
            this.scopeNumberStack.pop();
        }
        this.writeLine(`};`);
    }

    // Entry point: walk the tree and return the accumulated TypeScript text.
    public generate(node : GeneratorAstNode) : string {
        this.dispatch(node);
        return this.output;
    }
}
export function generateTypeScript(node : GeneratorAstNode) : string {
const generator = new TypeScriptGenerator();
return generator.generate(node);
} |
package com.fiends.bware.Activities;
import android.content.Intent;
import android.os.Build;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import com.fiends.bware.R;
import com.fiends.bware.Utils.Bware;
import com.fiends.bware.Utils.BwareFiles;
import static com.fiends.bware.Utils.Bware.showAppExitDialog;
public class PermissionActivity extends AppCompatActivity {
private Button appPermission;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_permission);
appPermission = findViewById(R.id.app_permission_btn);
appPermission.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ActivityCompat.requestPermissions(PermissionActivity.this, Bware.permission, Bware.PERMISSION_CODE);
}
});
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
if (grantResults[0]==0 && grantResults[1]==0 && grantResults[2]==0) {
if (isLoggedIn()) {
startActivity(new Intent(PermissionActivity.this, DashBoardActivity.class));
finish();
} else {
startActivity(new Intent(PermissionActivity.this, RegisterActivity.class));
finish();
}
}
} else {
if (grantResults[0]==0 && grantResults[1]==0 || grantResults[2]==0) {
if (isLoggedIn()) {
startActivity(new Intent(PermissionActivity.this, DashBoardActivity.class));
finish();
} else {
startActivity(new Intent(PermissionActivity.this, RegisterActivity.class));
finish();
}
}
}
}
private boolean isLoggedIn() {
return new BwareFiles(PermissionActivity.this).getFileLength("User Token");
}
@Override
public void onBackPressed() {
showAppExitDialog(PermissionActivity.this);
}
} |
# Optimize the Memory Usage of a Web Page
The following optimizations can be used to reduce the amount of memory a web page uses:
1. Minify code: Minifying HTML, JavaScript and CSS code removes unnecessary characters such as spaces and line breaks that can increase the overall size of the website.
2. Compress images: Compressing images reduces their file size, meaning that the same image can be loaded faster without reducing the quality of the image.
3. Limit plugin usage: Plugins and browser extensions can add significant bloat to a page, noticeably increasing its size and memory footprint.
4. Use caching: By caching page elements such as images and scripts, your page will load faster and use less memory.
5. Reduce redirects: Keeping redirects to a minimum can reduce the number of requests a page makes which will help reduce the amount of memory the page uses.
6. Use a content delivery network (CDN): A CDN helps to distribute your content over multiple servers which can help reduce the amount of memory the page uses. |
import { Logger } from '@nestjs/common';
import { CommandHandler, EventBus, ICommandHandler } from '@nestjs/cqrs';
import { InjectStripe } from 'nestjs-stripe';
import * as Stripe from 'stripe';
import { SubscriptionCreatedEvent } from '@ultimatebackend/core';
import { ChangeSubscriptionCommand } from '../../impl';
import { ChangeSubscriptionResponse } from '@ultimatebackend/proto-schema/billing';
import { RpcException } from '@nestjs/microservices';
import { subsToProtoStripeSubs } from '../../../../../common';
@CommandHandler(ChangeSubscriptionCommand)
export class ChangeSubscriptionHandler implements ICommandHandler<ChangeSubscriptionCommand> {
  logger = new Logger(this.constructor.name);

  public constructor(
    @InjectStripe() private readonly stripeClient: Stripe,
    private readonly eventBus: EventBus,
  ) {}

  /**
   * Moves a tenant's active Stripe subscription onto a new plan.
   * Throws RpcException on missing input, when no subscription exists for
   * the tenant, or when the tenant is already on the requested plan.
   */
  async execute(command: ChangeSubscriptionCommand): Promise<ChangeSubscriptionResponse> {
    this.logger.log(`Async ${this.constructor.name}...`);
    const { input } = command;

    try {
      if (input.customerId === null) { // Check to make sure input is not null
        throw new RpcException('Current customer id missing');
      }
      if (input.tenantId === null) { // Check to make sure input is not null
        throw new RpcException('Current tenant id missing');
      }

      // Fetch the customer and their active subscriptions in parallel.
      const [customer, subscriptions] = await Promise.all([
        this.stripeClient.customers.retrieve(input.customerId),
        this.stripeClient.subscriptions.list({customer: input.customerId, status: 'active'}),
      ]);

      if (customer === null || subscriptions === null) {
        throw new RpcException('No subscription found for this customer');
      }

      if (subscriptions.data && subscriptions.data.length > 0) {
        // Bug fix: this lookup used `reduce(s => ...)`, which misuses reduce
        // (the first invocation receives the accumulator, not an element);
        // find() is the intended operation.
        const currentSub = subscriptions.data.find((s) => s.metadata.tenantId === input.tenantId);
        if (currentSub) {
          // Bug fix: the predicate used `cur.plan.id = input.planId`
          // (assignment), which always "matched" and mutated the item;
          // it must compare with ===.
          const alreadyOnPlan = currentSub.items.data &&
            currentSub.items.data.some((cur) => cur.plan.id === input.planId);
          if (alreadyOnPlan) {
            throw new RpcException('This tenant is currently on this plan');
          }
          const plan = await this.stripeClient.plans.retrieve(input.planId);
          const sub = await this.stripeClient.subscriptions.update(currentSub.id, {
            items: [{
              plan: plan.id,
            }],
          });
          await this.eventBus.publish(new SubscriptionCreatedEvent(sub));
          return {
            subscription: subsToProtoStripeSubs(sub),
          };
        }
        throw new RpcException('No subscription for this tenant');
      } else {
        throw new RpcException('No subscription for this tenant');
      }
    } catch (error) {
      this.logger.log(error);
      throw new RpcException(error);
    }
  }
}
|
# A scraped book topic/category. Keeps a class-level registry of all topics
# (excluding the "Gift Certificates" store page, which is not a real topic).
class NoStarchPress::Topic
  attr_accessor :name, :url, :books

  @@all = []

  # Bug fix: the +books+ argument was accepted but discarded (@books was
  # always reset to []); it now seeds the collection.
  def initialize(name = nil, url = nil, books = [])
    @name = name
    @url = url
    @books = books
    @@all << self if ( !@@all.include?(self) and self.name != "Gift Certificates" )
  end

  def self.all
    @@all.uniq
  end

  # Associate a book with this topic, claiming ownership if the book has no
  # topic yet; duplicate additions are ignored.
  def add_book(book)
    if book.topic == nil
      book.topic = self
    end
    unless @books.include?(book)
      @books << book
    end
  end
end
<gh_stars>1-10
const responseFunctions = require('./responseFunctions');
const createResponseJson = responseFunctions.responseJson;
const requestError = responseFunctions.errorList;
const generateDataJson = responseFunctions.dataJson;
const generateErrorList = require('./generateErrorList');
// Validate a join POST request and either complete it or report the
// validation errors back to the client.
function handleJoinPostRequest(req, res) {
    const data = generateDataJson(req);
    const errorList = generateErrorList(['ROOM_ENDED'], data);
    if (errorList.length === 0) {
        requestSuccess(data, res);
    } else {
        requestError(errorList, res);
    }
}
// Mark the player ready-to-exit, confirm success to the client, and schedule
// room cleanup once everyone has left or disconnected.
function requestSuccess(data, res) {
    let player = players.player(data.sessionId);
    player.setReady();
    player.setToExit();
    l(player.t(), 'to exit')
    // Bug fix: responseJson was an undeclared (implicit global) variable.
    const responseJson = {status: 'success'};
    // Bug fix: `sessionId` was an undeclared identifier (ReferenceError at
    // runtime); the session id carried in the request data is the intended
    // cookie value.
    res.cookie(values.cookie.SESSION_ID_KEY, data.sessionId, {httpOnly: true}); // , secure: true
    res.json(responseJson);
    rooms.room(data.roomId).deleteRoomIfAllPlayersDisconnectedOrToExitAfterTimeout();
}
module.exports = handleJoinPostRequest;
|
<reponame>3dcitydb/web-feature-service
package vcs.citydb.wfs.kvp;
import net.opengis.fes._2.AbstractQueryExpressionType;
import net.opengis.wfs._2.GetFeatureType;
import org.citygml4j.builder.jaxb.CityGMLBuilder;
import vcs.citydb.wfs.config.WFSConfig;
import vcs.citydb.wfs.exception.KVPParseException;
import vcs.citydb.wfs.exception.WFSException;
import vcs.citydb.wfs.exception.WFSExceptionCode;
import vcs.citydb.wfs.kvp.parser.BigIntegerParser;
import vcs.citydb.wfs.kvp.parser.ResolveValueParser;
import vcs.citydb.wfs.kvp.parser.ResultTypeParser;
import vcs.citydb.wfs.kvp.parser.StringParser;
import javax.xml.bind.JAXBElement;
import javax.xml.validation.Schema;
import java.util.List;
import java.util.Map;
/**
 * Reads a WFS 2.0 GetFeature request from KVP (key-value pair) encoded
 * parameters and maps it onto a {@link GetFeatureType} object.
 */
public class GetFeatureReader extends KVPRequestReader {
    // reads parameters common to all WFS requests (service, version, ...)
    private final BaseRequestReader baseRequestReader;
    // parses the query-related KVP parameters into query expressions
    private final QueryExpressionReader queryExpressionReader;
    /**
     * @param parameters raw KVP parameters of the incoming request
     * @param wfsSchema schema used to validate embedded XML fragments
     * @param cityGMLBuilder JAXB builder for CityGML content
     * @param wfsConfig server configuration
     */
    public GetFeatureReader(Map<String, String> parameters, Schema wfsSchema, CityGMLBuilder cityGMLBuilder, WFSConfig wfsConfig) {
        super(parameters, wfsConfig);
        baseRequestReader = new BaseRequestReader();
        queryExpressionReader = new QueryExpressionReader(
                new net.opengis.wfs._2.ObjectFactory(),
                new net.opengis.fes._2.ObjectFactory(),
                new net.opengis.gml.ObjectFactory(),
                wfsSchema,
                cityGMLBuilder,
                wfsConfig);
    }
    /**
     * Builds the GetFeature request object from the KVP parameters.
     *
     * @return the populated request object
     * @throws WFSException with INVALID_PARAMETER_VALUE when a parameter fails to parse
     */
    @Override
    public GetFeatureType readRequest() throws WFSException {
        GetFeatureType wfsRequest = new GetFeatureType();
        baseRequestReader.read(wfsRequest, parameters);
        try {
            // standard presentation parameters
            if (parameters.containsKey(KVPConstants.START_INDEX))
                wfsRequest.setStartIndex(new BigIntegerParser().parse(KVPConstants.START_INDEX, parameters.get(KVPConstants.START_INDEX)));
            if (parameters.containsKey(KVPConstants.COUNT))
                wfsRequest.setCount(new BigIntegerParser().parse(KVPConstants.COUNT, parameters.get(KVPConstants.COUNT)));
            if (parameters.containsKey(KVPConstants.OUTPUT_FORMAT))
                wfsRequest.setOutputFormat(new StringParser().parse(KVPConstants.OUTPUT_FORMAT, parameters.get(KVPConstants.OUTPUT_FORMAT)));
            if (parameters.containsKey(KVPConstants.RESULT_TYPE))
                wfsRequest.setResultType(new ResultTypeParser().parse(KVPConstants.RESULT_TYPE, parameters.get(KVPConstants.RESULT_TYPE)));
            // standard resolve parameters
            if (parameters.containsKey(KVPConstants.RESOLVE))
                wfsRequest.setResolve(new ResolveValueParser().parse(KVPConstants.RESOLVE, parameters.get(KVPConstants.RESOLVE)));
            if (parameters.containsKey(KVPConstants.RESOLVE_DEPTH))
                // NOTE(review): RESOLVE_DEPTH is parsed as a string, unlike the other
                // numeric parameters — presumably because "*" is a legal depth; confirm.
                wfsRequest.setResolveDepth(new StringParser().parse(KVPConstants.RESOLVE_DEPTH, parameters.get(KVPConstants.RESOLVE_DEPTH)));
            if (parameters.containsKey(KVPConstants.RESOLVE_TIMEOUT))
                wfsRequest.setResolveTimeout(new BigIntegerParser().parse(KVPConstants.RESOLVE_TIMEOUT, parameters.get(KVPConstants.RESOLVE_TIMEOUT)));
        } catch (KVPParseException e) {
            throw new WFSException(WFSExceptionCode.INVALID_PARAMETER_VALUE, e.getMessage(), e.getParameter(), e.getCause());
        }
        // queries
        List<JAXBElement<? extends AbstractQueryExpressionType>> queries = queryExpressionReader.read(parameters, KVPConstants.GET_FEATURE, getNamespaces(), true);
        wfsRequest.getAbstractQueryExpression().addAll(queries);
        return wfsRequest;
    }
    @Override
    public String getOperationName() {
        return KVPConstants.GET_FEATURE;
    }
}
|
package com.androidapp.cachewebviewlib.utils;
import android.content.Context;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
public class AppUtils {
    /**
     * Returns this application's versionCode from its package info.
     * Falls back to 0 (and prints the stack trace) if the package cannot
     * be resolved, which should not happen for the app's own package.
     */
    public static int getVersionCode(Context context) {
        final PackageManager pm = context.getPackageManager();
        try {
            final PackageInfo info = pm.getPackageInfo(context.getPackageName(), 0);
            return info.versionCode;
        } catch (PackageManager.NameNotFoundException e) {
            e.printStackTrace();
            return 0;
        }
    }
}
|
// Copy belows to core/utils/android/jni_helper.h
static jobject GetJUserByCoreUser(const User& user);
static jobjectArray GetJUsersArrayByCoreUsers(const std::vector<std::unique_ptr<User>>& users);
// Copy belows to core/utils/android/jni_helper.cc
// Converts a core User into its Java counterpart via JNI.
// Returns a JNI *local* reference to the new Java User object (the current
// JNI frame / caller is responsible for releasing it), or nullptr if no
// JNIEnv is available for this thread.
jobject JniHelper::GetJUserByCoreUser(const User& user) {
  JNIEnv* env = GetJniEnv();
  if (!env) {
    sakura::log_error("Failed to get JNIEnv");
    return nullptr;
  }
  // Cached User class; constructor signature is
  // (String uid, String username, String displayName, int status).
  jclass userJclass = JniReferenceCache::SharedCache()->user_jclass();
  jmethodID userConstructorMethodID = env->GetMethodID(userJclass, "<init>", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;I)V");
  jstring jUid = env->NewStringUTF(user.uid().c_str());
  jstring jUsername = env->NewStringUTF(user.username().c_str());
  jstring jDisplayName = env->NewStringUTF(user.display_name().c_str());
  jint jStatus = static_cast<jint>(user.status());
  jobject jUserObject = env->NewObject(userJclass,
                                       userConstructorMethodID,
                                       jUid,
                                       jUsername,
                                       jDisplayName,
                                       jStatus);
  // Release the temporary string locals; jUserObject itself is returned.
  env->DeleteLocalRef(jUid);
  env->DeleteLocalRef(jUsername);
  env->DeleteLocalRef(jDisplayName);
  return jUserObject;
}
// Converts a vector of core Users into a Java User[] via JNI.
// Returns a JNI local reference to the array, or nullptr if no JNIEnv is
// available (mirrors GetJUserByCoreUser's failure behavior).
jobjectArray JniHelper::GetJUsersArrayByCoreUsers(const std::vector<std::unique_ptr<User>>& users) {
  JNIEnv* env = GetJniEnv();
  if (!env) {
    // BUG FIX: the original called env->NewObjectArray(...) on the null env
    // here, which is a guaranteed null-pointer dereference.
    sakura::log_error("Failed to get JNIEnv");
    return nullptr;
  }
  jclass userJclass = JniReferenceCache::SharedCache()->user_jclass();
  jobjectArray jUsers = env->NewObjectArray(static_cast<jsize>(users.size()), userJclass, NULL);
  jsize index = 0;
  for (const auto& user : users) {
    jobject jUser = GetJUserByCoreUser(*user);
    env->SetObjectArrayElement(jUsers, index, jUser);
    // Drop the per-element local ref immediately to avoid exhausting the
    // local reference table on large user lists.
    env->DeleteLocalRef(jUser);
    ++index;
  }
  return jUsers;
}
|
<filename>app/js/nico-mylist.js<gh_stars>0
const cheerio = require("cheerio");
const fs = require("fs");
const path = require("path");
const { NicoClientRequest } = require("./nico-client-request");
const { NICO_URL } = require("./nico-url");
const { CacheStore } = require("./cache-store");
const { logger } = require("./logger");
/**
 * Fetches a Niconico mylist via its RSS endpoint and parses it into a
 * plain object (see NicoMylistReader).
 */
class NicoMylist {
    constructor(){
        this._req = null;
        this.reader = new NicoMylistReader();
        this.mylist = null;
        this.xml = null;
    }

    /** Aborts the in-flight HTTP request, if any. */
    cancel(){
        if (this._req !== null) {
            this._req.cancel();
        }
    }

    /**
     * Downloads and parses the mylist; the result is also kept on `this.mylist`.
     * @param {string} mylist_id e.g. "mylist/123" or "user/123"
     */
    async getMylist(mylist_id){
        await this.requestXML(mylist_id);
        const parsed = this.reader.parse(mylist_id, this.xml);
        this.mylist = parsed;
        return parsed;
    }

    /** Downloads the RSS XML; the result is also kept on `this.xml`. */
    async requestXML(mylist_id){
        this.xml = await this._requestXML(this._getURL(mylist_id));
        return this.xml;
    }

    _requestXML(url){
        const req = new NicoClientRequest();
        this._req = req;
        return req.get(url);
    }

    /**
     * Builds the RSS URL for a mylist or user id.
     * @throws {Error} when the id matches neither "mylist/N" nor "user/N"
     */
    _getURL(mylist_id){
        const sort = 6; // 投稿が新しい順 (newest first)
        const suffix = /^mylist\/\d+$/.test(mylist_id) ? ""
            : /^user\/\d+$/.test(mylist_id) ? "/video"
            : null;
        if (suffix === null) {
            throw new Error(`fault NicoMylist._getURL mylist_id=${mylist_id}`);
        }
        return `${NICO_URL.VIDEO}/${mylist_id}${suffix}?rss=2.0&numbers=1&sort=${sort}`;
    }
}
/**
 * Parses the RSS 2.0 XML of a Niconico mylist into a plain object
 * { title, mylist_id, link, creator, description, items }.
 */
class NicoMylistReader {
    /**
     * @param {string} mylist_id id the XML was fetched for
     * @param {string} xml RSS 2.0 document
     * @throws {Error} "empty" when required fields are missing
     */
    parse(mylist_id, xml){
        const $doc = cheerio.load(xml, {xmlMode: true});
        const mylist = {
            title: $doc("channel > title").text(),
            mylist_id: mylist_id,
            link: $doc("channel > link").text(),
            creator: $doc("channel > dc\\:creator").text(),
            description: $doc("channel > description").text(),
            items: this._readItems($doc),
        };
        if(!this._isCorrect(mylist)){
            throw new Error("empty");
        }
        return mylist;
    }
    /** Collects the per-video entries from the channel's <item> elements. */
    _readItems($doc){
        const items = [];
        $doc("channel > item").each((index, el) => {
            const entry = $doc(el);
            const link = entry.find("link").text();
            const info = this._parseCDATA(entry.find("description").text());
            items.push({
                no: index + 1,
                title: entry.find("title").text(),
                id: this._getVideoIDFromLink(link),
                link: link,
                description: info.memo,
                thumb_img: info.thumbnail_src,
                length: info.length,
                date: info.date,
            });
        });
        return items;
    }
    /**
     * Extracts the video id (last path segment, query string stripped).
     * @param {String} link
     */
    _getVideoIDFromLink(link){
        const last = link.split("/").pop();
        const q = last.indexOf("?");
        return q === -1 ? last : last.slice(0, q);
    }
    /** Parses the HTML fragment embedded in an item's <description> CDATA. */
    _parseCDATA(xml){
        const $frag = cheerio.load(xml, {xmlMode: true});
        return {
            memo: $frag(".nico-memo").text(),
            thumbnail_src: $frag(".nico-thumbnail > img").attr("src"),
            length: $frag(".nico-info-length").text(),
            date: $frag(".nico-info-date").text(),
        };
    }
    /** Truthy when the mylist and every item carry all required fields. */
    _isCorrect(mylist){
        const itemComplete = (item) =>
            item.title
            && item.id
            && item.link
            && item.thumb_img
            && item.length
            && item.date;
        return mylist.title
            && mylist.mylist_id
            && mylist.link
            && mylist.creator
            && mylist.items.every((item) => itemComplete(item));
    }
}
/**
 * Persists mylist RSS XML files on disk and reads them back.
 */
class NicoMylistStore {
    /**
     * @param get_dir_path directory in which the XML files live.
     *     NOTE(review): despite the getter-like name this is used as a plain
     *     value (`this.get_dir_path`), never called — confirm with callers.
     */
    constructor(get_dir_path){
        this.get_dir_path = get_dir_path;
        this.reader = new NicoMylistReader();
    }
    /**
     * Loads and parses the saved XML for mylist_id.
     * @throws if the file is missing or the XML is incomplete
     */
    load(mylist_id){
        // renamed from `path`, which shadowed the `path` module in this scope
        const file_path = this._getFilePath(mylist_id);
        const xml = fs.readFileSync(file_path, "utf-8");
        return this.reader.parse(mylist_id, xml);
    }
    /** Deletes the saved XML; a missing file is ignored (best effort). */
    delete(mylist_id){
        try {
            const file_path = this._getFilePath(mylist_id);
            fs.unlinkSync(file_path);
        } catch (error) {
            logger.debug(`NicoMylistStore: delete mylistid=${mylist_id}, ${error}`);
        }
    }
    /** Writes the XML, creating the target directory if needed. */
    save(mylist_id, xml){
        const file_path = this._getFilePath(mylist_id);
        const dir = path.dirname(file_path);
        // FIX: the previous statSync-then-mkdirSync was racy and could not
        // create nested directories; recursive mkdir is idempotent and
        // handles both cases.
        fs.mkdirSync(dir, { recursive: true });
        fs.writeFileSync(file_path, xml, "utf-8");
    }
    /** Maps an id like "mylist/123" to "<dir>/mylist-123.xml". */
    _getFilePath(mylist_id){
        const dir = this.get_dir_path;
        // ids look like "mylist/N" or "user/N", so a single slash is expected
        const fname = mylist_id.replace("/", "-");
        return path.join(dir, `${fname}.xml`);
    }
}
// Caches mylist thumbnail images as base64 data URLs so locally saved mylists
// can render thumbnails without re-fetching. One CacheStore file per mylist id.
// NOTE(review): uses Image/document/canvas, so this must run in a browser-like
// context (presumably the Electron renderer) — confirm.
class NicoMylistImageCache {
    constructor(dir_path){
        // directory where per-mylist image cache files are written
        this._dir_path = dir_path;
        /** @type Map<string, CacheStore> */
        this._map = new Map();
        // mylist ids that exist locally; only these are cached and persisted
        this._exist_local_id_list = [];
    }
    // Replaces the list of locally saved mylist ids.
    setExistLocalIDList(mylist_id_list){
        this._exist_local_id_list = mylist_id_list;
    }
    // Returns an <img> HTML string for the thumbnail at url.
    // Locally saved mylists are served from (and lazily added to) the cache;
    // everything else gets a plain remote <img> tag.
    getImageHtml(mylist_id, url){
        if(this._existLocal(mylist_id)){
            const image = new Image();
            if(this._has(mylist_id, url)){
                // TODO <img src="${url}" ?
                // cache hit: src becomes the stored data URL
                image.src = this._map.get(mylist_id).get(url);
            }else{
                if(this._isImgData(url)){
                    // already an inline data URL; nothing to cache
                    image.src = url;
                }else{
                    // cache the image once the browser has loaded it
                    image.onload = (e) => {
                        const data = this._getBase64(e.target);
                        this._set(mylist_id, url, data);
                    };
                    image.src = url;
                }
            }
            image.classList.add("gridtable-thumbnail", "mylist-img");
            return image.outerHTML;
        }else{
            return `<img src="${url}" class="gridtable-thumbnail mylist-img"/>`;
        }
    }
    // Stores an already rendered <img> into the cache for mylist_id.
    // No-op when the src is already a data URL, already cached, or the image
    // has not finished loading (naturalWidth/Height are 0 until then).
    setImage(mylist_id, img){
        this.loadCache(mylist_id);
        const url = img.src;
        if(this._isImgData(url)){
            return;
        }
        if(this._has(mylist_id, url) === true){
            return;
        }
        if(img.naturalWidth > 0 && img.naturalHeight > 0){
            if(this._map.has(mylist_id) === false){
                const cache_store = this._createCacheStore(mylist_id);
                this._map.set(mylist_id, cache_store);
            }
            const data = this._getBase64(img);
            this._set(mylist_id, url, data);
        }
    }
    // Persists every cache store that belongs to a locally saved mylist.
    save(){
        this._map.forEach((cache_store, mylist_id) => {
            try {
                if(this._existLocal(mylist_id) === true){
                    cache_store.save();
                }
            } catch (error) {
                // best effort: a failed save only loses cached thumbnails
                logger.debug(`NicoMylistImageCache: save mylistid=${mylist_id}, ${error}`);
            }
        });
    }
    // Drops the in-memory cache and deletes the cache file for mylist_id.
    delete(mylist_id){
        this._map.delete(mylist_id);
        try {
            const file_path = path.join(this._dir_path,this._getFileName(mylist_id));
            fs.unlinkSync(file_path);
        } catch(error) {
            // best effort: the cache file may simply not exist
            logger.debug(`NicoMylistImageCache: delete mylistid=${mylist_id}, ${error}`);
        }
    }
    // True when url is already an inline data URL.
    _isImgData(url){
        return url.startsWith("data:image/");
    }
    // With url: is that url cached for mylist_id?
    // Without url: is there any cache entry for mylist_id at all?
    _has(mylist_id, url){
        const has_mylist = this._map.has(mylist_id);
        if(!url){
            return has_mylist;
        }
        if(has_mylist === true){
            return this._map.get(mylist_id).has(url);
        }
        return false;
    }
    _set(mylist_id, url, data){
        this._map.get(mylist_id).set(url, data);
    }
    // Loads the cache file of a locally saved mylist into memory (idempotent).
    loadCache(mylist_id){
        if(this._existLocal(mylist_id) === false){
            return;
        }
        if(this._map.has(mylist_id)===true){
            return;
        }
        const cache_store = this._createCacheStore(mylist_id);
        try {
            cache_store.load();
        } catch (error) {
            // a missing or corrupt cache file just means we start empty
            logger.debug(`NicoMylistImageCache: load mylistid=${mylist_id}, ${error}`);
        }
        this._map.set(mylist_id, cache_store);
    }
    _createCacheStore(mylist_id){
        return new CacheStore(this._dir_path, this._getFileName(mylist_id));
    }
    // "mylist/123" -> "mylist-123-img.json"
    _getFileName(mylist_id){
        const fname = mylist_id.replace("/", "-");
        return `${fname}-img.json`;
    }
    _existLocal(mylist_id){
        return this._exist_local_id_list.includes(mylist_id);
    }
    // Draws the loaded <img> onto an offscreen canvas and returns it encoded
    // as a JPEG data URL.
    _getBase64(img){
        const width = img.naturalWidth;
        const height = img.naturalHeight;
        const canvas = document.createElement("canvas");
        canvas.width = width;
        canvas.height = height;
        const ctx = canvas.getContext("2d");
        ctx.drawImage(img, 0, 0, width, height);
        const data = canvas.toDataURL("image/jpeg");
        return data;
    }
}
// Public API of this module.
module.exports = {
    NicoMylist,
    NicoMylistReader,
    NicoMylistStore,
    NicoMylistImageCache
};
<reponame>zhoujiagen/learning-algorithms
/*
* SimpleTreeTest.java
* JUnit based test
*
* Created on March 9, 2007, 11:45 PM
*/
package tamp.ch15.priority.priority;
import junit.framework.TestCase;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
/**
* @author mph
*/
/**
 * Exercises SimpleTree&lt;Integer&gt; as a priority queue: a sequential
 * add/removeMin test, parallel adds with sequential removal, and parallel
 * adds followed by parallel removal.
 *
 * @author mph
 */
public class SimpleTreeTest extends TestCase {
    static final int LOG_RANGE = 6;     // priorities are drawn from [0, 2^LOG_RANGE)
    static final int TEST_SIZE = 64;
    static Random random = new Random();
    private final static int THREADS = 8;
    private final static int PER_THREAD = TEST_SIZE / THREADS;
    SimpleTree<Integer> instance;
    Thread[] thread = new Thread[THREADS];

    public SimpleTreeTest(String testName) {
        super(testName);
    }

    protected void setUp() throws Exception {
    }

    protected void tearDown() throws Exception {
    }

    /**
     * Sequential add/removeMin: priorities must come out in non-descending order.
     */
    public void testAdd() {
        System.out.println("sequential test");
        Map<Integer, Integer> log = new HashMap<Integer, Integer>();
        int priority, value;
        SimpleTree<Integer> instance = new SimpleTree<Integer>(LOG_RANGE);
        int range = (1 << LOG_RANGE);
        for (int i = 0; i < TEST_SIZE; i++) {
            priority = random.nextInt(range);
            value = random.nextInt();
            // NOTE(review): if random.nextInt() repeats a value, the map entry is
            // overwritten and the validation below may read a stale priority.
            log.put(value, priority);
            instance.add(value, priority);
        }
        // validate: removed priorities must be non-descending
        int oldKey = -1;
        for (int i = 0; i < TEST_SIZE; i++) {
            value = instance.removeMin();
            priority = log.get(value);
            if (oldKey > priority) {
                fail("non-ascending keys!");
            }
            // BUG FIX: oldKey was never updated, so the ordering check above
            // could never fire (compare testParallelAdd, which updates oldValue).
            oldKey = priority;
        }
        System.out.println("OK.");
    }

    /**
     * Parallel adds, sequential removeMin.
     *
     * @throws java.lang.Exception
     */
    public void testParallelAdd() throws Exception {
        System.out.println("testParallelAdd");
        int value;
        instance = new SimpleTree<Integer>(LOG_RANGE);
        for (int i = 0; i < THREADS; i++) {
            thread[i] = new AddThread(i * PER_THREAD);
        }
        for (int i = 0; i < THREADS; i++) {
            thread[i].start();
        }
        for (int i = 0; i < THREADS; i++) {
            thread[i].join();
        }
        // values double as priorities here, so removal order must be ascending
        int oldValue = -1;
        for (int i = 0; i < TEST_SIZE; i++) {
            value = instance.removeMin();
            if (oldValue > value) {
                fail("non-ascending keys!");
            }
            oldValue = value;
        }
        System.out.println("OK.");
    }

    /**
     * Parallel adds, then parallel removeMin; each remover checks that the
     * values it personally removed are non-descending.
     *
     * @throws java.lang.Exception
     */
    public void testParallelBoth() throws Exception {
        System.out.println("testParallelBoth");
        instance = new SimpleTree<Integer>(LOG_RANGE);
        for (int i = 0; i < THREADS; i++) {
            thread[i] = new AddThread(i * PER_THREAD);
        }
        for (int i = 0; i < THREADS; i++) {
            thread[i].start();
        }
        for (int i = 0; i < THREADS; i++) {
            thread[i].join();
        }
        for (int i = 0; i < THREADS; i++) {
            thread[i] = new RemoveMinThread();
        }
        for (int i = 0; i < THREADS; i++) {
            thread[i].start();
        }
        for (int i = 0; i < THREADS; i++) {
            thread[i].join();
        }
        System.out.println("OK.");
    }

    /** Adds PER_THREAD consecutive values (value == priority) starting at base. */
    class AddThread extends Thread {
        int base;

        AddThread(int i) {
            base = i;
        }

        public void run() {
            for (int i = 0; i < PER_THREAD; i++) {
                int x = base + i;
                instance.add(x, x);
            }
        }
    }

    /** Removes PER_THREAD items and checks its own removals are non-descending. */
    class RemoveMinThread extends Thread {
        int last;

        RemoveMinThread() {
            last = Integer.MIN_VALUE;
        }

        public void run() {
            for (int i = 0; i < PER_THREAD; i++) {
                int x = instance.removeMin();
                if (x < last) {
                    fail("non-ascending priorities: " + last + ", " + x);
                }
                last = x;
            }
        }
    }
}
|
<reponame>undef-thesis/BT.Web<gh_stars>0
import { Component, OnInit } from '@angular/core';
import { FormBuilder, FormGroup } from '@angular/forms';
import { Profile } from 'src/app/core/models/Profile';
import { UserProfileService } from 'src/app/core/services/user-profile.service';
/**
 * Profile "personal data" form: lets the user edit and submit their
 * first and last name.
 */
@Component({
  selector: 'app-personal',
  templateUrl: './personal.component.html',
  styleUrls: ['./personal.component.scss'],
})
export class PersonalComponent implements OnInit {
  // Reactive form holding the editable profile fields.
  public profileForm: FormGroup;
  // True while the update request is in flight.
  public isLoading: boolean = false;
  // Last error payload returned by the API, if any.
  public apiError: string;
  constructor(
    private userProfileService: UserProfileService,
    private formBuilder: FormBuilder
  ) {}
  ngOnInit(): void {
    // Build the form with empty initial values; the template binds to these controls.
    this.profileForm = this.formBuilder.group({
      firstname: [''],
      lastname: [''],
    });
  }
  /** Sends the current form values to the profile service. */
  public onSubmit(): void {
    this.isLoading = true;
    const profile = new Profile(this.f.firstname.value, this.f.lastname.value);
    this.userProfileService
      .updateProfileData(profile)
      .subscribe(
        () => {},
        (error) => {
          // NOTE(review): assumes error.error is a displayable string — confirm API shape.
          this.apiError = error.error;
        }
      )
      // Runs when the subscription finalizes (success or error): clear the flag.
      .add(() => {
        this.isLoading = false;
      });
  }
  /** Convenience accessor for the form's controls. */
  get f() {
    return this.profileForm.controls;
  }
}
|
#!/bin/bash
# CI setup: install Go (via gimme), protoc, protoc plugins, and EmbeddedProto.
# Fail fast on any error or pipeline failure (no -u: the sourced gimme env
# file may reference variables this script does not set).
set -eo pipefail

eval "$(gimme "$GO_VERSION")"
source ~/.gimme/envs/go"$GO_VERSION".env

wget "https://github.com/protocolbuffers/protobuf/releases/download/v${PROTOBUF_VERSION}/protoc-${PROTOBUF_VERSION}-linux-x86_64.zip"
unzip "protoc-${PROTOBUF_VERSION}-linux-x86_64.zip" -d protobuf

go install google.golang.org/protobuf/cmd/protoc-gen-go@latest
# FIX: 'go get -u pkg@version' no longer installs binaries (Go 1.18+);
# 'go install pkg@version' is the supported way to install a tool.
go install github.com/pseudomuto/protoc-gen-doc/cmd/protoc-gen-doc@v1.5.0

git clone --recursive https://github.com/koinos/EmbeddedProto
pushd EmbeddedProto
git checkout "$PB_EMBEDDED_CPP_VERSION"
./setup.sh
pwd
popd
|
<filename>src/main/java/com/magicminer/ai/constant/ConstantAlgorithm.java
package com.magicminer.ai.constant;
import com.magicminer.ai.Algorithm;
import com.magicminer.model.LotoFacilGame;
import java.util.Random;
/**
 * Algorithm that generates one random LotoFacil game at construction time
 * and returns a fresh copy of that same game on every run.
 */
public class ConstantAlgorithm implements Algorithm<LotoFacilGame> {
    private LotoFacilGame constantGame;

    private ConstantAlgorithm() {
    }

    /**
     * Picks exactly VALID_GAME_SIZE balls out of GAME_SIZE positions using
     * the supplied random source.
     */
    private ConstantAlgorithm(Random random) {
        int count = 0;
        Boolean[] balls = new Boolean[LotoFacilGame.GAME_SIZE];
        for (int i = 0; i < LotoFacilGame.GAME_SIZE; i++) {
            boolean value = false;
            // Pick this ball if more are still needed and either we must
            // (remaining picks == remaining positions) or the coin flip says so.
            if (count < LotoFacilGame.VALID_GAME_SIZE
                    && (LotoFacilGame.VALID_GAME_SIZE - count >= LotoFacilGame.GAME_SIZE - i
                        || random.nextBoolean())) {
                value = true;
                count++;
            }
            balls[i] = value;
        }
        this.constantGame = LotoFacilGame.createByBalls(balls);
    }

    /** Creates an instance backed by an unseeded random source. */
    public static ConstantAlgorithm create() {
        return new ConstantAlgorithm(new Random());
    }

    /** Creates a reproducible instance from the given seed. */
    public static ConstantAlgorithm createBySeed(long seed) {
        // new Random(seed) is equivalent to new Random() followed by setSeed(seed)
        return new ConstantAlgorithm(new Random(seed));
    }

    @Override
    public LotoFacilGame run() {
        // Return a fresh copy so callers cannot mutate the cached game.
        return LotoFacilGame.createByBalls(this.constantGame.getBalls());
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.