text
stringlengths
1
1.05M
package org.rs2server.rs2.model.player.pc;

import com.google.common.collect.Lists;
import org.rs2server.rs2.Constants;
import org.rs2server.rs2.content.TimedPunishment;
import org.rs2server.rs2.model.Location;
import org.rs2server.rs2.model.npc.pc.*;
import org.rs2server.rs2.model.World;
import org.rs2server.rs2.model.player.Player;
import org.rs2server.rs2.tickable.StoppingTick;
import org.rs2server.rs2.util.Misc;
import org.rs2server.util.functional.Streamable;

import javax.annotation.Nonnull;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;

/**
 * Represents a single game of pest control.
 *
 * @author twelve
 */
public final class PestControlInstance implements Streamable<Player> {

	public static final Location START_LOCATION_BASE = Location.create(2657, 2612);
	public static final long CAPACITY = 25;

	/** Minimum damage ("hits_dealt") a player must have dealt to earn points. */
	private static final int REWARD_DAMAGE_THRESHOLD = 50;

	private final Set<Player> players;
	private final PestControlPortal bluePortal;
	private final PestControlPortal redPortal;
	private final PestControlPortal purplePortal;
	private final PestControlPortal yellowPortal;
	private final PestControlBoat boat;
	private final VoidKnight knight;
	private boolean destroyed;

	public PestControlInstance(PestControlBoat boat, Set<Player> players) {
		this.boat = boat;
		this.players = players;
		this.bluePortal = PestControlPortal.in(this, PortalCardinality.BLUE, 1744, 1748);
		this.redPortal = PestControlPortal.in(this, PortalCardinality.RED, 1746, 1750);
		this.purplePortal = PestControlPortal.in(this, PortalCardinality.PURPLE, 1743, 1747);
		this.yellowPortal = PestControlPortal.in(this, PortalCardinality.YELLOW, 1745, 1749);
		this.knight = VoidKnight.in(this);
		players.forEach(p -> p.setPestControlInstance(this));
	}

	public List<PestControlPortal> getPortals() {
		return Lists.newArrayList(bluePortal, redPortal, purplePortal, yellowPortal);
	}

	public Set<Player> getPlayers() {
		return players;
	}

	/** Broadcasts a chat-box message to every player in this game. */
	public void sendMessage(@Nonnull String message) {
		stream().map(Player::getActionSender).forEach(a -> a.sendMessage(message));
	}

	@Override
	public Stream<Player> stream() {
		return players.stream();
	}

	/**
	 * Begins the game: teleports players to the arena, opens the overlay,
	 * schedules the portal-shield drops and registers the NPCs.
	 */
	public void start() {
		List<PestControlPortal> portals = getPortals();
		players.forEach(p -> {
			p.setTeleportTarget(START_LOCATION_BASE.transform(Misc.random(0, 1), Misc.random(0, 1), 0));
			portals.stream().map(PestControlPortal::getCardinality)
					.forEach(c -> p.getActionSender().sendInterfaceConfig(408, c.getShieldChild(), false));
			p.getActionSender().sendWalkableInterface(408);
		});
		// Shields drop in a random order at fixed tick offsets.
		Collections.shuffle(portals);
		World.getWorld().submit(new ShieldDownTick(10, this, portals.get(0)));
		World.getWorld().submit(new ShieldDownTick(40, this, portals.get(1)));
		World.getWorld().submit(new ShieldDownTick(60, this, portals.get(2)));
		World.getWorld().submit(new ShieldDownTick(80, this, portals.get(3)));
		portals.forEach(PestControlPortal::register);
		knight.register();
	}

	/**
	 * Ends the game: pays out every player (dead players are paid half after a
	 * short delay), tears down the portal NPCs and marks the instance destroyed.
	 */
	public void endGame() {
		players.forEach(p -> {
			if (p.getCombatState().isDead()) {
				// Dead players are released a few ticks later and earn half points.
				World.getWorld().submit(new StoppingTick(10) {
					@Override
					public void executeAndStop() {
						finishPlayer(p, 5);
					}
				});
			} else {
				finishPlayer(p, 10);
			}
		});
		getPortals().forEach(i -> i.getNpcs().forEach(PestControlNpc::unregister));
		getPortals().forEach(PestControlPortal::unregister);
		knight.unregister();
		this.destroyed = true;
	}

	/**
	 * Resets a single player after the game and awards points if they dealt
	 * enough damage. Extracted from endGame(), which previously duplicated this
	 * for dead/alive players; the dead-player copy also claimed "10" points in
	 * its message while awarding 5 — the message now reflects the real amount.
	 *
	 * @param p          the player to reset and (possibly) reward
	 * @param basePoints points before the {@link Constants#PEST_MODIFIER} multiplier
	 */
	private void finishPlayer(Player p, int basePoints) {
		p.getActionSender().removeWalkableInterface();
		p.getActionQueue().clearAllActions();
		p.setTeleportTarget(boat.getExit());
		p.resetVariousInformation();
		if (p.hasAttribute("hits_dealt") && (int) p.getAttribute("hits_dealt") >= REWARD_DAMAGE_THRESHOLD) {
			p.getActionSender().sendMessage(
					"You have been rewarded " + basePoints + " Pest Control Points for your valiant effort.");
			int points = p.getDatabaseEntity().getStatistics().getPestControlPoints();
			p.getDatabaseEntity().getStatistics().setPestControlPoints(points + basePoints * Constants.PEST_MODIFIER);
		} else {
			p.getActionSender().sendMessage("You have not fought hard enough and proven yourself to earn anything.");
		}
		p.removeAttribute("hits_dealt");
	}

	// TODO confirm: intentionally empty in the original — presumably a hook for
	// removing a single player mid-game that was never implemented.
	public void destroyPlayer(@Nonnull Player p) {
	}

	public boolean isDestroyed() {
		return destroyed;
	}

	/**
	 * Per-tick overlay refresh: knight health, the player's damage tally
	 * (green once reward-eligible), remaining time and portal health.
	 */
	public void tick() {
		stream().forEach(a -> {
			a.getActionSender().sendString(408, VoidKnight.CHILD, "200");
			int damage = (a.getAttribute("hits_dealt") == null ? 0 : (int) a.getAttribute("hits_dealt"));
			// Was 'damage > 50', inconsistent with the >= 50 reward check in
			// finishPlayer(); highlight green at exactly the reward threshold.
			a.getActionSender().sendString(408, 4,
					damage >= REWARD_DAMAGE_THRESHOLD ? ("<col=00FF00>" + damage) : damage + "");
			a.getActionSender().sendString(408, 2,
					TimeUnit.MILLISECONDS.toMinutes((boat.getGameTimeRemaining() * 600)) + " mins");
			getPortals().stream().forEach(p -> a.getActionSender().sendString(408,
					p.getCardinality().getHealthChild(), "" + (p.isDestroyed() ? 0 : p.getSkills().getLevel(3))));
		});
		if (allPortalsDead()) {
			World.getWorld().submit(new StoppingTick(4) {
				@Override
				public void executeAndStop() {
					getBoat().endGame();
				}
			});
		}
	}

	public boolean allPortalsDead() {
		return (bluePortal.isDestroyed() && redPortal.isDestroyed()
				&& yellowPortal.isDestroyed() && purplePortal.isDestroyed());
	}

	public VoidKnight getKnight() {
		return knight;
	}

	public PestControlBoat getBoat() {
		return boat;
	}
}
package org.slos.battle.abilities.attack;

import org.slos.battle.abilities.Ability;
import org.slos.battle.abilities.AbilityClassification;
import org.slos.battle.abilities.AbilityEffect;
import org.slos.battle.abilities.AbilityType;
import org.slos.battle.abilities.rule.ReduceHalfDamageRule;
import org.slos.battle.abilities.rule.attack.DamageRule;
import org.slos.battle.abilities.rule.target.TargetRuleset;
import org.slos.splinterlands.domain.monster.DamageType;

/**
 * The SHIELD ability: classified as an ATTACK ability that targets its own
 * monster and applies a {@link ReduceHalfDamageRule} covering ATTACK and
 * RANGED damage types.
 */
public class ShieldAbility extends Ability implements AbilityEffect {

	public ShieldAbility() {
		super(AbilityType.SHIELD, AbilityClassification.ATTACK);
	}

	/** Shield always applies to the ability's owner. */
	@Override
	public TargetRuleset getTargetRuleset() {
		return TargetRuleset.SELF;
	}

	/** @return a rule halving incoming ATTACK and RANGED damage. */
	@Override
	public DamageRule getEffect() {
		return new ReduceHalfDamageRule(DamageType.ATTACK, DamageType.RANGED);
	}
}
// Test bootstrap: wires chai-http into chai, starts the app server, and
// exports a ready-to-use HTTP request agent for the test suites.
import * as chai from 'chai';
import chaiHttp from 'chai-http';
import server from '../../app/server';
// Imported purely for its side effect (establishes the database connection).
import '../../app/database';

chai.use(chaiHttp);

// Shared request agent bound to the running server.
export default chai.request(server);
// NOTE(review): this chunk is three separate Java source files concatenated
// (ValidNome.java, ValidNomeValidator.java, Funcionario.java) — they must live
// in separate files to compile; package declarations appear to be missing.

import javax.validation.Constraint;
import javax.validation.Payload;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Bean Validation constraint: the annotated String must be 3–50 characters
 * long (see {@link ValidNomeValidator}).
 */
@Target({ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER, ElementType.ANNOTATION_TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Constraint(validatedBy = ValidNomeValidator.class)
@Documented
public @interface ValidNome {
    String message() default "Nome deve ter entre 3 e 50 caracteres";
    Class<?>[] groups() default {};
    Class<? extends Payload>[] payload() default {};
}

import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;

/** Validator for {@link ValidNome}: non-null and length in [3, 50]. */
public class ValidNomeValidator implements ConstraintValidator<ValidNome, String> {
    @Override
    public void initialize(ValidNome constraintAnnotation) {
    }

    // NOTE(review): rejecting null here means @ValidNome also implies
    // non-null; Bean Validation convention usually treats null as valid and
    // leaves null-checking to @NotNull — confirm this is intentional.
    @Override
    public boolean isValid(String value, ConstraintValidatorContext context) {
        return value != null && value.length() >= 3 && value.length() <= 50;
    }
}

import javax.persistence.Entity;
import javax.persistence.Table;
import javax.validation.constraints.Size;
import java.io.Serializable;

/**
 * Entity representing an employee ("Funcionário") of the application.
 */
@Entity
@Table(name = "funcionario")
public class Funcionario implements Serializable {
    private static final long serialVersionUID = -3942289873349653141L;

    // Name, constrained to 3–50 characters by the custom annotation above.
    @ValidNome
    private String nome;

    // Other fields and methods
}
/*global describe, it, beforeEach, expect, spyOn*/
/*eslint camelcase: 0, no-invalid-this: 0 */

const DrawCard = require('../../../server/game/drawcard.js');

// Specs for DrawCard.useStealthToBypass(): a source card with the 'Stealth'
// keyword may bypass a target whose canBeBypassedByStealth() returns true;
// on success the target is flagged and the source records its stealth target.
describe('the DrawCard', function() {
    describe('the useStealthToBypass() function', function() {
        // Builds a bare card whose canBeBypassedByStealth() is stubbed to true
        // (individual specs re-stub it to false where needed).
        function createCard() {
            let card = new DrawCard({}, {});
            spyOn(card, 'canBeBypassedByStealth').and.returnValue(true);
            return card;
        }

        describe('when the card does not have stealth', function() {
            beforeEach(function() {
                this.source = createCard();
                this.target = createCard();
            });

            it('should return false.', function() {
                expect(this.source.useStealthToBypass(this.target)).toBe(false);
            });
        });

        describe('when the card has stealth and the target does not', function() {
            beforeEach(function() {
                this.source = createCard();
                this.source.addKeyword('Stealth');
                this.target = createCard();
            });

            it('should return true.', function() {
                expect(this.source.useStealthToBypass(this.target)).toBe(true);
            });

            it('should mark the target card as being bypassed', function() {
                this.source.useStealthToBypass(this.target);
                expect(this.target.stealth).toBe(true);
            });

            it('should set the stealth target on the source card', function() {
                this.source.useStealthToBypass(this.target);
                expect(this.source.stealthTarget).toBe(this.target);
            });
        });

        describe('when the target cannot be bypassed', function() {
            beforeEach(function() {
                this.source = createCard();
                this.source.addKeyword('Stealth');
                this.target = createCard();
                this.target.canBeBypassedByStealth.and.returnValue(false);
            });

            it('should return false', function() {
                expect(this.source.useStealthToBypass(this.target)).toBe(false);
            });

            it('should not mark the target card as being bypassed', function() {
                this.source.useStealthToBypass(this.target);
                expect(this.target.stealth).toBeFalsy();
            });

            it('should not set the stealth target on the source card', function() {
                this.source.useStealthToBypass(this.target);
                expect(this.source.stealthTarget).toBeUndefined();
            });
        });
    });
});
# Build a small sample mapping; len() on a dict counts its key/value pairs.
dictionary = dict(a=1, b=2, c=3)

# Report how many entries the dictionary holds.
print(len(dictionary))
const crypto = require("crypto");
require("../../../psknode/bundles/pskruntime");
const doubleCheck = require('../../double-check');
const assert = doubleCheck.assert;

// Integration test: a channel with forwarding enabled must accept messages
// but refuse direct receives (409).
function mainTest(api, finishTest) {
    // Fix: channelName was assigned without let/const, leaking an implicit
    // global (and a ReferenceError under strict mode).
    const channelName = crypto.randomBytes(24).toString('hex');
    api.createForwardChannel(channelName, "publicKey", (res) => {
        assert.equal(res.statusCode, 200);
        let token = res.headers["tokenHeader"];
        assert.notNull(token);

        let message = api.generateMessage();
        let OwM = require("./../../swarmutils").OwM;
        message = OwM.prototype.convert(message);

        api.sendMessage(channelName, message, "signature", (res) => {
            assert.equal(res.statusCode, 200);
            api.receiveMessage(channelName, "signature", (err, res) => {
                // request is failling and IT SHOULD!!!
                assert.equal(res.statusCode, 409, "Should not be able to get message from channel that is forwarded");
                finishTest();
            });
        });
    });
}

let timeout = 10000;
let testName = "Failing to retrive a message from a channel that has forward to zeromq enable";

require("./Utils/TestInfrastructureUtils").createInfrastructureTest(testName, timeout, "127.0.0.1", function (err, api, finish) {
    if (!err) {
        mainTest(api, finish);
    } else {
        console.log("No test run.");
    }
});
#!/usr/bin/env bash #### exec builtin exec echo hi ## stdout: hi #### exec builtin with redirects exec 1>&2 echo 'to stderr' ## stdout-json: "" ## stderr: to stderr #### exec builtin with here doc # This has in a separate file because both code and data can be read from # stdin. $SH spec/builtins-exec-here-doc-helper.sh ## STDOUT: x=one y=two DONE ## END #### exec builtin accepts -- exec -- echo hi ## STDOUT: hi ## END ## BUG dash status: 127 ## BUG dash stdout-json: "" #### exec -- 2>&1 exec -- 3>&1 echo stdout 1>&3 ## STDOUT: stdout ## END ## BUG dash status: 127 ## BUG dash stdout-json: "" ## BUG mksh status: -11 ## BUG mksh stdout-json: "" #### cd and $PWD cd / echo $PWD ## stdout: / #### $OLDPWD cd / cd $TMP echo "old: $OLDPWD" env | grep OLDPWD # It's EXPORTED too! cd - ## STDOUT: old: / OLDPWD=/ / ## END ## BUG mksh STDOUT: old: / / ## END ## BUG zsh STDOUT: old: / OLDPWD=/ ## END #### pwd cd / pwd ## STDOUT: / ## END #### pwd after cd .. dir=$TMP/dir-one/dir-two mkdir -p $dir cd $dir echo $(basename $(pwd)) cd .. 
echo $(basename $(pwd)) ## STDOUT: dir-two dir-one ## END #### pwd with symlink and -P tmp=$TMP/builtins-pwd-1 mkdir -p $tmp/target ln -s -f $tmp/target $tmp/symlink cd $tmp/symlink echo pwd: basename $(pwd) echo pwd -P: basename $(pwd -P) ## STDOUT: pwd: symlink pwd -P: target ## END #### setting $PWD doesn't affect the value of 'pwd' builtin dir=/tmp/oil-spec-test/pwd mkdir -p $dir cd $dir PWD=foo echo before $PWD pwd echo after $PWD ## STDOUT: before foo /tmp/oil-spec-test/pwd after foo ## END #### unset PWD; then pwd dir=/tmp/oil-spec-test/pwd mkdir -p $dir cd $dir unset PWD echo PWD=$PWD pwd echo PWD=$PWD ## STDOUT: PWD= /tmp/oil-spec-test/pwd PWD= ## END #### 'unset PWD; pwd' before any cd (tickles a rare corner case) dir=/tmp/oil-spec-test/pwd-2 mkdir -p $dir cd $dir # ensure clean shell process state $SH -c 'unset PWD; pwd' ## STDOUT: /tmp/oil-spec-test/pwd-2 ## END #### lie about PWD; pwd before any cd dir=/tmp/oil-spec-test/pwd-3 mkdir -p $dir cd $dir # ensure clean shell process state $SH -c 'PWD=foo; pwd' ## STDOUT: /tmp/oil-spec-test/pwd-3 ## END #### remove pwd dir dir=/tmp/oil-spec-test/pwd mkdir -p $dir cd $dir pwd rmdir $dir echo status=$? pwd echo status=$? ## STDOUT: /tmp/oil-spec-test/pwd status=0 /tmp/oil-spec-test/pwd status=0 ## END ## OK mksh STDOUT: /tmp/oil-spec-test/pwd status=0 status=1 ## END #### pwd in symlinked dir on shell initialization tmp=$TMP/builtins-pwd-2 mkdir -p $tmp mkdir -p $tmp/target ln -s -f $tmp/target $tmp/symlink cd $tmp/symlink $SH -c 'basename $(pwd)' unset PWD $SH -c 'basename $(pwd)' ## STDOUT: symlink target ## END ## OK mksh STDOUT: target target ## END ## stderr-json: "" #### Test the current directory after 'cd ..' involving symlinks dir=$TMP/symlinktest mkdir -p $dir cd $dir mkdir -p a/b/c mkdir -p a/b/d ln -s -f a/b/c c > /dev/null cd c cd .. 
# Expecting a c/ (since we are in symlinktest) but osh gives c d (thinks we are # in b/) ls ## STDOUT: a c ## END #### cd with no arguments HOME=$TMP/home mkdir -p $HOME cd test $(pwd) = "$HOME" && echo OK ## stdout: OK #### cd to nonexistent dir cd /nonexistent/dir echo status=$? ## stdout: status=1 ## OK dash/mksh stdout: status=2 #### cd away from dir that was deleted dir=$TMP/cd-nonexistent mkdir -p $dir cd $dir rmdir $dir cd $TMP echo $(basename $OLDPWD) echo status=$? ## STDOUT: cd-nonexistent status=0 ## END #### cd permits double bare dash cd -- / echo $PWD ## stdout: / #### cd to symlink with -L and -P targ=$TMP/cd-symtarget lnk=$TMP/cd-symlink mkdir -p $targ ln -s $targ $lnk # -L behavior is the default cd $lnk test $PWD = "$TMP/cd-symlink" && echo OK cd -L $lnk test $PWD = "$TMP/cd-symlink" && echo OK cd -P $lnk test $PWD = "$TMP/cd-symtarget" && echo OK || echo $PWD ## STDOUT: OK OK OK ## END #### cd to relative path with -L and -P die() { echo "$@"; exit 1; } targ=$TMP/cd-symtarget/subdir lnk=$TMP/cd-symlink mkdir -p $targ ln -s $targ $lnk # -L behavior is the default cd $lnk/subdir test $PWD = "$TMP/cd-symlink/subdir" || die "failed" cd .. test $PWD = "$TMP/cd-symlink" && echo OK cd $lnk/subdir test $PWD = "$TMP/cd-symlink/subdir" || die "failed" cd -L .. test $PWD = "$TMP/cd-symlink" && echo OK cd $lnk/subdir test $PWD = "$TMP/cd-symlink/subdir" || die "failed" cd -P .. test $PWD = "$TMP/cd-symtarget" && echo OK || echo $PWD ## STDOUT: OK OK OK ## END #### Exit out of function f() { exit 3; } f exit 4 ## status: 3 #### Exit builtin with invalid arg exit invalid # Rationale: runtime errors are 1 ## status: 1 ## OK dash/bash status: 2 ## BUG zsh status: 0 #### Exit builtin with too many args # This is a parse error in OSH. exit 7 8 9 echo status=$? 
## status: 2 ## stdout-json: "" ## BUG bash/zsh status: 0 ## BUG bash/zsh stdout: status=1 ## BUG dash status: 7 ## BUG dash stdout-json: "" ## OK mksh status: 1 ## OK mksh stdout-json: "" #### time block # bash and mksh work; dash does't. # TODO: osh needs to implement BraceGroup redirect properly. err=_tmp/time-$(basename $SH).txt { time { sleep 0.01 sleep 0.02 } } 2> $err cat $err | grep --only-matching user # Just check that we found 'user'. # This is fiddly: # | sed -n -E -e 's/.*(0m0\.03).*/\1/' # ## status: 0 ## stdout: user # not parsed ## BUG dash status: 2 ## BUG dash stdout-json: "" # time is a builtin in zsh? ## BUG zsh status: 1 ## BUG zsh stdout-json: "" #### time pipeline time echo hi | wc -c ## stdout: 3 ## status: 0 #### shift set -- 1 2 3 4 shift echo "$@" shift 2 echo "$@" ## stdout-json: "2 3 4\n4\n" ## status: 0 #### Shifting too far set -- 1 shift 2 ## status: 1 ## OK dash status: 2 #### Invalid shift argument shift ZZZ ## status: 2 ## OK bash status: 1 ## BUG mksh/zsh status: 0 #### get umask umask | grep '[0-9]\+' # check for digits ## status: 0 #### set umask in octal rm -f $TMP/umask-one $TMP/umask-two umask 0002 echo one > $TMP/umask-one umask 0022 echo two > $TMP/umask-two stat -c '%a' $TMP/umask-one $TMP/umask-two ## status: 0 ## stdout-json: "664\n644\n" ## stderr-json: "" #### set umask symbolically umask 0002 # begin in a known state for the test rm $TMP/umask-one $TMP/umask-two echo one > $TMP/umask-one umask g-w,o-w echo two > $TMP/umask-two stat -c '%a' $TMP/umask-one $TMP/umask-two ## status: 0 ## STDOUT: 664 644 ## END ## stderr-json: ""
#!/bin/bash
# Launch ./streamserver as a background task on nodes node-0..node-66 of the
# given host suffix, via ssh.
#
# Usage: ./script.sh <host-suffix>

host="$1"

# Fix: the original silently looped over 'node-$i.' when no argument was given.
if [[ -z "$host" ]]; then
  echo "usage: $0 <host-suffix>" >&2
  exit 1
fi

for i in {0..66}; do
  echo "node-$i.$host"
  # StrictHostKeyChecking=no: first contact with freshly-provisioned nodes.
  echo 'cd /local/src/omniplay/dift/proc64; ./run_background_task.sh ./streamserver' | ssh -o StrictHostKeyChecking=no "node-$i.$host"
done
import * as React from 'react';
import { FormControl, FormGroup } from 'react-bootstrap';
import { AppealTarget } from 'state';
import { getStatus, idolWord } from 'utility';
import { AppContext } from 'component/App';

/**
 * Input row for one idol's name and Vo/Da/Vi appeal stats.
 * `produce = true` edits the produced idol; otherwise `index` addresses a
 * support idol (dispatch values encode the index as a comma-joined prefix).
 */
export const IdolParameter: React.FC<{ produce?: boolean, index?: number }> = ({ produce = true, index = -1 }) => {
    const context = React.useContext(AppContext);

    // Dispatch a stat change; silently ignores input that does not parse as an integer.
    const onChange = (value: any, type: AppealTarget) => {
        if (typeof value === 'string') {
            const parsedValue = parseInt(value, 10);
            // Self-comparison is a NaN guard: NaN !== NaN, so this passes only
            // for real numbers (equivalent to !Number.isNaN(parsedValue)).
            if (parsedValue === parsedValue) {
                if (produce) {
                    context.dispatch({ 'type': 'P_IDOL', 'value': `${type},${value}` });
                } else {
                    context.dispatch({ 'type': 'S_IDOL', 'value': `${index},${type},${value}` });
                }
            }
        }
    }

    // Name edits dispatch unconditionally (no numeric parsing).
    const onChangeName = (event: React.FormEvent<any>) => {
        if (produce) {
            context.dispatch({ 'type': 'P_NAME', 'value': event.currentTarget.value });
        } else {
            context.dispatch({ 'type': 'S_NAME', 'value': `${index},${event.currentTarget.value}` });
        }
    }

    const onChangeVo = (event: React.FormEvent<any>) => {
        onChange(event.currentTarget.value, 'vo');
    }
    const onChangeDa = (event: React.FormEvent<any>) => {
        onChange(event.currentTarget.value, 'da');
    }
    const onChangeVi = (event: React.FormEvent<any>) => {
        onChange(event.currentTarget.value, 'vi');
    }

    // Current name from state ('' + coerces undefined to a string).
    const defaultName = () => {
        if (produce) {
            return '' + context.state.pIdolName;
        } else {
            return '' + context.state.sIdolName[index];
        }
    }

    // Current stat value for the given appeal type, as a string.
    const defaultValue = (type: AppealTarget) => {
        if (produce) {
            return '' + getStatus(context.state.pIdolStatus, type);
        } else {
            return '' + getStatus(context.state.sIdolStatus[index], type);
        }
    }

    return (
        <FormGroup className='d-flex m-3'>
            <FormControl className='mx-1' type='text'
                placeholder={produce ? `p${idolWord()}` : `s${index + 1}${idolWord()}`}
                value={defaultName()} onChange={onChangeName} />
            <FormControl className='mx-1' type='text' placeholder='Vo'
                value={defaultValue('vo')} onChange={onChangeVo} />
            <FormControl className='mx-1' type='text' placeholder='Da'
                value={defaultValue('da')} onChange={onChangeDa} />
            <FormControl className='ml-1' type='text' placeholder='Vi'
                value={defaultValue('vi')} onChange={onChangeVi} />
        </FormGroup>
    )
};
// Auto-generated Doxygen navigation data for armnn::ClLstmFloatWorkload.
// Each entry maps a member's display name to its anchor in the class
// documentation page. Do not edit by hand.
var classarmnn_1_1_cl_lstm_float_workload =
[
    [ "ClLstmFloatWorkload", "classarmnn_1_1_cl_lstm_float_workload.xhtml#aba06d1bb61940d3e2eec26ac7dabdc65", null ],
    [ "Execute", "classarmnn_1_1_cl_lstm_float_workload.xhtml#ae071e8822437c78baea75c3aef3a263a", null ]
];
#!/bin/bash
#
# Prepare MSYS environment so that all build tools have the $(TOOLPREFIX) as
# expected in Makefile.tools:
# - win64x64\common\Makefile.tools
# - win32x86\common\Makefile.tools
# Also, extend $PATH since MSYS is installed but not conveniently accessible
# in a GitHub-Actions Windows environment.

# Both variables are required to construct the toolchain paths/prefix.
[[ -z "${MSYS_SYS}" ]] && exit 2
[[ -z "${MSYS_ENV}" ]] && exit 2

bin="/c/msys64/${MSYS_SYS}/bin"

# Symlink each tool under its cross-prefixed name, e.g.
#   clang -> x86_64-w64-mingw32-clang
# (replaces ten copy-pasted ln invocations).
for tool in clang clang++ ar dlltool as windres nm dllwrap strip objcopy; do
  ln -f -s "${bin}/${tool}" "${bin}/${MSYS_ENV}-w64-mingw32-${tool}"
done

PATH=$PATH:${bin}
/// Swaps the values behind two mutable references in place.
///
/// Uses the standard-library `std::mem::swap` instead of a hand-rolled
/// temporary — same behavior, idiomatic and generic-safe.
fn swap_in_place(val1: &mut u32, val2: &mut u32) {
    std::mem::swap(val1, val2);
}

fn main() {
    let mut a = 10;
    let mut b = 20;
    println!("Before swapping, a = {} and b = {}", a, b);
    swap_in_place(&mut a, &mut b);
    println!("After swapping, a = {} and b = {}", a, b);
}
# Copyright 2019 Xilinx Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # prepare data export PYTHONPATH=${PWD}:${PYTHONPATH} CUDA_VISIBLE_DEVICES=0,1,2,3 python code/train.py --arch cbr --data_path ./data/cityscapes_20cls/ --num_classes 20 --batch_size 32 --learning_rate 1e-2 --epochs 200 --ckpt_path ./checkpoint/erfnet_cbr/
<filename>src/Foreign/Day.js var dayjs = require("dayjs"); dayjs.extend(require("dayjs/plugin/advancedFormat")); exports._fromUTCString = function (nothing, just, str) { var d = dayjs(str); return d.isValid() ? just(d) : nothing; }; exports.toUTCString = function (d) { return d.format(); }; exports.fromMilliseconds = function (ms) { return dayjs(ms); }; exports.toMilliseconds = function (d) { return d.valueOf(); }; exports.now = function () { return dayjs(); }; exports.format = function (format) { return function (d) { return d.format(format); }; };
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS206: Consider reworking classes to avoid initClass
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
import { Primitive } from "../../primitive.js";

// Primitive wrapping a user-supplied GLSL snippet: parses it via shadergraph,
// exposes its uniforms as node attributes, and can bind attached data sources.
export class Shader extends Primitive {
  static initClass() {
    this.traits = ["node", "bind", "shader"];
    this.freeform = true;
  }

  init() {
    return (this.shader = null);
  }

  make() {
    const { language, code } = this.props;

    // Only GLSL snippets are supported.
    if (language !== "glsl") {
      throw new Error("GLSL required");
    }

    // Bind to attached data sources
    this._helpers.bind.make([
      { to: "shader.sources", trait: "source", multiple: true },
    ]);

    // Parse snippet w/ shadergraph (will do implicit DOM script tag by ID
    // lookup if simple selector or ID given)
    const snippet = this._shaders.fetch(code);

    // Convert uniforms to attributes
    const types = this._types;
    const uniforms = {};
    // Map a GLSL uniform type code to the corresponding attribute type;
    // a trailing 'v' denotes an array of the base type. Unknown types -> null.
    const make = (type) => {
      let t;
      switch (type) {
        case "i":
          return types.int();
        case "f":
          return types.number();
        case "v2":
          return types.vec2();
        case "v3":
          return types.vec3();
        case "v4":
          return types.vec4();
        case "m3":
          return types.mat3();
        case "m4":
          return types.mat4();
        case "t":
          return types.object();
        default:
          t = type.split("");
          if (t.pop() === "v") {
            return types.array(make(t.join("")));
          } else {
            return null;
          }
      }
    };
    for (const def of Array.from(snippet._signatures.uniform)) {
      let type;
      if ((type = make(def.type))) {
        uniforms[def.name] = type;
      }
    }

    // Reconfigure node model
    return this.reconfigure({ props: { uniform: uniforms } });
  }

  made() {
    // Notify of shader reallocation
    return this.trigger({
      type: "source.rebuild",
    });
  }

  unmake() {
    return (this.shader = null);
  }

  change(changed, _touched, _init) {
    // Any change to the snippet or its uniforms requires a full rebuild.
    if (
      changed["shader.uniforms"] ||
      changed["shader.code"] ||
      changed["shader.language"]
    ) {
      return this.rebuild();
    }
  }

  // Builds the bound shader: merges node-attribute uniforms and explicit
  // `props.uniforms`, requires any attached sources, then pipes the snippet.
  shaderBind(uniforms) {
    let k, u, v;
    if (uniforms == null) {
      uniforms = {};
    }
    const { code } = this.props;

    // Merge in prop attributes as uniforms
    for (k in this.node.attributes) {
      v = this.node.attributes[k];
      if (v.type != null && v.short != null && v.ns === "uniform") {
        // Explicitly passed-in uniforms take precedence.
        if (uniforms[v.short] == null) {
          uniforms[v.short] = v;
        }
      }
    }

    // Merge in explicit uniform object if set
    if ((u = this.props.uniforms) != null) {
      for (k in u) {
        v = u[k];
        uniforms[k] = v;
      }
    }

    // New shader
    const s = this._shaders.shader();

    // Require sources
    if (this.bind.sources != null) {
      for (const source of Array.from(this.bind.sources)) {
        s.require(source.sourceShader(this._shaders.shader()));
      }
    }

    // Build bound shader
    return s.pipe(code, uniforms);
  }
}
Shader.initClass();
#!/usr/bin/env bash set -Eeuo pipefail root="$(dirname "${BASH_SOURCE[0]}")/../../.." # Source the main vars file to get the operator version to be used. # shellcheck disable=SC1091,SC1090 source "$root/hack/lib/__sources__.bash" version=${OPERATOR_VERSION:-v$(metadata.get dependencies.operator)} target_dir="$root/olm-catalog/serverless-operator/manifests/" target_serving_file="$target_dir/operator_v1alpha1_knativeserving_crd.yaml" target_eventing_file="$target_dir/operator_v1alpha1_knativeeventing_crd.yaml" rm -rf "$target_serving_file" "$target_eventing_file" serving_url="https://raw.githubusercontent.com/knative/operator/$version/config/300-serving.yaml" eventing_url="https://raw.githubusercontent.com/knative/operator/$version/config/300-eventing.yaml" wget --no-check-certificate "$serving_url" -O "$target_serving_file" wget --no-check-certificate "$eventing_url" -O "$target_eventing_file" # For SRVKE-755 state the actual default for Openshift Serverless disable HPA: git apply "$root/olm-catalog/serverless-operator/hack/001-eventing-sinkbinding-default-override" # Drop unsupported fields from the Serving CRD. git apply "$root/olm-catalog/serverless-operator/hack/002-serving-drop-unsupported-fields.patch" # Drop unsupported fields from the Eventing CRD. git apply "$root/olm-catalog/serverless-operator/hack/003-eventing-drop-unsupported-fields.patch" # Drop unsupported sources field from the Eventing CRD. git apply "$root/olm-catalog/serverless-operator/hack/004-eventing-drop-unsupported-sources.patch"
// Endpoint to delete a character by id (REST DELETE handler).
public function index_delete($id){
    // Validate the incoming id: it must be present and numeric.
    if (!$id || !is_numeric($id)) {
        $data = array("status" => 400, "msg" => 'Bad request.');
        $this->response($data, REST_Controller::HTTP_BAD_REQUEST);
    } else {
        // Assuming $database is the database connection object
        // NOTE(review): $database is not defined in this method's scope (no
        // $this->db, no global) — this reads as placeholder pseudocode; wire
        // up the real DB handle before use.
        $character = $database->findCharacterById($id); // Replace with actual database query to find character by id
        if ($character) {
            // Character found, proceed with deletion
            $database->deleteCharacterById($id); // Replace with actual database query to delete character by id
            $data = array("status" => 200, "msg" => 'Character successfully deleted.');
            $this->response($data, REST_Controller::HTTP_OK);
        } else {
            // Character not found
            $data = array("status" => 404, "msg" => 'Character not found.');
            $this->response($data, REST_Controller::HTTP_NOT_FOUND);
        }
    }
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package servlets;

import beans.ProfileBean;
import entities.Iznajmljivanje;
import entities.Korisnik;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Date;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.FlushModeType;
import javax.persistence.Persistence;
import javax.persistence.Query;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;

/**
 * Servlet that returns all of the logged-in user's outstanding rentals:
 * computes the price per rental (day count * daily price, 30% discount if
 * applicable, minus deposit), marks them returned, restores stock and shows
 * the total on the profile page.
 *
 * @author Nikola
 */
public class ReturnGear extends HttpServlet {

    /**
     * Number of (partial) days between two instants, rounded up so any started
     * day is billed in full.
     */
    private static long calculateDifference(Date before, Date after) {
        long diff = after.getTime() - before.getTime();
        return (long) (Math.ceil(diff / (1000.0 * 60 * 60 * 24)));
    }

    /**
     * Processes requests for both HTTP <code>GET</code> and <code>POST</code>
     * methods.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    protected void processRequest(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        HttpSession session = request.getSession();
        EntityManagerFactory emf = Persistence.createEntityManagerFactory("Februar2017PU");
        EntityManager em = emf.createEntityManager();
        try {
            ProfileBean profileBean = (ProfileBean) session.getAttribute("profileBean");
            if (profileBean != null) {
                em.setFlushMode(FlushModeType.COMMIT);
                em.getTransaction().begin();
                try {
                    Query query = em.createQuery(
                            "SELECT i FROM Iznajmljivanje i WHERE i.idkorisnik.username = :username AND i.razduzeno = false");
                    query.setParameter("username", profileBean.getUsername());
                    List<Iznajmljivanje> results = query.getResultList();

                    double price = 0;
                    Date today = new Date();
                    for (Iznajmljivanje result : results) {
                        double days = calculateDifference(result.getDatumpreuz(), today);
                        // 30% discount when applicable; the deposit is subtracted from the bill.
                        double priceForThis = days * result.getIdopreme().getCenapodanu()
                                * (result.getImapopust() ? 0.7 : 1) - result.getDepozit();
                        price += priceForThis;
                        result.setRazduzeno(true);
                        result.setUkupnoNaplata(priceForThis);
                        // Returned item goes back into stock.
                        result.getIdopreme().setKolicina(result.getIdopreme().getKolicina() + 1);
                    }
                    em.getTransaction().commit();
                } catch (RuntimeException e) {
                    // Fix: the original left the transaction dangling on failure;
                    // roll back before the EntityManager is closed.
                    if (em.getTransaction().isActive()) {
                        em.getTransaction().rollback();
                    }
                    throw e;
                }

                profileBean.setPaid(price);
                profileBean.setShouldShowPaid(true);
                profileBean.resetGear();
                session.setAttribute("profileBean", profileBean);
            }
            response.sendRedirect("profile.jsp");
        } finally {
            em.close();
            emf.close();
        }
    }

    // <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code.">
    /**
     * Handles the HTTP <code>GET</code> method.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Handles the HTTP <code>POST</code> method.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Returns a short description of the servlet.
     *
     * @return a String containing servlet description
     */
    @Override
    public String getServletInfo() {
        return "Short description";
    }// </editor-fold>
}
#!/bin/bash shopt -s extglob rm -rf package/boot/uboot-rockchip svn export --force https://github.com/immortalwrt/immortalwrt/branches/master/package/boot/uboot-rockchip package/boot/uboot-rockchip svn export --force https://github.com/immortalwrt/immortalwrt/branches/master/package/boot/arm-trusted-firmware-rockchip-vendor package/boot/arm-trusted-firmware-rockchip-vendor rm -rf target/linux/rockchip/!(Makefile|patches-5.10) svn co https://github.com/immortalwrt/immortalwrt/branches/master/target/linux/rockchip target/linux/rockchip rm -rf target/linux/rockchip/{.svn,patches-5.10/.svn} svn co https://github.com/immortalwrt/immortalwrt/branches/master/target/linux/rockchip/patches-5.10 target/linux/rockchip/patches-5.10 rm -Rf target/linux/rockchip/patches-5.10/{006-*-NanoPi-R,007-*-R4S}.patch curl -sfL https://raw.githubusercontent.com/immortalwrt/immortalwrt/master/package/kernel/linux/modules/video.mk -o package/kernel/linux/modules/video.mk curl -sfL https://raw.githubusercontent.com/friendlyarm/friendlywrt/master-v21.02/target/linux/rockchip/armv8/base-files/usr/bin/fa-fancontrol.sh --create-dirs -o files/usr/bin/fa-fancontrol.sh curl -sfL https://raw.githubusercontent.com/friendlyarm/friendlywrt/master-v21.02/target/linux/rockchip/armv8/base-files/usr/bin/fa-fancontrol-direct.sh --create-dirs -o files/usr/bin/fa-fancontrol-direct.sh curl -sfL https://raw.githubusercontent.com/friendlyarm/friendlywrt/master-v21.02/target/linux/rockchip/armv8/base-files/etc/init.d/fa-fancontrol --create-dirs -o files/etc/init.d/fa-fancontrol chmod +x files/usr/bin/fa-*.sh files/etc/init.d/fa-fancontrol sed -i 's,-mcpu=generic,-march=armv8-a+crypto+crc -mabi=lp64,g' include/target.mk sed -i '/;;/i\ethtool -K eth1 rx off tx off && logger -t disable-offloading "disabed rk3328 ethernet tcp/udp offloading tx/rx"' target/linux/rockchip/armv8/base-files/etc/hotplug.d/net/40-net-smp-affinity sed -i 's,kmod-usb-net-rtl8152$,kmod-usb-net-rtl8152-vendor,g' 
target/linux/rockchip/image/armv8.mk sed -i 's,kmod-r8169,kmod-r8168,g' target/linux/rockchip/image/armv8.mk echo ' CONFIG_ARM64_CRYPTO=y CONFIG_CRYPTO_AES_ARM64=y CONFIG_CRYPTO_AES_ARM64_BS=y CONFIG_CRYPTO_AES_ARM64_CE=y CONFIG_CRYPTO_AES_ARM64_CE_BLK=y CONFIG_CRYPTO_AES_ARM64_CE_CCM=y CONFIG_CRYPTO_AES_ARM64_NEON_BLK=y CONFIG_CRYPTO_CRYPTD=y CONFIG_CRYPTO_GF128MUL=y CONFIG_CRYPTO_GHASH_ARM64_CE=y CONFIG_CRYPTO_SHA1=y CONFIG_CRYPTO_SHA1_ARM64_CE=y CONFIG_CRYPTO_SHA256_ARM64=y CONFIG_CRYPTO_SHA2_ARM64_CE=y CONFIG_CRYPTO_SHA512_ARM64=y CONFIG_CRYPTO_SIMD=y CONFIG_REALTEK_PHY=y CONFIG_CPU_FREQ_GOV_USERSPACE=y CONFIG_CPU_FREQ_GOV_ONDEMAND=y CONFIG_CPU_FREQ_GOV_CONSERVATIVE=y ' >> ./target/linux/rockchip/armv8/config-5.10
import IActivitiesRepository from 'modules/selectedGame/domain/repositories/IActivitiesRepository';
import ActivitiesRepository from 'modules/selectedGame/infra/repositories/ActivitiesRepository';

/**
 * Factory for the activities repository: hides the concrete infra
 * implementation behind the domain-level interface so callers depend
 * only on IActivitiesRepository.
 */
export default function makeActivitiesRepository(): IActivitiesRepository {
  const repository: IActivitiesRepository = new ActivitiesRepository();
  return repository;
}
#!/bin/bash
# Sets test_type and reports whether it equals "Devops".
export test_type=Devops

case "${test_type}" in
  Devops)
    echo "test is Devops"
    ;;
  *)
    echo "test is not Devops"
    ;;
esac
<gh_stars>0 "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.ic_local_printshop_twotone = void 0; var ic_local_printshop_twotone = { "viewBox": "0 0 24 24", "children": [{ "name": "path", "attribs": { "d": "M0 0h24v24H0V0z", "fill": "none" }, "children": [] }, { "name": "path", "attribs": { "d": "M8 5h8v3H8zm11 5H5c-.55 0-1 .45-1 1v4h2v-2h12v2h2v-4c0-.55-.45-1-1-1zm-1 2.5c-.55 0-1-.45-1-1s.45-1 1-1 1 .45 1 1-.45 1-1 1z", "opacity": ".3" }, "children": [] }, { "name": "path", "attribs": { "d": "M19 8h-1V3H6v5H5c-1.66 0-3 1.34-3 3v6h4v4h12v-4h4v-6c0-1.66-1.34-3-3-3zM8 5h8v3H8V5zm8 14H8v-4h8v4zm4-4h-2v-2H6v2H4v-4c0-.55.45-1 1-1h14c.55 0 1 .45 1 1v4z" }, "children": [] }, { "name": "circle", "attribs": { "cx": "18", "cy": "11.5", "r": "1" }, "children": [] }] }; exports.ic_local_printshop_twotone = ic_local_printshop_twotone;
import React, { useCallback, useEffect, useReducer } from 'react';
import useConference from '../../../hooks/useConference';
import useMaybeUserProfile from '../../../hooks/useMaybeUserProfile';
import MenuExpander, { ButtonSpec } from "../Menu/MenuExpander";
import MenuGroup, { MenuGroupItems } from '../Menu/MenuGroup';
import MenuItem from '../Menu/MenuItem';
import { Conference, UserProfile, WatchedItems } from '@clowdr-app/clowdr-db-schema';
import { makeCancelable } from '@clowdr-app/clowdr-db-schema/build/Util';
import useMaybeChat from '../../../hooks/useMaybeChat';
import { ChatDescriptor, MemberDescriptor } from '../../../classes/Chat';
import assert from 'assert';
import { ServiceEventNames } from '../../../classes/Chat/Services/Twilio/ChatService';
import { LoadingSpinner } from '../../LoadingSpinner/LoadingSpinner';
import useLogger from '../../../hooks/useLogger';
import { DataDeletedEventDetails, DataUpdatedEventDetails } from '@clowdr-app/clowdr-db-schema/build/DataLayer/Cache/Cache';
import useDataSubscription from '../../../hooks/useDataSubscription';
import useSafeAsync from '../../../hooks/useSafeAsync';
import { addNotification } from '../../../classes/Notifications/Notifications';
import ReactMarkdown from 'react-markdown';
import { emojify } from 'react-emojione';
import { useLocation } from 'react-router-dom';

// Sidebar "Chats" group: shows the user's watched (active) chats, supports
// searching all chats/users, and raises notifications for new messages.

// Async-load phases tracked in ChatsGroupState.tasks.
type ChatGroupTasks = "loadingActiveChats" | "loadingAllChats";

// A chat descriptor enriched for sidebar display; DMs carry both members'
// display names so the "other" participant can be shown.
export type SidebarChatDescriptor = {
    exists: true;
    id: string;
    friendlyName: string;
    isModeration: boolean;
    isModerationHub: boolean;
} & ({
    isDM: false;
} | {
    isDM: true;
    member1: MemberDescriptor & { displayName: string };
    member2: MemberDescriptor & { displayName: string };
});

// Search results: either an existing chat, or a "create new DM" pseudo-entry.
type FilteredChatDescriptor = (ChatDescriptor & { exists: true }) | { exists: false; friendlyName: string; targetPath: string; };

type FilteredSidebarChatDescriptor = SidebarChatDescriptor | { exists: false; friendlyName: string; targetPath: string; };

// Minimal user info needed for search and ban filtering.
export type SidebarUserDescriptor = {
    id: string;
    name: string;
    isBanned: boolean;
};

// Reducer state for the whole group; null collections mean "not loaded yet".
interface ChatsGroupState {
    tasks: Set<ChatGroupTasks>;
    isOpen: boolean;
    chatSearch: string | null;
    activeChats: Array<SidebarChatDescriptor> | null;
    allChats: Array<ChatDescriptor> | null;
    watchedChatIds: Array<string> | null;
    filteredChats: Array<FilteredSidebarChatDescriptor>;
    allUsers: Array<SidebarUserDescriptor> | null;
}

// Discriminated union of reducer actions; see nextSidebarState.
type ChatsGroupUpdate
    = { action: "updateAllChats"; chats: Array<ChatDescriptor> }
    | { action: "setActiveChats"; chats: Array<SidebarChatDescriptor> }
    | { action: "updateActiveChats"; chats: Array<SidebarChatDescriptor> }
    | { action: "updateFilteredChats"; chats: Array<FilteredSidebarChatDescriptor> }
    | { action: "deleteFromActiveChats"; chats: Array<string> }
    | { action: "deleteFromAllChats"; chats: Array<string> }
    | { action: "searchChats"; search: string | null }
    | { action: "setIsOpen"; isOpen: boolean }
    | { action: "updateChatDescriptors"; fActive: (x: SidebarChatDescriptor) => SidebarChatDescriptor; fFiltered: (x: FilteredSidebarChatDescriptor) => FilteredSidebarChatDescriptor; }
    | { action: "setWatchedChatIds"; ids: Array<string> }
    | { action: "updateAllUsers"; update: (old: Array<SidebarUserDescriptor> | null) => Array<SidebarUserDescriptor> | null }
    ;

/**
 * Filters chats and users against a search string (once it reaches
 * minSearchLength). Returns matching existing chats plus "create new DM"
 * entries for matching users who don't already share a DM with anyone.
 * Returns [] when the search is too short or absent.
 */
async function filterChats(
    allChats: Array<ChatDescriptor>,
    allUsers: Array<SidebarUserDescriptor>,
    currentUserProfileId: string | undefined,
    _search: string | null,
    minSearchLength: number
): Promise<Array<FilteredChatDescriptor>> {
    if (_search && _search.length >= minSearchLength) {
        const search = _search.toLowerCase();
        const filteredUsers = allUsers.filter(x =>
            x.name.toLowerCase().includes(search)
            && x.id !== currentUserProfileId
            && !x.isBanned
        );
        const filteredChats: FilteredChatDescriptor[] = allChats
            .filter(x => x.friendlyName.toLowerCase().includes(search)
                // NOTE(review): this predicate never uses `p` — it only checks that the
                // current user is a member, so any DM matches when filteredUsers is
                // non-empty. Looks like it should compare against p.id; TODO confirm.
                || (x.isDM && filteredUsers.some(p => {
                    return x.member1.profileId === currentUserProfileId ||
                        x.member2.profileId === currentUserProfileId;
                })))
            .map(x => ({
                ...x,
                exists: true
            }));
        return filteredChats.concat(
            filteredUsers
                .filter(p => !allChats.some(c => {
                    return c.isDM && (c.member1.profileId === p.id || c.member2.profileId === p.id);
                }))
                .map(p => ({
                    exists: false,
                    friendlyName: p.name,
                    targetPath: `/chat/new/${p.id}`
                }))
        );
    }
    else {
        return [];
    }
}

/**
 * Reducer for ChatsGroupState. Accepts a single update or a batch; batches are
 * applied in order against a shallow copy of the current state. Update/merge
 * actions replace matching ids in place and append the remainder; "loading"
 * tasks are cleared once the corresponding collection arrives.
 */
function nextSidebarState(currentState: ChatsGroupState, updates: ChatsGroupUpdate | Array<ChatsGroupUpdate>): ChatsGroupState {
    const nextState: ChatsGroupState = {
        tasks: new Set(currentState.tasks),
        isOpen: currentState.isOpen,
        chatSearch: currentState.chatSearch,
        allChats: currentState.allChats,
        activeChats: currentState.activeChats,
        filteredChats: currentState.filteredChats,
        watchedChatIds: currentState.watchedChatIds,
        allUsers: currentState.allUsers
    };

    let allChatsUpdated = false;
    let activeChatsUpdated = false;

    function doUpdate(update: ChatsGroupUpdate) {
        switch (update.action) {
            case "searchChats":
                nextState.chatSearch = update.search?.length ? update.search : null;
                break;
            case "setIsOpen":
                nextState.isOpen = update.isOpen;
                break;
            case "updateAllChats":
                {
                    // Replace chats whose id matches, keep the rest, append new ones.
                    const changes = [...update.chats];
                    const updatedIds = changes.map(x => x.id);
                    nextState.allChats = nextState.allChats?.map(x => {
                        const idx = updatedIds.indexOf(x.id);
                        if (idx > -1) {
                            const y = changes[idx];
                            updatedIds.splice(idx, 1);
                            changes.splice(idx, 1);
                            return y;
                        }
                        else {
                            return x;
                        }
                    }) ?? null;
                    nextState.allChats = nextState.allChats?.concat(changes) ?? changes;
                    allChatsUpdated = true;
                }
                break;
            case "updateActiveChats":
                {
                    // Same merge strategy as updateAllChats, for the watched list.
                    const changes = [...update.chats];
                    const updatedIds = changes.map(x => x.id);
                    nextState.activeChats = nextState.activeChats?.map(x => {
                        const idx = updatedIds.indexOf(x.id);
                        if (idx > -1) {
                            const y = changes[idx];
                            updatedIds.splice(idx, 1);
                            changes.splice(idx, 1);
                            return y;
                        }
                        else {
                            return x;
                        }
                    }) ?? null;
                    nextState.activeChats = nextState.activeChats?.concat(changes) ?? changes;
                    activeChatsUpdated = true;
                }
                break;
            case "deleteFromActiveChats":
                nextState.activeChats = nextState.activeChats?.filter(x => !update.chats.includes(x.id)) ?? null;
                activeChatsUpdated = true;
                break;
            case "deleteFromAllChats":
                nextState.allChats = nextState.allChats?.filter(x => !update.chats.includes(x.id)) ?? null;
                allChatsUpdated = true;
                break;
            case "updateFilteredChats":
                nextState.filteredChats = update.chats;
                break;
            case "updateChatDescriptors":
                if (nextState.activeChats) {
                    nextState.activeChats = nextState.activeChats.map(update.fActive);
                }
                // NOTE(review): guards on allChats but maps filteredChats — looks
                // intentional only if filteredChats derives from allChats; confirm.
                if (nextState.allChats) {
                    nextState.filteredChats = nextState.filteredChats.map(update.fFiltered);
                }
                break;
            case "setActiveChats":
                nextState.activeChats = update.chats;
                activeChatsUpdated = true;
                break;
            case "setWatchedChatIds":
                nextState.watchedChatIds = update.ids;
                break;
            case "updateAllUsers":
                nextState.allUsers = update.update(nextState.allUsers ? [...nextState.allUsers] : null);
                break;
        }
    }

    if (updates instanceof Array) {
        updates.forEach(doUpdate);
    }
    else {
        doUpdate(updates);
    }

    if (allChatsUpdated) {
        if (nextState.allChats) {
            nextState.tasks.delete("loadingAllChats");
        }
        else {
            nextState.filteredChats = [];
        }
    }

    if (activeChatsUpdated) {
        nextState.tasks.delete("loadingActiveChats");
    }

    return nextState;
}

/**
 * Upgrades a ChatDescriptor for sidebar use: for DMs, resolves both members'
 * user profiles so display names are available. Asserts both profiles exist.
 */
export async function upgradeChatDescriptor(conf: Conference, x: ChatDescriptor): Promise<SidebarChatDescriptor> {
    if (x.isDM) {
        const [p1, p2] = await Promise.all([
            UserProfile.get(x.member1.profileId, conf.id),
            UserProfile.get(x.member2.profileId, conf.id)
        ]);
        assert(p1);
        assert(p2);
        return {
            ...x,
            exists: true,
            member1: {
                ...x.member1,
                displayName: p1.displayName
            },
            member2: {
                ...x.member2,
                displayName: p2.displayName
            }
        };
    }
    else {
        return { ...x, exists: true };
    }
}

/**
 * Computes the sidebar label and icon for a chat. For DMs the label is the
 * *other* participant's display name and the icon reflects their online
 * status; group chats use the Twilio friendly name and a hashtag icon.
 */
export function computeChatDisplayName(chat: SidebarChatDescriptor, mUser: UserProfile) {
    let friendlyName: string;
    let icon: JSX.Element;

    if (chat.isDM) {
        const member1 = chat.member1;
        const member2 = chat.member2;
        let otherOnline;

        if (member1.profileId !== mUser.id) {
            friendlyName = member1.displayName;
            otherOnline = member1.isOnline;
        }
        else {
            friendlyName = member2.displayName;
            otherOnline = member2.isOnline;
        }

        icon = <i className={`fa${otherOnline ? 's' : 'r'} fa-circle ${otherOnline ? 'online' : ''}`}></i>;
    }
    else {
        // Group chat - use chat friendly name from Twilio
        friendlyName = chat.friendlyName;
        icon = <i className="fas fa-hashtag"></i>;
    }

    return { friendlyName, icon };
}

interface Props {
    // Minimum characters before the search filter activates.
    minSearchLength: number;
    // Optional callback fired when any menu item is clicked (e.g. to close a drawer).
    onItemClicked?: () => void;
}

/**
 * Sidebar "Chats" group component. Loads watched/all chats and all users,
 * keeps them fresh via data subscriptions and Twilio service events, shows
 * notifications for messages in watched chats the user isn't viewing, and
 * renders the expandable chat list with search. Renders nothing when no user
 * profile is available.
 */
export default function ChatsGroup(props: Props) {
    const conf = useConference();
    const mUser = useMaybeUserProfile();
    const mChat = useMaybeChat();
    const location = useLocation();
    const logger = useLogger("ChatsGroup");
    const [state, dispatchUpdate] = useReducer(nextSidebarState, {
        tasks: new Set([
            // NOTE(review): "loadingChats" is not a member of ChatGroupTasks
            // (the cast hides it) — presumably "loadingAllChats" was intended;
            // as written that task can never be deleted because it was never added.
            "loadingChats",
            "loadingActiveChats"
        ] as ChatGroupTasks[]),
        isOpen: true,
        chatSearch: null,
        allChats: null,
        activeChats: null,
        watchedChatIds: null,
        filteredChats: [],
        allUsers: null
    });

    logger.enable();

    const renderEmoji = useCallback((text: any) => {
        const doEmojify = (val: any) => <>{emojify(val, { output: 'unicode' })}</>;
        return doEmojify(text.value);
    }, []);

    // Notification effect: subscribes to "messageAdded" on every active chat and
    // raises a toast unless the user authored the message or is already viewing
    // the relevant chat/moderation page. Unsubscribes on cleanup.
    useEffect(() => {
        let functionsToOff: Promise<Array<{ id: string; f: () => void }>> = Promise.resolve([]);
        if (mChat && state.activeChats) {
            functionsToOff = Promise.all(state.activeChats.map(async c => {
                return {
                    id: c.id,
                    f: await mChat.channelEventOn(c.id, "messageAdded", (msg) => {
                        if (msg.author !== mUser?.id
                            && !location.pathname.includes(`/chat/${c.id}`)
                            && !location.pathname.includes(`/moderation/${c.id}`)
                            && (!c.isModerationHub || !location.pathname.includes("/moderation/hub"))
                        ) {
                            const isAnnouncement = c.friendlyName === "Announcements";
                            const title = isAnnouncement ? "" : `**${mUser ? computeChatDisplayName(c, mUser).friendlyName : c.friendlyName}**\n\n`;
                            const body = `${title}${msg.body}`;
                            addNotification(
                                <ReactMarkdown
                                    linkTarget="_blank"
                                    escapeHtml={true}
                                    renderers={{
                                        text: renderEmoji
                                    }}
                                >
                                    {body}
                                </ReactMarkdown>,
                                isAnnouncement
                                    ? undefined
                                    : c.isModerationHub
                                        ? (msg.attributes?.moderationChat
                                            ? {
                                                url: `/moderation/${msg.attributes.moderationChat}`,
                                                text: "Go to moderation channel"
                                            }
                                            : {
                                                url: `/moderation/hub`,
                                                text: "Go to moderation hub"
                                            }
                                        )
                                        : c.isModeration
                                            ? {
                                                url: `/moderation/${c.id}`,
                                                text: "Go to moderation channel"
                                            }
                                            : {
                                                url: `/chat/${c.id}`,
                                                text: "Go to chat"
                                            },
                                3000
                            );
                        }
                    })
                };
            }));

            return () => {
                functionsToOff.then(fs => {
                    fs.forEach(f => mChat.channelEventOff(f.id, "messageAdded", f.f));
                });
            };
        }
        return () => { };
    }, [location.pathname, mChat, mUser, renderEmoji, state.activeChats]);

    // Load the watched ("active") chats and upgrade them with display names.
    useSafeAsync(async () => {
        if (mChat) {
            const chats = await mChat.listWatchedChatsUnfiltered();
            const chatsWithName: Array<SidebarChatDescriptor> = await Promise.all(chats.map(x => upgradeChatDescriptor(conf, x)));
            return chatsWithName;
        }
        return null;
    }, (data: Array<SidebarChatDescriptor> | null) => {
        if (data) {
            dispatchUpdate({ action: "setActiveChats", chats: data });
        }
        // state.watchedChatIds is required so that active chats updates
    }, [conf, conf.id, mChat, state.watchedChatIds]);

    // Track the user's watched-chat id list.
    useSafeAsync(async () => mUser?.watched ?? null, (data: WatchedItems | null) => {
        if (data) {
            dispatchUpdate({ action: "setWatchedChatIds", ids: data.watchedChats });
        }
    }, [mUser?.watchedId]);

    // Load all user profiles for the search index.
    useSafeAsync(async () => UserProfile.getAll(conf.id), (data) => {
        dispatchUpdate({
            action: "updateAllUsers",
            update: () => data.map(x => ({
                id: x.id,
                name: x.displayName,
                isBanned: x.isBanned
            }))
        });
    }, [conf.id]);

    // Initial fetch of all chats
    useEffect(() => {
        let cancel: () => void = () => { };
        async function updateChats() {
            try {
                if (mChat) {
                    const chatsP = makeCancelable(mChat.listAllChats());
                    cancel = chatsP.cancel;
                    const chats = await chatsP.promise;
                    dispatchUpdate({ action: "updateAllChats", chats });
                }
            }
            catch (e) {
                if (!e.isCanceled) {
                    throw e;
                }
            }
        }
        updateChats();
        return cancel;
    }, [mChat]);

    // Update filtered chat results
    useEffect(() => {
        let cancel: () => void = () => { };
        async function updateFiltered() {
            try {
                const promise = makeCancelable(filterChats(
                    state.allChats ?? [],
                    state.allUsers ?? [],
                    mUser?.id,
                    state.chatSearch,
                    props.minSearchLength
                ));
                cancel = promise.cancel;
                const filteredChats = await promise.promise;
                const filteredChatsWithNames: Array<FilteredSidebarChatDescriptor> = await Promise.all(filteredChats.map(async x => x.exists ? await upgradeChatDescriptor(conf, x) : x));
                dispatchUpdate({ action: "updateFilteredChats", chats: filteredChatsWithNames });
            }
            catch (e) {
                if (!e.isCanceled) {
                    throw e;
                }
            }
        }
        updateFiltered();
        return cancel;
    }, [conf, conf.id, props.minSearchLength, state.allChats, state.chatSearch, state.allUsers, mUser]);

    // Subscribe to chat events
    useEffect(() => {
        if (mChat) {
            const chatService = mChat;
            const listeners: Map<ServiceEventNames, () => void> = new Map();
            const memberJoinedlisteners: Map<string, () => void> = new Map();

            async function attach() {
                try {
                    // Keep DM online indicators in sync with Twilio user updates.
                    listeners.set("userUpdated", await chatService.serviceEventOn("userUpdated", async (u) => {
                        if (u.updateReasons.includes("friendlyName") || u.updateReasons.includes("online") || u.updateReasons.includes("attributes")) {
                            function updateDescriptor(x: SidebarChatDescriptor): SidebarChatDescriptor {
                                if (x.isDM) {
                                    const m1 = { ...x.member1 };
                                    if (m1.profileId === u.user.profileId) {
                                        m1.isOnline = u.user.isOnline;
                                    }
                                    const m2 = { ...x.member2 };
                                    if (m2 && m2.profileId === u.user.profileId) {
                                        m2.isOnline = u.user.isOnline;
                                    }
                                    return {
                                        ...x,
                                        member1: m1,
                                        member2: m2
                                    }
                                }
                                else {
                                    return x;
                                }
                            }
                            dispatchUpdate({
                                action: "updateChatDescriptors",
                                fActive: updateDescriptor,
                                fFiltered: (x) => x.exists ? updateDescriptor(x) : x
                            });
                        }
                    }));
                }
                catch (e) {
                    if (!e.isCanceled) {
                        throw e;
                    }
                }
            }
            attach();

            return function detach() {
                const keys1 = listeners.keys();
                for (const key of keys1) {
                    const listener = listeners.get(key) as () => void;
                    chatService.serviceEventOff(key, listener);
                }
                const keys2 = memberJoinedlisteners.keys();
                for (const key of keys2) {
                    const listener = memberJoinedlisteners.get(key) as () => void;
                    chatService.channelEventOff(key, "memberJoined", listener);
                }
            };
        }
        return () => { };
    }, [conf, logger, mChat]);

    const watchedId = mUser?.watchedId;
    // Keep watched-chat ids in sync with WatchedItems cache updates.
    const onWatchedItemsUpdated = useCallback(function _onWatchedItemsUpdated(update: DataUpdatedEventDetails<"WatchedItems">) {
        for (const object of update.objects) {
            if (object.id === watchedId) {
                dispatchUpdate({
                    action: "setWatchedChatIds",
                    ids: (object as WatchedItems).watchedChats
                });
            }
        }
    }, [watchedId]);

    useDataSubscription("WatchedItems", onWatchedItemsUpdated, null, state.tasks.has("loadingActiveChats"), conf);

    // Merge TextChat cache updates into allChats (and activeChats when watched).
    const onTextChatUpdated = useCallback(async function _onTextChatUpdated(update: DataUpdatedEventDetails<"TextChat">) {
        if (mChat) {
            let newAllChats: Array<any> | null = null;
            let newActiveChats: Array<any> | null = null;
            for (const object of update.objects) {
                const chat = await mChat.getChat(object.id);
                if (chat) {
                    if (state.watchedChatIds?.includes(chat.id)) {
                        const chatD = await upgradeChatDescriptor(conf, chat);
                        if (!newActiveChats) {
                            newActiveChats = [];
                        }
                        newActiveChats.push(chatD);
                    }
                    if (!newAllChats) {
                        newAllChats = [];
                    }
                    newAllChats.push(chat);
                }
            }
            if (newAllChats) {
                const updates: Array<ChatsGroupUpdate> = [{ action: "updateAllChats", chats: newAllChats }];
                if (newActiveChats) {
                    updates.push({ action: "updateActiveChats", chats: newActiveChats });
                }
                dispatchUpdate(updates);
            }
        }
    }, [conf, mChat, state.watchedChatIds]);

    const onTextChatDeleted = useCallback(async function _onTextChatDeleted(update: DataDeletedEventDetails<"TextChat">) {
        dispatchUpdate([
            { action: "deleteFromAllChats", chats: [update.objectId] },
            { action: "deleteFromActiveChats", chats: [update.objectId] }
        ]);
    }, []);

    useDataSubscription("TextChat", onTextChatUpdated, onTextChatDeleted, !state.watchedChatIds, conf);

    // Upsert user profiles into the search index on cache updates.
    const onUserProfileUpdated = useCallback(async function _onUserProfileUpdated(update: DataUpdatedEventDetails<"UserProfile">) {
        dispatchUpdate({
            action: "updateAllUsers",
            update: (existing) => {
                const updated = Array.from(existing ?? []);
                for (const object of update.objects) {
                    const idx = updated?.findIndex(x => x.id === object.id);
                    const profile = object as UserProfile;
                    const item = {
                        id: profile.id,
                        name: profile.displayName,
                        isBanned: profile.isBanned
                    };
                    if (idx === -1) {
                        updated.push(item);
                    }
                    else {
                        updated.splice(idx, 1, item);
                    }
                }
                return updated;
            }
        });
    }, []);

    const onUserProfileDeleted = useCallback(async function _onUserProfileDeleted(update: DataDeletedEventDetails<"UserProfile">) {
        dispatchUpdate({ action: "updateAllUsers", update: (old) => old?.filter(x => x.id !== update.objectId) ?? null });
    }, []);

    useDataSubscription("UserProfile", onUserProfileUpdated, onUserProfileDeleted, !state.allUsers, conf);

    // ---- Render ----
    let chatsExpander: JSX.Element = <></>;
    const chatsButtons: Array<ButtonSpec> = [
        {
            type: "search", label: "Search all chats", icon: "fas fa-search",
            onSearch: (event) => {
                dispatchUpdate({ action: "searchChats", search: event.target.value });
                return event.target.value;
            },
            onSearchOpen: () => {
                dispatchUpdate({ action: "setIsOpen", isOpen: true });
            },
            onSearchClose: () => {
                dispatchUpdate({ action: "searchChats", search: null });
            }
        },
        { type: "link", label: "Show all chats", icon: "fas fa-users", url: "/chat" },
        { type: "link", label: "Create new chat", icon: "fas fa-plus", url: "/chat/new" }
    ];
    if (mUser) {
        let chatEl: JSX.Element;
        const chatSearchValid = state.chatSearch && state.chatSearch.length >= props.minSearchLength;
        if ((state.activeChats && state.activeChats.length > 0) || (chatSearchValid && state.allChats && state.filteredChats.length > 0)) {
            let chats: Array<FilteredSidebarChatDescriptor>;
            if (state.chatSearch && state.chatSearch.length >= props.minSearchLength) {
                chats = state.filteredChats;
            }
            else {
                // We know this can't be null, but TS can't quite figure that out
                chats = state.activeChats as Array<SidebarChatDescriptor>;
            }
            // Hide moderation channels from the ordinary chat list, label each
            // entry, and sort alphabetically.
            const renderedChats = chats
                .filter(x => !x.exists || (!x.isModeration && !x.isModerationHub))
                .map(chat => {
                    const { friendlyName, icon } = chat.exists
                        ? computeChatDisplayName(chat, mUser)
                        : { friendlyName: chat.friendlyName, icon: <i className="fas fa-plus" /> };
                    return {
                        friendlyName,
                        icon,
                        key: chat.exists ? chat.id : `new-${friendlyName}`,
                        path: chat.exists ? `/chat/${chat.id}` : chat.targetPath
                    };
                })
                .sort((x, y) => x.friendlyName.localeCompare(y.friendlyName));
            const chatMenuItems: MenuGroupItems = [];
            for (const { key, friendlyName, icon, path } of renderedChats) {
                // TODO: "New messages in this chat" boldification
                chatMenuItems.push({
                    key,
                    element:
                        <MenuItem
                            title={friendlyName}
                            label={friendlyName}
                            icon={icon}
                            action={path}
                            bold={false}
                            onClick={props.onItemClicked}
                        />
                });
            }
            chatEl = <MenuGroup items={chatMenuItems} />;
        }
        else {
            // No active chats / no search hits: show spinner while loading, plus
            // a fallback "All chats" link.
            chatEl = <>
                {state.allChats ? <></> : <LoadingSpinner />}
                <MenuGroup items={[{
                    key: "whole-chat",
                    element:
                        <MenuItem
                            title="View all chats"
                            label="All chats"
                            icon={<i className="fas fa-users"></i>}
                            action="/chat"
                            bold={true}
                            onClick={props.onItemClicked}
                        />
                }]} />
            </>;
        }
        chatsExpander = <MenuExpander
            title="Chats"
            isOpen={state.isOpen}
            buttons={chatsButtons}
            onOpenStateChange={() => dispatchUpdate({ action: "setIsOpen", isOpen: !state.isOpen })}
        >
            {chatEl}
        </MenuExpander>;
        return chatsExpander;
    }
    else {
        return <></>;
    }
}
package es.msanchez.spring.springinaction.entities;

import es.msanchez.spring.springinaction.enums.Type;
import lombok.Getter;
import lombok.RequiredArgsConstructor;

import javax.persistence.Entity;
import javax.persistence.Id;

/**
 * JPA entity for a taco ingredient, identified by its {@code id}.
 * Immutable: all fields are final and populated via the Lombok-generated
 * all-required-args constructor; Lombok generates the getters.
 *
 * <p>Fixes: removed the unused {@code lombok.Data} import and the
 * {@code @Setter} annotation — Lombok never generates setters for final
 * fields, so the annotation was a misleading no-op (public API unchanged).</p>
 *
 * <p>NOTE(review): JPA providers generally require a no-arg constructor for
 * entity instantiation; confirm how this entity is materialized before
 * relying on it outside the book sample.</p>
 */
@Getter
@Entity
@RequiredArgsConstructor
public class Ingredient {

    /** Primary key (e.g. a short ingredient code). */
    @Id
    private final String id;

    /** Human-readable ingredient name. */
    private final String name;

    /** Ingredient category (project enum). */
    private final Type type;
}
#!/usr/bin/env bash

# Copyright 2021 The Rook Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# CI helper library: the last lines dispatch "$1" as a function name with the
# remaining args, so each function below is a callable CI step.

set -xeEo pipefail

#############
# VARIABLES #
#############
# Default test disk: first 14G block device (runner-provided), unless BLOCK is preset.
: "${BLOCK:=$(sudo lsblk --paths | awk '/14G/ {print $1}' | head -1)}"
# Substrings of `make` output treated as transient network failures worth retrying.
NETWORK_ERROR="connection reset by peer"
SERVICE_UNAVAILABLE_ERROR="Service Unavailable"
INTERNAL_ERROR="INTERNAL_ERROR"
INTERNAL_SERVER_ERROR="500 Internal Server Error"

#############
# FUNCTIONS #
#############

# Install yq v3 (used by the deploy helpers below).
function install_deps() {
  sudo wget https://github.com/mikefarah/yq/releases/download/3.4.1/yq_linux_amd64 -O /usr/local/bin/yq
  sudo chmod +x /usr/local/bin/yq
}

# Print basic cluster diagnostics.
function print_k8s_cluster_status() {
  kubectl cluster-info
  kubectl get pods -n kube-system
}

# Wipe the test disk so Rook can consume it (handles both mounted and bare disks).
function use_local_disk() {
  BLOCK_DATA_PART=${BLOCK}1
  sudo dmsetup version || true
  sudo swapoff --all --verbose
  if mountpoint -q /mnt; then
    sudo umount /mnt
    # search for the device since it keeps changing between sda and sdb
    sudo wipefs --all --force "$BLOCK_DATA_PART"
  else
    # it's the hosted runner!
    sudo sgdisk --zap-all --clear --mbrtogpt -g -- "${BLOCK}"
    sudo dd if=/dev/zero of="${BLOCK}" bs=1M count=10 oflag=direct
    sudo parted -s "${BLOCK}" mklabel gpt
  fi
  sudo lsblk
}

# Prepare the disk for integration tests: wipe partition 1 and install udev
# rules that pin ownership/watch behavior so the device stays stable.
function use_local_disk_for_integration_test() {
  sudo udevadm control --log-priority=debug
  sudo swapoff --all --verbose
  sudo umount /mnt
  sudo sed -i.bak '/\/mnt/d' /etc/fstab
  # search for the device since it keeps changing between sda and sdb
  PARTITION="${BLOCK}1"
  sudo wipefs --all --force "$PARTITION"
  sudo dd if=/dev/zero of="${PARTITION}" bs=1M count=1
  sudo lsblk --bytes
  # add a udev rule to force the disk partitions to ceph
  # we have observed that some runners keep detaching/re-attaching the additional disk overriding the permissions to the default root:disk
  # for more details see: https://github.com/rook/rook/issues/7405
  echo "SUBSYSTEM==\"block\", ATTR{size}==\"29356032\", ACTION==\"add\", RUN+=\"/bin/chown 167:167 $PARTITION\"" | sudo tee -a /etc/udev/rules.d/01-rook.rules
  # for below, see: https://access.redhat.com/solutions/1465913
  block_base="$(basename "${BLOCK}")"
  echo "ACTION==\"add|change\", KERNEL==\"${block_base}\", OPTIONS:=\"nowatch\"" | sudo tee -a /etc/udev/rules.d/99-z-rook-nowatch.rules
  # The partition is still getting reloaded occasionally during operation. See https://github.com/rook/rook/issues/8975
  # Try issuing some disk-inspection commands to jog the system so it won't reload the partitions
  # during OSD provisioning.
  sudo udevadm control --reload-rules || true
  sudo udevadm trigger || true
  time sudo udevadm settle || true
  sudo partprobe || true
  sudo lsblk --noheadings --pairs "${BLOCK}" || true
  sudo sgdisk --print "${BLOCK}" || true
  sudo udevadm info --query=property "${BLOCK}" || true
  sudo lsblk --noheadings --pairs "${PARTITION}" || true
  journalctl -o short-precise --dmesg | tail -40 || true
  cat /etc/fstab || true
}

# Partition the disk for two plain bluestore OSDs.
function create_partitions_for_osds() {
  tests/scripts/create-bluestore-partitions.sh --disk "$BLOCK" --osd-count 2
  sudo lsblk
}

# Partition for one OSD with a separate block.db, and create local-path PVCs.
function create_bluestore_partitions_and_pvcs() {
  BLOCK_PART="$BLOCK"2
  DB_PART="$BLOCK"1
  tests/scripts/create-bluestore-partitions.sh --disk "$BLOCK" --bluestore-type block.db --osd-count 1
  tests/scripts/localPathPV.sh "$BLOCK_PART" "$DB_PART"
}

# Partition for one OSD with separate block.db and block.wal, plus PVCs.
function create_bluestore_partitions_and_pvcs_for_wal(){
  BLOCK_PART="$BLOCK"3
  DB_PART="$BLOCK"1
  WAL_PART="$BLOCK"2
  tests/scripts/create-bluestore-partitions.sh --disk "$BLOCK" --bluestore-type block.wal --osd-count 1
  tests/scripts/localPathPV.sh "$BLOCK_PART" "$DB_PART" "$WAL_PART"
}

# Tail udev monitors into log files (background jobs) for post-mortem debugging.
function collect_udev_logs_in_background() {
  local log_dir="${1:-"/home/runner/work/rook/rook/tests/integration/_output/tests"}"
  mkdir -p "${log_dir}"
  udevadm monitor --property &> "${log_dir}"/udev-monitor-property.txt &
  udevadm monitor --kernel &> "${log_dir}"/udev-monitor-kernel.txt &
  udevadm monitor --udev &> "${log_dir}"/udev-monitor-udev.txt &
}

# Build Rook images, retrying up to 3 times on transient network failures
# (matched against the error substrings defined at the top).
function build_rook() {
  build_type=build
  if [ -n "$1" ]; then
    build_type=$1
  fi
  GOPATH=$(go env GOPATH) make clean
  for _ in $(seq 1 3); do
    if ! o=$(make -j"$(nproc)" IMAGES='ceph' "$build_type"); then
      case "$o" in
        *"$NETWORK_ERROR"*)
          echo "network failure occurred, retrying..."
          continue
          ;;
        *"$SERVICE_UNAVAILABLE_ERROR"*)
          echo "network failure occurred, retrying..."
          continue
          ;;
        *"$INTERNAL_ERROR"*)
          echo "network failure occurred, retrying..."
          continue
          ;;
        *"$INTERNAL_SERVER_ERROR"*)
          echo "network failure occurred, retrying..."
          continue
          ;;
        *)
          # valid failure
          exit 1
      esac
    fi
    # no errors so we break the loop after the first iteration
    break
  done
  # validate build
  tests/scripts/validate_modified_files.sh build
  docker images
  if [[ "$build_type" == "build" ]]; then
    docker tag "$(docker images | awk '/build-/ {print $1}')" rook/ceph:local-build
  fi
}

function build_rook_all() {
  build_rook build.all
}

# Dry-run "kubectl create" over the example manifests to validate their YAML.
function validate_yaml() {
  cd cluster/examples/kubernetes/ceph
  # create the Rook CRDs and other resources
  kubectl create -f crds.yaml -f common.yaml
  # create the volume replication CRDs
  replication_version=v0.1.0
  replication_url="https://raw.githubusercontent.com/csi-addons/volume-replication-operator/${replication_version}/config/crd/bases"
  kubectl create -f "${replication_url}/replication.storage.openshift.io_volumereplications.yaml"
  kubectl create -f "${replication_url}/replication.storage.openshift.io_volumereplicationclasses.yaml"
  # skipping folders and some yamls that are only for openshift.
  manifests="$(find . -maxdepth 1 -type f -name '*.yaml' -and -not -name '*openshift*' -and -not -name 'scc*')"
  with_f_arg="$(echo "$manifests" | awk '{printf " -f %s",$1}')" # don't add newline
  # shellcheck disable=SC2086 # '-f manifest1.yaml -f manifest2.yaml etc.' should not be quoted
  kubectl create ${with_f_arg} --dry-run=client
}

function create_cluster_prerequisites() {
  # this might be called from another function that has already done a cd
  ( cd cluster/examples/kubernetes/ceph && kubectl create -f crds.yaml -f common.yaml )
}

# Point a manifest at the locally built image (unless disabled) and apply it.
function deploy_manifest_with_local_build() {
  if [[ "$USE_LOCAL_BUILD" != "false" ]]; then
    sed -i "s|image: rook/ceph:.*|image: rook/ceph:local-build|g" $1
  fi
  kubectl create -f $1
}

# Deploy the operator plus a full test cluster (object/pool/fs/mirroring/NFS).
function deploy_cluster() {
  cd cluster/examples/kubernetes/ceph
  deploy_manifest_with_local_build operator.yaml
  sed -i "s|#deviceFilter:|deviceFilter: ${BLOCK/\/dev\/}|g" cluster-test.yaml
  kubectl create -f cluster-test.yaml
  kubectl create -f object-test.yaml
  kubectl create -f pool-test.yaml
  kubectl create -f filesystem-test.yaml
  kubectl create -f rbdmirror.yaml
  kubectl create -f filesystem-mirror.yaml
  kubectl create -f nfs-test.yaml
  deploy_manifest_with_local_build toolbox.yaml
}

# Wait for an osd-prepare pod (450s), stream its logs, then wait for OSD 0 (60s)
# and dump best-effort diagnostics.
function wait_for_prepare_pod() {
  get_pod_cmd=(kubectl --namespace rook-ceph get pod --no-headers)
  timeout=450
  start_time="${SECONDS}"
  while [[ $(( SECONDS - start_time )) -lt $timeout ]]; do
    pod="$("${get_pod_cmd[@]}" --selector=app=rook-ceph-osd-prepare --output custom-columns=NAME:.metadata.name,PHASE:status.phase | awk 'FNR <= 1')"
    if echo "$pod" | grep 'Running\|Succeeded\|Failed'; then break; fi
    echo 'waiting for at least one osd prepare pod to be running or finished'
    sleep 5
  done
  pod="$("${get_pod_cmd[@]}" --selector app=rook-ceph-osd-prepare --output name | awk 'FNR <= 1')"
  kubectl --namespace rook-ceph logs --follow "$pod"
  timeout=60
  start_time="${SECONDS}"
  while [[ $(( SECONDS - start_time )) -lt $timeout ]]; do
    pod="$("${get_pod_cmd[@]}" --selector app=rook-ceph-osd,ceph_daemon_id=0 --output custom-columns=NAME:.metadata.name,PHASE:status.phase)"
    if echo "$pod" | grep 'Running'; then break; fi
    echo 'waiting for OSD 0 pod to be running'
    sleep 1
  done
  # getting the below logs is a best-effort attempt, so use '|| true' to allow failures
  pod="$("${get_pod_cmd[@]}" --selector app=rook-ceph-osd,ceph_daemon_id=0 --output name)" || true
  kubectl --namespace rook-ceph logs "$pod" || true
  job="$(kubectl --namespace rook-ceph get job --selector app=rook-ceph-osd-prepare --output name | awk 'FNR <= 1')" || true
  kubectl -n rook-ceph describe "$job" || true
  kubectl -n rook-ceph describe deployment/rook-ceph-osd-0 || true
}

# Validate the cluster reaches the expected daemon/OSD counts ($1 daemons, $2 OSDs).
function wait_for_ceph_to_be_ready() {
  DAEMONS=$1
  OSD_COUNT=$2
  mkdir test
  tests/scripts/validate_cluster.sh "$DAEMONS" "$OSD_COUNT"
  kubectl -n rook-ceph get pods
}

# Verify owner references of resources in the rook-ceph namespace.
function check_ownerreferences() {
  curl -L https://github.com/kubernetes-sigs/kubectl-check-ownerreferences/releases/download/v0.2.0/kubectl-check-ownerreferences-linux-amd64.tar.gz -o kubectl-check-ownerreferences-linux-amd64.tar.gz
  tar xzvf kubectl-check-ownerreferences-linux-amd64.tar.gz
  chmod +x kubectl-check-ownerreferences
  ./kubectl-check-ownerreferences -n rook-ceph
}

# Create an LVM logical volume on the test disk, expose it as a local-path PV,
# and install the Rook CRDs/common resources.
function create_LV_on_disk() {
  sudo sgdisk --zap-all "${BLOCK}"
  VG=test-rook-vg
  LV=test-rook-lv
  sudo pvcreate "$BLOCK"
  # vgcreate is retried because it occasionally fails transiently on the runner.
  sudo vgcreate "$VG" "$BLOCK" || sudo vgcreate "$VG" "$BLOCK" || sudo vgcreate "$VG" "$BLOCK"
  sudo lvcreate -l 100%FREE -n "${LV}" "${VG}"
  tests/scripts/localPathPV.sh /dev/"${VG}"/${LV}
  kubectl create -f cluster/examples/kubernetes/ceph/crds.yaml
  kubectl create -f cluster/examples/kubernetes/ceph/common.yaml
}

# Deploy the primary cluster (rook-ceph) on partition 1 of the test disk.
function deploy_first_rook_cluster() {
  BLOCK=$(sudo lsblk|awk '/14G/ {print $1}'| head -1)
  create_cluster_prerequisites
  cd cluster/examples/kubernetes/ceph/
  deploy_manifest_with_local_build operator.yaml
  yq w -i -d1 cluster-test.yaml spec.dashboard.enabled false
  yq w -i -d1 cluster-test.yaml spec.storage.useAllDevices false
  yq w -i -d1 cluster-test.yaml spec.storage.deviceFilter "${BLOCK}"1
  kubectl create -f cluster-test.yaml
  deploy_manifest_with_local_build toolbox.yaml
}

# Deploy the secondary cluster (rook-ceph-secondary) on partition 2.
function deploy_second_rook_cluster() {
  BLOCK=$(sudo lsblk|awk '/14G/ {print $1}'| head -1)
  cd cluster/examples/kubernetes/ceph/
  NAMESPACE=rook-ceph-secondary envsubst < common-second-cluster.yaml | kubectl create -f -
  sed -i 's/namespace: rook-ceph/namespace: rook-ceph-secondary/g' cluster-test.yaml
  yq w -i -d1 cluster-test.yaml spec.storage.deviceFilter "${BLOCK}"2
  yq w -i -d1 cluster-test.yaml spec.dataDirHostPath "/var/lib/rook-external"
  kubectl create -f cluster-test.yaml
  yq w -i toolbox.yaml metadata.namespace rook-ceph-secondary
  # NOTE(review): toolbox.yaml is passed twice here — presumably only one arg
  # is consumed by deploy_manifest_with_local_build; confirm intent.
  deploy_manifest_with_local_build toolbox.yaml toolbox.yaml
}

# Wait (up to ~10 min each) for an RGW pod to run and its deployment to be ready
# in namespace $1.
function wait_for_rgw() {
  for _ in {1..120}; do
    if [ "$(kubectl -n "$1" get pod -l app=rook-ceph-rgw --no-headers --field-selector=status.phase=Running|wc -l)" -ge 1 ] ; then
      echo "rgw pod is found"
      break
    fi
    echo "waiting for rgw pods"
    sleep 5
  done
  for _ in {1..120}; do
    if [ "$(kubectl -n "$1" get deployment -l app=rook-ceph-rgw -o yaml | yq read - 'items[0].status.readyReplicas')" -ge 1 ] ; then
      echo "rgw is ready"
      break
    fi
    echo "waiting for rgw becomes ready"
    sleep 5
  done
}

function verify_operator_log_message() {
  local message="$1" # param 1: the message to verify exists
  local namespace="${2:-rook-ceph}" # optional param 2: the namespace of the CephCluster (default: rook-ceph)
  kubectl --namespace "$namespace" logs deployment/rook-ceph-operator | grep "$message"
}

function wait_for_operator_log_message() {
  local message="$1" # param 1: the message to look for
  local timeout="$2" # param 2: the timeout for waiting for the message to exist
  local namespace="${3:-rook-ceph}" # optional param 3: the namespace of the CephCluster (default: rook-ceph)
  start_time="${SECONDS}"
  while [[ $(( SECONDS - start_time )) -lt $timeout ]]; do
    if verify_operator_log_message "$message" "$namespace"; then return 0; fi
    sleep 5
  done
  echo "timed out" >&2 && return 1
}

# Delete the operator pod and wait (20s) for its replacement to be Running.
function restart_operator () {
  local namespace="${1:-rook-ceph}" # optional param 1: the namespace of the CephCluster (default: rook-ceph)
  kubectl --namespace "$namespace" delete pod --selector app=rook-ceph-operator
  # wait for new pod to be running
  get_pod_cmd=(kubectl --namespace "$namespace" get pod --selector app=rook-ceph-operator --no-headers)
  timeout 20 bash -c \
    "until [[ -n \"\$(${get_pod_cmd[*]} --field-selector=status.phase=Running 2>/dev/null)\" ]] ; do echo waiting && sleep 1; done"
  "${get_pod_cmd[@]}"
}

# Multisite replication smoke test: upload an object via cluster 1's RGW and
# poll (60s) until it can be fetched from cluster 2, then diff the payloads.
function write_object_to_cluster1_read_from_cluster2() {
  cd cluster/examples/kubernetes/ceph/
  echo "[default]" > s3cfg
  echo "host_bucket = no.way.in.hell" >> ./s3cfg
  echo "use_https = False" >> ./s3cfg
  fallocate -l 1M ./1M.dat
  echo "hello world" >> ./1M.dat
  CLUSTER_1_IP_ADDR=$(kubectl -n rook-ceph get svc rook-ceph-rgw-multisite-store -o jsonpath="{.spec.clusterIP}")
  BASE64_ACCESS_KEY=$(kubectl -n rook-ceph get secrets realm-a-keys -o jsonpath="{.data.access-key}")
  BASE64_SECRET_KEY=$(kubectl -n rook-ceph get secrets realm-a-keys -o jsonpath="{.data.secret-key}")
  ACCESS_KEY=$(echo ${BASE64_ACCESS_KEY} | base64 --decode)
  SECRET_KEY=$(echo ${BASE64_SECRET_KEY} | base64 --decode)
  s3cmd -v -d --config=s3cfg --access_key=${ACCESS_KEY} --secret_key=${SECRET_KEY} --host=${CLUSTER_1_IP_ADDR} mb s3://bkt
  s3cmd -v -d --config=s3cfg --access_key=${ACCESS_KEY} --secret_key=${SECRET_KEY} --host=${CLUSTER_1_IP_ADDR} put ./1M.dat s3://bkt
  CLUSTER_2_IP_ADDR=$(kubectl -n rook-ceph-secondary get svc rook-ceph-rgw-zone-b-multisite-store -o jsonpath="{.spec.clusterIP}")
  timeout 60 bash <<EOF
until s3cmd -v -d --config=s3cfg --access_key=${ACCESS_KEY} --secret_key=${SECRET_KEY} --host=${CLUSTER_2_IP_ADDR} get s3://bkt/1M.dat 1M-get.dat --force; do
  echo "waiting for object to be replicated"
  sleep 5
done
EOF
  diff 1M.dat 1M-get.dat
}

# Tag the freshly built image with the version expected by the helm chart.
function create_helm_tag() {
  helm_tag="$(cat _output/version)"
  build_image="$(docker images | awk '/build-/ {print $1}')"
  docker tag "${build_image}" "rook/ceph:${helm_tag}"
}

FUNCTION="$1"
shift # remove function arg now that we've recorded it
# call the function with the remainder of the user-provided args
$FUNCTION "$@"
package ru.contextguide.yandexservices.changes;

import ru.contextguide.yandexservices.utils.JsonSerializableObject;

import java.util.List;
import java.util.Objects;

/**
 * Response payload of the Changes service "check" call: lists the campaigns,
 * ad groups and ads that were modified, plus campaigns with adjusted
 * statistics. (Javadoc translated from the original Russian.)
 */
public class CheckResponseModified implements JsonSerializableObject {
    // All fields are nullable: absent sections simply mean "no changes".
    private List<Long> campaignIds;
    private List<Long> adGroupIds;
    private List<Long> adIds;
    private List<CampaignStatItem> campaignsStat;

    /**
     * IDs of campaigns whose own parameters changed (changes in child
     * ad groups and ads are not counted).
     */
    public List<Long> getCampaignIds() {
        return campaignIds;
    }

    public void setCampaignIds(List<Long> campaignIds) {
        this.campaignIds = campaignIds;
    }

    /**
     * IDs of ad groups in which changes occurred (keyword/phrase changes are
     * counted too; changes in the ads themselves are not).
     */
    public List<Long> getAdGroupIds() {
        return adGroupIds;
    }

    public void setAdGroupIds(List<Long> adGroupIds) {
        this.adGroupIds = adGroupIds;
    }

    /**
     * IDs of ads in which changes occurred.
     */
    public List<Long> getAdIds() {
        return adIds;
    }

    public void setAdIds(List<Long> adIds) {
        this.adIds = adIds;
    }

    /**
     * Campaigns whose statistics were adjusted.
     */
    public List<CampaignStatItem> getCampaignsStat() {
        return campaignsStat;
    }

    public void setCampaignsStat(List<CampaignStatItem> campaignsStat) {
        this.campaignsStat = campaignsStat;
    }

    @Override
    public String toString() {
        return this.toJson();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        CheckResponseModified that = (CheckResponseModified) o;
        return Objects.equals(campaignIds, that.campaignIds) &&
                Objects.equals(adGroupIds, that.adGroupIds) &&
                Objects.equals(adIds, that.adIds) &&
                Objects.equals(campaignsStat, that.campaignsStat);
    }

    @Override
    public int hashCode() {
        return Objects.hash(campaignIds, adGroupIds, adIds, campaignsStat);
    }
}
// Module imports const cors = require('cors'); const errorHandler = require('errorhandler'); const express = require('express'); const formData = require('express-form-data'); const fs = require('fs'); const mongoose = require('mongoose'); // Function to populate DB const populate = require('./src/routes/api/posts/populate'); // Environment variables require('dotenv').config(); // Route imports const routes = require('./src/routes'); // DB_URL const { DB_URL, NODE_ENV, PORT } = process.env; // Configure isProduction variable const isProduction = NODE_ENV === 'production'; // Initiate app const app = express(); const port = PORT || 8008; // Configure app app.use(cors()); app.use(require('morgan')('dev')); app.use(express.urlencoded({ extended: true })); app.use(express.json()); app.use(formData.parse()); if (!isProduction) app.use(errorHandler()); // DB connection mongoose.connect(DB_URL, { useNewUrlParser: true, useUnifiedTopology: true }); mongoose.set('debug', true); // Routes app.use(routes); app.listen(port, () => { console.log(`Server started on PORT: ${port}`, 'Uploading data to DB...'); populate(null, null, JSON.parse(fs.readFileSync('./lib/posts.json'))); });
package com.professorvennie.bronzeage.tileentitys;

import com.professorvennie.bronzeage.api.steam.ISteamBoiler;
import com.professorvennie.bronzeage.api.steam.ISteamHandler;
import com.professorvennie.bronzeage.api.steam.ISteamTank;
import com.professorvennie.bronzeage.api.steam.SteamTank;
import net.minecraftforge.common.util.ForgeDirection;

/**
 * Steam pipe tile entity: a small buffer tank that pulls steam from an
 * adjacent boiler each tick on its connected sides.
 * <p>
 * Created by ProfessorVennie on 10/21/2014 at 8:03 PM.
 */
public class TileEntitySteamPipe extends TileEntityMod implements ISteamHandler {

    // One slot per side; a non-null entry means the pipe is connected there.
    public ForgeDirection[] connections = new ForgeDirection[6];
    // Internal buffer: created empty with a capacity of 1000 units.
    private SteamTank steamTank;

    public TileEntitySteamPipe() {
        steamTank = new SteamTank(0, 1000);
    }

    @Override
    public void updateEntity() {
        super.updateEntity();
        for (ForgeDirection direction : connections) {
            if (direction != null) {
                switch (direction) {
                    // NOTE(review): only UP is handled; every other connected
                    // direction currently does nothing - confirm whether the
                    // remaining cases were intended to be implemented.
                    case UP:
                        if (worldObj.getTileEntity(xCoord, yCoord + 1, zCoord) instanceof ISteamBoiler) {
                            ISteamBoiler boiler = (ISteamBoiler) worldObj.getTileEntity(xCoord, yCoord + 1, zCoord);
                            // Move a fixed 100 units per tick from the boiler into this pipe.
                            boiler.drain(direction, 100);
                            this.fill(direction, 100);
                        }
                        break;
                }
            }
        }
    }

    @Override
    public ISteamTank getSteamTank() {
        return steamTank;
    }

    /** True if {@code amount} more steam still fits in the buffer. */
    @Override
    public boolean canFill(ForgeDirection direction, int amount) {
        return steamTank.getAmount() + amount <= steamTank.getCapacity();
    }

    /** True if the buffer currently holds at least {@code amount} steam. */
    @Override
    public boolean canDrain(ForgeDirection direction, int amount) {
        return steamTank.getAmount() - amount >= 0;
    }

    /** Add steam, silently dropping the request if it would overflow. */
    @Override
    public void fill(ForgeDirection direction, int amount) {
        if (canFill(direction, amount))
            steamTank.fill(amount);
    }

    /** Remove steam, silently dropping the request if it would underflow. */
    @Override
    public void drain(ForgeDirection direction, int amount) {
        if (canDrain(direction, amount))
            steamTank.drain(amount);
    }

    @Override
    public int getSteamAmount() {
        return steamTank.getAmount();
    }

    @Override
    public int getSteamCapacity() {
        return steamTank.getCapacity();
    }
}
#!/bin/bash
# Guided walk-through: shows highlighted slices of the ocean-drop sources,
# pausing for <enter> between sections.

# code highlighting
# PYGMENTIZE="pygmentize -O full,style=native"
PYGMENTIZE="pygmentize -O full,styles=autumn"
SRC_PATH=../

# Print the horizontal separator used around every section.
rule() {
    echo "-------------------------------------------------------------------------"
}

# Highlight lines $2..$3 of file $1 (relative to $SRC_PATH).
slice() {
    $PYGMENTIZE $SRC_PATH/$1 | sed -n "$2, $3 p"
}

# Show prompt $1, wait for <enter>, then wipe the screen.
pause() {
    read -p "$1"
    clear
}

# One standard section: file $1, lines $2..$3, prompt $4.
section() {
    rule
    echo ""
    slice "$1" "$2" "$3"
    echo ""
    echo ""
    rule
    pause "$4"
}

clear

section ocean_drop/ocean_drop.py 138 170 "Connect to the ocean network via starfish"
section ocean_drop/ocean_drop.py 44 76 "Upload and register assets"
section ocean_drop/ocean_drop.py 170 203 "Upload and register an asset"
section ocean_drop/ocean_drop.py 320 346 "Create listing data for each asset"
section ocean_drop/utils.py 119 122 "Create a unique listing checksum"
section ocean_drop/sync.py 123 145 "Search for a valid listing"
section ocean_drop/sync.py 145 166 "is listing valid"
section ocean_drop/ocean_drop.py 77 112 "Download assets"
section ocean_drop/ocean_drop.py 204 231 "Download an asset Part 1"

# Part 2 is non-standard: two slices with an elision marker between them.
rule
echo ""
slice ocean_drop/ocean_drop.py 204 204
echo " ... 
"
slice ocean_drop/ocean_drop.py 231 270
echo ""
echo ""
rule
pause "Download an asset Part 2"

section ocean_drop/ocean_drop.py 298 320 "Auto topup"

# Closing card with pointers to the repositories.
rule
echo ""
echo ""
echo 'github repo for `ocean-drop` is at https://github.com/DEX-Company/ocean-drop'
echo 'github repo for `starfish` library is at https://github.com/DEX-Company/starfish-py'
echo 'github repo for `squid-py` library is at https://github.com/oceanprotocol/squid-py'
echo ""
echo ""
rule
pause "Find out more"
#!/bin/bash # clean up from previous rm dramsim*.log # Create array of all tests declare -a sdl_arr=(sdl-1.py sdl-2.py sdl-3.py sdl2-1.py sdl3-1.py sdl3-2.py sdl3-3.py sdl4-1.py sdl4-2.py sdl5-1.py sdl8-1.py sdl8-3.py sdl8-4.py sdl9-1.py sdl9-2.py ) declare -a bk_arr=( testBackendChaining.py testBackendDelayBuffer.py testBackendPagedMulti.py testBackendReorderRow.py testBackendReorderSimple.py testBackendSimpleDRAM-1.py testBackendSimpleDRAM-2.py testBackendTimingDRAM.py testBackendVaultSim.py ) declare -a ca_arr=(testDistributedCaches.py testFlushes-2.py testFlushes.py testHashXor.py testIncoherent.py testNoninclusive-1.py testNoninclusive-2.py testPrefetchParams.py testThroughputThrottling.py ) declare -a scr_arr=(testScratchCache1.py testScratchCache2.py testScratchCache3.py testScratchCache4.py testScratchDirect.py testScratchNetwork.py ) arr=() while getopts dscba option do case "${option}" in d) arr+=( "${sdl_arr[@]}" );; s) arr+=( "${scr_arr[@]}" );; c) arr+=( "${ca_arr[@]}" );; b) arr+=( "${bk_arr[@]}" );; a) arr+=( "${sdl_arr[@]}" "${bk_arr[@]}" "${ca_arr[@]}" "${scr_arr[@]}" ) ;; esac done if [ -z "$arr" ]; then arr+=( "${sdl_arr[@]}" "${bk_arr[@]}" "${ca_arr[@]}" "${scr_arr[@]}" ) fi for i in "${arr[@]}" do echo "Running $i" if timeout 60 sst $i > log; then if grep -q "Simulation is complete, simulated time" log; then echo " Complete" else echo " FAILED" cp log fail_${i}.log fi else echo " FAILED" cp log fail_${i}.log fi done
#!/bin/sh
# Apply any pending Prisma migrations to the configured database.
# --preview-feature: "migrate deploy" was still a preview command in the
# Prisma CLI version pinned in node_modules when this was written.
./node_modules/.bin/prisma migrate deploy --preview-feature
/// <summary>
/// Pass-through ASP.NET Core middleware with hook points before and after
/// the rest of the pipeline runs.
/// </summary>
public class CustomMiddleware
{
    private readonly RequestDelegate _next;

    public CustomMiddleware(RequestDelegate next) => _next = next;

    public async Task Invoke(HttpContext context)
    {
        // Perform pre-processing tasks here

        // Hand the request to the next middleware; ConfigureAwait(false)
        // resumes without capturing the current context.
        await _next(context).ConfigureAwait(false);

        // Perform post-processing tasks here
    }
}
#!/bin/sh
# This script updates the Kinvey HTML5 SDK Github Repo with the latest release.
# Runs inside Travis CI: GITHUB_ACCESS_TOKEN, TRAVIS_BUILD_NUMBER and
# TRAVIS_TAG are provided by the CI environment.
dist="$(dirname "$0")/../dist"
tmp="$(dirname "$0")/../tmp"
git config user.name "Travis CI"
git config user.email "travis@travis-ci.org"
# Clone silently so the access token never appears in the build log.
git clone https://${GITHUB_ACCESS_TOKEN}@github.com/Kinvey/phonegap-sdk.git $tmp > /dev/null 2>&1
git -C $tmp fetch origin
git -C $tmp checkout master
# Overlay the freshly built dist/ onto the checkout, then commit, tag, push.
cp -r $dist/. $tmp
git -C $tmp add .
git -C $tmp commit -m "Travis Build: $TRAVIS_BUILD_NUMBER"
git -C $tmp tag $TRAVIS_TAG
git -C $tmp push --tags --quiet --set-upstream origin master
#!/bin/bash
# Remove build artifacts from the source tree and drop the target directory.
echo Cleaning World of Lochercraft ...
# Compiled classes and BlueJ .ctx context files scattered through src/
find ../src -name "*.class" -delete
find ../src -name "*.ctx" -delete
# Entire build output directory
rm -rf ../target
// Barrel file: expose the project components from a single entry point.
export { default as Project } from "./Project.vue";
export { default as ProjectsContainer } from "./ProjectsContainer.vue";
#!/bin/bash
# Dump the cookies from a Firefox cookies.sqlite database (path in $1) to
# stdout in Netscape cookies.txt format.

TMPFILE=""

function cleanup {
    # Remove the scratch copy (if one was made) and exit with the given
    # status (default 0). Fixed: the old cleanup had the rm commented out
    # (leaking the temp file) and always exited 1, so even successful runs
    # reported failure.
    [ -n "$TMPFILE" ] && rm -f "$TMPFILE"
    exit "${1:-0}"
}
trap 'cleanup 1' SIGHUP SIGINT SIGTERM

# This is the format of the sqlite database:
# CREATE TABLE moz_cookies (id INTEGER PRIMARY KEY,
#    name TEXT, value TEXT, host TEXT, path TEXT,
#    expiry INTEGER, lastAccessed INTEGER,
#    isSecure INTEGER, isHttpOnly INTEGER);

# We have to copy cookies.sqlite, because FireFox has a lock on it
TMPFILE=$(mktemp /tmp/cookies.sqlite.XXXXXXXXXX)
cat "$1" >> "$TMPFILE"

# .mode tabs gives the tab-separated cookies.txt column layout; the two CASE
# expressions encode the "domain-wide" and "secure" booleans as TRUE/FALSE.
sqlite3 -separator ' ' "$TMPFILE" << EOF
.mode tabs
.header off
select host,
case substr(host,1,1)='.' when 0 then 'FALSE' else 'TRUE' end,
path,
case isSecure when 0 then 'FALSE' else 'TRUE' end,
expiry,
name,
value
from moz_cookies;
EOF

cleanup 0
/// <reference path="ShaderProgram.ts"/>
/// <reference path="../IShaderProgram.ts"/>
/// <reference path="../shader/V2T2C1A1.ts"/>
/// <reference path="../WebGLUtils.ts"/>
module WOZLLA.renderer {

    /**
     * Creates, caches and disposes WebGL shader programs, keyed by id.
     * @class WOZLLA.renderer.ShaderManager
     * @extends WOZLLA.renderer.IShaderManager
     */
    export class ShaderManager implements IShaderManager {

        _gl:any;        // WebGL rendering context handed in by the renderer
        _shaderMap:any; // id -> IShaderProgram cache

        constructor(gl) {
            this._gl = gl;
            this._shaderMap = {};
            // Eagerly build the default V2T2C1A1 program so it is always available.
            this._shaderMap[IShaderProgram.V2T2C1A1] = this.createShaderProgram(
                shader.V2T2C1A1.VERTEX_SOURCE,
                shader.V2T2C1A1.FRAGMENT_SOURCE,
                shader.V2T2C1A1);
        }

        /** Look up a previously created shader program; undefined if absent. */
        getShaderProgram(id):IShaderProgram {
            return this._shaderMap[id];
        }

        /**
         * Compile and link the given sources, wrap the result in ShaderClass
         * (default ShaderProgram) and register it in the cache under its id.
         */
        createShaderProgram(vertexSource:string, fragmentSource:string, ShaderClass:Function=ShaderProgram):IShaderProgram {
            var result = WebGLUtils.compileProgram(this._gl, vertexSource, fragmentSource);
            var shaderProgram = <IShaderProgram>new (<any>ShaderClass)(result.program, result.vertexShader, result.fragmentShader);
            this._shaderMap[shaderProgram.id] = shaderProgram;
            return shaderProgram;
        }

        /**
         * Delete the program's GL resources and remove it from the cache.
         * NOTE(review): deleteProgram is passed shaderProgram.id - confirm the
         * id field actually holds the WebGLProgram handle (ShaderProgram.ts).
         */
        deleteShaderProgram(shaderProgram:IShaderProgram):void {
            this._gl.deleteProgram(shaderProgram.id);
            this._gl.deleteShader(shaderProgram.vertexShader);
            this._gl.deleteShader(shaderProgram.fragmentShader);
            delete this._shaderMap[shaderProgram.id];
        }

        /** Dispose every cached program. */
        clear():void {
            for(var id in this._shaderMap) {
                this.deleteShaderProgram(this._shaderMap[id]);
            }
        }
    }
}
// Companion module for Allen & Heath SQ mixers: builds the action set for
// the configured console model.
// NOTE(review): `level` is required here but not referenced in this function -
// confirm it is used elsewhere or is dead.
const level = require('./level.json')
const sqconfig = require('./sqconfig.json')

module.exports = {
	// Build and return the Companion action definitions for the configured
	// SQ model, caching per-model channel/bus counts on `this` as a side effect.
	getActions : function() {
		// Per-model counts come from sqconfig.json.
		var sq = sqconfig['config'][this.config.model]
		this.chCount = sq['chCount']
		this.mixCount = sq['mixCount']
		this.grpCount = sq['grpCount']
		this.fxrCount = sq['fxrCount']
		this.fxsCount = sq['fxsCount']
		this.mtxCount = sq['mtxCount']
		this.dcaCount = sq['dcaCount']
		this.muteGroup = sq['muteGroup']
		this.SoftKey = sq['SoftKey']
		this.sceneCount = sq['sceneCount']
		let actions = {}

		// Dropdown choice lists, one per strip type (ids are 0-based).
		this.CHOICES_INPUT_CHANNEL = []
		for (let i = 0; i < this.chCount; i++) { this.CHOICES_INPUT_CHANNEL.push({ label: `CH ${i + 1}`, id: i }) }
		this.CHOICES_SCENES = []
		for (let i = 0; i < this.sceneCount; i++) { this.CHOICES_SCENES.push({ label: `SCENE ${i + 1}`, id: i }) }
		this.CHOICES_MIX = [];
		// id 99 is the sentinel for the main LR bus.
		this.CHOICES_MIX.push({ label: `LR`, id: 99 })
		for (let i = 0; i < this.mixCount; i++) { this.CHOICES_MIX.push({ label: `AUX ${i + 1}`, id: i }) }
		this.CHOICES_GRP = []
		for (let i = 0; i < this.grpCount; i++) { this.CHOICES_GRP.push({ label: `GROUP ${i + 1}`, id: i }) }
		this.CHOICES_FXR = []
		for (let i = 0; i < this.fxrCount; i++) { this.CHOICES_FXR.push({ label: `FX RETURN ${i + 1}`, id: i }) }
		this.CHOICES_FXS = []
		for (let i = 0; i < this.fxsCount; i++) { this.CHOICES_FXS.push({ label: `FX SEND ${i + 1}`, id: i }) }
		this.CHOICES_MTX = []
		for (let i = 0; i < this.mtxCount; i++) { this.CHOICES_MTX.push({ label: `MATRIX ${i + 1}`, id: i }) }

		// dB level choices; ids 1000/998/999 are "last value" / step up / step down.
		this.CHOICES_LEVEL = [ { label: `Last dB value`, id: 1000 }, { label: `Step +1 dB`, id: 998 }, { label: `Step -1 dB`, id: 999 } ]
		// -inf then -85..-40 in 5 dB steps (the first iteration rewrites i to '-inf').
		for (let i = -90; i <= -40; i = i + 5) {
			if (i == -90) { i = '-inf' }
			this.CHOICES_LEVEL.push({ label: `${i} dB`, id: i})
		}
		// -39..-10 in 1 dB steps, then -9.5..+10 in 0.5 dB steps.
		for (let i = -39; i <= -10; i = i + 1) { this.CHOICES_LEVEL.push({ label: `${i} dB`, id: i}) }
		for (let i = -9.5; i <= 10; i = i + 0.5) { this.CHOICES_LEVEL.push({ label: `${i} dB`, id: i}) }

		// Pan positions L100..CTR..R100 in steps of 5, plus step left/right.
		this.CHOICES_PANLEVEL = [ { label: `Step Right`, id: 998 }, { label: `Step Left`, id: 999 } ]
		for (let i = -100; i <= 100; i = i + 5) {
			let pos = i < 0 ? `L${Math.abs(i)}` : i == 0 ? `CTR` : `R${Math.abs(i)}`
			this.CHOICES_PANLEVEL.push({ label: `${pos}`, id: `${pos}`})
		}
		this.CHOICES_DCA = []
		for (let i = 0; i < this.dcaCount; i++) { this.CHOICES_DCA.push({ label: `DCA ${i + 1}`, id: i }) }
		this.CHOICES_MUTEGRP = []
		for (let i = 0; i < this.muteGroup; i++) { this.CHOICES_MUTEGRP.push({ label: `MuteGroup ${i + 1}`, id: i }) }
		this.CHOICES_SOFT = []
		for (let i = 0; i < this.SoftKey; i++) { this.CHOICES_SOFT.push({ label: `SOFTKEY ${i + 1}`, id: i }) }

		// All fader mix choices
		// Flattened list of every fader; ids are offset so each strip type
		// occupies its own contiguous range (DCAs leave a 12-slot gap).
		this.CHOICES_ALLFADER = []
		this.CHOICES_ALLFADER.push({ label: `LR`, id: 0 })
		for (let i = 0; i < this.mixCount; i++) { this.CHOICES_ALLFADER.push({ label: `AUX ${i + 1}`, id: i + 1 }) }
		for (let i = 0; i < this.fxsCount; i++) { this.CHOICES_ALLFADER.push({ label: `FX SEND ${i + 1}`, id: i + 1 + this.mixCount }) }
		for (let i = 0; i < this.mtxCount; i++) { this.CHOICES_ALLFADER.push({ label: `MATRIX ${i + 1}`, id: i + 1 + this.mixCount + this.fxsCount }) }
		for (let i = 0; i < this.dcaCount; i++) { this.CHOICES_ALLFADER.push({ label: `DCA ${i + 1}`, id: i + 1 + this.mixCount + this.fxsCount + this.mtxCount + 12 }) }

		// Shared option builder: strip dropdown + mute toggle/on/off dropdown.
		this.muteOptions = (name, qty, ofs) => {
			this.CHOICES = []
			for (let i = 1; i <= qty; i++) { this.CHOICES.push({ label: `${name} ${i}`, id: i + ofs }) }
			return [ { type: 'dropdown', label: name, id: 'strip', default: 1 + ofs, choices: this.CHOICES, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Mute', id: 'mute', default: 0, choices: [ {label: 'Toggle', id: 0}, {label: 'On', id: 1}, {label: 'Off', id: 2} ], } ]
		}
		// Shared "Fading" dropdown appended to every fader action.
		this.fadeObj = { type: 'dropdown', label: 'Fading', id: 'fade', default: 0, choices: [ {label: `Off`, id: 0}, {label: `1s`, id: 1}, {label: `2s`, id: 2}, {label: `3s`, id: 3}, ], minChoicesForSearch: 0, }
		// Shared option builder: strip dropdown + level dropdown + fade.
		// NOTE(review): this.CHOICES_FADER is never defined in this function -
		// confirm it is set elsewhere or whether CHOICES_LEVEL was intended.
		this.faderOptions = (name, qty, ofs) => {
			this.CHOICES = []
			for (let i = 1; i <= qty; i++) { this.CHOICES.push({ label: `${name} ${i}`, id: i + ofs }) }
			return [ { type: 'dropdown', label: name, id: 'strip', default: 1 + ofs, choices: this.CHOICES, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'level', default: 0, choices: this.CHOICES_FADER, minChoicesForSearch: 0, }, this.fadeObj, ]
		}

		/* Mute actions */
		actions['mute_input'] = { label: 'Mute Input', options: this.muteOptions('Input Channel', this.chCount, -1), }
		actions['mute_lr'] = { label: 'Mute LR', options: [ { type: 'dropdown', label: 'LR', id: 'strip', default: 0, choices: [ { label: `LR`, id: 0 } ], minChoicesForSearch: 99, },{ type: 'dropdown', label: 'Mute', id: 'mute', default: 0, choices: [ {label: 'Toggle', id: 0}, {label: 'On', id: 1}, {label: 'Off', id: 2} ], } ], }
		actions['mute_aux'] = { label: 'Mute Aux', options: this.muteOptions('Aux', 12, -1), }
		// NOTE(review): label 'Aux' and count 12 look copy-pasted from mute_aux -
		// confirm this should not be ('Group', this.grpCount, -1).
		actions['mute_group'] = { label: 'Mute Group', options: this.muteOptions('Aux', 12, -1), }
		actions['mute_matrix'] = { label: 'Mute Matrix', options: this.muteOptions('Matrix', 3, -1), }
		actions['mute_fx_send'] = { label: 'Mute FX Send', options: this.muteOptions('FX Send', 4, -1), }
		actions['mute_fx_return'] = { label: 'Mute FX Return', options: this.muteOptions('FX Return', 8, -1), }
		actions['mute_dca'] = { label: 'Mute DCA', options: this.muteOptions('DCA', 8, -1), }
		actions['mute_mutegroup'] = { label: 'Mute MuteGroup', options: this.muteOptions('Mute MuteGroup', 8, -1), }

		if (this.config.model == 'SQ6' || this.config.model == 'SQ7') {
			// Soft Rotary
		}

		actions['key_soft'] = { label: 'Press Softkey', options: [ { type: 'dropdown', label: 'Soft Key', id: 'softKey', default: '0', choices: this.CHOICES_SOFT, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Key type', id: 'pressedsk', default: '1', choices: [ {id: '0', label: 'Toggle'}, {id: '1', label: 'Press'}, {id: '2', label: 'Release'} ], minChoicesForSearch: 5, } ], }

		/* Routing (assign strip to bus) actions */
		actions['ch_to_mix'] = { label: 'Assign channel to mix', options: [ { type: 'dropdown', label: 'Input Channel', id: 'inputChannel', default: '0', choices: this.CHOICES_INPUT_CHANNEL, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Mix', id: 'mixAssign', default: [], multiple: true, choices: this.CHOICES_MIX, },{ type: 'checkbox', label: 'Active', id: 'mixActive', default: true, } ], }
		actions['ch_to_grp'] = { label: 'Assign channel to group', options: [ { type: 'dropdown', label: 'Input Channel', id: 'inputChannel', default: '0', choices: this.CHOICES_INPUT_CHANNEL, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Group', id: 'grpAssign', default: [], multiple: true, choices: this.CHOICES_GRP, },{ type: 'checkbox', label: 'Active', id: 'grpActive', default: true, } ], };
		actions['grp_to_mix'] = { label: 'Assign group to mix', options: [ { type: 'dropdown', label: 'Group', id: 'inputGrp', default: '0', choices: this.CHOICES_GRP, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Mix', id: 'mixAssign', default: [], multiple: true, choices: this.CHOICES_MIX, },{ type: 'checkbox', label: 'Active', id: 'mixActive', default: true, } ], }
		actions['fxr_to_grp'] = { label: 'Assign FX Return to group', options: [ { type: 'dropdown', label: 'FX Return', id: 'inputFxr', default: '0', choices: this.CHOICES_FXR, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Group', id: 'grpAssign', default: [], multiple: true, choices: this.CHOICES_GRP, },{ type: 'checkbox', label: 'Active', id: 'grpActive', default: true, } ], }
		actions['ch_to_fxs'] = { label: 'Assign channel to FX Send', options: [ { type: 'dropdown', label: 'Input Channel', id: 'inputChannel', default: '0', choices: this.CHOICES_INPUT_CHANNEL, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'FX Send', id: 'fxsAssign', default: [], multiple: true, choices: this.CHOICES_FXS, },{ type: 'checkbox', label: 'Active', id: 'fxsActive', default: true, } ], }
		actions['grp_to_fxs'] = { label: 'Assign group to FX send', options: [ { type: 'dropdown', label: 'Group', id: 'inputGrp', default: '0', choices: this.CHOICES_GRP, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'FX Send', id: 'fxsAssign', default: [], multiple: true, choices: this.CHOICES_FXS, },{ type: 'checkbox', label: 'Active', id: 'fxsActive', default: true, } ], }
		actions['fxr_to_fxs'] = { label: 'Assign FX return to FX send', options: [ { type: 'dropdown', label: 'FX return', id: 'inputFxr', default: '0', choices: this.CHOICES_FXR, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'FX Send', id: 'fxsAssign', default: [], multiple: true, choices: this.CHOICES_FXS, },{ type: 'checkbox', label: 'Active', id: 'fxsActive', default: true, } ], }
		actions['mix_to_mtx'] = { label: 'Assign mix to matrix', options: [ { type: 'dropdown', label: 'Mix', id: 'inputMix', default: '0', choices: this.CHOICES_MIX, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Matrix', id: 'mtxAssign', default: [], multiple: true, choices: this.CHOICES_MTX, },{ type: 'checkbox', label: 'Active', id: 'mtxActive', default: true, } ], }
		actions['grp_to_mtx'] = { label: 'Assign group to matrix', options: [ { type: 'dropdown', label: 'Group', id: 'inputGrp', default: '0', choices: this.CHOICES_GRP, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Matrix', id: 'mtxAssign', default: [], multiple: true, choices: this.CHOICES_MTX, },{ type: 'checkbox', label: 'Active', id: 'mtxActive', default: true, } ], }

		/* Level */
		actions['chlev_to_mix'] = { label: 'Fader channel level to mix', options: [ { type: 'dropdown', label: 'Input channel', id: 'input', default: '0', choices: this.CHOICES_INPUT_CHANNEL, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Mix', id: 'assign', default: '0', choices: this.CHOICES_MIX, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_LEVEL, minChoicesForSearch: 0, }, this.fadeObj, ], }
		actions['grplev_to_mix'] = { label: 'Fader group level to mix', options: [ { type: 'dropdown', label: 'Group', id: 'input', default: '0', choices: this.CHOICES_GRP, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Mix', id: 'assign', default: '0', choices: this.CHOICES_MIX, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_LEVEL, minChoicesForSearch: 0, }, this.fadeObj, ], }
		actions['fxrlev_to_mix'] = { label: 'Fader FX return level to mix', options: [ { type: 'dropdown', label: 'FX return', id: 'input', default: '0', choices: this.CHOICES_FXR, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Mix', id: 'assign', default: '0', choices: this.CHOICES_MIX, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_LEVEL, minChoicesForSearch: 0, }, this.fadeObj, ], }
		actions['fxrlev_to_grp'] = { label: 'Fader FX return level to group', options: [ { type: 'dropdown', label: 'FX return', id: 'input', default: '0', choices: this.CHOICES_FXR, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Group', id: 'assign', default: '0', choices: this.CHOICES_GRP, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_LEVEL, minChoicesForSearch: 0, }, this.fadeObj, ], }
		actions['chlev_to_fxs'] = { label: 'Fader channel level to FX send', options: [ { type: 'dropdown', label: 'Input channel', id: 'input', default: '0', choices: this.CHOICES_INPUT_CHANNEL, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'FX Send', id: 'assign', default: '0', choices: this.CHOICES_FXS, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_LEVEL, minChoicesForSearch: 0, }, this.fadeObj, ], }
		actions['grplev_to_fxs'] = { label: 'Fader group level to FX send', options: [ { type: 'dropdown', label: 'Group', id: 'input', default: '0', choices: this.CHOICES_GRP, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'FX Send', id: 'assign', default: '0', choices: this.CHOICES_FXS, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_LEVEL, minChoicesForSearch: 0, }, this.fadeObj, ], }
		actions['fxslev_to_fxs'] = { label: 'Fader FX return level to FX send', options: [ { type: 'dropdown', label: 'FX return', id: 'input', default: '0', choices: this.CHOICES_FXR, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'FX Send', id: 'assign', default: '0', choices: this.CHOICES_FXS, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_LEVEL, minChoicesForSearch: 0, }, this.fadeObj, ], }
		actions['mixlev_to_mtx'] = { label: 'Fader mix level to matrix', options: [ { type: 'dropdown', label: 'Mix', id: 'input', default: '0', choices: this.CHOICES_MIX, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Matrix', id: 'assign', default: '0', choices: this.CHOICES_MTX, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_LEVEL, minChoicesForSearch: 0, }, this.fadeObj, ], }
		actions['grplev_to_mtx'] = { label: 'Fader group level to matrix', options: [ { type: 'dropdown', label: 'Group', id: 'input', default: '0', choices: this.CHOICES_GRP, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Matrix', id: 'assign', default: '0', choices: this.CHOICES_MTX, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_LEVEL, minChoicesForSearch: 0, }, this.fadeObj, ], }
		actions['level_to_output'] = { label: 'Fader level to output', options: [ { type: 'dropdown', label: 'Fader', id: 'input', default: '0', choices: this.CHOICES_ALLFADER, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_LEVEL, minChoicesForSearch: 0, }, this.fadeObj, ], }

		/* Pan Balance */
		// Each pan action's `subscribe` queries the console for the current
		// value (getLevel builds the MIDI request; the two arrays look like
		// MSB/LSB parameter addresses - confirm against the SQ MIDI spec) and
		// fills `showvar` with the Companion variable that mirrors it.
		actions['chpan_to_mix'] = { label: 'Pan/Bal channel level to mix', options: [ { type: 'dropdown', label: 'Input channel', id: 'input', default: '0', choices: this.CHOICES_INPUT_CHANNEL, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Mix', id: 'assign', default: '0', choices: this.CHOICES_MIX, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_PANLEVEL, minChoicesForSearch: 0, },{ type: 'textinput', label: 'Variable to show level (click config button to refresh)', id: 'showvar', default: '', } ], subscribe: (action) => {
			let opt = action.options
			let val = this.getLevel(opt.input, opt.assign, this.mixCount, [0x50,0x50], [0,0x44])
			this.sendSocket(val.buffer[0])
			opt.showvar = `\$(${this.config.label}:pan_${val.channel[0]}.${val.channel[1]})`
		}, }
		actions['grppan_to_mix'] = { label: 'Pan/Bal group level to mix', options: [ { type: 'dropdown', label: 'Group', id: 'input', default: '0', choices: this.CHOICES_GRP, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Mix', id: 'assign', default: '0', choices: this.CHOICES_MIX, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_PANLEVEL, minChoicesForSearch: 0, },{ type: 'textinput', label: 'Variable to show level (click config button to refresh)', id: 'showvar', default: '', } ], subscribe: (action) => {
			let opt = action.options
			let val = this.getLevel(opt.input, opt.assign, this.mixCount, [0x50,0x55], [0x30,0x04])
			this.sendSocket(val.buffer[0])
			opt.showvar = `\$(${this.config.label}:pan_${val.channel[0]}.${val.channel[1]})`
		}, }
		actions['fxrpan_to_mix'] = { label: 'Pan/Bal FX return level to mix', options: [ { type: 'dropdown', label: 'FX return', id: 'input', default: '0', choices: this.CHOICES_FXR, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Mix', id: 'assign', default: '0', choices: this.CHOICES_MIX, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_PANLEVEL, minChoicesForSearch: 0, },{ type: 'textinput', label: 'Variable to show level (click config button to refresh)', id: 'showvar', default: '', } ], subscribe: (action) => {
			let opt = action.options
			let val = this.getLevel(opt.input, opt.assign, this.mixCount, [0x50,0x56], [0x3C,0x14])
			this.sendSocket(val.buffer[0])
			opt.showvar = `\$(${this.config.label}:pan_${val.channel[0]}.${val.channel[1]})`
		}, }
		// NOTE(review): label says 'Fader ...' but this is a pan action - confirm.
		actions['fxrpan_to_grp'] = { label: 'Fader FX return level to group', options: [ { type: 'dropdown', label: 'FX return', id: 'input', default: '0', choices: this.CHOICES_FXR, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Group', id: 'assign', default: '0', choices: this.CHOICES_GRP, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_PANLEVEL, minChoicesForSearch: 0, },{ type: 'textinput', label: 'Variable to show level (click config button to refresh)', id: 'showvar', default: '', } ], subscribe: (action) => {
			let opt = action.options
			let val = this.getLevel(opt.input, opt.assign, this.grpCount, [0,0x5B], [0,0x34])
			this.sendSocket(val.buffer[0])
			opt.showvar = `\$(${this.config.label}:pan_${val.channel[0]}.${val.channel[1]})`
		}, }
		actions['mixpan_to_mtx'] = { label: 'Pan/Bal mix level to matrix', options: [ { type: 'dropdown', label: 'Mix', id: 'input', default: '0', choices: this.CHOICES_MIX, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Matrix', id: 'assign', default: '0', choices: this.CHOICES_MTX, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_PANLEVEL, minChoicesForSearch: 0, },{ type: 'textinput', label: 'Variable to show level (click config button to refresh)', id: 'showvar', default: '', } ], subscribe: (action) => {
			let opt = action.options
			let val = this.getLevel(opt.input, opt.assign, this.mtxCount, [0x5E,0x5E], [0x24,0x27])
			this.sendSocket(val.buffer[0])
			opt.showvar = `\$(${this.config.label}:pan_${val.channel[0]}.${val.channel[1]})`
		}, }
		actions['grppan_to_mtx'] = { label: 'Pan/Bal group level to matrix', options: [ { type: 'dropdown', label: 'Group', id: 'input', default: '0', choices: this.CHOICES_GRP, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Matrix', id: 'assign', default: '0', choices: this.CHOICES_MTX, minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_PANLEVEL, minChoicesForSearch: 0, },{ type: 'textinput', label: 'Variable to show level (click config button to refresh)', id: 'showvar', default: '', } ], subscribe: (action) => {
			let opt = action.options
			let val = this.getLevel(opt.input, opt.assign, this.mtxCount, [0,0x5E], [0,0x4B])
			this.sendSocket(val.buffer[0])
			opt.showvar = `\$(${this.config.label}:pan_${val.channel[0]}.${val.channel[1]})`
		}, }
		// Only the first 19 ALLFADER entries (LR/aux/fx-send/matrix) can pan.
		actions['pan_to_output'] = { label: 'Pan/Bal level to output', options: [ { type: 'dropdown', label: 'Fader', id: 'input', default: '0', choices: this.CHOICES_ALLFADER.filter(function(val, idx, arr){return idx < 19}), minChoicesForSearch: 0, },{ type: 'dropdown', label: 'Level', id: 'leveldb', default: '0', multiple: false, choices: this.CHOICES_PANLEVEL, minChoicesForSearch: 0, },{ type: 'textinput', label: 'Variable to show level (click config button to refresh)', id: 'showvar', default: '', } ], subscribe: (action) => {
			let opt = action.options
			let val = this.getLevel(opt.input, 99, 0, [0x5F,0], [0,0])
			this.sendSocket(val.buffer[0])
			opt.showvar = `\$(${this.config.label}:pan_${val.channel[0]}.${val.channel[1]})`
		}, }

		// Scene
		actions['scene_recall'] = { label: 'Scene recall', options: [ { type: 'number', label: 'Scene nr.', id: 'scene', default: 1, min: 1, max: this.sceneCount, required: true, } ], }
		// Relative scene change; negative steps backwards.
		actions['scene_step'] = { label: 'Scene step', options: [ { type: 'number', label: 'Scene +/-', id: 'scene', default: 1, min: -50, max: 50, required: true, } ], }
		actions['current_scene'] = { label: 'Current scene', options: [ { type: 'number', label: 'Scene nr.', id: 'scene', default: 1, min: 1, max: this.sceneCount, required: true, } ], }

		return actions
	},
}
class UniqueList:
    """A list-like container that preserves insertion order and ignores duplicates.

    Membership checks use ``==`` via ``in`` / ``list.remove``, so elements only
    need to be equality-comparable (hashability is not required).
    """

    def __init__(self):
        # Backing storage; order reflects the first insertion of each element.
        self.elements = []

    def add(self, element):
        """Append ``element`` unless an equal one is already stored."""
        if element in self.elements:
            return
        self.elements.append(element)

    def remove(self, element):
        """Remove ``element`` if present; removing a missing element is a no-op."""
        try:
            self.elements.remove(element)
        except ValueError:
            # Element was not stored -- silently ignore, matching add()'s tolerance.
            pass

    def get_list(self):
        """Return the internal list itself (not a copy); mutations are visible."""
        return self.elements
// Type definitions for the RDFJS specification 3.0
// Project: https://github.com/rdfjs/representation-task-force
// Definitions by: Ruben Taelman <https://github.com/rubensworks>
//                 Laurens Rietveld <https://github.com/LaurensRietveld>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.3

/// <reference types="node" />

import * as stream from "stream";
import { EventEmitter } from "events";

/* Data Model Interfaces */
/* https://rdf.js.org/data-model-spec/ */

/**
 * Contains an Iri, RDF blank Node, RDF literal, variable name, or a default graph
 * @see NamedNode
 * @see BlankNode
 * @see Literal
 * @see Variable
 * @see DefaultGraph
 */
export type Term = NamedNode | BlankNode | Literal | Variable | DefaultGraph;

/**
 * Contains an IRI.
 */
export interface NamedNode {
    /**
     * Contains the constant "NamedNode".
     */
    termType: "NamedNode";
    /**
     * The IRI of the named node (example: `http://example.org/resource`)
     */
    value: string;

    /**
     * @param other The term to compare with.
     * @return True if and only if other has termType "NamedNode" and the same `value`.
     */
    equals(other: Term | null | undefined): boolean;
}

/**
 * Contains an RDF blank node.
 */
export interface BlankNode {
    /**
     * Contains the constant "BlankNode".
     */
    termType: "BlankNode";
    /**
     * Blank node name as a string, without any serialization specific prefixes,
     * e.g. when parsing,
     * if the data was sourced from Turtle, remove _:,
     * if it was sourced from RDF/XML, do not change the blank node name (example: blank3).
     */
    value: string;

    /**
     * @param other The term to compare with.
     * @return True if and only if other has termType "BlankNode" and the same `value`.
     */
    equals(other: Term | null | undefined): boolean;
}

/**
 * An RDF literal, containing a string with an optional language tag and/or datatype.
 */
export interface Literal {
    /**
     * Contains the constant "Literal".
     */
    termType: "Literal";
    /**
     * The text value, unescaped, without language or type (example: Brad Pitt).
     */
    value: string;
    /**
     * the language as lowercase BCP47 string (examples: en, en-gb)
     * or an empty string if the literal has no language.
     * @link http://tools.ietf.org/html/bcp47
     */
    language: string;
    /**
     * A NamedNode whose IRI represents the datatype of the literal.
     */
    datatype: NamedNode;

    /**
     * @param other The term to compare with.
     * @return True if and only if other has termType "Literal"
     *                   and the same `value`, `language`, and `datatype`.
     */
    equals(other: Term | null | undefined): boolean;
}

/**
 * A variable name.
 */
export interface Variable {
    /**
     * Contains the constant "Variable".
     */
    termType: "Variable";
    /**
     * The name of the variable *without* leading ? (example: a).
     */
    value: string;

    /**
     * @param other The term to compare with.
     * @return True if and only if other has termType "Variable" and the same `value`.
     */
    equals(other: Term | null | undefined): boolean;
}

/**
 * An instance of DefaultGraph represents the default graph.
 * It's only allowed to assign a DefaultGraph to the .graph property of a Quad.
 */
export interface DefaultGraph {
    /**
     * Contains the constant "DefaultGraph".
     */
    termType: "DefaultGraph";
    /**
     * Contains an empty string as constant value.
     */
    value: "";

    /**
     * @param other The term to compare with.
     * @return True if and only if other has termType "DefaultGraph".
     */
    equals(other: Term | null | undefined): boolean;
}

/**
 * The subject, which is a NamedNode, BlankNode or Variable.
 * @see NamedNode
 * @see BlankNode
 * @see Variable
 */
export type Quad_Subject = NamedNode | BlankNode | Variable;

/**
 * The predicate, which is a NamedNode or Variable.
 * @see NamedNode
 * @see Variable
 */
export type Quad_Predicate = NamedNode | Variable;

/**
 * The object, which is a NamedNode, Literal, BlankNode or Variable.
 * @see NamedNode
 * @see Literal
 * @see BlankNode
 * @see Variable
 */
export type Quad_Object = NamedNode | Literal | BlankNode | Variable;

/**
 * The named graph, which is a DefaultGraph, NamedNode, BlankNode or Variable.
 * @see DefaultGraph
 * @see NamedNode
 * @see BlankNode
 * @see Variable
 */
export type Quad_Graph = DefaultGraph | NamedNode | BlankNode | Variable;

/**
 * An RDF quad, taking any Term in its positions, containing the subject, predicate, object and graph terms.
 */
export interface BaseQuad {
    /**
     * The subject.
     * @see Quad_Subject
     */
    subject: Term;
    /**
     * The predicate.
     * @see Quad_Predicate
     */
    predicate: Term;
    /**
     * The object.
     * @see Quad_Object
     */
    object: Term;
    /**
     * The named graph.
     * @see Quad_Graph
     */
    graph: Term;

    /**
     * @param other The term to compare with.
     * @return True if and only if the argument is a) of the same type b) has all components equal.
     */
    equals(other: BaseQuad | null | undefined): boolean;
}

/**
 * An RDF quad, containing the subject, predicate, object and graph terms.
 */
export interface Quad extends BaseQuad {
    /**
     * The subject.
     * @see Quad_Subject
     */
    subject: Quad_Subject;
    /**
     * The predicate.
     * @see Quad_Predicate
     */
    predicate: Quad_Predicate;
    /**
     * The object.
     * @see Quad_Object
     */
    object: Quad_Object;
    /**
     * The named graph.
     * @see Quad_Graph
     */
    graph: Quad_Graph;

    /**
     * @param other The term to compare with.
     * @return True if and only if the argument is a) of the same type b) has all components equal.
     */
    equals(other: BaseQuad | null | undefined): boolean;
}

/**
 * A factory for instantiating RDF terms and quads.
 */
export interface DataFactory<OutQuad extends BaseQuad = Quad, InQuad extends BaseQuad = OutQuad> {
    /**
     * @param value The IRI for the named node.
     * @return A new instance of NamedNode.
     * @see NamedNode
     */
    namedNode(value: string): NamedNode;

    /**
     * @param value The optional blank node identifier.
     * @return A new instance of BlankNode.
     *                         If the `value` parameter is undefined a new identifier
     *                         for the blank node is generated for each call.
     * @see BlankNode
     */
    blankNode(value?: string): BlankNode;

    /**
     * @param value              The literal value.
     * @param languageOrDatatype The optional language or datatype.
     *                                    If `languageOrDatatype` is a NamedNode,
     *                                    then it is used for the value of `Literal.datatype`.
     *                                    Otherwise `languageOrDatatype` is used for the value
     *                                    of `Literal.language`.
     * @return A new instance of Literal.
     * @see Literal
     */
    literal(value: string, languageOrDatatype?: string | NamedNode): Literal;

    /**
     * This method is optional.
     * @param value The variable name
     * @return A new instance of Variable.
     * @see Variable
     */
    variable?(value: string): Variable;

    /**
     * @return An instance of DefaultGraph.
     */
    defaultGraph(): DefaultGraph;

    /**
     * @param subject   The quad subject term.
     * @param predicate The quad predicate term.
     * @param object    The quad object term.
     * @param graph     The quad graph term.
     * @return A new instance of Quad.
     * @see Quad
     */
    quad(subject: InQuad['subject'], predicate: InQuad['predicate'], object: InQuad['object'], graph?: InQuad['graph']): OutQuad;
}

/* Stream Interfaces */
/* https://rdf.js.org/stream-spec/ */

/**
 * A quad stream.
 * This stream is only readable, not writable.
 *
 * Events:
 * * `readable()`:           When a quad can be read from the stream, it will emit this event.
 * * `end()`:                This event fires when there will be no more quads to read.
 * * `error(error: Error)`:  This event fires if any error occurs. The `message` describes the error.
 * * `data(quad: RDF.Quad)`: This event is emitted for every quad that can be read from the stream.
 *                           The quad is the content of the data.
 * Optional events:
 * * prefix(prefix: string, iri: RDF.NamedNode): This event is emitted every time a prefix is mapped to some IRI.
 */
export interface Stream<Q extends BaseQuad = Quad> extends EventEmitter {
    /**
     * This method pulls a quad out of the internal buffer and returns it.
     * If there is no quad available, then it will return null.
     *
     * @return A quad from the internal buffer, or null if none is available.
     */
    read(): Q;
}

/**
 * A Source is an object that emits quads.
 *
 * It can contain quads but also generate them on the fly.
 *
 * For example, parsers and transformations which generate quads can implement the Source interface.
 */
export interface Source<Q extends BaseQuad = Quad> {
    /**
     * Returns a stream that processes all quads matching the pattern.
     *
     * @param subject   The optional exact subject or subject regex to match.
     * @param predicate The optional exact predicate or predicate regex to match.
     * @param object    The optional exact object or object regex to match.
     * @param graph     The optional exact graph or graph regex to match.
     * @return The resulting quad stream.
     */
    match(subject?: Term | RegExp, predicate?: Term | RegExp, object?: Term | RegExp, graph?: Term | RegExp): Stream<Q>;
}

/**
 * A Sink is an object that consumes data from different kinds of streams.
 *
 * It can store the content of the stream or do some further processing.
 *
 * For example parsers, serializers, transformations and stores can implement the Sink interface.
 */
export interface Sink<InputStream extends EventEmitter, OutputStream extends EventEmitter> {
    /**
     * Consumes the given stream.
     *
     * The `end` and `error` events are used like described in the Stream interface.
     * Depending on the use case, subtypes of EventEmitter or Stream are used.
     * @see Stream
     *
     * @param stream The stream that will be consumed.
     * @return The resulting event emitter.
     */
    import(stream: InputStream): OutputStream;
}

/**
 * A Store is an object that is usually used to persist quads.
 *
 * The interface allows removing quads, beside read and write access.
 * The quads can be stored locally or remotely.
 *
 * Access to stores, LDP or SPARQL endpoints can be implemented with a Store interface.
 */
export interface Store<Q extends BaseQuad = Quad> extends Source<Q>, Sink<Stream<Q>, EventEmitter> {
    /**
     * Removes all streamed quads.
     *
     * The end and error events are used like described in the Stream interface.
     * @see Stream
     *
     * @param stream The stream that will be consumed.
     * @return The resulting event emitter.
     */
    remove(stream: Stream<Q>): EventEmitter;

    /**
     * All quads matching the pattern will be removed.
     *
     * The `end` and `error` events are used like described in the Stream interface.
     * @see Stream
     *
     * @param subject   The optional exact subject or subject regex to match.
     * @param predicate The optional exact predicate or predicate regex to match.
     * @param object    The optional exact object or object regex to match.
     * @param graph     The optional exact graph or graph regex to match.
     * @return The resulting event emitter.
     */
    removeMatches(subject?: Term | RegExp, predicate?: Term | RegExp, object?: Term | RegExp, graph?: Term | RegExp)
        : EventEmitter;

    /**
     * Deletes the given named graph.
     *
     * The `end` and `error` events are used like described in the Stream interface.
     * @see Stream
     *
     * @param graph The graph term or string to match.
     * @return The resulting event emitter.
     */
    deleteGraph(graph: Q['graph'] | string): EventEmitter;
}

/* Dataset Interfaces */
/* https://rdf.js.org/dataset-spec/ */

export interface DatasetCore<OutQuad extends BaseQuad = Quad, InQuad extends BaseQuad = OutQuad> {
    /**
     * A non-negative integer that specifies the number of quads in the set.
     */
    readonly size: number;

    /**
     * Adds the specified quad to the dataset.
     *
     * Existing quads, as defined in `Quad.equals`, will be ignored.
     */
    add(quad: InQuad): this;

    /**
     * Removes the specified quad from the dataset.
     */
    delete(quad: InQuad): this;

    /**
     * Determines whether a dataset includes a certain quad.
     */
    has(quad: InQuad): boolean;

    /**
     * Returns a new dataset that is comprised of all quads in the current instance matching the given arguments.
     *
     * The logic described in {@link https://rdf.js.org/dataset-spec/#quad-matching|Quad Matching} is applied for each
     * quad in this dataset to check if it should be included in the output dataset.
     *
     * This method always returns a new DatasetCore, even if that dataset contains no quads.
     *
     * Since a `DatasetCore` is an unordered set, the order of the quads within the returned sequence is arbitrary.
     *
     * @param subject   The optional exact subject to match.
     * @param predicate The optional exact predicate to match.
     * @param object    The optional exact object to match.
     * @param graph     The optional exact graph to match.
     */
    match(subject?: Term | null, predicate?: Term | null, object?: Term | null, graph?: Term | null): this;

    [Symbol.iterator](): Iterator<OutQuad>;
}

export interface DatasetCoreFactory<OutQuad extends BaseQuad = Quad, InQuad extends BaseQuad = OutQuad, D extends DatasetCore<OutQuad, InQuad> = DatasetCore<OutQuad, InQuad>> {
    /**
     * Returns a new dataset and imports all quads, if given.
     */
    dataset(quads?: InQuad[]): D;
}

export interface Dataset<OutQuad extends BaseQuad = Quad, InQuad extends BaseQuad = OutQuad> extends DatasetCore<OutQuad, InQuad> {
    /**
     * Imports the quads into this dataset.
     *
     * This method differs from `Dataset.union` in that it adds all `quads` to the current instance, rather than
     * combining `quads` and the current instance to create a new instance.
     */
    addAll(quads: Dataset<InQuad>|InQuad[]): this;

    /**
     * Returns `true` if the current instance is a superset of the given dataset; differently put: if the given dataset
     * is a subset of, is contained in the current dataset.
     *
     * Blank Nodes will be normalized.
     */
    contains(other: Dataset<InQuad>): boolean;

    /**
     * This method removes the quads in the current instance that match the given arguments.
     *
     * The logic described in {@link https://rdf.js.org/dataset-spec/#quad-matching|Quad Matching} is applied for each
     * quad in this dataset to select the quads which will be deleted.
     *
     * @param subject   The optional exact subject to match.
     * @param predicate The optional exact predicate to match.
     * @param object    The optional exact object to match.
     * @param graph     The optional exact graph to match.
     */
    deleteMatches(subject?: Term, predicate?: Term, object?: Term, graph?: Term): this;

    /**
     * Returns a new dataset that contains all quads from the current dataset, not included in the given dataset.
     */
    difference(other: Dataset<InQuad>): this;

    /**
     * Returns true if the current instance contains the same graph structure as the given dataset.
     *
     * Blank Nodes will be normalized.
     */
    equals(other: Dataset<InQuad>): boolean;

    /**
     * Universal quantification method, tests whether every quad in the dataset passes the test implemented by the
     * provided `iteratee`.
     *
     * This method immediately returns boolean `false` once a quad that does not pass the test is found.
     *
     * This method always returns boolean `true` on an empty dataset.
     *
     * This method is aligned with `Array.prototype.every()` in ECMAScript-262.
     */
    every(iteratee: QuadFilterIteratee<OutQuad>['test']): boolean;

    /**
     * Creates a new dataset with all the quads that pass the test implemented by the provided `iteratee`.
     *
     * This method is aligned with Array.prototype.filter() in ECMAScript-262.
     */
    filter(iteratee: QuadFilterIteratee<OutQuad>['test']): this;

    /**
     * Executes the provided `iteratee` once on each quad in the dataset.
     *
     * This method is aligned with `Array.prototype.forEach()` in ECMAScript-262.
     */
    forEach(iteratee: QuadRunIteratee<OutQuad>['run']): void;

    /**
     * Imports all quads from the given stream into the dataset.
     *
     * The stream events `end` and `error` are wrapped in a Promise.
     */
    import(stream: Stream<InQuad>): Promise<this>;

    /**
     * Returns a new dataset containing all quads from the current dataset that are also included in the given dataset.
     */
    intersection(other: Dataset<InQuad>): this;

    /**
     * Returns a new dataset containing all quads returned by applying `iteratee` to each quad in the current dataset.
     */
    map(iteratee: QuadMapIteratee<OutQuad>['map']): this;

    /**
     * This method calls the `iteratee` on each `quad` of the `Dataset`. The first time the `iteratee` is called, the
     * `accumulator` value is the `initialValue` or, if not given, equals to the first quad of the `Dataset`. The return
     * value of the `iteratee` is used as `accumulator` value for the next calls.
     *
     * This method returns the return value of the last `iteratee` call.
     *
     * This method is aligned with `Array.prototype.reduce()` in ECMAScript-262.
     */
    reduce<A = any>(iteratee: QuadReduceIteratee<A, OutQuad>['run'], initialValue?: A): A;

    /**
     * Existential quantification method, tests whether some quads in the dataset pass the test implemented by the
     * provided `iteratee`.
     *
     * This method immediately returns boolean `true` once a quad that passes the test is found.
     *
     * This method is aligned with `Array.prototype.some()` in ECMAScript-262.
     */
    some(iteratee: QuadFilterIteratee<OutQuad>['test']): boolean;

    /**
     * Returns the set of quads within the dataset as a host language native sequence, for example an `Array` in
     * ECMAScript-262.
     *
     * Since a `Dataset` is an unordered set, the order of the quads within the returned sequence is arbitrary.
     */
    toArray(): OutQuad[];

    /**
     * Returns an N-Quads string representation of the dataset, preprocessed with
     * {@link https://json-ld.github.io/normalization/spec/|RDF Dataset Normalization} algorithm.
     */
    toCanonical(): string;

    /**
     * Returns a stream that contains all quads of the dataset.
     */
    toStream(): Stream<OutQuad>;

    /**
     * Returns an N-Quads string representation of the dataset.
     *
     * No prior normalization is required, therefore the results for the same quads may vary depending on the `Dataset`
     * implementation.
     */
    toString(): string;

    /**
     * Returns a new `Dataset` that is a concatenation of this dataset and the quads given as an argument.
     */
    union(quads: Dataset<InQuad>): this;
}

export interface DatasetFactory<OutQuad extends BaseQuad = Quad, InQuad extends BaseQuad = OutQuad, D extends Dataset<OutQuad, InQuad> = Dataset<OutQuad, InQuad>> extends DatasetCoreFactory<OutQuad, InQuad, D> {
    /**
     * Returns a new dataset and imports all quads, if given.
     */
    dataset(quads?: Dataset<InQuad>|InQuad[]): D;
}

export interface QuadFilterIteratee<Q extends BaseQuad = Quad> {
    /**
     * A callable function that returns `true` if the input quad passes the test this function implements.
     */
    test(quad: Q, dataset: Dataset<Q>): boolean;
}

export interface QuadMapIteratee<Q extends BaseQuad = Quad> {
    /**
     * A callable function that can be executed on a quad and returns a quad.
     *
     * The returned quad can be the given quad or a new one.
     */
    map(quad: Q, dataset: Dataset<Q>): Q;
}

export interface QuadReduceIteratee<A = any, Q extends BaseQuad = Quad> {
    /**
     * A callable function that can be executed on an accumulator and quad and returns a new accumulator.
     */
    run(accumulator: A, quad: Q, dataset: Dataset<Q>): A;
}

export interface QuadRunIteratee<Q extends BaseQuad = Quad> {
    /**
     * A callable function that can be executed on a quad.
     */
    run(quad: Q, dataset: Dataset<Q>): void;
}

export {};
<filename>src/main/java/com/balceda/reservationsapp/repository/ClientRepository.java package com.balceda.reservationsapp.repository; import java.util.List; import org.springframework.data.jpa.repository.JpaRepository; import com.balceda.reservationsapp.model.Client; public interface ClientRepository extends JpaRepository<Client, String> { public List<Client> findByLastName(String lastName); public Client findByPhone(String phone); public Client findByEmail(String email); public Client findByIdNumber(String idNumber); }
<filename>lang/py/cookbook/v2/source/cb2_17_3_exm_2.py .def(-self)
class ApplicationController < ActionController::Base
  # Raise on a CSRF token mismatch instead of silently nulling the session.
  protect_from_forgery with: :exception

  # When the DEMO_LOGIN environment variable is set (checked once, at class
  # load time), tag every request with the :demo_login view variant so demo
  # templates are rendered.
  before_action { request.variant = :demo_login } if ENV['DEMO_LOGIN'].present?
end
class User:
    """In-memory representation of a Gitee user account and its granted roles."""

    def __init__(self, gitee_id, gitee_login, gitee_name, phone, avatar_url, cla_email, roles):
        # Identifier of the account on Gitee, as supplied by the caller.
        self.gitee_id = gitee_id
        # Gitee login handle.
        self.gitee_login = gitee_login
        # Display name shown on the Gitee profile.
        self.gitee_name = gitee_name
        # Contact phone number; not validated here.
        self.phone = phone
        # URL of the user's avatar image.
        self.avatar_url = avatar_url
        # Email address associated with the CLA signature.
        self.cla_email = cla_email
        # Roles granted to this user, stored exactly as given
        # (presumably a collection of role names -- TODO confirm against callers).
        self.roles = roles

    def _get_roles(self):
        # Internal accessor: returns the stored roles object itself, not a copy.
        return self.roles
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved. # This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. require 'uri' require 'logger' # rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength module OCI # This site describes all the Rest endpoints of OSP Gateway. class OspGateway::SubscriptionServiceClient # Client used to make HTTP requests. # @return [OCI::ApiClient] attr_reader :api_client # Fully qualified endpoint URL # @return [String] attr_reader :endpoint # The default retry configuration to apply to all operations in this service client. This can be overridden # on a per-operation basis. The default retry configuration value is `nil`, which means that an operation # will not perform any retries # @return [OCI::Retry::RetryConfig] attr_reader :retry_config # The region, which will usually correspond to a value in {OCI::Regions::REGION_ENUM}. # @return [String] attr_reader :region # rubocop:disable Metrics/AbcSize, Metrics/CyclomaticComplexity, Layout/EmptyLines, Metrics/PerceivedComplexity # Creates a new SubscriptionServiceClient. # Notes: # If a config is not specified, then the global OCI.config will be used. # # This client is not thread-safe # # Either a region or an endpoint must be specified. If an endpoint is specified, it will be used instead of the # region. A region may be specified in the config or via or the region parameter. If specified in both, then the # region parameter will be used. # @param [Config] config A Config object. # @param [String] region A region used to determine the service endpoint. This will usually # correspond to a value in {OCI::Regions::REGION_ENUM}, but may be an arbitrary string. 
# @param [String] endpoint The fully qualified endpoint URL # @param [OCI::BaseSigner] signer A signer implementation which can be used by this client. If this is not provided then # a signer will be constructed via the provided config. One use case of this parameter is instance principals authentication, # so that the instance principals signer can be provided to the client # @param [OCI::ApiClientProxySettings] proxy_settings If your environment requires you to use a proxy server for outgoing HTTP requests # the details for the proxy can be provided in this parameter # @param [OCI::Retry::RetryConfig] retry_config The retry configuration for this service client. This represents the default retry configuration to # apply across all operations. This can be overridden on a per-operation basis. The default retry configuration value is `nil`, which means that an operation # will not perform any retries def initialize(config: nil, region: nil, endpoint: nil, signer: nil, proxy_settings: nil, retry_config: nil) # If the signer is an InstancePrincipalsSecurityTokenSigner or SecurityTokenSigner and no config was supplied (they are self-sufficient signers) # then create a dummy config to pass to the ApiClient constructor. If customers wish to create a client which uses instance principals # and has config (either populated programmatically or loaded from a file), they must construct that config themselves and then # pass it to this constructor. # # If there is no signer (or the signer is not an instance principals signer) and no config was supplied, this is not valid # so try and load the config from the default file. config = OCI::Config.validate_and_build_config_with_signer(config, signer) signer = OCI::Signer.config_file_auth_builder(config) if signer.nil? 
@api_client = OCI::ApiClient.new(config, signer, proxy_settings: proxy_settings) @retry_config = retry_config if endpoint @endpoint = endpoint + '/20191001' else region ||= config.region region ||= signer.region if signer.respond_to?(:region) self.region = region end logger.info "SubscriptionServiceClient endpoint set to '#{@endpoint}'." if logger end # rubocop:enable Metrics/AbcSize, Metrics/CyclomaticComplexity, Layout/EmptyLines, Metrics/PerceivedComplexity # Set the region that will be used to determine the service endpoint. # This will usually correspond to a value in {OCI::Regions::REGION_ENUM}, # but may be an arbitrary string. def region=(new_region) @region = new_region raise 'A region must be specified.' unless @region @endpoint = OCI::Regions.get_service_endpoint_for_template(@region, 'https://ospap.oracle.com') + '/20191001' logger.info "SubscriptionServiceClient endpoint set to '#{@endpoint} from region #{@region}'." if logger end # @return [Logger] The logger for this client. May be nil. def logger @api_client.config.logger end # rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity # rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists # rubocop:disable Metrics/MethodLength, Layout/EmptyLines # PSD2 authorization for subscription payment # @param [String] osp_home_region The home region's public name of the logged in user. # # @param [String] subscription_id Subscription id(OCID). # @param [String] compartment_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment. # # @param [OCI::OspGateway::Models::AuthorizeSubscriptionPaymentDetails] authorize_subscription_payment_details subscription payment request. # @param [Hash] opts the optional parameters # @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. 
If no key is provided then the service-level # retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry # @option opts [String] :if_match For optimistic concurrency control. In the PUT or DELETE call # for a resource, set the `if-match` parameter to the value of the # etag from a previous GET or POST response for that resource. # The resource will be updated or deleted only if the etag you # provide matches the resource's current etag value. # # @option opts [String] :opc_retry_token For requests that are not idempotent (creates being the main place of interest), THE APIs should take a header # called opc-retry-token to identify the customer desire across requests, to introduce some level of idempotency. # # @option opts [String] :opc_request_id The unique Oracle-assigned identifier for the request. If you need to contact Oracle about a # particular request, please provide the request ID. # # @return [Response] A Response object with data of type {OCI::OspGateway::Models::AuthorizeSubscriptionPaymentReceipt AuthorizeSubscriptionPaymentReceipt} # @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/ospgateway/authorize_subscription_payment.rb.html) to see an example of how to use authorize_subscription_payment API. def authorize_subscription_payment(osp_home_region, subscription_id, compartment_id, authorize_subscription_payment_details, opts = {}) logger.debug 'Calling operation SubscriptionServiceClient#authorize_subscription_payment.' if logger raise "Missing the required parameter 'osp_home_region' when calling authorize_subscription_payment." if osp_home_region.nil? raise "Missing the required parameter 'subscription_id' when calling authorize_subscription_payment." if subscription_id.nil? raise "Missing the required parameter 'compartment_id' when calling authorize_subscription_payment." if compartment_id.nil? 
raise "Missing the required parameter 'authorize_subscription_payment_details' when calling authorize_subscription_payment." if authorize_subscription_payment_details.nil? raise "Parameter value for 'subscription_id' must not be blank" if OCI::Internal::Util.blank_string?(subscription_id) path = '/subscriptions/{subscriptionId}/actions/psd2auth'.sub('{subscriptionId}', subscription_id.to_s) operation_signing_strategy = :standard # rubocop:disable Style/NegatedIf # Query Params query_params = {} query_params[:ospHomeRegion] = osp_home_region query_params[:compartmentId] = compartment_id # Header Params header_params = {} header_params[:accept] = 'application/json' header_params[:'content-type'] = 'application/json' header_params[:'if-match'] = opts[:if_match] if opts[:if_match] header_params[:'opc-retry-token'] = opts[:opc_retry_token] if opts[:opc_retry_token] header_params[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id] # rubocop:enable Style/NegatedIf header_params[:'opc-retry-token'] ||= OCI::Retry.generate_opc_retry_token post_body = @api_client.object_to_http_body(authorize_subscription_payment_details) # rubocop:disable Metrics/BlockLength OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'SubscriptionServiceClient#authorize_subscription_payment') do @api_client.call_api( :POST, path, endpoint, header_params: header_params, query_params: query_params, operation_signing_strategy: operation_signing_strategy, body: post_body, return_type: 'OCI::OspGateway::Models::AuthorizeSubscriptionPaymentReceipt' ) end # rubocop:enable Metrics/BlockLength end # rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity # rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists # rubocop:enable Metrics/MethodLength, Layout/EmptyLines # rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity # rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists # 
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines

# Gets the subscription plan identified by +subscription_id+.
#
# @param [String] subscription_id Subscription id (OCID).
# @param [String] osp_home_region The home region's public name of the logged in user.
# @param [String] compartment_id The OCID of the compartment.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config Per-operation retry configuration.
#   If the key is absent, the client-level {#retry_config} is used; an explicit nil disables retries.
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier, quote it when contacting Oracle support.
# @return [Response] A Response object with data of type {OCI::OspGateway::Models::Subscription Subscription}
def get_subscription(subscription_id, osp_home_region, compartment_id, opts = {})
  logger.debug 'Calling operation SubscriptionServiceClient#get_subscription.' if logger

  # Required parameters are validated client-side before any network call is made.
  raise "Missing the required parameter 'subscription_id' when calling get_subscription." if subscription_id.nil?
  raise "Missing the required parameter 'osp_home_region' when calling get_subscription." if osp_home_region.nil?
  raise "Missing the required parameter 'compartment_id' when calling get_subscription." if compartment_id.nil?
  raise "Parameter value for 'subscription_id' must not be blank" if OCI::Internal::Util.blank_string?(subscription_id)

  # The path parameter is substituted into the URL template.
  path = '/subscriptions/{subscriptionId}'.sub('{subscriptionId}', subscription_id.to_s)
  operation_signing_strategy = :standard

  # rubocop:disable Style/NegatedIf
  # Query Params
  query_params = {}
  query_params[:ospHomeRegion] = osp_home_region
  query_params[:compartmentId] = compartment_id

  # Header Params
  header_params = {}
  header_params[:accept] = 'application/json'
  header_params[:'content-type'] = 'application/json'
  header_params[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]
  # rubocop:enable Style/NegatedIf

  post_body = nil

  # rubocop:disable Metrics/BlockLength
  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'SubscriptionServiceClient#get_subscription') do
    @api_client.call_api(
      :GET,
      path,
      endpoint,
      header_params: header_params,
      query_params: query_params,
      operation_signing_strategy: operation_signing_strategy,
      body: post_body,
      return_type: 'OCI::OspGateway::Models::Subscription'
    )
  end
  # rubocop:enable Metrics/BlockLength
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Style/IfUnlessModifier, Metrics/ParameterLists, Metrics/MethodLength, Layout/EmptyLines

# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Style/IfUnlessModifier, Metrics/ParameterLists, Metrics/MethodLength, Layout/EmptyLines

# Lists the subscriptions of a compartment (paginated).
#
# @param [String] osp_home_region The home region's public name of the logged in user.
# @param [String] compartment_id The OCID of the compartment.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config Per-operation retry configuration (see #get_subscription).
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for support requests.
# @option opts [String] :page Pagination cursor: the opc-next-page header from the previous "List" call.
# @option opts [Integer] :limit Maximum results per page. (default to 500)
# @option opts [String] :sort_by Single sort field. (default to INVOICE_NO)
#   Allowed values are: INVOICE_NO, REF_NO, STATUS, TYPE, INVOICE_DATE, DUE_DATE, PAYM_REF, TOTAL_AMOUNT, BALANCE_DUE
# @option opts [String] :sort_order The sort order to use, either ASC or DESC. (default to ASC)
# @return [Response] A Response object with data of type {OCI::OspGateway::Models::SubscriptionCollection SubscriptionCollection}
def list_subscriptions(osp_home_region, compartment_id, opts = {})
  logger.debug 'Calling operation SubscriptionServiceClient#list_subscriptions.' if logger

  raise "Missing the required parameter 'osp_home_region' when calling list_subscriptions." if osp_home_region.nil?
  raise "Missing the required parameter 'compartment_id' when calling list_subscriptions." if compartment_id.nil?

  # Enum-style parameters are validated against their allowed values up front.
  if opts[:sort_by] && !%w[INVOICE_NO REF_NO STATUS TYPE INVOICE_DATE DUE_DATE PAYM_REF TOTAL_AMOUNT BALANCE_DUE].include?(opts[:sort_by])
    raise 'Invalid value for "sort_by", must be one of INVOICE_NO, REF_NO, STATUS, TYPE, INVOICE_DATE, DUE_DATE, PAYM_REF, TOTAL_AMOUNT, BALANCE_DUE.'
  end

  if opts[:sort_order] && !%w[ASC DESC].include?(opts[:sort_order])
    raise 'Invalid value for "sort_order", must be one of ASC, DESC.'
  end

  path = '/subscriptions'
  operation_signing_strategy = :standard

  # rubocop:disable Style/NegatedIf
  # Query Params
  query_params = {}
  query_params[:ospHomeRegion] = osp_home_region
  query_params[:compartmentId] = compartment_id
  query_params[:page] = opts[:page] if opts[:page]
  query_params[:limit] = opts[:limit] if opts[:limit]
  query_params[:sortBy] = opts[:sort_by] if opts[:sort_by]
  query_params[:sortOrder] = opts[:sort_order] if opts[:sort_order]

  # Header Params
  header_params = {}
  header_params[:accept] = 'application/json'
  header_params[:'content-type'] = 'application/json'
  header_params[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]
  # rubocop:enable Style/NegatedIf

  post_body = nil

  # rubocop:disable Metrics/BlockLength
  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'SubscriptionServiceClient#list_subscriptions') do
    @api_client.call_api(
      :GET,
      path,
      endpoint,
      header_params: header_params,
      query_params: query_params,
      operation_signing_strategy: operation_signing_strategy,
      body: post_body,
      return_type: 'OCI::OspGateway::Models::SubscriptionCollection'
    )
  end
  # rubocop:enable Metrics/BlockLength
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Style/IfUnlessModifier, Metrics/ParameterLists, Metrics/MethodLength, Layout/EmptyLines

# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Style/IfUnlessModifier, Metrics/ParameterLists, Metrics/MethodLength, Layout/EmptyLines

# Pays a subscription. POST is made idempotent via an opc-retry-token
# (a token is generated automatically when the caller does not supply one).
#
# @param [String] osp_home_region The home region's public name of the logged in user.
# @param [String] subscription_id Subscription id (OCID).
# @param [String] compartment_id The OCID of the compartment.
# @param [OCI::OspGateway::Models::PaySubscriptionDetails] pay_subscription_details subscription payment request.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config Per-operation retry configuration (see #get_subscription).
# @option opts [String] :if_match Optimistic concurrency control: the etag from a previous GET/POST response.
# @option opts [String] :opc_retry_token Idempotency token carried across retried requests.
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for support requests.
# @return [Response] A Response object with data of type {OCI::OspGateway::Models::PaySubscriptionReceipt PaySubscriptionReceipt}
def pay_subscription(osp_home_region, subscription_id, compartment_id, pay_subscription_details, opts = {})
  logger.debug 'Calling operation SubscriptionServiceClient#pay_subscription.' if logger

  raise "Missing the required parameter 'osp_home_region' when calling pay_subscription." if osp_home_region.nil?
  raise "Missing the required parameter 'subscription_id' when calling pay_subscription." if subscription_id.nil?
  raise "Missing the required parameter 'compartment_id' when calling pay_subscription." if compartment_id.nil?
  raise "Missing the required parameter 'pay_subscription_details' when calling pay_subscription." if pay_subscription_details.nil?
  raise "Parameter value for 'subscription_id' must not be blank" if OCI::Internal::Util.blank_string?(subscription_id)

  path = '/subscriptions/{subscriptionId}/actions/pay'.sub('{subscriptionId}', subscription_id.to_s)
  operation_signing_strategy = :standard

  # rubocop:disable Style/NegatedIf
  # Query Params
  query_params = {}
  query_params[:ospHomeRegion] = osp_home_region
  query_params[:compartmentId] = compartment_id

  # Header Params
  header_params = {}
  header_params[:accept] = 'application/json'
  header_params[:'content-type'] = 'application/json'
  header_params[:'if-match'] = opts[:if_match] if opts[:if_match]
  header_params[:'opc-retry-token'] = opts[:opc_retry_token] if opts[:opc_retry_token]
  header_params[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]
  # rubocop:enable Style/NegatedIf
  # Guarantee idempotency even when the caller supplied no token.
  header_params[:'opc-retry-token'] ||= OCI::Retry.generate_opc_retry_token

  post_body = @api_client.object_to_http_body(pay_subscription_details)

  # rubocop:disable Metrics/BlockLength
  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'SubscriptionServiceClient#pay_subscription') do
    @api_client.call_api(
      :POST,
      path,
      endpoint,
      header_params: header_params,
      query_params: query_params,
      operation_signing_strategy: operation_signing_strategy,
      body: post_body,
      return_type: 'OCI::OspGateway::Models::PaySubscriptionReceipt'
    )
  end
  # rubocop:enable Metrics/BlockLength
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Style/IfUnlessModifier, Metrics/ParameterLists, Metrics/MethodLength, Layout/EmptyLines

# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Style/IfUnlessModifier, Metrics/ParameterLists, Metrics/MethodLength, Layout/EmptyLines

# Updates the plan of a subscription.
#
# @param [String] subscription_id Subscription id (OCID).
# @param [String] osp_home_region The home region's public name of the logged in user.
# @param [String] compartment_id The OCID of the compartment.
# @param [OCI::OspGateway::Models::UpdateSubscriptionDetails] update_subscription_details Subscription update request.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config Per-operation retry configuration (see #get_subscription).
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for support requests.
# @option opts [String] :if_match Optimistic concurrency control: the etag from a previous GET/POST response;
#   the resource is updated only if the etag matches its current value.
# @return [Response] A Response object with data of type {OCI::OspGateway::Models::Subscription Subscription}
def update_subscription(subscription_id, osp_home_region, compartment_id, update_subscription_details, opts = {})
  logger.debug 'Calling operation SubscriptionServiceClient#update_subscription.' if logger

  raise "Missing the required parameter 'subscription_id' when calling update_subscription." if subscription_id.nil?
  raise "Missing the required parameter 'osp_home_region' when calling update_subscription." if osp_home_region.nil?
  raise "Missing the required parameter 'compartment_id' when calling update_subscription." if compartment_id.nil?
  raise "Missing the required parameter 'update_subscription_details' when calling update_subscription." if update_subscription_details.nil?
  raise "Parameter value for 'subscription_id' must not be blank" if OCI::Internal::Util.blank_string?(subscription_id)

  path = '/subscriptions/{subscriptionId}'.sub('{subscriptionId}', subscription_id.to_s)
  operation_signing_strategy = :standard

  # rubocop:disable Style/NegatedIf
  # Query Params
  query_params = {}
  query_params[:ospHomeRegion] = osp_home_region
  query_params[:compartmentId] = compartment_id

  # Header Params
  header_params = {}
  header_params[:accept] = 'application/json'
  header_params[:'content-type'] = 'application/json'
  header_params[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]
  header_params[:'if-match'] = opts[:if_match] if opts[:if_match]
  # rubocop:enable Style/NegatedIf

  post_body = @api_client.object_to_http_body(update_subscription_details)

  # rubocop:disable Metrics/BlockLength
  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'SubscriptionServiceClient#update_subscription') do
    @api_client.call_api(
      :PUT,
      path,
      endpoint,
      header_params: header_params,
      query_params: query_params,
      operation_signing_strategy: operation_signing_strategy,
      body: post_body,
      return_type: 'OCI::OspGateway::Models::Subscription'
    )
  end
  # rubocop:enable Metrics/BlockLength
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Style/IfUnlessModifier, Metrics/ParameterLists, Metrics/MethodLength, Layout/EmptyLines

private

# Resolves the retry configuration for one operation: an explicit
# :retry_config key (even a nil one) overrides the client-level default.
def applicable_retry_config(opts = {})
  return @retry_config unless opts.key?(:retry_config)

  opts[:retry_config]
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
<reponame>maven-nar/cpptasks-parallel<filename>src/test/java/com/github/maven_nar/cpptasks/compiler/TestCompilerConfiguration.java /* * * Copyright 2002-2004 The Ant-Contrib project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.maven_nar.cpptasks.compiler; import com.github.maven_nar.cpptasks.compiler.CompilerConfiguration; import junit.framework.TestCase; /** */ public abstract class TestCompilerConfiguration extends TestCase { public TestCompilerConfiguration(String name) { super(name); } protected abstract CompilerConfiguration create(); public String getObjectFileExtension() { return ".o"; } public void testBid() { CompilerConfiguration compiler = create(); int bid = compiler.bid("c:/foo\\bar\\hello.c"); assertEquals(100, bid); bid = compiler.bid("c:/foo\\bar/hello.c"); assertEquals(100, bid); bid = compiler.bid("c:/foo\\bar\\hello.h"); assertEquals(1, bid); bid = compiler.bid("c:/foo\\bar/hello.h"); assertEquals(1, bid); bid = compiler.bid("c:/foo\\bar/hello.pas"); assertEquals(0, bid); bid = compiler.bid("c:/foo\\bar/hello.java"); assertEquals(0, bid); } public void testGetOutputFileName1() { CompilerConfiguration compiler = create(); String input = "c:/foo\\bar\\hello.c"; // // may cause IllegalStateException since // setPlatformInfo has not been called try { String[] output = compiler.getOutputFileNames(input, null); } catch (java.lang.IllegalStateException ex) { } } public void testGetOutputFileName2() { CompilerConfiguration compiler = create(); 
String[] output = compiler.getOutputFileNames("c:/foo\\bar\\hello.c", null); assertEquals("hello" + getObjectFileExtension(), output[0]); output = compiler.getOutputFileNames("c:/foo\\bar/hello.c", null); assertEquals("hello" + getObjectFileExtension(), output[0]); output = compiler.getOutputFileNames("hello.c", null); assertEquals("hello" + getObjectFileExtension(), output[0]); output = compiler.getOutputFileNames("c:/foo\\bar\\hello.h", null); assertEquals(0, output.length); output = compiler.getOutputFileNames("c:/foo\\bar/hello.h", null); assertEquals(0, output.length); } }
euler -i inputs/figure2.txt -e mnpw --rcgo ; euler -i inputs/figure4.txt -e mnpw --rcgo ; euler -i inputs/figure5.txt -e mnpw --rcgo ; euler -i inputs/figure6.txt -e mnpw --rcgo ; euler -i inputs/figure7.txt -e mnpw --rcgo ; euler -i inputs/figure8.txt -e mnpw --rcgo ; euler -i inputs/figure9.txt -e mnpw --rcgo ; euler -i inputs/figure10.txt -e mnpw --rcgo ; euler -i inputs/figure11.txt -e mnpw --rcgo ; euler -i inputs/figure12.txt -e mnpw --rcgo ; euler -i inputs/figure13.txt -e mnpw --rcgo ; euler -i inputs/figure14.txt -e mnpw --rcgo ; euler -i inputs/figure15.txt -e mnpw --rcgo ; euler -i inputs/figure16.txt -e mnpw --rcgo ; #==> in case oinputs/ inconsistency and other special cases the commands will be diinputs/inputs/erent!
package dao; import models.*; import org.sql2o.*; import org.junit.*; import java.util.List; import static org.junit.Assert.*; public class Sql2oTeamMemberDaoTest { private Sql2oTeamDao teamDao; private Sql2oTeamMemberDao teamMemberDao; private Connection conn; @Before public void setUp() throws Exception { String connectionString = "jdbc:h2:mem:testing;INIT=RUNSCRIPT from 'classpath:db/create.sql'"; Sql2o sql2o = new Sql2o(connectionString, "", ""); teamMemberDao = new Sql2oTeamMemberDao(sql2o); conn = sql2o.open(); } @After public void tearDown() throws Exception { conn.close(); } @Test public void addingTeamMemberSetsId() throws Exception{ TeamMember teamMember = new TeamMember(); teamMember.setName("jeff"); int originalTeamMemberId = teamMember.getId(); teamMemberDao.add(teamMember); assertNotEquals(originalTeamMemberId, teamMember.getId()); } @Test public void getAllReturnsAllTeamMembers() throws Exception { TeamMember newTeamMember = setUpNewTeamMemberName(); TeamMember newTeamMember1 = setUpNewTeamMemberName(); assertEquals(2, teamMemberDao.getAll().size()); } @Test public void findByIdReturnsCorrectTeamMember() throws Exception { TeamMember teamMember = setUpNewTeamMemberName(); TeamMember teamMember1 = setUpNewTeamMemberName(); teamMemberDao.findById(teamMember.getId()); assertEquals(2, teamMember1.getId()); } @Test public void updateCorrectlyChangesTeamMembersName() throws Exception { TeamMember teamMember = setUpNewTeamMemberName(); teamMemberDao.update(teamMember.getId(), "jim", 1); TeamMember updatedTeamMember = teamMemberDao.findById(teamMember.getId()); assertNotEquals(teamMember, updatedTeamMember.getName()); } @Test public void deleteByIdDeletesCorrectTeamMember() throws Exception { TeamMember teamMember = setUpNewTeamMemberName(); TeamMember teamMember1 = setUpNewTeamMemberName(); teamMemberDao.deleteById(teamMember.getId()); assertEquals(1, teamMemberDao.getAll().size()); } @Test public void clearAllTeamMembersDeletesAllCorrectly() throws 
Exception { TeamMember teamMember = setUpNewTeamMemberName(); TeamMember teamMember1 = setUpNewTeamMemberName(); teamMemberDao.clearAllTeamMembers(); assertEquals(0, teamMemberDao.getAll().size()); } @Test public void teamIdIsReturnedCorrectly() throws Exception { TeamMember teamMember = new TeamMember(); teamMember.setName("jeff"); teamMember.setTeamId(1); int originalTeamId = teamMember.getTeamId(); teamMemberDao.add(teamMember); assertEquals(originalTeamId, teamMemberDao.findById(teamMember.getId()).getTeamId()); } @Test public void getAllMembersByTeamIdGetsAllMembersCorrectly() throws Exception { TeamMember teamMember = new TeamMember(); TeamMember teamMember1 = new TeamMember(); TeamMember teamMember2 = new TeamMember(); teamMember.setName("name"); teamMember1.setName("name1"); teamMember2.setName("name2"); teamMember.setTeamId(1); teamMember1.setTeamId(1); teamMember2.setTeamId(2); teamMemberDao.add(teamMember); teamMemberDao.add(teamMember1); teamMemberDao.add(teamMember2); List<TeamMember> allTeamOneMembers = teamMemberDao.getAllMembersByTeamId(1); assertEquals(2, allTeamOneMembers.size()); assertTrue(allTeamOneMembers.contains(teamMember)); assertFalse(allTeamOneMembers.contains(teamMember2)); } public TeamMember setUpNewTeamMemberName() { TeamMember teamMember = new TeamMember(); teamMember.setName("jeff"); teamMember.setTeamId(2); teamMemberDao.add(teamMember); return teamMember; } }
#!/usr/bin/env bash cd "$(dirname $0)/.." CUDA_VISIBLE_DEVICES=1 python3 imagenet.py \ -a=resnet18 \ --arch-cfg=last=True \ --batch-size=256 \ --epochs=100 \ -oo=sgd \ -oc=momentum=0.9 \ -wd=1e-4 \ --lr=0.1 \ --lr-method=step \ --lr-steps=30 \ --lr-gamma=0.1 \ --dataset-root=/raid/Lei_Data/imageNet/input_torch/ \ --dataset=folder \ --norm=CDSigma \ --norm-cfg=T=5,num_channels=64 \ --seed=1 \ $@
/** * @typedef {number} TestState */ /** * An enumeration of values for the states of test runs. * @enum {TestState} **/ export const TestStates = { found: 0, started: 1, succeeded: 2, failed: 4, completed: 8 };
<gh_stars>0 package build import ( "log" "testing" ) func Test_sliceContains(t *testing.T) { testSlice := []string { "apple", "banana", "pear", } if sliceContains(testSlice,"rabbit") || !sliceContains(testSlice,"apple") { log.Println("sliceContains match fails") t.Fail() } }
import angular from 'angular'; import controlComponent from './control.component'; let controlModule = angular.module('control', []) .component('control', controlComponent) .name; export default controlModule;
class HasNameSpec: def passes(self, candidate: PersonDTO) -> bool: return bool(candidate.name) # Modify the passes method to include the HasNameSpec in the composite test def passes(self, candidate: PersonDTO) -> bool: has_age_spec = HasAgeSpec() has_name_spec = HasNameSpec() # New specification for candidate's name age_less_than_spec = AgeIsLessThan(55) age_greater_than_spec = AgeIsGreaterThan(18) is_domestic = IsFromCountry("US") tests = ( has_age_spec & has_name_spec # Include the new specification & age_less_than_spec & age_greater_than_spec & -is_domestic ) return tests.passes(candidate)
# shellcheck shell=bash
# shellcheck disable=SC2034
#
# Load the `bash-preexec.sh` library, and define helper functions

## Prepare, load, fix, and install `bash-preexec.sh`

# Disable `$PROMPT_COMMAND` modification for now.
__bp_delay_install="delayed"

# shellcheck source-path=SCRIPTDIR/../vendor/github.com/rcaloras/bash-preexec
source "${BASH_IT?}/vendor/github.com/rcaloras/bash-preexec/bash-preexec.sh"

# Block damaging user's `$HISTCONTROL`: override the library hook with a no-op.
function __bp_adjust_histcontrol() { :; }

# Don't fail on readonly variables: override the library guard with a no-op.
function __bp_require_not_readonly() { :; }

# For performance, testing, and to avoid unexpected behavior: disable DEBUG traps in subshells.
# See bash-it/bash-it#1040 and rcaloras/bash-preexec#26
: "${__bp_enable_subshells:=}" # blank

# Modify `$PROMPT_COMMAND` in finalize hook
_bash_it_library_finalize_hook+=('__bp_install_after_session_init')

## Helper functions

# True when the (whitespace-trimmed) function name is already registered
# in the precmd hook array.
function __check_precmd_conflict() {
	local f
	__bp_trim_whitespace f "${1?}"
	_bash-it-array-contains-element "${f}" "${precmd_functions[@]}"
}

# True when the (whitespace-trimmed) function name is already registered
# in the preexec hook array.
function __check_preexec_conflict() {
	local f
	__bp_trim_whitespace f "${1?}"
	_bash-it-array-contains-element "${f}" "${preexec_functions[@]}"
}

# Registers $1 as a precmd hook (bash-preexec) or appends it to
# $PROMPT_COMMAND (plain bash), skipping duplicates in either case.
function safe_append_prompt_command() {
	local prompt_re prompt_er f

	if [[ "${bash_preexec_imported:-${__bp_imported:-missing}}" == "defined" ]]; then
		# We are using bash-preexec
		__bp_trim_whitespace f "${1?}"
		if ! __check_precmd_conflict "${f}"; then
			precmd_functions+=("${f}")
		fi
	else
		# Match on word-boundaries
		prompt_re='(^|[^[:alnum:]_])'
		prompt_er='([^[:alnum:]_]|$)'
		if [[ ${PROMPT_COMMAND} =~ ${prompt_re}"${1}"${prompt_er} ]]; then
			return
		elif [[ -z ${PROMPT_COMMAND} ]]; then
			PROMPT_COMMAND="${1}"
		else
			PROMPT_COMMAND="${1};${PROMPT_COMMAND}"
		fi
	fi
}

# Registers $1 as a preexec hook; unlike precmd there is no plain-bash
# fallback, so this logs an error when bash-preexec is not loaded.
function safe_append_preexec() {
	local prompt_re f

	if [[ "${bash_preexec_imported:-${__bp_imported:-missing}}" == "defined" ]]; then
		# We are using bash-preexec
		__bp_trim_whitespace f "${1?}"
		if ! __check_preexec_conflict "${f}"; then
			preexec_functions+=("${f}")
		fi
	else
		_log_error "${FUNCNAME[0]}: can't append to preexec hook because _bash-preexec.sh_ hasn't been loaded"
	fi
}
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #import <Foundation/Foundation.h> NS_ASSUME_NONNULL_BEGIN @interface QueueHelper : NSObject + (void)ensureToRunOnMainQueue:(void (^)(void))block; @end NS_ASSUME_NONNULL_END
#!/bin/sh # if managed image (SPM is present) if [ -d $SAG_HOME/profiles/SPM/bin ]; then # self-register $SAG_HOME/profiles/SPM/bin/register.sh # start SPM in background $SAG_HOME/profiles/SPM/bin/startup.sh fi # you can simply run main product run in foreground # $SAG_HOME/profiles/MWS_$INSTANCE_NAME/bin/console.sh # or do this... echo "Remove old logs" rm -rf $SAG_HOME/profiles/MWS_$INSTANCE_NAME/logs/* rm -rf $SAG_HOME/MWS/server/$INSTANCE_NAME/logs/* # or can start in background $SAG_HOME/profiles/MWS_$INSTANCE_NAME/bin/startup.sh echo "MWS process started. Waiting ..." # wait until IS server.log comes up while [ ! -f $SAG_HOME/MWS/server/$INSTANCE_NAME/logs/_full_.log ]; do tail $SAG_HOME/profiles/MWS_$INSTANCE_NAME/logs/wrapper.log sleep 5 done # this is our main container process echo "MWS is ONLINE at http://`hostname`:8585/" tail -f $SAG_HOME/MWS/server/$INSTANCE_NAME/logs/_full_.log
<filename>entry/sw_entry.go package rkentry import ( "context" "encoding/json" "github.com/markbates/pkger" "github.com/markbates/pkger/pkging" "github.com/rookie-ninja/rk-common/common" "go.uber.org/zap" "io/ioutil" "net/http" "os" "path" "strconv" "strings" "time" ) var ( swaggerJsonFiles = make(map[string]string, 0) swConfigFileContents = `` ) const ( // SwEntryType default entry type SwEntryType = "SwEntry" // SwEntryNameDefault default entry name SwEntryNameDefault = "SwDefault" // SwEntryDescription default entry description SwEntryDescription = "Internal RK entry for swagger UI." // ModPath used while reading files from pkger ModPath = "github.com/rookie-ninja/rk-entry" ) // Inner struct used while initializing swagger entry. type swUrlConfig struct { Urls []*swUrl `json:"urls" yaml:"urls"` } // Inner struct used while initializing swagger entry. type swUrl struct { Name string `json:"name" yaml:"name"` Url string `json:"url" yaml:"url"` } // BootConfigSw Bootstrap config of swagger. // 1: Enabled: Enable swagger. // 2: Path: Swagger path accessible from restful API. // 3: JsonPath: The path of where swagger JSON file was located. // 4: Headers: The headers that would added into each API response. type BootConfigSw struct { Enabled bool `yaml:"enabled" yaml:"enabled"` Path string `yaml:"path" yaml:"path"` JsonPath string `yaml:"jsonPath" yaml:"jsonPath"` Headers []string `yaml:"headers" yaml:"headers"` } // SwEntry implements rkentry.Entry interface. // 1: Path: Swagger path accessible from restful API. // 2: JsonPath: The path of where swagger JSON file was located. // 3: Headers: The headers that would added into each API response. // 4: Port: The port where swagger would listen to. // 5: EnableCommonService: Enable common service in swagger. 
type SwEntry struct { EntryName string `json:"entryName" yaml:"entryName"` EntryType string `json:"entryType" yaml:"entryType"` EntryDescription string `json:"-" yaml:"-"` EventLoggerEntry *EventLoggerEntry `json:"-" yaml:"-"` ZapLoggerEntry *ZapLoggerEntry `json:"-" yaml:"-"` JsonPath string `json:"jsonPath" yaml:"jsonPath"` Path string `json:"path" yaml:"path"` Headers map[string]string `json:"-" yaml:"-"` Port uint64 `json:"port" yaml:"port"` EnableCommonService bool `json:"-" yaml:"-"` AssetsFilePath string `json:"-" yaml:"-"` } // SwOption Swagger entry option. type SwOption func(*SwEntry) // WithPortSw Provide port. func WithPortSw(port uint64) SwOption { return func(entry *SwEntry) { entry.Port = port } } // WithNameSw Provide name. func WithNameSw(name string) SwOption { return func(entry *SwEntry) { entry.EntryName = name } } // WithPathSw Provide path. func WithPathSw(path string) SwOption { return func(entry *SwEntry) { if len(path) > 0 { entry.Path = path } } } // WithJsonPathSw Provide JsonPath. func WithJsonPathSw(path string) SwOption { return func(entry *SwEntry) { if len(path) > 0 { entry.JsonPath = path } } } // WithHeadersSw Provide headers. func WithHeadersSw(headers map[string]string) SwOption { return func(entry *SwEntry) { entry.Headers = headers } } // WithZapLoggerEntrySw Provide rkentry.ZapLoggerEntry. func WithZapLoggerEntrySw(zapLoggerEntry *ZapLoggerEntry) SwOption { return func(entry *SwEntry) { entry.ZapLoggerEntry = zapLoggerEntry } } // WithEventLoggerEntrySw Provide rkentry.EventLoggerEntry. func WithEventLoggerEntrySw(eventLoggerEntry *EventLoggerEntry) SwOption { return func(entry *SwEntry) { entry.EventLoggerEntry = eventLoggerEntry } } // WithEnableCommonServiceSw Provide enable common service option. 
func WithEnableCommonServiceSw(enable bool) SwOption {
	return func(entry *SwEntry) {
		entry.EnableCommonService = enable
	}
}

// RegisterSwEntryWithConfig builds a SwEntry from bootstrap config.
// Returns nil when config.Enabled is false. Header strings are expected in
// "key:value" form; malformed entries (not exactly one ':') are skipped.
func RegisterSwEntryWithConfig(config *BootConfigSw, name string, port uint64, zap *ZapLoggerEntry, event *EventLoggerEntry, commonServiceEnabled bool) *SwEntry {
	var swEntry *SwEntry
	if config.Enabled {
		// Init swagger custom headers from config
		headers := make(map[string]string, 0)
		for i := range config.Headers {
			header := config.Headers[i]
			tokens := strings.Split(header, ":")
			if len(tokens) == 2 {
				headers[tokens[0]] = tokens[1]
			}
		}

		swEntry = RegisterSwEntry(
			WithNameSw(name),
			WithZapLoggerEntrySw(zap),
			WithEventLoggerEntrySw(event),
			WithEnableCommonServiceSw(commonServiceEnabled),
			WithPortSw(port),
			WithPathSw(config.Path),
			WithJsonPathSw(config.JsonPath),
			WithHeadersSw(headers))
	}

	return swEntry
}

// RegisterSwEntry creates a SwEntry with defaults, applies the options,
// normalizes Path to have leading and trailing slashes, and eagerly loads
// the swagger JSON files into the package-level caches.
func RegisterSwEntry(opts ...SwOption) *SwEntry {
	entry := &SwEntry{
		EntryName:        SwEntryNameDefault,
		EntryType:        SwEntryType,
		EntryDescription: SwEntryDescription,
		ZapLoggerEntry:   GlobalAppCtx.GetZapLoggerEntryDefault(),
		EventLoggerEntry: GlobalAppCtx.GetEventLoggerEntryDefault(),
		Path:             "sw",
		JsonPath:         "",
		AssetsFilePath:   "/rk/v1/assets/sw/",
	}

	for i := range opts {
		opts[i](entry)
	}

	// Deal with Path
	// add "/" at start and end side if missing
	if !strings.HasPrefix(entry.Path, "/") {
		entry.Path = "/" + entry.Path
	}

	if !strings.HasSuffix(entry.Path, "/") {
		entry.Path = entry.Path + "/"
	}

	if len(entry.EntryName) < 1 {
		entry.EntryName = "SwEntry-" + strconv.FormatUint(entry.Port, 10)
	}

	// init swagger configs
	entry.initSwaggerConfig()

	return entry
}

// Bootstrap is a no-op: all initialization happens in RegisterSwEntry.
func (entry *SwEntry) Bootstrap(ctx context.Context) {
	// Noop
}

// Interrupt is a no-op: the entry holds no resources to release.
func (entry *SwEntry) Interrupt(ctx context.Context) {
	// Noop
}

// GetName returns the entry name.
func (entry *SwEntry) GetName() string {
	return entry.EntryName
}

// GetType returns the entry type.
func (entry *SwEntry) GetType() string {
	return entry.EntryType
}

// GetDescription returns the entry description.
func (entry *SwEntry) GetDescription() string {
	return entry.EntryDescription
}

// String returns the JSON representation; marshal errors yield "".
func (entry *SwEntry) String() string {
	bytes, _ := json.Marshal(entry)
	return string(bytes)
}

// MarshalJSON Marshal entry (logger entries are reduced to their names).
func (entry *SwEntry) MarshalJSON() ([]byte, error) {
	m := map[string]interface{}{
		"entryName":           entry.EntryName,
		"entryType":           entry.EntryType,
		"entryDescription":    entry.EntryDescription,
		"eventLoggerEntry":    entry.EventLoggerEntry.GetName(),
		"zapLoggerEntry":      entry.ZapLoggerEntry.GetName(),
		"jsonPath":            entry.JsonPath,
		"port":                entry.Port,
		"path":                entry.Path,
		"headers":             entry.Headers,
		"enableCommonService": entry.EnableCommonService,
	}

	return json.Marshal(&m)
}

// UnmarshalJSON Unmarshal entry — intentionally a no-op.
func (entry *SwEntry) UnmarshalJSON([]byte) error {
	return nil
}

// AssetsFileHandler Handler for swagger assets files, served from the pkger
// bundle after stripping the "/rk/v1" route prefix.
func (entry *SwEntry) AssetsFileHandler() http.HandlerFunc {
	return func(writer http.ResponseWriter, request *http.Request) {
		p := strings.TrimSuffix(strings.TrimPrefix(request.URL.Path, "/rk/v1"), "/")

		if file, err := openFromPkger(ModPath, p); err != nil {
			http.Error(writer, "Internal server error", http.StatusInternalServerError)
		} else {
			http.ServeContent(writer, request, path.Base(p), time.Now(), file)
		}
	}
}

// ConfigFileHandler handler for swagger config files.
func (entry *SwEntry) ConfigFileHandler() http.HandlerFunc { return func(writer http.ResponseWriter, request *http.Request) { p := strings.TrimSuffix(request.URL.Path, "/") writer.Header().Set("cache-control", "no-cache") for k, v := range entry.Headers { writer.Header().Set(k, v) } switch p { case strings.TrimSuffix(entry.Path, "/"): if file, err := openFromPkger(ModPath, "/assets/sw/index.html"); err != nil { http.Error(writer, "Internal server error", http.StatusInternalServerError) } else { http.ServeContent(writer, request, "index.html", time.Now(), file) } case path.Join(entry.Path, "swagger-config.json"): http.ServeContent(writer, request, "swagger-config.json", time.Now(), strings.NewReader(swConfigFileContents)) default: p = strings.TrimPrefix(p, entry.Path) value, ok := swaggerJsonFiles[p] if ok { http.ServeContent(writer, request, p, time.Now(), strings.NewReader(value)) } else { http.NotFound(writer, request) } } } } // Init swagger config. // This function do the things bellow: // 1: List swagger files from entry.JSONPath. // 2: Read user swagger json files and deduplicate. 
// 3: Assign swagger contents into swaggerConfigJson variable func (entry *SwEntry) initSwaggerConfig() { swaggerUrlConfig := &swUrlConfig{ Urls: make([]*swUrl, 0), } if len(entry.JsonPath) > 0 { // 1: Add user API swagger JSON entry.listFilesWithSuffix(swaggerUrlConfig, entry.JsonPath, false) } else { // try to read from default directories // - docs // - api/gen/v1 // - api/gen entry.listFilesWithSuffix(swaggerUrlConfig, "docs", true) entry.listFilesWithSuffix(swaggerUrlConfig, "api/gen/v1", true) entry.listFilesWithSuffix(swaggerUrlConfig, "api/gen", true) } // 2: Add rk common APIs if entry.EnableCommonService { key := entry.EntryName + "-rk-common.swagger.json" // add common service json file swaggerJsonFiles[key] = string(readFileFromPkger(ModPath, "/assets/sw/config/swagger.json")) swaggerUrlConfig.Urls = append(swaggerUrlConfig.Urls, &swUrl{ Name: key, Url: path.Join(entry.Path, key), }) } // 3: Marshal to swagger-config.json and write to pkger bytes, err := json.Marshal(swaggerUrlConfig) if err != nil { entry.ZapLoggerEntry.GetLogger().Error("Failed to unmarshal swagger-config.json", zap.Error(err)) rkcommon.ShutdownWithError(err) } swConfigFileContents = string(bytes) } // List files with .json suffix and store them into swaggerJsonFiles variable. 
func (entry *SwEntry) listFilesWithSuffix(urlConfig *swUrlConfig, jsonPath string, ignoreError bool) { suffix := ".json" // re-path it with working directory if not absolute path if !path.IsAbs(entry.JsonPath) { wd, _ := os.Getwd() jsonPath = path.Join(wd, jsonPath) } files, err := ioutil.ReadDir(jsonPath) if err != nil && !ignoreError { entry.ZapLoggerEntry.GetLogger().Warn("Failed to list files with suffix", zap.String("path", jsonPath), zap.String("suffix", suffix), zap.String("error", err.Error())) return } for i := range files { file := files[i] if !file.IsDir() && strings.HasSuffix(file.Name(), suffix) { bytes, err := ioutil.ReadFile(path.Join(jsonPath, file.Name())) key := entry.EntryName + "-" + file.Name() if err != nil && !ignoreError { entry.ZapLoggerEntry.GetLogger().Info("Failed to read file with suffix", zap.String("path", path.Join(jsonPath, key)), zap.String("suffix", suffix), zap.String("error", err.Error())) rkcommon.ShutdownWithError(err) } swaggerJsonFiles[key] = string(bytes) urlConfig.Urls = append(urlConfig.Urls, &swUrl{ Name: key, Url: path.Join(entry.Path, key), }) } } } // Read go template files with Pkger. func readFileFromPkger(modPath, filePath string) []byte { var file pkging.File var err error if file, err = pkger.Open(path.Join(modPath+":", filePath)); err != nil { return []byte{} } var bytes []byte if bytes, err = ioutil.ReadAll(file); err != nil { return []byte{} } return bytes } func openFromPkger(modPath, filePath string) (pkging.File, error) { return pkger.Open(path.Join(modPath + ":" + filePath)) }
/* Copyright (c) 2019, Art Compiler LLC */ import { assert, message, messages, reserveCodeRange, decodeID, encodeID, validate, } from "./share.js" reserveCodeRange(1000, 1999, "compile"); messages[1001] = "Node ID %1 not found in pool."; messages[1002] = "Invalid tag in node with Node ID %1."; messages[1003] = "No async callback provided."; messages[1004] = "No visitor method defined for '%1'."; const transform = (function() { const table = { "X-TICK-FORMAT": xTickFormat, "Y-TICK-FORMAT": yTickFormat, "Y-TICK-SIZE": yTickSize, "X-AXIS-LABEL": xAxisLabel, "Y-AXIS-LABEL": yAxisLabel, "CHART-PADDING": chartPadding, "GAP": gap, "HORIZONTAL": horizontal, "HIDE-AXIS": hideAxis, "HIDE-X-AXIS": hideXAxis, "HIDE-Y-AXIS": hideYAxis, "HIDE-LEGEND": hideLegend, "HIDE-GRID": hideGrid, "HIDE-X-GRID": hideXGrid, "HIDE-Y-GRID": hideYGrid, "SHOW-Y-VALUES": showYValues, "STACK": stack, "DOT-RADIUS": dotRadius, "PALETTE": palette, "RGB": rgb, "ROWS": rows, "COLS": cols, "BAR-WIDTH": barWidth, "WIDTH": width, "HEIGHT": height, "LINE-WIDTH": lineWidth, "LINE-COLORS": colors, "COLORS": colors, "TABLE-CHART": tableChart, "BAR-CHART": barChart, "TIMESERIES-CHART": timeseriesChart, "AREA-CHART": areaChart, "VIRUS-CHART": virusChart, "HEATMAP": heatmap, "PROG" : program, "EXPRS" : exprs, "STR": str, "NUM": num, "IDENT": ident, "BOOL": bool, "LIST": list, "RECORD": record, "BINDING": binding, "ADD" : add, "MUL" : mul, "VAL" : val, "KEY" : key, "LEN" : len, "STYLE" : style, "CONCAT" : concat, "ARG" : arg, "DEFAULTS" : defaults, "LAMBDA" : lambda, "PAREN" : paren, "APPLY" : apply, "MAP" : map, }; let nodePool; let version; function getVersion(pool) { return pool.version ? 
+pool.version : 0; } function transform(code, data, resume) { nodePool = code; version = getVersion(code); return visit(code.root, data, resume); } function error(str, nid) { return { str: str, nid: nid, }; } function visit(nid, options, resume) { assert(typeof resume === "function", message(1003)); // Get the node from the pool of nodes. let node; if (typeof nid === "object") { node = nid; } else { node = nodePool[nid]; } assert(node, message(1001, [nid])); assert(node.tag, message(1001, [nid])); assert(typeof table[node.tag] === "function", message(1004, [JSON.stringify(node.tag)])); return table[node.tag](node, options, resume); } // BEGIN VISITOR METHODS function tableChart(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { let vals = val0; resume([].concat(err0), { type: "table-chart", args: { vals: vals, } }); }); }; function barChart(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { let vals = val0; resume([].concat(err0), { type: "bar-chart", args: { vals: vals, } }); }); }; function timeseriesChart(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { let cols = val0; let vals = val1; resume([].concat(err0).concat(err1), { type: "timeseries-chart", args: { cols: cols, vals: vals, } }); }); }); }; function areaChart(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { let vals = []; let keys = Object.keys(val0[0]); vals.push(keys); val0.forEach((v, i) => { if (+v[keys[1]] < 120) { vals.push([ v[keys[0]], v[keys[1]], ]); } }); resume([].concat(err0), { type: "area-chart", args: { vals: vals, } }); }); }; function virusChart(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { let vals = []; let values = val0.values; let region = val0.region + ', ' + val0.parent; let type = val0.type; let keys = Object.keys(values[0]); vals.push(keys); values.forEach((v, i) => { vals.push([ 
v[keys[0]], v[keys[1]], ]); }); resume([].concat(err0), { type: "virus-chart", args: { type: type, region: region, vals: vals, } }); }); }; function heatmap(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { // let vals = []; // let keys = Object.keys(val0[0]); // vals.push(keys); // val0.forEach((v, i) => { // if (+v[keys[1]] < 120) { // vals.push([ // v[keys[0]], // v[keys[1]], // ]); // } // }); resume([].concat(err0), { type: "heatmap", args: { vals: val0, } }); }); }; function horizontal(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { val0.horizontal = true; resume([].concat(err0), val0); }); }; function hideAxis(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { val0.hideXAxis = true; val0.hideYAxis = true; resume([].concat(err0), val0); }); }; function hideXAxis(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { val0.hideXAxis = true; resume([].concat(err0), val0); }); }; function hideYAxis(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { val0.hideYAxis = true; resume([].concat(err0), val0); }); }; function hideLegend(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { val0.hideLegend = true; resume([].concat(err0), val0); }); }; function hideGrid(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { val0.hideXGrid = true; val0.hideYGrid = true; resume([].concat(err0), val0); }); }; function hideXGrid(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { val0.hideXGrid = true; resume([].concat(err0), val0); }); }; function hideYGrid(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { val0.hideYGrid = true; resume([].concat(err0), val0); }); }; function showYValues(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { val0.showYValues = true; resume([].concat(err0), val0); }); }; function 
horizontal(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { val0.horizontal = true; resume([].concat(err0), val0); }); }; function stack(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { val0.stack = true; resume([].concat(err0), val0); }); }; function dotRadius(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { val1.dotRadius = val0; resume([].concat(err0).concat(err1), val1); }); }); }; function palette(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { val1.palette = val0; resume([].concat(err0).concat(err1), val1); }); }); } function decimalToHex(d, padding) { var hex = Number(d).toString(16); padding = typeof (padding) === "undefined" || padding === null ? padding = 2 : padding; while (hex.length < padding) { hex = "0" + hex; } return hex; } function rgb(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { let r = decimalToHex(val0[0], 2); let g = decimalToHex(val0[1], 2); let b = decimalToHex(val0[2], 2); let val = "#" + r + g + b; resume([].concat(err0), val); }); } function rowLabels(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { val1.rowLabels = val0; resume([].concat(err0).concat(err1), val1); }); }); }; function rows(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { let rows = val1.rows = val0[0]; let vals = val1.args.vals; let name = rows.name; let type = "none"; let newVals = []; let newRows = []; vals.forEach(v => { if (!newRows[v[name]]) { newRows[v[name]] = { label: rows.label.replace("%%", v[name]), }; } newVals.push(Object.assign({}, v, { row: v[name], label: v[name], val: v.value, tip: v.value + (rows.units && " " + rows.units || ""), })); }); 
val1.rows = newRows; val1.args.vals = newVals; resume([].concat(err0).concat(err1), val1); // val1.rows = val0; // let vals = val1.args.vals; // let rows = val1.rows; // let newVals = [] // vals.forEach(v => { // rows.forEach((r, i) => { // let name = r.name; // newVals.push(Object.assign({}, v, { // row: i,un // val: scale(v[name], r), // tip: v[name] + (r.units && " " + r.units || ""), // })); // }); // }); // val1.args.vals = newVals; // resume([].concat(err0).concat(err1), val1); }); }); function scale(v, r) { let [NONE, NORMAL, WARNING, CRITICAL] = [0, 1, 2, 3]; if (r.normal) { let breaks = r.normal; for (let i = 0; i < breaks.length; i += 2) { if (v >= breaks[i] && v < breaks[i + 1]) { return NORMAL; } } } if (r.warning) { let breaks = r.warning; for (let i = 0; i < breaks.length; i += 2) { if (v >= breaks[i] && v < breaks[i + 1]) { return WARNING; } } } if (r.critical) { let breaks = r.critical; for (let i = 0; i < breaks.length; i += 2) { if (v >= breaks[i] && v < breaks[i + 1]) { return CRITICAL; } } } return NONE; } }; function cols(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { val1.cols = val0[0]; let vals = val1.args.vals; let name = val1.cols.name; let type = "time" let interval = val1.cols.interval; vals.forEach(v => { switch (type) { case "time": let t = new Date(v[name]); switch (interval) { case "day": v.col = t.getDate() - 1; break; case "hour": v.col = t.getHours(); break; case "minute": v.col = t.getMinutes(); break; } break; default: break; } }); resume([].concat(err0).concat(err1), val1); }); }); }; function xAxisLabel(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { val1.xAxisLabel = val0; resume([].concat(err0).concat(err1), val1); }); }); }; function yAxisLabel(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function 
(err1, val1) { val1.yAxisLabel = val0; resume([].concat(err0).concat(err1), val1); }); }); }; function xTickFormat(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { val1.xTickFormat = val0; resume([].concat(err0).concat(err1), val1); }); }); }; function yTickFormat(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { val1.yTickFormat = val0; resume([].concat(err0).concat(err1), val1); }); }); }; function yTickSize(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { val1.yTickSize = val0; resume([].concat(err0).concat(err1), val1); }); }); }; function chartPadding(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { val1.chartPadding = val0; resume([].concat(err0).concat(err1), val1); }); }); }; function gap(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { val1.gap = val0; resume([].concat(err0).concat(err1), val1); }); }); }; function barWidth(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { val1.barWidth = val0; resume([].concat(err0).concat(err1), val1); }); }); }; function width(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { val1.width = val0; resume([].concat(err0).concat(err1), val1); }); }); }; function height(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { val1.height = val0; resume([].concat(err0).concat(err1), val1); }); }); }; function lineWidth(node, options, resume) { visit(node.elts[0], options, function (err0, val0) 
{ visit(node.elts[1], options, function (err1, val1) { val1.lineWidth = val0; resume([].concat(err0).concat(err1), val1); }); }); }; function colors(node, options, resume) { visit(node.elts[0], options, function (err0, val0) { visit(node.elts[1], options, function (err1, val1) { val1.colors = val0; resume([].concat(err0).concat(err1), val1); }); }); }; function str(node, options, resume) { let val = node.elts[0]; resume([], val); } function num(node, options, resume) { let val = node.elts[0]; resume([], +val); } function ident(node, options, resume) { let val = node.elts[0]; resume([], val); } function bool(node, options, resume) { let val = node.elts[0]; resume([], !!val); } function concat(node, options, resume) { visit(node.elts[0], options, function (err1, val1) { let str = ""; if (val1 instanceof Array) { val1.forEach(v => { str += v; }); } else { str = val1.toString(); } resume(err1, str); }); } function paren(node, options, resume) { visit(node.elts[0], options, function (err1, val1) { resume(err1, val1); }); } function list(node, options, resume) { if (node.elts && node.elts.length > 1) { visit(node.elts[0], options, function (err1, val1) { node = { tag: "LIST", elts: node.elts.slice(1), }; list(node, options, function (err2, val2) { let val = [].concat(val2); val.unshift(val1); resume([].concat(err1).concat(err2), val); }); }); } else if (node.elts && node.elts.length > 0) { visit(node.elts[0], options, function (err1, val1) { let val = [val1]; resume([].concat(err1), val); }); } else { resume([], []); } } function defaults(node, options, resume) { // If there is input data, then use it, otherwise use default data. if (node.elts.length === 0) { // No args, so use the given data or empty. let data = options.data ? 
options.data : []; resume([], data); } else { visit(node.elts[0], options, function (err1, val1) { if (false) { err1 = err1.concat(error("Argument must be a number.", node.elts[0])); } let data = options.data && Object.keys(options.data).length != 0 ? options.data : val1; resume([].concat(err1), data); }); } } function arg(node, options, resume) { visit(node.elts[0], options, function (err1, val1) { let key = val1; if (false) { err1 = err1.concat(error("Argument must be a number.", node.elts[0])); } resume([].concat(err1), options.args[key]); }); } function args(node, options, resume) { resume([], options.args); } function lambda(node, options, resume) { // Return a function value. visit(node.elts[0], options, function (err1, val1) { visit(node.elts[1], options, function (err2, val2) { resume([].concat(err1).concat(err2), val2); }); }); } function apply(node, options, resume) { // Apply a function to arguments. visit(node.elts[1], options, function (err1, val1) { // args options.args = [val1]; visit(node.elts[0], options, function (err0, val0) { // fn resume([].concat(err1).concat(err0), val0); }); }); } function map(node, options, resume) { // Apply a function to arguments. 
visit(node.elts[1], options, function (err1, val1) { // args let errs = []; let vals = []; val1.forEach((val) => { options.args = [val]; visit(node.elts[0], options, function (err0, val0) { vals.push(val0); errs = errs.concat(err0); }); }); resume(errs, vals); }); } function binding(node, options, resume) { visit(node.elts[0], options, function (err1, val1) { visit(node.elts[1], options, function (err2, val2) { resume([].concat(err1).concat(err2), {key: val1, val: val2}); }); }); } function record(node, options, resume) { if (node.elts && node.elts.length > 1) { visit(node.elts[0], options, function (err1, val1) { node = { tag: "RECORD", elts: node.elts.slice(1), }; record(node, options, function (err2, val2) { val2[val1.key] = val1.val; resume([].concat(err1).concat(err2), val2); }); }); } else if (node.elts && node.elts.length > 0) { visit(node.elts[0], options, function (err1, val1) { let val = {}; val[val1.key] = val1.val; resume([].concat(err1), val); }); } else { resume([], {}); } } function exprs(node, options, resume) { if (node.elts && node.elts.length > 1) { visit(node.elts[0], options, function (err1, val1) { node = { tag: "EXPRS", elts: node.elts.slice(1), }; exprs(node, options, function (err2, val2) { let val = [].concat(val2); val.unshift(val1); resume([].concat(err1).concat(err2), val); }); }); } else if (node.elts && node.elts.length > 0) { visit(node.elts[0], options, function (err1, val1) { let val = [val1]; resume([].concat(err1), val); }); } else { resume([], []); } } function program(node, options, resume) { if (!options) { options = {}; } visit(node.elts[0], options, function (err, val) { // Return the value of the last expression. 
resume(err, val.pop()); }); } function key(node, options, resume) { visit(node.elts[0], options, function (err1, val1) { let key = val1; if (false) { err1 = err1.concat(error("Argument must be a number.", node.elts[0])); } visit(node.elts[1], options, function (err2, val2) { let obj = val2; if (false) { err2 = err2.concat(error("Argument must be a number.", node.elts[1])); } resume([].concat(err1).concat(err2), Object.keys(obj)[key]); }); }); } function val(node, options, resume) { visit(node.elts[0], options, function (err1, val1) { let key = val1; if (false) { err1 = err1.concat(error("Argument must be a number.", node.elts[0])); } visit(node.elts[1], options, function (err2, val2) { let obj = val2; if (false) { err2 = err2.concat(error("Argument must be a number.", node.elts[1])); } resume([].concat(err1).concat(err2), obj[key]); }); }); } function len(node, options, resume) { visit(node.elts[0], options, function (err1, val1) { let obj = val1; if (false) { err1 = err1.concat(error("Argument must be a number.", node.elts[0])); } resume([].concat(err1), obj.length); }); } function add(node, options, resume) { visit(node.elts[0], options, function (err1, val1) { val1 = +val1; if (isNaN(val1)) { err1 = err1.concat(error("Argument must be a number.", node.elts[0])); } visit(node.elts[1], options, function (err2, val2) { val2 = +val2; if (isNaN(val2)) { err2 = err2.concat(error("Argument must be a number.", node.elts[1])); } resume([].concat(err1).concat(err2), val1 + val2); }); }); } function mul(node, options, resume) { visit(node.elts[0], options, function (err1, val1) { val1 = +val1; if (isNaN(val1)) { err1 = err1.concat(error("Argument must be a number.", node.elts[0])); } visit(node.elts[1], options, function (err2, val2) { val2 = +val2; if (isNaN(val2)) { err2 = err2.concat(error("Argument must be a number.", node.elts[1])); } resume([].concat(err1).concat(err2), val1 * val2); }); }); } function style(node, options, resume) { visit(node.elts[0], options, 
function (err1, val1) { visit(node.elts[1], options, function (err2, val2) { val2.style = val1 resume([].concat(err1).concat(err2), val2); }); }); } return transform; })(); let render = (function() { function escapeXML(str) { return String(str) .replace(/&(?!\w+;)/g, "&amp;") .replace(/\n/g, " ") .replace(/\\/g, "\\\\") .replace(/</g, "&lt;") .replace(/>/g, "&gt;") .replace(/"/g, "&quot;"); } function render(val, options, resume) { // Do some rendering here. resume([], val); } return render; })(); export let compiler = (function () { exports.version = "v1.0.0"; exports.compile = function compile(code, data, resume) { try { let options = { data: data }; transform(code, options, function (err, val) { if (err.length) { resume(err, val); } else { render(val, options, function (err, val) { resume(err, val); }); } }); } catch (x) { console.log("ERROR with code"); console.log(x.stack); resume(["Compiler error"], { score: 0 }); } } })();
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import traceback

import requests
from django.utils.translation import ugettext_lazy as _

from pipeline.core.flow.activity import Service
from pipeline.component_framework.component import Component
from pipeline.conf import settings
from pipeline.core.flow.io import StringItemSchema

from gcloud.core.models import EnvironmentVariables

__group_name__ = _("企业微信(WechatWork)")


class WechatWorkSendMessageService(Service):
    # Sends a text or markdown message to one or more WechatWork group chats
    # through the webhook configured in BKAPP_SOPS_WECHAT_WORK_WEB_HOOK.

    def inputs_format(self):
        # Declares the four pipeline inputs this service consumes.
        return [
            self.InputItem(
                name=_("会话 ID"),
                key="wechat_work_chat_id",
                type="string",
                schema=StringItemSchema(description=_("通过在群里@企业微信机器人获取,多个用换行分隔")),
            ),
            self.InputItem(
                name=_("消息内容"),
                key="message_content",
                type="string",
                schema=StringItemSchema(description=_("消息内容")),
            ),
            self.InputItem(
                name=_("提醒人"),
                key="wechat_work_mentioned_members",
                type="string",
                schema=StringItemSchema(description=_("提醒群指定成员(@某个成员),多个成员用 `,` 分隔,@all表示提醒所有人")),
            ),
            self.InputItem(
                name=_("消息格式"),
                key="msgtype",
                type="string",
                schema=StringItemSchema(description=_("文本(text)或Markdown")),
            ),
        ]

    def execute(self, data, parent_data):
        # Returns True on success; on failure writes a reason to
        # data.outputs.ex_data and returns False.
        chat_id = data.inputs.wechat_work_chat_id
        content = data.inputs.message_content
        mentioned_members = data.inputs.wechat_work_mentioned_members
        msgtype = data.get_one_of_inputs("msgtype", "text")
        chat_id_list = chat_id.split("\n")

        url = EnvironmentVariables.objects.get_var("BKAPP_SOPS_WECHAT_WORK_WEB_HOOK")
        if not url:
            data.outputs.ex_data = "WechatWork send message URL is not config, contact admin please"
            return False

        if not chat_id:
            data.outputs.ex_data = _("会话 ID 不能为空")
            return False

        # WechatWork chat ids are fixed-length 32-character tokens.
        for c in chat_id_list:
            if len(c) != 32:
                data.outputs.ex_data = _("无效的会话 ID: {}".format(c))
                return False

        mentioned_list = []
        if mentioned_members:
            mentioned_list = str(mentioned_members).split(",")

        try:
            if msgtype == "text":
                resp = requests.post(
                    url=url,
                    json={
                        "chatid": "|".join(chat_id_list),
                        "msgtype": msgtype,
                        "text": {"content": str(content), "mentioned_list": mentioned_list},
                    },
                    timeout=5,
                )
            elif msgtype == "markdown":
                # Markdown has no mentioned_list field; mentions are embedded
                # into the content as <@member> markers instead.
                mentioned_str = " ".join(["<@{}>".format(mentioned) for mentioned in mentioned_list])
                content = "{}\n{}\n".format(str(content), mentioned_str)
                resp = requests.post(
                    url=url,
                    json={
                        "chatid": "|".join(chat_id_list),
                        "msgtype": msgtype,
                        "markdown": {"content": content},
                        "at_short_name": True,
                    },
                    timeout=5,
                )
            else:
                err = _("msgtype 不存在")
                self.logger.error(err)
                # Bug fix: err has no "{}" placeholder, so err.format(err)
                # was a confusing no-op; assign the message directly.
                data.outputs.ex_data = err
                return False
        except Exception as e:
            err = _("企业微信发送消息请求失败,详细信息: {}")
            self.logger.error(err.format(traceback.format_exc()))
            data.outputs.ex_data = err.format(e)
            return False

        if not resp.ok:
            err = _("企业微信发送消息请求失败,状态码: {}, 响应: {}").format(resp.status_code, resp.content)
            data.outputs.ex_data = err
            return False

        self.logger.info(resp.content)
        return True


class WechatWorkSendMessageComponent(Component):
    name = _("发送消息")
    code = "wechat_work_send_message"
    bound_service = WechatWorkSendMessageService
    form = "%scomponents/atoms/wechat_work/wechat_work_send_message/v1_0.js" % settings.STATIC_URL
    version = "1.0"
    desc = "请参考企业微信群机器人开发文档来获取机器人在群聊中的会话ID"
def generate_diff_array(arr):
    """Widen the two-element range ``arr`` in place by 20% of its span.

    The first element is pushed down and the second pushed up by
    ``0.2 * (arr[1] - arr[0])``.  The list is mutated and also returned.
    """
    margin = 0.2 * (arr[1] - arr[0])
    lo, hi = arr[0] - margin, arr[1] + margin
    arr[0], arr[1] = lo, hi
    return arr


result = generate_diff_array([1, 10])
print(result)
<filename>imageeditor/src/main/java/com/createchance/imageeditor/drawers/RippleTransDrawer.java<gh_stars>10-100
package com.createchance.imageeditor.drawers;

import android.opengl.GLES20;

import com.createchance.imageeditor.shaders.RippleTransShader;

/**
 * Ripple transition drawer: plugs a {@link RippleTransShader} into the
 * {@link AbstractTransDrawer} pipeline and exposes its two tunable uniforms.
 *
 * @author createchance
 * @date 2019/1/1
 */
public class RippleTransDrawer extends AbstractTransDrawer {

    /** Supplies the ripple shader instance used by the base-class pipeline. */
    @Override
    protected void getTransitionShader() {
        mTransitionShader = new RippleTransShader();
    }

    /**
     * Sets the ripple amplitude uniform.
     * Binds the program first so the uniform upload targets this drawer's
     * shader (mProgramId is presumably set up by the base class — confirm).
     *
     * @param amplitude ripple amplitude passed straight to the shader
     */
    public void setAmplitude(float amplitude) {
        GLES20.glUseProgram(mProgramId);
        ((RippleTransShader) mTransitionShader).setUAmplitude(amplitude);
    }

    /**
     * Sets the ripple speed uniform.
     *
     * @param speed ripple speed passed straight to the shader
     */
    public void setSpeed(float speed) {
        GLES20.glUseProgram(mProgramId);
        ((RippleTransShader) mTransitionShader).setUSpeed(speed);
    }
}
#!/bin/bash chmod +x /opt/servicec
"use strict";
// Returns a collection of immediate-mode drawing helpers (axes, grids,
// meshes, primitives) built on top of a legacygl context.  `mode == "line"`
// selects wireframe output everywhere; any other mode draws filled faces.
function get_drawutil(gl, legacygl) {
    var drawutil = {};
    // Unit axes colored x=red, y=green, z=blue.
    drawutil.xyzaxis = function() {
        legacygl.begin(gl.LINES);
        legacygl.color(1, 0, 0);
        legacygl.vertex(0, 0, 0);
        legacygl.vertex(1, 0, 0);
        legacygl.color(0, 1, 0);
        legacygl.vertex(0, 0, 0);
        legacygl.vertex(0, 1, 0);
        legacygl.color(0, 0, 1);
        legacygl.vertex(0, 0, 0);
        legacygl.vertex(0, 0, 1);
        legacygl.end();
    };
    // Square line grid on the xy-plane spanning [-size, size].
    drawutil.xygrid = function(size) {
        legacygl.begin(gl.LINES);
        for (var i = -size; i <= size; ++i) {
            legacygl.vertex(i, -size, 0);
            legacygl.vertex(i, size, 0);
            legacygl.vertex(-size, i, 0);
            legacygl.vertex( size, i, 0);
        }
        legacygl.end();
    };
    // Same grid on the yz-plane.
    drawutil.yzgrid = function(size) {
        legacygl.begin(gl.LINES);
        for (var i = -size; i <= size; ++i) {
            legacygl.vertex(0, i, -size);
            legacygl.vertex(0, i, size);
            legacygl.vertex(0, -size, i);
            legacygl.vertex(0, size, i);
        }
        legacygl.end();
    };
    // Same grid on the zx-plane.
    drawutil.zxgrid = function(size) {
        legacygl.begin(gl.LINES);
        for (var i = -size; i <= size; ++i) {
            legacygl.vertex(i, 0, -size);
            legacygl.vertex(i, 0, size);
            legacygl.vertex(-size, 0, i);
            legacygl.vertex( size, 0, i);
        }
        legacygl.end();
    };
    // Quad mesh: `faces` holds 4 vertex indices per face, `vertices` packed
    // xyz triplets.  In line mode each edge is emitted as a vertex pair;
    // legacygl.QUADS is a legacygl-specific fill primitive.
    drawutil.quadmesh = function(mode, vertices, faces) {
        legacygl.begin(mode == "line" ? gl.LINES : legacygl.QUADS);
        for (var f = 0; f < faces.length / 4; ++f) {
            for (var i = 0; i < 4; ++i) {
                var v0 = faces[4 * f + i];
                var x0 = vertices[3 * v0];
                var y0 = vertices[3 * v0 + 1];
                var z0 = vertices[3 * v0 + 2];
                legacygl.vertex(x0, y0, z0);
                if (mode == "line") {
                    var v1 = faces[4 * f + (i + 1) % 4];
                    var x1 = vertices[3 * v1];
                    var y1 = vertices[3 * v1 + 1];
                    var z1 = vertices[3 * v1 + 2];
                    legacygl.vertex(x1, y1, z1);
                }
            }
        }
        legacygl.end();
    };
    // Triangle mesh: same scheme as quadmesh with 3 indices per face.
    drawutil.trimesh = function(mode, vertices, faces) {
        legacygl.begin(mode == "line" ? gl.LINES : gl.TRIANGLES);
        for (var f = 0; f < faces.length / 3; ++f) {
            for (var i = 0; i < 3; ++i) {
                var v0 = faces[3 * f + i];
                var x0 = vertices[3 * v0];
                var y0 = vertices[3 * v0 + 1];
                var z0 = vertices[3 * v0 + 2];
                legacygl.vertex(x0, y0, z0);
                if (mode == "line") {
                    var v1 = faces[3 * f + (i + 1) % 3];
                    var x1 = vertices[3 * v1];
                    var y1 = vertices[3 * v1 + 1];
                    var z1 = vertices[3 * v1 + 2];
                    legacygl.vertex(x1, y1, z1);
                }
            }
        }
        legacygl.end();
    };
    // Axis-aligned cube centered at the origin with edge length `size`,
    // expressed as a quad mesh.
    drawutil.cube = function(mode, size) {
        var r = size / 2;
        this.quadmesh(mode,
            [ // vertices
            -r, -r, -r,
             r, -r, -r,
            -r,  r, -r,
             r,  r, -r,
            -r, -r,  r,
             r, -r,  r,
            -r,  r,  r,
             r,  r,  r
            ],
            [ // faces
            1, 3, 7, 5,     // positive-x
            3, 2, 6, 7,     // positive-y
            2, 0, 4, 6,     // negative-x
            0, 1, 5, 4,     // negative-y
            4, 5, 7, 6,     // positive-z
            0, 2, 3, 1      // negative-z
            ]
        );
    };
    // Regular polygon approximation of a circle of diameter `size` on the
    // xy-plane; numdiv defaults to 12 segments.
    drawutil.circle = function(mode, size, numdiv) {
        if (!numdiv) numdiv = 12;
        var r = size / 2;
        legacygl.begin(mode == "line" ? gl.LINE_LOOP : gl.TRIANGLE_FAN);
        for (var i = 0; i < numdiv; ++i) {
            var theta = i * 2 * Math.PI / numdiv;
            var x = r * Math.cos(theta);
            var y = r * Math.sin(theta);
            legacygl.vertex(x, y, 0);
        }
        legacygl.end();
    };
    // UV sphere tessellated into `slices` longitudes and `stacks` latitudes;
    // each cell is emitted as one quad (or its four edges in line mode).
    drawutil.sphere = function(mode, radius, slices, stacks) {
        // Spherical-to-Cartesian conversion for this sphere's radius.
        function angle2pos(theta, phi) {
            var x = radius * Math.cos(theta) * Math.sin(phi);
            var y = radius * Math.sin(theta) * Math.sin(phi);
            var z = radius * Math.cos(phi);
            return [x, y, z];
        };
        legacygl.begin(mode == "line" ? gl.LINES : legacygl.QUADS);
        var phi = 0;
        var dphi = Math.PI / stacks;
        for (var i = 0; i < stacks; ++i, phi += dphi) {
            var theta = 0;
            var dtheta = 2 * Math.PI / slices;
            for (var j = 0; j < slices; ++j, theta += dtheta) {
                var p = [
                    angle2pos(theta, phi),
                    angle2pos(theta + dtheta, phi),
                    angle2pos(theta + dtheta, phi + dphi),
                    angle2pos(theta , phi + dphi)
                ];
                for (var k = 0; k < 4; ++k) {
                    legacygl.vertex(p[k][0], p[k][1], p[k][2]);
                    if (mode == "line") {
                        var k1 = (k + 1) % 4;
                        legacygl.vertex(p[k1][0], p[k1][1], p[k1][2]);
                    }
                }
            }
        }
        legacygl.end();
    };
    return drawutil;
};
#!/bin/sh
# A dwm_bar function to show the master volume of ALSA
# Joe Standring <git@joestandring.com>
# GNU GPLv3

# Dependencies: alsa-utils

dwm_alsa () {
    # Mute state ("on"/"off") and volume percentage from the Master control.
    STATUS=$(amixer sget Master | tail -n1 | sed -r "s/.*\[(.*)\]/\1/")
    VOL=$(amixer get Master | tail -n1 | sed -r "s/.*\[(.*)%\].*/\1/")

    printf "%s" "$SEP1"
    # Bug fix: the original `if [ 0 ]` wrapper is always true (a single
    # non-empty string), so the duplicated text-only branch was dead code
    # and has been removed; behavior is unchanged.
    if [ "$STATUS" = "off" ]; then
        printf "🔇"
    else
        if [ "$VOL" -ge 0 ] && [ "$VOL" -le 33 ]; then
            printf "🔈 %s%%" "$VOL"
        elif [ "$VOL" -gt 33 ] && [ "$VOL" -le 66 ]; then
            printf "🔉 %s%%" "$VOL"
        else
            printf "🔊 %s%%" "$VOL"
        fi
    fi
    printf "%s\n" "$SEP2"
}

dwm_alsa
import json

from sklearn.externals import joblib

# Trained California-housing regression model, loaded once at import time so
# warm serverless invocations reuse it.
model_name = 'model_1553724836.6208675.joblib'
model = joblib.load(model_name)


def predict(event, context):
    """Lambda-style handler: predict a house price from query parameters.

    Expects the eight California-housing features under
    ``event['queryStringParameters']`` and returns an API-Gateway style
    response whose JSON body carries ``predictedPrice`` in USD.  When the
    parameters are missing, the body's ``message`` explains why instead.
    """
    body = {
        "message": "OK",
    }
    if 'queryStringParameters' in event:  # idiomatic: no .keys() needed
        params = event['queryStringParameters']
        # The model was trained with median income in units of $100k.
        medInc = float(params['medInc']) / 100000
        houseAge = float(params['houseAge'])
        aveRooms = float(params['aveRooms'])
        aveBedrms = float(params['aveBedrms'])
        population = float(params['population'])
        aveOccup = float(params['aveOccup'])
        latitude = float(params['latitude'])
        longitude = float(params['longitude'])
        inputVector = [medInc, houseAge, aveRooms, aveBedrms,
                       population, aveOccup, latitude, longitude]
        data = [inputVector]
        predictedPrice = model.predict(data)[0] * 100000  # convert back to USD
        predictedPrice = round(predictedPrice, 2)
        body['predictedPrice'] = predictedPrice
    else:
        body['message'] = 'queryStringParameters not in event.'
        print(body['message'])
    response = {
        "statusCode": 200,
        "body": json.dumps(body),
        "headers": {
            "Content-Type": 'application/json',
            "Access-Control-Allow-Origin": "*"
        }
    }
    return response


def do_main():
    """Local smoke test: run predict() on a sample event and save the event."""
    event = {
        'queryStringParameters': {
            'medInc': 200000,
            'houseAge': 10,
            'aveRooms': 4,
            'aveBedrms': 1,
            'population': 800,
            'aveOccup': 3,
            'latitude': 37.54,
            'longitude': -121.72
        }
    }
    response = predict(event, None)
    body = json.loads(response['body'])
    print('Price:', body['predictedPrice'])
    with open('event.json', 'w') as event_file:
        event_file.write(json.dumps(event))

#do_main()
import { Link } from 'react-router-dom'; import AuthImages from '../helpers/AuthImages'; import { logInWithFacebook, logInWithGoogle } from './loginProviders'; function Auth() { return ( <div className="auth"> <div className="container"> <div className="select-registration-method"> <AuthImages /> <div className="methods"> <div className="select__method"> <h1 className="select__method-h1"> <span>Lavarc</span> invites You to join our ranks! </h1> <p className="select__method-p1"> Create an account, it's free! Thanks to it you will be able to communicate with our community. </p> <div className="select__method-buttons"> <button type="button" className="btn" onClick={logInWithGoogle}> Sign up with Google </button> <button type="button" className="btn" onClick={logInWithFacebook}> Sign up with Facebook </button> <Link to="/auth/register" className="btn primary"> Sign up with Email </Link> </div> <p className="select__method-p2"> Already a member? <Link to="/auth/login">Log in.</Link> </p> </div> </div> </div> </div> </div> ); } export default Auth;
#!/usr/bin/env python3 #══════════════════════════════════════════════════════════════════════════════════════════════════════════════════ # Class: PyDictFileEncy #────────────────────────── # Author: <NAME> #────────────────────────── # Version: 2018/11/17 #────────────────────────── # discription: # # The key of the dict can not be numeric type. String only. # # # # The Dict MUST contains a element of FILE_DB_CONFIG (dict) # # FILE_DB_CONFIG:{password: } # FILE_DB_TABLE:{ } # #────────────────────────── # Used : import json,hashlib,base64,os,inspect,functools from Crypto import Random from Crypto.Cipher import AES import hashlib #────────────────────────── # Interface: # # [ini] path,key # # [sub] connect() # make connect to the filedict. # If the file exists, load the data (with the key). # If not, create one with the key # # [fun] IsConnected() # # [sub] CreateTableIfNotExist(TableName) # if it is already existed, do nonthing # # [fun] GetTableList(): # return dictkey type # # [fun] GetTable(TableName) # return a normal python dict # # [sub] DropTable(TableName) # if the table does not exist, do nothing. # # [sub] SetPassword(password) # set a new password to filedict # # [sub] Save() # save data into file. # # [fun] LoadFileToDict(filepath,key = '') # read a file to dict # # # [fun] GetFolderPath() # # [fun] GetFilePath() # # [fun] SaveDecryptedDataToFile(FilePath) # save all data (dict) into a file. Careful to use this procedure # # [fun] ReadDecryptedDataFile(FilePath) # The inverse action of 'SaveDecryptedDataToFile'. # When load a new file, the current data file is overried. 
#
#
#══════════════════════════════════════════════════════════════════════════════════════════════════════════════════


class PyDictFileEncy():
    """AES-encrypted, file-backed dictionary store.

    On disk the file is: a 5-character MD5 prefix (cheap integrity check)
    followed by the base64 AES-CBC ciphertext of a JSON document of the form

        {'FILE_DB_CONFIG': {'password': <key>}, 'FILE_DB_TABLE': {...}}

    Requires PyCrypto/PyCryptodome (``Crypto``) plus the module-level imports
    json, hashlib, base64, os, inspect, functools.
    """

    class AESCipher(object):
        # -------------------------------------
        # https://stackoverflow.com/questions/12524994/encrypt-decrypt-using-pycrypto-aes-256
        def __init__(self, key):
            self.bs = 32  # pad block size (note: intentionally 32, not AES.block_size)
            self.key = hashlib.sha256(key.encode()).digest()

        def encrypt(self, raw):
            raw = self._pad(raw)
            iv = Random.new().read(AES.block_size)
            cipher = AES.new(self.key, AES.MODE_CBC, iv)
            # IV is prepended so decrypt() can recover it.
            return base64.b64encode(iv + cipher.encrypt(raw.encode('utf-8')))

        def decrypt(self, enc):
            enc = base64.b64decode(enc)
            iv = enc[:AES.block_size]
            cipher = AES.new(self.key, AES.MODE_CBC, iv)
            return self._unpad(cipher.decrypt(enc[AES.block_size:])).decode('utf-8')

        def _pad(self, s):
            # PKCS7-style padding up to self.bs.
            return s + (self.bs - len(s) % self.bs) * chr(self.bs - len(s) % self.bs)

        @staticmethod
        def _unpad(s):
            return s[:-ord(s[len(s) - 1:])]

    @staticmethod
    def GetFilePath():
        # Path of this module file (NOT of the database file).
        return os.path.realpath(__file__)

    @classmethod
    def GetFolderPath(cls):
        f = cls.GetFilePath()
        return os.path.dirname(f)

    # ----------------------------------------------
    def __init__(self, path, key=''):
        self.path = path
        self.key = key
        self.__Dict = {}
        self.__isconnected = False
        # Set on every mutation; currently informational only (never read).
        self.__needstage = False

    def IsConnected(self):
        return self.__isconnected

    def connect(self):
        """Open the store: load the file if it exists, else create it.

        A load failure (missing/corrupt file or wrong key) leaves the
        instance disconnected instead of raising.
        """
        if os.path.isfile(self.path):
            self.__Dict = self.LoadFileToDict(self.path, self.key)
            if self.__Dict is None:
                # raise Exception('Invalid DB file or key error!')
                self.__isconnected = False
            else:
                self.__isconnected = True
        else:
            self.__isconnected = True
            self.__Dict = self.__InitiateFile(self.path, self.key)

    def __IsconnectCheck(f):
        # Decorator: guard methods that require an established connection.
        @functools.wraps(f)
        def decorated(*args, **kwargs):
            self = args[0]
            if self.IsConnected():
                return f(*args, **kwargs)
            else:
                print("@function '{}', filedict is not connected".format(inspect.stack()[1][3]))
                raise PermissionError('Filedict is not connected.')
        return decorated

    @__IsconnectCheck
    def SetPassword(self, password):
        """Set a new password; takes effect on disk at the next Save()."""
        self.__Dict['FILE_DB_CONFIG']['password'] = password
        self.__needstage = True

    @__IsconnectCheck
    def CreateTableIfNotExist(self, TableName):
        """Create an empty table; if it already exists, do nothing."""
        if TableName not in self.__Dict['FILE_DB_TABLE']:
            self.__Dict['FILE_DB_TABLE'][TableName] = {}
            self.__needstage = True
        else:
            print('table existed.Do nothing')

    @__IsconnectCheck
    def DropTable(self, TableName):
        """Remove a table; if it does not exist, do nothing."""
        if TableName in self.__Dict['FILE_DB_TABLE']:
            del self.__Dict['FILE_DB_TABLE'][TableName]
            self.__needstage = True

    @__IsconnectCheck
    def GetTableList(self):
        # Returns a dict_keys view of the table names.
        return self.__Dict['FILE_DB_TABLE'].keys()

    @__IsconnectCheck
    def GetTable(self, TableName):
        # Returns the live table dict (mutations persist after Save()), or None.
        return self.__Dict['FILE_DB_TABLE'].get(TableName, None)

    @__IsconnectCheck
    def Save(self):
        """Persist the current in-memory dict to the encrypted file."""
        self.__SaveDictToFile(self.path, self.__Dict)

    @__IsconnectCheck
    def CreateTable(self, TableName):
        """Create a table; raise ValueError if it already exists.

        BUG FIX: the original called self.NotConnectedError(), a method that
        had been commented out (AttributeError at runtime), looked the table
        up at the top level of the dict instead of under FILE_DB_TABLE, and
        killed the whole process with exit(). It now mirrors
        CreateTableIfNotExist semantics and raises instead of exiting.
        """
        if TableName in self.__Dict['FILE_DB_TABLE']:
            raise ValueError('{} is existed in DB'.format(TableName))
        self.__Dict['FILE_DB_TABLE'][TableName] = {}
        self.__needstage = True

    @classmethod
    def LoadFileToDict(cls, filepath, key=''):
        """Read an encrypted file and return its dict, or None on any failure.

        Failure modes folded into None: bad MD5 prefix, undecryptable data,
        invalid JSON, or a stored password that does not match `key`.
        """
        reader = cls.AESCipher(key)
        f = open(filepath, 'rb')
        Estr = f.read()
        f.close()
        try:
            # ----------------
            # 20200425: verify the 5-char MD5 prefix before decrypting.
            checkLen = 5
            Estr = Estr.decode('utf-8')
            CheckMD5 = Estr[0:checkLen]
            PureData = Estr[checkLen:]
            if hashlib.md5(PureData.encode('utf-8')).hexdigest()[0:checkLen] != CheckMD5:
                return None
            Estr = PureData.encode('utf-8')
            # ----------------
            getString = reader.decrypt(Estr)
            d = json.loads(getString)
            if d['FILE_DB_CONFIG']['password'] == key:
                return d
        except:
            # Deliberate best-effort: any error means "cannot load".
            return None
        return None

    # [sub] __SaveDictToFile(filepath,Dict)
    # save a dict to file.
    @classmethod
    def __SaveDictToFile(cls, filepath, Dict):
        jStr = json.dumps(Dict)
        key = Dict['FILE_DB_CONFIG']['password']
        reader = cls.AESCipher(key)
        Estr = reader.encrypt(jStr)
        # ----------------
        # 20200425: prepend a 5-char MD5 of the ciphertext as an integrity
        # check (the inverse of the check in LoadFileToDict).
        checkLen = 5
        Estr = Estr.decode('utf-8')
        CheckMD5 = hashlib.md5(Estr.encode('utf-8')).hexdigest()[0:checkLen]
        TotStr = CheckMD5 + Estr
        Estr = TotStr.encode('utf-8')
        # ----------------
        # Write to a temp file first, then swap, so a crash mid-write cannot
        # truncate the existing database.
        tpfile = filepath + '.tmp'
        f = open(tpfile, 'wb')
        f.write(Estr)
        f.close()
        try:
            os.remove(filepath)
        except:
            pass
        os.rename(tpfile, filepath)

    # [sub] __InitiateFile(filepath,key):
    # initiate a data file with a key
    @classmethod
    def __InitiateFile(cls, filepath, key=''):
        d = {
            'FILE_DB_CONFIG': {'password': key},
            'FILE_DB_TABLE': {}
        }
        cls.__SaveDictToFile(filepath, d)
        return d

    @__IsconnectCheck
    def SaveDecryptedDataToFile(self, FilePath):
        # Dumps the WHOLE store as plaintext JSON -- use with care.
        jStr = json.dumps(self.__Dict)
        open(FilePath, 'w').write(jStr)

    def ReadDecryptedDataFile(self, FilePath):
        # Inverse of SaveDecryptedDataToFile: the current data is REPLACED
        # and immediately re-encrypted to disk.
        Estr = open(FilePath, 'r').read()
        DICT = json.loads(Estr)
        self.__Dict = DICT
        self.Save()
# #----------------------------------------------------- # # create a table # container = PyDictFileEncy('encrypted.dat','password') # container.connect() # container.CreateTableIfNotExist('testTable') # # remenber to save it # container.Save() # #----------------------------------------------------- # # get all the tables in container # container = PyDictFileEncy('encrypted.dat','password') # container.connect() # # return a python dict 'dict_keys' type # l = container.GetTableList() # print(l) # #----------------------------------------------------- # # get a normal python dict in the container so one can interact with it # container = PyDictFileEncy('encrypted.dat','password') # container.connect() # d = container.GetTable('testTable') # print(type(d),d) # # --- inser an element # d['new'] = 'hellow' # # remenber to save it # container.Save() # #----------------------------------------------------- # # remove a table # container = PyDictFileEncy('encrypted.dat','password') # container.connect() # container.DropTable('testTable') # # remenber to save it # container.Save() # #----------------------------------------------------- # # reset password # container = PyDictFileEncy('encrypted.dat','password') # container.connect() # container.SetPassword('<PASSWORD>') # # remenber to save it # container.Save() # # the container itself is already renewed # print( container.IsConnected() ) # # # print(f.GetFolderPath()) # g = f.GetTable('x') # print(g) # # # # # # # # print(f.__Dict) # # # # # f.CreateTableIfNotExist('MyPasswordManeger') # # # # f.SaveToDB() # f = PyDictFileEncy('test.db','123') # f.connect() # # f.CreateTableIfNotExist('t1') # f.CreateTableIfNotExist('t2') # f.CreateTableIfNotExist('t3')
// NOTE(review): removed a stray "<filename>src/example/linechart1/index.js"
// scaffolding artifact that preceded the imports and made this file invalid JS.
import * as d3 from 'd3'
import 'assets/css/linechart1.css'

// Chart geometry: outer size minus margins gives the plot body.
let width = 500,
  height = 250,
  margin = { top: 30, right: 20, bottom: 20, left: 50 },
  bodyWidth = width - margin.left - margin.right,
  bodyHeight = height - margin.top - margin.bottom

let data = [1, 3, 5, 7, 8, 4, 3, 7]

// x maps the sample index, y maps the sample value (inverted: SVG y grows down).
let xScale = d3.scaleLinear().domain([0, data.length - 1]).range([0, bodyWidth]),
  yScale = d3.scaleLinear().domain([0, d3.max(data)]).range([bodyHeight, 0])

// Smooth (cardinal-spline) line through the data points.
let line = d3.line()
  .x((d, i) => xScale(i))
  .y(d => yScale(d))
  .curve(d3.curveCardinal)

let xAxis = d3.axisBottom().scale(xScale),
  yAxis = d3.axisLeft().scale(yScale)

let svg = d3.select('#container')
  .append('svg')
  .attr('width', width)
  .attr('height', height)

// Group shifted by the margins; everything is drawn inside it.
let g = svg.append('g')
  .attr('transform', 'translate(' + margin.left + ', ' + margin.top + ')')

g.append('path')
  .attr('d', line(data))

g.append('g')
  .call(xAxis)
  .attr('transform', 'translate(0, ' + bodyHeight + ')')

g.append('g')
  .call(yAxis)

// Rotated y-axis label.
g.append('text')
  .text('Price($)')
  .attr('transform', 'rotate(-90)')
  .attr('text-anchor', 'end')
  .attr('dy', '1em')
import random  # no longer used here; retained in case other importers rely on it
import secrets
import string


def generate_password(length=10):
    """Return a random password of `length` characters.

    Drawn from ASCII letters, digits and punctuation.

    SECURITY FIX: uses the `secrets` module (CSPRNG) instead of `random`,
    whose Mersenne-Twister output is predictable and unsuitable for
    passwords. `length` is now a parameter (default 10 preserves the old
    behaviour).
    """
    char_set = string.ascii_letters + string.digits + string.punctuation
    return ''.join(secrets.choice(char_set) for _ in range(length))


if __name__ == '__main__':
    print(generate_password())
#!/bin/bash

## Helper to run tests locally using same commands as circle ci config
# See: encoded/.circleci/config.yml
#
# Use Cases: No argument defaults to not bdd tests
# $ circle-tests.sh bdd
# $ circle-tests.sh npm
# $ circle-tests.sh
##

# Dispatch on the first argument. An unrecognised non-empty argument runs
# nothing, exactly like the original if-chain.
case "$1" in
    bdd)
        pytest -v -v --timeout=400 -m "bdd" --tb=short --splinter-implicit-wait 10 --splinter-webdriver chrome --splinter-socket-timeout 300 --chrome-options "--headless --disable-gpu --no-sandbox --disable-dev-shm-usage --disable-extensions --whitelisted-ips --window-size=1920,1080"
        ;;
    npm)
        npm test
        ;;
    "")
        pytest -v -v --timeout=400 -m "not bdd"
        ;;
esac
// Copyright (C) 2019-2021, <NAME>. // @author xiongfa.li // @version V1.0 // Description: package model type ModelData struct { Config *Config Value *Value } func LoadModelData(configPath, valuePath string) (*ModelData, error) { c, err := LoadConfig(configPath) if err != nil { return nil, err } v, err := LoadValue(valuePath) if err != nil { return nil, err } return &ModelData{ Config: c, Value: v, }, nil }
import _ from 'lodash';
import axios from 'axios';
import service from './service';

export default {
  fetchItems,
  fetchItemById
}

// Load one page of browse results, starting at offset `start`.
function fetchItems(start) {
  const url = service.getUrl('browse', 'data');
  return axios
    .get(url, { params: { start } })
    .then(res => res.data)
    .then(data => ({
      items: _.map(data.items, toListEntity),
      totalItems: data.totalItems
    }));
}

// Load the full details for a single item id.
function fetchItemById(id) {
  const url = service.getUrl('item', `${id}/data`);
  return axios.get(url).then(res => toEntity(res.data));
}

// Minimal projection used by list views.
function toListEntity(data) {
  return {
    id: data.integerId,
    image: data.image,
    price: _.get(data, 'price.amounts.USD')
  };
}

// Full projection used by the detail view.
function toEntity(data) {
  return {
    id: data.integerId,
    image: data.image,
    title: data.title,
    price: _.get(data, 'price.amounts.USD'),
    measurements: _.get(data, 'measurements.display'),
    description: data.description,
    creators: data.creators,
    seller: {
      company: _.get(data, 'seller.company'),
      logoUrl: _.get(data, 'seller.logo')
    },
  };
}
const AppErrors = require('../errors/AppErrors'); module.exports = (error,request,response,next) => { console.log(error); if(error instanceof AppErrors){ return response.status(error.error).json({ error: error.mensagem }); } console.log(error); return response.status(500).json({ message: error.message }); }
# Scrapes craftbeer.com's beer-styles guide into plain Ruby hashes.
class Scraper
  # Returns an array of hashes, one per beer style:
  #   { family_name:, style_name:, description:, commercial_examples: [...] }
  # NOTE(review): relies on Nokogiri and on `open` coming from open-uri --
  # both must be required elsewhere; confirm before running standalone.
  def self.scrape
    beer_info = []
    site = Nokogiri::HTML(open("https://www.craftbeer.com/beer/beer-styles-guide"))
    commercial_example = {}
    site.css("#content .style").each do |beer_site|
      # NOTE(review): `a_to_z` is assigned but never used below.
      a_to_z = beer_site.css(".simple li")
      each_beer = {
        family_name: beer_site.css(".family-name").text.gsub("Style Family: ", ""),
        style_name: beer_site.css(".style-name").text,
        # Keep only real description paragraphs: drop the "Style Family"
        # repeat, site boilerplate and newsletter blurbs.
        description: beer_site.css("p").map(&:text).keep_if{|d| !d.match(/(Style Family)|(\bour\b)|(CraftBeer.com)|(newsletter)/)}.join("\n\n"),
        commercial_examples: []
      }
      # iterate through commercial examples
      beer_site.css(".winners li").each do |example|
        commercial_example = {
          beer_name: example.css(".brewery").text.strip,
          brewery: example.css(".value a").text
        }
        each_beer[:commercial_examples] << commercial_example
      end
      beer_info << each_beer
    end
    beer_info
  end
end
// The HTML structure for displaying the search results:
//   <div id="results-container"></div>
// (moved into a comment -- a bare HTML tag is not valid JavaScript)

// JavaScript code to fetch search results and display them
const url = 'http://example.com/api/search';

// POST the keyword to the search API and return the parsed JSON response.
// BUG FIX: the request body is JSON but no Content-Type header was sent;
// most servers reject or misparse a JSON body without it.
async function fetchData(keyword) {
  const response = await fetch(url, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({keyword})
  });
  const data = await response.json();
  return data;
}

// Render each result as <div><h2>title</h2><p>summary</p></div> inside
// #results-container.
async function displayResults(keyword) {
  const data = await fetchData(keyword);
  const resultsContainer = document.getElementById('results-container');
  data.forEach(result => {
    // Create the elements to hold the data
    const resultElem = document.createElement('div');
    const titleElem = document.createElement('h2');
    const summaryElem = document.createElement('p');
    // SECURITY FIX: use textContent instead of innerHTML so a malicious API
    // response cannot inject markup or script into the page (XSS).
    titleElem.textContent = result.title;
    summaryElem.textContent = result.summary;
    // Append the elements
    resultElem.appendChild(titleElem);
    resultElem.appendChild(summaryElem);
    resultsContainer.appendChild(resultElem);
  });
}

// Call the function with 'cars' as the keyword
displayResults('cars');
// Build the markup for a scrollable table of games.
// BUG FIX: the original emitted <table><div><table></table>…rows…</div></table>
// -- a <div> is not valid inside <table>, and the rows were appended AFTER the
// inner table had already been closed, so they sat outside any table. Emit a
// single table inside the scrollable wrapper div instead.
// NOTE(review): game fields are interpolated unescaped; if any field can
// contain user-supplied text, escape it before calling this.
function generateGameTable(games) {
  let html = '<div class="overflow-auto w-100" style="height:250px">\n<table>\n';
  games.forEach(game => {
    html += '<tr>\n';
    html += `<td>\n<label>ID du jeu : </label>\n${game.ID}\n</td>\n`;
    html += `<td>\n<label>Title : </label>\n${game.title}\n</td>\n`;
    html += `<td>\n<label>Genre : </label>\n${game.genre}\n</td>\n`;
    html += `<td>\n<label>Platform : </label>\n${game.platform}\n</td>\n`;
    html += '</tr>\n';
  });
  html += '</table>\n</div>';
  return html;
}
import nltk
# BUG FIX: AffinityPropagation is a scikit-learn clustering estimator; it
# lives in sklearn.cluster, not nltk.classify (the original import raises
# ImportError).
from sklearn.cluster import AffinityPropagation
from sklearn.feature_extraction.text import TfidfVectorizer

text = "Bonjour tout le monde!"

# Tokenise with NLTK (requires the 'punkt' tokenizer data to be available).
vectorizer = TfidfVectorizer(min_df = 1, analyzer = nltk.word_tokenize)
X = vectorizer.fit_transform([text])

clf = AffinityPropagation()
clf.fit(X)
labels = clf.labels_

# NOTE(review): with a single input document this clustering is degenerate
# (one sample -> one cluster) and the printed value is just the vocabulary
# token at the cluster-exemplar index, not a language -- the approach looks
# questionable; confirm the intent with the author.
language = clf.cluster_centers_indices_[labels[0]]
print(vectorizer.get_feature_names()[language])
#!/bin/bash -e dir=`dirname $0` root=`cd $dir;pwd` gobin=$root/build/server/bin/ function init_relay() { echo "Configurable networks: 'standalone','sirius','mainnet'" if [ "${NETWORK}" != "standalone" ] && [ "${NETWORK}" != "sirius" ] && [ "${NETWORK}" != "mainnet" ]; then echo "Invalid network: ${NETWORK}" exit 1 else echo "Using network: ${NETWORK}" fi network=${NETWORK} cd $root export GOPATH=$root/server/:$root/build/server echo "GOPATH: $GOPATH" echo "RELAY_URL: ${RELAY_URL}" ./scripts/extract_abi.js blocktime=${T=0} hubaddr=`npx truffle migrate --network=$network --reset | tee /dev/stderr | grep -A 4 "RelayHub" | grep "contract address" | grep "0x.*" -o` if [ -z "$hubaddr" ]; then echo "FATAL: failed to detect RelayHub address" exit 1 fi echo $hubaddr > ./hubaddr.txt ( sleep 3; ./scripts/fundrelay.js $hubaddr ${RELAY_URL} 0 ) & } function run_relay() { hubaddr=$(cat ${root}/hubaddr.txt) $gobin/RelayHttpServer -DefaultGasPrice ${GAS_PRICE} -GasPricePercent ${GAS_PRICE_PERCENT} -RelayHubAddress $hubaddr -RegistrationBlockRate ${REGISTRATION_BLOCK_RATE} -Workdir $root/build/server -EthereumNodeUrl ${ETHEREUM_NODE_URL} } function main() { if [ ! -f $root/hubaddr.txt ]; then init_relay fi run_relay } main
// (C) 2007-2020 GoodData Corporation
import { TotalType } from "@gooddata/sdk-model";

// --- column type discriminators ---------------------------------------------
export const ROW_ATTRIBUTE_COLUMN = "ROW_ATTRIBUTE_COLUMN";
export const COLUMN_ATTRIBUTE_COLUMN = "COLUMN_ATTRIBUTE_COLUMN";
export const MEASURE_COLUMN = "MEASURE_COLUMN";

// --- identifier building: separators and the placeholders that escape them --
export const FIELD_SEPARATOR = "-";
export const FIELD_SEPARATOR_PLACEHOLDER = "DASH";
export const FIELD_TYPE_MEASURE = "m";
export const FIELD_TYPE_ATTRIBUTE = "a";
export const ID_SEPARATOR = "_";
export const ID_SEPARATOR_PLACEHOLDER = "UNDERSCORE";
export const DOT_PLACEHOLDER = "DOT";

// --- row classification and header grouping ---------------------------------
export const ROW_TOTAL = "rowTotal";
export const ROW_SUBTOTAL = "rowSubtotal";
export const COLUMN_GROUPING_DELIMITER = " › ";

// Total types the table supports, in this order.
export const AVAILABLE_TOTALS: TotalType[] = ["sum", "max", "min", "avg", "med", "nat"];

// Page size for column paging.
export const COLS_PER_PAGE: number = 1000;

// --- CSS class hooks and fonts ----------------------------------------------
// NOTE(review): the fonts presumably feed text-width measurement for column
// sizing -- confirm against the consumer before changing them.
export const VALUE_CLASS = "s-value";
export const HEADER_LABEL_CLASS = "s-header-cell-label";
export const ROW_TOTAL_CLASS = "gd-row-total";
export const ROW_SUBTOTAL_CLASS = "gd-table-row-subtotal";
export const DEFAULT_HEADER_FONT = '12px / 28px avenir, "Helvetica Neue", arial, sans-serif';
export const DEFAULT_ROW_FONT = '12px / 26px avenir, "Helvetica Neue", arial, sans-serif';
export const DEFAULT_SUBTOTAL_FONT = '700 12px / 26px avenir, "Helvetica Neue", arial, sans-serif';
export const DEFAULT_TOTAL_FONT = '700 12px / 26px avenir, "Helvetica Neue", arial, sans-serif';
$(function(){
    // dropdown menu
    var timeout = 200;   // delay (ms) before an opened submenu is closed
    var closetimer = 0;  // pending close-timer handle
    var ddmenuitem = 0;  // currently opened submenu (jQuery set), 0 if none

    // Open this item's submenu, closing any previously open one first.
    function dd_open()
    {
        dd_canceltimer();
        dd_close();
        // NOTE(review): liwidth is computed but unused -- the matching
        // 'width' CSS assignment below was commented out.
        var liwidth = $(this).width();
        ddmenuitem = $(this).find('> ul').css({'visibility': 'visible'/* , 'width': liwidth */});
        ddmenuitem.prev().addClass('dd_hover').parent().addClass('dd_hover');
    }

    // Hide the currently open submenu (if any) and clear its hover classes.
    function dd_close()
    {
        if(ddmenuitem) ddmenuitem.css('visibility', 'hidden').prev().removeClass('dd_hover').parent().removeClass('dd_hover');
    }

    // Schedule a delayed close after the pointer leaves the menu item.
    function dd_timer() {closetimer = window.setTimeout(dd_close, timeout); }

    // Cancel a pending close (pointer re-entered before the delay elapsed).
    function dd_canceltimer()
    {
        if (closetimer)
        {
            window.clearTimeout(closetimer);
            closetimer = null;
        }
    }

    // A click anywhere on the page closes the menu.
    document.onclick = dd_close;
    $('#mainmenu > li').bind('mouseover', dd_open);
    $('#mainmenu > li').bind('mouseout', dd_timer);

    // nice hovers on main menu: temporarily strip the "current"/"section"
    // highlight while another item is hovered, restore it on mouseout.
    $('#mainmenu li').hover(
        function(){
            $(this).parent('ul').find('li.current').addClass('currentOff').removeClass('current');
            $(this).parent('ul').find('li.section').addClass('sectionOff').removeClass('section');
        },
        function(){
            $(this).parent('ul').find('li.currentOff').addClass('current').removeClass('currentOff');
            $(this).parent('ul').find('li.sectionOff').addClass('section').removeClass('sectionOff');
        }
    );

    // search form: clear the pre-filled placeholder text on first focus.
    var currentSearch = $('#txtSearch').val();
    $('#txtSearch').focus(function () {
        if ($(this).val() == currentSearch) {
            $(this).val('');
        }
    })

    // jreject - prompts user to upgrade browser if less than ie8
    $.reject({
        reject : {
            msie5: true,
            msie6: true,
            msie7: true,
            msie8: false,
            msie9: false
        },
        imagePath : 'mysite/thirdparty/jreject/images/',
        closeCookie: true
    });
});
<reponame>masseelch/ogen package uri import ( "fmt" "testing" "github.com/stretchr/testify/require" ) func TestPathEncoder(t *testing.T) { t.Run("Value", func(t *testing.T) { tests := []struct { Param string Input string Expect string Style PathStyle Explode bool }{ { Param: "id", Input: "foo", Expect: "foo", Style: PathStyleSimple, Explode: false, }, { Param: "id", Input: "foo", Expect: "foo", Style: PathStyleSimple, Explode: true, }, { Param: "id", Input: "foo", Expect: ".foo", Style: PathStyleLabel, Explode: false, }, { Param: "id", Input: "foo", Expect: ".foo", Style: PathStyleLabel, Explode: true, }, { Param: "id", Input: "foo", Expect: ";id=foo", Style: PathStyleMatrix, Explode: false, }, { Param: "id", Input: "foo", Expect: ";id=foo", Style: PathStyleMatrix, Explode: true, }, } for i, test := range tests { e := NewPathEncoder(PathEncoderConfig{ Param: test.Param, Style: test.Style, Explode: test.Explode, }) require.NoError(t, e.EncodeValue(test.Input)) require.Equal(t, test.Expect, e.Result(), fmt.Sprintf("Test %d", i+1)) } }) t.Run("Array", func(t *testing.T) { tests := []struct { Param string Input []string Expect string Style PathStyle Explode bool }{ { Param: "id", Input: []string{"foo", "bar"}, Expect: "foo,bar", Style: PathStyleSimple, Explode: false, }, { Param: "id", Input: []string{"foo", "bar"}, Expect: "foo,bar", Style: PathStyleSimple, Explode: true, }, { Param: "id", Input: []string{"foo", "bar"}, Expect: ".foo,bar", Style: PathStyleLabel, Explode: false, }, { Param: "id", Input: []string{"foo", "bar"}, Expect: ".foo.bar", Style: PathStyleLabel, Explode: true, }, { Param: "id", Input: []string{"foo", "bar"}, Expect: ";id=foo,bar", Style: PathStyleMatrix, Explode: false, }, { Param: "id", Input: []string{"foo", "bar"}, Expect: ";id=foo;id=bar", Style: PathStyleMatrix, Explode: true, }, } for i, test := range tests { e := NewPathEncoder(PathEncoderConfig{ Param: test.Param, Style: test.Style, Explode: test.Explode, }) err := e.EncodeArray(func(e 
Encoder) error { for _, item := range test.Input { if err := e.EncodeValue(item); err != nil { return err } } return nil }) require.NoError(t, err) require.Equal(t, test.Expect, e.Result(), fmt.Sprintf("Test %d", i+1)) } }) t.Run("Object", func(t *testing.T) { tests := []struct { Param string Input []Field Expect string Style PathStyle Explode bool }{ { Param: "id", Input: []Field{ {"role", "admin"}, {"firstName", "Alex"}, }, Style: PathStyleSimple, Explode: false, Expect: "role,admin,firstName,Alex", }, { Param: "id", Input: []Field{ {"role", "admin"}, {"firstName", "Alex"}, }, Style: PathStyleSimple, Explode: true, Expect: "role=admin,firstName=Alex", }, { Param: "id", Input: []Field{ {"role", "admin"}, {"firstName", "Alex"}, }, Style: PathStyleLabel, Explode: false, Expect: ".role,admin,firstName,Alex", }, { Param: "id", Input: []Field{ {"role", "admin"}, {"firstName", "Alex"}, }, Style: PathStyleLabel, Explode: true, Expect: ".role=admin.firstName=Alex", }, { Param: "id", Input: []Field{ {"role", "admin"}, {"firstName", "Alex"}, }, Style: PathStyleMatrix, Explode: false, Expect: ";id=role,admin,firstName,Alex", }, { Param: "id", Input: []Field{ {"role", "admin"}, {"firstName", "Alex"}, }, Style: PathStyleMatrix, Explode: true, Expect: ";role=admin;firstName=Alex", }, } for i, test := range tests { e := NewPathEncoder(PathEncoderConfig{ Param: test.Param, Style: test.Style, Explode: test.Explode, }) for _, field := range test.Input { err := e.EncodeField(field.Name, func(e Encoder) error { return e.EncodeValue(field.Value) }) require.NoError(t, err) } require.Equal(t, test.Expect, e.Result(), fmt.Sprintf("Test %d", i+1)) } }) }
#!/usr/bin/env bash MASON_NAME=mapnik MASON_VERSION=3.0.14 MASON_LIB_FILE=lib/libmapnik.${MASON_DYNLIB_SUFFIX} . ${MASON_DIR}/mason.sh function mason_load_source { mason_download \ https://github.com/mapnik/mapnik/releases/download/v${MASON_VERSION}/mapnik-v${MASON_VERSION}.tar.bz2 \ 42648397f73775e244d8d743a3f3fc9a1dfbacf2 mason_extract_tar_bz2 export MASON_BUILD_PATH=${MASON_ROOT}/.build/mapnik-v${MASON_VERSION} } function install() { ${MASON_DIR}/mason install $1 $2 MASON_PLATFORM_ID=$(${MASON_DIR}/mason env MASON_PLATFORM_ID) if [[ ! -d ${MASON_ROOT}/${MASON_PLATFORM_ID}/${1}/${2} ]]; then if [[ ${3:-false} != false ]]; then LA_FILE=$(${MASON_DIR}/mason prefix $1 $2)/lib/$3.la if [[ -f ${LA_FILE} ]]; then perl -i -p -e 's:\Q$ENV{HOME}/build/mapbox/mason\E:$ENV{PWD}:g' ${LA_FILE} else echo "$LA_FILE not found" fi fi fi ${MASON_DIR}/mason link $1 $2 } ICU_VERSION="57.1" function mason_prepare_compile { install jpeg_turbo 1.5.1 libjpeg install libpng 1.6.28 libpng install libtiff 4.0.7 libtiff install libpq 9.6.2 install sqlite 3.17.0 libsqlite3 install expat 2.2.0 libexpat install icu ${ICU_VERSION} install proj 4.9.3 libproj install pixman 0.34.0 libpixman-1 install cairo 1.14.8 libcairo install webp 0.6.0 libwebp install libgdal 2.1.3 libgdal install boost 1.63.0 install boost_libsystem 1.63.0 install boost_libfilesystem 1.63.0 install boost_libprogram_options 1.63.0 install boost_libregex_icu57 1.63.0 install freetype 2.7.1 libfreetype install harfbuzz 1.4.2-ft libharfbuzz } function mason_compile { #patch -N -p1 < ${MASON_DIR}/scripts/${MASON_NAME}/${MASON_VERSION}/patch.diff export PATH="${MASON_ROOT}/.link/bin:${PATH}" MASON_LINKED_REL="${MASON_ROOT}/.link" MASON_LINKED_ABS="${MASON_ROOT}/.link" if [[ $(uname -s) == 'Linux' ]]; then echo "CUSTOM_LDFLAGS = '${LDFLAGS} -Wl,-z,origin -Wl,-rpath=\\\$\$ORIGIN/../lib/ -Wl,-rpath=\\\$\$ORIGIN/../../'" > config.py echo "CUSTOM_CXXFLAGS = '${CXXFLAGS} -D_GLIBCXX_USE_CXX11_ABI=0'" >> config.py else echo 
"CUSTOM_LDFLAGS = '${LDFLAGS}'" > config.py echo "CUSTOM_CXXFLAGS = '${CXXFLAGS}'" >> config.py fi ./configure \ CXX="${CXX}" \ CC="${CC}" \ PREFIX="${MASON_PREFIX}" \ RUNTIME_LINK="static" \ INPUT_PLUGINS="all" \ ENABLE_SONAME=False \ PKG_CONFIG_PATH="${MASON_LINKED_REL}/lib/pkgconfig" \ PATH_REMOVE="/usr:/usr/local" \ BOOST_INCLUDES="${MASON_LINKED_REL}/include" \ BOOST_LIBS="${MASON_LINKED_REL}/lib" \ ICU_INCLUDES="${MASON_LINKED_REL}/include" \ ICU_LIBS="${MASON_LINKED_REL}/lib" \ HB_INCLUDES="${MASON_LINKED_REL}/include" \ HB_LIBS="${MASON_LINKED_REL}/lib" \ PNG_INCLUDES="${MASON_LINKED_REL}/include/libpng16" \ PNG_LIBS="${MASON_LINKED_REL}/lib" \ JPEG_INCLUDES="${MASON_LINKED_REL}/include" \ JPEG_LIBS="${MASON_LINKED_REL}/lib" \ TIFF_INCLUDES="${MASON_LINKED_REL}/include" \ TIFF_LIBS="${MASON_LINKED_REL}/lib" \ WEBP_INCLUDES="${MASON_LINKED_REL}/include" \ WEBP_LIBS="${MASON_LINKED_REL}/lib" \ PROJ_INCLUDES="${MASON_LINKED_REL}/include" \ PROJ_LIBS="${MASON_LINKED_REL}/lib" \ PG_INCLUDES="${MASON_LINKED_REL}/include" \ PG_LIBS="${MASON_LINKED_REL}/lib" \ FREETYPE_INCLUDES="${MASON_LINKED_REL}/include/freetype2" \ FREETYPE_LIBS="${MASON_LINKED_REL}/lib" \ SVG_RENDERER=True \ CAIRO_INCLUDES="${MASON_LINKED_REL}/include" \ CAIRO_LIBS="${MASON_LINKED_REL}/lib" \ SQLITE_INCLUDES="${MASON_LINKED_REL}/include" \ SQLITE_LIBS="${MASON_LINKED_REL}/lib" \ GDAL_CONFIG="${MASON_LINKED_REL}/bin/gdal-config" \ PG_CONFIG="${MASON_LINKED_REL}/bin/pg_config" \ BENCHMARK=False \ CPP_TESTS=False \ PGSQL2SQLITE=True \ SAMPLE_INPUT_PLUGINS=False \ DEMO=False \ XMLPARSER="ptree" \ NO_ATEXIT=True \ SVG2PNG=True || cat ${MASON_BUILD_PATH}"/config.log" cat config.py # limit concurrency on travis to avoid heavy jobs being killed if [[ ${TRAVIS_OS_NAME:-} ]]; then JOBS=4 make else JOBS=${MASON_CONCURRENCY} make fi make install if [[ $(uname -s) == 'Darwin' ]]; then install_name_tool -id @loader_path/lib/libmapnik.dylib ${MASON_PREFIX}"/lib/libmapnik.dylib"; 
PLUGINDIRS=${MASON_PREFIX}"/lib/mapnik/input/*.input"; for f in $PLUGINDIRS; do echo $f; echo `basename $f`; install_name_tool -id plugins/input/`basename $f` $f; install_name_tool -change ${MASON_PREFIX}"/lib/libmapnik.dylib" @loader_path/../../../lib/libmapnik.dylib $f; done; # command line tools install_name_tool -change ${MASON_PREFIX}"/lib/libmapnik.dylib" @loader_path/../lib/libmapnik.dylib ${MASON_PREFIX}"/bin/mapnik-index" install_name_tool -change ${MASON_PREFIX}"/lib/libmapnik.dylib" @loader_path/../lib/libmapnik.dylib ${MASON_PREFIX}"/bin/mapnik-render" install_name_tool -change ${MASON_PREFIX}"/lib/libmapnik.dylib" @loader_path/../lib/libmapnik.dylib ${MASON_PREFIX}"/bin/shapeindex" fi # fix mapnik-config entries for deps HERE=$(pwd) python -c "data=open('$MASON_PREFIX/bin/mapnik-config','r').read();open('$MASON_PREFIX/bin/mapnik-config','w').write(data.replace('$HERE','.').replace('${MASON_ROOT}','./mason_packages'))" cat $MASON_PREFIX/bin/mapnik-config } function mason_cflags { ${MASON_PREFIX}/bin/mapnik-config --cflags } function mason_ldflags { ${MASON_PREFIX}/bin/mapnik-config --ldflags } function mason_static_libs { ${MASON_PREFIX}/bin/mapnik-config --dep-libs } function mason_clean { make clean } mason_run "$@"
#!/bin/bash
. ${FSLDIR}/etc/fslconf/fsl.sh

# Subtract a visually-adjusted WMH mask from each tissue segmentation class
# (seg0, seg1, seg2), clamp negatives to zero, and sum the remainders into a
# single combined non-WMH image.
generateNonWMHimg_fromVisAdj(){
	WMHimg=$1
	ID=$2
	studyFolder=$3

	custClssfrFolder="${studyFolder}/customiseClassifier"
	# Output directory for the per-class and combined non-WMH images.
	modDir="${custClssfrFolder}/subjects/${ID}/mri/extractedWMH/manuallyModified"

	# One pass per segmentation class: seg${i} minus WMH, thresholded at 0.
	for i in 0 1 2; do
		seg=`ls ${custClssfrFolder}/subjects/${ID}/mri/kNN_intermediateOutput/${ID}_accurateCSFmasked_seg${i}.nii*`
		${FSLDIR}/bin/fslmaths ${seg} \
			-sub ${WMHimg} \
			-thr 0 \
			${modDir}/${ID}_seg${i}_nonWMH
	done

	# Combine the three per-class results.
	${FSLDIR}/bin/fslmaths ${modDir}/${ID}_seg0_nonWMH -add \
		${modDir}/${ID}_seg1_nonWMH -add \
		${modDir}/${ID}_seg2_nonWMH \
		${modDir}/${ID}_seg012_nonWMH
}

# $1 = VisAdj WMH img
# $2 = ID
# $3 = studyFolder
generateNonWMHimg_fromVisAdj $1 $2 $3
<gh_stars>10-100 let path = require('path') let webpack = require('webpack') const DIST_PATH = path.resolve(__dirname, '../dist') const SRC_PATH = path.resolve(__dirname, '../src') module.exports = { entry: [ './index.js' // the entry point of our app ], output: { filename: 'xoces-umd.js', // the output bundle libraryTarget: 'umd', // Possible value - amd, commonjs, commonjs2, commonjs-module, this, var path: DIST_PATH, publicPath: '/', // necessary for HMR to know where to load the hot update chunks, library: 'xoces', }, context: SRC_PATH, module: { rules: [ { test: /\.js$/, use: [ 'babel-loader', ], exclude: /node_modules/ }, { test: /\.css$/, use: [ 'style-loader', 'css-loader', 'postcss-loader', ], }, { test: /\.scss$/, use: ['style-loader', 'css-loader', 'postcss-loader', 'sass-loader', ] }, { test: /\.png$/, loader: "url-loader?mimetype=image/png" } ], }, plugins: [ // prints more readable module names in the browser console on HMR updates new webpack.NamedModulesPlugin(), ], };
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # fail if undeclared variables are used set -o nounset # exit on error set -o errexit # Figure out which tarball to use based on which Hadoop version is being used. set +o nounset HADOOP_BIN="sudo -u hadoop ${HADOOP_INSTALL_DIR}/bin/hadoop" HADOOP_VERSION=$(${HADOOP_BIN} version | tr -cd [:digit:] | head -c1) set -o nounset if [[ "${HADOOP_VERSION}" == '2' ]]; then FLINK_TARBALL_URI=${FLINK_HADOOP2_TARBALL_URI} else FLINK_TARBALL_URI=${FLINK_HADOOP1_TARBALL_URI} fi # Install Flink via this fancy pipe gsutil cat "${FLINK_TARBALL_URI}" | tar -C /home/hadoop/ -xzv mv /home/hadoop/flink* "${FLINK_INSTALL_DIR}" # List all task managers (workers) in the slaves file # The task managers will be brought up by the job manager (master) echo ${WORKERS[@]} | tr ' ' '\n' > ${FLINK_INSTALL_DIR}/conf/slaves # Create temp file in hadoop directory which might be mounted to other storage than os FLINK_TASKMANAGER_TEMP_DIR="/hadoop/flink/tmp" mkdir -p ${FLINK_TASKMANAGER_TEMP_DIR} chgrp hadoop -R /hadoop/flink chmod 777 -R /hadoop/flink # Calculate the memory allocations, MB, using 'free -m'. Floor to nearest MB. 
TOTAL_MEM=$(free -m | awk '/^Mem:/{print $2}') FLINK_JOBMANAGER_MEMORY=$(python -c \ "print int(${TOTAL_MEM} * ${FLINK_JOBMANAGER_MEMORY_FRACTION})") FLINK_TASKMANAGER_MEMORY=$(python -c \ "print int(${TOTAL_MEM} * ${FLINK_TASKMANAGER_MEMORY_FRACTION})") # Determine the number of task slots if [[ "${FLINK_TASKMANAGER_SLOTS}" == "auto" ]] ; then FLINK_TASKMANAGER_SLOTS=`grep -c processor /proc/cpuinfo` fi # Determine the default parallelism if [[ "${FLINK_PARALLELISM}" == "auto" ]] ; then FLINK_PARALLELISM=$(python -c \ "print ${NUM_WORKERS} * ${FLINK_TASKMANAGER_SLOTS}") fi # Apply Flink settings by appending them to the default config cat << EOF >> ${FLINK_INSTALL_DIR}/conf/flink-conf.yaml jobmanager.rpc.address: ${MASTER_HOSTNAME} jobmanager.heap.mb: ${FLINK_JOBMANAGER_MEMORY} taskmanager.heap.mb: ${FLINK_TASKMANAGER_MEMORY} taskmanager.numberOfTaskSlots: ${FLINK_TASKMANAGER_SLOTS} parallelism.default: ${FLINK_PARALLELISM} taskmanager.network.numberOfBuffers: ${FLINK_NETWORK_NUM_BUFFERS} env.java.opts: ${FLINK_JAVA_OPTS} taskmanager.tmp.dirs: ${FLINK_TASKMANAGER_TEMP_DIR} fs.hdfs.hadoopconf: ${HADOOP_CONF_DIR} EOF # Find the Hadoop lib dir so and add its gcs-connector to the Flink lib dir set +o nounset if [[ -r "${HADOOP_INSTALL_DIR}/libexec/hadoop-config.sh" ]]; then . "${HADOOP_INSTALL_DIR}/libexec/hadoop-config.sh" fi if [[ -n "${HADOOP_COMMON_LIB_JARS_DIR}" ]] && \ [[ -n "${HADOOP_PREFIX}" ]]; then LIB_JARS_DIR="${HADOOP_PREFIX}/${HADOOP_COMMON_LIB_JARS_DIR}" else LIB_JARS_DIR="${HADOOP_INSTALL_DIR}/lib" fi set -o nounset # Get jar name and path GCS_JARNAME=$(grep -o '[^/]*\.jar' <<< ${GCS_CONNECTOR_JAR}) LOCAL_GCS_JAR="${LIB_JARS_DIR}/${GCS_JARNAME}" # create link in Flink lib dir ln -s "${LOCAL_GCS_JAR}" "${FLINK_INSTALL_DIR}/lib/" # Assign ownership of everything to the 'hadoop' user. chown -R hadoop:hadoop /home/hadoop/ # Make the Flink log directory writable chmod 777 ${FLINK_INSTALL_DIR}/log
require "bullet_train/themes/light/version"
require "bullet_train/themes/light/engine"
require "bullet_train/themes/tailwind_css"

module BulletTrain
  module Themes
    # Entry point for the "light" Bullet Train theme.
    module Light
      # TODO Not sure this is the right place for this in the long-term.
      # Theme-wide accent colour; defaults to :blue.
      mattr_accessor :color, default: :blue

      # Resolves partials from the "light" directory first, then falls back
      # to the Tailwind base theme's lookup order.
      class Theme < BulletTrain::Themes::TailwindCss::Theme
        def directory_order
          ["light"] + super
        end
      end
    end
  end
end
def longest_common_subsequence(string1, string2):
    """Return the longest common subsequence (LCS) of two strings.

    Builds the classic O(len(string1) * len(string2)) DP table, then
    backtracks from the bottom-right corner to reconstruct one LCS.

    Args:
        string1: First string.
        string2: Second string.

    Returns:
        One longest common subsequence (empty string if none).
    """
    # dp[i][j] = LCS length of string2[:i] and string1[:j]
    dp = [[0] * (len(string1) + 1) for _ in range(len(string2) + 1)]
    for i in range(1, len(string2) + 1):
        for j in range(1, len(string1) + 1):
            if string2[i - 1] == string1[j - 1]:
                dp[i][j] = dp[i - 1][j - 1] + 1
            else:
                dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])

    # Backtrack, collecting matched characters in a list and joining once
    # at the end (the original appended to a string in a loop, which is
    # O(n^2) due to repeated reallocation).
    pieces = []
    i, j = len(string2), len(string1)
    while i > 0 and j > 0:
        if string2[i - 1] == string1[j - 1]:
            pieces.append(string2[i - 1])
            i -= 1
            j -= 1
        elif dp[i - 1][j] > dp[i][j - 1]:
            i -= 1
        else:
            j -= 1
    # Characters were collected back-to-front; reverse before joining.
    return ''.join(reversed(pieces))


string1 = "ABCDGH"
string2 = "AEDFHR"
print(longest_common_subsequence(string1, string2))
#include <stdio.h>

/*
 * Reads a non-negative 64-bit integer n from stdin and prints its
 * integer square root: the largest s with s * s <= n.
 */

long long int n;

/*
 * Exact integer square root via binary search.
 *
 * Fixes in this revision:
 *  - the original function was named `sqrt`, clashing with the C
 *    library's sqrt declaration/builtin;
 *  - it returned `int`, which truncates/overflows for n > ~4.6e18
 *    (sqrt of such n exceeds INT_MAX);
 *  - the double-based counting loop was O(sqrt(n)) — billions of
 *    iterations for large 64-bit inputs.
 */
static long long isqrt(long long int n)
{
    long long lo = 0;
    long long hi = 3037000499LL; /* floor(sqrt(2^63 - 1)) */
    while (lo < hi) {
        /* Round the midpoint up so the loop always makes progress
         * when lo is kept. */
        long long mid = lo + (hi - lo + 1) / 2;
        if (mid * mid <= n)
            lo = mid;
        else
            hi = mid - 1;
    }
    return lo;
}

int main()
{
    scanf("%lld", &n);
    printf("%lld\n", isqrt(n));
    return 0;
}
/*
 * Copyright 2017-2021 original authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.micronaut.data.mongodb.serde;

import io.micronaut.core.annotation.Internal;
import io.micronaut.core.type.Argument;
import io.micronaut.serde.SerdeRegistry;
import io.micronaut.serde.Serializer;
import io.micronaut.serde.config.naming.PropertyNamingStrategy;
import io.micronaut.serde.exceptions.SerdeException;
import io.micronaut.serde.reference.AbstractPropertyReferenceManager;
import io.micronaut.serde.reference.PropertyReference;
import io.micronaut.serde.reference.SerializationReference;

/**
 * Default implementation of {@link Serializer.EncoderContext}.
 *
 * <p>Serializer and naming-strategy lookups are delegated to the supplied
 * {@link SerdeRegistry}; property-reference tracking is inherited from
 * {@link AbstractPropertyReferenceManager} (its {@code refs} deque).</p>
 */
@Internal
class DefaultEncoderContext extends AbstractPropertyReferenceManager implements Serializer.EncoderContext {

    /** Registry used for all serializer / naming-strategy lookups. */
    private final SerdeRegistry registry;

    /**
     * @param registry the registry lookups are delegated to
     */
    DefaultEncoderContext(SerdeRegistry registry) {
        this.registry = registry;
    }

    /**
     * Looks up a custom serializer by its class, delegating to the registry.
     *
     * @throws SerdeException if the serializer cannot be resolved
     */
    @Override
    public final <T, D extends Serializer<? extends T>> D findCustomSerializer(Class<? extends D> serializerClass) throws SerdeException {
        return registry.findCustomSerializer(serializerClass);
    }

    /**
     * Resolves a serializer for the given argument type via the registry.
     *
     * @throws SerdeException if no serializer can be found
     */
    @Override
    public final <T> Serializer<? super T> findSerializer(Argument<? extends T> forType) throws SerdeException {
        return registry.findSerializer(forType);
    }

    /**
     * Resolves a property naming strategy by its class via the registry.
     *
     * @throws SerdeException if the strategy cannot be resolved
     */
    @Override
    public <D extends PropertyNamingStrategy> D findNamingStrategy(Class<? extends D> namingStrategyClass) throws SerdeException {
        return registry.findNamingStrategy(namingStrategyClass);
    }

    @Override
    public <B, P> SerializationReference<B, P> resolveReference(SerializationReference<B, P> reference) {
        final Object value = reference.getReference();
        // refs is the deque maintained by AbstractPropertyReferenceManager;
        // it may be null before any reference has been pushed.
        if (refs != null) {
            // Only the most recently pushed reference is consulted.
            final PropertyReference<?, ?> managedReference = refs.peekFirst();
            if (managedReference != null && managedReference.getProperty().getName().equals(reference.getReferenceName())) {
                // Identity comparison (==), not equals(): only the exact same
                // instance is suppressed. NOTE(review): returning null appears
                // to signal "skip this reference" to the caller — presumably
                // back-reference/cycle suppression; confirm against the
                // Serializer.EncoderContext contract.
                if (managedReference.getReference() == value) {
                    return null;
                }
            }
        }
        return reference;
    }
}
const menu = document.querySelector(".menu");
const burgerButton = document.querySelector(".burger-button");
const screen = window.matchMedia("screen and (max-width: 767px)");

// Toggle the mobile menu open/closed.
function showHide() {
  menu.classList.toggle("is-active");
}

// Always close the menu (used when a menu item is clicked).
function hide() {
  menu.classList.remove("is-active");
}

// Attach or detach the mobile handlers depending on whether the
// viewport matches the mobile media query after a change.
function validation(event) {
  if (event.matches) {
    burgerButton.addEventListener("click", showHide);
    menu.addEventListener("click", hide);
  } else {
    burgerButton.removeEventListener("click", showHide);
    menu.removeEventListener("click", hide);
  }
}

// Initial wiring for viewports that are already mobile-sized on load.
if (screen.matches) {
  burgerButton.addEventListener("click", showHide);
  menu.addEventListener("click", hide);
}

// MediaQueryList.addListener is deprecated; prefer the standard
// addEventListener("change", ...) and fall back for older browsers.
if (typeof screen.addEventListener === "function") {
  screen.addEventListener("change", validation);
} else {
  screen.addListener(validation);
}
import React from "react";

/**
 * React hook returning `whenTrue` while `query` matches the viewport and
 * `whenFalse` otherwise — including during server-side rendering, where
 * `window.matchMedia` does not exist.
 */
const useMediaQuery = (query, whenTrue, whenFalse) => {
  // Guard BEFORE touching window: the original called window.matchMedia
  // unconditionally at the top, so its SSR fallback further down was
  // unreachable (the hook had already thrown).
  const canMatch =
    typeof window !== "undefined" && typeof window.matchMedia !== "undefined";

  // Lazy initializer so matchMedia is only consulted when available.
  const [match, setMatch] = React.useState(() =>
    canMatch ? !!window.matchMedia(query).matches : false
  );

  React.useEffect(() => {
    if (!canMatch) {
      return undefined;
    }
    // Create the MediaQueryList inside the effect and depend on `query`:
    // the original built a fresh object on every render and listed it as
    // a dependency, which re-ran the effect (unsubscribe + resubscribe)
    // on each render.
    const mediaQuery = window.matchMedia(query);
    const handler = () => setMatch(!!mediaQuery.matches);
    handler(); // sync state in case the match changed before subscribing
    mediaQuery.addListener(handler);
    return () => mediaQuery.removeListener(handler);
  }, [query, canMatch]);

  return match ? whenTrue : whenFalse;
};

export default useMediaQuery

// Usage example :
// const text = useMediaQuery(
//   '(max-width: 400px)',
//   'Less than 400px wide',
//   'More than 400px wide'
// );
#!/bin/bash
# Installs the Swift 4.1 release toolchain on a Travis CI worker.
if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then
    # Linux: unpack the Ubuntu 14.04 tarball one directory up and put its
    # bin directory at the front of PATH.
    DIR="$(pwd)"
    cd ..
    export SWIFT_VERSION=swift-4.1-RELEASE
    wget https://swift.org/builds/swift-4.1-release/ubuntu1404/swift-4.1-RELEASE/swift-4.1-RELEASE-ubuntu14.04.tar.gz
    tar xzf swift-4.1-RELEASE-ubuntu14.04.tar.gz
    export PATH="${PWD}/swift-4.1-RELEASE-ubuntu14.04/usr/bin:${PATH}"
    cd "$DIR"
else
    # macOS: install the official Xcode toolchain package, then select it
    # via the TOOLCHAINS environment variable.
    export SWIFT_VERSION=swift-4.1-RELEASE
    curl -O https://swift.org/builds/swift-4.1-release/xcode/swift-4.1-RELEASE/swift-4.1-RELEASE-osx.pkg
    sudo installer -pkg swift-4.1-RELEASE-osx.pkg -target /
    export TOOLCHAINS=swift
fi
function Foo() { /* .. */ } Foo.prototype = { /* .. */ }; // create a new prototype object Object.defineProperty(Foo.prototype, "constructor",{ enumerable: false, writable: true, configurable: true, value: Foo // point `.constructor` at `Foo` });//add constructor var a1 = new Foo(); console.log(Foo.prototype.constructor === Foo); // false! console.log(a1.constructor === Foo); // true!
<gh_stars>0 import * as express from "express"; import UserCtrl from "../controller/userCtrl"; import AdminCtrl from "../controller/adminCtrl"; import PoolCtrl from "../controller/poolCtrl"; export default function setRoutes(app: any) { const router = express.Router(); const userCtrl = new UserCtrl(); const adminCtrl = new AdminCtrl(); const poolCtrl = new PoolCtrl(); router.route("/user/login").post(userCtrl.login); router.route("/user/vote").post(userCtrl.vote); router.route("/admin/login").post(adminCtrl.login); router.route("/admin/newPool").post(poolCtrl.newPool); router.route("/admin/showPool").post(poolCtrl.showPool); router.route("/pool").get(poolCtrl.getAll); router.route("/users").get(userCtrl.getAll); app.use("/api", router); }
import random

def generate_random_string(length=5):
    """Return a random string of lowercase ASCII letters.

    Generalized from the original hard-coded 5-character string: the
    length is now a parameter whose default preserves the old behavior.

    Args:
        length: Number of characters to generate (default 5).

    Returns:
        A string of `length` characters drawn (with replacement) from
        'a'..'z'.
    """
    return ''.join(random.choices('abcdefghijklmnopqrstuvwxyz', k=length))

print(generate_random_string())
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { ThemeModule } from '../../@theme/theme.module';
import { PlanDocumentComponent } from './plan-document.component';
import { AddDocumetComponent } from './add-documet/add-documet.component';
import { EitDocumetComponent } from './eit-documet/eit-documet.component';

// Feature module bundling the plan-document screens: the main listing
// component plus its add/edit components.
// NOTE(review): "Documet"/"Eit" look like typos for "Document"/"Edit",
// but the component file paths use the same spelling, so the names are
// kept as-is; renaming would require touching those files too.
@NgModule({
  declarations: [PlanDocumentComponent, AddDocumetComponent, EitDocumetComponent],
  imports: [
    CommonModule,
    ThemeModule
  ]
})
export class PlanDocumentModule { }
/** * Copyright 2014 isandlaTech * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.psem2m.isolates.slave.agent.threads; /** * The thread that polls the threads monitor * * @author <NAME> */ public class MonitorPollerThread extends Thread { /** The parent monitor */ private final ThreadsMonitor pMonitor; /** The poll interval */ private final long pPollInterval; /** * Sets up members */ public MonitorPollerThread(final ThreadsMonitor aParent, final long aPollInterval) { super("psem2m-threads-monitor-poller"); setDaemon(true); pMonitor = aParent; pPollInterval = aPollInterval; } /* * (non-Javadoc) * * @see java.lang.Thread#run() */ @Override public void run() { while (!isInterrupted()) { // Update the monitor pMonitor.updateMonitor(); try { // Make a pause Thread.sleep(pPollInterval); } catch (final InterruptedException e) { // Stop working on interruption break; } } } }
#!/bin/bash
################## 2/6/14 #########################
# Script by Evan Lutkenhoff, lutkenhoff@ucla.edu #
# Monti Lab (http://montilab.psych.ucla.edu) #
# Tools used within script are copyrighted by: #
# FSL (http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FSL) #
# & AFNI (http://afni.nimh.nih.gov/afni/) #
###################################################

####usage notes/comments####
# Print usage text and exit 1; called when run without arguments or with
# an invalid option.
howtouse() {
echo ""
echo "How To Use: optiBET.sh -i <input_image> -options"
echo ""
echo "* if option is -f script uses FSL for initial extraction (default)"
echo "* if option is -a script uses AFNI for initial extraction"
echo "* if option is -o script uses MNI152_T1_1mm_brain_mask.nii.gz for mask (default)"
echo "* if option is -t script uses MNI152_T1_2mm_brain_mask.nii.gz for mask"
echo "* if option is -g script uses avg152T1_brain.nii.gz for mask"
echo "* if option is -d use debug mode (will NOT delete intermediate files)"
echo "* using -p <thr> the fractional intensity threshold to be used by FSL's bet can be specified (default: 0.1)"
echo "* the option -p is ignored if AFNI is used for initial extraction"
echo "* script requires proper installation of FSL and AFNI"
echo "* input image should be in standard orientation"
echo "* use .nii.gz image for input"
echo "* outputs binarized brain-extraction mask, saved as: <input_image>_optiBET_brain_mask.nii.gz"
echo "* and full-intensity brain-extraction, saved as: <input_image>_optiBET_brain.nii.gz"
echo ""
exit 1
}

# No arguments at all => show usage and exit.
[ "$1" = "" ] && howtouse

####setup environment variables####
#setup environment variable for AFNI
# Derive the AFNI install dir from the location of 3dSkullStrip on PATH.
afnidirtemp=`which 3dSkullStrip`
afnidir=`echo $afnidirtemp | awk -F "3dSkullStrip" '{print $1}'`
#setup environment variables for FSL
#FSLDIR already stored if installed

####PARSE options########################
#sets up initial values for brain extraction and MNI mask and debug
s1=bet; #default step1
mask=MNI152_T1_1mm_brain_mask.nii.gz; #default MNI mask
debugger=no; #default delete intermediate files
popt=0.1;
# Option letters: i (input, takes arg), f/a (FSL/AFNI step 1),
# o/t/g (mask choice), d (debug), p (bet threshold, takes arg).
while getopts i:faotgdp: name
do
case $name in
i)iopt=$OPTARG;;
f)fopt=1;;
a)aopt=1;;
o)oopt=1;;
t)topt=1;;
g)gopt=1;;
d)dopt=1;;
p)popt=$OPTARG;;
*)echo "Invalid option as argument"; exit 1;; #exits if bad option used
esac
done

if [[ ! -z $iopt ]]
then
i=`${FSLDIR}/bin/remove_ext $iopt`; #removes file extensions from input image
echo $i "is input image"
fi
if [[ ! -z $fopt ]]
then
s1=bet; #use FSL
fi
if [[ ! -z $aopt ]]
then
s1=3dSS; #use AFNI
fi
if [[ ! -z $oopt ]]
then
mask=MNI152_T1_1mm_brain_mask.nii.gz; #use 1mm mask
fi
if [[ ! -z $topt ]]
then
mask=MNI152_T1_2mm_brain_mask.nii.gz; #use 2mm mask
fi
if [[ ! -z $gopt ]]
then
mask=avg152T1_brain.nii.gz; #use avg mask
fi
if [[ ! -z $dopt ]]
then
debugger=yes; #keeps intermediate files
echo "debug: do NOT delete intermediate files"
fi
#following takes care of inputting conflicting options
if [[ ! -z $fopt ]] && [[ ! -z $aopt ]]; then
echo "only specify one option for inital extraction (-f OR -a)"
exit 1
fi
if [[ ! -z $oopt ]] && [[ ! -z $topt ]] && [[ ! -z $gopt ]]; then
echo "only specify one option for mask (-o, -t, or -g)"
exit 1
fi
if [[ ! -z $oopt ]] && [[ ! -z $topt ]]; then
echo "only specify one option for mask (-o, -t, or -g)"
exit 1
fi
if [[ ! -z $oopt ]] && [[ ! -z $gopt ]]; then
echo "only specify one option for mask (-o, -t, or -g)"
exit 1
fi
if [[ ! -z $topt ]] && [[ ! -z $gopt ]]; then
echo "only specify one option for mask (-o, -t, or -g)"
exit 1
fi
shift $(($OPTIND -1))
echo "for subject $iopt use $s1 for step 1 and $mask for MNI mask"
####END PARSE #################################

#### 1. initial brain extraction (“step 1”) #########################
## Perform initial “approximate” brain extraction (use FSL unless input options specify the AFNI option)
# this is referred to as “step 1” in the manuscript.
if [[ "$s1" == "bet" ]]; then
echo step1 BET -B -f ${popt} subject ${i} for initial extraction
# NOTE(review): bet is given ${iopt} (the raw argument, possibly with
# extension) while later steps use ${i}; bet accepts either form.
bet ${iopt} ${i}_step1 -B -f ${popt}
else
echo step1 AFNI 3dSkullStrip subject ${i} for initial extraction
$afnidir/3dSkullStrip -input ${iopt} -prefix ${i}_step1.nii.gz &>/dev/null #suppress screen output b/c a lot
fi

#### 2. linear transform to MNI space (“step 2”) ####################
## Perform linear transformation of initial “approximate” extraction to MNI space
# This is referred to as “step 2” in the manuscript.
echo step2 flirt subject ${i} to MNI space
flirt -ref ${FSLDIR}/data/standard/MNI152_T1_2mm_brain -in ${i}_step1.nii.gz -omat ${i}_step2.mat -out ${i}_step2 -searchrx -30 30 -searchry -30 30 -searchrz -30 30

#### 3. nonlinear transform to MNI space (“step 3”) ##################
## Follow the linear transformation with a non-linear transformation. Use the MNI152 2mm as default
# This is referred to as “step 3” in the manuscript
echo step3 fnirt subject ${i} to MNI space
fnirt --in=${i} --aff=${i}_step2.mat --cout=${i}_step3 --config=T1_2_MNI152_2mm

#### 4. QC: Generate image for QC of fnirt ####################################
## This is a quality control step that generates an image of the original subject structural after transformation to MNI space
# which can (and should) be checked by the user.
echo step4 quality control of fnirt using applywarp to put subject ${i} in MNI space
applywarp --ref=${FSLDIR}/data/standard/MNI152_T1_2mm --in=${i} --warp=${i}_step3 --out=${i}_step4

#### 5. Invert nonlinear warp (“step 4a”) ############################
## Invert the nonlinear warp in order to be able to back-project the MNI brain into subject space
# this is the first part of “step 4” in the manuscript
echo step5 invert nonlinear warp for subject ${i}
invwarp -w ${i}_step3.nii.gz -o ${i}_step5.nii.gz -r ${i}_step1.nii.gz

#### 6. Apply inverted nonlinear warp to labels (“step 4b”) ##########
## Apply inverted nonlinear warp to the MNI standard brain in order to back-project it back into subject space
# this is the second part of “step 4” in the manuscript
echo step6 apply inverted nonlinear warp to MNI label: MNI152_T1_1mm_brain_mask for subject ${i}
# --interp=nn keeps the back-projected mask binary-valued (no blending).
applywarp --ref=${i} --in=${FSLDIR}/data/standard/${mask} --warp=${i}_step5.nii.gz --out=${i}_step6 --interp=nn

#### 7. binarize brain extractions ###################################
## Binarize the back-projected MNI brain in order to use it to “punch-out” brain extraction (in the next step)
echo step 7 creating binary brain mask for subject ${i}
fslmaths ${i}_step6.nii.gz -bin ${i}_optiBET_brain_mask

#### 8. Punch-out mask from brain to do skull-stripping (“step 4c”) ##
## Take the binarized back-projected MNI brain and use it to “punch-out" non-brain tissue from the subject’s original T1 image
# this is the last part of “step 4” as described in the manuscript).
echo step 8 creating brain extraction for subject ${i}
fslmaths ${i} -mas ${i}_optiBET_brain_mask ${i}_optiBET_brain

###debug or not #####
# Keep intermediates only in debug (-d) mode; otherwise delete all
# ${i}_step* files, leaving just the two _optiBET outputs.
if [ "$debugger" == "yes" ];then
echo "keep intermediate files"
else
echo "removing intermediate files"
rm ${i}_step1.nii.gz ${i}_step1_mask.nii.gz ${i}_step2.nii.gz ${i}_step2.mat ${i}_step3.nii.gz ${i}_step4.nii.gz ${i}_step5.nii.gz ${i}_step6.nii.gz ${i}_to_MNI152_T1_2mm.log
fi
// Degrees-to-radians factor, hoisted so it is not recomputed per call.
const DEG_TO_RAD = Math.PI / 180;

/**
 * Convert polar coordinates (radius r, angle theta in DEGREES) around a
 * center point (cx, cy) into rectangular/Cartesian coordinates.
 *
 * @param {number} cx    center x
 * @param {number} cy    center y
 * @param {number} r     radius
 * @param {number} theta angle in degrees (measured from +x axis)
 * @returns {{x: number, y: number}} rectangular coordinates
 */
function polarToRectangular(cx, cy, r, theta) {
  const radians = theta * DEG_TO_RAD; // convert once, use for both axes
  const x = cx + r * Math.cos(radians);
  const y = cy + r * Math.sin(radians);
  return { x, y };
}

// Test the function
const cx = 0;
const cy = 0;
const innerR = 5;
const outerR = 10;

const coordinates1 = polarToRectangular(cx, cy, innerR, (36 * 7) - 90); // 162 deg
const coordinates2 = polarToRectangular(cx, cy, outerR, (36 * 8) - 90); // 198 deg
const coordinates3 = polarToRectangular(cx, cy, innerR, (36 * 9) - 90); // 234 deg
const coordinates4 = polarToRectangular(cx, cy, outerR, 0 - 90);        // -90 deg

// NOTE: the original expected-output comments were numerically wrong
// (they did not match the angles passed in). Actual values:
console.log(coordinates1); // ~{ x: -4.755, y: 1.545 }
console.log(coordinates2); // ~{ x: -9.511, y: -3.090 }
console.log(coordinates3); // ~{ x: -2.939, y: -4.045 }
console.log(coordinates4); // ~{ x: 0 (tiny float error), y: -10 }