text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Provision a REDAXO instance: install and activate the demo_base addon
# inside the web root, then hand ownership to the web server user.
set -euo pipefail
cd /var/www/html
# Download a pinned addon release (-q: quiet), install it, clear caches,
# then run the addon's own installer non-interactively (-y).
php redaxo/bin/console install:download -q demo_base 2.10.1
php redaxo/bin/console package:install -q demo_base
php redaxo/bin/console cache:clear -q
php redaxo/bin/console demo_base:install -q -y
# Make all files writable by the web server user.
chown -R www-data:www-data ./
|
#!/usr/bin/env bash
# Build script for Bottleneck: compiles the CoffeeScript sources in src/
# into lib/ for several targets and generates/tests the TypeScript typings.
# Usage: ./build.sh [dev|bench|es5|light|typings]  (no argument = build all)
set -e
if [ ! -d node_modules ]; then
  echo "[B] Run 'npm install' first"
  exit 1
fi
# Reset build outputs and regenerate the version/lua JSON artifacts that the
# compiled sources load at runtime.
clean() {
  rm -f .babelrc
  rm -rf lib/*
  node scripts/version.js > lib/version.json
  node scripts/assemble_lua.js > lib/lua.json
}
# Node 10+ target: plain CoffeeScript compile, no transpilation.
makeLib10() {
  echo '[B] Compiling Bottleneck to Node 10+...'
  npx coffee --compile --bare --no-header src/*.coffee
  mv src/*.js lib/
}
# Node 6+ target: compile with Babel transpilation (.babelrc.lib config).
makeLib6() {
  echo '[B] Compiling Bottleneck to Node 6+...'
  ln -s .babelrc.lib .babelrc
  npx coffee --compile --bare --no-header --transpile src/*.coffee
  mv src/*.js lib/
}
# ES5 target: compile then bundle with the ES5 rollup config (.babelrc.es5).
makeES5() {
  echo '[B] Compiling Bottleneck to ES5...'
  ln -s .babelrc.es5 .babelrc
  npx coffee --compile --bare --no-header src/*.coffee
  mv src/*.js lib/
  echo '[B] Assembling ES5 bundle...'
  npx rollup -c rollup.config.es5.js
}
# Light bundle: Node 10+ compile plus the "light" rollup bundle.
makeLight() {
  makeLib10
  echo '[B] Assembling light bundle...'
  npx rollup -c rollup.config.light.js
}
# Generate bottleneck.d.ts from its EJS template and type-check it.
makeTypings() {
  echo '[B] Compiling and testing TS typings...'
  npx ejs-cli bottleneck.d.ts.ejs > bottleneck.d.ts
  npx tsc --noEmit --strict test.ts
}
# Dispatch on the requested target; default builds everything.
# Note: clean is re-run between full-build targets because each target
# overwrites the same lib/ directory.
if [ "$1" = 'dev' ]; then
  clean
  makeLib10
elif [ "$1" = 'bench' ]; then
  clean
  makeLib6
elif [ "$1" = 'es5' ]; then
  clean
  makeES5
elif [ "$1" = 'light' ]; then
  clean
  makeLight
elif [ "$1" = 'typings' ]; then
  makeTypings
else
  clean
  makeES5
  clean
  makeLight
  clean
  makeLib6
  makeTypings
fi
# Remove the .babelrc symlink left behind by the babel-based targets.
rm -f .babelrc
echo '[B] Done!'
|
<filename>Problem #0016/src/Problem.java
import java.util.Scanner;
/**
 * Fixed-size ring buffer holding the n most recent (positive) order ids.
 * get_last(i) returns the i-th most recently recorded id (1 = newest).
 */
public class Problem
{
    // Slot that the next recorded id will be written into.
    private int nextIndex = 0;
    // Circular storage. A value of 0 marks a slot that was never written,
    // since record() only accepts ids > 0.
    private int[] lastOrderIds;

    /**
     * Creates a buffer remembering the last n order ids.
     *
     * @param n buffer capacity
     */
    public Problem(int n)
    {
        this.lastOrderIds = new int[n];
    }

    /**
     * Records an order id, overwriting the oldest entry once full.
     *
     * @param order_id must be positive (0 is reserved as the "empty" marker)
     * @throws IllegalArgumentException if order_id is not positive
     */
    public void record(int order_id)
    {
        if (0 >= order_id)
            throw new IllegalArgumentException();
        this.lastOrderIds[this.nextIndex++] = order_id;
        // Wrap the write cursor around the end of the buffer.
        this.nextIndex %= this.lastOrderIds.length;
    }

    /**
     * Returns the i-th last recorded order id (i = 1 is the most recent).
     *
     * @throws IndexOutOfBoundsException if i is out of [1, capacity] or the
     *         requested slot was never written
     */
    public int get_last(int i)
    {
        if (0 >= i || this.lastOrderIds.length < i)
            throw new IndexOutOfBoundsException();
        // Step i positions backwards from the write cursor, modulo capacity.
        int index = (this.nextIndex - i + this.lastOrderIds.length);
        index %= this.lastOrderIds.length;
        if (0 == this.lastOrderIds[index])
            throw new IndexOutOfBoundsException();
        return this.lastOrderIds[index];
    }

    /** Interactive driver: reads capacity, ids, and a query from stdin. */
    public static void main(String[] args)
    {
        Scanner sc = new Scanner(System.in);
        System.out.println("How many order ids would you like to buffer?");
        Problem p = new Problem(sc.nextInt());
        sc.nextLine();
        System.out.println("How many order ids would you like to enter?");
        int orderIds = sc.nextInt();
        sc.nextLine();
        for (int i = 0; orderIds > i; ++i)
        {
            System.out.println("Please enter order id #" + (i + 1) + ":");
            p.record(sc.nextInt());
            sc.nextLine();
        }
        System.out.println("Which ith last order id would you like to retrieve?");
        int ithLastOrderId = sc.nextInt();
        sc.close();
        System.out.println("The ith last order id was " + p.get_last(ithLastOrderId) + ".");
    }
}
def validate_data_types(data):
    """Validate that ``data`` carries every expected attribute with the right type.

    Returns False when any expected attribute is missing or has a value of
    the wrong type; True otherwise.

    Bug fix: the original resolved type names via ``globals()[name]``, which
    raises KeyError for builtins such as ``str``/``int`` (builtins are not in
    a module's globals).  Names are now resolved against builtins first and
    then module globals; list item classes that cannot be resolved at all
    (e.g. REST model classes not imported here) are skipped rather than
    crashing the validation.
    """
    import builtins

    attribute_data_types = {
        'version': 'str',
        'product_id': 'int',
        'product_name': 'str',
        'current_product_milestone_id': 'int',
        'product_milestones': 'list[ProductMilestoneRest]',
        'product_releases': 'list[ProductReleaseRest]',
        'build_configuration_sets': 'list[BuildConfigurationSetRest]',
        'build_configurations': 'list[BuildConfigurationRest]',
        'attributes': 'dict(str, str)'
    }

    def _resolve(type_name):
        # Builtins ('str', 'int', ...) are not present in globals(); try them
        # first, then fall back to module-level names (REST model classes).
        return getattr(builtins, type_name, None) or globals().get(type_name)

    for attribute, expected_type in attribute_data_types.items():
        if attribute not in data:
            # Every expected attribute is mandatory.
            return False
        value = data[attribute]
        if expected_type.startswith('list[') and expected_type.endswith(']'):
            item_cls = _resolve(expected_type[5:-1])
            # If the item class is not importable here we cannot check it;
            # skip rather than fail (matches the empty-list behavior).
            if item_cls is not None and not all(isinstance(item, item_cls) for item in value):
                return False
        elif expected_type.startswith('dict(') and expected_type.endswith(')'):
            # Use the declared key/value types (the original parsed them and
            # then ignored them, hard-coding str/str).
            key_name, value_name = expected_type[5:-1].split(', ')
            key_cls = _resolve(key_name)
            value_cls = _resolve(value_name)
            if not all(isinstance(k, key_cls) and isinstance(v, value_cls)
                       for k, v in value.items()):
                return False
        else:
            if not isinstance(value, _resolve(expected_type)):
                return False
    return True
# Install Python dependencies, then build the native "match" component
# from its own directory and return to the repo root.
python -m pip install -r ./requirements.txt
cd ./src/match
bash build.sh
cd ../..
#!/bin/bash
# This script will configure iptables to restrict access to solely
# the www box - that is, the box with the tomcat web server on it.
#
# Flush all current rules from iptables
#
iptables -F
# Allow existing connections to continue
iptables -A INPUT -m state --state ESTABLISHED,RELATED -j ACCEPT
# -I inserts at the top of the chain so these host allowances are
# evaluated before anything else.
iptables -I INPUT -s 198.23.161.154 -j ACCEPT #the other database server
iptables -I INPUT -s 198.23.128.117 -j ACCEPT #the web server
iptables -I INPUT -s 23.95.35.84 -j ACCEPT #the old web server
# Always accept loopback traffic.
iptables -A INPUT -i lo -j ACCEPT
# Default-deny everything not matched above. Set last so we do not lock
# ourselves out while the ACCEPT rules are still being added.
iptables -P INPUT DROP
# Save settings
#
/sbin/service iptables save
# List rules
#
iptables -L -v
|
package net.orecrops.gameobjs.blocks;
import java.util.Random;
import net.minecraft.block.Block;
import net.minecraft.block.BlockCrops;
import net.minecraft.block.IGrowable;
import net.minecraft.block.properties.PropertyInteger;
import net.minecraft.block.state.BlockStateContainer;
import net.minecraft.block.state.IBlockState;
import net.minecraft.init.Blocks;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.MathHelper;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.common.EnumPlantType;
import net.minecraftforge.common.IPlantable;
import net.minecraftforge.fml.common.FMLLog;
import net.orecrops.gameobjs.ModItems;
import java.util.List;
import java.util.Random;
/**
 * Generic ore-crop block: grows through ages 0-7 on farmland and, once
 * fully grown, drops a harvested item plus bonus seeds. The seed and
 * harvest items are looked up per crop instance in {@link ModItems}.
 */
public class OreCropsCrop extends BlockCrops implements IGrowable, IPlantable {
    /** Growth age property: 0 (just planted) through 7 (harvest ready). */
    public static final PropertyInteger AGE = PropertyInteger.create("age", 0, 7);
    /** Registry name; sole identity of a crop (see equals/hashCode). */
    public final String regName;

    public OreCropsCrop(String regName) {
        super();
        this.regName = regName;
        // Every newly placed crop starts at age 0.
        this.setDefaultState(blockState.getBaseState().withProperty(AGE, 0));
    }

    /** Crops may only grow on farmland. */
    public boolean isSuitableForPlant(Block soil) {
        return soil == Blocks.FARMLAND;
    }

    protected PropertyInteger getAge() {
        return AGE;
    }

    /** Age at which the crop is fully grown and harvestable. */
    public int getHarvestReadyAge() {
        return 7;
    }

    public boolean isHarvestReady(IBlockState state) {
        return state.getValue(getAge()) >= getHarvestReadyAge();
    }

    /**
     * Seeds item registered for this crop in {@link ModItems#seedsMap};
     * falls back to a dummy Item (with a warning) so callers never get null.
     */
    protected Item getSeeds() {
        final Item seeds = ModItems.seedsMap.get(this);
        if (seeds == null) {
            FMLLog.bigWarning("No seeds detected!");
            return new Item();
        }
        return seeds;
    }

    @Override
    public ItemStack getItem(World world, BlockPos pos, IBlockState state) {
        // Pick-block returns the crop's seeds.
        return new ItemStack(getSeeds());
    }

    @Override
    public boolean canGrow(World world, BlockPos pos, IBlockState state, boolean isClient) {
        return !isHarvestReady(state);
    }

    /**
     * Harvested item registered for this crop; like getSeeds(), falls back
     * to a dummy Item instead of returning null.
     */
    protected Item getHarvestedItem() {
        final Item harvestedItem = ModItems.harvestedItemMap.get(this);
        if (harvestedItem == null) {
            // Typo fix: was "regsitered".
            FMLLog.bigWarning("Unexpected drop registered!");
            return new Item();
        }
        return harvestedItem;
    }

    @Override
    public IBlockState getStateFromMeta(int meta) {
        // Clamp out-of-range metadata (e.g. from a corrupted save) so
        // withProperty() is never handed an illegal AGE value.
        int age = Math.max(0, Math.min(meta, getHarvestReadyAge()));
        return getDefaultState().withProperty(getAge(), age);
    }

    @Override
    public void updateTick(World world, BlockPos pos, IBlockState state, Random rnd) {
        this.checkAndDropBlock(world, pos, state);
        // Only grow when there is sufficient light above the crop.
        if (world.getLightFromNeighbors(pos.up()) >= 9) {
            int i = this.getMetaFromState(state);
            if (i < this.getHarvestReadyAge()) {
                float f = getGrowthChance(this, world, pos);
                // Vanilla-style random growth roll; higher chance -> smaller bound.
                if (rnd.nextInt((int) (25.0F / f) + 1) == 0) {
                    world.setBlockState(pos, this.getStateFromMeta(i + 1), 2);
                }
            }
        }
    }

    @Override
    public Item getItemDropped(IBlockState state, Random rnd, int fortune) {
        // Immature crops drop only seeds; mature crops drop the harvest item.
        if (!isHarvestReady(state)) {
            return getSeeds();
        }
        return getHarvestedItem();
    }

    public int getMetaFromState(IBlockState state) {
        return state.getValue(getAge());
    }

    @Override
    public boolean canPlaceBlockAt(World world, BlockPos pos) {
        Block soilBlock = world.getBlockState(pos.down()).getBlock();
        return this.isSuitableForPlant(soilBlock);
    }

    @Override
    public EnumPlantType getPlantType(IBlockAccess world, BlockPos pos) {
        return EnumPlantType.Crop;
    }

    @Override
    protected BlockStateContainer createBlockState() {
        return new BlockStateContainer(this, AGE);
    }

    /** Random bonemeal growth boost of 1-7 age steps. */
    protected int getRandomInt(World world) {
        return MathHelper.getRandomIntegerInRange(world.rand, 1, 7);
    }

    @Override
    public void grow(World world, BlockPos pos, IBlockState state) {
        int newGrowth = getMetaFromState(state) + getRandomInt(world);
        int maxGrowth = getHarvestReadyAge();
        if (newGrowth > maxGrowth) {
            newGrowth = maxGrowth;
        }
        world.setBlockState(pos, getStateFromMeta(newGrowth), 2);
    }

    @Override
    public void grow(World world, Random rnd, BlockPos pos, IBlockState state) {
        grow(world, pos, state);
    }

    @Override
    public boolean equals(Object obj) {
        return (obj instanceof OreCropsCrop && regName.equals(((OreCropsCrop) obj).regName));
    }

    @Override
    public int hashCode() {
        // Bug fix: equals() was overridden without hashCode(), violating the
        // equals/hashCode contract used by the hash maps in ModItems.
        return regName.hashCode();
    }

    @Override
    public List<ItemStack> getDrops(IBlockAccess world, BlockPos pos, IBlockState state, int fortune) {
        List<ItemStack> ret = new java.util.ArrayList<ItemStack>();
        Random rnd = world instanceof World ? ((World) world).rand : new Random();
        int age = getMetaFromState(state);
        int count = quantityDropped(state, fortune, rnd);
        for (int i = 0; i < count; i++) {
            Item item = this.getItemDropped(state, rnd, fortune);
            if (item != null) {
                ret.add(new ItemStack(item, 1, this.damageDropped(state)));
            }
        }
        // Mature crops roll up to three bonus seed drops.
        if (age >= getHarvestReadyAge()) {
            for (int i = 0; i < 3; ++i) {
                if (rnd.nextInt(2 * getHarvestReadyAge()) <= age) {
                    ret.add(new ItemStack(this.getSeeds(), 1, 0));
                }
            }
        }
        return ret;
    }

    @Override
    public boolean canUseBonemeal(World worldIn, Random rand, BlockPos pos, IBlockState state) {
        return true;
    }
}
|
import {BrowserModule} from '@angular/platform-browser';
import {NgModule} from '@angular/core';
import {AppComponent} from './app.component';
import {BoxComponent} from './box/box.component';
import {ListProjectComponent} from './box/list-project/list-project.component';
import {ProjectService} from './services/project.service';
import {HttpClientModule} from '@angular/common/http';
import {ProjectItemComponent} from './box/list-project/project-item/project-item.component';
import {WeatherComponent} from './box/weather/weather.component';
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
import {MatButtonToggleModule} from '@angular/material/button-toggle';
import {MatIconModule} from '@angular/material/icon';
import {MatButtonModule} from '@angular/material/button';
import {AddProjectDialogComponent} from './box/add-project-dialog/add-project-dialog.component';
import {MatDialogModule} from '@angular/material/dialog';
import {MatFormFieldModule} from '@angular/material/form-field';
import {FormsModule, ReactiveFormsModule} from '@angular/forms';
import {MatInputModule} from '@angular/material/input';
import {MatDatepickerModule} from '@angular/material/datepicker';
import {MatNativeDateModule} from '@angular/material/core';
import {MatCheckboxModule} from '@angular/material/checkbox';
import {MatSelectModule} from '@angular/material/select';
import {EditProjectDialogComponent} from './box/edit-project-dialog/edit-project-dialog.component';
import {MatAutocompleteModule} from '@angular/material/autocomplete';
import {ConfirmationDialogComponent} from './confirmation-dialog/confirmation-dialog.component';
import {UserService} from './services/user.service';
import {ErrorDialogComponent} from './error-dialog/error-dialog.component';
import {MatProgressSpinnerModule} from '@angular/material/progress-spinner';
import {UserComponent} from './user/user.component';
import {UserAddDialogComponent} from './user/user-add-dialog/user-add-dialog.component';
import { SpinnerOverlayComponent } from './spinner-overlay/spinner-overlay.component';
import {SpinnerOverlayService} from './services/spinner-overlay.service';
/**
 * Root Angular module: declares all application components, imports the
 * Angular Material UI modules used across the app, and registers the
 * application-wide services.
 */
@NgModule({
  declarations: [
    AppComponent,
    BoxComponent,
    ListProjectComponent,
    ProjectItemComponent,
    WeatherComponent,
    AddProjectDialogComponent,
    EditProjectDialogComponent,
    ConfirmationDialogComponent,
    ErrorDialogComponent,
    UserComponent,
    UserAddDialogComponent,
    SpinnerOverlayComponent
  ],
  imports: [
    BrowserModule,
    HttpClientModule,
    BrowserAnimationsModule,
    MatButtonToggleModule,
    MatIconModule,
    MatButtonModule,
    MatDialogModule,
    MatFormFieldModule,
    FormsModule,
    MatInputModule,
    MatDatepickerModule,
    MatNativeDateModule,
    MatCheckboxModule,
    MatSelectModule,
    ReactiveFormsModule,
    MatAutocompleteModule,
    MatProgressSpinnerModule
  ],
  // NOTE(review): only AddProjectDialogComponent is registered here, yet the
  // other dialog components look dynamically opened too — on pre-Ivy Angular
  // they would also need to be entry components; confirm against usage.
  entryComponents: [AddProjectDialogComponent],
  providers: [ProjectService, UserService, SpinnerOverlayService],
  bootstrap: [AppComponent]
})
export class AppModule {
}
|
#!/bin/bash
# Regular tests
# Run the compiled test binary for the Linux x86_64 build.
./build/Linux-x86_64/bin/test
|
# Small fully-connected binary classifier: 1 input feature -> 20 -> 10 -> 1.
model = Sequential()
model.add(Dense(20, input_dim=1, activation='relu'))  # hidden layer 1
model.add(Dense(10, activation='relu'))               # hidden layer 2
model.add(Dense(1, activation='sigmoid'))             # probability output
<reponame>mesikapp/dash.js
import PatchManifestModel from '../../src/dash/models/PatchManifestModel';
import DashConstants from '../../src/dash/constants/DashConstants';
import PatchOperation from '../../src/dash/vo/PatchOperation';
import SimpleXPath from '../../src/dash/vo/SimpleXPath';
import PatchHelper from './helpers/PatchHelper';
// Unit tests for PatchManifestModel: patch detection, publish-time parsing,
// MPD id extraction, and conversion of DASH MPD patch documents into
// PatchOperation instances (add / replace / remove, element and attribute
// targets, plus invalid-operation handling).
const expect = require('chai').expect;
const context = {};
const patchManifestModel = PatchManifestModel(context).getInstance();

describe('PatchManifestModel', function () {
    // A patch is identified solely by the presence of the original MPD id.
    describe('getIsPatch', function () {
        it('should identify patches by presence of original MPD id', function () {
            let patch = {
                [DashConstants.ORIGINAL_MPD_ID]: 'foobar'
            };
            expect(patchManifestModel.getIsPatch(patch)).to.be.true; // jshint ignore:line
        });
        it('should consider the lack of original MPD id as non-patch', function () {
            expect(patchManifestModel.getIsPatch({})).to.be.false; // jshint ignore:line
        });
        it('should consider lack of patch argument as non-patch', function () {
            expect(patchManifestModel.getIsPatch()).to.be.false; // jshint ignore:line
        });
    });
    describe('getPublishTime', function () {
        it('should provide null for missing argument', function () {
            expect(patchManifestModel.getPublishTime()).to.be.null; // jshint ignore:line
        });
        it('should provide null for missing publish time in patch', function () {
            expect(patchManifestModel.getPublishTime({})).to.be.null; // jshint ignore:line
        });
        it('should provide Date object for parsed publish time', function () {
            let patch = {
                [DashConstants.PUBLISH_TIME]: '2020-11-11T05:13:19.514676331Z'
            };
            expect(patchManifestModel.getPublishTime(patch)).to.be.instanceOf(Date);
        });
    });
    describe('getOriginalPublishTime', function () {
        it('should provide null for missing argument', function () {
            expect(patchManifestModel.getOriginalPublishTime()).to.be.null; // jshint ignore:line
        });
        it('should provide null for missing original publish time in patch', function () {
            expect(patchManifestModel.getOriginalPublishTime({})).to.be.null; // jshint ignore:line
        });
        it('should provide Date object for parsed original publish time', function () {
            let patch = {
                [DashConstants.ORIGINAL_PUBLISH_TIME]: '2020-11-11T05:13:19.514676331Z'
            };
            expect(patchManifestModel.getOriginalPublishTime(patch)).to.be.instanceOf(Date);
        });
    });
    describe('getMpdId', function () {
        it('should provide null for missing argument', function () {
            expect(patchManifestModel.getMpdId()).to.be.null; // jshint ignore:line
        });
        it('should provide null for missing attribute', function () {
            expect(patchManifestModel.getMpdId({})).to.be.null; // jshint ignore:line
        });
        it('should provide mpd id when present', function () {
            let patch = {
                [DashConstants.ORIGINAL_MPD_ID]: 'foobar'
            };
            expect(patchManifestModel.getMpdId(patch)).to.equal('foobar');
        });
    });
    // Operation parsing: patches are generated via PatchHelper fixtures.
    describe('getPatchOperations', function () {
        const patchHelper = new PatchHelper();
        it('should provide empty operation set for missing argument', function () {
            expect(patchManifestModel.getPatchOperations()).to.be.empty; // jshint ignore:line
        });
        describe('add operations', function () {
            it('should properly parse add operation targeting element', function () {
                let patch = patchHelper.generatePatch('foobar', [{
                    action: 'add',
                    selector: '/MPD/Period',
                    position: 'after',
                    children: [{ 'Period': {} }]
                }]);
                let operations = patchManifestModel.getPatchOperations(patch);
                expect(operations.length).to.equal(1);
                expect(operations[0]).to.be.instanceOf(PatchOperation);
                expect(operations[0].action).to.equal('add');
                expect(operations[0].xpath).to.be.instanceOf(SimpleXPath);
                expect(operations[0].xpath.findsElement()).to.be.true; // jshint ignore:line
                expect(operations[0].position).to.equal('after');
                expect(operations[0].value).to.have.all.keys(['Period']);
            });
            it('should properly parse add operation targeting attribute', function () {
                let patch = patchHelper.generatePatch('foobar', [{
                    action: 'add',
                    selector: '/MPD/Period',
                    type: '@id',
                    text: 'foo-1'
                }]);
                let operations = patchManifestModel.getPatchOperations(patch);
                expect(operations.length).to.equal(1);
                expect(operations[0]).to.be.instanceOf(PatchOperation);
                expect(operations[0].action).to.equal('add');
                expect(operations[0].xpath).to.be.instanceOf(SimpleXPath);
                expect(operations[0].xpath.findsAttribute()).to.be.true; // jshint ignore:line
                expect(operations[0].value).to.equal('foo-1');
            });
            // Namespace additions are not supported and must be dropped.
            it('should properly ignore add operation attempting namespace addition', function () {
                let patch = patchHelper.generatePatch('foobar', [{
                    action: 'add',
                    selector: '/MPD/Period',
                    type: 'namespace::thing',
                    text: 'foo-1'
                }]);
                let operations = patchManifestModel.getPatchOperations(patch);
                expect(operations.length).to.equal(0);
            });
        });
        describe('replace operations', function () {
            it('should properly parse replace operation targeting element', function () {
                let patch = patchHelper.generatePatch('foobar', [{
                    action: 'replace',
                    selector: '/MPD/Period',
                    children: [{ 'Period': {} }]
                }]);
                let operations = patchManifestModel.getPatchOperations(patch);
                expect(operations.length).to.equal(1);
                expect(operations[0]).to.be.instanceOf(PatchOperation);
                expect(operations[0].action).to.equal('replace');
                expect(operations[0].xpath).to.be.instanceOf(SimpleXPath);
                expect(operations[0].xpath.findsElement()).to.be.true; // jshint ignore:line
                expect(operations[0].value).to.have.all.keys(['Period']);
            });
            it('should properly parse replace operation targeting attribute', function () {
                let patch = patchHelper.generatePatch('foobar', [{
                    action: 'replace',
                    selector: '/MPD/Period/@id',
                    text: 'foo-2'
                }]);
                let operations = patchManifestModel.getPatchOperations(patch);
                expect(operations.length).to.equal(1);
                expect(operations[0]).to.be.instanceOf(PatchOperation);
                expect(operations[0].action).to.equal('replace');
                expect(operations[0].xpath).to.be.instanceOf(SimpleXPath);
                expect(operations[0].xpath.findsAttribute()).to.be.true; // jshint ignore:line
                expect(operations[0].value).to.equal('foo-2');
            });
        });
        describe('remove operations', function () {
            it('should properly parse remove operation targeting element', function () {
                let patch = patchHelper.generatePatch('foobar', [{
                    action: 'remove',
                    selector: '/MPD/Period[3]'
                }]);
                let operations = patchManifestModel.getPatchOperations(patch);
                expect(operations.length).to.equal(1);
                expect(operations[0]).to.be.instanceOf(PatchOperation);
                expect(operations[0].action).to.equal('remove');
                expect(operations[0].xpath).to.be.instanceOf(SimpleXPath);
                expect(operations[0].xpath.findsElement()).to.be.true; // jshint ignore:line
            });
            it('should properly parse remove operation targeting attribute', function () {
                let patch = patchHelper.generatePatch('foobar', [{
                    action: 'remove',
                    selector: '/MPD/Period/@id'
                }]);
                let operations = patchManifestModel.getPatchOperations(patch);
                expect(operations.length).to.equal(1);
                expect(operations[0]).to.be.instanceOf(PatchOperation);
                expect(operations[0].action).to.equal('remove');
                expect(operations[0].xpath).to.be.instanceOf(SimpleXPath);
                expect(operations[0].xpath.findsAttribute()).to.be.true; // jshint ignore:line
            });
        });
        describe('operation edge cases', function () {
            // Operation order within a patch document must be preserved.
            it('should properly parse operation sequence', function () {
                let patch = patchHelper.generatePatch('foobar', [
                    {
                        action: 'remove',
                        selector: '/MPD/Period[2]'
                    },
                    {
                        action: 'replace',
                        selector: '/MPD/@publishTime',
                        text: 'some-new-time'
                    },
                    {
                        action: 'add',
                        selector: '/MPD/Period',
                        position: 'after',
                        children: [{ 'Period': {} }]
                    }
                ]);
                let operations = patchManifestModel.getPatchOperations(patch);
                expect(operations.length).to.equal(3);
                expect(operations[0].action).to.equal('remove');
                expect(operations[1].action).to.equal('replace');
                expect(operations[2].action).to.equal('add');
            });
            // Unknown actions are skipped without affecting valid neighbors.
            it('should properly ignore invalid operations', function () {
                let patch = patchHelper.generatePatch('foobar', [
                    {
                        action: 'remove',
                        selector: '/MPD/Period[2]'
                    },
                    {
                        action: 'unknown'
                    },
                    {
                        action: 'add',
                        selector: '/MPD/Period',
                        position: 'after',
                        children: [{ 'Period': {} }]
                    },
                    {
                        action: 'other-unknown'
                    }
                ]);
                let operations = patchManifestModel.getPatchOperations(patch);
                expect(operations.length).to.equal(2);
                expect(operations[0].action).to.equal('remove');
                expect(operations[1].action).to.equal('add');
            });
            it('should properly ignore operations with unsupported xpaths', function () {
                let patch = patchHelper.generatePatch('foobar', [
                    {
                        action: 'remove',
                        selector: 'MPD/Period' // non-absolute paths not supported
                    }
                ]);
                let operations = patchManifestModel.getPatchOperations(patch);
                expect(operations.length).to.equal(0);
            });
        });
    });
});
|
<reponame>a8m/expect
package expect_test
import (
"testing"
"github.com/a8m/expect"
)
// TODO(Ariel): Create mock that implement TB interface
// and stub `Error` and `Fatal`
// TestLen exercises Len assertions and their negations across strings,
// maps, slices, and buffered channels.
func TestLen(t *testing.T) {
	expect := expect.New(t)
	expect("foo").To.Have.Len(3)
	m := map[string]int{}
	expect(m).To.Have.Len(0)
	expect(m).Not.To.Have.Len(1)
	s := []string{"a", "b"}
	expect(s).To.Have.Len(2)
	expect(s).Not.To.Have.Len(1)
	// Channel length counts buffered elements, not capacity.
	c := make(chan bool, 5)
	c <- true
	expect(c).To.Have.Len(1)
	expect(c).Not.To.Have.Len(0)
}
// TestCap exercises Cap assertions on arrays, slices, and channels.
func TestCap(t *testing.T) {
	expect := expect.New(t)
	expect([2]int{}).To.Have.Cap(2)
	expect(make([]byte, 2, 10)).To.Have.Cap(10)
	expect(make(chan string, 2)).Not.To.Have.Cap(10)
}
// TestKey exercises single-key assertions: presence, absence, and
// optional key/value matching across several map key and value types.
func TestKey(t *testing.T) {
	expect := expect.New(t)
	m1 := map[string]int{
		"a": 1,
		"b": 2,
	}
	expect(m1).To.Have.Key("a")
	expect(m1).Not.To.Have.Key("c")
	// Two-argument form also matches the value stored under the key.
	expect(m1).To.Have.Key("a", 1)
	m2 := map[int]string{
		1: "a",
		2: "b",
	}
	expect(m2).To.Have.Key(1)
	expect(m2).Not.To.Have.Key(3)
	expect(m2).To.Have.Key(2, "b")
	expect(m2).Not.To.Have.Key(1, "c")
	// Composite values (arrays, maps) are compared structurally.
	m3 := map[string]interface{}{
		"arr": [1]int{},
		"map": map[int]int{1: 1},
	}
	expect(m3).To.Have.Key("arr")
	expect(m3).To.Have.Key("map")
	expect(m3).Not.To.Have.Key("struct")
	expect(m3).To.Have.Key("arr", [1]int{})
	expect(m3).To.Have.Key("map", map[int]int{1: 1})
	expect(m3).Not.To.Have.Key("map", map[string]int{})
}
// TestKeys exercises multi-key assertions: all listed keys must be
// present (To.Have) or all absent (Not.To.Have).
func TestKeys(t *testing.T) {
	expect := expect.New(t)
	m1 := map[string]int{
		"a": 1,
		"b": 2,
		"c": 3,
	}
	expect(m1).To.Have.Keys("a", "b", "c")
	expect(m1).Not.To.Have.Keys("d", "e", "i")
	m2 := map[int]string{
		1: "a",
		2: "b",
		3: "c",
	}
	expect(m2).To.Have.Keys(1, 2, 3)
	expect(m2).Not.To.Have.Keys(4, 5, 6)
}
// TestField exercises single struct-field assertions: presence by name
// and optional value matching.
func TestField(t *testing.T) {
	expect := expect.New(t)
	p := struct {
		X, Y int
	}{1, 3}
	expect(p).To.Have.Field("X")
	expect(p).To.Have.Field("Y", 3)
	expect(p).Not.To.Have.Field("Z")
	expect(p).Not.To.Have.Field("Y", 4)
}
// TestFields exercises multi-field assertions on anonymous structs.
func TestFields(t *testing.T) {
	expect := expect.New(t)
	p := struct {
		X, Y int
	}{1, 2}
	expect(p).To.Have.Fields("X", "Y")
	expect(p).Not.To.Have.Fields("Z")
	expect(p).Not.To.Have.Fields("T", "Z")
}
// Test Method
// Person declares one value-receiver method (Hello) and one
// pointer-receiver method (Hallo) so TestMethod can verify that method
// lookup respects Go's method sets (Hallo is only on *Person).
type Person struct{}
func (p Person) Hello() {}
func (p *Person) Hallo() {}
// TestMethod checks Method assertions against value and pointer method
// sets: a Person value lacks the pointer-receiver Hallo, while *Person
// has both methods.
func TestMethod(t *testing.T) {
	expect := expect.New(t)
	p := Person{}
	expect(p).To.Have.Method("Hello")
	expect(p).Not.To.Have.Method("Hallo")
	expect(&p).To.Have.Method("Hallo")
	expect(&p).To.Have.Method("Hello")
}
// TestHaveFailNow verifies that Else.FailNow() triggers the testing
// FailNow hook only when the preceding Have assertion fails. It uses the
// mock T (newMockT, defined elsewhere in this package) whose FailNow
// signals on a channel; the non-blocking selects probe that channel.
func TestHaveFailNow(t *testing.T) {
	mockT := newMockT()
	expect := expect.New(mockT)
	l := []string{"foo"}
	// Passing assertion: FailNow must NOT have been called.
	expect(l).To.Have.Len(1).Else.FailNow()
	select {
	case <-mockT.FailNowCalled:
		t.Errorf("Expected FailNow() on passing test not to be called")
	default:
	}
	// Failing assertion: FailNow MUST have been called.
	expect(l).To.Have.Len(3).Else.FailNow()
	select {
	case <-mockT.FailNowCalled:
	default:
		t.Errorf("Expected FailNow() on failing test to be called")
	}
}
// TestNotHaveFailNow mirrors TestHaveFailNow for negated assertions:
// Else.FailNow() fires only when the Not.To.Have assertion fails.
func TestNotHaveFailNow(t *testing.T) {
	mockT := newMockT()
	expect := expect.New(mockT)
	l := []string{"foo"}
	// Passing negated assertion: FailNow must NOT have been called.
	expect(l).Not.To.Have.Len(3).Else.FailNow()
	select {
	case <-mockT.FailNowCalled:
		t.Errorf("Expected FailNow() on passing test not to be called")
	default:
	}
	// Failing negated assertion: FailNow MUST have been called.
	expect(l).Not.To.Have.Len(1).Else.FailNow()
	select {
	case <-mockT.FailNowCalled:
	default:
		t.Errorf("Expected FailNow() on failing test to be called")
	}
}
|
package com.java.study.algorithm.zuo.emiddle.class07;
// Placeholder for the "find new-type char" exercise; no implementation yet.
public class Code08_FindNewTypeChar{
}
# Load the shell configuration, activate the covdock conda environment, and
# run covalent docking of compound1 against residue A:CYS19 (fixed receptor,
# flexible residues from flex.list), writing results to ./output.
source ~/.bashrc
conda activate covdock
cov_dock_mol2 -i /home/fuqiuyu/work/covdock_server/data/compound1.mol2 -r fixed -s A:CYS19 -l flex.list -w output
|
public class PrimeNumber {
    /**
     * Returns whether {@code num} is prime.
     *
     * Bug fix: the original returned true for any num below 2 (including
     * 0, 1, and negatives) because the trial-division loop never executed.
     * Also tightened the loop bound to sqrt(num): if num has a divisor
     * greater than its square root, it also has one below it.
     *
     * @param num the number to test
     * @return true if num is a prime number, false otherwise
     */
    public static boolean isPrime(int num) {
        // By definition, primes are integers greater than 1.
        if (num < 2) {
            return false;
        }
        // Trial division up to sqrt(num); (long) cast avoids i*i overflow.
        for (int i = 2; (long) i * i <= num; i++) {
            if (num % i == 0) {
                return false;
            }
        }
        return true;
    }

    /** Demo driver: reports whether a sample number is prime. */
    public static void main(String[] args) {
        int num = 24;
        if (isPrime(num))
            System.out.println(num + " is a prime number");
        else
            System.out.println(num + " is not a prime number");
    }
}
// <title-bar> element directive for the NW.js-based Twitch viewer: renders
// the window chrome from header.html and exposes window controls
// (fullscreen, dev tools, reload, quit) on the scope.
TwitchViewer.directive('titleBar', [function() {
    return {
        restrict: 'E',
        templateUrl: 'app/templates/directives/header.html',
        link: function($scope, element) {
            // NW.js handle to the current application window.
            var gui = require('nw.gui');
            var win = gui.Window.get();
            // Drop focus from any clicked title-bar button so keyboard
            // interaction returns to the main view.
            element.on("click", function(event) {
                element.find("button:focus").blur();
            });
            $scope.goFullscreen = function() {
                win.toggleFullscreen();
            };
            $scope.showDevTools = function() {
                win.showDevTools();
            };
            $scope.reload = function() {
                // Full reload bypassing the cache.
                win.reloadIgnoringCache();
            };
            $scope.closeApplication = function() {
                gui.App.closeAllWindows();
            };
        }
    }
}]);
|
<reponame>naga-project/webfx<gh_stars>100-1000
package dev.webfx.kit.mapper.peers.javafxgraphics.markers;
import javafx.beans.property.DoubleProperty;
/**
* @author <NAME>
*/
/**
 * Mixin interface for peers exposing a JavaFX-style {@code endX} property
 * (e.g. line-shaped nodes); provides default getter/setter delegating to
 * the underlying {@link DoubleProperty}.
 */
public interface HasEndXProperty {
    /** The underlying endX property backing the default accessors. */
    DoubleProperty endXProperty();
    /** Sets the endX value; accepts any Number, stored via setValue. */
    default void setEndX(Number endX) {
        endXProperty().setValue(endX);
    }
    default Double getEndX() {
        return endXProperty().getValue();
    }
}
|
# Smoke-test the REST API on localhost:8080:
# create a doctor, list doctors, create a clinic, list clinics.
curl -X POST \
  http://localhost:8080/doctors \
  -H 'Content-Type: application/json' \
  -H 'Postman-Token: a8e04d0c-154d-472c-be02-d15f39ec53d4' \
  -H 'cache-control: no-cache' \
  -d '{
  "fName" : "John",
  "lName" : "Dorian",
  "specialization" : "Internal Medicine"
}'
curl -X GET \
  http://localhost:8080/doctors \
  -H 'Postman-Token: 9150daaf-8d3d-492d-a244-f0e9a3195e97' \
  -H 'cache-control: no-cache'
curl -X POST \
  http://localhost:8080/clinics \
  -H 'Content-Type: application/json' \
  -H 'Postman-Token: 0dec8820-1f80-48ab-930e-889814b5b4a7' \
  -H 'cache-control: no-cache' \
  -d '{
  "name" : "Sacred Heart",
  "city" : "North Hollywood",
  "streetAddress" : "12629 Riverside Drive"
}'
curl -X GET \
  http://localhost:8080/clinics \
  -H 'Postman-Token: 71e936ba-5fe6-494b-938a-b53dec91fec6' \
  -H 'cache-control: no-cache'
import { join, resolve } from 'path'
import { compile } from 'ejs'
import htmlescape from 'htmlescape'
import File from '@uiengine/core/lib/util/file'
import { highlight } from './util'
// Locales the UI ships translations for; anything else falls back to 'en'.
const supportedLocales = ['en', 'de']
// Render option defaults, overridable per render() call.
const defaultOpts = {
  lang: 'en',
  hljs: 'atom-one-dark',
  base: '/',
  cache: true,
  customStylesFile: null
}
// templates are loaded on setup
const templates = {}
const templatesPath = resolve(__dirname, '..', 'lib', 'templates')
const staticPath = resolve(__dirname, '..', 'dist')
// Maps a template name to its .ejs file path.
const templatePath = template => join(templatesPath, `${template}.ejs`)
// Copies the prebuilt static assets (dist/) into the build target directory.
async function copyStatic (target) {
  await File.copy(staticPath, target)
}
// Reads and compiles the named EJS template, caching the compiled
// function in the module-level `templates` map.
async function compileTemplate (name) {
  const templateString = await File.read(templatePath(name))
  templates[name] = compile(templateString)
}
// Adapter setup hook: configures the markdown highlighter, precompiles the
// 'index' and 'sketch' templates, and copies static assets to the target.
// Throws a combined error (with option dump when options.debug) on failure.
export async function setup (options) {
  // configure markdown renderer
  const { markdownIt, target } = options
  markdownIt.set({ highlight })
  // load and assign template
  try {
    await Promise.all([
      compileTemplate('index'),
      compileTemplate('sketch'),
      copyStatic(target)
    ])
  } catch (err) {
    const message = ['UI setup failed:', err]
    if (options.debug) message.push(JSON.stringify(options, null, 2))
    throw new Error(message.join('\n\n'))
  }
}
// Renders the given template with the UI state merged into the options.
// The 'index' template is additionally written to <target>/index.html.
// Bug fix: cache/debug/lang are now read from the merged `opts` (where
// defaults apply) instead of the raw `options` — previously a caller that
// omitted `cache` recompiled the template on every render, defeating the
// `defaultOpts.cache: true` default. The caller's options object is also
// no longer mutated when sanitizing the locale.
export async function render (options, state, template = 'index') {
  // merge defaults first, then sanitize the merged copy
  const opts = Object.assign({}, defaultOpts, options)
  if (!supportedLocales.includes(opts.lang)) opts.lang = defaultOpts.lang
  // strip a trailing slash so templates can append absolute paths safely
  const basePath = opts.base.replace(/\/$/, '')
  const context = Object.assign({ htmlescape, basePath }, { state }, opts)
  try {
    // recompile only when caching is explicitly disabled
    if (!opts.cache) await compileTemplate(template)
    const templateFn = templates[template]
    const rendered = templateFn(context)
    if (template === 'index') {
      const { target } = opts
      const filePath = resolve(target, 'index.html')
      await File.write(filePath, rendered)
    }
    return rendered
  } catch (err) {
    const message = [`UI could not render template "${template}":`, err]
    if (opts.debug) message.push(JSON.stringify(context, null, 2))
    const error = new Error(message.join('\n\n'))
    error.code = err.code
    error.path = templatePath(template)
    throw error
  }
}
|
<html>
<head>
<title>My Form</title>
</head>
<body>
<!-- NOTE(review): the form posts to email.php, but the mail-sending PHP
     appears appended to this same file — confirm the intended action
     target matches where this markup is actually served from. -->
<form action="email.php" method="post">
<input type="text" name="first_name" placeholder="First Name">
<input type="text" name="last_name" placeholder="Last Name">
<input type="submit" value="Submit">
</form>
</body>
</html>
<?php
// Send a notification email for the form submission.
// Fixes: only act on real POST requests (loading the page via GET no longer
// raises undefined-index notices or sends an empty mail), and strip CR/LF
// from user input so it cannot inject additional mail content.
if ($_SERVER['REQUEST_METHOD'] === 'POST') {
    $first_name = isset($_POST['first_name']) ? trim($_POST['first_name']) : '';
    $last_name = isset($_POST['last_name']) ? trim($_POST['last_name']) : '';
    // Remove newline characters (mail-injection guard).
    $first_name = str_replace(array("\r", "\n"), '', $first_name);
    $last_name = str_replace(array("\r", "\n"), '', $last_name);
    $to = 'your@email.com';
    $subject = 'Form Submission';
    $message = "$first_name $last_name has submitted the form.";
    mail($to, $subject, $message);
}
?>
#!/usr/bin/env bash
# Deploy config (/etc) and application (home directory) files to every
# ISUCON server via rsync, announcing the deploy on Discord and rotating
# logs afterwards. Expects variables.sh to define ISUCON_SERVERS (array),
# ISUCON_SSH_KEY, and ISUCON_SSH_USER.
# Fixes: abort on first failure (set -e) and quote the array expansion /
# remote spec so server names survive word splitting.
set -e
source variables.sh
./script/discord.sh "$USER executed deploy.sh."
for server in "${ISUCON_SERVERS[@]}"
do
    echo "---- start deploy $server -----"
    # etc
    rsync -avz -e "ssh -i $ISUCON_SSH_KEY" --rsync-path='sudo rsync' ./etc/ "$ISUCON_SSH_USER@$server":/etc/
    # app
    rsync -avz -e "ssh -i $ISUCON_SSH_KEY" ./home/ "$ISUCON_SSH_USER@$server":~/ # --exclude=''
done
./script/log-rotate.sh "deploy-by-$USER"
|
<reponame>iagodahlem/clima-cli<filename>src/utils/weather.js<gh_stars>1-10
const axios = require('axios')
module.exports = async (location) => {
const result = await axios.get('https://query.yahooapis.com/v1/public/yql', {
params: {
format: 'json',
q: `select item from weather.forecast where woeid in
(select woeid from geo.places(1) where text="${location}")
`,
}
})
return result.data.query.results.channel.item
}
|
#!/usr/bin/env bash
# CI/dev machine bootstrap: installs system and Rust toolchain dependencies,
# then grants tcpdump the capabilities needed by the vmxnet3 tests.
set -ex
# Helper functions used below are defined here.
source scripts/generic-setup.sh
# system wide dependencies (packages)
install_build_dependencies
install_run_dependencies
# installing rust
bootstrap_rust
install_rust_build_dependencies
install_rust_run_dependencies
# set permissions for tcpdump for vmxnet3 tests
sudo setcap cap_net_raw,cap_net_admin=eip /usr/sbin/tcpdump
|
<reponame>ritaswc/wechat_app_template<filename>2021-05-09/微商城+项目搭建指南/nodejs/static/javascripts/admin/reducers/userAnalyze.js
import {
REQUEST_USER_ANALYZE
} from '../constants';
// Initial analytics counters shown before the first fetch completes.
let initState = {
  todayNewUser: 0,
  yesterdayNewUser: 0,
  todayPurchaseUser: 0,
  yesterdayPurchaseUser: 0
};

// Reducer for the user-analyze slice: swaps in the freshly fetched data on
// REQUEST_USER_ANALYZE and leaves state untouched for every other action.
export default (state = initState, action) => {
  if (action.type === REQUEST_USER_ANALYZE) {
    return action.userAnalyze;
  }
  return state;
};
|
<gh_stars>1-10
/*
* Copyright (C) 2011 GroupMe, Inc.
*/
package com.groupme.providerone.sample.database.objects;
import android.os.Parcel;
import com.groupme.providerone.sample.database.autogen.objects.BaseMyView;
/**
 * Parcelable database object; all persistence logic lives in the
 * autogenerated {@link BaseMyView} base class.
 */
public class MyView extends BaseMyView {
    public MyView() {
    }
    /** Reconstructs an instance from a Parcel (used by CREATOR). */
    public MyView(Parcel in) {
        super(in);
    }
    // Required Parcelable factory for framework deserialization.
    public static final Creator<MyView> CREATOR = new Creator<MyView>() {
        public MyView createFromParcel(Parcel in) {
            return new MyView(in);
        }
        public MyView[] newArray(int size) {
            return new MyView[size];
        }
    };
}
|
<reponame>jmini/microprofile-open-api<gh_stars>0
/**
* Copyright (c) 2017 Contributors to the Eclipse Foundation
* Copyright 2017 SmartBear Software
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eclipse.microprofile.openapi.models.security;
import java.util.Map;
import org.eclipse.microprofile.openapi.models.Constructible;
import org.eclipse.microprofile.openapi.models.Extensible;
/**
* Scopes is a property of OAuth Flow Object.
*
* @see <a href="https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.0.md#oauthFlowObject">OAuthFlow Object</a>
**/
/**
 * Scopes is a property of an OAuth Flow Object, mapping scope names to
 * their descriptions. It behaves as a {@code Map<String, String>} and
 * additionally supports vendor extensions via {@link Extensible}.
 */
public interface Scopes extends Constructible, Extensible, Map<String, String> {
    /**
     * Adds name of an existing scope object and item parameters to a Scopes instance as a key-value pair in a map.
     *
     * @param scope the name of a scope
     * @param description description of the scope
     * @return Scopes instance with the added key-value pair (fluent, for chaining)
     */
    Scopes addScope(String scope, String description);
}
#!/usr/bin/env bash
# Prints a readiness banner followed by two "__AVI__"-tagged lines.
# NOTE(review): the __AVI__ prefix is presumably parsed by a caller —
# confirm before changing the output format.
echo --------------
echo READY....
echo __AVI__ ok avi
echo __AVI__ il est quelle heure
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Maintains an ordered collection of string items with add / edit /
 * delete operations.
 */
public class ItemManager {
    // Declared against the List interface (not ArrayList) so the backing
    // implementation can change without touching callers.
    private final List<String> items;

    /** Creates a manager with no items. */
    public ItemManager() {
        this.items = new ArrayList<>();
    }

    /** Appends {@code item} to the end of the list. */
    public void addItem(String item) {
        this.items.add(item);
    }

    /**
     * Replaces the item at {@code index} with {@code item}.
     *
     * @throws IndexOutOfBoundsException if {@code index} is out of range
     */
    public void editItem(int index, String item) {
        this.items.set(index, item);
    }

    /**
     * Removes the item at {@code index}.
     *
     * @throws IndexOutOfBoundsException if {@code index} is out of range
     */
    public void deleteItem(int index) {
        this.items.remove(index);
    }

    /**
     * Returns a read-only snapshot of the current items. New accessor,
     * backward compatible; the original class offered no way to inspect state.
     */
    public List<String> getItems() {
        return Collections.unmodifiableList(new ArrayList<>(this.items));
    }
}
<filename>middleware/auth.js
const jwt = require('jsonwebtoken');
const config = require('../config/config');
module.exports = (req, res, next) => {
const token = req.header('x-auth-token');
if (!token) {
return res.status(401).json({ msg: 'No token, authorization denied.' });
}
try {
jwt.verify(token, config.jwtSecret, (error, decoded) => {
if (error) {
return res.status(401).json({ msg: 'Token is not valid.' });
} else {
req.user = decoded.user;
next();
}
});
} catch (err) {
console.error('something wrong.');
res.status(500).json({ msg: 'Server error' });
}
}; |
import { LoginInput } from './input/login.input';
import { Resolver, Query, Mutation, Args, Context } from '@nestjs/graphql';
import { AuthService } from './auth.service';
import { Auth } from '../../models/auth.model';
import { Public } from '../../@core/@keycloak';
import { Logout } from '../../models/logout.model';
import { AuthenticatedUser } from '../../@core/@keycloak';
import { InjectRedis } from '@liaoliaots/nestjs-redis';
import { Redis } from 'ioredis';
import { GqlCookies } from '../../@core/guards/gql-cookie.decorator';
import { RefreshInput } from './input/refresh.input';
import { UserKc } from '../../models/user-kc.model';
/**
 * GraphQL resolver for Keycloak-backed authentication. Token payloads
 * are cached in Redis under the Keycloak `session_state`, expiring
 * together with the refresh token.
 */
@Resolver((of) => Auth)
//@Resource('profile')
export class AuthResolver {
  constructor(
    @InjectRedis('cabir') private readonly redis: Redis,
    private readonly authService: AuthService
  ) {}

  /** Trivial health-check query. */
  @Query(() => String)
  sayHello(): string {
    return 'Hello World!';
  }

  /**
   * Authenticates with username/password and caches the resulting token
   * payload in Redis, keyed by session id, for the refresh-token lifetime.
   */
  @Mutation((returns) => Auth)
  @Public()
  async login(
    @Context() ctx,
    @GqlCookies() cookies,
    @Args('data') { username, password }: LoginInput
  ) {
    const data = await this.authService.login(username, password);
    await this.redis.set(
      data?.session_state,
      JSON.stringify(data),
      'EX',
      data?.refresh_expires_in
    );
    return data;
  }

  /**
   * Logs the current user out and removes the cached session.
   *
   * FIX: the cached entry may already have expired from Redis; the old
   * code ran `JSON.parse(null)` and destructured the result, raising an
   * opaque TypeError. Fail with an explicit error instead.
   */
  @Mutation((returns) => Logout)
  async logout(@AuthenticatedUser() user: any) {
    const sessUserId = user?.session_state;
    const cached = sessUserId ? await this.redis.get(sessUserId) : null;
    if (!cached) {
      throw new Error('No active session found for this user.');
    }
    const { refresh_token } = JSON.parse(cached);
    const data = await this.authService.logout(refresh_token);
    if (data) {
      // Only drop the session once Keycloak confirmed the logout.
      await this.redis.del(sessUserId);
    }
    return data;
  }

  /** Exchanges a refresh token for fresh tokens and re-caches the payload. */
  @Mutation((returns) => Auth)
  @Public()
  async refresh(@Args('data') { refresh }: RefreshInput) {
    const data = await this.authService.refreshToken(refresh);
    await this.redis.set(
      data?.session_state,
      JSON.stringify(data),
      'EX',
      data?.refresh_expires_in
    );
    return data;
  }

  /** Returns the Keycloak profile of the caller (from the request's access token). */
  @Mutation((returns) => UserKc)
  async me(@Context() ctx) {
    return await this.authService.me(ctx?.req?.accessTokenJWT);
  }
}
|
The worst-case time complexity of a linear search procedure in Java is O(n), where n is the number of elements in the array.
def populate_model_results(model_types: list) -> dict:
    """Build an empty results container keyed by model type.

    Each model type maps to its own fresh, independent list, ready to
    accumulate per-model results.
    """
    results = {}
    for model_type in model_types:
        results[model_type] = []
    return results
# rubocop:disable Metrics/LineLength
# == Schema Information
#
# Table name: reviews
#
# id :integer not null, primary key
# content :text not null
# content_formatted :text not null
# deleted_at :datetime indexed
# likes_count :integer default(0), indexed
# media_type :string
# progress :integer
# rating :float not null
# source :string(255)
# spoiler :boolean default(FALSE), not null
# created_at :datetime not null
# updated_at :datetime not null
# library_entry_id :integer
# media_id :integer not null, indexed
# user_id :integer not null, indexed
#
# Indexes
#
# index_reviews_on_deleted_at (deleted_at)
# index_reviews_on_likes_count (likes_count)
# index_reviews_on_media_id (media_id)
# index_reviews_on_user_id (user_id)
#
# Foreign Keys
#
# fk_rails_150e554f22 (library_entry_id => library_entries.id)
#
# rubocop:enable Metrics/LineLength
# A user's written review of a media item, tied to their library entry so
# that progress and rating always mirror the entry at validation time.
class Review < ApplicationRecord
  include WithActivity
  include ContentProcessable

  # Soft deletion via the deleted_at column.
  acts_as_paranoid

  has_many :likes, class_name: 'ReviewLike', dependent: :destroy
  belongs_to :media, polymorphic: true, required: true
  belongs_to :user, required: true, counter_cache: true
  belongs_to :library_entry, required: true

  validates :content, presence: true
  validates :rating, presence: true
  # At most one review per user per media item.
  validates :media_id, uniqueness: { scope: :user_id }
  validates :media, polymorphism: { type: Media }

  resourcify
  # Renders `content` into `content_formatted` through the inline pipeline.
  processable :content, InlinePipeline

  before_validation do
    # Default the source, then mirror progress/rating from the library
    # entry so the review reflects the user's current standing.
    self.source ||= 'hummingbird'
    self.progress = library_entry&.progress
    self.rating = library_entry&.rating
  end

  # Activity pushed to the author's profile feed and fanned out to the
  # media's feed.
  def stream_activity
    user.profile_feed.activities.new(
      progress: progress,
      updated_at: updated_at,
      likes_count: likes_count,
      to: [media.feed]
    )
  end
end
|
def triangle_num(n):
    """Return the n-th triangular number, 1 + 2 + ... + n.

    Uses the closed form n*(n+1)/2 instead of the original recursion,
    which hit Python's recursion limit for large n and descended
    forever for n < 1 (the base case only matched n == 1).

    Args:
        n: a non-negative integer; triangle_num(0) is 0.

    Raises:
        ValueError: if n is negative.
    """
    if n < 0:
        raise ValueError("n must be non-negative")
    return n * (n + 1) // 2
<filename>client/components/ThankYou.js
import React from 'react'
const ThankYou = () => <div>Thank you for your purchase!</div>
export default ThankYou
|
package com.dubture.symfony.ui.wizards.importer;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.Collection;
import org.apache.commons.io.FileUtils;
import org.eclipse.core.internal.resources.ProjectDescription;
import org.eclipse.core.internal.resources.ProjectDescriptionReader;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IWorkspace;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.dltk.internal.ui.wizards.dialogfields.DialogField;
import org.eclipse.dltk.internal.ui.wizards.dialogfields.IDialogFieldListener;
import org.eclipse.dltk.internal.ui.wizards.dialogfields.IStringButtonAdapter;
import org.eclipse.dltk.internal.ui.wizards.dialogfields.LayoutUtil;
import org.eclipse.dltk.internal.ui.wizards.dialogfields.StringButtonDialogField;
import org.eclipse.dltk.internal.ui.wizards.dialogfields.StringDialogField;
import org.eclipse.jface.layout.GridDataFactory;
import org.eclipse.jface.layout.GridLayoutFactory;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.php.core.PHPVersion;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.DirectoryDialog;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.ui.PlatformUI;
import org.eclipse.php.composer.api.ComposerPackage;
import org.eclipse.php.composer.api.VersionedPackage;
import org.eclipse.php.composer.api.collection.Dependencies;
import org.eclipse.php.composer.api.json.ParseException;
import com.dubture.symfony.core.SymfonyVersion;
import com.dubture.symfony.core.log.Logger;
import com.dubture.symfony.ui.SymfonyPluginImages;
import com.dubture.symfony.ui.SymfonyUiPlugin;
/**
* @author <NAME> <<EMAIL>>
*/
@SuppressWarnings("restriction")
public class SymfonyImportFirstPage extends WizardPage {

    /** Thrown by the validate* methods when a wizard setting is invalid. */
    public class ValidationException extends Exception {
        private static final long serialVersionUID = 1L;

        public ValidationException(String string) {
            super(string);
        }
    }

    private StringButtonDialogField sourceButton;
    private StringButtonDialogField containerButton;
    private StringButtonDialogField consoleButton;
    // Absolute path of the project source folder chosen by the user.
    protected IPath sourcePath;
    // Paths of the Symfony console / dumped container, relative to sourcePath.
    protected IPath consolePath;
    protected IPath containerPath;
    protected String projectName;
    protected PHPVersion phpVersion;
    protected SymfonyVersion symfonyVersion;
    private SymfonyProjectScanner scanner;
    private StringDialogField projectNameField;
    private IWorkspace workspace;

    protected SymfonyImportFirstPage(String pageName) {
        super(pageName);
        scanner = new SymfonyProjectScanner();
        workspace = ResourcesPlugin.getWorkspace();
        setTitle("Import Symfony project");
        setDescription("Import an existing Symfony project into your workspace");
        setImageDescriptor(SymfonyPluginImages.DESC_WIZBAN_IMPORT_PROJECT);
    }

    /**
     * Builds the page UI: a project-name text field plus three browse
     * fields (source folder, dumped container, Symfony console). Picking
     * a source folder triggers a background scan that pre-fills the other
     * fields; everything re-validates on change.
     */
    @Override
    public void createControl(Composite parent) {
        Composite container = new Composite(parent, SWT.NONE);
        GridLayoutFactory.fillDefaults().numColumns(3).equalWidth(false).applyTo(container);
        GridDataFactory.fillDefaults().grab(true, true).applyTo(container);

        projectNameField = new StringDialogField();
        projectNameField.setLabelText("Project name");
        projectNameField.doFillIntoGrid(container, 3);
        projectNameField.setDialogFieldListener(new IDialogFieldListener() {
            @Override
            public void dialogFieldChanged(DialogField field) {
                projectName = projectNameField.getText();
                validateSettings();
            }
        });

        sourceButton = new StringButtonDialogField(new IStringButtonAdapter() {
            @Override
            public void changeControlPressed(DialogField field) {
                try {
                    DirectoryDialog dialog = new DirectoryDialog(getShell());
                    String result = dialog.open();
                    if (result != null) {
                        sourceButton.setText(result);
                        sourcePath = new Path(result);
                        // A valid source folder unlocks the dependent fields
                        // and starts the background project scan.
                        enableButtons();
                        scanSourceFolder();
                    }
                } catch (Exception e) {
                    Logger.logException(e);
                }
            }
        });
        sourceButton.setButtonLabel("Browse");
        sourceButton.setLabelText("Source folder");
        sourceButton.doFillIntoGrid(container, 3);
        sourceButton.getTextControl(null).setEnabled(false);

        containerButton = new StringButtonDialogField(new IStringButtonAdapter() {
            @Override
            public void changeControlPressed(DialogField field) {
                try {
                    FileDialog dialog = new FileDialog(getShell());
                    //dialog.setFilterExtensions(new String[]{"xml"});
                    String result = dialog.open();
                    // Only accept files located inside the source folder.
                    String relativePath = getRelativePath(result);
                    if (result != null && relativePath != null) {
                        containerButton.setText(relativePath);
                        containerPath = new Path(relativePath);
                        validateSettings();
                    }
                } catch (Exception e) {
                    Logger.logException(e);
                }
            }
        });
        LayoutUtil.setHorizontalGrabbing(sourceButton.getTextControl(null));
        containerButton.setButtonLabel("Browse");
        containerButton.setLabelText("Dumped container");
        containerButton.doFillIntoGrid(container, 3);

        consoleButton = new StringButtonDialogField(new IStringButtonAdapter() {
            @Override
            public void changeControlPressed(DialogField field) {
                FileDialog dialog = new FileDialog(getShell());
                String result = dialog.open();
                String relativePath = getRelativePath(result);
                if (result != null && relativePath != null) {
                    consoleButton.setText(relativePath);
                    consolePath = new Path(relativePath);
                    validateSettings();
                }
            }
        });
        LayoutUtil.setHorizontalGrabbing(containerButton.getTextControl(null));
        consoleButton.setButtonLabel("Browse");
        consoleButton.setLabelText("Symfony console");
        consoleButton.doFillIntoGrid(container, 3);
        LayoutUtil.setHorizontalGrabbing(consoleButton.getTextControl(null));

        PlatformUI.getWorkbench().getHelpSystem().setHelp(container, SymfonyUiPlugin.PLUGIN_ID + "." + "import_firstpage");
        disableButtons();
        setControl(container);
    }

    /**
     * Converts an absolute path into a path relative to {@link #sourcePath}.
     * Returns null when the input is null or lies outside the source folder.
     */
    protected String getRelativePath(String path) {
        if (path == null) {
            return null;
        }
        IPath container = new Path(path);
        if (!sourcePath.isPrefixOf(container)) {
            return null;
        }
        return container.setDevice(null).removeFirstSegments(sourcePath.segmentCount()).toOSString();
    }

    /** Enables the browse buttons (text stays read-only; filled by dialogs). */
    protected void enableButtons() {
        consoleButton.setEnabled(true);
        consoleButton.getTextControl(null).setEnabled(false);
        containerButton.setEnabled(true);
        containerButton.getTextControl(null).setEnabled(false);
    }

    /** Disables the dependent fields until a source folder is selected. */
    protected void disableButtons() {
        consoleButton.setEnabled(false);
        containerButton.setEnabled(false);
    }

    /**
     * Runs the {@link SymfonyProjectScanner} with a progress dialog and
     * copies any console/container paths it found into the UI fields
     * (converted to source-relative form), then re-validates.
     */
    protected void scanSourceFolder() throws InvocationTargetException, InterruptedException {
        if (sourcePath == null) {
            return;
        }
        getWizard().getContainer().run(true, true, scanner);
        if (scanner.getConsole() != null && scanner.getConsole().exists()) {
            IPath newConsolePath = new Path(scanner.getConsole().getAbsolutePath());
            newConsolePath = newConsolePath.removeFirstSegments(sourcePath.segmentCount());
            consoleButton.setText(newConsolePath.setDevice(null).toOSString());
            consolePath = newConsolePath;
        }
        if (scanner.getContainer() != null && scanner.getContainer().exists()) {
            IPath newContainerPath = new Path(scanner.getContainer().getAbsolutePath());
            newContainerPath = newContainerPath.setDevice(null).removeFirstSegments(sourcePath.segmentCount());
            containerButton.setText(newContainerPath.toOSString());
            containerPath = newContainerPath;
        }
        validateSettings();
    }

    /**
     * Background task that inspects the chosen source folder: locates the
     * dumped container and console files, sniffs PHP/Symfony versions from
     * composer.json, and picks up an existing .project file's name.
     */
    protected class SymfonyProjectScanner implements IRunnableWithProgress {
        private File dumpedContainer;
        private File console;

        @Override
        public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
            try {
                monitor.beginTask("Scanning Symfony project", 4);
                scanForDumpedContainer();
                monitor.worked(1);
                scanForConsole();
                monitor.worked(1);
                scanVersions();
                monitor.worked(1);
                scanForExistingProjectFile();
                monitor.worked(1);
            } catch (Exception e) {
                Logger.logException(e);
            } finally {
                monitor.done();
            }
        }

        /** Finds the first file ending in "container.xml" (recursive search). */
        protected void scanForDumpedContainer() {
            String sourcePath = sourceButton.getText();
            File sourceDirectory = new File(sourcePath);
            Collection<File> files = FileUtils.listFiles(sourceDirectory, null, true);
            for (File file : files) {
                if (file.getName().toLowerCase().endsWith("container.xml")) {
                    dumpedContainer = file;
                    break;
                }
            }
        }

        /** Finds the first file named exactly "console" (recursive search). */
        protected void scanForConsole() {
            String sourcePath = sourceButton.getText();
            File sourceDirectory = new File(sourcePath);
            Collection<File> files = FileUtils.listFiles(sourceDirectory, null, true);
            for (File file : files) {
                if (file.getName().toLowerCase().equals("console")) {
                    console = file;
                    break;
                }
            }
        }

        /**
         * Derives Symfony and PHP versions from composer.json's "require"
         * section. Note: only coarse buckets are distinguished (2.1.x vs.
         * everything else; PHP 5.3 vs. 5.4).
         */
        protected void scanVersions() throws IOException, ParseException {
            IPath composerPath = sourcePath.append("composer.json");
            ComposerPackage composer = new ComposerPackage(composerPath.toFile());
            Dependencies require = composer.getRequire();
            for (VersionedPackage versioned : require) {
                if ("symfony/symfony".equals(versioned.getName())) {
                    if (versioned.getVersion().startsWith("2.1")) {
                        symfonyVersion = SymfonyVersion.Symfony2_1_9;
                    } else {
                        symfonyVersion = SymfonyVersion.Symfony2_2_1;
                    }
                }
                if ("php".equals(versioned.getName())) {
                    if (versioned.getVersion().contains("5.3")) {
                        phpVersion = PHPVersion.PHP5_3;
                    } else {
                        phpVersion = PHPVersion.PHP5_4;
                    }
                }
            }
        }

        /**
         * If the folder already contains an Eclipse .project file, adopt its
         * project name (UI update must happen on the display thread).
         */
        protected void scanForExistingProjectFile() throws IOException {
            IPath projectPath = sourcePath.append(".project");
            if (projectPath.toFile().exists()) {
                ProjectDescriptionReader reader = new ProjectDescriptionReader();
                final ProjectDescription projectDescription = reader.read(projectPath);
                if (projectDescription != null && projectDescription.getName() != null) {
                    getShell().getDisplay().asyncExec(new Runnable() {
                        @Override
                        public void run() {
                            projectName = projectDescription.getName();
                            // setTextWithoutUpdate avoids re-triggering validation.
                            projectNameField.setTextWithoutUpdate(projectName);
                        }
                    });
                }
            }
        }

        public File getConsole() {
            return console;
        }

        public File getContainer() {
            return dumpedContainer;
        }
    }

    public IPath getSourcePath() {
        return sourcePath;
    }

    public IPath getContainerPath() {
        return containerPath;
    }

    public IPath getConsolePath() {
        return consolePath;
    }

    public String getProjectName() {
        return projectName;
    }

    public PHPVersion getPHPVersion() {
        return phpVersion;
    }

    public SymfonyVersion getSymfonyVersion() {
        return symfonyVersion;
    }

    /** Fails when a workspace project with the chosen name already exists. */
    public void validateProjectName() throws ValidationException {
        try {
            IProject project = workspace.getRoot().getProject(projectName);
            if (project != null && project.exists()) {
                throw new ValidationException("A project with the same name already exists in the workspace.");
            }
        } catch (Exception e) {
            throw new ValidationException(e.getMessage());
        }
    }

    /** Fails when the selected container file does not exist under the source folder. */
    public void validateContainerPath() throws ValidationException {
        try {
            validatePath(containerPath.toOSString());
        } catch (Exception e) {
            throw new ValidationException("The selected service container does not exist.");
        }
    }

    /** Fails when the selected console file does not exist under the source folder. */
    public void validateConsolePath() throws ValidationException {
        try {
            validatePath(consolePath.toOSString());
        } catch (Exception e) {
            throw new ValidationException("The selected Symfony console does not exist.");
        }
    }

    /**
     * Checks that {@code path}, resolved against the source folder, exists.
     * The empty-message exceptions are translated into user-facing messages
     * by the callers above.
     */
    public void validatePath(String path) throws ValidationException {
        if (sourcePath == null) {
            throw new ValidationException("");
        }
        File absolute = new File(sourcePath.toOSString(), path);
        if (absolute.exists() == false) {
            throw new ValidationException("");
        }
    }

    /**
     * Re-runs the validations and updates the wizard's error message and
     * page-complete state. (Note: the console path is not validated here —
     * only project name and container path gate completion.)
     */
    public void validateSettings() {
        try {
            validateProjectName();
            validateContainerPath();
        } catch (ValidationException e) {
            setErrorMessage(e.getMessage());
            setPageComplete(false);
            return;
        }
        setPageComplete(true);
        setErrorMessage(null);
    }
}
|
import { connect } from 'react-redux'
import { push } from 'connected-react-router'
import getMe from 'store/selectors/getMe'
import updateUserSettings from 'store/actions/updateUserSettings'
import { getReturnToURL, resetReturnToURL } from 'router/AuthRoute/AuthRoute.store'
// Maps the logged-in user and the post-auth redirect target into props.
export function mapStateToProps (state, props) {
  const currentUser = getMe(state)
  const returnToURL = getReturnToURL(state)
  return { currentUser, returnToURL }
}
// Action creators auto-bound to dispatch by connect().
export const mapDispatchToProps = {
  updateUserSettings,
  resetReturnToURL,
  push
}
// Combines state/dispatch/own props and adds handleCloseSignupModal,
// which marks signup finished, clears the stored return URL, and then
// navigates to that URL (falling back to defaultPath).
export function mergeProps (stateProps, dispatchProps, ownProps) {
  const handleCloseSignupModal = (defaultPath = '/') => {
    const changes = { settings: { signupInProgress: false } }
    return dispatchProps.updateUserSettings(changes).then(() => {
      dispatchProps.resetReturnToURL()
      dispatchProps.push(stateProps.returnToURL || defaultPath)
    })
  }
  return Object.assign({}, stateProps, dispatchProps, ownProps, { handleCloseSignupModal })
}
export default connect(mapStateToProps, mapDispatchToProps, mergeProps)
|
<filename>src/shared/context.tsx
import type { ReactNode } from "react";
import { createContext, useContext, useReducer } from "react";
// Context plumbing for the currently-connected wallet address.
const defaultState = {
  // Demo address shown before the user connects a wallet.
  walletAddress: "0x29D7d1dd5B6f9C864d9db560D72a247c178aE86B",
};

export type Action = {
  type: "CHANGE_WALLET_ADDRESS";
  payload: string;
};

// Dispatch signature exposed through the context.
export type DispatchW = (action: Action) => void;
export type State = typeof defaultState;

// undefined outside a <WalletProvider>; useWallet() guards against that.
const WalletContext = createContext<
  { state: State; dispatch: DispatchW } | undefined
>(undefined);
// Reducer for wallet state. Only one action exists today; any other
// action type is a programming error and fails loudly.
function walletReducer(state: State, action: Action) {
  if (action.type === "CHANGE_WALLET_ADDRESS") {
    return { walletAddress: action.payload };
  }
  throw new Error("There is no such action: " + action.type);
}
// Provides { state, dispatch } for wallet data to the component tree.
export function WalletProvider({ children }: { children: ReactNode }) {
  const [state, dispatch] = useReducer(walletReducer, defaultState);
  return (
    <WalletContext.Provider value={{ state, dispatch }}>
      {children}
    </WalletContext.Provider>
  );
}
export function useWallet() {
const context = useContext(WalletContext);
if (!context) {
throw new Error("useWallet must be used inside a WalletContext");
}
return context;
}
|
// Returns the combined length of the space-separated words in `str`,
// i.e. the number of characters that are not single-space separators.
function parseStr(str) {
  return str.split(" ").reduce((total, word) => total + word.length, 0);
}
<reponame>turbo124/taskmanager<filename>resources/assets/js/components/common/dropdowns/CustomerDropdown.js
import React, { Component } from 'react'
import { Input } from 'reactstrap'
import Select from 'react-select'
import { translations } from '../../utils/_translations'
import CustomerRepository from '../../repositories/CustomerRepository'
/**
 * Customer picker. Renders either a plain <Input type="select"> (when
 * disabled) or a searchable react-select control. Customers come from
 * props.customers when provided, otherwise they are fetched from the
 * CustomerRepository on mount. Selection is reported to the parent via
 * props.handleInputChanges with a synthetic event named "customer_id".
 */
export default class CustomerDropdown extends Component {
    constructor (props) {
        super(props)
        this.state = {
            customers: []
        }
        this.getCustomers = this.getCustomers.bind(this)
    }

    componentDidMount () {
        if (!this.props.customers || !this.props.customers.length) {
            this.getCustomers()
        } else {
            // NOTE(review): unshift mutates the customers array passed in
            // by the parent — confirm callers tolerate the placeholder entry.
            this.props.customers.unshift({ id: '', name: 'Select Customer' })
            this.setState({ customers: this.props.customers })
        }
    }

    // Renders the validation message for `field`, if the parent passed one.
    renderErrorFor (field) {
        if (this.hasErrorFor(field)) {
            return (
                <span className='invalid-feedback d-block'>
                    <strong>{this.props.errors[field][0]}</strong>
                </span>
            )
        }
    }

    hasErrorFor (field) {
        return this.props.errors && !!this.props.errors[field]
    }

    // Fetches the customer list and prepends the placeholder option.
    getCustomers () {
        const customerRepository = new CustomerRepository()
        customerRepository.get().then(response => {
            if (!response) {
                alert('error')
            }
            this.setState({ customers: response }, () => {
                console.log('customers', this.state.customers)
                this.state.customers.unshift({ id: '', name: 'Select Customer' })
            })
        })
    }

    // Adapts a react-select selection into the synthetic-event shape the
    // parent's handleInputChanges expects.
    handleChange (value, name) {
        const e = {
            target: {
                id: name,
                name: name,
                value: value.id
            }
        }
        this.props.handleInputChanges(e)
    }

    render () {
        const customerList = <option value="">Select Customer</option>
        let options = null
        if (this.state.customers && this.state.customers.length) {
            options = this.state.customers.map((customer, index) => (
                <option key={index} value={customer.id}>{customer.name}</option>
            ))
        }
        // react-select expects the selected option object(s), not the raw id.
        const customer = this.props.customer ? this.state.customers.filter(option => option.id === this.props.customer) : null
        const name = 'customer_id'
        const selectList = this.props.disabled
            ? <Input disabled value={this.props.customer} onChange={this.props.handleInputChanges} type="select"
                name={name} id={name}>{customerList}{options}</Input> : <Select
                placeholder={translations.select_option}
                className="flex-grow-1"
                classNamePrefix="select"
                name={name}
                value={customer}
                options={this.state.customers}
                getOptionLabel={option => option.name}
                getOptionValue={option => option.id}
                onChange={(value) => this.handleChange(value, name)}
            />
        return (
            <React.Fragment>
                {selectList}
                {this.renderErrorFor('customer_id')}
            </React.Fragment>
        )
    }
}
|
tapefile.write(tape_line+'\n')
|
import '../modules/job-search-sk';
import '../modules/task-scheduler-scaffold-sk';
import '../modules/colors.css';
import { GetTaskSchedulerService } from '../modules/rpc';
import { JobSearchSk } from '../modules/job-search-sk/job-search-sk';
// Page bootstrap: wire the <job-search-sk> element to the Task Scheduler
// RPC backend so it can issue search requests.
const ele = <JobSearchSk>document.querySelector('job-search-sk');
ele.rpc = GetTaskSchedulerService(ele);
|
#!/bin/bash
#
# We enable docker if either:
# - we detect the DOCKER_HOST envvar, overriding the default socket location
#   (in that case, we trust the user wants docker integration and don't check existence)
# - we find the docker socket at its default location
if [[ -z "${DOCKER_HOST}" && ! -S /var/run/docker.sock ]]; then
    exit 0
fi

# Set a config for vanilla Docker if no orchestrator was detected
# by the 50-* scripts.
# Don't override /etc/stackstate-agent/stackstate.yaml if it exists.
if [[ ! -e /etc/stackstate-agent/stackstate.yaml ]]; then
    ln -s /etc/stackstate-agent/stackstate-docker.yaml \
        /etc/stackstate-agent/stackstate.yaml
fi

# Enable the docker corecheck by promoting the shipped example config to
# a .default config (only when no .default is present yet).
if [[ ! -e /etc/stackstate-agent/conf.d/docker.d/conf.yaml.default && -e /etc/stackstate-agent/conf.d/docker.d/conf.yaml.example ]]; then
    mv /etc/stackstate-agent/conf.d/docker.d/conf.yaml.example \
        /etc/stackstate-agent/conf.d/docker.d/conf.yaml.default
fi
|
#!/bin/bash
# Renew (extend) an existing SSTP client account stored in
# /var/lib/premium-script/data-user-sstp. Each client is recorded as a
# line of the form "### <username> <expiry-date>".

NUMBER_OF_CLIENTS=$(grep -c -E "^### " "/var/lib/premium-script/data-user-sstp")
if [[ ${NUMBER_OF_CLIENTS} == '0' ]]; then
	clear
	echo ""
	echo "You have no existing clients!"
	exit 1
fi

clear
echo ""
echo "Select the existing client you want to renew"
echo " Press CTRL+C to return"
echo -e "==============================="
# Numbered list of "username expiry" pairs.
grep -E "^### " "/var/lib/premium-script/data-user-sstp" | cut -d ' ' -f 2-3 | nl -s ') '

# Ask until a number within [1, NUMBER_OF_CLIENTS] is entered.
# FIX: the single-client prompt used to test CLIENT_NUMBER (unset on the
# first pass) instead of NUMBER_OF_CLIENTS, so "[1]" was never shown.
# Also: read -r prevents backslash mangling.
until [[ ${CLIENT_NUMBER} -ge 1 && ${CLIENT_NUMBER} -le ${NUMBER_OF_CLIENTS} ]]; do
	if [[ ${NUMBER_OF_CLIENTS} == '1' ]]; then
		read -rp "Select one client [1]: " CLIENT_NUMBER
	else
		read -rp "Select one client [1-${NUMBER_OF_CLIENTS}]: " CLIENT_NUMBER
	fi
done

read -rp "Expired (days): " masaaktif

# Username and current expiry of the chosen client (Nth match).
user=$(grep -E "^### " "/var/lib/premium-script/data-user-sstp" | cut -d ' ' -f 2 | sed -n "${CLIENT_NUMBER}"p)
exp=$(grep -E "^### " "/var/lib/premium-script/data-user-sstp" | cut -d ' ' -f 3 | sed -n "${CLIENT_NUMBER}"p)

# New expiry = (days remaining until current expiry) + requested days,
# counted from today.
now=$(date +%Y-%m-%d)
d1=$(date -d "$exp" +%s)
d2=$(date -d "$now" +%s)
exp2=$(( (d1 - d2) / 86400 ))
exp3=$(( exp2 + masaaktif ))
exp4=$(date -d "$exp3 days" +"%Y-%m-%d")

# Rewrite the client's record in place with the new expiry date.
sed -i "s/### $user $exp/### $user $exp4/g" /var/lib/premium-script/data-user-sstp

clear
echo ""
echo " Akun SSTP berhasil diperpanjang"
echo " =========================="
echo " Client Name : $user"
echo " Expired On : $exp4"
echo " =========================="
echo " By HAWENG"
|
package fi.tuni.minesweeper;
import android.app.Activity;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.SharedPreferences;
import android.graphics.Typeface;
import android.os.Bundle;
import android.os.IBinder;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Switch;
import android.widget.TextView;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import androidx.preference.PreferenceFragmentCompat;
import androidx.room.Room;
/**
* Settings activity contains all customizable settings for the application
* @author <NAME> <<EMAIL>>
* @version 2020.04.22
* @since 2020.03.24
*/
public class SettingsActivity extends AppCompatActivity {
    // Used as the Context for toasts.
    Activity messenger;
    private ServiceConnection connectService;
    SharedPreferences settings;
    private static final String SETTINGS = "UserSettings";

    /**
     * preparing under the hood running systems (database, settings, soundService)
     * @param savedInstanceState previously saved state, passed to the superclass
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_settings);
        messenger = this;
        connectService = new SettingsActivity.MyConnection();
        settings = getSharedPreferences(SETTINGS, Context.MODE_PRIVATE);
    }

    // SoundPlayer connection related variables
    private SoundPlayer soundService;
    private boolean soundBound = false;
    private static ScoreDatabase scoreDatabase;

    // Setting related variables (current value + the switch that shows it)
    private boolean soundStatus;
    private Switch soundSwitch;
    private boolean vibrationStatus;
    private Switch vibrationSwitch;
    private boolean debugStatus;
    private Switch debugSwitch;
    // Remaining taps needed on the hidden trigger to reveal debug mode.
    private int debugTrigger = 20;

    /**
     * onStart binds the SoundPlayer upon application start and fetches settings and database.
     * Switches are initialized to the persisted values; the debug switch
     * stays hidden unless debug mode was previously enabled.
     */
    @Override
    protected void onStart() {
        super.onStart();
        Intent intent = new Intent(this, SoundPlayer.class);
        bindService(intent, connectService, Context.BIND_AUTO_CREATE);
        scoreDatabase = Room.databaseBuilder(getApplicationContext(),
                ScoreDatabase.class, "scoredb")
                .allowMainThreadQueries()
                .fallbackToDestructiveMigration()
                .build();
        soundStatus = settings.getBoolean("sound", true);
        vibrationStatus = settings.getBoolean("vibration", true);
        debugStatus = settings.getBoolean("debug", false);
        soundSwitch = findViewById(R.id.soundsStatus);
        vibrationSwitch = findViewById(R.id.vibrationStatus);
        debugSwitch = findViewById(R.id.debugStatus);
        if (debugStatus) {
            debugSwitch.setVisibility(View.VISIBLE);
        }
        soundSwitch.setChecked(soundStatus);
        vibrationSwitch.setChecked(vibrationStatus);
        debugSwitch.setChecked(debugStatus);
    }

    // editor object used in setting editing
    SharedPreferences.Editor editor;

    /**
     * Settings are saved upon exiting the activity.
     */
    @Override
    protected void onPause() {
        editor = settings.edit();
        editor.putBoolean("sound", soundStatus);
        editor.putBoolean("vibration", vibrationStatus);
        editor.putBoolean("debug", debugStatus);
        editor.commit();
        System.out.println("sound:" + soundStatus +
                ", vibration:" + vibrationStatus +
                ", debug" + debugStatus);
        toaster("Settings saved successfully.");
        super.onPause();
    }

    /**
     * responds to user changing the setttings.
     * After the user changes a setting, the state of given setting will be updated
     * and the user will be notified about the setting change.
     * @param v the Switch that was tapped
     */
    public void clicked(View v) {
        System.out.println(v.getId());
        switch (v.getId()) {
            case R.id.soundsStatus:
                soundStatus = !soundStatus;
                System.out.println("Soundstatus was changed to " + soundStatus);
                soundService.toggleSound(soundStatus);
                // Feedback after user has toggled
                if (soundStatus) {
                    toaster("Sounds enabled");
                } else if (!soundStatus) {
                    toaster("Sounds disabled");
                }
                playSound(R.raw.click);
                break;
            case R.id.vibrationStatus:
                vibrationStatus = !vibrationStatus;
                System.out.println("VibrationStatus was changed to " + vibrationStatus);
                playSound(R.raw.click);
                // Feedback after user has toggled
                if (vibrationStatus) {
                    toaster("Vibration enabled");
                } else if (!vibrationStatus) {
                    toaster("Vibration disabled");
                }
                break;
            case R.id.debugStatus:
                debugStatus = !debugStatus;
                System.out.println("DebugStatus was changed to " + debugStatus);
                playSound(R.raw.click);
                // Feedback after user has toggled; disabling also hides the
                // switch again and re-arms the 20-tap trigger.
                if (!debugStatus) {
                    toaster("Debugging disabled");
                    debugSwitch.setVisibility(View.INVISIBLE);
                    debugTrigger = 20;
                }
                break;
        }
    }

    /**
     * Hidden entry point for debug mode: after 20 taps the debug switch
     * is revealed and debug mode is switched on.
     * @param v the tapped View
     */
    public void enableDebug(View v) {
        if (debugTrigger > 0 && !debugStatus) {
            debugTrigger--;
            System.out.println("Debug mode enabled in " + debugTrigger + " clicks");
            if (debugTrigger <= 0) {
                toaster("Debugging enabled");
                debugStatus = !debugStatus;
                debugSwitch.setChecked(debugStatus);
                debugSwitch.setVisibility(View.VISIBLE);
            }
        }
    }

    // failsafe variables used for confirming high score reset functionality
    private boolean failsafeDisabled = false;
    private int failsafeCount = 5;

    /**
     * Deletes all high scores after specified amount of clicks.
     * The first 5 taps only count down and warn; once the failsafe is
     * disarmed every further tap wipes the score table.
     * @param v Clicked View
     */
    public void deleteScoreData(View v) {
        if (failsafeCount == 0) {
            failsafeDisabled = true;
        }
        if (failsafeDisabled) {
            SettingsActivity.scoreDatabase.scoreDao().deleteAll();
            System.out.println("Deleted All database entries");
            toaster("Deleted all High Scores");
        } else {
            toaster("Erases permanently ALL data. Tap " + failsafeCount + " times to confirm the deletion");
            failsafeCount--;
        }
    }

    /**
     * Just a method for easier toasting.
     * @param text message shown to the user
     */
    public void toaster(String text) {
        Toast.makeText(messenger, text, Toast.LENGTH_LONG).show();
    }

    /**
     * sends given sound files to SoundPlayer, which plays the sounds
     * (no-op while the service is not bound).
     * @param audioId raw resource id of the sound to play
     */
    private void playSound(int audioId) {
        if (soundBound) {
            soundService.playSound(audioId);
        }
    }

    /**
     * MyConnection maintains the connection between SoundPlayer and this activity.
     */
    class MyConnection implements ServiceConnection {
        @Override
        public void onServiceConnected(ComponentName className,
                                       IBinder service) {
            // After bound to SoundPlayer, cast the IBinder and get SoundService instance
            System.out.println("Fetching soundService from binder");
            MyBinder binder = (MyBinder) service;
            soundService = binder.getSoundPlayer();
            soundBound = true;
        }

        @Override
        public void onServiceDisconnected(ComponentName arg0) {
            soundBound = false;
        }
    }
}
import Widgets from '../fixtures/Widgets';
import { createWidget } from '../support/widget';
import { dashboardNameGen } from '../fixtures/Dashboard';
const example = Widgets.whiteSpace;
// Verifies that dashboards survive a page reload only after an explicit
// save (cy.saveState), and not before.
describe('Dashboard Persistence', () => {
  beforeEach(() => {
    cy.visit('/');
    cy.login();
  });

  it('Not saved dashboard is not displayed after refresh', () => {
    const name = dashboardNameGen();
    cy.addDashboard(name);
    // Reload WITHOUT saving; the new board must not survive.
    cy.visit('/');
    cy.get('[data-cy="navbar-show-drawer-button"]').click();
    cy.contains('[data-cy="board-card"]', name).should('not.visible');
  });

  it('Saved dashboard is displayed after refresh', () => {
    const name = dashboardNameGen();
    cy.addDashboard(name);
    cy.saveState();
    cy.visit('/');
    cy.get('[data-cy="navbar-show-drawer-button"]').click();
    cy.contains('[data-cy="board-card"]', name)
      .scrollIntoView()
      .should('is.visible');
    // Clean up: delete the board and persist the deletion.
    cy.contains('[data-cy="board-card"]', name)
      .find('[data-cy="board-card-delete-button"]')
      .scrollIntoView()
      .click();
    cy.get('[data-cy="confirmation-dialog-ok"]').click();
    cy.saveState();
  });
});
// Verifies that widgets on a dashboard survive a page reload only after
// an explicit save (cy.saveState), and not before.
describe('Widget Persistence', () => {
  const dashboardName = 'Welcome to Cogboard';
  const title = `Test-${example.name}`;

  beforeEach(() => {
    cy.visit('/');
    cy.login();
    cy.openDrawer();
    cy.chooseDashboard(dashboardName);
    cy.clickAddWidgetButton();
  });

  it('Not saved Example widget is not displayed after refresh', () => {
    const widget = createWidget(Widgets.whiteSpace.name)
      .configure(true)
      .assertTitle();
    // Reload WITHOUT saving; the widget must not survive.
    cy.visit('/');
    cy.openDrawer();
    cy.chooseDashboard(dashboardName);
    cy.contains('h3', widget.title).should('not.visible');
  });

  it('Saved Example widget is displayed after refresh', () => {
    const widget = createWidget(Widgets.whiteSpace.name)
      .configure(true)
      .assertTitle();
    cy.saveState();
    cy.visit('/');
    cy.openDrawer();
    cy.chooseDashboard(dashboardName);
    widget.assertTitle();
    // Clean up: remove the widget and persist the removal.
    cy.removeWidget(title);
    cy.saveState();
  });
});
|
import argparse
class Bunch(object):
    """Lightweight attribute bag: exposes the keys of a dict as attributes."""

    def __init__(self, adict):
        # Copy every key/value pair into the instance namespace so callers
        # can write bunch.key instead of adict["key"].
        for key, value in adict.items():
            setattr(self, key, value)
def get_args(dummy=False):
    """Parse command-line options for the GAN-Shot-Learning experiment.

    Args:
        dummy: when True, parse an empty argument list so every option
            falls back to its default (useful outside a real CLI context).

    Returns:
        Tuple ``(batch_size, num_gpus, args)`` where ``args`` is a ``Bunch``
        whose ``"True"``/``"False"`` string values have been converted to
        real booleans. Each option is echoed to stdout as a side effect.
    """
    parser = argparse.ArgumentParser(description='Welcome to GAN-Shot-Learning script')
    # Declarative option table; registration order is preserved so that
    # --help output and the printed key order stay the same.
    option_specs = [
        ('--batch_size', dict(nargs="?", type=int, default=32,
                              help='batch_size for experiment')),
        ('--discriminator_inner_layers', dict(nargs="?", type=int, default=1,
                                              help='Number of inner layers per multi layer in the discriminator')),
        ('--generator_inner_layers', dict(nargs="?", type=int, default=1,
                                          help='Number of inner layers per multi layer in the generator')),
        ('--experiment_title', dict(nargs="?", type=str, default="omniglot_dagan_experiment",
                                    help='Experiment name')),
        ('--continue_from_epoch', dict(nargs="?", type=int, default=-1,
                                       help='continue from checkpoint of epoch')),
        ('--num_of_gpus', dict(nargs="?", type=int, default=1,
                               help='Number of GPUs to use for training')),
        ('--z_dim', dict(nargs="?", type=int, default=100,
                         help='The dimensionality of the z input')),
        ('--dropout_rate_value', dict(type=float, default=0.5,
                                      help='A dropout rate placeholder or a scalar to use throughout the network')),
        ('--num_generations', dict(nargs="?", type=int, default=64,
                                   help='The number of samples generated for use in the spherical interpolations at the end of '
                                        'each epoch')),
        ('--use_wide_connections', dict(nargs="?", type=str, default="False",
                                        help='Whether to use wide connections in discriminator')),
    ]
    for flag, kwargs in option_specs:
        parser.add_argument(flag, **kwargs)

    args = parser.parse_args(args=[]) if dummy else parser.parse_args()
    batch_size = args.batch_size
    num_gpus = args.num_of_gpus

    # Echo every option and normalize "True"/"False" strings into booleans.
    args_dict = vars(args)
    for key, value in args_dict.items():
        print(key, value)
        args_dict[key] = {"True": True, "False": False}.get(value, value)

    return batch_size, num_gpus, Bunch(args_dict)
<filename>src/main/java/org/tom_v_squad/soiwenttoaconcert/controllers/VenueController.java
package org.tom_v_squad.soiwenttoaconcert.controllers;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.Errors;
import org.springframework.web.bind.annotation.*;
import org.tom_v_squad.soiwenttoaconcert.data.ArtistRepository;
import org.tom_v_squad.soiwenttoaconcert.data.EventRepository;
import org.tom_v_squad.soiwenttoaconcert.data.UserRepository;
import org.tom_v_squad.soiwenttoaconcert.data.VenueRepository;
import org.tom_v_squad.soiwenttoaconcert.models.Artist;
import org.tom_v_squad.soiwenttoaconcert.models.Event;
import org.tom_v_squad.soiwenttoaconcert.models.Venue;
import javax.validation.Valid;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
@Controller
@RequestMapping("venue")
public class VenueController {

    @Autowired
    private VenueRepository venueRepository;

    @Autowired
    private EventRepository eventRepository;

    @Autowired
    private UserRepository userRepository;

    @Autowired
    private ArtistRepository artistRepository;

    // Removed: unused private static List<Venue> venues — all venue state
    // lives in venueRepository.

    /**
     * Lists all venues.
     */
    @RequestMapping("")
    public String displayVenues(Model model) {
        model.addAttribute("title", "Venues");
        model.addAttribute("venues", venueRepository.findAll());
        return "venue/index";
    }

    /**
     * Shows the empty create-venue form.
     */
    @GetMapping("create")
    public String displayAddVenue(Model model) {
        model.addAttribute(new Venue());
        return "venue/create";
    }

    /**
     * Validates and persists a new venue; redisplays the form on errors.
     */
    @PostMapping("create")
    public String processAddVenueForm(@ModelAttribute @Valid Venue newVenue, Errors errors, Model model) {
        if (errors.hasErrors()) {
            return "venue/create";
        }
        venueRepository.save(newVenue);
        return "redirect:/venue";
    }

    /**
     * Shows the delete-confirmation page for the given venue id.
     * Falls through to the same view with an error title when the id is unknown.
     */
    @GetMapping("delete")
    public String displayDeleteVenueForm(@RequestParam Integer venueId, Model model) {
        Optional<Venue> result = venueRepository.findById(venueId);
        System.out.println(venueId);
        if (result.isPresent()) {
            // Bug fix: the title previously read "Delete Artist" on the
            // venue-deletion page.
            model.addAttribute("title", "Delete Venue");
            model.addAttribute("venue", result.get());
        } else {
            model.addAttribute("title", "Invalid Venue ID: " + venueId);
        }
        return "venue/delete";
    }

    /**
     * Deletes the venue when a valid id was posted; silently ignores
     * missing ids so the redirect is always safe.
     */
    @PostMapping("delete")
    public String processDeleteVenueForm(@RequestParam(required = false) Integer venueId) {
        if (venueId != null) {
            Optional<Venue> result = venueRepository.findById(venueId);
            if (result.isPresent()) {
                venueRepository.delete(result.get());
            }
        }
        return "redirect:";
    }

    /**
     * Shows the edit form pre-populated with the selected venue.
     */
    @GetMapping("edit")
    public String displayEditVenueForm(@RequestParam Integer venueId, Model model) {
        Optional<Venue> result = venueRepository.findById(venueId);
        if (result.isEmpty()) {
            model.addAttribute("title", "Invalid Venue ID: " + venueId);
        } else {
            model.addAttribute("venue", result.get());
        }
        return "venue/edit";
    }

    /**
     * Applies an edit by saving the submitted venue and removing the old row.
     *
     * NOTE(review): saving newVenue (which carries no id) inserts a fresh
     * entity and then the original id is deleted, so the venue's id changes
     * on every edit — confirm no other table references venue ids. Also,
     * newVenue is not annotated @Valid, so errors.hasErrors() can never be
     * true here — confirm whether validation was intended.
     */
    @PostMapping("edit")
    public String processEditVenueForm(@RequestParam Integer venueId, @ModelAttribute Venue newVenue, Errors errors, Model model) {
        if (errors.hasErrors()) {
            model.addAttribute("title", "Create Venue");
            return "venue/create";
        }
        venueRepository.save(newVenue);
        venueRepository.deleteById(venueId);
        return "redirect:";
    }
}
|
package gobpp
// init runs SetupBasics as soon as the package is imported.
//
// NOTE(review): SetupBasics is defined elsewhere in this package; relying on
// init() for side effects makes behavior depend on import alone — confirm
// callers expect setup to happen implicitly rather than via an explicit call.
func init() {
	SetupBasics()
}
|
#!/usr/bin/env bash
# Copyright (c) .NET Foundation and contributors. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.

# Obtain the location of the bash script to figure out where the root of the repo is.
source="${BASH_SOURCE[0]}"

# Resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
    scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
    source="$(readlink "$source")"
    # if $source was a relative symlink, we need to resolve it relative to the path where the
    # symlink file was located
    [[ $source != /* ]] && source="$scriptroot/$source"
done

# The repo root is one directory above the one containing this script.
__ProjectRoot="$( cd -P "$( dirname "$source" )/.." && pwd )"

# Defaults; most of these are overridden by uname-based platform detection
# and by the argument-parsing loop further down.
__BuildOS=Linux
__HostOS=Linux
__BuildArch=x64
__HostArch=x64
__BuildType=Debug
__PortableBuild=1
__ExtraCmakeArgs=""
# 0.0 means "not specified"; a default clang version is selected later.
__ClangMajorVersion=0
__ClangMinorVersion=0
__CrossBuild=false
__NumProc=1
__Build=false
__Test=false
__DailyTest=
__CI=false
__Verbosity=minimal
__TestArgs=
__UnprocessedBuildArgs=
__Alpine=false
# Print the command-line help text and terminate with a non-zero status.
usage()
{
    cat <<EOF
Usage: $0 [options]
--build-native - build native components
--test - test native components
--daily-test - test native components for daily build job
--architecture <x64|x86|arm|armel|arm64>
--configuration <debug|release>
--rootfs <ROOTFS_DIR>
--clangx.y - optional argument to build using clang version x.y
--ci - CI lab build
--verbosity <q[uiet]|m[inimal]|n[ormal]|d[etailed]|diag[nostic]>
--help - this help message
EOF
    exit 1
}
# Fold the single argument to lower case and print it on stdout.
#   $1 - input string
to_lowercase()
{
    echo "$1" | tr '[:upper:]' '[:lower:]'
    return 0
}
# Argument types supported by this script:
#
# Build architecture - valid values are: x64, x86, arm, armel, arm64
# Build Type - valid values are: debug, release
#
# Set the default arguments for build

# Use uname to determine what the CPU is.
CPUName=$(uname -p)

# Some Linux platforms report unknown for platform, but the arch for machine.
if [ "$CPUName" == "unknown" ]; then
    CPUName=$(uname -m)
fi

# Map the reported CPU onto build/host architecture defaults; unknown CPUs
# fall back to x64.
case $CPUName in
    i686)
        echo "Unsupported CPU $CPUName detected, build might not succeed!"
        __BuildArch=x86
        __HostArch=x86
        ;;

    x86_64)
        __BuildArch=x64
        __HostArch=x64
        ;;

    armv7l)
        echo "Unsupported CPU $CPUName detected, build might not succeed!"
        __BuildArch=arm
        __HostArch=arm
        ;;

    aarch64)
        __BuildArch=arm64
        __HostArch=arm64
        ;;

    *)
        echo "Unknown CPU $CPUName detected, configuring as if for x64"
        __BuildArch=x64
        __HostArch=x64
        ;;
esac

# Use uname to determine what the OS is.
OSName=$(uname -s)
case $OSName in
    Linux)
        __BuildOS=Linux
        __HostOS=Linux
        ;;

    Darwin)
        __BuildOS=OSX
        __HostOS=OSX
        ;;

    FreeBSD)
        __BuildOS=FreeBSD
        __HostOS=FreeBSD
        ;;

    OpenBSD)
        __BuildOS=OpenBSD
        __HostOS=OpenBSD
        ;;

    NetBSD)
        __BuildOS=NetBSD
        __HostOS=NetBSD
        ;;

    SunOS)
        __BuildOS=SunOS
        __HostOS=SunOS
        ;;

    *)
        echo "Unsupported OS $OSName detected, configuring as if for Linux"
        __BuildOS=Linux
        __HostOS=Linux
        ;;
esac
# Consume script arguments. Flags are matched case-insensitively via
# to_lowercase; anything unrecognized is forwarded to the managed build
# through $__UnprocessedBuildArgs.
while :; do
    if [ $# -le 0 ]; then
        break
    fi

    lowerI="$(to_lowercase "$1")"
    case $lowerI in
        -\?|-h|--help)
            usage
            # NOTE: usage already exits 1, so this exit is unreachable.
            exit 1
            ;;

        --build-native)
            __Build=true
            ;;

        # Passed to common build script when testing
        --test)
            __Test=true
            ;;

        --daily-test)
            __DailyTest="--daily-test"
            ;;

        --ci)
            __CI=true
            __TestArgs="$__TestArgs $1"
            ;;

        --solution)
            # Takes a value; forwarded verbatim to the test args.
            __TestArgs="$__TestArgs $1 $2"
            shift
            ;;

        --verbosity)
            __Verbosity=$2
            __TestArgs="$__TestArgs $1 $2"
            shift
            ;;

        --configuration)
            __BuildType="$(to_lowercase "$2")"
            shift
            ;;

        --architecture)
            __BuildArch="$(to_lowercase "$2")"
            shift
            ;;

        --rootfs)
            export ROOTFS_DIR="$2"
            shift
            ;;

        --portablebuild=false)
            __PortableBuild=0
            ;;

        # Explicit clang version selection; each flag pins major.minor.
        --clang3.5)
            __ClangMajorVersion=3
            __ClangMinorVersion=5
            ;;

        --clang3.6)
            __ClangMajorVersion=3
            __ClangMinorVersion=6
            ;;

        --clang3.7)
            __ClangMajorVersion=3
            __ClangMinorVersion=7
            ;;

        --clang3.8)
            __ClangMajorVersion=3
            __ClangMinorVersion=8
            ;;

        --clang3.9)
            __ClangMajorVersion=3
            __ClangMinorVersion=9
            ;;

        --clang4.0)
            __ClangMajorVersion=4
            __ClangMinorVersion=0
            ;;

        --clang5.0)
            __ClangMajorVersion=5
            __ClangMinorVersion=0
            ;;

        # Ignored for a native build
        --build|--rebuild|--sign|--restore|--pack|--publish|--preparemachine|--projects|--solution)
            ;;

        *)
            __UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
            ;;
    esac

    shift
done
# Normalize the (lowercased) configuration back to the casing CMake expects.
if [ "$__BuildType" == "release" ]; then
    __BuildType=Release
fi
if [ "$__BuildType" == "debug" ]; then
    __BuildType=Debug
fi

# Needs to be set for generate version source file/msbuild
if [[ -z $NUGET_PACKAGES ]]; then
    if [[ $__CI == true ]]; then
        export NUGET_PACKAGES="$__ProjectRoot/.packages"
    else
        export NUGET_PACKAGES="$HOME/.nuget/packages"
    fi
fi

echo $NUGET_PACKAGES

# Derived output locations, keyed by OS/arch/configuration.
__RootBinDir=$__ProjectRoot/artifacts
__BinDir=$__RootBinDir/bin/$__BuildOS.$__BuildArch.$__BuildType
__LogDir=$__RootBinDir/log/$__BuildOS.$__BuildArch.$__BuildType
__IntermediatesDir=$__RootBinDir/obj/$__BuildOS.$__BuildArch.$__BuildType
__ResultsDir=$__RootBinDir/TestResults/$__BuildType
__PackagesBinDir=$__RootBinDir/packages/$__BuildType/Shipping
__ExtraCmakeArgs="-DCLR_MANAGED_BINARY_DIR=$__RootBinDir/bin -DCLR_BUILD_TYPE=$__BuildType"

# The repo-local dotnet CLI is required for version generation below.
__DotNetCli=$__ProjectRoot/.dotnet/dotnet
__DotNetRuntimeVersion=2.1.11

if [ ! -e $__DotNetCli ]; then
    echo "dotnet cli not installed $__DotNetCli"
    exit 1
fi

# Specify path to be set for CMAKE_INSTALL_PREFIX.
# This is where all built native libraries will copied to.
export __CMakeBinDir="$__BinDir"

# Set default clang version
if [[ $__ClangMajorVersion == 0 && $__ClangMinorVersion == 0 ]]; then
    if [[ "$__BuildArch" == "arm" || "$__BuildArch" == "armel" ]]; then
        __ClangMajorVersion=5
        __ClangMinorVersion=0
    else
        __ClangMajorVersion=3
        __ClangMinorVersion=9
    fi
fi

if [[ "$__BuildArch" == "armel" ]]; then
    # Armel cross build is Tizen specific and does not support Portable RID build
    __PortableBuild=0
fi

# Configure environment if we are doing a cross compile.
if [ "${__BuildArch}" != "${__HostArch}" ]; then
    __CrossBuild=true
    export CROSSCOMPILE=1
    if ! [[ -n "$ROOTFS_DIR" ]]; then
        echo "ERROR: ROOTFS_DIR not set for cross build"
        exit 1
    fi
    echo "ROOTFS_DIR: $ROOTFS_DIR"
fi

mkdir -p "$__IntermediatesDir"
mkdir -p "$__LogDir"
mkdir -p "$__CMakeBinDir"
# build_native <platformArch> <intermediatesForBuild> <extraCmakeArguments>
#
# Generates the native build system via gen-buildsys-clang.sh in the given
# intermediates directory, then runs the build tool, exiting the script on
# any failure.
build_native()
{
    platformArch="$1"
    intermediatesForBuild="$2"
    extraCmakeArguments="$3"

    # All set to commence the build
    echo "Commencing $__DistroRid build for $__BuildOS.$__BuildArch.$__BuildType in $intermediatesForBuild"

    generator=""
    buildFile="Makefile"
    buildTool="make"
    scriptDir="$__ProjectRoot/eng"

    pushd "$intermediatesForBuild"
    echo "Invoking \"$scriptDir/gen-buildsys-clang.sh\" \"$__ProjectRoot\" $__ClangMajorVersion \"$__ClangMinorVersion\" $platformArch "$scriptDir" $__BuildType $generator $extraCmakeArguments $__cmakeargs"
    "$scriptDir/gen-buildsys-clang.sh" "$__ProjectRoot" $__ClangMajorVersion "$__ClangMinorVersion" $platformArch "$scriptDir" $__BuildType $generator "$extraCmakeArguments" "$__cmakeargs"
    popd

    # Check that the makefiles were created.
    if [ ! -f "$intermediatesForBuild/$buildFile" ]; then
        echo "Failed to generate build project!"
        exit 1
    fi

    pushd "$intermediatesForBuild"
    echo "Executing $buildTool install -j $__NumProc"

    # Bug fix: "$?" after a pipeline reports the exit status of the LAST
    # command (tee), so build failures were silently swallowed. Check the
    # build tool's own status via PIPESTATUS[0] instead.
    $buildTool install -j $__NumProc | tee $__LogDir/make.log
    if [ "${PIPESTATUS[0]}" != 0 ]; then
        echo "Failed to build."
        exit 1
    fi

    popd
}
# initTargetDistroRid: compute the target distro RID by delegating to
# initDistroRidGlobal from eng/init-distro-rid.sh (which sets __DistroRid
# as a side effect — confirm against that script; not visible here).
initTargetDistroRid()
{
    source "$__ProjectRoot/eng/init-distro-rid.sh"

    local passedRootfsDir=""

    # Only pass ROOTFS_DIR if cross is specified.
    if [ "$__CrossBuild" == true ]; then
        passedRootfsDir=${ROOTFS_DIR}
    fi

    initDistroRidGlobal ${__BuildOS} ${__BuildArch} ${__PortableBuild} ${passedRootfsDir}
}
# Init the target distro name
initTargetDistroRid

echo "RID: $__DistroRid"

# macOS only: point the lldb plugin build at Xcode's LLDB framework.
if [ "$__HostOS" == "OSX" ]; then
    export LLDB_H=$__ProjectRoot/src/SOS/lldbplugin/swift-4.0
    export LLDB_LIB=/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/LLDB
    export LLDB_PATH=/Applications/Xcode.app/Contents/Developer/usr/bin/lldb

    export MACOSX_DEPLOYMENT_TARGET=10.12

    # If Xcode 9.2 exists (like on the CI/build machines), use that. Xcode 9.3 or
    # greater (swift 4.1 lldb) doesn't work that well (seg faults on exit).
    if [ -f "/Applications/Xcode_9.2.app/Contents/Developer/usr/bin/lldb" ]; then
        if [ -f "/Applications/Xcode_9.2.app/Contents/SharedFrameworks/LLDB.framework/LLDB" ]; then
            export LLDB_PATH=/Applications/Xcode_9.2.app/Contents/Developer/usr/bin/lldb
            export LLDB_LIB=/Applications/Xcode_9.2.app/Contents/SharedFrameworks/LLDB.framework/LLDB
        fi
    fi

    if [ ! -f $LLDB_LIB ]; then
        echo "Cannot find the lldb library. Try installing Xcode."
        exit 1
    fi

    # Workaround bad python version in /usr/local/bin/python2.7 on lab machines
    export PATH=/usr/bin:$PATH
    which python
    python --version
fi
# Build native components
if [ $__Build == true ]; then
    # CI builds get a real generated version.cpp; local builds use an empty one.
    if [[ $__CI == true ]]; then
        echo "Generating Version Source File"
        __GenerateVersionLog="$__LogDir/GenerateVersion.binlog"
        $__DotNetCli msbuild $__ProjectRoot/eng/CreateVersionFile.csproj /v:$__Verbosity /bl:$__GenerateVersionLog /t:GenerateVersionFiles /p:GenerateVersionSourceFile=true /p:NativeVersionSourceFile="$__IntermediatesDir/version.cpp" /p:Configuration="$__BuildType" /p:Platform="$__BuildArch" $__UnprocessedBuildArgs
        if [ $? != 0 ]; then
            echo "Generating Version Source File FAILED"
            exit 1
        fi
    else
        echo "Generating Empty Version Source File"
        echo "" > "$__IntermediatesDir/version.cpp"
    fi

    build_native "$__BuildArch" "$__IntermediatesDir" "$__ExtraCmakeArgs"
fi

if [[ $__Build == true || $__Test == true ]]; then
    # Copy the native SOS binaries to where these tools expect for testing
    __dotnet_sos=$__RootBinDir/bin/dotnet-sos/$__BuildType/netcoreapp2.1/publish/$__DistroRid
    __dotnet_dump=$__RootBinDir/bin/dotnet-dump/$__BuildType/netcoreapp2.1/publish/$__DistroRid
    mkdir -p "$__dotnet_sos"
    mkdir -p "$__dotnet_dump"
    cp "$__BinDir"/* "$__dotnet_sos"
    echo "Copied SOS to $__dotnet_sos"
    cp "$__BinDir"/* "$__dotnet_dump"
    echo "Copied SOS to $__dotnet_dump"
fi

# Run SOS/lldbplugin tests
if [ $__Test == true ]; then
    # Tests do not run on the ARM family of architectures.
    if [[ "$__BuildArch" != "arm" && "$__BuildArch" != "armel" && "$__BuildArch" != "arm64" ]]; then
        # Install the other versions of .NET Core runtime we are going to test on
        "$__ProjectRoot/eng/install-test-runtimes.sh" --dotnet-directory "$__ProjectRoot/.dotnet" --runtime-version-21 "$__DotNetRuntimeVersion" --temp-directory "$__IntermediatesDir" --architecture "$__BuildArch" $__DailyTest

        # Probe for an lldb executable, trying versioned names first and
        # falling back to the unversioned one.
        if [ "$LLDB_PATH" == "" ]; then
            export LLDB_PATH="$(which lldb-3.9.1 2> /dev/null)"
            if [ "$LLDB_PATH" == "" ]; then
                export LLDB_PATH="$(which lldb-3.9 2> /dev/null)"
                if [ "$LLDB_PATH" == "" ]; then
                    export LLDB_PATH="$(which lldb-4.0 2> /dev/null)"
                    if [ "$LLDB_PATH" == "" ]; then
                        export LLDB_PATH="$(which lldb-5.0 2> /dev/null)"
                        if [ "$LLDB_PATH" == "" ]; then
                            export LLDB_PATH="$(which lldb 2> /dev/null)"
                        fi
                    fi
                fi
            fi
        fi

        if [ "$GDB_PATH" == "" ]; then
            export GDB_PATH="$(which gdb 2> /dev/null)"
        fi

        echo "lldb: '$LLDB_PATH' gdb: '$GDB_PATH'"

        # Run xunit SOS tests
        "$__ProjectRoot/eng/common/build.sh" --test --configuration "$__BuildType" $__TestArgs
        if [ $? != 0 ]; then
            exit 1
        fi

        # Skip Alpine because lldb doesn't work
        if [ $__Alpine == false ]; then
            if [ "$__BuildOS" == "OSX" ]; then
                __Plugin=$__CMakeBinDir/libsosplugin.dylib
            else
                __Plugin=$__CMakeBinDir/libsosplugin.so
            fi

            # Run lldb python tests
            "$__ProjectRoot/src/SOS/lldbplugin.tests/testsos.sh" "$__ProjectRoot" "$__Plugin" "$__DotNetRuntimeVersion" "$__RootBinDir/bin/TestDebuggee/$__BuildType/netcoreapp2.0/TestDebuggee.dll" "$__ResultsDir"
            if [ $? != 0 ]; then
                exit 1
            fi
        fi
    fi
fi
# Final status banner (fixed "sucessfully" typo in the user-facing message).
echo "BUILD: Repo successfully built."
echo "BUILD: Product binaries are available at $__CMakeBinDir"
|
#!/bin/bash

# Clean out any old sandbox, make a new one
OUTDIR=sandbox
rm -fr $OUTDIR; mkdir -p $OUTDIR

# Check for os: the Java classpath separator is ';' under Cygwin/Windows
# and ':' everywhere else.
SEP=:
case "`uname`" in
    CYGWIN* )
        SEP=";"
        ;;
esac
# cleanup: kill the helper JVMs, report the junit outcome recorded by the
# driver JVM in $OUTDIR/status.0, and exit with that status.
function cleanup () {
    kill -9 ${PID_1} ${PID_2} ${PID_3} ${PID_4} 1> /dev/null 2>&1
    wait 1> /dev/null 2>&1
    # Robustness fix: the status file is missing if the driver never ran
    # (e.g. cleanup fired on SIGINT before launch). Default to failure (1)
    # instead of tripping a "[: -ne: unary operator expected" error on an
    # empty $RC.
    RC=$(cat $OUTDIR/status.0 2> /dev/null)
    RC=${RC:-1}
    if [ "$RC" -ne 0 ]; then
        cat $OUTDIR/out.0
        echo h2o-persist-hdfs junit tests FAILED
    else
        echo h2o-persist-hdfs junit tests PASSED
    fi
    exit $RC
}
trap cleanup SIGTERM SIGINT

# The HDFS junit tests need a Hadoop core-site.xml to reach the cluster.
if [ -z "$JUNIT_CORE_SITE_PATH" ]; then
    echo "!!! Error: environment variable JUNIT_CORE_SITE_PATH is not defined. Aborting tests !!!"
    exit 1
fi

# Find java command
if [ -z "$TEST_JAVA_HOME" ]; then
    # Use default
    JAVA_CMD="java"
else
    # Use test java home
    JAVA_CMD="$TEST_JAVA_HOME/bin/java"
    # Increase XMX since JAVA_HOME can point to java6
    # NOTE(review): this branch is currently a no-op — JAVA_CMD is reassigned
    # to itself; confirm whether extra -Xmx flags were meant to be appended.
    JAVA6_REGEXP=".*1\.6.*"
    if [[ $TEST_JAVA_HOME =~ $JAVA6_REGEXP ]]; then
        JAVA_CMD="${JAVA_CMD}"
    fi
fi

# Gradle puts files:
#   build/classes/main - Main h2o core classes
#   build/classes/test - Test h2o core classes
#   build/resources/main - Main resources (e.g. page.html)
JVM="nice $JAVA_CMD -ea -Xmx3g -Xms3g -cp ${JVM_CLASSPATH} ${ADDITIONAL_TEST_JVM_OPTS}"
echo "$JVM" > $OUTDIR/jvm_cmd.txt
# Ahhh... but the makefile runs the tests skipping the jar'ing step when possible.
# Also, sometimes see test files in the main-class directory, so put the test
# classpath before the main classpath.
#JVM="nice java -ea -cp build/classes/test${SEP}build/classes/main${SEP}../h2o-core/build/classes/test${SEP}../h2o-core/build/classes/main${SEP}../lib/*"

# Tests
# Must run first, before the cloud locks (because it tests cloud locking)
JUNIT_TESTS_BOOT=""
JUNIT_TESTS_BIG=""

# Runner
# Default JUnit runner is org.junit.runner.JUnitCore
JUNIT_RUNNER="water.junit.H2OTestRunner"

# find all java in the src/test directory
# Cut the "./water/MRThrow.java" down to "water/MRThrow.java"
# Cut the "water/MRThrow.java" down to "water/MRThrow"
# Slash/dot "water/MRThrow" becomes "water.MRThrow"
# On this h2o-persist-hdfs testMultiNode.sh only, force the tests.txt to be in the same order for all machines.
# If sorted, the result of the cd/grep varies by machine.
# If randomness is desired, replace sort with the unix 'shuf'
# Use /usr/bin/sort because of cygwin on windows.
# Windows has sort.exe which you don't want. Fails? (is it a lineend issue)
(cd src/test/java; /usr/bin/find . -name '*.java' | cut -c3- | sed 's/.....$//' | sed -e 's/\//./g') | /usr/bin/sort > $OUTDIR/tests.txt

# Output the comma-separated list of ignored/dooonly tests
# Ignored tests trump do-only tests
echo $IGNORE > $OUTDIR/tests.ignore.txt
echo $DOONLY > $OUTDIR/tests.doonly.txt

# Launch 4 helper JVMs. All output redir'd at the OS level to sandbox files.
CLUSTER_NAME=junit_cluster_$$
CLUSTER_BASEPORT=44000
$JVM water.H2O -name $CLUSTER_NAME -baseport $CLUSTER_BASEPORT -ga_opt_out 1> $OUTDIR/out.1 2>&1 & PID_1=$!
$JVM water.H2O -name $CLUSTER_NAME -baseport $CLUSTER_BASEPORT -ga_opt_out 1> $OUTDIR/out.2 2>&1 & PID_2=$!
$JVM water.H2O -name $CLUSTER_NAME -baseport $CLUSTER_BASEPORT -ga_opt_out 1> $OUTDIR/out.3 2>&1 & PID_3=$!
$JVM water.H2O -name $CLUSTER_NAME -baseport $CLUSTER_BASEPORT -ga_opt_out 1> $OUTDIR/out.4 2>&1 & PID_4=$!

# Launch last driver JVM. All output redir'd at the OS level to sandbox files.
# Its exit status is captured into status.0 for cleanup() to report.
echo Running h2o-persist-hdfs junit tests...
($JVM -Ddoonly.tests=$DOONLY -Dbuild.id=$BUILD_ID -Dignore.tests=$IGNORE -Djob.name=$JOB_NAME -Dgit.commit=$GIT_COMMIT -Dgit.branch=$GIT_BRANCH -Dai.h2o.name=$CLUSTER_NAME -Dai.h2o.baseport=$CLUSTER_BASEPORT -Dai.h2o.ga_opt_out=yes -Dai.h2o.hdfs_config=$JUNIT_CORE_SITE_PATH $JUNIT_RUNNER $JUNIT_TESTS_BOOT `cat $OUTDIR/tests.txt` 2>&1 ; echo $? > $OUTDIR/status.0) 1> $OUTDIR/out.0 2>&1

# Append the 10 slowest tests to the end of the driver log.
grep EXECUTION $OUTDIR/out.0 | sed -e "s/.*TEST \(.*\) EXECUTION TIME: \(.*\) (Wall.*/\2 \1/" | sort -gr | head -n 10 >> $OUTDIR/out.0

cleanup
|
<reponame>xsolve-pl/xsolve-feat
import { Injectable } from '@nestjs/common';
import { InjectModel } from '@nestjs/mongoose';
import { Model } from 'mongoose';
import { DefinitionInterface } from '../interface/definition.interface';
import { DeployKeyRepository } from './deploy-key.repository';
import { DefinitionRecipeMapper } from '../../instantiation/definition-recipe-mapper.component';
@Injectable()
export class DefinitionRepository {
constructor(
@InjectModel('Definition')
private readonly definitionModel: Model<DefinitionInterface>,
) {}
find(
criteria: object,
offset: number,
limit: number,
sort?: object,
): Promise<DefinitionInterface[]> {
const query = this.definitionModel.find(criteria);
query.skip(offset).limit(limit);
if (sort) {
query.sort(sort);
}
return query.exec();
}
findById(id: string): Promise<DefinitionInterface> {
return this.definitionModel.findById(id).exec();
}
async findByIdOrFail(id: string): Promise<DefinitionInterface> {
const definition = await this.findById(id);
if (null === definition) {
throw new Error(`Definition document with id ${id} not found.`);
}
return definition;
}
async create(
projectId: string,
name: string,
recipeAsYaml: string,
): Promise<DefinitionInterface> {
const createdDefinition = new this.definitionModel({
projectId,
name,
});
createdDefinition.recipeAsYaml = recipeAsYaml;
createdDefinition.createdAt = new Date();
createdDefinition.updatedAt = new Date();
await createdDefinition.save();
return createdDefinition;
}
async update(
id: string,
name: string,
recipeAsYaml: string,
): Promise<DefinitionInterface> {
const updatedDefinition = await this.findById(id);
if (null === updatedDefinition) {
throw new Error(`Definition document with id ${id} not found.`);
}
updatedDefinition.name = name;
updatedDefinition.recipeAsYaml = recipeAsYaml;
updatedDefinition.updatedAt = new Date();
await updatedDefinition.save();
return updatedDefinition;
}
async remove(id: string): Promise<boolean> {
const removal = await this.definitionModel.findByIdAndRemove(id);
return true;
}
}
|
<reponame>mission-apprentissage/prise-de-rdv
const { runScript } = require("../scriptWrapper");
const logger = require("../../common/logger");
const { clearCfas } = require("./utils/clearUtils");
// Entry point: deletes every CFA user via clearCfas().
// runScript (from scriptWrapper) is presumed to handle setup/teardown and
// error reporting — confirm against its implementation; not visible here.
runScript(async () => {
  logger.info("Suppression de tous les users cfas ....");
  await clearCfas();
  logger.info("Users cfas supprimés avec succès !");
});
|
import songsReducer from './songsReducer'
// Unit tests for songsReducer: verifies the initial state shape and that the
// ADD_SONG, ADD_POPULAR_SONGS, and ADD_LYRICS actions each map their payload
// onto the expected state slice with loading set to false.
describe('songsReducer reducer', () => {
  it('should return the initial state', () => {
    expect(
      songsReducer(undefined, {})
    ).toEqual({songs: [], loading: false})
  })

  it('should handle the ADD_SONG action', () => {
    const songs = [ {track: {artist_name: "Eminem", track_name: "Lose Yourself"}}, {track: {artist_name: "<NAME> feat. <NAME>", track_name: "Lose Yourself to Dance"}} ]
    expect(
      songsReducer([], {
        type: 'ADD_SONG',
        payload: songs
      })
    ).toEqual({loading: false, songs: songs})
  })

  it('should handle the ADD_POPULAR_SONGS action', () => {
    const songs = [ {track: {artist_name: "Eminem", track_name: "Lose Yourself"}}, {track: {artist_name: "<NAME> feat. <NAME>", track_name: "Lose Yourself to Dance"}} ]
    expect(
      songsReducer([], {
        type: 'ADD_POPULAR_SONGS',
        payload: songs
      })
    ).toEqual({loading: false, popularSongs: songs})
  })

  it('should handle the ADD_LYRICS action', () => {
    const songWithLyrics = {lyrics_body: "Look if you had one shot, would you capture it or just let it slip...", lyrics_id: 16023263}
    expect(
      songsReducer([], {
        type: 'ADD_LYRICS',
        payload: songWithLyrics
      })
    ).toEqual({loading: false, lyrics: songWithLyrics})
  })
})
|
#
# Provides for an easier use of SSH by setting up ssh-agent.
#
# Authors:
# Sorin Ionescu <sorin.ionescu@gmail.com>
#
# Return if requirements are not found.
if (( ! $+commands[ssh-agent] )); then
  return 1
fi

# Set the path to the SSH directory.
_ssh_dir="$HOME/.ssh"

# Set the path to the environment file if not set by another module.
_ssh_agent_env="${_ssh_agent_env:-${TMPDIR:-/tmp}/ssh-agent.env.$UID}"

# Set the path to the persistent authentication socket.
_ssh_agent_sock="${TMPDIR:-/tmp}/ssh-agent.sock.$UID"

# Start ssh-agent if not started.
if [[ ! -S "$SSH_AUTH_SOCK" ]]; then
  # Export environment variables.
  source "$_ssh_agent_env" 2> /dev/null

  # Start ssh-agent if not started (checks for a live agent process owned by
  # this user with the PID recorded in the environment file).
  if ! ps -U "$LOGNAME" -o pid,ucomm | grep -q -- "${SSH_AGENT_PID:--1} ssh-agent"; then
    # Drop the agent's "echo ..." line and persist the rest for future shells.
    eval "$(ssh-agent | sed '/^echo /d' | tee "$_ssh_agent_env")"
  fi
fi

# Create a persistent SSH authentication socket.
if [[ -S "$SSH_AUTH_SOCK" && "$SSH_AUTH_SOCK" != "$_ssh_agent_sock" ]]; then
  ln -sf "$SSH_AUTH_SOCK" "$_ssh_agent_sock"
  export SSH_AUTH_SOCK="$_ssh_agent_sock"
fi

# Load identities.
if ssh-add -l 2>&1 | grep -q 'The agent has no identities'; then
  zstyle -a ':prezto:module:ssh:load' identities '_ssh_identities'
  # ssh-add has strange requirements for running SSH_ASKPASS, so we duplicate
  # them here. Essentially, if the other requirements are met, we redirect stdin
  # from /dev/null in order to meet the final requirement.
  #
  # From ssh-add(1):
  # If ssh-add needs a passphrase, it will read the passphrase from the current
  # terminal if it was run from a terminal. If ssh-add does not have a terminal
  # associated with it but DISPLAY and SSH_ASKPASS are set, it will execute the
  # program specified by SSH_ASKPASS and open an X11 window to read the
  # passphrase.
  if [[ -n "$DISPLAY" && -x "$SSH_ASKPASS" ]]; then
    ssh-add ${_ssh_identities:+$_ssh_dir/${^_ssh_identities[@]}} < /dev/null 2> /dev/null
  else
    ssh-add ${_ssh_identities:+$_ssh_dir/${^_ssh_identities[@]}} 2> /dev/null
  fi
fi

# Clean up.
unset _ssh_{dir,identities} _ssh_agent_{env,sock}
|
<gh_stars>0
package com.acgist.snail.net.upnp;
import org.w3c.dom.Element;
import com.acgist.snail.config.SystemConfig;
import com.acgist.snail.format.XML;
import com.acgist.snail.protocol.Protocol;
/**
* <p>UPNP请求</p>
*
* @author acgist
*/
public final class UpnpRequest {
/**
* <p>SOAP协议:{@value}</p>
*/
private static final String NAMESPACE_ENVELOPE = "http://schemas.xmlsoap.org/soap/envelope/";
/**
* <p>SOAP协议:{@value}</p>
*/
private static final String NAMESPANCE_ENCODING = "http://schemas.xmlsoap.org/soap/encoding/";
/**
* <p>XML工具</p>
*/
private XML xml;
/**
* <p>主体节点</p>
*/
private Element body;
/**
* <p>服务类型</p>
*/
private final String serviceType;
/**
* @param serviceType 服务类型
*/
private UpnpRequest(String serviceType) {
this.serviceType = serviceType;
}
/**
* <p>创建请求</p>
*
* @param serviceType 服务类型
*
* @return 请求
*/
public static final UpnpRequest newRequest(String serviceType) {
final UpnpRequest request = new UpnpRequest(serviceType);
request.build();
return request;
}
/**
* <p>新建报文</p>
*/
private void build() {
this.xml = XML.build();
final Element envelope = this.xml.elementNS(this.xml.document(), "s:Envelope", NAMESPACE_ENVELOPE);
envelope.setAttributeNS(NAMESPACE_ENVELOPE, "encodingStyle", NAMESPANCE_ENCODING);
this.body = this.xml.element(envelope, "s:Body");
}
/**
* <p>外网IP地址</p>
* <pre>
* <xmp>
* <?xml version="1.0"?>
* <s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
* <s:Body>
* <u:GetExternalIPAddress xmlns:u="urn:schemas-upnp-org:service:WANIPConnection:1"></u:GetExternalIPAddress>
* </s:Body>
* </s:Envelope>
* </xmp>
* </pre>
*
* @return 请求内容
*/
public String buildGetExternalIPAddress() {
this.xml.elementNS(this.body, "u:GetExternalIPAddress", this.serviceType);
return this.xml();
}
/**
* <p>端口映射信息</p>
* <pre>
* <xmp>
* <?xml version="1.0"?>
* <s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
* <s:Body>
* <u:GetSpecificPortMappingEntry xmlns:u="urn:schemas-upnp-org:service:WANIPConnection:1">
* <NewRemoteHost></NewRemoteHost>
* <NewExternalPort>8080</NewExternalPort><!-- 外网端口 -->
* <NewProtocol>TCP</NewProtocol>
* </u:GetSpecificPortMappingEntry>
* </s:Body>
* </s:Envelope>
* </xmp>
* </pre>
*
* @param portExt 外网端口
* @param protocol 协议
*
* @return 请求内容
*/
public String buildGetSpecificPortMappingEntry(int portExt, Protocol.Type protocol) {
final Element mapping = this.xml.elementNS(this.body, "u:GetSpecificPortMappingEntry", this.serviceType);
this.xml.element(mapping, "NewRemoteHost", "");
this.xml.element(mapping, "NewExternalPort", String.valueOf(portExt));
this.xml.element(mapping, "NewProtocol", protocol.name().toUpperCase());
return this.xml();
}
/**
* <p>端口映射</p>
* <pre>
* <xmp>
* <?xml version="1.0"?>
* <s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
* <s:Body>
* <u:AddPortMapping xmlns:u="urn:schemas-upnp-org:service:WANIPConnection:1">
* <NewRemoteHost></NewRemoteHost>
* <NewExternalPort>8080</NewExternalPort><!-- 外网端口 -->
* <NewProtocol>TCP</NewProtocol>
* <NewInternalPort>8080</NewInternalPort><!-- 内网端口 -->
* <NewInternalClient>192.168.1.100</NewInternalClient><!-- 内网地址 -->
* <NewEnabled>1</NewEnabled>
* <NewPortMappingDescription>描述</NewPortMappingDescription>
* <NewLeaseDuration>0</NewLeaseDuration>
* </u:AddPortMapping>
* </s:Body>
* </s:Envelope>
* </xmp>
* </pre>
*
* @param port 内网端口
* @param address 内网地址
* @param portExt 外网端口
* @param protocol 协议
*
* @return 请求内容
*/
public String buildAddPortMapping(int port, String address, int portExt, Protocol.Type protocol) {
final Element mapping = this.xml.elementNS(this.body, "u:AddPortMapping", this.serviceType);
this.xml.element(mapping, "NewRemoteHost", "");
this.xml.element(mapping, "NewExternalPort", String.valueOf(portExt));
this.xml.element(mapping, "NewProtocol", protocol.name().toUpperCase());
this.xml.element(mapping, "NewInternalPort", String.valueOf(port));
this.xml.element(mapping, "NewInternalClient", address);
this.xml.element(mapping, "NewEnabled", "1");
this.xml.element(mapping, "NewPortMappingDescription", SystemConfig.getNameEn());
this.xml.element(mapping, "NewLeaseDuration", "0");
return this.xml();
}
/**
* <p>删除端口映射</p>
* <pre>
* <xmp>
* <?xml version="1.0" encoding="UTF-8"?>
* <s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
* <s:Body>
* <u:DeletePortMapping xmlns:u="urn:schemas-upnp-org:service:WANIPConnection:1">
* <NewRemoteHost></NewRemoteHost>
* <NewExternalPort>8080</NewExternalPort><!-- 外网端口 -->
* <NewProtocol>TCP</NewProtocol>
* </u:DeletePortMapping>
* </s:Body>
* </s:Envelope>
* </xmp>
* </pre>
*
* @param portExt 外网端口
* @param protocol 协议
*
* @return 请求内容
*/
public String buildDeletePortMapping(int portExt, Protocol.Type protocol) {
final Element mapping = this.xml.elementNS(body, "u:DeletePortMapping", this.serviceType);
this.xml.element(mapping, "NewRemoteHost", "");
this.xml.element(mapping, "NewExternalPort", String.valueOf(portExt));
this.xml.element(mapping, "NewProtocol", protocol.name().toUpperCase());
return this.xml();
}
/**
* <p>XML文本输出</p>
*
* @return XML文本
*/
	private String xml() {
		// Serialize the assembled DOM; the boolean presumably disables
		// pretty-printing — TODO confirm against the XML helper's API.
		return this.xml.xml(false);
	}
}
|
package org.jooby;
import static java.util.Objects.requireNonNull;
import static org.junit.Assert.assertEquals;
import java.text.SimpleDateFormat;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.TimeZone;
import javax.inject.Inject;
import javax.inject.Named;
import org.jooby.mvc.GET;
import org.jooby.mvc.Path;
import org.jooby.test.ServerFeature;
import org.junit.Test;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
/**
 * Integration test: verifies that the injected {@link DateTimeFormatter} and
 * {@link ZoneId} honor the {@code application.dateFormat} / {@code application.tz}
 * configuration, and that formatting a timestamp through SimpleDateFormat and
 * through DateTimeFormatter yields the same text.
 */
public class DateTimeFormatterFeature extends ServerFeature {

  public static class Resource {

    private DateTimeFormatter formatter;

    private String dateFormat;

    private ZoneId zoneId;

    @Inject
    public Resource(final DateTimeFormatter formatter, final ZoneId zoneId,
        @Named("application.dateFormat") final String dateFormat) {
      // The injected zone must come from application.tz (= GMT, set below).
      assertEquals(ZoneId.of("GMT"), zoneId);
      assertEquals(zoneId, formatter.getZone());
      this.formatter = requireNonNull(formatter, "def formatter is required.");
      this.zoneId = zoneId;
      this.dateFormat = requireNonNull(dateFormat, "The dateFormat is required.");
    }

    @GET
    @Path("/")
    public String formatter(final long time, final org.jooby.Request req) {
      // Format the same instant twice: once via the legacy SimpleDateFormat
      // built from the raw pattern, once via the injected DateTimeFormatter
      // (round-tripped through parse). Both strings are returned for comparison.
      Date date = new Date(time);
      SimpleDateFormat sdf = new SimpleDateFormat(dateFormat, req.locale());
      sdf.setTimeZone(TimeZone.getTimeZone(zoneId));
      String sdate = sdf.format(date);
      String newsdate = formatter.format(formatter.parse(sdate));
      return sdate + "|" + newsdate;
    }
  }

  {
    use(ConfigFactory
        .empty()
        .withValue("application.lang", ConfigValueFactory.fromAnyRef("en-US"))
        .withValue("application.dateFormat", ConfigValueFactory.fromAnyRef("MM/dd/yy H:mm"))
        .withValue("application.tz", ConfigValueFactory.fromAnyRef("GMT")));

    use(Resource.class);
  }

  @Test
  public void dateFormat() throws Exception {
    // Upper-case L suffix (was 1412824189989l): lowercase l is easily read as the digit 1.
    long time = 1412824189989L;
    request()
        .get("/?time=" + time)
        .expect(200)
        .expect("10/09/14 3:09|10/09/14 3:09");
  }

}
|
<gh_stars>10-100
'use strict';
var handlebars = require('handlebars');
var _ = require('lodash');
var config = require('../config');
var template = require('../template');
var helpers = require('./helpers');
////////
module.exports = render;
////////
/**
 * Compile a handlebars template in an isolated environment (so partials and
 * helpers don't leak into the shared handlebars instance) and render it with
 * the given data, then re-indent the output per config.
 *
 * @param {string} source template source
 * @param {object} data template context
 * @returns {string} rendered text
 */
function render(source, data) {
	var env = handlebars.create();

	registerPartials(env);
	registerHelpers(env);

	// noEscape: output is code/text, not HTML — do not entity-encode.
	var compiledTemplate = env.compile(source, {
		noEscape: true
	});

	var result = compiledTemplate(data);

	// Templates are authored with tabs; a numeric config value means
	// "indent with that many spaces".
	var indent = config.template.indent;
	if (_.isNumber(indent)) {
		indent = _.repeat(' ', indent);
	}
	// Guard on string-ness: a missing/invalid config value would otherwise
	// stringify to "undefined" inside the output.
	if (_.isString(indent) && indent !== '\t') {
		result = result.replace(/\t/g, indent);
	}

	return result;
}
////////
// Attach every exported helper to the given handlebars environment.
function registerHelpers(env) {
	Object.keys(helpers).forEach(function (name) {
		env.registerHelper(name, helpers[name]);
	});
}
// Register each processed dependency's template source as a partial,
// keyed by the dependency name.
function registerPartials(env) {
	_.forEach(config.dependencies.process, function (depName) {
		env.registerPartial(depName, template.dependency(depName));
	});
}
<reponame>siddharthsg2/your-awesome-projects<filename>Movie-App/node_modules/server/src/config/integration.test.js<gh_stars>1000+
// Test runner:
const run = require('server/test/run');
const path = require('path');
// Fixture directory name used by the `public` option tests below.
const test = 'test';

describe('Basic router types', () => {
  // TODO: fix this
  it('has independent options', async () => {
    // Two servers started concurrently must each see their own options:
    // the slow one (1s delayed response) must not pick up the other's value.
    const res = await Promise.all([
      run({ public: 'right' }, ctx => new Promise(resolve => {
        setTimeout(() => {
          ctx.res.send(ctx.options.public);
          resolve();
        }, 1000);
      })).get('/'),
      run({ public: 'wrong' }, ctx => ctx.options.public).get('/')
    ]);
    expect(res[0].body).toMatch(/right/);
    expect(res[1].body).toMatch(/wrong/);
  });

  it('accepts several definitions of public correctly', async () => {
    // Relative, ./-prefixed and absolute forms must all resolve to the
    // same absolute path.
    const full = path.join(process.cwd(), 'test');
    const publish = ctx => ctx.options.public;
    expect((await run({
      public: test
    }, publish).get('/')).body).toBe(full);
    expect((await run({
      public: './' + test
    }, publish).get('/')).body).toBe(full);
    expect((await run({
      public: __dirname + '/../../' + test
    }, publish).get('/')).body).toBe(full);
  });
});
|
<reponame>ZenUml/vue-sequence<gh_stars>1-10
// Advanced-functionality demo snippet: a participant calling its own
// method while naming itself explicitly (A.methodA() invokes A.methodA1()).
export default 'A.methodA() { A.methodA1() }'
|
<gh_stars>0
// Controller seeding demo name fields onto the scope.
function JenkinsController($scope) {
	$scope.firstName = "John";
	$scope.lastName = "Doe";
}

// Minimal AngularJS module exposing the demo controller
// (array annotation keeps it minification-safe).
angular.module('jenkins', [])
	.controller('JenkinsController', ['$scope', JenkinsController]);
<filename>regscrape/regsdotgov/__init__.py
# add self to path
import sys
import os

# Absolute directory of this package; appended to sys.path so sibling
# modules can be imported as top-level names. Idempotent across re-imports.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
if CURRENT_DIR not in sys.path:
    sys.path.append(CURRENT_DIR)
#!/bin/bash
# STAR mapping evaluation
# GOAL: run mapping various STAR mapping parameters and test impact on cis-eQTLs
# DATE: 1 December 2020 -
# INFO:
# Steps from reads of samples (.fastq.gz) to cis eQTL detection
###################################################################################
# Part 1: Mapping
# Constant definitions
# Input/output locations (hard-coded to this analysis machine).
refdir=/home/ngobet/projects/BXD/references
outdir=/mnt/md0/BXD/MappingEvaluation
# Personalized-genome settings: haplotype, randomization mode, variant set.
parental=paternal
unphased=nonrandomized
variants=genotypesandimputed
# STAR gap-open penalties (deletion/insertion).
deletion=2
insertion=2
# prepare mapping commands
# For every BXD line, write one shell script containing a STAR (+ optional
# htseq-count) command per (annotation, trimming, intronMax, mismatches, sample)
# combination. Commands are only written here; they are executed in the next loop.
mkdir $outdir
##for line in $(cat data/lines.txt)
for line in $(cat data/lines.txt | grep -v "BXD43")
do
	echo "#$line"
	echo "#$line" > /mnt/nas/BXD/analysis/scripts/mapping/MappingEvaluation_$line\_listcommands.sh
	echo "#DATE:" `date` >> /mnt/nas/BXD/analysis/scripts/mapping/MappingEvaluation_$line\_listcommands.sh
	# Run STAR alignment
	for annotation in withoutannotation withannotation
	do
		# NOTE(review): $count is never assigned in this script — likely a leftover.
		echo "#$annotation $count"
		for trimming in EndToEnd Local
		do
			echo "##trimming: $trimming"
			for intronMax in 0 1
			do
				echo "###intronMax: $intronMax"
				for mismatches in 0 1 2 3 10
				do
					echo "####mismatches: $mismatches"
					mkdir -p $outdir/$variants\_$annotation\_$trimming\_$intronMax\_$mismatches/
					# Samples are matched to the line by its numeric part (e.g. 43 for BXD43).
					linenumber=$(grep -o [0-9]* <<< $line)
					samples=($(ls /mnt/nas/BXD/data/transcriptome/RNAseq/filtered/*.fastq.gz | grep -o "[L0-9D]\{2,4\}[n]\{0,1\}sd*" | grep "$linenumber" | sort | uniq))
					for sample in "${samples[@]}"
					do
						# mapping command (alignment with STAR + gene counting)
						# mapping without transcriptome annotation
						if [ $annotation = "withoutannotation" ]
						then
							echo "/software/Alignment/STAR/STAR-2.7.0e/bin/STAR --genomeDir $refdir/$line\_$unphased\_$variants/star_$parental\_$annotation --outFileNamePrefix $outdir/$variants\_$annotation\_$trimming\_$intronMax\_$mismatches/$sample\_ --outSAMmode Full --outStd SAM --runThreadN 2 --readFilesCommand zcat --readFilesIn /mnt/nas/BXD/data/transcriptome/RNAseq/filtered/$sample.fastq.gz --scoreDelOpen -$deletion --scoreInsOpen -$insertion --alignIntronMax $intronMax --alignEndsType $trimming --outFilterMismatchNmax $mismatches 2> $outdir/$variants\_$annotation\_$trimming\_$intronMax\_$mismatches/$sample\_align.err | htseq-count -f bam -q -s reverse -t exon -m union - $refdir/$line\_$unphased\_$variants/$parental.gtf 2> $outdir/$variants\_$annotation\_$trimming\_$intronMax\_$mismatches/$sample\_htseqcount.err 1> $outdir/$variants\_$annotation\_$trimming\_$intronMax\_$mismatches/$sample\_ReadsPerGene.out.tab; rm $outdir/$variants\_$annotation\_$trimming\_$intronMax\_$mismatches/$sample\_SJ.out.tab" >> /mnt/nas/BXD/analysis/scripts/mapping/MappingEvaluation_$line\_listcommands.sh
						# mapping with transcriptome annotation
						elif [ $annotation = "withannotation" ]
						then
							echo "/software/Alignment/STAR/STAR-2.7.0e/bin/STAR --genomeDir $refdir/$line\_$unphased\_$variants/star_$parental\_$annotation --outFileNamePrefix $outdir/$variants\_$annotation\_$trimming\_$intronMax\_$mismatches/$sample\_ --outSAMtype BAM Unsorted --runThreadN 2 --readFilesCommand zcat --readFilesIn /mnt/nas/BXD/data/transcriptome/RNAseq/filtered/$sample.fastq.gz --quantMode GeneCounts --scoreDelOpen -$deletion --scoreInsOpen -$insertion --alignIntronMax $intronMax --alignEndsType $trimming --outFilterMismatchNmax $mismatches > $outdir/$variants\_$annotation\_$trimming\_$intronMax\_$mismatches/$sample\_align_default.out 2> $outdir/$variants\_$annotation\_$trimming\_$intronMax\_$mismatches/$sample\_align.err; rm $outdir/$variants\_$annotation\_$trimming\_$intronMax\_$mismatches/$sample\_*.bam; rm $outdir/$variants\_$annotation\_$trimming\_$intronMax\_$mismatches/$sample\_SJ.out.tab" >> /mnt/nas/BXD/analysis/scripts/mapping/MappingEvaluation_$line\_listcommands.sh
						fi
					done #end samples loop
				done #end mismatches loop
			done #end intronMax loop
		done #end trimming loop
	done #end annotation loop
done #end lines loop
# Run mapping commands
# Per line: compute gene lengths, stage the personalized reference on SSD,
# execute the command list written above (4 jobs in parallel), then clean up.
# The large commented sections are one-time setup (genotype imputation,
# personalized genome construction, STAR indexing) kept for provenance.
##for line in $(cat data/lines.txt)
for line in $(cat data/lines.txt | grep -v "BXD63")
do
	echo "#$line"
	# # Impute genotypes for BXD line
	# # retrieve header
	# grep "#CHROM" data/genome/D2specificVariants/DBA_2J.mgp.v5.snps.dbSNP142.vcf > data/genome/ImputedGenotypes/$line\_imputedgenotypes.vcf
	# # adapt header to BXD line
	# sed -i "s/DBA_2J/$line/g" data/genome/ImputedGenotypes/$line\_imputedgenotypes.vcf
	# # impute genotypes for BXD line
	# /home/ngobet/software/bedtools2/bin/intersectBed -a data/genome/D2specificVariants/DBA_2J.mgp.v5.indels.dbSNP142.normed.vcf -b data/genome/$line\_D2blocks.bed -wa >> data/genome/ImputedGenotypes/$line\_imputedgenotypes.vcf
	# /home/ngobet/software/bedtools2/bin/intersectBed -a data/genome/D2specificVariants/DBA_2J.mgp.v5.snps.dbSNP142.vcf -b data/genome/$line\_D2blocks.bed -wa >> data/genome/ImputedGenotypes/$line\_imputedgenotypes.vcf
	# # Build personalized genomes
	# export JAVA_HOME=/home/ngobet/software/java/jdk-13.0.2
	# export PATH=$JAVA_HOME/bin:$PATH
	# # unphased variant randomized or not
	# mkdir $refdir/$line\_$unphased\_$variants
	# cd $refdir/$line\_$unphased\_$variants
	# java -jar /home/ngobet/software/vcf2diploid-masterNotRandomized/vcf2diploid.jar \
	# -id $line \
	# -chr /mnt/nas/BXD/references/genome/Mus_musculus.GRCm38.dna_sm.primary_assembly.fa \
	# -vcf /mnt/nas/BXD/data/genome/ImputedGenotypes/$line\_imputedgenotypes.vcf > $refdir/$line\_$unphased\_$variants/vcf2diploid.out 2> $refdir/$line\_$unphased\_$variants/vcf2diploid.err
	# cd /mnt/nas/BXD/
	# mkdir $refdir/$line\_$unphased\_$variants/star_$parental\_withoutannotation $refdir/$line\_$unphased\_$variants/star_$parental\_withannotation
	# # liftover transcriptome annotation
	# /home/ngobet/software/liftOver -gff /mnt/nas/BXD/references/transcriptome/Mus_musculus.GRCm38.94.gtf $refdir/$line\_$unphased\_$variants/$parental\.chain $refdir/$line\_$unphased\_$variants/$parental\.gtf $refdir/$line\_$unphased\_$variants/$parental\_unlifted.gtf
	# # build references index for STAR
	# /software/Alignment/STAR/STAR-2.7.0e/bin/STAR --runThreadN 16 --limitGenomeGenerateRAM 90000000000 --runMode genomeGenerate --genomeDir $refdir/$line\_$unphased\_$variants/star_$parental\_withoutannotation --genomeFastaFiles $refdir/$line\_$unphased\_$variants/*$parental\.fa /mnt/nas/BXD/references/genome/Mus_musculus.GRCm38.dna_sm.nonchromosomal.fa --outFileNamePrefix $refdir/$line\_$unphased\_$variants/star_$parental\_withoutannotation/star_$parental\_withoutannotation_\_ > $refdir/$line\_$unphased\_$variants/star_$parental\_withoutannotation/indexing.out 2> $refdir/$line\_$unphased\_$variants/star_$parental\_withoutannotation/indexing.err
	# /software/Alignment/STAR/STAR-2.7.0e/bin/STAR --runThreadN 16 --limitGenomeGenerateRAM 90000000000 --runMode genomeGenerate --genomeDir $refdir/$line\_$unphased\_$variants/star_$parental\_withannotation --genomeFastaFiles $refdir/$line\_$unphased\_$variants/*$parental\.fa /mnt/nas/BXD/references/genome/Mus_musculus.GRCm38.dna_sm.nonchromosomal.fa --sjdbGTFfile $refdir/$line\_$unphased\_$variants/$parental.gtf --outFileNamePrefix $refdir/$line\_$unphased\_$variants/star_$parental\_withannotation/star_$parental\_withannotation_ > $refdir/$line\_$unphased\_$variants/star_$parental\_withannotation/indexing.out 2> $refdir/$line\_$unphased\_$variants/star_$parental\_withannotation/indexing.err
	# Get gene lengths
	# GTFtools only consider genes on conventional genes (help from: https://genomespot.blogspot.com/2019/01/using-gtf-tools-to-get-gene-lengths.html)
	# grep -v '#' data/PersonalizedReferences/$line\_nonrandomized_genotypesandimputed/paternal.gtf | awk '{OFS="\t"} $1=1' > tmp.gtf
	# /home/ngobet/software/GTFtools_0.6.5/gtftools.py -l data/PersonalizedReferences/$line\_nonrandomized_genotypesandimputed/genelength.txt tmp.gtf
	# rm tmp.gtf
	grep -v '#' data/PersonalizedReferences/$line\_nonrandomized_genotypesandimputed/paternal.gtf | awk '{OFS="\t"} $1=1' > tmp.gtf
	/home/ngobet/software/GTFtools_0.6.5/gtftools.py -l data/PersonalizedReferences/$line\_nonrandomized_genotypesandimputed/genelength.txt tmp.gtf
	rm tmp.gtf
	# copy reference directory to SSD memory
	cp -r data/PersonalizedReferences/$line\_$unphased\_$variants $refdir
	# run jobs
	nohup parallel --delay 1 -j 4 < /mnt/nas/BXD/analysis/scripts/mapping/MappingEvaluation_$line\_listcommands.sh 1> $outdir/MappingEvaluation_$line\_listcommands.out 2> $outdir/MappingEvaluation_$line\_listcommands.err
	# remove reference directory copy on SSD memory
	rm -r $refdir/$line\_$unphased\_$variants
done
# copy results on nas
cp -r $outdir data/
# For every parameter-setting directory: merge per-sample counts, then
# produce CPM- and TPM-normalized expression tables.
listsettings=($(ls -d data/MappingEvaluation/*/))
for setting in "${listsettings[@]}"
do
	echo setting is: $setting
	# group counts
	analysis/scripts/mapping/MergeCount.sh $setting _ReadsPerGene.out.tab
	# normalize counts CPM
	module load R/3.4.2
	Rscript analysis/scripts/mapping/normalizeGeneCountsCPM.R $setting
	# normalize counts TPM
	Rscript analysis/scripts/mapping/normalizeGeneCountsTPM.R $setting
done
# # prepare gene annotation for eQTL analysis
# Rscript analysis/scripts/mapping/retrieveGenePosition.R

# # Part 2: eQTL analysis [on cluster]
# eQTL analysis on Wally cluster
# For each (setting, tissue, condition): convert expression to UCSC bed,
# run FastQTL with permutations, merge per-chromosome output and compute
# q-values. Runs on the cluster, then results are copied back.
# directories definitions
refdir=/scratch/wally/FAC/FBM/CIG/pfranken/bxd_map2/data/references
mapdir=/scratch/wally/FAC/FBM/CIG/pfranken/bxd_map2/data/mapping
eQTLdir=/scratch/wally/FAC/FBM/CIG/pfranken/bxd_map2/data/eQTL
codedir=/users/ngobet/scripts
# copy normalized gene expression
scp -r ngobet@pccig3009.unil.ch:/mnt/nas/BXD/data/MappingEvaluation/* $mapdir
# Keep only the normalized tables: drop logs and intermediate count files.
rm $mapdir/*/*.err $mapdir/*/*.out.tab
rm $mapdir/*/*final.out
rm $mapdir/*/*progress.out
rm $mapdir/*/*Log.out
rm $mapdir/*/*.out $mapdir/*/*_CPM* $mapdir/*/*TPM*
rm $mapdir/*.err $mapdir/*.out
# load softwares
module load HPC/Software
module add UHTS/Analysis/FastQTL/2.184
module add UHTS/Analysis/EPACTS/3.2.6
# eQTL detection
# NOTE(review): `cut -d "/" -f 11` extracts the setting name from the
# absolute $mapdir path — depends on that exact directory depth.
listsettings=$(ls -d $mapdir/* | cut -d "/" -f 11)
for setting in ${listsettings[@]}
do
	echo $setting
	for tissue in Cortex Liver
	do
		echo $tissue
		for condition in NSD SD
		do
			echo $condition
			# transform phenotypes (gene expression) to bed format (UCSC format)
			scripts/transformGenePhenotypesToBedUCSC.py $mapdir/$setting/TMMnormalized_log2CPM\_$tissue\_$condition.tab $refdir/genotypes/BXDGenotypes.geno $refdir/transcriptome_annotation/GenePosition.txt $mapdir/$setting/TMMnormalized_log2CPM\_$tissue\_$condition.bed > $mapdir/$setting/TMMnormalized_log2CPM\_$tissue\_$condition\_transformGenePhenotypesToBedUCSC.out 2> $mapdir/$setting/TMMnormalized_log2CPM\_$tissue\_$condition\_transformGenePhenotypesToBedUCSC.err
			# compress and index phenotypes
			bgzip -f $mapdir/$setting/TMMnormalized_log2CPM\_$tissue\_$condition.bed && tabix -p bed $mapdir/$setting/TMMnormalized_log2CPM\_$tissue\_$condition.bed.gz
			# prepare commands
			fastQTL --vcf $refdir/genotypes/BXDGenotypes.vcf.gz --bed $mapdir/$setting/TMMnormalized_log2CPM\_$tissue\_$condition.bed.gz --out $mapdir/$setting/TMMnormalized_log2CPM\_$tissue\_$condition --commands 25 $codedir/$setting\_$tissue\_$condition\_CPM_listcommands.sh --window 2e6 --permute 1000 --seed 1
			# run commands
			bash $codedir/$setting\_$tissue\_$condition\_CPM_listcommands.sh > $mapdir/$setting/$setting\_$tissue\_$condition\_CPM_listcommands.out 2> $mapdir/$setting/$setting\_$tissue\_$condition\_CPM_listcommands.err
			# post-processing
			# group results from different regions
			cat $mapdir/$setting/TMMnormalized_log2CPM\_$tissue\_$condition.chr* > $mapdir/$setting/TMMnormalized_log2CPM\_$tissue\_$condition.txt
			# clean up unneeded files
			rm $mapdir/$setting/TMMnormalized_log2CPM\_$tissue\_$condition.chr* $mapdir/$setting/TMMnormalized_log2CPM\_$tissue\_$condition
			# calculate q-values
			Rscript $codedir/correctMultiPhenotypeseQTL.R $mapdir/$setting/TMMnormalized_log2CPM\_$tissue\_$condition
		done
	done
done
# copy eQTL files on 3009
scp -r $mapdir ngobet@pccig3009.unil.ch:/mnt/nas/BXD/data/transcriptome/$setting
##scp -r $tmpdir ngobet@pccig3009.unil.ch:/mnt/nas/BXD/data/MappingEvaluation/genotypesandimputed_withoutannotation_EndToEnd_1_0/
# remove eQTL files on cluster
rm -r $mapdir
|
<filename>app.py
import os
from flask import Flask, render_template, request, redirect, url_for, make_response
from mastodon import Mastodon, get_client_keys, generate_oauth_url, process_refresh_token
from tootsaver_data import save_toot, read_saved, remove_data, call_data
# Absolute directory of this file; used to locate client-key storage and
# the tuskyex changelog directory.
root_dir = os.path.abspath(os.path.dirname(__file__))
app = Flask(__name__)
@app.route("/", methods=["GET"])
def main():
title = "Kyori Application Portal"
return render_template("index.html", title=title)
@app.route("/regulusaurum", methods=["GET"])
def regulusaurum():
return redirect(url_for("regulusaurum_info"))
@app.route("/regulusaurum/info", methods=["GET"])
def regulusaurum_info():
user_id = request.cookies.get("account_id", None)
if user_id == None:
title = "ログイン・利用登録 - EasyTootSaver ~獅子の黄金~"
requireLogin = request.args.get("requireLogin", default=1, type=int)
return render_template("regulusaurum/info.html", title=title, requireLogin=requireLogin)
else:
return redirect(url_for("regulusaurum_dashboard"))
@app.route("/regulusaurum/jump", methods=["POST"])
def regulusaurum_jump():
host_domain = request.form.get("host_domain")
client_keys = get_client_keys(root_dir, host_domain)
response = make_response(redirect(generate_oauth_url(
host_domain, client_keys[0], "https://app.odakyu.app/regulusaurum/done", "read:accounts")))
response.set_cookie("host_domain", value=host_domain)
return response
@app.route("/regulusaurum/done", methods=["GET"])
def regulusaurum_done():
refresh_token = request.args.get("code", default=None, type=str)
host_domain = request.cookies.get("host_domain", "")
client_keys = get_client_keys(root_dir, host_domain)
if not refresh_token == None:
access_token = process_refresh_token(
host_domain, client_keys[0], client_keys[1], refresh_token)
response = make_response(redirect(url_for("regulusaurum_dashboard")))
api = Mastodon(access_token=access_token, host_domain=host_domain)
result = api.verify_credentials().json()
account_id = result["id"]
response.set_cookie("account_id", value=account_id)
screen_name = result["username"]
response.set_cookie("screen_name", screen_name)
return response
else:
return redirect(url_for("regulusaurum_info", requireLogin=0))
@app.route("/regulusaurum/dashboard", methods=["GET"])
def regulusaurum_dashboard():
account_id = request.cookies.get("account_id", None)
if account_id == None:
return redirect(url_for("regulusaurum_info", requireLogin=0))
else:
title = "ユーザーページ - EasyTootSaver ~獅子の黄金~"
return render_template("regulusaurum/dashboard.html", title=title)
@app.route("/tuskyex", methods=["GET"])
def tuskyex():
title = "TuskyEx Kyori Build"
tuskyex_root=root_dir+"/tuskyex"
version_name_list=os.listdir(tuskyex_root)
version_name_list.sort()
version_name_list.reverse()
view_array=[]
for file_name in version_name_list:
file_path=tuskyex_root+"/"+file_name
with open(file_path,"r",encoding="utf-8") as input:
text=input.read()
joker={}
joker["version"]=file_name
joker["apk_url"]=url_for("static",filename="apk/"+file_name+".apk")
joker["content"]=text
view_array.append(joker)
return render_template("tuskyex.html", title=title, view_array=view_array)
@app.route("/profile", methods=["GET"])
def profile():
title = "About きょり/わんせた"
return render_template("profile.html", title=title)
if __name__ == "__main__":
app.run(host="localhost", port=1443)
|
// copied from govau/designsystem
// Header/navigation DOM references, resolved once at load time.
var mainmenu = document.getElementById( 'mainmenu' );
var searchmenu = document.getElementById( 'searchmenu' );
var mainmenuToggle = document.getElementById( 'mainmenu-toggle' );
var searchToggle = document.getElementById( 'search-toggle' );
var overlay = document.getElementById( 'overlay' );
// Sentinel elements used to keep keyboard focus inside the open overlay.
var focustrapTop = document.getElementById( 'focustrap-top' );
var focustrapBottom = document.getElementById( 'focustrap-bottom' );
var mainmenuLinks = document.querySelectorAll( '.header a, .header button' );
// Skip-link whose target depends on whether the mobile toggle is visible.
var navSkipLink = document.querySelectorAll( '.au-skip-link__link[href="#mainmenu"]' )[ 0 ];
// Collapse the main menu if it is currently expanded; no-op otherwise.
// The toggle's label doubles as its open/closed state.
function CloseMenuOnly() {
	var menuIsOpen = mainmenuToggle.innerHTML === "Close menu";
	if ( menuIsOpen ) {
		AU.accordion.Toggle( mainmenuToggle, undefined );
		mainmenuToggle.innerHTML = 'Open menu';
	}
}
// Collapse the search panel if it is currently expanded; no-op otherwise.
// Mirrors CloseMenuOnly: the toggle's label doubles as its state.
function CloseSearchOnly() {
	var searchIsOpen = searchToggle.innerHTML === "Close search";
	if ( searchIsOpen ) {
		AU.accordion.Toggle( searchToggle, undefined );
		searchToggle.innerHTML = 'Open search';
	}
}
// Open/close the main menu accordion, keeping the search panel, the
// toggle label, the focus trap and the body-scroll lock in sync.
function ToggleMenu() {
	AU.accordion.Toggle( mainmenuToggle, undefined, {
		onOpen: function() {
			CloseSearchOnly(); // Force close the search in case it's currently open
			mainmenuToggle.innerHTML = 'Close menu'; // Change the text in the toggle
			focustrapTop.setAttribute( "tabindex", 0 ); // Enable the focus trap
			focustrapBottom.setAttribute( "tabindex", 0 );
			AddClass( document.body, 'overlay--open' ); // Stop scrolling when overlay is open
		},
		onClose: function() {
			mainmenuToggle.innerHTML = 'Open menu';
			focustrapTop.removeAttribute( "tabindex" );
			focustrapBottom.removeAttribute( "tabindex" );
			RemoveClass( document.body, 'overlay--open' );
		},
	});
}
// Open/close the search accordion; mirror image of ToggleMenu.
function ToggleSearch() {
	AU.accordion.Toggle( searchToggle, undefined, {
		onOpen: function() {
			CloseMenuOnly(); // Force close the menu in case it's currently open
			searchToggle.innerHTML = 'Close search'; // Change the text in the toggle
			focustrapTop.setAttribute( "tabindex", 0 ); // Enable the focus trap
			focustrapBottom.setAttribute( "tabindex", 0 );
			AddClass( document.body, 'overlay--open' ); // Stop scrolling when overlay is open
		},
		onClose: function() {
			searchToggle.innerHTML = 'Open search';
			focustrapTop.removeAttribute( "tabindex" );
			focustrapBottom.removeAttribute( "tabindex" );
			RemoveClass( document.body, 'overlay--open' );
		},
	});
}
// On click of the menu toggle open or close the menu
AddEvent( mainmenuToggle, 'click', function( event ) {
	PreventEvent( event );
	ToggleMenu();
});

// On click of the search toggle open or close the search
AddEvent( searchToggle, 'click', function( event ) {
	PreventEvent( event );
	ToggleSearch();
});

// Close the menu if the overlay is clicked
// NOTE(review): this always toggles the menu — if the overlay can also be
// shown for the search panel, clicking it while search is open would open
// the menu instead of closing search. Confirm intended behaviour.
AddEvent( overlay, 'click', function( event ) {
	PreventEvent( event );
	ToggleMenu();
});

// Move the focus to the correct item when it lands on a trap:
// wrap from the top sentinel to the last link and vice versa.
AddEvent( focustrapTop, 'focus', function( event ) {
	PreventEvent( event );
	mainmenuLinks[ mainmenuLinks.length - 1 ].focus();
});

AddEvent( focustrapBottom, 'focus', function( event ) {
	PreventEvent( event );
	mainmenuLinks[ 0 ].focus();
});
if ( mainmenu ){
	// Hide the menu from assistive tech when CSS hides it visually.
	var MenuCheck = function() {
		var currentMenuDisplay = window.getComputedStyle( mainmenu, '' ).getPropertyValue( 'display' );
		if( currentMenuDisplay === 'none' ){
			mainmenu.setAttribute( "aria-hidden", "true" );
		}
		else {
			mainmenu.setAttribute( "aria-hidden", "false" );
		}
	};

	// Point the skip-link at the toggle on mobile (where #mainmenu is hidden).
	var SkipLinkCheck = function() {
		var mobileMenuDisplay = window.getComputedStyle( mainmenuToggle, '' ).getPropertyValue( 'display' );
		if( mobileMenuDisplay === 'none' ){
			navSkipLink.setAttribute( "href", "#mainmenu" );
		}
		else {
			navSkipLink.setAttribute( "href", "#mainmenu-toggle" );
		}
	}

	// Run on page load
	MenuCheck();
	SkipLinkCheck();

	// Run functions after a debounced resize
	var PageResize = Debounce(function() {
		MenuCheck();
		SkipLinkCheck();
	}, 250);

	// Run PageResize function on resize
	// NOTE(review): assigning window.onresize clobbers any other resize
	// handler — AddEvent( window, 'resize', ... ) would compose; confirm.
	window.onresize = function() {
		PageResize();
	}
}
|
package solver
import (
"context"
"sync"
"time"
"github.com/pkg/errors"
)
// NewInMemoryCacheStorage returns a CacheKeyStorage backed entirely by
// in-process maps; nothing is persisted.
func NewInMemoryCacheStorage() CacheKeyStorage {
	store := &inMemoryStore{}
	store.byID = make(map[string]*inMemoryKey)
	store.byResult = make(map[string]map[string]struct{})
	return store
}
// inMemoryStore holds all cache-key metadata in process memory,
// guarded by a single RWMutex.
type inMemoryStore struct {
	mu sync.RWMutex
	// byID maps cache key ID -> key record.
	byID map[string]*inMemoryKey
	// byResult maps result ID -> set of key IDs referencing that result.
	byResult map[string]map[string]struct{}
}
// inMemoryKey is the per-cache-key record: its stored results, its
// outgoing links grouped by link info, and the IDs of keys linking to it.
type inMemoryKey struct {
	id string
	// results maps result ID -> stored CacheResult.
	results map[string]CacheResult
	// links maps link info -> set of target key IDs.
	links map[CacheInfoLink]map[string]struct{}
	// backlinks is the set of source key IDs that link to this key.
	backlinks map[string]struct{}
}
// Exists reports whether the key is known and still carries any links
// or results (a fully emptied key does not count as existing).
func (s *inMemoryStore) Exists(id string) bool {
	s.mu.RLock()
	defer s.mu.RUnlock()
	k, ok := s.byID[id]
	if !ok {
		return false
	}
	return len(k.links) > 0 || len(k.results) > 0
}
// newInMemoryKey allocates an empty key record for id.
func newInMemoryKey(id string) *inMemoryKey {
	k := &inMemoryKey{id: id}
	k.results = make(map[string]CacheResult)
	k.links = make(map[CacheInfoLink]map[string]struct{})
	k.backlinks = make(map[string]struct{})
	return k
}
// Walk invokes fn for every stored key ID. IDs are snapshotted under the
// read lock so fn runs without holding it; the first error aborts the walk.
func (s *inMemoryStore) Walk(fn func(string) error) error {
	s.mu.RLock()
	snapshot := make([]string, 0, len(s.byID))
	for id := range s.byID {
		snapshot = append(snapshot, id)
	}
	s.mu.RUnlock()

	for _, id := range snapshot {
		err := fn(id)
		if err != nil {
			return err
		}
	}
	return nil
}
// WalkResults invokes fn for every cache result stored under id.
// Results are snapshotted under the read lock so fn runs unlocked;
// an unknown id is not an error.
func (s *inMemoryStore) WalkResults(id string, fn func(CacheResult) error) error {
	s.mu.RLock()
	k, ok := s.byID[id]
	if !ok {
		s.mu.RUnlock()
		return nil
	}
	// Renamed from "copy", which shadowed the builtin copy function.
	results := make([]CacheResult, 0, len(k.results))
	for _, res := range k.results {
		results = append(results, res)
	}
	s.mu.RUnlock()
	for _, res := range results {
		if err := fn(res); err != nil {
			return err
		}
	}
	return nil
}
// Load returns the result stored under (id, resultID). ErrNotFound is
// returned (wrapped) when either the key or the result is missing.
func (s *inMemoryStore) Load(id string, resultID string) (CacheResult, error) {
	s.mu.RLock()
	defer s.mu.RUnlock()

	k, found := s.byID[id]
	if !found {
		return CacheResult{}, errors.Wrapf(ErrNotFound, "no such key %s", id)
	}
	res, found := k.results[resultID]
	if !found {
		return CacheResult{}, errors.WithStack(ErrNotFound)
	}
	return res, nil
}
// AddResult stores res under id, creating the key record on demand, and
// records the reverse resultID -> key-ID mapping.
func (s *inMemoryStore) AddResult(id string, res CacheResult) error {
	s.mu.Lock()
	defer s.mu.Unlock()

	key := s.byID[id]
	if key == nil {
		key = newInMemoryKey(id)
		s.byID[id] = key
	}
	key.results[res.ID] = res

	owners := s.byResult[res.ID]
	if owners == nil {
		owners = make(map[string]struct{})
		s.byResult[res.ID] = owners
	}
	owners[id] = struct{}{}
	return nil
}
// WalkIDsByResult invokes fn for every key ID that owns resultID.
// The IDs are snapshotted under the lock so fn runs unlocked.
func (s *inMemoryStore) WalkIDsByResult(resultID string, fn func(string) error) error {
	s.mu.Lock()
	snapshot := make([]string, 0, len(s.byResult[resultID]))
	for id := range s.byResult[resultID] {
		snapshot = append(snapshot, id)
	}
	s.mu.Unlock()

	for _, id := range snapshot {
		if err := fn(id); err != nil {
			return err
		}
	}
	return nil
}
// Release drops resultID from every key that owns it, removing the
// reverse index entry and garbage-collecting any key branches that end
// up with no results and no links (see emptyBranchWithParents).
func (s *inMemoryStore) Release(resultID string) error {
	s.mu.Lock()
	defer s.mu.Unlock()
	ids, ok := s.byResult[resultID]
	if !ok {
		// Unknown result: nothing to release.
		return nil
	}
	for id := range ids {
		k, ok := s.byID[id]
		if !ok {
			continue
		}
		delete(k.results, resultID)
		delete(s.byResult[resultID], id)
		// Drop the reverse-index bucket once its last owner is gone.
		if len(s.byResult[resultID]) == 0 {
			delete(s.byResult, resultID)
		}
		s.emptyBranchWithParents(k)
	}
	return nil
}
// emptyBranchWithParents removes k if it holds no results and no links,
// unlinks it from every parent, and recursively prunes parents that
// become empty as a consequence. Caller must hold s.mu for writing.
func (s *inMemoryStore) emptyBranchWithParents(k *inMemoryKey) {
	if len(k.results) != 0 || len(k.links) != 0 {
		// Still in use: keep the whole branch.
		return
	}
	for id := range k.backlinks {
		p, ok := s.byID[id]
		if !ok {
			continue
		}
		// Remove k from every link set of the parent; drop link sets
		// that become empty.
		for l := range p.links {
			delete(p.links[l], k.id)
			if len(p.links[l]) == 0 {
				delete(p.links, l)
			}
		}
		s.emptyBranchWithParents(p)
	}
	delete(s.byID, k.id)
}
// AddLink records the directed link (id --link--> target), creating both
// key records on demand, plus the matching backlink on the target.
func (s *inMemoryStore) AddLink(id string, link CacheInfoLink, target string) error {
	s.mu.Lock()
	defer s.mu.Unlock()

	src := s.byID[id]
	if src == nil {
		src = newInMemoryKey(id)
		s.byID[id] = src
	}
	dst := s.byID[target]
	if dst == nil {
		dst = newInMemoryKey(target)
		s.byID[target] = dst
	}

	targets := src.links[link]
	if targets == nil {
		targets = make(map[string]struct{})
		src.links[link] = targets
	}
	targets[target] = struct{}{}
	dst.backlinks[id] = struct{}{}
	return nil
}
// WalkLinks invokes fn for every target reachable from id via link.
// Targets are snapshotted under the read lock so fn runs unlocked;
// an unknown id yields no calls.
func (s *inMemoryStore) WalkLinks(id string, link CacheInfoLink, fn func(id string) error) error {
	s.mu.RLock()
	var targets []string
	if k, ok := s.byID[id]; ok {
		for target := range k.links[link] {
			targets = append(targets, target)
		}
	}
	s.mu.RUnlock()

	for _, target := range targets {
		if err := fn(target); err != nil {
			return err
		}
	}
	return nil
}
// HasLink reports whether the link (id --link--> target) is recorded.
func (s *inMemoryStore) HasLink(id string, link CacheInfoLink, target string) bool {
	s.mu.RLock()
	defer s.mu.RUnlock()

	k, ok := s.byID[id]
	if !ok {
		return false
	}
	targets, ok := k.links[link]
	if !ok {
		return false
	}
	_, ok = targets[target]
	return ok
}
// WalkBacklinks invokes fn for every (source key, link) pair that points
// at id. Pairs are collected under the read lock so fn runs unlocked.
func (s *inMemoryStore) WalkBacklinks(id string, fn func(id string, link CacheInfoLink) error) error {
	s.mu.RLock()
	k, ok := s.byID[id]
	if !ok {
		s.mu.RUnlock()
		return nil
	}
	// Parallel slices: outIDs[i] links to id via outLinks[i].
	var outIDs []string
	var outLinks []CacheInfoLink
	for bid := range k.backlinks {
		b, ok := s.byID[bid]
		if !ok {
			continue
		}
		for l, m := range b.links {
			// Only links whose target set actually contains id count.
			if _, ok := m[id]; !ok {
				continue
			}
			outIDs = append(outIDs, bid)
			outLinks = append(outLinks, CacheInfoLink{
				// Digest is re-rooted on the link's output index — see rootKey.
				Digest: rootKey(l.Digest, l.Output),
				Input: l.Input,
				Selector: l.Selector,
			})
		}
	}
	s.mu.RUnlock()
	for i := range outIDs {
		if err := fn(outIDs[i], outLinks[i]); err != nil {
			return err
		}
	}
	return nil
}
// NewInMemoryResultStorage returns a CacheResultStorage that keeps
// results in a process-local sync.Map.
func NewInMemoryResultStorage() CacheResultStorage {
	store := &inMemoryResultStore{m: new(sync.Map)}
	return store
}
// inMemoryResultStore maps result ID -> Result in a sync.Map, so
// Save/Load/Exists are safe for concurrent use without extra locking.
type inMemoryResultStore struct {
	m *sync.Map
}
// Save retains r in memory and returns its cache metadata stamped with
// createdAt.
func (s *inMemoryResultStore) Save(r Result, createdAt time.Time) (CacheResult, error) {
	s.m.Store(r.ID(), r)
	cr := CacheResult{ID: r.ID(), CreatedAt: createdAt}
	return cr, nil
}
// Load fetches the previously saved Result identified by res.ID;
// ErrNotFound when nothing was stored under that ID.
func (s *inMemoryResultStore) Load(ctx context.Context, res CacheResult) (Result, error) {
	stored, ok := s.m.Load(res.ID)
	if ok {
		return stored.(Result), nil
	}
	return nil, errors.WithStack(ErrNotFound)
}
// LoadRemote is part of CacheResultStorage; in-memory results have no
// remote/exportable form, so it always reports "no remote" (nil, nil).
func (s *inMemoryResultStore) LoadRemote(ctx context.Context, res CacheResult) (*Remote, error) {
	return nil, nil
}
// Exists reports whether a result with the given ID has been saved.
func (s *inMemoryResultStore) Exists(id string) bool {
	if _, ok := s.m.Load(id); ok {
		return true
	}
	return false
}
|
import { FC } from 'react';
import { Container, Paper } from '@mantine/core';
import { Cards } from '../components/Cards';
import { NAV_BAR_HEIGHT } from '../constants/styling';
// Full-height content surface rendered below the navigation bar.
export const MainContent: FC = () => {
  // Fill the viewport minus the fixed nav bar.
  const contentHeight = `calc(100vh - ${NAV_BAR_HEIGHT}px)`;

  return (
    <Paper radius={0} style={{ height: contentHeight }}>
      <Container pt={'md'}>
        <Cards />
      </Container>
    </Paper>
  );
};
|
#!/bin/bash
# Import and run selected benchmark models with nominal parameters and check
# agreement with reference values
#
# Expects environment variable BENCHMARK_COLLECTION to provide path to
# benchmark collection model directory
# Confirmed to be working
models="
Boehm_JProteomeRes2014
Borghans_BiophysChem1997
Elowitz_Nature2000
Schwen_PONE2014
Fujita_SciSignal2010
Sneyd_PNAS2002
Zheng_PNAS2012
Weber_BMC2015"
# Model needs fixing:
# Chen_MSB2009
#
# Not matching reference for unclear reasons
# Lucarelli_CellSystems2018
# Weber_BMC2015
#
# PEtab needs fixing: Bachmann_MSB2011
#
# Unsupported:
#
# Becker_Science2010: multiple models
#
# no reference value:
# Alkan_SciSignal2018
# Beer_MolBioSystems2014
# Blasi_CellSystems2016
# Crauste_CellSystems2017
# Hass_PONE2017
# Korkut_eLIFE2015
# Perelson_Science1996
# Bruno_JExpBio2016
#
# Timepoint-specific parameter overrides
# Fiedler_BMC2016
# Brannmark_JBC2010
# Isensee_JCB2018
# Sobotta_Frontiers2017
#
# yaml missing:
# Casaletto_PNAS2019
#
# Model missing:
# Merkle_PCB2016
#
# SBML extensions:
# Parmar_PCB2019
#
# Events:
# Swameye_PNAS2003
#
# state-dependent sigmas:
# Raia_CancerResearch2011
set -e

# Model directory defaults to $BENCHMARK_COLLECTION; -b overrides it below.
[[ -n "${BENCHMARK_COLLECTION}" ]] && model_dir="${BENCHMARK_COLLECTION}"

function show_help() {
  echo "-h: this help; -n: dry run, print commands; -b path_to_models_dir"
}

OPTIND=1
while getopts "h?nb:" opt; do
  case "$opt" in
  h | \?)
    show_help
    exit 0
    ;;
  n)
    # Dry run: print the commands instead of executing them.
    dry_run=1
    ;;
  b)
    model_dir=$OPTARG
    ;;
  esac
done
script_path=$(dirname "$BASH_SOURCE")
script_path=$(cd "$script_path" && pwd)
for model in $models; do
yaml="${model_dir}"/"${model}"/"${model}".yaml
amici_model_dir=test_bmc/"${model}"
mkdir -p "$amici_model_dir"
cmd_import="amici_import_petab --verbose -y ${yaml} -o ${amici_model_dir} -n ${model}"
cmd_run="$script_path/test_petab_model.py --verbose -y ${yaml} -d ${amici_model_dir} -m ${model} -c"
printf '=%.0s' {1..40}
printf " %s " "${model}"
printf '=%.0s' {1..40}
echo
if [[ -z "$dry_run" ]]; then
$cmd_import
$cmd_run
else
echo "$cmd_import"
echo "$cmd_run"
fi
printf '=%.0s' {1..100}
echo
echo
done
|
//
// SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
//
// SPDX-License-Identifier: BSL-1.0
//
#pragma once
#include "mbedtls/ssl.h"
#include "mbedtls/entropy.h"
#include "mbedtls/ctr_drbg.h"
#include "mbedtls/error.h"
#include "mbedtls/certs.h"
#include "mbedtls/esp_debug.h"
#include "esp_log.h"
namespace asio {
namespace ssl {
namespace mbedtls {
/**
 * Returns a human-readable description of an mbedTLS error code.
 *
 * NOTE(review): the text is rendered into a function-local static buffer,
 * so the returned pointer is only valid until the next call and the
 * function is not thread safe -- confirm callers consume it immediately.
 */
const char *error_message(int error_code)
{
    static char error_buf[100];
    mbedtls_strerror(error_code, error_buf, sizeof(error_buf));
    return error_buf;
}
/**
 * Throws an asio system error carrying MBEDTLS_ERR_SSL_ALLOC_FAILED,
 * tagged with the given location string for diagnostics.
 */
void throw_alloc_failure(const char* location)
{
    asio::error_code ec( MBEDTLS_ERR_SSL_ALLOC_FAILED, asio::error::get_mbedtls_category());
    asio::detail::throw_error(ec, location);
}
namespace error_codes {

/// True for any negative mbedTLS return value except the two non-fatal
/// "want read"/"want write" transport signals.
bool is_error(int ret)
{
    if (ret >= 0) {
        return false;
    }
    return !(ret == MBEDTLS_ERR_SSL_WANT_READ || ret == MBEDTLS_ERR_SSL_WANT_WRITE);
}

/// True when mbedTLS is waiting for the transport to become writable.
static bool want_write(int ret)
{
    return MBEDTLS_ERR_SSL_WANT_WRITE == ret;
}

/// True when mbedTLS needs more input from the transport.
static bool want_read(int ret)
{
    return MBEDTLS_ERR_SSL_WANT_READ == ret;
}

} // namespace error_codes
// I/O state of the engine after the last operation:
//   IDLE    - nothing pending
//   READING - more input from the transport is needed to finish
//   WRITING - output remains to be flushed to the transport
//   CLOSED  - close_notify sent; no further I/O expected
enum rw_state {
    IDLE, READING, WRITING, CLOSED
};
/**
 * SSL engine built on mbedTLS, mirroring the interface asio::ssl expects
 * from its OpenSSL-based engine.  All network bytes flow through a pair
 * of in-memory BIOs; the caller shuttles data between the external BIO
 * (ext_bio()) and the actual transport.
 */
class engine {
public:
    /// Takes shared ownership of the context holding certificates/keys.
    explicit engine(std::shared_ptr<context> ctx): ctx_(std::move(ctx)),
        bio_(bio::new_pair("mbedtls-engine")), state_(IDLE), verify_mode_(0) {}

    /// Stores the OpenSSL-style verify mode; translated to the mbedTLS
    /// equivalent when the handshake is configured.
    void set_verify_mode(asio::ssl::verify_mode mode)
    {
        verify_mode_ = mode;
    }

    /// Transport-facing endpoint of the internal BIO pair.
    bio* ext_bio() const
    {
        return bio_.second.get();
    }

    /// I/O state left behind by the last operation (see rw_state).
    rw_state get_state() const
    {
        return state_;
    }

    /// Sends the TLS close_notify alert and marks the engine CLOSED.
    int shutdown()
    {
        int ret = mbedtls_ssl_close_notify(&impl_.ssl_);
        if (ret) {
            impl::print_error("mbedtls_ssl_close_notify", ret);
        }
        state_ = CLOSED;
        return ret;
    }

    /// Client-side handshake entry point.
    int connect()
    {
        return handshake(true);
    }

    /// Server-side handshake entry point.
    int accept()
    {
        return handshake(false);
    }

    /// Encrypts and writes len bytes; a short write leaves state_ WRITING.
    int write(const void *buffer, int len)
    {
        int ret = impl_.write(buffer, len);
        state_ = ret == len ? IDLE: WRITING;
        return ret;
    }

    /// Reads and decrypts up to len bytes; a short read leaves state_ READING.
    int read(void *buffer, int len)
    {
        int ret = impl_.read(buffer, len);
        state_ = ret == len ? IDLE: READING;
        return ret;
    }

private:
    /// Performs one-time mbedTLS configuration on first use, then steps
    /// the handshake.
    int handshake(bool is_client_not_server)
    {
        if (impl_.before_handshake()) {
            impl_.configure(ctx_.get(), is_client_not_server, impl_verify_mode(is_client_not_server));
        }
        return do_handshake();
    }

    /// mbedTLS receive callback: pulls from the internal BIO, translating
    /// "no data available yet" into MBEDTLS_ERR_SSL_WANT_READ.
    static int bio_read(void *ctx, unsigned char *buf, size_t len)
    {
        auto bio = static_cast<BIO*>(ctx);
        int read = bio->read(buf, len);
        if (read <= 0 && bio->should_read()) {
            return MBEDTLS_ERR_SSL_WANT_READ;
        }
        return read;
    }

    /// mbedTLS send callback: pushes into the internal BIO, translating
    /// "buffer full" into MBEDTLS_ERR_SSL_WANT_WRITE.
    static int bio_write(void *ctx, const unsigned char *buf, size_t len)
    {
        auto bio = static_cast<BIO*>(ctx);
        int written = bio->write(buf, len);
        if (written <= 0 && bio->should_write()) {
            return MBEDTLS_ERR_SSL_WANT_WRITE;
        }
        return written;
    }

    /// Steps the handshake until complete, or until it blocks on
    /// transport I/O (state_ then records which direction is pending).
    int do_handshake()
    {
        int ret = 0;
        mbedtls_ssl_set_bio(&impl_.ssl_, bio_.first.get(), bio_write, bio_read, nullptr);
        while (impl_.ssl_.MBEDTLS_PRIVATE(state) != MBEDTLS_SSL_HANDSHAKE_OVER) {
            ret = mbedtls_ssl_handshake_step(&impl_.ssl_);
            if (ret != 0) {
                // WANT_READ/WANT_WRITE are expected flow control;
                // anything else is a genuine error worth logging.
                if (ret != MBEDTLS_ERR_SSL_WANT_READ && ret != MBEDTLS_ERR_SSL_WANT_WRITE) {
                    impl::print_error("mbedtls_ssl_handshake_step", ret);
                }
                if (ret == MBEDTLS_ERR_SSL_WANT_READ) {
                    state_ = READING;
                } else if (ret == MBEDTLS_ERR_SSL_WANT_WRITE) {
                    state_ = WRITING;
                }
                break;
            }
        }
        return ret;
    }

    // Converts OpenSSL verification mode to mbedtls enum
    int impl_verify_mode(bool is_client_not_server) const
    {
        int mode = MBEDTLS_SSL_VERIFY_UNSET;
        if (is_client_not_server) {
            if (verify_mode_ & SSL_VERIFY_PEER)
                mode = MBEDTLS_SSL_VERIFY_REQUIRED;
            else if (verify_mode_ == SSL_VERIFY_NONE)
                mode = MBEDTLS_SSL_VERIFY_NONE;
        } else {
            // Servers may demand (REQUIRED) or merely request (OPTIONAL)
            // a client certificate.
            if (verify_mode_ & SSL_VERIFY_FAIL_IF_NO_PEER_CERT)
                mode = MBEDTLS_SSL_VERIFY_REQUIRED;
            else if (verify_mode_ & SSL_VERIFY_PEER)
                mode = MBEDTLS_SSL_VERIFY_OPTIONAL;
            else if (verify_mode_ == SSL_VERIFY_NONE)
                mode = MBEDTLS_SSL_VERIFY_NONE;
        }
        return mode;
    }

    /// Owns all raw mbedTLS state and wraps the low-level API calls.
    struct impl {
        /// Logs an mbedTLS error code along with its text description.
        static void print_error(const char* function, int error_code)
        {
            constexpr const char *TAG="mbedtls-engine-impl";
            ESP_LOGE(TAG, "%s() returned -0x%04X", function, -error_code);
            ESP_LOGI(TAG, "-0x%04X: %s", -error_code, error_message(error_code));
        }

        /// True until the first handshake step has run (ssl state still 0).
        bool before_handshake() const
        {
            return ssl_.MBEDTLS_PRIVATE(state) == 0;
        }

        /// Raw TLS write; WANT_WRITE is expected flow control, not logged.
        int write(const void *buffer, int len)
        {
            int ret = mbedtls_ssl_write(&ssl_, static_cast<const unsigned char *>(buffer), len);
            if (ret < 0 && ret != MBEDTLS_ERR_SSL_WANT_WRITE) {
                print_error("mbedtls_ssl_write", ret);
            }
            return ret;
        }

        /// Raw TLS read; WANT_READ is expected flow control, not logged.
        int read(void *buffer, int len)
        {
            int ret = mbedtls_ssl_read(&ssl_, static_cast<unsigned char *>(buffer), len);
            if (ret < 0 && ret != MBEDTLS_ERR_SSL_WANT_READ) {
                print_error("mbedtls_ssl_read", ret);
            }
            return ret;
        }

        /// Initializes every mbedTLS structure and seeds the CTR-DRBG.
        impl()
        {
            // Personalization string for DRBG seeding.
            const unsigned char pers[] = "asio ssl";
            mbedtls_ssl_init(&ssl_);
            mbedtls_ssl_config_init(&conf_);
            mbedtls_ctr_drbg_init(&ctr_drbg_);
#ifdef CONFIG_MBEDTLS_DEBUG
            mbedtls_esp_enable_debug_log(&conf_, CONFIG_MBEDTLS_DEBUG_LEVEL);
#endif
            mbedtls_entropy_init(&entropy_);
            mbedtls_ctr_drbg_seed(&ctr_drbg_, mbedtls_entropy_func, &entropy_, pers, sizeof(pers));
            mbedtls_x509_crt_init(&public_cert_);
            mbedtls_pk_init(&pk_key_);
            mbedtls_x509_crt_init(&ca_cert_);
        }

        /// Applies protocol defaults, verify mode, own certificate/key and
        /// CA chain from ctx, then binds the config to the ssl context.
        /// Returns false (after logging) on the first failing step.
        bool configure(context *ctx, bool is_client_not_server, int mbedtls_verify_mode)
        {
            // NOTE(review): these three structures were already initialized
            // in the constructor; re-initializing here looks redundant --
            // confirm it is intentional (e.g. for reconfiguration).
            mbedtls_x509_crt_init(&public_cert_);
            mbedtls_pk_init(&pk_key_);
            mbedtls_x509_crt_init(&ca_cert_);
            int ret = mbedtls_ssl_config_defaults(&conf_, is_client_not_server ? MBEDTLS_SSL_IS_CLIENT: MBEDTLS_SSL_IS_SERVER,
                                                  MBEDTLS_SSL_TRANSPORT_STREAM, MBEDTLS_SSL_PRESET_DEFAULT);
            if (ret) {
                print_error("mbedtls_ssl_config_defaults", ret);
                return false;
            }
            mbedtls_ssl_conf_rng(&conf_, mbedtls_ctr_drbg_random, &ctr_drbg_);
            mbedtls_ssl_conf_authmode(&conf_, mbedtls_verify_mode);
            // Own certificate + private key (only if both are provided).
            if (ctx->cert_chain_.size() > 0 && ctx->private_key_.size() > 0) {
                ret = mbedtls_x509_crt_parse(&public_cert_, ctx->data(container::CERT), ctx->size(container::CERT));
                if (ret < 0) {
                    print_error("mbedtls_x509_crt_parse", ret);
                    return false;
                }
                ret = mbedtls_pk_parse_key(&pk_key_, ctx->data(container::PRIVKEY), ctx->size(container::PRIVKEY),
                                           nullptr, 0, mbedtls_ctr_drbg_random, &ctr_drbg_);
                if (ret < 0) {
                    print_error("mbedtls_pk_parse_keyfile", ret);
                    return false;
                }
                ret = mbedtls_ssl_conf_own_cert(&conf_, &public_cert_, &pk_key_);
                if (ret) {
                    print_error("mbedtls_ssl_conf_own_cert", ret);
                    return false;
                }
            }
            // CA chain for peer verification (explicitly cleared if absent).
            if (ctx->ca_cert_.size() > 0) {
                ret = mbedtls_x509_crt_parse(&ca_cert_, ctx->data(container::CA_CERT), ctx->size(container::CA_CERT));
                if (ret < 0) {
                    print_error("mbedtls_x509_crt_parse", ret);
                    return false;
                }
                mbedtls_ssl_conf_ca_chain(&conf_, &ca_cert_, nullptr);
            } else {
                mbedtls_ssl_conf_ca_chain(&conf_, nullptr, nullptr);
            }
            ret = mbedtls_ssl_setup(&ssl_, &conf_);
            if (ret) {
                print_error("mbedtls_ssl_setup", ret);
                return false;
            }
            return true;
        }

        mbedtls_ssl_context ssl_{};
        mbedtls_entropy_context entropy_{};
        mbedtls_ctr_drbg_context ctr_drbg_{};
        mbedtls_ssl_config conf_{};
        mbedtls_x509_crt public_cert_{};
        mbedtls_pk_context pk_key_{};
        mbedtls_x509_crt ca_cert_{};
    };

    impl impl_{};
    std::shared_ptr<context> ctx_;
    // first: mbedTLS-facing BIO, second: transport-facing BIO.
    std::pair<std::shared_ptr<bio>, std::shared_ptr<bio>> bio_;
    enum rw_state state_;
    asio::ssl::verify_mode verify_mode_;
};
} } } // namespace asio::ssl::mbedtls
|
# Regenerate the harvest-rate plot and publish the resulting figures into
# the paper's figure directory of the website-discovery repository.
python plot_harvestrate.py
cp figures/* ../../../../website-discovery/paper/figures/harvestrate
|
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.domain.organization;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.List;
import java.util.Map;
import org.opentaps.base.entities.CustomTimePeriod;
import org.opentaps.base.entities.PartyGroup;
import org.opentaps.base.entities.PaymentMethod;
import org.opentaps.base.entities.TermType;
import org.opentaps.foundation.entity.EntityInterface;
import org.opentaps.foundation.entity.EntityNotFoundException;
import org.opentaps.foundation.repository.RepositoryException;
import org.opentaps.foundation.repository.RepositoryInterface;
/**
 * Organization repository.
 * Provides lookups of organizations, their fiscal time periods, currency
 * conversion factors, accounting tag configuration and agreement term types.
 */
public interface OrganizationRepositoryInterface extends RepositoryInterface {

    /**
     * Finds an <code>Organization</code> by ID from the database.
     * @param organizationPartyId the party ID for the organization
     * @return the <code>Organization</code> found
     * @throws RepositoryException if an error occurs
     * @throws EntityNotFoundException no <code>Organization</code> is found for the given id
     */
    public Organization getOrganizationById(String organizationPartyId) throws RepositoryException, EntityNotFoundException;

    /**
     * Gets all time periods for the organization, including those which are closed or in the past or future.
     * @param organizationPartyId the party ID for the organization
     * @return the list of <code>CustomTimePeriod</code> for the organization
     * @throws RepositoryException if an error occurs
     */
    public List<CustomTimePeriod> getAllFiscalTimePeriods(String organizationPartyId) throws RepositoryException;

    /**
     * Finds the open time periods for an organization.
     * This method selects only those time periods which are relevant to the fiscal
     * operations of a company.
     * @param organizationPartyId the party ID for the organization
     * @return the list of <code>CustomTimePeriod</code> found
     * @throws RepositoryException if an error occurs
     */
    public List<CustomTimePeriod> getOpenFiscalTimePeriods(String organizationPartyId) throws RepositoryException;

    /**
     * Finds the open time periods for an organization that the given date falls in.
     * This method selects only those time periods which are relevant to the fiscal
     * operations of a company.
     * @param organizationPartyId the party ID for the organization
     * @param asOfDate the date for which to get the open fiscal periods
     * @return the list of <code>CustomTimePeriod</code> found
     * @throws RepositoryException if an error occurs
     */
    public List<CustomTimePeriod> getOpenFiscalTimePeriods(String organizationPartyId, Timestamp asOfDate) throws RepositoryException;

    /**
     * Finds the open time periods for an organization which are of the provided fiscal period types
     * (FISCAL_YEAR, FISCAL_QUARTER, etc.) that the given date falls in.
     * This method selects only those time periods which are relevant to the fiscal
     * operations of a company.
     * @param organizationPartyId the party ID for the organization
     * @param fiscalPeriodTypes a list of the fiscal period types
     * @param asOfDate the date for which to get the open fiscal periods
     * @return the list of <code>CustomTimePeriod</code> found
     * @throws RepositoryException if an error occurs
     */
    public List<CustomTimePeriod> getOpenFiscalTimePeriods(String organizationPartyId, List<String> fiscalPeriodTypes, Timestamp asOfDate) throws RepositoryException;

    /**
     * Finds the default <code>PaymentMethod</code> for the given organization.
     * @param organizationPartyId the party ID for the organization
     * @return the default <code>PaymentMethod</code>
     * @throws RepositoryException if an error occurs
     */
    public PaymentMethod getDefaultPaymentMethod(String organizationPartyId) throws RepositoryException;

    /**
     * Gets the conversion factor from the given organization base currency into the given currency.
     * @param organizationPartyId the party ID for the organization
     * @param currencyUomId a <code>String</code> value
     * @return a <code>BigDecimal</code> value
     * @exception RepositoryException if an error occurs
     */
    public BigDecimal determineUomConversionFactor(String organizationPartyId, String currencyUomId) throws RepositoryException;

    /**
     * Gets the conversion factor from the given organization base currency into the given currency taking the conversion rate as of the given date.
     * @param organizationPartyId the party ID for the organization
     * @param currencyUomId a <code>String</code> value
     * @param asOfDate a <code>Timestamp</code> value
     * @return a <code>BigDecimal</code> value
     * @exception RepositoryException if an error occurs
     */
    public BigDecimal determineUomConversionFactor(String organizationPartyId, String currencyUomId, Timestamp asOfDate) throws RepositoryException;

    /**
     * Gets the configured (non-null) Tag Types for the given organization, as a <code>Map</code> of
     * {index value: configured <code>enumTypeId</code>}. For example: {1=DIVISION_TAG, 2=DEPARTMENT_TAG, 3=ACTIVITY_TAG}
     * @param organizationPartyId the party ID for the organization
     * @param accountingTagUsageTypeId the tag usage, for example "FINANCIALS_REPORTS", "PRCH_ORDER_ITEMS" ...
     * @return a <code>Map</code> of tagIndex: enumTypeId
     * @throws RepositoryException if an error occurs
     */
    public Map<Integer, String> getAccountingTagTypes(String organizationPartyId, String accountingTagUsageTypeId) throws RepositoryException;

    /**
     * Gets the configuration for the given organization and usage type.
     * @param organizationPartyId the party ID for the organization
     * @param accountingTagUsageTypeId the tag usage, for example "FINANCIALS_REPORTS", "PRCH_ORDER_ITEMS" ...
     * @return a list of <code>AccountingTagConfigurationForOrganizationAndUsage</code>, each one representing the tag type and available tag values for each tag index
     * @throws RepositoryException if an error occurs
     */
    public List<AccountingTagConfigurationForOrganizationAndUsage> getAccountingTagConfiguration(String organizationPartyId, String accountingTagUsageTypeId) throws RepositoryException;

    /**
     * Gets the list of term type IDs that can be used for the given document type ID.
     * @param documentTypeId the document type to get the agreement term types for, eg: "SALES_INVOICE", "PURCHASE_ORDER", ...
     * @return a list of agreement term type IDs
     * @throws RepositoryException if an error occurs
     */
    public List<String> getValidTermTypeIds(String documentTypeId) throws RepositoryException;

    /**
     * Gets the list of term types that can be used for the given document type ID.
     * @param documentTypeId the document type to get the agreement term types for, eg: "SALES_INVOICE", "PURCHASE_ORDER", ...
     * @return a list of agreement term types
     * @throws RepositoryException if an error occurs
     */
    public List<TermType> getValidTermTypes(String documentTypeId) throws RepositoryException;

    /**
     * Gets all the organization parties having the party role INTERNAL_ORGANIZATIO.
     * @return a list of organization parties
     * @throws RepositoryException if an error occurs
     */
    public List<Organization> getAllValidOrganizations() throws RepositoryException;

    /**
     * Validates the accounting tags from a given <code>Map</code>, compared to the required tags as configured for the given organization and usage type.
     * @param tags a <code>Map<String, Object></code> value, which can simply be the Map representing an entity
     * @param organizationPartyId the party ID for the organization
     * @param accountingTagUsageTypeId the tag usage, for example "FINANCIALS_REPORTS", "PRCH_ORDER_ITEMS" ...
     * @param prefix the prefix of the Map keys corresponding to the accounting tags
     * @return a list of the <code>AccountingTagConfigurationForOrganizationAndUsage</code> that are missing
     * @throws RepositoryException if an error occurs
     */
    @SuppressWarnings("unchecked")
    public List<AccountingTagConfigurationForOrganizationAndUsage> validateTagParameters(Map tags, String organizationPartyId, String accountingTagUsageTypeId, String prefix) throws RepositoryException;

    /**
     * Validates the accounting tags from a given <code>EntityInterface</code>, compared to the required tags as configured for the given organization and usage type.
     * @param entity an <code>EntityInterface</code> value
     * @param organizationPartyId the party ID for the organization
     * @param accountingTagUsageTypeId the tag usage, for example "FINANCIALS_REPORTS", "PRCH_ORDER_ITEMS" ...
     * @return a list of the <code>AccountingTagConfigurationForOrganizationAndUsage</code> that are missing
     * @throws RepositoryException if an error occurs
     */
    public List<AccountingTagConfigurationForOrganizationAndUsage> validateTagParameters(EntityInterface entity, String organizationPartyId, String accountingTagUsageTypeId) throws RepositoryException;

    /**
     * Returns a list of the party groups with the role ORGANIZATION_TEMPL.
     * @return a list of <code>PartyGroup</code> templates
     * @throws RepositoryException if an error occurs
     */
    public List<PartyGroup> getOrganizationTemplates() throws RepositoryException;

    /**
     * Returns a list of party groups with the role INTERNAL_ORGANIZATIO that have no PartyAcctgPreference yet.
     * @return a list of <code>PartyGroup</code> without ledger setup
     * @throws RepositoryException if an error occurs
     */
    public List<PartyGroup> getOrganizationWithoutLedgerSetup() throws RepositoryException;
}
|
# Exercise the shell's environment builtins; cat -e marks line endings
# with '$' so empty vs. missing output is visible.
setenv hello pouet
setenv ahah hehe
env | grep hello | cat -e
env | grep ahah | cat -e
# Remove one variable and confirm it no longer appears.
unsetenv hello
env | grep hello | cat -e
# env VAR=value with no command prints the modified environment, so the
# temporary variable is visible here without touching the shell's own env.
env temporary=hello | grep temporary | cat -e
|
#require 'dm-core'
require 'resourceful'
require 'extlib'
require 'json'

# Directory containing this file; used to locate the adapter's companion files.
__DIR__ = File.dirname(__FILE__)

require File.join(__DIR__, 'dm-ssbe-adapter', 'ssbe_authenticator')
require File.join(__DIR__, 'dm-types', 'href')
require File.join(__DIR__, 'dm-ssbe-adapter', 'model_extensions')
require File.join(__DIR__, 'dm-ssbe-adapter', 'service')
module DataMapper::Adapters
# Base adapter owning a Resourceful HTTP accessor with in-memory caching,
# logging through DataMapper's logger.
class HttpAdapter < AbstractAdapter
  attr_reader :http

  # Builds the shared HTTP accessor used for every request.
  def initialize(name, options = {})
    super
    accessor = Resourceful::HttpAccessor.new
    accessor.cache_manager = Resourceful::InMemoryCacheManager.new
    accessor.logger = DataMapper.logger
    @http = accessor
  end

  # The logger attached to the HTTP accessor.
  def logger
    @http.logger
  end
end
# DataMapper adapter for SSBE services speaking the
# application/vnd.absperf.ssbe+json media type over HTTP.
class SsbeAdapter < HttpAdapter
  attr_reader :services_uri

  # Media type sent and accepted for every request.
  SSJ = 'application/vnd.absperf.ssbe+json'

  # Registers SSBE authentication and remembers the service directory URI.
  def initialize(name, options = {})
    super
    username, password = options[:username], options[:password]
    http.add_authenticator(Resourceful::SSBEAuthenticator.new(username, password))
    @services_uri = options[:services_uri]
    Service.default_repository_name = @name
  end

  # POSTs each new resource to its collection and merges server-assigned
  # attributes (ids, timestamps, ...) back into the resource.
  def create(resources)
    resources.each do |resource|
      http_resource = collection_resource_for(resource)
      document = serialize(resource)
      response = http_resource.post(document, :content_type => SSJ)
      update_attributes(resource, deserialize(response.body))
    end
  end

  # Reads either a single record addressed by href, or a whole collection.
  # A 404 on a direct href lookup yields an empty result set.
  def read(query)
    ## [dm-core] need an easy way to determine if we're
    # looking up a single record by key
    if querying_on_href?(query)
      operand = query.conditions.operands.first
      href = case operand.subject
             when DataMapper::Property
               operand.value
             when DataMapper::Associations::Relationship
               property_name = "#{operand.subject.inverse.name}_href".to_sym
               operand.value.attribute_get(property_name)
             end
      http_resource = http.resource(href, :accept => SSJ)
      begin
        response = http_resource.get
      rescue Resourceful::UnsuccessfulHttpRequestError => e
        if e.http_response.code == 404
          return []
        else
          raise e
        end
      end
      record = deserialize(response.body)
      if record.has_key?(:items)
        query.filter_records(record[:items])
      else
        [record]
      end
    else
      resource = collection_resource_for(query)
      opts = {}
      # BUGFIX: Hash#merge returns a new hash that was discarded here, so
      # query.reload? never bypassed the cache; merge! mutates opts in place.
      opts.merge!(:cache_control => 'no-cache') if query.reload?
      response = resource.get(opts)
      records = deserialize(response.body)
      query.filter_records(records[:items])
    end
  end

  # PUTs the changed attributes to each resource's href and merges the
  # server's response back into the resource.
  def update(attributes, collection)
    collection.each do |resource|
      http_resource = http.resource(resource.href, :accept => SSJ)
      response = http_resource.put(serialize(attributes, collection.model), :content_type => SSJ)
      update_attributes(resource, deserialize(response.body))
    end
  end

  # DELETEs each resource at its href.
  def delete(collection)
    collection.each do |resource|
      http_resource = http.resource(resource.href, :accept => SSJ)
      response = http_resource.delete
      update_attributes(resource, deserialize(response.body))
    end
  end

  protected

  ## [dm-core] resource.update_fields(attributes)
  # updates any changed fields from a response
  # eg, created_at, updated_at, etc...
  def update_attributes(resource, attributes)
    attributes.each do |field, value|
      property = resource.model.properties.detect { |p| p.field == field }
      property.set!(resource, value) if property
    end
    resource
  end

  # Serializes a resource's dirty attributes (or a plain attributes hash)
  # to an SSBE JSON document, tagging it with the model as :_type.
  def serialize(resource_or_attributes, model = nil)
    if resource_or_attributes.is_a?(DataMapper::Resource)
      model = resource_or_attributes.model
      attributes_as_fields(resource_or_attributes.dirty_attributes)
    else
      attributes_as_fields(resource_or_attributes)
    end.merge(:_type => model).to_json
  end

  # Parses an SSBE JSON document into a symbol/string-indifferent Mash.
  def deserialize(document)
    Mash.new(JSON.parse(document))
  end

  # True when the query is a single equality lookup by href (direct key
  # lookup, or a many-to-one association inverted by DataMapper).
  def querying_on_href?(query)
    return false unless query.conditions.operands.size == 1
    operand = query.conditions.operands.first
    return false unless operand.is_a?(DataMapper::Query::Conditions::EqualToComparison)
    case operand.subject
    when DataMapper::Property
      # .get("http://articles/1")
      query.model.key.first == operand.subject
    when DataMapper::Associations::OneToMany::Relationship
      # many to one (comment.article), but DM inverts it for the query
      true
    end
  end

  # Resolves the HTTP resource for the collection that the given query,
  # model or resource belongs to.
  def collection_resource_for(object)
    if object.is_a?(DataMapper::Query)
      query = object
      model = query.model
    elsif object.is_a?(DataMapper::Model)
      model = object
    elsif object.is_a?(DataMapper::Resource)
      resource = object
      model = resource.model
    else
      raise ArgumentError, "Unable to determine collection resource for #{object}"
    end
    collection_uri = if model == Service
      @services_uri
    elsif query && uri = association_collection_uri(query)
      uri
    elsif model && service = Service[model.service_name]
      service.resource_href
    elsif resource
      # TODO: make this work if there's more than one relationship defined
      # on the child, and just more robust in general
      relationship = resource.model.relationships.values.first
      parent_relationship = relationship.inverse
      parent_property_name = "#{parent_relationship.name}_href".to_sym
      parent_resource = resource.send(relationship.name)
      parent_resource.attribute_get(parent_property_name)
    end
    http.resource(collection_uri, :accept => SSJ)
  end

  # For association queries, extracts the collection URI either from an
  # Href-typed property or from the parent side of a many-to-one.
  def association_collection_uri(query)
    return false unless query.conditions.operands.size == 1
    operand = query.conditions.operands.first
    return false unless operand.is_a?(DataMapper::Query::Conditions::EqualToComparison)
    case operand.subject
    when DataMapper::Property
      return false unless operand.subject.type == DataMapper::Types::Href
      operand.value
    when DataMapper::Associations::ManyToOne::Relationship
      # DataMapper passes it to the adapters backwards, so this is for
      # article.comments
      inverse_relationship = operand.subject.inverse
      property_name = "#{inverse_relationship.name}_href".to_sym
      operand.value.attribute_get(property_name)
    end
  end
end
end
|
"""
Run tinyframe.py <input file> [int value reg0] [int value reg1] ...
Interpreter for a tiny language with frame introspection. Supports
integer values and function values. The machine is
register based with untyped registers.
Opcodes:
ADD r1 r2 => r3 # integer addition or function combination,
# depending on argument types
# if r1 has a function f and r2 has a function g
# the result will be a function lambda arg : f(g(arg))
# this is also a way to achieve indirect call
INTROSPECT r1 => r2 # frame introspection - load a register with number
# pointed by r1 (must be int) to r2
PRINT r # print a register
CALL r1 r2 => r3 # call a function in register one with argument in r2 and
# result in r3
LOAD_FUNCTION <name> => r # load a function named name into register r
LOAD <int constant> => r # load an integer constant into register r
RETURN r1
JUMP @label # jump + or - by x opcodes
JUMP_IF_ABOVE r1 r2 @label # jump if value in r1 is above
# value in r2
function argument always comes in r0
"""
from pypy.rlib.streamio import open_file_as_stream
from pypy.jit.tl.tinyframe.support import sort
from pypy.rlib.unroll import unrolling_iterable
from pypy.rlib.jit import JitDriver, hint, dont_look_inside
# Symbolic opcode names; unrolling_iterable lets the RPython JIT unroll
# dispatch loops over this fixed list.
opcodes = ['ADD', 'INTROSPECT', 'PRINT', 'CALL', 'LOAD', 'LOAD_FUNCTION',
           'RETURN', 'JUMP', 'JUMP_IF_ABOVE']
unrolling_opcodes = unrolling_iterable(opcodes)

# Bind each opcode name to its integer value at module level
# (ADD == 0, INTROSPECT == 1, ...).
for i, opcode in enumerate(opcodes):
    globals()[opcode] = i
class Code(object):
    """Compiled representation of one function.

    Attributes:
        code      -- bytecode as a string of chr()-encoded bytes
        regno     -- number of registers a frame must allocate
        functions -- Code objects referenced by LOAD_FUNCTION
        name      -- function name as written in the source
    """

    def __init__(self, code, regno, functions, name):
        self.name = name
        self.functions = functions
        self.regno = regno
        self.code = code
class Parser(object):
    """One-shot compiler from textual programs to Code objects.

    'name:' lines introduce a function, '@label' lines record jump
    targets, everything else is 'OPCODE args'.  The top-level function
    must be called 'main' and is the Code object returned by compile();
    nested functions end up in Code.functions in definition order.
    """

    # Name of the function currently being compiled (None before the
    # first 'name:' line).
    name = None

    def compile(self, strrepr):
        """Compile *strrepr* and return the Code object for 'main'."""
        self.code = []
        self.maxregno = 0
        self.functions = {}
        self.labels = {}
        lines = strrepr.splitlines()
        for line in lines:
            # Strip trailing '#' comments.
            comment = line.find('#')
            if comment != -1:
                assert comment >= 0
                line = line[:comment]
            line = line.strip(" ")
            if not line:
                continue
            if line.endswith(':'):
                # a name
                self.finish_currect_code()
                self.name = line[:-1]
                continue
            if line.startswith('@'):
                # Label: remember the current bytecode offset.
                # NOTE(review): labels appear to need defining before any
                # jump that targets them (no forward references) -- confirm.
                self.labels[line[1:]] = len(self.code)
                continue
            firstspace = line.find(" ")
            assert firstspace >= 0
            opcode = line[:firstspace]
            args = line[firstspace + 1:]
            # Dispatch to compile_<OPCODE>; unrolled for RPython.
            for name in unrolling_opcodes:
                if opcode == name:
                    getattr(self, 'compile_' + name)(args)
        # Collect nested functions in definition order.
        values = self.functions.values()
        sort(values)
        functions = [code for i, code in values]
        assert self.name == 'main'
        return Code("".join([chr(i) for i in self.code]), self.maxregno + 1,
                    functions, self.name)

    def finish_currect_code(self):
        """Seal the function compiled so far and reset per-function state.

        (Method name typo 'currect' is kept as-is: it is part of the
        class's interface.)
        """
        if self.name is None:
            assert not self.code
            return
        code = Code("".join([chr(i) for i in self.code]), self.maxregno + 1,
                    [], self.name)
        # Store with a sequence number so definition order survives the dict.
        self.functions[self.name] = (len(self.functions), code)
        self.name = None
        self.labels = {}
        self.code = []
        self.maxregno = 0

    def rint(self, arg):
        """Parse a register token 'rN' to N, tracking the highest register."""
        assert arg.startswith('r')
        no = int(arg[1:])
        self.maxregno = max(self.maxregno, no)
        return no

    def compile_ADD(self, args):
        # ADD r1 r2 => r3
        args, result = args.split("=")
        result = result[1:]
        arg0, arg1 = args.strip(" ").split(" ")
        self.code += [ADD, self.rint(arg0), self.rint(arg1),
                      self.rint(result.strip(" "))]

    def compile_LOAD(self, args):
        # LOAD <int constant> => r  (constant stored as one byte)
        arg0, result = args.split("=")
        result = result[1:]
        arg0 = arg0.strip(" ")
        self.code += [LOAD, int(arg0), self.rint(result.strip(" "))]

    def compile_PRINT(self, args):
        # PRINT r
        arg = self.rint(args.strip(" "))
        self.code += [PRINT, arg]

    def compile_RETURN(self, args):
        # RETURN r
        arg = self.rint(args.strip(" "))
        self.code += [RETURN, arg]

    def compile_JUMP_IF_ABOVE(self, args):
        # JUMP_IF_ABOVE r1 r2 @label
        arg0, arg1, label = args.split(" ")
        self.code += [JUMP_IF_ABOVE, self.rint(arg0.strip(" ")),
                      self.rint(arg1.strip(" ")), self.labels[label[1:]]]

    def compile_LOAD_FUNCTION(self, args):
        # LOAD_FUNCTION <name> => r  (encoded as the function's index)
        name, res = args.split("=")
        res = res[1:]
        no, code = self.functions[name.strip(" ")]
        self.code += [LOAD_FUNCTION, no, self.rint(res.strip(" "))]

    def compile_CALL(self, args):
        # CALL r1 r2 => r3
        args, res = args.split("=")
        res = res[1:]
        arg0, arg1 = args.strip(" ").split(" ")
        self.code += [CALL, self.rint(arg0.strip(" ")),
                      self.rint(arg1.strip(" ")),
                      self.rint(res.strip(" "))]

    def compile_INTROSPECT(self, args):
        # INTROSPECT r1 => r2
        arg, res = args.split("=")
        res = res[1:]
        self.code += [INTROSPECT, self.rint(arg.strip(" ")),
                      self.rint(res.strip(" "))]

    def compile_JUMP(self, args):
        # Unconditional JUMP is documented but not implemented yet.
        raise NotImplementedError
def compile(strrepr):
    """Compile the textual program *strrepr* into a Code object."""
    return Parser().compile(strrepr)
def disassemble(code):
    """Return the bytecode of *code* as a list of integer byte values."""
    return list(map(ord, code.code))
class Object(object):
    """Abstract base class for all machine values (integers, functions)."""

    def __init__(self):
        raise NotImplementedError("abstract base class")

    def add(self, other):
        # ADD opcode: integer addition or function composition in subclasses.
        raise NotImplementedError("abstract base class")

    def gt(self, other):
        # JUMP_IF_ABOVE comparison; implemented by Int.
        raise NotImplementedError("abstract base class")

    def repr(self):
        # Textual form used by the PRINT opcode.
        raise NotImplementedError("abstract base class")
class Int(Object):
    """Boxed machine integer."""

    def __init__(self, val):
        self.val = val

    def add(self, other):
        # ADD on two Ints yields a fresh Int; the operands are untouched.
        total = self.val + other.val
        return Int(total)

    def gt(self, other):
        return other.val < self.val

    def repr(self):
        return str(self.val)
class Func(Object):
    """Callable machine value wrapping a compiled Code object."""

    def __init__(self, code):
        self.code = code

    def call(self, arg):
        # Execute the body in a fresh frame with r0 = arg.
        frame = Frame(self.code, arg)
        return frame.interpret()

    def add(self, other):
        # ADD on two functions builds the composition self(other(arg)).
        return CombinedFunc(self, other)

    def repr(self):
        return "<function %s>" % self.code.name
class CombinedFunc(Func):
    """Composition outer(inner(arg)) of two machine functions."""

    def __init__(self, outer, inner):
        self.inner = inner
        self.outer = outer

    def call(self, arg):
        intermediate = self.inner.call(arg)
        return self.outer.call(intermediate)

    def repr(self):
        return "<function %s(%s)>" % (self.outer.repr(), self.inner.repr())
# JIT driver: bytecode and position identify a trace location (greens);
# the frame is the mutable, virtualizable state (reds).
driver = JitDriver(greens = ['i', 'code'], reds = ['self'],
                   virtualizables = ['self'])
class Frame(object):
    """One activation record: a Code object plus its register file.

    _virtualizable2_ lets the RPython JIT keep registers out of the heap
    while tracing.
    """

    _virtualizable2_ = ['registers[*]', 'code']

    def __init__(self, code, arg=None):
        self = hint(self, access_directly=True, fresh_virtualizable=True)
        self.code = code
        # The function argument always arrives in r0.
        self.registers = [None] * code.regno
        self.registers[0] = arg

    def interpret(self):
        """Run the bytecode until RETURN and return the resulting Object."""
        i = 0
        code = self.code.code
        while True:
            driver.jit_merge_point(self=self, code=code, i=i)
            opcode = ord(code[i])
            if opcode == LOAD:
                # LOAD <byte constant> => r  (constants limited to 0..255)
                self.registers[ord(code[i + 2])] = Int(ord(code[i + 1]))
                i += 3
            elif opcode == ADD:
                # Polymorphic: Int addition or Func composition.
                arg1 = self.registers[ord(code[i + 1])]
                arg2 = self.registers[ord(code[i + 2])]
                self.registers[ord(code[i + 3])] = arg1.add(arg2)
                i += 4
            elif opcode == RETURN:
                return self.registers[ord(code[i + 1])]
            elif opcode == JUMP_IF_ABOVE:
                arg0 = self.registers[ord(code[i + 1])]
                arg1 = self.registers[ord(code[i + 2])]
                tgt = ord(code[i + 3])
                if arg0.gt(arg1):
                    i = tgt
                    # Backward jump: candidate loop header for the JIT.
                    driver.can_enter_jit(code=code, i=tgt, self=self)
                else:
                    i += 4
            elif opcode == LOAD_FUNCTION:
                f = self.code.functions[ord(code[i + 1])]
                self.registers[ord(code[i + 2])] = Func(f)
                i += 3
            elif opcode == CALL:
                f = self.registers[ord(code[i + 1])]
                arg = self.registers[ord(code[i + 2])]
                assert isinstance(f, Func)
                self.registers[ord(code[i + 3])] = f.call(arg)
                i += 4
            elif opcode == PRINT:
                arg = self.registers[ord(code[i + 1])]
                print arg.repr()
                i += 2
            elif opcode == INTROSPECT:
                self.introspect(ord(code[i + 1]), ord(code[i + 2]))
                i += 3
            else:
                raise Exception("unimplemented opcode %s" % opcodes[opcode])

    @dont_look_inside
    def introspect(self, rarg, rresult):
        """Copy the register whose number is held in r<rarg> into r<rresult>.

        Opaque to the JIT (dont_look_inside) because it indexes the
        register file with a runtime value.
        """
        source = self.registers[rarg]
        assert isinstance(source, Int)
        self.registers[rresult] = self.registers[source.val]
def interpret(code):
    """Execute *code* in a fresh frame with no argument; return the result."""
    frame = Frame(code)
    return frame.interpret()
def main(fname, argv):
    """Read the program from *fname*, preload registers from *argv*, run it.

    Each command line value argv[k] is parsed as an int and stored into
    register k of the main frame before execution.
    """
    f = open_file_as_stream(fname, "r")
    input = f.readall()
    f.close()
    code = compile(input)
    mainframe = Frame(code)
    for i in range(len(argv)):
        mainframe.registers[i] = Int(int(argv[i]))
    res = mainframe.interpret()
    print "Result:", res.repr()
if __name__ == '__main__':
    import sys
    # Require at least the program file name; otherwise print usage.
    if len(sys.argv) < 2:
        print __doc__
        sys.exit(1)
    fname = sys.argv[1]
    # Remaining arguments preload the main frame's registers.
    main(fname, sys.argv[2:])
|
<gh_stars>0
import { useToastActions } from 'contexts/toast/ToastContext';
import { useMutateAllNftsCache } from 'hooks/api/nfts/useAllNfts';
import { useRefreshOpenseaSync } from 'hooks/api/nfts/useOpenseaSync';
import {
ReactNode,
createContext,
useContext,
memo,
useMemo,
useEffect,
useState,
useCallback,
} from 'react';
// Data exposed to all wizard steps: the wizard's identifier plus the
// NFT-refresh status flag and its trigger.
export type WizardDataState = {
  id: string;
  isRefreshingNfts: boolean;
  handleRefreshNfts: () => void;
};

// The default value doubles as a safe no-op when no provider is mounted.
const WizardDataContext = createContext<WizardDataState>({
  id: '',
  isRefreshingNfts: false,
  handleRefreshNfts: () => {},
});
export const useWizardId = (): WizardDataState['id'] => {
const context = useContext(WizardDataContext);
if (!context) {
throw new Error('Attempted to use WizardDataContext without a provider');
}
return context.id;
};
export const useRefreshNftConfig = () => {
const context = useContext(WizardDataContext);
if (!context) {
throw new Error('Attempted to use WizardDataContext without a provider');
}
return context;
};
type Props = { id: string; children: ReactNode };

// Provides wizard-level data (id + NFT refresh state/trigger) to all steps.
// The onboarding wizard additionally kicks off one OpenSea sync on mount.
export default memo(function WizardDataProvider({ id, children }: Props) {
  const [isRefreshingNfts, setIsRefreshingNfts] = useState(false);
  const refreshOpenseaSync = useRefreshOpenseaSync();
  const mutateAllNftsCache = useMutateAllNftsCache();
  const { pushToast } = useToastActions();

  // Triggers an OpenSea sync and then revalidates the cached NFT list.
  // Failures surface as a toast; the busy flag is cleared either way.
  const handleRefreshNfts = useCallback(async () => {
    setIsRefreshingNfts(true);
    try {
      await refreshOpenseaSync();
      void mutateAllNftsCache();
    } catch {
      pushToast(
        'Error while fetching latest NFTs. Opensea may be temporarily unavailable. Please try again later.'
      );
    }
    setIsRefreshingNfts(false);
  }, [mutateAllNftsCache, pushToast, refreshOpenseaSync]);

  useEffect(() => {
    if (id === 'onboarding') {
      void handleRefreshNfts();
    }
    // just want this running once for the welcome screen
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  // Memoized so consumers do not re-render on unrelated provider renders.
  const wizardDataState = useMemo(
    () => ({ id, isRefreshingNfts, handleRefreshNfts }),
    [id, isRefreshingNfts, handleRefreshNfts]
  );

  return (
    <WizardDataContext.Provider value={wizardDataState}>{children}</WizardDataContext.Provider>
  );
});
|
def find_primes(input_list):
    """Return the primes from ``input_list``, preserving order and duplicates.

    :param input_list: iterable of integers to filter
    :return: list of the elements that are prime (values <= 1 are skipped)
    """
    primes = []
    for num in input_list:
        if num <= 1:
            continue
        # Trial division only up to sqrt(num): any composite has a factor
        # in that range, so this is O(sqrt(n)) per element instead of O(n).
        is_prime = True
        for i in range(2, int(num ** 0.5) + 1):
            if num % i == 0:
                is_prime = False
                break
        if is_prime:
            primes.append(num)
    return primes
/*
* Copyright (c) 2002-2004 LWJGL Project
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'LWJGL' nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.bulletphysics.demos.applet;
/**
* Disk.java
*
*
* Created 23-dec-2003
* @author <NAME>
*/
// Java port of GLU's gluDisk quadric. Rendering state (normals,
// orientation, drawStyle) and the sin/cos/TXTR_COORD helpers come from
// the Quadric superclass -- presumably mirroring the C GLU API; confirm
// against Quadric's source.
public class Disk extends Quadric {
    /**
     * Constructor for Disk.
     */
    public Disk() {
        super();
    }

    /**
     * renders a disk on the z = 0 plane. The disk has a radius of
     * outerRadius, and contains a concentric circular hole with a radius of
     * innerRadius. If innerRadius is 0, then no hole is generated. The disk is
     * subdivided around the z axis into slices (like pizza slices), and also
     * about the z axis into rings (as specified by slices and loops,
     * respectively).
     *
     * With respect to orientation, the +z side of the disk is considered to be
     * "outside" (see glu.quadricOrientation). This means that if the orientation
     * is set to GLU.OUTSIDE, then any normals generated point along the +z axis.
     * Otherwise, they point along the -z axis.
     *
     * If texturing is turned on (with glu.quadricTexture), texture coordinates are
     * generated linearly such that where r=outerRadius, the value at (r, 0, 0) is
     * (1, 0.5), at (0, r, 0) it is (0.5, 1), at (-r, 0, 0) it is (0, 0.5), and at
     * (0, -r, 0) it is (0.5, 0).
     */
    public void draw(Graphics3D gl, float innerRadius, float outerRadius, int slices, int loops)
    {
        float da, dr;

        /* Normal vectors */
        // The disk is flat, so a single normal (+z or -z by orientation)
        // covers every vertex.
        if (super.normals != GLU_NONE) {
            if (super.orientation == GLU_OUTSIDE) {
                gl.setNormal(0.0f, 0.0f, +1.0f);
            }
            else {
                gl.setNormal(0.0f, 0.0f, -1.0f);
            }
        }

        // Angular step per slice and radial step per loop.
        da = 2.0f * (float)Math.PI / slices;
        dr = (outerRadius - innerRadius) / loops;

        // NOTE(review): only GLU_FILL is implemented; the LINE/POINT/
        // SILHOUETTE bodies below are kept from the original GLU port but
        // commented out, so those draw styles silently render nothing.
        switch (super.drawStyle) {
            case GLU_FILL:
            {
                /* texture of a gluDisk is a cut out of the texture unit square
                 * x, y in [-outerRadius, +outerRadius]; s, t in [0, 1]
                 * (linear mapping)
                 */
                float dtc = 2.0f * outerRadius;
                float sa, ca;
                float r1 = innerRadius;
                int l;
                // One quad strip per ring, stitched between radii r1 and r2.
                for (l = 0; l < loops; l++) {
                    float r2 = r1 + dr;
                    if (super.orientation == GLU_OUTSIDE) {
                        int s;
                        gl.begin(Graphics3D.QUAD_STRIP);
                        for (s = 0; s <= slices; s++) {
                            float a;
                            // Wrap the final vertex back to angle 0 so the
                            // strip closes exactly.
                            if (s == slices)
                                a = 0.0f;
                            else
                                a = s * da;
                            sa = sin(a);
                            ca = cos(a);
                            TXTR_COORD(gl, 0.5f + sa * r2 / dtc, 0.5f + ca * r2 / dtc);
                            gl.addVertex(r2 * sa, r2 * ca, 0f);
                            TXTR_COORD(gl, 0.5f + sa * r1 / dtc, 0.5f + ca * r1 / dtc);
                            gl.addVertex(r1 * sa, r1 * ca, 0f);
                        }
                        gl.end();
                    }
                    else {
                        // Inside orientation: walk slices in reverse and
                        // mirror the s texture coordinate so winding flips.
                        int s;
                        gl.begin(Graphics3D.QUAD_STRIP);
                        for (s = slices; s >= 0; s--) {
                            float a;
                            if (s == slices)
                                a = 0.0f;
                            else
                                a = s * da;
                            sa = sin(a);
                            ca = cos(a);
                            TXTR_COORD(gl, 0.5f - sa * r2 / dtc, 0.5f + ca * r2 / dtc);
                            gl.addVertex(r2 * sa, r2 * ca, 0f);
                            TXTR_COORD(gl, 0.5f - sa * r1 / dtc, 0.5f + ca * r1 / dtc);
                            gl.addVertex(r1 * sa, r1 * ca, 0f);
                        }
                        gl.end();
                    }
                    r1 = r2;
                }
                break;
            }
            case GLU_LINE:
            {
//				int l, s;
//				/* draw loops */
//				for (l = 0; l <= loops; l++) {
//					float r = innerRadius + l * dr;
//					GL11.glBegin(GL11.GL_LINE_LOOP);
//					for (s = 0; s < slices; s++) {
//						float a = s * da;
//						GL11.glVertex2f(r * sin(a), r * cos(a));
//					}
//					GL11.glEnd();
//				}
//				/* draw spokes */
//				for (s = 0; s < slices; s++) {
//					float a = s * da;
//					float x = sin(a);
//					float y = cos(a);
//					GL11.glBegin(GL11.GL_LINE_STRIP);
//					for (l = 0; l <= loops; l++) {
//						float r = innerRadius + l * dr;
//						GL11.glVertex2f(r * x, r * y);
//					}
//					GL11.glEnd();
//				}
                break;
            }
            case GLU_POINT:
            {
//				int s;
//				GL11.glBegin(GL11.GL_POINTS);
//				for (s = 0; s < slices; s++) {
//					float a = s * da;
//					float x = sin(a);
//					float y = cos(a);
//					int l;
//					for (l = 0; l <= loops; l++) {
//						float r = innerRadius * l * dr;
//						GL11.glVertex2f(r * x, r * y);
//					}
//				}
//				GL11.glEnd();
                break;
            }
            case GLU_SILHOUETTE:
            {
//				if (innerRadius != 0.0) {
//					float a;
//					GL11.glBegin(GL11.GL_LINE_LOOP);
//					for (a = 0.0f; a < 2.0 * GLU.PI; a += da) {
//						float x = innerRadius * sin(a);
//						float y = innerRadius * cos(a);
//						GL11.glVertex2f(x, y);
//					}
//					GL11.glEnd();
//				}
//				{
//					float a;
//					GL11.glBegin(GL11.GL_LINE_LOOP);
//					for (a = 0; a < 2.0f * GLU.PI; a += da) {
//						float x = outerRadius * sin(a);
//						float y = outerRadius * cos(a);
//						GL11.glVertex2f(x, y);
//					}
//					GL11.glEnd();
//				}
                break;
            }
            default:
                return;
        }
    }
}
|
package com.github.coreyshupe.lb.velocity.listeners;
import com.github.coreyshupe.lb.api.LoadBalancerInstance;
import com.velocitypowered.api.event.EventTask;
import com.velocitypowered.api.event.Subscribe;
import com.velocitypowered.api.event.player.PlayerChooseInitialServerEvent;
import com.velocitypowered.api.proxy.ProxyServer;
/**
 * Velocity event listener that routes a newly connecting player to the
 * least-loaded backend chosen by the load balancer.
 */
public record VelocityInitialConnectListener(ProxyServer proxyServer, LoadBalancerInstance instance) {
    /**
     * Picks the best server asynchronously and, once resolved, sets it as the
     * player's initial server. Returning an EventTask makes Velocity pause the
     * event until the balance-cache future completes.
     */
    @Subscribe
    public EventTask onInitialConnect(PlayerChooseInitialServerEvent event) {
        // If bestServer is null (lookup failed), the event's default initial
        // server is left untouched; same when the name is unknown to the proxy.
        return EventTask.resumeWhenComplete(this.instance.getServerBalanceCache().getBestServer().whenComplete((bestServer, throwable) -> {
            if (bestServer != null) {
                this.proxyServer.getServer(bestServer).ifPresent(event::setInitialServer);
            }
        }));
    }
}
|
# -*- coding: utf-8 -*-
#
# MIT License
#
# Copyright (c) 2016 <NAME> <<EMAIL>>, Institute of Computer Science, Masaryk University
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
"""
Template application for creating new applications using provided module and Spark operations, with a possibility of
adding more advanced modules. This template simply resends one row of data from given input topic to defined
output topic.
Usage:
application_template.py -iz <input-zookeeper-hostname>:<input-zookeeper-port> -it <input-topic>
-oz <output-zookeeper-hostname>:<output-zookeeper-port> -ot <output-topic> -m <microbatch-duration>
To run this on the Stream4Flow, you need to receive flows by IPFIXCol and make them available via Kafka topic. Then
you can run the application as follows:
$ ~/applications/run-application.sh ./application_template.py -iz producer:2181 -it ipfix.entry
-oz producer:9092 -ot results.output
"""
import argparse # Arguments parser
import ujson as json # Fast JSON parser
from termcolor import cprint # Colors in the console output
from modules import kafkaIO # IO operations with kafka topics
def process_results(data_to_process, producer, output_topic):
    """
    Format the analyzed data and forward it to the output Kafka topic.

    :param data_to_process: analyzed data (dictionary of results)
    :param producer: Kafka producer
    :param output_topic: Kafka topic through which output is send
    """
    # Serialize every result value to JSON, one record per line.
    serialized_rows = [json.dumps(value) for value in data_to_process.values()]
    results_output = '\n'.join(serialized_rows)

    # Hand the assembled payload to the Kafka producer.
    kafkaIO.send_data_to_kafka(results_output, producer, output_topic)
def process_input(input_data):
    """
    Apply MapReduce-style transformations to the parsed input stream.

    :param input_data: input data in JSON format to process
    :return: processed data
    """
    # Key every JSON record with the constant 1 so downstream reduce steps
    # see a single partition of (key, json) pairs.
    return input_data.map(lambda json_data: (1, json_data))
if __name__ == "__main__":
# Define application arguments (automatically creates -h argument)
parser = argparse.ArgumentParser()
parser.add_argument("-iz", "--input_zookeeper", help="input zookeeper hostname:port", type=str, required=True)
parser.add_argument("-it", "--input_topic", help="input kafka topic", type=str, required=True)
parser.add_argument("-oz", "--output_zookeeper", help="output zookeeper hostname:port", type=str, required=True)
parser.add_argument("-ot", "--output_topic", help="output kafka topic", type=str, required=True)
parser.add_argument("-m", "--microbatch", help="microbatch duration", type=int, required=False, default=5)
# You can add your own arguments here
# See more at:
# https://docs.python.org/2.7/library/argparse.html
# Parse arguments
args = parser.parse_args()
# Initialize input stream and parse it into JSON
ssc, parsed_input_stream = kafkaIO\
.initialize_and_parse_input_stream(args.input_zookeeper, args.input_topic, args.microbatch)
# Process input in the desired way
processed_input = process_input(parsed_input_stream)
# Initialize kafka producer
kafka_producer = kafkaIO.initialize_kafka_producer(args.output_zookeeper)
# Process computed data and send them to the output
kafkaIO.process_data_and_send_result(processed_input, kafka_producer, args.output_topic, process_results)
# Start Spark streaming context
kafkaIO.spark_start(ssc)
|
package Hibernate;
// Generated 18-nov-2018 18:36:45 by Hibernate Tools 3.5.0.Final
/**
* Proveedores generated by hbm2java
*/
/**
 * Proveedores (suppliers) entity generated by hbm2java.
 * Plain JavaBean mapped by Hibernate; all persistent state is the four
 * String fields below.
 */
public class Proveedores implements java.io.Serializable {

    // Explicit serialVersionUID keeps the serialized form stable across
    // recompiles; Serializable classes without one get a fragile,
    // compiler-generated UID.
    private static final long serialVersionUID = 1L;

    private String codigo;    // supplier code (identifier)
    private String nombre;    // first name
    private String apellidos; // surname(s)
    private String direccion; // address

    /** No-arg constructor required by Hibernate. */
    public Proveedores() {
    }

    /** Full constructor initializing every persistent field. */
    public Proveedores(String codigo, String nombre, String apellidos, String direccion) {
        this.codigo = codigo;
        this.nombre = nombre;
        this.apellidos = apellidos;
        this.direccion = direccion;
    }

    public String getCodigo() {
        return this.codigo;
    }

    public void setCodigo(String codigo) {
        this.codigo = codigo;
    }

    public String getNombre() {
        return this.nombre;
    }

    public void setNombre(String nombre) {
        this.nombre = nombre;
    }

    public String getApellidos() {
        return this.apellidos;
    }

    public void setApellidos(String apellidos) {
        this.apellidos = apellidos;
    }

    public String getDireccion() {
        return this.direccion;
    }

    public void setDireccion(String direccion) {
        this.direccion = direccion;
    }
}
|
class Node:
    """A single element of a singly linked list."""

    def __init__(self, data):
        # A fresh node has no successor until the list links it in.
        self.next = None
        # Payload carried by this node.
        self.data = data
<gh_stars>0
import { Column, DataType, Model, Table } from 'sequelize-typescript';
// Creation-attribute shapes for the sequelize models below (fields a caller
// must supply when creating a row; `id` is auto-generated).
interface CitysCreationsAttr {
  name: string;
}

interface DisabitilitysCreationsAttr {
  name: string;
}

interface MaterialStatusCreationsAttr {
  name: string;
}

// NOTE(review): the leading "С" here appears to be CYRILLIC CAPITAL ES
// (U+0421), not Latin "C" — confirm; it compiles but is easy to mistype.
interface СitizenshipStatusCreationsAttr {
  name: string;
}

// Full set of client attributes; *_status / citizenship / disability /
// city fields are numeric foreign-key ids into the lookup tables below.
interface ClientsCreationsAttr {
  first_name: string;
  last_name: string;
  third_name: string;
  date_born: string;
  sex: string;
  passport_series: string;
  passport_id: string;
  who_issued_the_passport: string;
  date_of_issue_of_the_passport: string;
  inspirational_passport_number: string;
  place_of_birth: string;
  city_of_residence: number;
  residential_address: string;
  mobile_phone: string;
  home_phone: string;
  email: string;
  work_place: string;
  position: string;
  place_of_registration: number;
  address_of_residence: string;
  marital_status: number;
  citizenship: number;
  disability: number;
  retiree: number;
  salary: number;
  liable_for_military_service: string;
}
// Lookup table of cities: auto-increment id + unique name.
@Table({ tableName: 'citys' })
export class Citys extends Model<Citys, CitysCreationsAttr> {
  @Column({
    type: DataType.INTEGER,
    allowNull: false,
    autoIncrement: true,
    unique: true,
    primaryKey: true,
  })
  id: number;

  @Column({
    type: DataType.STRING,
    unique: true,
    allowNull: false,
  })
  name: string;
}
// Lookup table of disability categories: auto-increment id + unique name.
// (Table/class name spelling "disabitilitys" is kept — renaming would
// break the existing schema.)
@Table({ tableName: 'disabitilitys' })
export class Disabitilitys extends Model<
  Disabitilitys,
  DisabitilitysCreationsAttr
> {
  @Column({
    type: DataType.INTEGER,
    allowNull: false,
    autoIncrement: true,
    unique: true,
    primaryKey: true,
  })
  id: number;

  @Column({
    type: DataType.STRING,
    unique: true,
    allowNull: false,
  })
  name: string;
}
// Lookup table of marital statuses. NOTE(review): the class is named
// "MaterialStatus" while the table is 'marital-status' — likely a typo,
// but renaming the exported class would break importers.
@Table({ tableName: 'marital-status' })
export class MaterialStatus extends Model<
  MaterialStatus,
  MaterialStatusCreationsAttr
> {
  @Column({
    type: DataType.INTEGER,
    allowNull: false,
    autoIncrement: true,
    unique: true,
    primaryKey: true,
  })
  id: number;

  @Column({
    type: DataType.STRING,
    unique: true,
    allowNull: false,
  })
  name: string;
}
// Lookup table of citizenships. NOTE(review): the class name's leading
// "С" appears to be CYRILLIC CAPITAL ES (U+0421), not Latin "C" —
// confirm; renaming the exported class would be a breaking change.
@Table({ tableName: 'citizenship' })
export class Сitizenship extends Model<
  Сitizenship,
  СitizenshipStatusCreationsAttr
> {
  @Column({
    type: DataType.INTEGER,
    allowNull: false,
    autoIncrement: true,
    unique: true,
    primaryKey: true,
  })
  id: number;

  @Column({
    type: DataType.STRING,
    unique: true,
    allowNull: false,
  })
  name: string;
}
// Main client record. Numeric fields city_of_residence,
// place_of_registration, marital_status, citizenship, disability reference
// the lookup tables above by id (no association decorators are declared
// here — joins are presumably done manually; confirm).
// NOTE(review): `unique: true` on last_name forbids two clients sharing a
// surname — verify this is intentional.
@Table({ tableName: 'clients' })
export class Clients extends Model<Clients, ClientsCreationsAttr> {
  @Column({
    type: DataType.INTEGER,
    allowNull: false,
    autoIncrement: true,
    unique: true,
    primaryKey: true,
  })
  id: number;

  @Column({
    type: DataType.STRING,
    allowNull: false,
  })
  first_name: string;

  @Column({
    type: DataType.STRING,
    allowNull: false,
    unique: true,
  })
  last_name: string;

  @Column({
    type: DataType.STRING,
    allowNull: false,
  })
  third_name: string;

  // Dates are stored as strings, not DATE columns.
  @Column({
    type: DataType.STRING,
    allowNull: false,
  })
  date_born: string;

  @Column({
    type: DataType.STRING,
    allowNull: false,
  })
  sex: string;

  // Passport identity block; series+id and the "inspirational" number
  // (presumably a transliteration of the national passport number —
  // confirm) are unique.
  @Column({
    type: DataType.STRING,
    allowNull: false,
  })
  passport_series: string;

  @Column({
    type: DataType.STRING,
    allowNull: false,
    unique: true,
  })
  passport_id: string;

  @Column({
    type: DataType.STRING,
    allowNull: false,
  })
  who_issued_the_passport: string;

  @Column({
    type: DataType.STRING,
    allowNull: false,
  })
  date_of_issue_of_the_passport: string;

  @Column({
    type: DataType.STRING,
    allowNull: false,
    unique: true,
  })
  inspirational_passport_number: string;

  @Column({
    type: DataType.STRING,
    allowNull: false,
  })
  place_of_birth: string;

  // FK id into 'citys'.
  @Column({
    type: DataType.INTEGER,
    allowNull: false,
  })
  city_of_residence: number;

  @Column({
    type: DataType.STRING,
    allowNull: false,
  })
  residential_address: string;

  @Column({
    type: DataType.STRING,
    allowNull: false,
  })
  mobile_phone: string;

  @Column({
    type: DataType.STRING,
    allowNull: false,
  })
  home_phone: string;

  @Column({
    type: DataType.STRING,
    allowNull: false,
    unique: true,
  })
  email: string;

  @Column({
    type: DataType.STRING,
    allowNull: false,
  })
  work_place: string;

  @Column({
    type: DataType.STRING,
    allowNull: false,
  })
  position: string;

  // FK id into 'citys'.
  @Column({
    type: DataType.INTEGER,
    allowNull: false,
  })
  place_of_registration: number;

  @Column({
    type: DataType.STRING,
    allowNull: false,
  })
  address_of_residence: string;

  // FK id into 'marital-status'.
  @Column({
    type: DataType.INTEGER,
    allowNull: false,
  })
  marital_status: number;

  // FK id into 'citizenship'.
  @Column({
    type: DataType.INTEGER,
    allowNull: false,
  })
  citizenship: number;

  // FK id into 'disabitilitys'.
  @Column({
    type: DataType.INTEGER,
    allowNull: false,
  })
  disability: number;

  // Pensioner flag stored as an integer.
  @Column({
    type: DataType.INTEGER,
    allowNull: false,
  })
  retiree: number;

  @Column({
    type: DataType.INTEGER,
    allowNull: false,
  })
  salary: number;

  @Column({
    type: DataType.STRING,
    allowNull: false,
  })
  liable_for_military_service: string;
}
|
from django.db import models
class Range(models.Model):
    """Inclusive numeric range [min, max] with an integer step size.

    NOTE(review): the previous __str__ referenced ``self.year``, but Range
    declares no ``year`` field, so stringifying any instance raised
    AttributeError. The representation now uses only declared fields.
    """

    def __str__(self):
        return str(self.min) + ',' + str(self.max) + ',' + str(self.step)

    min = models.FloatField()
    max = models.FloatField()
    step = models.IntegerField()
class Count(models.Model):
    """Per-year sample count and its probability (prior) in the corpus."""

    def __str__(self):
        # Representation omits `count` by design — only year and prob shown.
        return str(self.year) + ',' + str(self.prob)

    year = models.IntegerField(unique=True)
    count = models.IntegerField()
    prob = models.FloatField()
class NBModel(models.Model):
    """Naive-Bayes model parameters for one year, serialized into `fprob`."""

    def __str__(self):
        # Truncate the (potentially huge) serialized probabilities for display.
        return str(self.year) + ',' + self.fprob[:64] + '...'

    year = models.IntegerField(unique=True)
    fprob = models.TextField()
class TestData(models.Model):
    """Feature vector of one release used for model evaluation."""

    def __str__(self):
        return str(self.year) + ',' + str(self.releaseid) + ',' + self.features[:64] + '...'

    year = models.IntegerField()
    releaseid = models.IntegerField(db_index=True) # 7digital release id
    features = models.TextField()
class TestResult(models.Model):
    """Prediction outcome for one TestData row.

    NOTE(review): ``ForeignKey(..., unique=True)`` is a 1:1 link; modern
    Django would use OneToOneField and require ``on_delete`` — left as-is
    for compatibility with the Django version this project targets.
    """

    def __str__(self):
        return str(self.data) + ',' + str(self.result) + ',' + str(self.correct)

    data = models.ForeignKey(TestData, unique=True)
    result = models.IntegerField()
    correct = models.BooleanField(default=False)
#!/bin/ksh
set -x

## plot mean tracks of individual storms in the Atlantic
## Fanglin Yang, March 2008: original copy adopted from HWRF. Restructured and added driver scripts.
## Fanglin Yang, March 2013: Generalized for running on WCOSS and THEIA

#-------------------------------------------------------------------------------------
export expdir=/gpfs/dell6/emc/modeling/noscrub/emc.glopara/archive
export mdlist="v16retro1e" ;#experiment names
export mdplot="V16E" ;#names to be shown on plots, limitted to 4 letters
export cyc="00 06 12 18" ;#forecast cycles to be included in verification
export doftp="YES" ;#whether or not sent maps to ftpdir
export webhostid=emc.glopara
export webhost=emcrzdm.ncep.noaa.gov
export ftpdir=/home/people/emc/www/htdocs/gmb/$webhostid/vsdb/v16retro1e
#-------------------------------------------------------------------------------------

#---------------------------------------------------------
#---Most likely you do not need to change anything below--
#---------------------------------------------------------
#-------------------------------------------------------------------------------------
export scrdir=/gpfs/dell2/emc/modeling/noscrub/Fanglin.Yang/VRFY/hurtrack
export STMP="/gpfs/dell3/stmp"
export NDATE=/gpfs/dell1/nco/ops/nwprod/prod_util.v1.1.0/exec/ips/ndate
export rundir=${rundir:-$STMP/$LOGNAME/track$$}
mkdir -p ${rundir}; cd $rundir || exit 8

#==================================================================
# NOTE(review): Marilyn, Peipah and Tapah appear in the loop below but have
# no matching case entry, so they inherit code1/DATEST/DATEND from the
# previous storm -- confirm and add their entries.
for storm in Pabuk Wutip Sepat Mun Danas Goring Nari Wipha Francisco Lekima Krosa Bailu Podul Kajiki Lingling Faxai Marilyn Peipah Tapah Mitag Hagibis Neoguri Bualoi Matmo Halong Nakri Fengshen Kalmaegi Fung-wong Kammuri Phanfone ; do

# Per-storm best-track file name and verification window.
case $storm in
  Pabuk) code1=wp012019.dat; DATEST=20181231; DATEND=20190104;;
  Wutip) code1=wp022019.dat; DATEST=20190218; DATEND=20190302;;
  Sepat) code1=wp032019.dat; DATEST=20190624; DATEND=20190628;;
  Mun) code1=wp042019.dat; DATEST=20190701; DATEND=20190704;;
  Danas) code1=wp052019.dat; DATEST=20190714; DATEND=20190721;;
  Goring) code1=wp062019.dat; DATEST=20190717; DATEND=20190719;;
  Nari) code1=wp072019.dat; DATEST=20190724; DATEND=20190727;;
  Wipha) code1=wp082019.dat; DATEST=20190730; DATEND=20190804;;
  Francisco) code1=wp092019.dat; DATEST=20190801; DATEND=20190808;;
  Lekima) code1=wp102019.dat; DATEST=20190802; DATEND=20190813;;
  Krosa) code1=wp112019.dat; DATEST=20190805; DATEND=20190816;;
  Bailu) code1=wp122019.dat; DATEST=20190819; DATEND=20190826;;
  Podul) code1=wp132019.dat; DATEST=20190824; DATEND=20190831;;
  Faxai) code1=wp142019.dat; DATEST=20190902; DATEND=20190909;;
  Lingling) code1=wp152019.dat; DATEST=20190831; DATEND=20190907;;
  Kajiki) code1=wp162019.dat; DATEST=20190830; DATEND=20190906;;
  Mitag) code1=wp192019.dat; DATEST=20190925; DATEND=20191003;;
  Hagibis) code1=wp202019.dat; DATEST=20191004; DATEND=20191013;;
  Neoguri) code1=wp212019.dat; DATEST=20191015; DATEND=20191021;;
  Bualoi) code1=wp222019.dat; DATEST=20191018; DATEND=20191025;;
  Matmo) code1=wp232019.dat; DATEST=20191028; DATEND=20191031;;
  Halong) code1=wp242019.dat; DATEST=20191101; DATEND=20191108;;
  Nakri) code1=wp252019.dat; DATEST=20191104; DATEND=20191111;;
  Fengshen) code1=wp262019.dat; DATEST=20191109; DATEND=20191117;;
  Kalmaegi) code1=wp272019.dat; DATEST=20191109; DATEND=20191122;;
  Fung-wong) code1=wp282019.dat; DATEST=20191117; DATEND=20191123;;
  Kammuri) code1=wp292019.dat; DATEST=20191124; DATEND=20191206;;
  Phanfone) code1=wp302019.dat; DATEST=20191219; DATEND=20191229;;
esac
OCEAN=WP

#---------------------------------------------------------
#---------------------------------------------------------
set -A mdname $mdlist; set -A mdpt $mdplot
execdir=${rundir}/${storm} ;# working directory
rm -r $execdir; mkdir -p $execdir
cd $execdir; chmod u+rw *

years=`echo $DATEST |cut -c 1-4 `
yeare=`echo $DATEND |cut -c 1-4 `
if [ $years -ne $yeare ]; then
  echo " years=$years, yeare=$yeare. Must have years=yeare. exit"
  exit
fi
export year=$years

## copy HPC/JTWC tracks to working directory (HPC's tracks sometime do not match with real-time tracks)
tpctrack=${execdir}/tpctrack ;#place to hold HPC original track data
mkdir -p $tpctrack

#TPC Atlantic and Eastern Pacific tracks
#if [ -s /nhc/noscrub/data/atcf-noaa/aid_nws/aep01${year}.dat ]; then
# tpcdata=/nhc/noscrub/data/atcf-noaa
# cp ${tpcdata}/aid_nws/aep*${year}*.dat ${tpctrack}/.
# cp ${tpcdata}/btk/bep*${year}*.dat ${tpctrack}/.
#elif [ -s $scrdir/tpctrack/${year}/aep01${year}.dat ]; then
# tpcdata=$scrdir/tpctrack
# cp ${tpcdata}/${year}/aep*.dat ${tpctrack}/.
# cp ${tpcdata}/${year}/bep*.dat ${tpctrack}/.
#else
# echo" HPC track not found, exit"
# exit 8
#fi

#JTWC Western Pacific tracks
if [ -s /nhc/noscrub/data/atcf-navy/aid/awp01${year}.dat ]; then
 jtwcdata=/nhc/noscrub/data/atcf-navy
 cp ${jtwcdata}/aid/awp*${year}.dat ${tpctrack}/.
 cp ${jtwcdata}/btk/bwp*${year}.dat ${tpctrack}/.
elif [ -s $scrdir/tpctrack/${year}/awp01${year}.dat ]; then
 tpcdata=$scrdir/tpctrack
 cp ${tpcdata}/${year}/awp*.dat ${tpctrack}/.
 cp ${tpcdata}/${year}/bwp*.dat ${tpctrack}/.
else
 echo" HPC track not found, exit"
 exit 8
fi

#------------------------------------------------------------------------
# insert experiment track to TPC track for all runs and for all BASINs
#------------------------------------------------------------------------
newlist=""
fout=24
nexp=`echo $mdlist |wc -w`
ncyc=`echo $cyc |wc -w |sed 's/ //g'`
if [ $ncyc -eq 3 ]; then ncyc=2; fi
fout=`expr $fout \/ $ncyc `

n=0
if [ $nexp -gt 0 ]; then
for exp in $mdlist; do

 ## cat experiment track data for each exp
 nameold=`echo $exp |cut -c 1-4 ` ;#current fcst always uses first 4 letters of experiment name
 nameold=`echo $nameold |tr "[a-z]" "[A-Z]" `
 #namenew=`echo $exp |cut -c 1-4 `
 namenew=${mdpt[$n]} ; n=$((n+1))
 namenew=`echo $namenew |tr "[a-z]" "[A-Z]" `
 export newlist=${newlist}"${namenew} " ;#donot delete the space at the end
 dump=.gfs.
 if [ $exp = gfs2016 ]; then nameold="GFSX" ; fi
 if [ $exp = prfv3rt1 ]; then nameold="PRFV"; fi
 if [ $exp = gfs2017 ]; then nameold="FY17" ; fi
 if [ $exp = gfs2019 ]; then nameold="FY19" ; fi
 if [ $exp = gfs2019b ]; then nameold="FY19" ; fi
 if [ $exp = gfs2019c ]; then nameold="FY19" ; fi

 outfile=${execdir}/atcfunix.$exp.$year
 if [ -s $outfile ]; then rm $outfile; fi
 touch $outfile
 indir=${expdir}/$exp

 date=${DATEST}00
 until [ $date -gt ${DATEND}18 ] ; do
   infile=$indir/atcfunix${dump}$date
   # Fix: the original compound here was malformed ("if [ -s $infile ];"
   # followed by commands before "then"), so sed ran on missing files and
   # its exit status -- not the file test -- decided the append.
   if [ -s $infile ]; then
     sed "s?$nameold?$namenew?g" $infile >> $outfile
   fi
   date=`$NDATE +$fout $date`
 done

 ## insert experiment track into TPC tracks
 for BASIN in $OCEAN; do
  $scrdir/sorc/insert_new.sh $exp $BASIN $year $tpctrack $outfile $execdir
 done

done ;#end of experiment loop
else
 ln -fs $tpctrack/* .
fi
#------------------------------------------------------------------------

#------------------------------------------------------------------------
# prepare data for GrADS graphics
#------------------------------------------------------------------------
for BASIN in $OCEAN; do
 bas=`echo $BASIN |tr "[A-Z]" "[a-z]" `

## copy test cards, replace dummy exp name MOD# with real exp name
cp ${scrdir}/sorc/card.i .
cp ${scrdir}/sorc/card.t .
cat >stormlist <<EOF
$code1
EOF
cat card.i stormlist >card${year}_${bas}.i
cat card.t stormlist >card${year}_${bas}.t

#newlisti=${newlist}"AVNO HWRF GFDL EMX UKM JTWC"
#newlistt=${newlist}"AVNO HWRF GFDL EMX UKM JTWC"
newlisti=${newlist}"AVNO"
newlistt=${newlist}"AVNO"
nint=`echo $newlisti |wc -w` ;#number of process for intensity plot, to replace NUMINT in card.i
ntrc=`echo $newlistt |wc -w` ;#number of process for track plot, to replace NUMTRC in card.t
nint=`expr $nint + 0 ` ;#remove extra space
ntrc=`expr $ntrc + 0 `
sed -e "s/MODLIST/${newlisti}/g" -e "s/NUMINT/${nint}/g" card${year}_$bas.i >card_$bas.i
sed -e "s/MODLIST/${newlistt}/g" -e "s/NUMTRC/${ntrc}/g" card${year}_$bas.t >card_$bas.t

## produce tracks.t.txt etc
cp $tpctrack/b*${year}.dat .
${scrdir}/sorc/nhcver.x card_${bas}.t tracks_${bas}.t $execdir
${scrdir}/sorc/nhcver.x card_${bas}.i tracks_${bas}.i $execdir

## create grads files tracks_${bas}.t.dat etc for plotting
${scrdir}/sorc/top_tvercut.sh ${execdir}/tracks_${bas}.t.txt $scrdir/sorc
${scrdir}/sorc/top_ivercut.sh ${execdir}/tracks_${bas}.i.txt $scrdir/sorc

## copy grads scripts and make plots
if [ $BASIN = "WP" ]; then place="West-Pacific"; fi
period="${storm}__${DATEST}_${DATEND}_${ncyc}cyc"
cp ${scrdir}/sorc/*iver*.gs .
cp ${scrdir}/sorc/*tver*.gs .
grads -bcp "run top_iver.gs tracks_${bas}.i $year $place $period"
grads -bcp "run top_tver_250.gs tracks_${bas}.t $year $place $period"
mv tracks_${bas}.i.png tracks_${storm}.i.png
mv tracks_${bas}.t.png tracks_${storm}.t.png
#----------------------------
done ;# end of BASIN loop
#----------------------------

if [ $doftp = "YES" ]; then
# Fix: "promt" -> "prompt", and upload the tracks_${bas}.* text files this
# run actually produces (the old tracks_al.* names only exist for the
# Atlantic basin).
cat << EOF >ftpin
  cd $ftpdir
  mkdir track
  cd track
  binary
  prompt
  mput tracks_${storm}*.png
  put tracks_${bas}.t.txt tracks_${storm}.t.txt
  put tracks_${bas}.i.txt tracks_${storm}.i.txt
  quit
EOF
sftp ${webhostid}@${webhost} <ftpin
fi

## save tracks
#savedir=${scrdir}/arch_trak/${mdname[0]}$years$yeare
#mkdir -p $savedir
#cp ${execdir}/tracks_${storm}*.png ${savedir}/.
#cp ${execdir}/tracks_al.t.txt ${savedir}/tracks_${storm}.t.txt
#cp ${execdir}/tracks_al.i.txt ${savedir}/tracks_${storm}.i.txt

#---end of individual storm
done
#---end of individual storm

exit
|
<filename>modules/caas/common/src/main/java/io/cattle/platform/agent/impl/RemoteAgentImpl.java
package io.cattle.platform.agent.impl;
import com.google.common.util.concurrent.ListenableFuture;
import io.cattle.platform.agent.AgentRequest;
import io.cattle.platform.agent.RemoteAgent;
import io.cattle.platform.async.utils.AsyncUtils;
import io.cattle.platform.async.utils.TimeoutException;
import io.cattle.platform.core.model.Agent;
import io.cattle.platform.eventing.EventCallOptions;
import io.cattle.platform.eventing.EventService;
import io.cattle.platform.eventing.exception.AgentRemovedException;
import io.cattle.platform.eventing.exception.EventExecutionException;
import io.cattle.platform.eventing.impl.AbstractEventService;
import io.cattle.platform.eventing.model.Event;
import io.cattle.platform.eventing.model.EventVO;
import io.cattle.platform.json.JsonMapper;
import io.cattle.platform.object.ObjectManager;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
public class RemoteAgentImpl implements RemoteAgent {
private static final Set<String> FRIENDLY_REPLY = new HashSet<>(Arrays.asList("compute.instance.activate"));
JsonMapper jsonMapper;
ObjectManager objectManager;
EventService rawEventService;
EventService wrappedEventService;
Long agentId;
public RemoteAgentImpl(JsonMapper jsonMapper, ObjectManager objectManager, EventService rawEventService, EventService wrappedEventService, Long agentId) {
this.jsonMapper = jsonMapper;
this.objectManager = objectManager;
this.rawEventService = rawEventService;
this.wrappedEventService = wrappedEventService;
this.agentId = agentId;
}
@Override
public long getAgentId() {
return agentId;
}
protected AgentRequest createRequest(Event event) {
return new AgentRequest(agentId, event);
}
@Override
public void publish(Event event) {
wrappedEventService.publish(createRequest(event));
}
@Override
public <T extends Event> T callSync(Event event, Class<T> reply, long timeout) {
return callSync(event, reply, new EventCallOptions(AbstractEventService.DEFAULT_RETRIES.get(), timeout));
}
@Override
public <T extends Event> T callSync(Event event, Class<T> reply, EventCallOptions options) {
/*
* NOTE: Forever blocking get() used only because underlying future will
* always timeout
*/
try {
return AsyncUtils.get(call(event, reply, options));
} catch (TimeoutException e) {
Agent agent = objectManager.loadResource(Agent.class, agentId);
if (agent == null || agent.getRemoved() != null) {
throw new AgentRemovedException("Agent [" + agentId + "] is removed", event);
}
throw e;
} catch (AgentRemovedException e) {
throw e;
} catch (EventExecutionException e) {
/*
* This is done so that the exception will have a better stack
* trace. Normally the exceptions from a future will have a pretty
* sparse stack not giving too much context
*/
throw EventExecutionException.fromEvent(e.getEvent());
}
}
@Override
public <T extends Event> ListenableFuture<T> call(final Event event, final Class<T> reply, long timeout) {
return call(event, reply, new EventCallOptions(AbstractEventService.DEFAULT_RETRIES.get(), timeout));
}
@Override
public <T extends Event> ListenableFuture<T> call(final Event event, final Class<T> reply, EventCallOptions options) {
AgentRequest request = createRequest(event);
return EventCallProgressHelper.call(wrappedEventService, request, reply, options, new EventResponseMarshaller() {
@Override
public <V> V convert(Event resultEvent, Class<V> reply) {
return getReply(event, resultEvent, reply);
}
});
}
protected <T> T getReply(Event inputEvent, Event resultEvent, Class<T> reply) {
if (resultEvent.getData() == null) {
return null;
}
T commandReply = jsonMapper.convertValue(resultEvent.getData(), reply);
if (FRIENDLY_REPLY.contains(inputEvent.getName())) {
EventVO<?, ?> publishEvent = null;
if (commandReply instanceof EventVO) {
publishEvent = (EventVO<?, ?>) commandReply;
} else {
publishEvent = jsonMapper.convertValue(resultEvent.getData(), EventVO.class);
}
publishEvent.setName(inputEvent.getName() + Event.REPLY_SUFFIX);
rawEventService.publish(publishEvent);
}
return commandReply;
}
@Override
public Event callSync(Event event) {
return callSync(event, AbstractEventService.DEFAULT_TIMEOUT.get());
}
@Override
public Event callSync(Event event, EventCallOptions options) {
return callSync(event, EventVO.class, options);
}
@Override
public Event callSync(Event event, long timeout) {
return callSync(event, EventVO.class, timeout);
}
@Override
public ListenableFuture<? extends Event> call(Event event) {
return call(event, AbstractEventService.DEFAULT_TIMEOUT.get());
}
@Override
public ListenableFuture<? extends Event> call(Event event, EventCallOptions options) {
return call(event, EventVO.class, options);
}
@Override
public ListenableFuture<? extends Event> call(Event event, long timeout) {
// Overload: caller-supplied timeout, EventVO reply type.
return call(event, EventVO.class, timeout);
}
@Override
public <T extends Event> T callSync(Event event, Class<T> reply) {
// Overload: typed reply, default timeout.
return callSync(event, reply, AbstractEventService.DEFAULT_TIMEOUT.get());
}
@Override
public <T extends Event> ListenableFuture<T> call(Event event, Class<T> reply) {
// Overload: typed reply, default timeout.
return call(event, reply, AbstractEventService.DEFAULT_TIMEOUT.get());
}
}
|
def str_to_list(string):
    """Return the space-delimited tokens of ``string``.

    Uses ``str.split(' ')`` semantics: consecutive spaces produce empty
    tokens, and an empty input yields ``['']``.
    """
    tokens = string.split(' ')
    return tokens
#!/bin/bash
#SBATCH -J Act_tanh_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=2000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Single run of meta.py with activation=tanh and optimizer=sgd; the numeric
# arguments are presumably tuned hyperparameters and the trailing args the
# initialization scheme and dataset -- TODO confirm against meta.py's CLI.
python3 /home/se55gyhe/Act_func/progs/meta.py tanh 1 sgd 1 0.5579963853723034 469 0.011201983351732336 orth PE-infersent
<reponame>bfoz/pocket
/* Filename: intelhex.cc
* Routines for reading/writing Intel INHX8M and INHX32 files
Copyright (c) 2002, Terran Development Corporation
All rights reserved.
This code is made available to the public under a BSD-like license, a copy of which
should have been provided with this code in the file LICENSE. For a copy of the BSD
license template please visit http://www.opensource.org/licenses/bsd-license.php
* */
#include <iostream>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include "intelhex.h"
namespace intelhex
{
#define INH32M_HEADER ":020000040000FA"
/* hex_data::hex_data()
{
format = HEX_FORMAT_INHX8M;
}
*/
//Extend the data block array by one element
// and return a pointer to the new element
// Append one default-constructed data block and return its address.
hex_data::dblock* hex_data::new_block()
{
	blocks.push_back(dblock());
	return &blocks.back();
}
//Extend the data block array by one element
// and return a pointer to the new element
// Initialize the element with address and length
// Append a new data block, give it a start address, and pre-fill its
// storage with `length` copies of `fill`. Returns the new block.
hex_data::dblock* hex_data::add_block(address_t address, size_type length, element fill)
{
	blocks.push_back(dblock());
	dblock &nb = blocks.back();
	nb.first = address;
	nb.second.resize(length, fill);
	return &nb;
}
//Array access operator
//Assumes that the blocks have been sorted by address in ascending order
// Sort order is maintained
//Array access operator
//Assumes the blocks are sorted by address in ascending order; sort order is
// maintained. Accessing an unpopulated address creates storage for it.
hex_data::element &hex_data::operator[](hex_data::address_t addr)
{
	if(blocks.size() == 0)	//Add a block if the sequence is empty
		add_block(0,0);
	//Scan backwards for the last block whose start address is <= addr
	reverse_iterator i = blocks.rbegin();
	while( (i!=blocks.rend()) && (i->first > addr))
		++i;
	//FIX: if every block starts above addr the scan exhausts the list and the
	// old code dereferenced rend() (undefined behaviour). Make a new block.
	if( i == blocks.rend() )
		return add_block(addr,1)->second[0];
	element relative_addr = addr - i->first;
	//If addr is outside the block and not adjacent to its end, add a new block
	if( relative_addr > i->second.size() )
		return add_block(addr,1)->second[0];
	//If addr is adjacent to the end of the block, grow it by one element
	if( relative_addr == i->second.size() )
		i->second.resize(i->second.size()+1, 0xFFFF);
	return i->second[relative_addr];
}
//FIXME Nasty kludge
// I should really create an iterator class to handle this
// Read the element at addr, or `blank` when that address is unpopulated.
// Unlike operator[], this never allocates storage.
hex_data::element hex_data::get(address_t addr, element blank)
{
	// Walk backwards to the last block that starts at or below addr.
	reverse_iterator i(blocks.rbegin());
	while( i != blocks.rend() && i->first > addr )
		++i;
	if( i == blocks.rend() )
		return blank;	// addr precedes every block
	const element offset = addr - i->first;
	// Inside the block: return the stored word; past its end: blank.
	return (offset < i->second.size()) ? i->second[offset] : blank;
}
// Delete all allocated memory
// Discard all data blocks and reset the format to the default.
void hex_data::clear()
{
	blocks.clear();
	format = HEX_FORMAT_INHX8M;
}
//Add a new word to the end of the sequence
// Assumes the sequence has been sorted
// Append one word to the end of the last block (the sequence is assumed
// sorted, so the last block holds the highest addresses).
void hex_data::push_back(hex_data::element a)
{
	if( blocks.empty() )	// make sure there is a block to append to
		add_block(0,0);
	blocks.back().second.push_back(a);
}
// Total number of populated elements across all blocks.
hex_data::size_type hex_data::size()
{
	size_type total(0);
	for(iterator i = blocks.begin(); i != blocks.end(); ++i)
		total += i->second.size();
	return total;
}
//Returns the number of populated elements with addresses less than addr
//Returns the number of populated elements with addresses strictly below addr.
//Note: a block ending exactly at addr falls into the second branch
// (addr - i->first == size), which yields the same count, so the boundary
// is handled consistently either way.
hex_data::size_type hex_data::size_below_addr(address_t addr)
{
size_type s=0;
//	std::cout << __FUNCTION__ << ": addr = " << std::hex << addr << std::endl;
for(iterator i=blocks.begin(); i!=blocks.end(); ++i)
{
//	std::cout << __FUNCTION__ << ": i->first = " << std::hex << i->first << std::endl;
//	std::cout << __FUNCTION__ << ": i->second.size = " << std::hex << i->second.size() << std::endl;
if( (i->first + i->second.size()) < addr)	//Block entirely below addr: count all of it
s += i->second.size();
else if( i->first < addr )	//Block straddles addr: count only the part below
s += addr - i->first;
}
//	std::cout << __FUNCTION__ << ": s = " << std::hex << s << std::endl;
return s;
}
//number of words in [lo, hi)
//Number of populated elements with addresses in [lo, hi).
//FIX: the old first branch (block starting below lo) added the whole tail
// of the block without clipping at hi, over-counting when a single block
// straddled both bounds. Each block is now clipped to [lo, hi) explicitly.
hex_data::size_type hex_data::size_in_range(address_t lo, address_t hi)
{
	size_type s=0;
	for(iterator i=blocks.begin(); i!=blocks.end(); ++i)
	{
		//Clip the block's address span to [lo, hi) and count the overlap
		const address_t begin = (i->first > lo) ? i->first : lo;
		address_t end = i->first + i->second.size();
		if( end > hi )
			end = hi;
		if( end > begin )
			s += end - begin;
	}
	return s;
}
//Return the max address of all of the set words with addresses less than or equal to hi
//Return the max address of all of the set words with addresses less than or equal to hi
//Returns 0 when no block starts at or below hi; callers cannot distinguish
// that case from a genuine maximum of address 0.
hex_data::address_t hex_data::max_addr_below(address_t hi)
{
address_t s=0;
//	std::cout << __FUNCTION__ << ": hi = " << hi << std::endl;
for(iterator i=blocks.begin(); i!=blocks.end(); ++i)
{
if( i->first <= hi)	//Only consider blocks that start at or below hi
{
const address_t a = i->first + i->second.size() - 1;	//Max address of this block
//	std::cout << __FUNCTION__ << ": a = " << a << std::endl;
if( a > s )
s = a;
}
}
//A qualifying block may still extend past hi, so clamp the result.
if( s > hi )
return hi;
else
return s;
}
//Return true if an element exists at addr
//Return true if a populated element exists at addr
bool hex_data::isset(address_t addr)
{
	//Scan backwards for the last block whose start address is <= addr
	reverse_iterator i = blocks.rbegin();
	while( (i!=blocks.rend()) && (i->first > addr))
		++i;
	//FIX: when no block starts at or below addr (including an empty list) the
	// old code dereferenced rend() -- undefined behaviour. Nothing is set there.
	if( i == blocks.rend() )
		return false;
	//FIX: valid offsets are [0, size); the old '>' comparison wrongly reported
	// the one-past-the-end address as set.
	return (addr - i->first) < i->second.size();
}
//Load a hex file from disk
//Destroys any data that has already been loaded
bool hex_data::load(const char *path)
{
FILE *fp;
dblock *db; //Temporary pointer
unsigned int hi, lo, address, count, rtype, i, j;
uint16_t linear_address(0);
uint32_t a;
if( (fp=fopen(path, "r"))==NULL )
{
// printf("%s: Can't open %s\n", __FUNCTION__, path);
return false;
}
clear(); //First, clean house
//Start parsing the file
while(!feof(fp))
{
if(fgetc(fp)==':') //First character of every line should be ':'
{
// std::cout << __FUNCTION__ << ": Got line" << std::endl;
fscanf(fp, "%2x", &count); //Read in byte count
fscanf(fp, "%4x", &address); //Read in address
fscanf(fp, "%2x", &rtype); //Read type
count /= 2; //Convert byte count to word count
address /= 2; //Byte address to word address
switch(rtype) //What type of record?
{
case 0: //Data block so store it
//Make a data block
a = (static_cast<uint32_t>(linear_address) << 16) + address;
db = add_block(a, count);
// std::cout << __FUNCTION__ << ": db->first = " << std::hex << db->first << std::endl;
// std::cout << __FUNCTION__ << ": db->first*2 = " << std::hex << (db->first*2) << std::endl;
for(i=0; i<count; i++) //Read all of the data bytes
{
fscanf(fp, "%2x", &lo); //Low byte
fscanf(fp, "%2x", &hi); //High byte
db->second[i] = ((hi<<8)&0xFF00) | (lo&0x00FF); //Assemble the word
}
break;
case 1: //EOF
break;
case 2: //Segment address record (INHX32)
segment_addr_rec = true;
break;
case 4: //Linear address record (INHX32)
if(address == 0x0000)
{
fscanf(fp, "%4x", &linear_address); //Get the new linear address
linear_addr_rec = true;
}
else
{
//FIXME There's a problem
}
break;
}
fscanf(fp,"%*[^\n]\n"); //Ignore the checksum and the newline
}
else
{
printf("%s: Bad line\n", __FUNCTION__);
fscanf(fp, "%*[^\n]\n"); //Ignore the rest of the line
}
}
fclose(fp);
blocks.sort(); //Sort the data blocks by address (ascending)
return true;
}
//Write all data to a file
void hex_data::write(const char *path)
{
std::ofstream ofs(path);
if(!ofs)
{
std::cerr << "Couldn't open the output file stream\n";
exit(1);
}
write(ofs);
ofs.close();
}
//Write all data to an output stream
//Write all data to an output stream in Intel HEX format.
//Emits an INHX32M header plus 04 (linear address) records when any block
// lives above the first 64K words.
void hex_data::write(std::ostream &os)
{
	uint8_t checksum;
	uint16_t linear_address(0);
	if(!os)
	{
		std::cerr << "Couldn't open the output file stream\n";
		exit(1);
	}
	truncate(8);	//Limit record length to 8 words (purely aesthetic)
	blocks.sort();	//Sort the data blocks by address (ascending)
	os.setf(std::ios::hex, std::ios::basefield);	//Output hex instead of decimal
	os.setf(std::ios::uppercase);	//Use uppercase hex notation
	os.fill('0');	//Pad with zeroes
	//If we already know this is an INHX32M file, start with the header;
	// otherwise scan the blocks to find out
	if( linear_addr_rec )
	{
		//FIX: the header is a full record and needs its own line; it used to
		// run into the first data record.
		os << INH32M_HEADER << std::endl;
	}
	else
	{
		for(iterator i=blocks.begin(); i!=blocks.end(); i++)
		{
			if(i->first & 0xFFFF0000)	//Check the upper 16 bits
			{
				linear_addr_rec = true;
				os << INH32M_HEADER << std::endl;	//FIX: newline (see above)
				break;	//Only need to find one
			}
		}
	}
	for(iterator i=blocks.begin(); i!=blocks.end(); i++)
	{
		//Emit a linear address record when a block enters a new 64K segment
		if( (i->first & 0xFFFF0000) != 0 )
		{
			if( static_cast<uint16_t>(i->first >> 16) != linear_address )
			{
				//FIX: update linear_address BEFORE emitting; the record must
				// carry the NEW upper address (the old code wrote the stale one).
				linear_address = static_cast<uint16_t>(i->first >> 16);
				os << ":02000004";
				os.width(4);
				os << linear_address;	//The new upper 16 address bits
				os.width(2);
				//FIX: mask the two's-complement checksum to one byte; the raw
				// (negative) int used to print as 8 hex digits.
				os << (0xFF & (0x01 + ~(0x06 + ((linear_address>>8)&0xFF) + (linear_address&0xFF))));
				os << std::endl;
			}
		}
		checksum = 0;
		os << ':';	//Every record begins with ':'
		os.width(2);
		os << i->second.size()*2;	//Record length in bytes
		checksum += i->second.size()*2;
		os.width(4);
		const uint16_t byte_addr = static_cast<uint16_t>(i->first*2);
		os << byte_addr;	//Byte address (lower 16 bits)
		//FIX: the checksum must cover the bytes actually emitted (the byte
		// address), not the word address the old code summed.
		checksum += static_cast<uint8_t>(byte_addr & 0x00FF);
		checksum += static_cast<uint8_t>(byte_addr >> 8);
		os << "00";	//Record type: data
		for(size_type j=0; j<i->second.size(); j++)	//Data words, LSB first, ASCII hex
		{
			os.width(2);
			os << (i->second[j] & 0x00FF);
			os.width(2);
			os << ((i->second[j]>>8) & 0x00FF);
			checksum += static_cast<uint8_t>(i->second[j] & 0x00FF);
			checksum += static_cast<uint8_t>(i->second[j] >> 8);
		}
		checksum = 0x01 + ~checksum;	//Two's complement of the byte sum
		os.width(2);
		//uint8_t would stream as a character; widen so it prints as hex digits
		os << static_cast<uint16_t>(checksum);
		os << std::endl;
	}
	os << ":00000001FF\n";	//EOF marker
}
//Truncate all of the blocks to a given length
// Maintains sort order
//Truncate all of the blocks to a given length; maintains sort order.
//Overflow elements are moved into a freshly inserted block immediately
// after the one being truncated, at the correct follow-on address.
void hex_data::truncate(hex_data::size_type len)
{
for(iterator i=blocks.begin(); i!=blocks.end(); i++)
{
if(i->second.size() > len)	//If the block is too long...
{
//Insert a new block right after the current one (list::insert does
// not invalidate i, so the loop then visits the new block too)
iterator j(i);
j = blocks.insert(++j, dblock());
j->first = i->first + len;	//Give the new block an address
//Make an interator that points to the first element to copy out of i->second
dblock::second_type::iterator k(i->second.begin());
advance(k, len);
j->second.assign(k, i->second.end());	//Assign the extra bytes to the new block
i->second.erase(k, i->second.end());	//Truncate the original block
}
}
}
}
|
#!/bin/bash
# KUU.sh
#
# Usage: source ./KUU.sh <year> <month no> <CPV code>
#
# Builds a CSV file from the "Concluded contracts" ("Sõlmitud lepingud")
# open-data file of the Estonian Public Procurement Register.
#
# <year>     - e.g. 2020
# <month no> - e.g. 6
# <CPV code> - e.g.
#              45  construction services
#              72  (i.e. IT services)
#
# CC BY-NC-SA, Priit Parmakson, 2021
# Check the argument count ("return" rather than "exit" because the script
# is meant to be sourced).
if [ "$#" -ne 3 ]; then
echo " "
echo 'Kasutamine: source ./KUU.sh <aasta> <kuu nr> <CPV kood>'
echo " "
return
fi
echo "Töötlen $1 $2 $3..."
# Determine the length of the CPV code (prefix match length below).
KOODIPIKKUS="${#3}"
echo " CPV-koodi pikkus: $KOODIPIKKUS"
cp $1_$2.xml $1_$2_NONS.xml
# Strip XML namespaces; xmlstarlet cannot process them.
sed -i 's/ns2://g' $1_$2_NONS.xml
sed -i 's/xmlns.*\s//g' $1_$2_NONS.xml
# Delete every notice whose CPV code does not start with $3.
xmlstarlet ed -d \
"//TED_ESENDERS[substring(descendant::CPV_CODE/@CODE,1,$KOODIPIKKUS)!='$3']" \
$1_$2_NONS.xml > $1_$2_72.xml
# Write the CSV file (reference no, buyer, date, contractor, value, title).
xmlstarlet sel -t \
-m "//AWARD_CONTRACT" \
-v "../OBJECT_CONTRACT/REFERENCE_NUMBER" -o ", " \
-o "\"" -v "../CONTRACTING_BODY/ADDRESS_CONTRACTING_BODY/OFFICIALNAME" -o "\", " \
-v "AWARDED_CONTRACT/DATE_CONCLUSION_CONTRACT" -o ", " \
-o "\"" -v ".//CONTRACTORS/CONTRACTOR[1]//OFFICIALNAME" -o "\", " \
-v "AWARDED_CONTRACT/VALUES/VAL_TOTAL" -o ", " \
-o "\"" -v "../OBJECT_CONTRACT/TITLE/P" -o "\"" \
-n $1_$2_72.xml > $1_$2.csv
# Delete the temporary files.
rm $1_$2_NONS.xml
rm $1_$2_72.xml
# Notes
# Find Length of String in Bash
# https://linuxhint.com/length_of_string_bash/
# The -o "\"" pieces wrap fields that may contain the CSV separator (comma)
# in double quotes.
const express = require('express');
const userController = require('./user.controller');
const checkAuth = require('./auth.middleware');
// User account routes, mounted by the main app.
const router = express.Router();
// POST /signup - create a new account.
router.post('/signup', userController.userSignUp);
// POST /login - NOTE(review): checkAuth runs BEFORE userLogin; requiring an
// already-authenticated request in order to log in looks inverted - confirm.
router.post('/login', checkAuth, userController.userLogin);
module.exports = router;
|
#!/bin/bash
# Run the "aggregate" npm script from this script's own directory, so it
# works no matter where it is invoked from.
# FIX: quote the substitution (the old unquoted backticks broke on paths
# containing spaces) and abort if the directory change fails.
cd "$(dirname "$0")" || exit 1
npm run --silent aggregate
package local.example.aleatory.views.number;
import com.vaadin.flow.component.button.Button;
import com.vaadin.flow.component.notification.Notification;
import com.vaadin.flow.component.orderedlayout.HorizontalLayout;
import com.vaadin.flow.component.textfield.TextField;
import com.vaadin.flow.router.PageTitle;
import com.vaadin.flow.router.Route;
import com.vaadin.flow.router.RouteAlias;
import local.example.aleatory.views.MainLayout;
/**
 * View registered at "/number" and at the application root.
 * NOTE(review): despite the "Number" name this is the stock Vaadin
 * hello-world template (name field + greeting button) -- confirm whether
 * number-specific content is still to come.
 */
@PageTitle("Number")
@Route(value = "number", layout = MainLayout.class)
@RouteAlias(value = "", layout = MainLayout.class)
public class NumberView
extends HorizontalLayout {
// Input whose value is echoed back in the greeting notification.
private final TextField name;
public NumberView() {
name = new TextField("Your name");
Button sayHello = new Button("Say hello");
// Shows "Hello <name>" as a toast when the button is clicked.
sayHello.addClickListener(e -> Notification.show("Hello " + name.getValue()));
setMargin(true);
// Alignment is the nested type inherited via HorizontalLayout/FlexComponent.
setVerticalComponentAlignment(Alignment.END, name, sayHello);
add(name, sayHello);
}
}
|
import styled from 'styled-components';
// Full-width, 230px-tall container with centered text.
// FIX: 'width: 100%100%' was not valid CSS (browsers drop the whole
// declaration); the intent was clearly 'width: 100%'.
export const Container = styled.div`
  width: 100%;
  height: 230px;
  text-align: center;
`;
#!/usr/bin/env bash
set -e # halt script on error
# Build the site into the arduino-day subdirectory (matching the deploy path).
bundle exec jekyll build -d _site/arduino-day
# Validate the generated HTML; external links are skipped.
bundle exec htmlproofer --allow-hash-href --disable-external --empty-alt-ignore ./_site
package main
import "fmt"
// main performs a linear scan of a fixed array and reports whether the
// target value appears in it.
func main() {
	nums := [4]int{1, 2, 5, 10}
	target := 2
	for i := 0; i < len(nums); i++ {
		if nums[i] != target {
			continue
		}
		fmt.Println("Value is in the array")
		break
	}
}
def infer(self, image, threshold):
    """Run the wrapped detector on ``image`` and keep confident hits.

    Returns the ``(object, score)`` pairs produced by
    ``self.model.detect_objects`` whose score is at least ``threshold``,
    in detection order.
    """
    results = []
    for obj, score in self.model.detect_objects(image):
        if score >= threshold:
            results.append((obj, score))
    return results
<filename>node_modules/grommet/es6/components/Tabs/tabs.stories.js
import React from 'react';
import PropTypes from 'prop-types';
import { storiesOf } from '@storybook/react';
import { css } from 'styled-components';
import { Attraction } from "grommet-icons/es6/icons/Attraction";
import { Car } from "grommet-icons/es6/icons/Car";
import { CircleInformation } from "grommet-icons/es6/icons/CircleInformation";
import { Currency } from "grommet-icons/es6/icons/Currency";
import { TreeOption } from "grommet-icons/es6/icons/TreeOption";
import { Box, Heading, Grommet, FormField, Tab, Tabs, Text, TextInput } from 'grommet';
import { grommet } from 'grommet/themes';
import { deepMerge } from 'grommet/utils';
// Storybook demo (compiled Babel output): three fixed tabs whose selection
// is managed internally by the Tabs component. `plain` toggles unstyled
// tab headers.
var UncontrolledTabs = function UncontrolledTabs(_ref) {
var plain = _ref.plain;
return React.createElement(Grommet, {
theme: grommet,
full: true
}, React.createElement(Box, {
fill: true
}, React.createElement(Tabs, {
flex: true
}, React.createElement(Tab, {
plain: plain,
title: "Tab 1"
}, React.createElement(Box, {
fill: true,
pad: "large",
align: "center",
background: "accent-1"
}, React.createElement(Attraction, {
size: "xlarge"
}))), React.createElement(Tab, {
plain: plain,
title: "Tab 2"
}, React.createElement(Box, {
fill: true,
pad: "large",
align: "center",
background: "accent-2"
}, React.createElement(TreeOption, {
size: "xlarge"
}))), React.createElement(Tab, {
plain: plain,
title: "Tab 3"
}, React.createElement(Box, {
fill: true,
pad: "large",
align: "center",
background: "accent-3"
}, React.createElement(Car, {
size: "xlarge"
}))))));
};
UncontrolledTabs.defaultProps = {
plain: false
};
UncontrolledTabs.propTypes = {
plain: PropTypes.bool
};
var ControlledTabs = function ControlledTabs() {
var _React$useState = React.useState(),
index = _React$useState[0],
setIndex = _React$useState[1];
var onActive = function onActive(nextIndex) {
return setIndex(nextIndex);
};
return React.createElement(Grommet, {
theme: grommet
}, React.createElement(Tabs, {
activeIndex: index,
onActive: onActive
}, React.createElement(Tab, {
title: "Tab 1"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-1"
}, React.createElement(Attraction, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 2"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-2"
}, React.createElement(TreeOption, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 3"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-3"
}, React.createElement(Car, {
size: "xlarge"
})))));
};
var ResponsiveTabs = function ResponsiveTabs() {
var _React$useState2 = React.useState(),
index = _React$useState2[0],
setIndex = _React$useState2[1];
var onActive = function onActive(nextIndex) {
return setIndex(nextIndex);
};
return React.createElement(Grommet, {
theme: grommet
}, React.createElement(Tabs, {
activeIndex: index,
onActive: onActive
}, React.createElement(Tab, {
title: "Tab 1"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-1"
}, React.createElement(Attraction, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 2"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-2"
}, React.createElement(TreeOption, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 3"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-3"
}, React.createElement(Car, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 4"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-1"
}, React.createElement(Attraction, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 5"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-2"
}, React.createElement(TreeOption, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 6"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-3"
}, React.createElement(Car, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 7"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-1"
}, React.createElement(Attraction, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 8"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-2"
}, React.createElement(TreeOption, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 9"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-3"
}, React.createElement(Car, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 10"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-1"
}, React.createElement(Attraction, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 11"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-1"
}, React.createElement(Attraction, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 12"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-2"
}, React.createElement(TreeOption, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 13"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-3"
}, React.createElement(Car, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 14"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-1"
}, React.createElement(Attraction, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 15"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-2"
}, React.createElement(TreeOption, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 16"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-3"
}, React.createElement(Car, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 17"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-1"
}, React.createElement(Attraction, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 18"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-2"
}, React.createElement(TreeOption, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 19"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-3"
}, React.createElement(Car, {
size: "xlarge"
}))), React.createElement(Tab, {
title: "Tab 20"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-1"
}, React.createElement(Attraction, {
size: "xlarge"
})))));
};
// Renders an icon plus a bold small label in a row; used as the `title`
// of a <Tab/> to get richer tab headers (compiled Babel output).
var RichTabTitle = function RichTabTitle(_ref2) {
var icon = _ref2.icon,
label = _ref2.label;
return React.createElement(Box, {
direction: "row",
align: "center",
gap: "xsmall",
margin: "xsmall"
}, icon, React.createElement(Text, {
size: "small"
}, React.createElement("strong", null, label)));
};
RichTabTitle.propTypes = {
icon: PropTypes.node.isRequired,
label: PropTypes.string.isRequired
};
var RichTabs = function RichTabs() {
return React.createElement(Grommet, {
theme: grommet
}, React.createElement(Tabs, null, React.createElement(Tab, {
title: React.createElement(RichTabTitle, {
icon: React.createElement(CircleInformation, {
color: "accent-2"
}),
label: "Personal Data"
})
}, React.createElement(FormField, {
label: "Name"
}, React.createElement(TextInput, {
placeholder: "Enter your name..."
}))), React.createElement(Tab, {
title: React.createElement(RichTabTitle, {
icon: React.createElement(Currency, {
color: "neutral-2"
}),
label: "Payment"
})
}, React.createElement(FormField, {
label: "Card Number"
}, React.createElement(TextInput, {
placeholder: "Enter your card number..."
})))));
};
var customTheme = deepMerge(grommet, {
global: {
edgeSize: {
small: '10px'
},
elevation: {
light: {
small: '0px 1px 5px rgba(0, 0, 0, 0.50)',
medium: '0px 3px 8px rgba(0, 0, 0, 0.50)'
}
}
},
tab: {
active: {
background: 'dark-1',
color: 'accent-1'
},
background: 'dark-3',
border: undefined,
color: 'white',
hover: {
background: 'dark-1'
},
margin: undefined,
pad: {
bottom: undefined,
horizontal: 'small'
},
extend: function extend(_ref3) {
var theme = _ref3.theme;
return css(["border-radius:", ";box-shadow:", ";"], theme.global.control.border.radius, theme.global.elevation.light.small);
}
},
tabs: {
background: 'dark-3',
gap: 'medium',
header: {
background: 'dark-2',
extend: function extend(_ref4) {
var theme = _ref4.theme;
return css(["padding:", ";box-shadow:", ";"], theme.global.edgeSize.small, theme.global.elevation.light.medium);
}
},
panel: {
extend: function extend(_ref5) {
var theme = _ref5.theme;
return css(["padding:", ";box-shadow:", ";"], theme.global.edgeSize.large, theme.global.elevation.light.medium);
}
}
}
});
var CustomTabs = function CustomTabs() {
return React.createElement(Grommet, {
theme: customTheme
}, React.createElement(Tabs, null, React.createElement(Tab, {
title: React.createElement(RichTabTitle, {
icon: React.createElement(CircleInformation, {
color: "accent-1"
}),
label: "Personal Data"
})
}, React.createElement(FormField, {
label: "Name"
}, React.createElement(TextInput, {
placeholder: "Enter your name..."
}))), React.createElement(Tab, {
title: React.createElement(RichTabTitle, {
icon: React.createElement(Currency, {
color: "light-3"
}),
label: "Payment"
})
}, React.createElement(FormField, {
label: "Card Number"
}, React.createElement(TextInput, {
placeholder: "Enter your card number..."
}))), React.createElement(Tab, {
title: "Simple Tab"
}, "This Tab has a different styling than the RichTabTitle (e.g tab.active.color)")));
};
var ScrollableTabs = function ScrollableTabs() {
return React.createElement(Grommet, {
theme: grommet,
full: true
}, React.createElement(Box, {
fill: true
}, React.createElement(Tabs, {
flex: true
}, React.createElement(Tab, {
title: "Tab 1"
}, React.createElement(Box, {
fill: true,
overflow: "auto",
pad: "xlarge",
align: "center",
background: "accent-1"
}, React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"), React.createElement(Heading, null, "hello!"))), React.createElement(Tab, {
title: "Tab 2"
}, React.createElement(Box, {
margin: "small",
pad: "large",
align: "center",
background: "accent-2"
}, React.createElement(TreeOption, {
size: "xlarge"
}))))));
};
// Register every Tabs story variant with Storybook.
storiesOf('Tabs', module).add('Uncontrolled', function () {
return React.createElement(UncontrolledTabs, null);
}).add('Controlled', function () {
return React.createElement(ControlledTabs, null);
}).add('Responsive', function () {
return React.createElement(ResponsiveTabs, null);
}).add('Rich', function () {
return React.createElement(RichTabs, null);
}).add('Custom Theme', function () {
return React.createElement(CustomTabs, null);
}).add('Scrollable', function () {
return React.createElement(ScrollableTabs, null);
}).add('Plain', function () {
return React.createElement(UncontrolledTabs, {
plain: true
});
});
<filename>Node Crash Session/logger.js
const EventEmitter = require("events");
const uuid = require("uuid");
const fs = require("fs");
const path = require("path");
// Print a sample UUID at startup (debug/demo output).
console.log(uuid.v4());
// Event-driven logger: each log() call emits a "message" event carrying a
// fresh UUID plus the message, and also writes the message to disk.
class Logger extends EventEmitter {
log(msg) {
// Notify subscribers first.
this.emit("message", { id: uuid.v4(), msg });
// NOTE(review): fs.writeFile REPLACES the file contents on every call, so
// only the last message survives -- fs.appendFile is probably intended;
// confirm. Also note the id emitted above and the uuid written below are
// two different uuid.v4() values.
fs.writeFile(
path.join(__dirname, "/references/test", "log.txt"),
msg + uuid.v4(),
err => {
if (err) throw err;
console.log("File written to log.txt");
}
);
}
}
const logger = new Logger();
// Subscribe before logging so the first "message" event is observed.
logger.on("message", data => console.log(`Called Listener: `, data));
logger.log("Hi");
|
<gh_stars>0
package scene
import (
"github.com/gravestench/director/pkg/common"
"github.com/gravestench/mathlib"
lua "github.com/yuin/gopher-lua"
)
// luaCheckEID reads Lua argument 1 as userdata and returns the
// *common.Entity it wraps. On any other payload it raises a Lua argument
// error; ArgError does not return control here, so the trailing nil only
// satisfies the compiler.
func (s *Scene) luaCheckEID() *common.Entity {
ud := s.Lua.CheckUserData(1)
if v, ok := ud.Value.(*common.Entity); ok {
return v
}
s.Lua.ArgError(1, "EID expected")
return nil
}
// makeLuaTableVec3 wraps vec3 in a single Lua function that both sets and
// gets its components: called with exactly three numbers it assigns
// (x, y, z) and returns nothing; called otherwise it returns the current
// x, y, z values.
func (s *Scene) makeLuaTableVec3(vec3 *mathlib.Vector3) *lua.LFunction {
setGetXYZ := func(L *lua.LState) int {
if L.GetTop() == 3 {
x := L.CheckNumber(1)
y := L.CheckNumber(2)
z := L.CheckNumber(3)
vec3.Set(float64(x), float64(y), float64(z))
return 0
}
x, y, z := vec3.XYZ()
// NOTE(review): results are pushed via s.Lua while the arguments are read
// from L -- presumably the same *lua.LState; confirm.
s.Lua.Push(lua.LNumber(x))
s.Lua.Push(lua.LNumber(y))
s.Lua.Push(lua.LNumber(z))
return 3
}
return s.Lua.NewFunction(setGetXYZ)
}
// makeLuaTableVec2 wraps vec2 in a single Lua function that both sets and
// gets its components: called with exactly two numbers it assigns (x, y)
// and returns nothing; called otherwise it returns the current x, y values.
func (s *Scene) makeLuaTableVec2(vec2 *mathlib.Vector2) *lua.LFunction {
	setGetXY := func(L *lua.LState) int {
		if L.GetTop() == 2 {
			x := L.CheckNumber(1)
			y := L.CheckNumber(2)
			vec2.Set(float64(x), float64(y))
			return 0 // setter returns no values
		}
		x, y := vec2.XY()
		// NOTE(review): results are pushed via s.Lua while the arguments are
		// read from L -- presumably the same *lua.LState; confirm.
		s.Lua.Push(lua.LNumber(x))
		s.Lua.Push(lua.LNumber(y))
		// FIX: only two values are pushed; the old `return 3` (copied from the
		// Vec3 variant) made Lua callers see a bogus extra value.
		return 2
	}
	return s.Lua.NewFunction(setGetXY)
}
|
<filename>webapp/src/app/shared/shared.module.ts<gh_stars>0
import {NgModule} from '@angular/core';
import {GlobalEventsService} from './global-events.service';
import {HostDirective} from './host.directive';
import {HttpClientService} from './http-client.service';
import {MapLoaderService} from './map-loader.service';
import {MapSettingsComponent} from './dialogs/map-settings/map-settings.component';
import {FloatingWindowComponent} from './floating-window/floating-window.component';
import {JsonEditorComponent} from './json-editor/json-editor.component';
import {FormsModule} from '@angular/forms';
import {MaterialModule} from '../material.module';
import {AngularDraggableModule} from 'angular2-draggable';
import {CommonModule} from '@angular/common';
import {FlexLayoutModule} from '@angular/flex-layout';
import {OffsetMapComponent} from './dialogs/offset-map/offset-map.component';
// Shared Angular module: bundles the directives, dialog components and
// services reused across the map-editor feature modules.
@NgModule({
imports: [
FormsModule,
FlexLayoutModule,
CommonModule,
MaterialModule,
AngularDraggableModule,
],
declarations: [
HostDirective,
MapSettingsComponent,
JsonEditorComponent,
FloatingWindowComponent,
OffsetMapComponent,
],
// Services provided at this module's injector level.
providers: [
HttpClientService,
MapLoaderService,
GlobalEventsService,
],
// Components created dynamically (e.g. opened as dialogs); required by
// pre-Ivy Angular.
entryComponents: [
JsonEditorComponent,
MapSettingsComponent,
OffsetMapComponent,
],
// Note: OffsetMapComponent is entry-only and intentionally not exported.
exports: [
HostDirective,
MapSettingsComponent,
JsonEditorComponent,
FloatingWindowComponent,
]
})
export class SharedModule {
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.