text
stringlengths 1
1.05M
|
|---|
package org.slos.battle.abilities.rule;
import org.hibernate.validator.internal.util.CollectionHelper;
import org.slos.battle.GameContext;
import org.slos.battle.abilities.rule.attack.DamageRule;
import org.slos.battle.attack.AttackContext;
import org.slos.splinterlands.domain.monster.DamageType;
import java.util.Set;
public class ReduceHalfDamageRule extends DamageRule {

    // Damage types this rule applies to; anything else passes through untouched.
    private final Set<DamageType> damageType;

    public ReduceHalfDamageRule(DamageType... damageType) {
        super(AttackRuleType.TARGET_DAMAGE);
        this.damageType = CollectionHelper.asSet(damageType);
    }

    @Override
    public Integer execute(AttackContext attackContext, GameContext gameContext) {
        // Always reports 0 here; the halving itself happens in figureDamage().
        return 0;
    }

    public Integer figureDamage(AttackContext attackContext, GameContext gameContext, Integer currentCalculatedDamage) {
        // Guard clause: attacks of an unaffected damage type keep full damage.
        if (!damageType.contains(attackContext.getDamageType())) {
            return currentCalculatedDamage;
        }
        int halved;
        if (currentCalculatedDamage == 1) {
            // Special case: a single point of damage is reduced to nothing.
            halved = 0;
        } else {
            halved = currentCalculatedDamage / 2;
            if (currentCalculatedDamage % 2 == 1) {
                halved++; // odd amounts round the half up
            }
        }
        gameContext.log("Reducing half damage to: %1$s", halved);
        return halved;
    }
}
|
import { Link } from "@linkto/core";
export const getLinksWithOrder = (list: Link[]) => {
  /**
   * When inserted in the DB the order defaults to 9999;
   * we sort items on the order in case there were any
   * updates after they were inserted, and re-assign the order.
   *
   * NOTE: We can assume that no user will have more than
   * 100-200 links at most.
   */
  // Copy before sorting: Array.prototype.sort mutates in place, and the
  // previous implementation silently reordered the caller's array.
  const orderedList: Link[] = [...list]
    .sort((a, b) => a.display_order - b.display_order)
    .map((link, idx) => ({ ...link, display_order: idx }));
  return orderedList;
};
/**
 * Moves the item at startIdx to endIdx (drag-and-drop reorder) and renumbers
 * every item's display_order to match its new position.
 */
export const reorderList = (list: Link[], startIdx: number, endIdx: number) => {
  const working = Array.from(list);
  // Pull the dragged item out of its old slot…
  const [moved] = working.splice(startIdx, 1);
  // …drop it into its new slot…
  working.splice(endIdx, 0, moved);
  // …then renumber everything to match the new ordering.
  return working.map((link, position) => ({
    ...link,
    display_order: position,
  }));
};
|
// app.component.ts
import { Component } from '@angular/core';
// Root component: gates the main application behind the login screen. The
// login component signals success via its (loggedIn) output, which flips
// showMainComponent and swaps the two *ngIf branches.
@Component({
selector: 'my-app',
template: `
<div *ngIf="showMainComponent">
<app-main></app-main>
</div>
<div *ngIf="!showMainComponent">
<app-login (loggedIn)="showMainComponent = true"></app-login>
</div>
`,
styleUrls: ['./app.component.css']
})
export class AppComponent {
// Starts false so the login view is shown first.
showMainComponent = false;
}
|
<reponame>buidler-labs/hedera-mirror-node
package com.hedera.mirror.importer.repository;
/*-
*
* Hedera Mirror Node
*
* Copyright (C) 2019 - 2022 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import static org.assertj.core.api.Assertions.assertThat;
import java.util.List;
import javax.annotation.Resource;
import org.junit.jupiter.api.Test;
import org.springframework.jdbc.core.RowMapper;
import com.hedera.mirror.common.domain.entity.TokenAllowance;
// Repository-level tests for TokenAllowance persistence.
class TokenAllowanceRepositoryTest extends AbstractRepositoryTest {

    private static final RowMapper<TokenAllowance> ROW_MAPPER = rowMapper(TokenAllowance.class);

    @Resource
    private TokenAllowanceRepository tokenAllowanceRepository;

    // Persists an allowance and verifies it can be read back by its id.
    @Test
    void save() {
        TokenAllowance tokenAllowance = domainBuilder.tokenAllowance().persist();
        assertThat(tokenAllowanceRepository.findById(tokenAllowance.getId())).get().isEqualTo(tokenAllowance);
    }

    /**
     * This test verifies that the domain object and table definition are in sync with the history table.
     */
    @Test
    void history() {
        TokenAllowance tokenAllowance = domainBuilder.tokenAllowance().persist();
        // The raw "insert … select *" only succeeds when the live and history
        // column sets match, which is what this test is really exercising.
        jdbcOperations.update("insert into token_allowance_history select * from token_allowance");
        List<TokenAllowance> tokenAllowanceHistory = jdbcOperations.query("select * from token_allowance_history",
                ROW_MAPPER);
        assertThat(tokenAllowanceRepository.findAll()).containsExactly(tokenAllowance);
        assertThat(tokenAllowanceHistory).containsExactly(tokenAllowance);
    }
}
|
<reponame>saucelabs/travis-core<filename>lib/travis/api/formats.rb<gh_stars>100-1000
module Travis
  module Api
    # Shared formatting helpers for API payloads.
    module Formats
      # Renders +date+ as an ISO-8601 UTC-style timestamp string
      # (e.g. "2011-01-30T05:25:00Z"); nil passes through unchanged.
      def format_date(date)
        date ? date.strftime('%Y-%m-%dT%H:%M:%SZ') : date
      end
    end
  end
end
|
// Controllers for the daiane Ionic app. Every data screen follows the same
// request cycle against the daianeWeb CGI backend (show spinner -> GET ->
// store response -> hide spinner). That plumbing was previously copy-pasted
// into nine controllers; it now lives in the 'daianeApi' factory, and each
// controller only declares its initial scope and where the response lands.
(function() {
  var app = angular.module('app.controllers', []);

  // Shared backend access. Stamps the event name and target CGI program on
  // the escopo object, issues the GET and hands response.data to onSuccess;
  // on failure it hides the spinner and warns the user about connectivity.
  app.factory('daianeApi', function($http, $ionicLoading) {
    var SERVICE_URL = 'http://daiane.fabiotech.com.br/cgi-bin/daianeWeb.cgi';
    //var SERVICE_URL = 'http://localhost/csp/daiane/daianeEventosCtrl.csp';
    return {
      request: function(fworkEscopo, fworkEvento, onSuccess) {
        $ionicLoading.show({content: 'Loading', animation: 'fade-in', showBackdrop: true, maxWidth: 200, showDelay: 0});
        fworkEscopo.fworkEvento = fworkEvento;
        fworkEscopo.fworkProgramaCgi = 'daianeEventosCtrl';
        $http({
          method  : 'GET',
          url     : SERVICE_URL,
          params  : fworkEscopo,
          headers : {'Content-Type': 'application/x-www-form-urlencoded'}
        }).then(function successCallback(response) {
          onSuccess(response.data);
          $ionicLoading.hide();
        }, function errorCallback(response) {
          $ionicLoading.hide();
          // typo fixed: was "Sem concexão verifique…"
          alert('Sem conexão, verifique a internet do seu dispositivo!');
        });
      }
    };
  });

  // Menu screen: also forwards the push-notification registration id, when
  // the device has one, so the backend can target this installation.
  app.controller('daianeEventosMenuCtrl', function($scope, $rootScope, daianeApi) {
    $scope.fworkEscopo = { codEmpresa: 1 };
    $scope.eventoRotina = function(fworkEvento) {
      if ($rootScope.registrationId) {
        $scope.fworkEscopo.registrationId = $rootScope.registrationId;
      }
      daianeApi.request($scope.fworkEscopo, fworkEvento, function(data) {
        $scope.fworkEscopo = data;
      });
    };
    $scope.metodoInicial = function(fworkEvento) {
      $scope.eventoRotina(fworkEvento);
    };
  });

  // Goals screen: results land in fworkRetorno; the reporting period can be
  // toggled between the current and the previous month.
  app.controller('metasCtrl', function($scope, daianeApi) {
    $scope.fworkEscopo = { codEmpresa: 1, tituloListagem: 'Acumulado mês atual', periodo: 'atual' };
    $scope.fworkRetorno = {};
    $scope.eventoRotina = function(fworkEvento) {
      daianeApi.request($scope.fworkEscopo, fworkEvento, function(data) {
        $scope.fworkRetorno = data;
      });
    };
    $scope.metodoInicial = function(fworkEvento) {
      $scope.eventoRotina(fworkEvento);
    };
    $scope.mesAtual = function() {
      $scope.fworkEscopo.periodo = 'atual';
      $scope.metodoInicial('metas^daianeEventosCtrl');
    };
    $scope.mesAnterior = function() {
      $scope.fworkEscopo.periodo = 'anterior';
      $scope.metodoInicial('metas^daianeEventosCtrl');
    };
  });

  // Defects screen: same cycle as metasCtrl, but the listing title tracks
  // the selected period.
  app.controller('defeitosCtrl', function($scope, daianeApi) {
    $scope.fworkEscopo = { codEmpresa: 1, tituloListagem: 'Acumulado mês atual', periodo: 'atual' };
    $scope.fworkRetorno = {};
    $scope.eventoRotina = function(fworkEvento) {
      daianeApi.request($scope.fworkEscopo, fworkEvento, function(data) {
        $scope.fworkRetorno = data;
      });
    };
    $scope.metodoInicial = function(fworkEvento) {
      $scope.eventoRotina(fworkEvento);
    };
    $scope.mesAtual = function() {
      $scope.fworkEscopo.periodo = 'atual';
      $scope.fworkEscopo.tituloListagem = 'Acumulado mês atual';
      $scope.metodoInicial('metas^daianeEventosCtrl');
    };
    $scope.mesAnterior = function() {
      $scope.fworkEscopo.periodo = 'anterior';
      $scope.fworkEscopo.tituloListagem = 'Acumulado mês Anterior';
      $scope.metodoInicial('metas^daianeEventosCtrl');
    };
  });

  // Simple screens: behavior is identical in all of them — the response
  // replaces fworkEscopo — so they are registered from one template.
  ['cardapioCtrl', 'vagasCtrl', 'aniversarioCtrl', 'eventosCtrl', 'fraseSemanaCtrl', 'promocaoCtrl'].forEach(function(name) {
    app.controller(name, function($scope, daianeApi) {
      $scope.fworkEscopo = { codEmpresa: 1 };
      $scope.eventoRotina = function(fworkEvento) {
        daianeApi.request($scope.fworkEscopo, fworkEvento, function(data) {
          $scope.fworkEscopo = data;
        });
      };
      $scope.metodoInicial = function(fworkEvento) {
        $scope.eventoRotina(fworkEvento);
      };
    });
  });

  // Notices screen: like the simple screens, plus a debug dump of the payload.
  app.controller('avisosCtrl', function($scope, daianeApi) {
    $scope.fworkEscopo = { codEmpresa: 1 };
    $scope.eventoRotina = function(fworkEvento) {
      daianeApi.request($scope.fworkEscopo, fworkEvento, function(data) {
        $scope.fworkEscopo = data;
        console.log($scope.fworkEscopo);
      });
    };
    $scope.metodoInicial = function(fworkEvento) {
      $scope.eventoRotina(fworkEvento);
    };
  });

  // Map screen: coordinates arrive via router state parameters.
  app.controller('googleMapCtrl', function($scope, $stateParams) {
    $scope.latitude = $stateParams.latitude;
    $scope.longitude = $stateParams.longitude;
  });

  app.controller('contatoCtrl', function($scope) {
  });
})();
|
A typical PHP web framework would have the following components:
- A core class that sets up the framework by defining its file structure, the modules, objects and classes used within the framework.
- A routing mechanism that determines which page of the website should be served when a URL is requested.
- A controller class, which handles the logic of connecting data from the model to the view.
- A template system to quickly create dynamic HTML or other types of documents.
- A database abstraction layer that enables accessing databases easily.
- A library of helper functions to help with tasks such as server-side caching, form validation and other common tasks.
|
<reponame>infinitiessoft/skyport-api<filename>src/test/java/com/infinities/skyport/timeout/service/TimedPlatformServicesTest.java
/*******************************************************************************
* Copyright 2015 InfinitiesSoft Solutions Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package com.infinities.skyport.timeout.service;
import java.util.concurrent.ExecutorService;
import org.dasein.cloud.platform.CDNSupport;
import org.dasein.cloud.platform.KeyValueDatabaseSupport;
import org.dasein.cloud.platform.MQSupport;
import org.dasein.cloud.platform.MonitoringSupport;
import org.dasein.cloud.platform.PlatformServices;
import org.dasein.cloud.platform.PushNotificationSupport;
import org.dasein.cloud.platform.RelationalDatabaseSupport;
import org.dasein.cloud.platform.bigdata.DataWarehouseSupport;
import org.jmock.Expectations;
import org.jmock.Mockery;
import org.jmock.integration.junit4.JUnit4Mockery;
import org.jmock.lib.concurrent.Synchroniser;
import org.jmock.lib.legacy.ClassImposteriser;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.infinities.skyport.exception.InitializationException;
import com.infinities.skyport.model.configuration.service.PlatformConfiguration;
// Unit test for TimedPlatformServices using jMock for all collaborators.
public class TimedPlatformServicesTest {

    // jMock context; Synchroniser makes it thread-safe and ClassImposteriser
    // allows mocking concrete classes as well as interfaces.
    protected Mockery context = new JUnit4Mockery() {
        {
            setThreadingPolicy(new Synchroniser());
            setImposteriser(ClassImposteriser.INSTANCE);
        }
    };

    private ExecutorService executorService;
    private PlatformConfiguration platformConfiguration;
    private PlatformServices platformServices;
    private CDNSupport cdnSupport;
    private DataWarehouseSupport dataWarehouseSupport;
    private KeyValueDatabaseSupport keyValueDatabaseSupport;
    private MQSupport mqSupport;
    private PushNotificationSupport pushNotificationSupport;
    private RelationalDatabaseSupport relationalDatabaseSupport;
    private MonitoringSupport monitoringSupport;

    // Mocks every platform support type the class under test may query.
    @Before
    public void setUp() throws Exception {
        platformServices = context.mock(PlatformServices.class);
        executorService = context.mock(ExecutorService.class);
        platformConfiguration = new PlatformConfiguration();
        cdnSupport = context.mock(CDNSupport.class);
        dataWarehouseSupport = context.mock(DataWarehouseSupport.class);
        keyValueDatabaseSupport = context.mock(KeyValueDatabaseSupport.class);
        mqSupport = context.mock(MQSupport.class);
        pushNotificationSupport = context.mock(PushNotificationSupport.class);
        relationalDatabaseSupport = context.mock(RelationalDatabaseSupport.class);
        monitoringSupport = context.mock(MonitoringSupport.class);
    }

    @After
    public void tearDown() throws Exception {
    }

    // Expects the TimedPlatformServices constructor to probe each has*Support
    // flag exactly once and, since every flag answers true, fetch each
    // corresponding support object exactly once.
    @Test
    public void testTimedPlatformServices() throws InitializationException {
        context.checking(new Expectations() {
            {
                exactly(1).of(platformServices).hasCDNSupport();
                will(returnValue(true));
                exactly(1).of(platformServices).hasDataWarehouseSupport();
                will(returnValue(true));
                exactly(1).of(platformServices).hasKeyValueDatabaseSupport();
                will(returnValue(true));
                exactly(1).of(platformServices).hasMessageQueueSupport();
                will(returnValue(true));
                exactly(1).of(platformServices).hasPushNotificationSupport();
                will(returnValue(true));
                exactly(1).of(platformServices).hasRelationalDatabaseSupport();
                will(returnValue(true));
                exactly(1).of(platformServices).hasMonitoringSupport();
                will(returnValue(true));
                exactly(1).of(platformServices).getCDNSupport();
                will(returnValue(cdnSupport));
                exactly(1).of(platformServices).getDataWarehouseSupport();
                will(returnValue(dataWarehouseSupport));
                exactly(1).of(platformServices).getKeyValueDatabaseSupport();
                will(returnValue(keyValueDatabaseSupport));
                exactly(1).of(platformServices).getMessageQueueSupport();
                will(returnValue(mqSupport));
                exactly(1).of(platformServices).getPushNotificationSupport();
                will(returnValue(pushNotificationSupport));
                exactly(1).of(platformServices).getRelationalDatabaseSupport();
                will(returnValue(relationalDatabaseSupport));
                exactly(1).of(platformServices).getMonitoringSupport();
                will(returnValue(monitoringSupport));
            }
        });
        new TimedPlatformServices(platformServices, platformConfiguration, executorService);
    }
}
|
package edu.uw.tacoma.piggy.view.table;
import java.util.List;
import javax.swing.JTable;
import javax.swing.table.DefaultTableModel;
import edu.uw.tacoma.piggy.model.dao.UserDAO;
import edu.uw.tacoma.piggy.model.entity.UserEntity;
@SuppressWarnings("serial")
public class UserTable extends JTable {

    /** Column headers shown by the table. */
    private static final Object[] COLUMN_NAMES = { "UserID", "FirstName", "LastName", "PhoneNumber" };

    // Entities currently backing the table rows, in row order.
    private List<UserEntity> entities;
    private DefaultTableModel model;

    public UserTable() {
        model = new DefaultTableModel(COLUMN_NAMES, 0);
        setModel(model);
    }

    /** Persists the new entity, then reloads the table from the DAO. */
    public void insert(UserEntity entity) {
        UserDAO.insert(entity);
        clearData();
        loadData();
    }

    /** Persists the change, then reloads the table from the DAO. */
    public void update(UserEntity entity) {
        UserDAO.update(entity);
        clearData();
        loadData();
    }

    /** Deletes the entity, then reloads the table from the DAO. */
    public void delete(UserEntity entity) {
        UserDAO.delete(entity);
        clearData();
        loadData();
    }

    /**
     * Returns the entity backing the given model row.
     * NOTE(review): assumes no view sorting/filtering is applied — confirm,
     * otherwise convert with convertRowIndexToModel first.
     */
    public UserEntity getData(int row) {
        return entities.get(row);
    }

    /** Loads every user from the DAO into the table. */
    public void loadData() {
        entities = UserDAO.listUser();
        for (UserEntity entity : entities) {
            addRow(entity);
        }
    }

    /** Removes all rows. */
    public void clearData() {
        // setRowCount(0) clears the rows and fires the proper removal event;
        // the old removeAllElements() + fireTableDataChanged() mutated the
        // model's internal vector behind its back.
        model.setRowCount(0);
    }

    private void addRow(UserEntity entity) {
        // Fresh array per row: the previous implementation reused one shared
        // mutable buffer (also used for the column headers) for every row.
        model.addRow(new Object[] {
                entity.getUserID(),
                entity.getFirstName(),
                entity.getLastName(),
                entity.getPhoneNumber() });
    }

    @Override
    public boolean isCellEditable(int row, int column) {
        // Read-only table; changes go through insert/update/delete.
        return false;
    }
}
|
#!/usr/bin/env bash
#SBATCH --ntasks-per-node=1+1166665
#SBATCH --time=00:30:00

# $2 carries an optional extra JVM argument; callers pass the literal
# two-character string "" (escaped quotes) when it is unused.
em="\"\""
cp=""
if [ "$em" != "$2" ]; then
  # fixed: 'cp = $2' invoked the /bin/cp binary instead of assigning
  cp=$2
fi
# Echo the equivalent standalone command line for debugging, then launch the
# MPI worker under slurm in the background and wait for it.
echo $9 -np $1 --hostfile $8 java -Djava.util.loggi.config.file=conf/logger.properties $cp -cp $3 edu.iu.dsc.tws.rsched.schedulers.standalone.MPIWorker --container_class $4 --job_name $5 --twister2_home $6 --cluster_type standalone --config_dir $7
# fixed: $10 parses as ${1}0 — positional parameters past $9 need braces
$8 java -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp $9 ${10} -Djava.util.loggi.config.file=slurm/logger.properties $cp -cp $3 edu.iu.dsc.tws.rsched.schedulers.standalone.MPIWorker --container_class $4 --job_name $5 --twister2_home $6 --cluster_type slurm --config_dir $7 &
wait
|
# Generated by Django 3.1 on 2020-08-19 16:24
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 3.1; keep consistent with the migration graph.

    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        ('authentication', '0007_workspace_beacon_consent'),
        ('definitions', '0012_auto_20200818_0458'),
    ]

    operations = [
        # AssetOwner is a generic double-sided join table: (content_type,
        # object_id) and (owner_type, owner_id) are generic references —
        # presumably asset and owner respectively (names only; confirm against
        # the model definition) — scoped per workspace by unique_together.
        migrations.CreateModel(
            name='AssetOwner',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('order', models.PositiveIntegerField(db_index=True, editable=False, verbose_name='order')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('object_id', models.IntegerField()),
                ('owner_id', models.IntegerField()),
                ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
                ('owner_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='contenttypes.contenttype')),
                ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='authentication.workspace')),
            ],
            options={
                'unique_together': {('workspace', 'object_id', 'content_type', 'owner_id', 'owner_type')},
            },
        ),
    ]
|
/**
 * @fileoverview added by tsickle
 * @suppress {checkTypes,extraRequire,missingReturn,uselessCode} checked by tsc
 */
// NOTE(review): tsickle-generated output — regenerate from the TypeScript
// source rather than editing this file by hand.
/**
 * @record
 */
export function ResultAndError() { }
// Closure type declarations only; the 'if (false)' guard keeps them out of
// the runtime path.
if (false) {
/** @type {?|undefined} */
ResultAndError.prototype.result;
/** @type {?|undefined} */
ResultAndError.prototype.error;
}
//# sourceMappingURL=data:application/json;base64,<KEY>
<reponame>Ancora/websiteancora
import { Twitter } from "styled-icons/icomoon/Twitter"
import { Instagram } from "styled-icons/icomoon/Instagram"
import { Facebook } from "styled-icons/icomoon/Facebook"
// Social-network icon components, keyed by network name.
const Icons = {
Twitter,
Instagram,
Facebook,
}
export default Icons
|
<reponame>estebanmarichal70/VeMec
package com.vemec.api.controllers;
import com.vemec.api.services.SalaService;
import com.vemec.api.utils.Utils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.util.Map;
// REST endpoints for managing salas; all errors are funneled through
// Utils.mapErrors so the service layer drives the failure responses.
@RestController
@RequestMapping(path = "/api/v1/sala")
public class SalaController {

    @Autowired
    private SalaService salaService;

    // POST /api/v1/sala — creates a new sala from the raw JSON payload.
    @PostMapping
    public @ResponseBody
    ResponseEntity addNew(@RequestBody Map<String, Object> payload) {
        try {
            return new ResponseEntity<>(this.salaService.addNew(payload), null, HttpStatus.CREATED);
        }
        catch (Exception e) {
            return Utils.mapErrors(e);
        }
    }

    // GET /api/v1/sala — paged listing filtered by nombre/centro.
    // Pages are 1-based in the API but 0-based in the service, hence page-1.
    @GetMapping
    public @ResponseBody
    ResponseEntity getAll(@RequestParam Integer page,@RequestParam Integer limit,@RequestParam String nombre, @RequestParam String centro) {
        try {
            return new ResponseEntity<>(this.salaService.getAll(page-1, limit, nombre, centro),null, HttpStatus.OK);
        }
        catch (Exception e) {
            return Utils.mapErrors(e);
        }
    }

    // GET /api/v1/sala/{id} — fetches a single sala by id.
    @GetMapping(path = "/{id}")
    public @ResponseBody
    ResponseEntity getByID(@PathVariable("id") Integer id) {
        try {
            return new ResponseEntity<>(this.salaService.getByID(id), null, HttpStatus.OK);
        }
        catch (Exception e) {
            return Utils.mapErrors(e);
        }
    }

    // GET /api/v1/sala/centro/{id} — salas associated with the given centro.
    @GetMapping(path = "/centro/{id}")
    public @ResponseBody
    ResponseEntity centroSala(@PathVariable("id") Integer id){
        try{
            return new ResponseEntity<>(this.salaService.centroSala(id), null, HttpStatus.OK);
        }
        catch(Exception e){
            return Utils.mapErrors(e);
        }
    }

    // DELETE /api/v1/sala/{id} — always answers 200 with a status JSON body
    // indicating whether the service reported the delete as successful.
    @DeleteMapping(path = "/{id}")
    public @ResponseBody
    ResponseEntity delete(@PathVariable("id") Integer id) {
        try {
            return new ResponseEntity<>(this.salaService.delete(id) ? "{\"status\":\"SUCCESS\"}":"{\"status\":\"BAD\"}",null, HttpStatus.OK);
        } catch (Exception e) {
            return Utils.mapErrors(e);
        }
    }

    // PUT /api/v1/sala/{id} — updates the sala from the raw JSON payload.
    @PutMapping(path = "/{id}")
    public @ResponseBody
    ResponseEntity update(@PathVariable("id") Integer id, @RequestBody Map<String, Object> payload) {
        try {
            return new ResponseEntity<>(this.salaService.update(id, payload), null, HttpStatus.OK);
        }
        catch (Exception e) {
            return Utils.mapErrors(e);
        }
    }
}
|
/**
 * Minimal FIFO queue backed by a plain array.
 */
class Queue {
  constructor() {
    this.items = [];
  }

  /** Appends an element at the back of the queue. */
  insert(element) {
    this.items.push(element);
  }

  /** Removes and returns the front element, or null when the queue is empty. */
  remove() {
    return this.isEmpty() ? null : this.items.shift();
  }

  /** True when the queue holds no elements. */
  isEmpty() {
    return this.items.length === 0;
  }
}
|
from sklearn.linear_model import LogisticRegression
from sklearn.feature_extraction.text import CountVectorizer
# Define feature vectors and labels.
# NOTE(review): one example per class cannot generalize — presumably demo
# code; confirm before relying on this model's predictions.
true_statements = ['The earth is round']
false_statements = ['The earth is flat']
labels = [1, 0]  # 1 = true statement, 0 = false; order matches the concatenation below
# Create a count vectorizer to generate the (bag-of-words) feature vectors
vectorizer = CountVectorizer()
feature_vectors = vectorizer.fit_transform(true_statements + false_statements).toarray()
# Train the logistic regression model
model = LogisticRegression()
model.fit(feature_vectors, labels)
|
<gh_stars>1-10
// Action types and creators for the champion browser.
export const CHANGE_NAME_FILTER = 'CHANGE_NAME_FILTER';
export const CHANGE_TAG_FILTER = 'CHANGE_TAG_FILTER';
export const CHANGE_SELECTED_CHAMPION = 'CHANGE_SELECTED_CHAMPION';
export const FETCH_CHAMPIONS = 'FETCH_CHAMPIONS';
export const FETCH_CHAMPIONS_SUCCESS = 'FETCH_CHAMPIONS_SUCCESS';
export const FETCH_CHAMPIONS_ERROR = 'FETCH_CHAMPIONS_ERROR';

export const apiChampions = 'https://ddragon.leagueoflegends.com/cdn/11.7.1/data/en_US/champion.json';

export const changeNameFilter = filter => (
  {
    type: CHANGE_NAME_FILTER,
    filter,
  }
);

export const changeTagFilter = filter => (
  {
    type: CHANGE_TAG_FILTER,
    filter,
  }
);

export const changeSelectedChampion = champion => (
  {
    type: CHANGE_SELECTED_CHAMPION,
    selectedChampion: champion,
  }
);

export const fetchChampions = () => (
  {
    type: FETCH_CHAMPIONS,
  }
);

// Flattens the API payload ({ data: { Name: {...} } }) into an array.
export const fetchChampionsSuccess = data => {
  const keys = Object.keys(data.data);
  const champions = [];
  keys.forEach(key => champions.push(data.data[key]));
  return {
    type: FETCH_CHAMPIONS_SUCCESS,
    champions,
  };
};

export const fetchChampionsError = error => (
  {
    // fixed: previously 'type: fetchChampionsError' referenced the creator
    // function itself instead of the action-type constant.
    type: FETCH_CHAMPIONS_ERROR,
    error,
  }
);

// Thunk-style helper: announces the fetch, then dispatches success or error.
export const requestChampions = dispatch => {
  dispatch(fetchChampions());
  fetch(apiChampions)
    .then(result => result.json())
    .then(data => {
      dispatch(fetchChampionsSuccess(data));
    })
    // fixed: the error action was created but never dispatched.
    .catch(error => dispatch(fetchChampionsError(error)));
};
|
/*=========================================================================
Program: Visualization Toolkit
Module: vtkPCLOpenNISource.cxx
Copyright (c) <NAME>, <NAME>, <NAME>
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================*/
#include "vtkPCLOpenNISource.h"
#include "vtkPCLConversions.h"
#include "vtkPolyData.h"
#include "vtkInformation.h"
#include "vtkInformationVector.h"
#include "vtkObjectFactory.h"
#include "vtkStreamingDemandDrivenPipeline.h"
#include "vtkSmartPointer.h"
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/io/openni_grabber.h>
#include <boost/thread/thread.hpp>
typedef pcl::PointCloud<pcl::PointXYZRGBA> Cloud;
typedef Cloud::Ptr CloudPtr;
typedef Cloud::ConstPtr CloudConstPtr;
//----------------------------------------------------------------------------
// Private implementation shared with the grabber thread. The PCL grabber
// delivers clouds on its own thread; HandleIncomingCloud converts each cloud
// to vtkPolyData and publishes it under the mutex, while the VTK-side calls
// (GetLatestPolyData / HasNewData) consume it under the same mutex.
class vtkPCLOpenNISource::vtkInternal
{
public:
  vtkInternal()
  {
    this->Grabber = 0;
    this->NewData = false;
  }

  ~vtkInternal()
  {
    delete this->Grabber;
  }

  // Grabber-thread callback. The conversion happens before taking the lock
  // so the critical section is just a pointer swap.
  void HandleIncomingCloud(const CloudConstPtr& newCloud)
  {
    vtkSmartPointer<vtkPolyData> newPolyData = vtkPCLConversions::PolyDataFromPointCloud(newCloud);
    boost::lock_guard<boost::mutex> lock(this->mutex);
    this->PolyData = newPolyData;
    this->NewData = true;
  }

  // Hands the latest frame to the caller and resets the new-data flag;
  // returns NULL if called again before another frame arrives.
  vtkSmartPointer<vtkPolyData> GetLatestPolyData()
  {
    boost::lock_guard<boost::mutex> lock(this->mutex);
    vtkSmartPointer<vtkPolyData> polyData = this->PolyData;
    this->PolyData = NULL;
    this->NewData = false;
    return polyData;
  }

  bool HasNewData()
  {
    boost::lock_guard<boost::mutex> lock(this->mutex);
    return this->NewData;
  }

  bool NewData;                 // set on the grabber thread, cleared on consume
  pcl::OpenNIGrabber* Grabber;  // owned; created lazily in StartGrabber()
  boost::mutex mutex;           // guards PolyData and NewData
  vtkSmartPointer<vtkPolyData> PolyData;
  boost::function<void (const CloudConstPtr&)> Callback;
};
//----------------------------------------------------------------------------
vtkStandardNewMacro(vtkPCLOpenNISource);
//----------------------------------------------------------------------------
vtkPCLOpenNISource::vtkPCLOpenNISource()
{
  this->Internal = new vtkInternal;
  // Pure source: no inputs, a single output port.
  this->SetNumberOfInputPorts(0);
  this->SetNumberOfOutputPorts(1);
}
//----------------------------------------------------------------------------
vtkPCLOpenNISource::~vtkPCLOpenNISource()
{
  // NOTE(review): the grabber is deleted by vtkInternal's destructor but is
  // not stopped here first — confirm callers invoke StopGrabber() before
  // destroying the source.
  delete this->Internal;
}
//----------------------------------------------------------------------------
void vtkPCLOpenNISource::StartGrabber()
{
  // Lazily create the grabber and register the cloud callback on first use;
  // subsequent calls just (re)start streaming.
  if (!this->Internal->Grabber)
  {
    this->Internal->Grabber = new pcl::OpenNIGrabber("");
    this->Internal->Callback = boost::bind(&vtkPCLOpenNISource::vtkInternal::HandleIncomingCloud, this->Internal, _1);
    this->Internal->Grabber->registerCallback(this->Internal->Callback);
  }
  this->Internal->Grabber->start();
}
//----------------------------------------------------------------------------
void vtkPCLOpenNISource::StopGrabber()
{
if (this->Internal->Grabber)
{
this->Internal->Grabber->stop();
}
}
//----------------------------------------------------------------------------
bool vtkPCLOpenNISource::HasNewData()
{
return this->Internal->HasNewData();
}
//----------------------------------------------------------------------------
void vtkPCLOpenNISource::Poll()
{
if (this->HasNewData())
{
this->Modified();
}
}
//----------------------------------------------------------------------------
// Pipeline execution: copy the latest captured frame into the output.
// When no new frame is available the previous output is left untouched.
int vtkPCLOpenNISource::RequestData(
  vtkInformation *vtkNotUsed(request),
  vtkInformationVector **inputVector,
  vtkInformationVector *outputVector)
{
  vtkInformation *outInfo = outputVector->GetInformationObject(0);
  vtkDataSet *output = vtkDataSet::SafeDownCast(outInfo->Get(vtkDataObject::DATA_OBJECT()));

  if (!this->HasNewData())
  {
    return 1;
  }

  // Shallow copy is sufficient: the internal slot is cleared by
  // GetLatestPolyData(), so nothing else mutates this polydata afterwards.
  output->ShallowCopy(this->Internal->GetLatestPolyData());
  return 1;
}

//----------------------------------------------------------------------------
void vtkPCLOpenNISource::PrintSelf(ostream& os, vtkIndent indent)
{
  this->Superclass::PrintSelf(os,indent);
}
|
<reponame>carlos-sancho-ramirez/android-java-langbook<filename>dbManager/src/main/java/sword/langbook3/android/models/DefinitionDetails.java
package sword.langbook3.android.models;
import sword.collections.ImmutableHashSet;
import sword.collections.ImmutableSet;
/**
 * Immutable value object for a definition: the concept it is based on plus
 * any complement concepts refining it.
 */
public final class DefinitionDetails<ConceptId> {

    /** Concept this definition derives from; never null. */
    public final ConceptId baseConcept;

    /** Complement concepts; an empty set when none were supplied. */
    public final ImmutableSet<ConceptId> complements;

    /**
     * @param baseConcept required base concept.
     * @param complements optional complements; null is treated as empty.
     * @throws IllegalArgumentException if baseConcept is null.
     */
    public DefinitionDetails(ConceptId baseConcept, ImmutableSet<ConceptId> complements) {
        if (baseConcept == null) {
            throw new IllegalArgumentException();
        }

        this.baseConcept = baseConcept;
        this.complements = (complements != null) ? complements : ImmutableHashSet.empty();
    }
}
|
#include <iostream>
#include <vector>
/**
 * Print every subset of `set` to stdout, each formatted as "{ a b }".
 * Output format is unchanged from the original (no separator between
 * subsets, no trailing newline).
 *
 * Enumerates the 2^n bitmasks; bit j of mask i selects set[j].
 * Requires n < 32. Takes the vector by const reference to avoid a copy
 * on every call (call sites are unaffected).
 */
void printSubsets(const std::vector<int>& set) {
    const unsigned int n = set.size();

    for (unsigned int i = 0; i < (1u << n); i++) {
        std::cout << "{ ";
        for (unsigned int j = 0; j < n; j++) {
            // (1u << j): unsigned literal so the shift is well-defined for
            // j == 31, unlike the signed 1 << j.
            if (i & (1u << j)) {
                std::cout << set[j] << " ";
            }
        }
        std::cout << "}";
    }
}
// Demo driver: enumerate and print every subset of {1, 2, 3}.
int main() {
    const std::vector<int> values = { 1, 2, 3 };
    printSubsets(values);
    return 0;
}
|
import spacy
from spacy.matcher import Matcher
from spacy.lang.en import English
# Blank English pipeline: tokenizer only, no tagger/parser/NER components.
nlp = English()
matcher = Matcher(nlp.vocab)
# define the pattern
# NOTE(review): ENT_TYPE "INTENT" only matches if some component assigns
# that entity label; a bare English() pipeline has no NER, so this pattern
# cannot match as written -- confirm an entity ruler is added elsewhere.
pattern = [{"ENT_TYPE": "INTENT", "OP": "+"}]
# add the pattern to the matcher
# NOTE(review): the (name, callback, *patterns) form is the spaCy v2 API;
# spaCy v3 expects matcher.add("matching_1", [pattern]) -- verify the
# pinned spaCy version before changing.
matcher.add("matching_1", None, pattern)
# create a function to find the intent
# create a function to find the intent
def find_intent(text):
    """Return the text of the first matched span whose lowercased form is a
    known intent label; implicitly returns None when nothing matches.

    Relies on the module-level ``matcher`` and ``intents`` globals;
    ``intents`` is defined after this function but before the first call,
    so lookup succeeds at call time.
    """
    doc = nlp(text)
    # find the matches in the doc
    matches = matcher(doc)
    # get the start and end boundaries of the matches
    for match_id, start, end in matches:
        span = doc[start:end]
        # return the intent if a match is found
        if span.text.lower() in intents:
            return span.text
# define the possible intents
intents = ["greet", "goodbye", "thankyou", "affirm", "deny", "mood_great",
           "mood_unhappy", "bot_challenge"]
# test the function
text = "I am feeling great!"
# NOTE(review): with the blank pipeline above the matcher produces no
# matches, so this prints None rather than "mood_great" -- confirm setup.
print(find_intent(text)) #Output: "mood_great"
|
#!/bin/bash
# Copyright 2015 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Unit tests for docker_build
# Locate this script's directory and pull in the shared test harness
# (provides fail, check_eq, expect_not_log, TEST_DATA_DIR, run_suite, ...).
DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source ${DIR}/testenv.sh || { echo "testenv.sh not found!" >&2; exit 1; }

readonly PLATFORM="$(uname -s | tr 'A-Z' 'a-z')"
# docker_build stamps every file with epoch 0; render that timestamp the
# way the platform's `tar tv` will print it (BSD date vs GNU date).
if [ "${PLATFORM}" = "darwin" ]; then
  readonly MAGIC_TIMESTAMP="$(date -r 0 "+%b %e %Y")"
else
  readonly MAGIC_TIMESTAMP="$(date --date=@0 "+%F %R")"
fi
# EXPECT_CONTAINS <haystack> <needle> [message]
# Fail the current test (via the harness's `fail`) unless the fixed string
# <needle> occurs somewhere in <haystack>.
function EXPECT_CONTAINS() {
  local complete="${1}"
  local substring="${2}"
  local message="${3:-Expected '${substring}' not found in '${complete}'}"

  echo "${complete}" | grep -Fsq -- "${substring}" \
    || fail "$message"
}
# check_property <property> <tarball> <layer> <expected>
# Assert that the given layer's json metadata inside ${tarball}.tar
# contains the substring `"<property>": <expected>`.
function check_property() {
  local property="${1}"
  local tarball="${2}"
  local layer="${3}"
  local expected="${4}"
  local test_data="${TEST_DATA_DIR}/${tarball}.tar"

  local metadata="$(tar xOf "${test_data}" "./${layer}/json")"

  # This would be much more accurate if we had 'jq' everywhere.
  EXPECT_CONTAINS "${metadata}" "\"${property}\": ${expected}"
}
# check_no_property <property> <tarball> <layer>
# Assert the property key is absent from the layer json of both the image
# and its notop_ variant.
function check_no_property() {
  local property="${1}"
  local tarball="${2}"
  local layer="${3}"

  local test_data="${TEST_DATA_DIR}/${tarball}.tar"
  tar xOf "${test_data}" "./${layer}/json" >$TEST_log
  expect_not_log "\"${property}\":"

  # notop variant
  test_data="${TEST_DATA_DIR}/notop_${tarball}.tar"
  tar xOf "${test_data}" "./${layer}/json" >$TEST_log
  expect_not_log "\"${property}\":"
}
# Thin wrappers over check_property for individual metadata fields.
# Size/id/parent are layer-identity fields checked on the named tarball
# only; the config-style fields below are checked on both the image and
# its notop_ variant.
function check_size() {
  check_property Size "${@}"
}

function check_id() {
  check_property id "${@}"
}

function check_parent() {
  check_property parent "${@}"
}

function check_entrypoint() {
  input="$1"
  shift
  check_property Entrypoint "${input}" "${@}"
  check_property Entrypoint "notop_${input}" "${@}"
}

function check_cmd() {
  input="$1"
  shift
  check_property Cmd "${input}" "${@}"
  check_property Cmd "notop_${input}" "${@}"
}
# check_ports <tarball> <layer> <expected>
# Verify ExposedPorts metadata in both the image and its notop_ variant,
# matching the pattern used by every sibling check_* wrapper.
function check_ports() {
  input="$1"
  shift
  check_property ExposedPorts "${input}" "${@}"
  # BUG FIX: this previously checked "${input}" a second time instead of
  # the notop_ variant, so the notop image's ports were never verified.
  check_property ExposedPorts "notop_${input}" "${@}"
}
# More config-field wrappers; each checks the image and its notop_ variant.
function check_volumes() {
  input="$1"
  shift
  check_property Volumes "${input}" "${@}"
  check_property Volumes "notop_${input}" "${@}"
}

function check_env() {
  input="$1"
  shift
  check_property Env "${input}" "${@}"
  check_property Env "notop_${input}" "${@}"
}

function check_label() {
  input="$1"
  shift
  check_property Label "${input}" "${@}"
  check_property Label "notop_${input}" "${@}"
}

function check_workdir() {
  input="$1"
  shift
  check_property WorkingDir "${input}" "${@}"
  check_property WorkingDir "notop_${input}" "${@}"
}

function check_user() {
  input="$1"
  shift
  check_property User "${input}" "${@}"
  check_property User "notop_${input}" "${@}"
}
# check_layers_aux <tarball> <layer1> [<layer2> ...]
# Verify that ${tarball}.tar contains exactly the given layers (layer1 is
# the base), that every file in each layer carries the magic timestamp,
# and that each layer's id/parent/Size metadata is consistent.
function check_layers_aux() {
  local input=${1}
  shift 1
  local expected_layers=(${*})

  local expected_layers_sorted=(
    $(for i in ${expected_layers[*]}; do echo $i; done | sort)
  )
  local test_data="${TEST_DATA_DIR}/${input}.tar"

  # Verbose output for testing.
  tar tvf "${test_data}"

  # Layer directories are named by their hex hash; list and dedupe them.
  local actual_layers=(
    $(tar tvf ${test_data} | tr -s ' ' | cut -d' ' -f 4- | sort \
      | cut -d'/' -f 2 | grep -E '^[0-9a-f]+$' | sort | uniq))

  # Verbose output for testing.
  echo Expected: ${expected_layers_sorted[@]}
  echo Actual: ${actual_layers[@]}

  check_eq "${#expected_layers[@]}" "${#actual_layers[@]}"

  local index=0
  local parent=
  while [ "${index}" -lt "${#expected_layers[@]}" ]
  do
    # Check that the nth sorted layer matches
    check_eq "${expected_layers_sorted[$index]}" "${actual_layers[$index]}"

    # Grab the ordered layer and check it.
    local layer="${expected_layers[$index]}"

    # Verbose output for testing.
    echo Checking layer: "${layer}"

    local listing="$(tar xOf "${test_data}" "./${layer}/layer.tar" | tar tv)"

    # Check that all files in the layer, if any, have the magic timestamp
    check_eq "$(echo "${listing}" | grep -Fv "${MAGIC_TIMESTAMP}" || true)" ""

    check_id "${input}" "${layer}" "\"${layer}\""

    # Check that the layer contains its predecessor as its parent in the JSON.
    if [[ -n "${parent}" ]]; then
      check_parent "${input}" "${layer}" "\"${parent}\""
    fi

    # Check that the layer's size metadata matches the layer's tarball's size.
    local layer_size=$(tar xOf "${test_data}" "./${layer}/layer.tar" | wc -c | xargs)
    check_size "${input}" "${layer}" "${layer_size}"

    index=$((index + 1))
    parent=$layer
  done
}
# check_layers <tarball> <layer...>
# Run the full layer verification on the image and on its notop_ variant.
function check_layers() {
  local input=$1
  shift
  check_layers_aux "$input" "$@"
  check_layers_aux "notop_$input" "$@"
}
# The gen_image tarball must contain the generated file.
function test_gen_image() {
  grep -Fsq "./gen.out" "$TEST_DATA_DIR/gen_image.tar" \
    || fail "'./gen.out' not found in '$TEST_DATA_DIR/gen_image.tar'"
}

# An image built with a custom repository must register its layer under
# that repository name in the ./repositories index.
function test_dummy_repository() {
  local layer="0279f3ce8b08d10506abcf452393b3e48439f5eca41b836fae59a0d509fbafea"
  local test_data="${TEST_DATA_DIR}/dummy_repository.tar"
  check_layers_aux "dummy_repository" "$layer"

  local repositories="$(tar xOf "${test_data}" "./repositories")"
  # This would really need to use `jq` instead.
  echo "${repositories}" | \
    grep -Esq -- "\"gcr.io/dummy/[a-zA-Z_]*_docker_testdata\": {" \
    || fail "Cannot find image in repository gcr.io/dummy in '${repositories}'"
  EXPECT_CONTAINS "${repositories}" "\"dummy_repository\": \"$layer\""
}
# The following tests pin the expected layer hashes for each combination
# of files/tar/directory bases and overlays; any change in the build
# rule's output layout shows up as a hash mismatch here.
function test_files_base() {
  check_layers "files_base" \
    "82ca3945f7d07df82f274d7fafe83fd664c2154e5c64c988916ccd5b217bb710"
}

function test_files_with_files_base() {
  check_layers "files_with_files_base" \
    "82ca3945f7d07df82f274d7fafe83fd664c2154e5c64c988916ccd5b217bb710" \
    "84c0d09919ae8b06cb6b064d8cd5eab63341a46f11ccc7ecbe270ad3e1f52744"
}

function test_tar_base() {
  check_layers "tar_base" \
    "8b9e4db9dd4b990ee6d8adc2843ad64702ad9063ae6c22e8ca5f94aa54e71277"

  # Check that this layer doesn't have any entrypoint data by looking
  # for *any* entrypoint.
  check_no_property "Entrypoint" "tar_base" \
    "8b9e4db9dd4b990ee6d8adc2843ad64702ad9063ae6c22e8ca5f94aa54e71277"
}

function test_tar_with_tar_base() {
  check_layers "tar_with_tar_base" \
    "8b9e4db9dd4b990ee6d8adc2843ad64702ad9063ae6c22e8ca5f94aa54e71277" \
    "1cc81a2aaec2e3727d98d48bf9ba09d3ac96ef48adf5edae861d15dd0191dc40"
}

function test_directory_with_tar_base() {
  check_layers "directory_with_tar_base" \
    "8b9e4db9dd4b990ee6d8adc2843ad64702ad9063ae6c22e8ca5f94aa54e71277" \
    "e56ddeb8279698484f50d480f71cb5380223ad0f451766b7b9a9348129d02542"
}

function test_files_with_tar_base() {
  check_layers "files_with_tar_base" \
    "8b9e4db9dd4b990ee6d8adc2843ad64702ad9063ae6c22e8ca5f94aa54e71277" \
    "f099727fa58f9b688e77b511b3cc728b86ae0e84d197b9330bd51082ad5589f2"
}

function test_workdir_with_tar_base() {
  check_layers "workdir_with_tar_base" \
    "8b9e4db9dd4b990ee6d8adc2843ad64702ad9063ae6c22e8ca5f94aa54e71277" \
    "f24cbe53bd1b78909c6dba0bd47016354f3488b35b85aeee68ecc423062b927e"
}

function test_tar_with_files_base() {
  check_layers "tar_with_files_base" \
    "82ca3945f7d07df82f274d7fafe83fd664c2154e5c64c988916ccd5b217bb710" \
    "bee1a325e4b51a1dcfd7e447987b4e130590815865ab22e8744878053d525f20"
}
# Entrypoint/Cmd/port inheritance across base and derivative images.
function test_base_with_entrypoint() {
  check_layers "base_with_entrypoint" \
    "4acbeb0495918726c0107e372b421e1d2a6fd4825d58fc3f0b0b2a719fb3ce1b"

  check_entrypoint "base_with_entrypoint" \
    "4acbeb0495918726c0107e372b421e1d2a6fd4825d58fc3f0b0b2a719fb3ce1b" \
    '["/bar"]'

  # Check that the base layer has a port exposed.
  check_ports "base_with_entrypoint" \
    "4acbeb0495918726c0107e372b421e1d2a6fd4825d58fc3f0b0b2a719fb3ce1b" \
    '{"8080/tcp": {}}'
}

function test_derivative_with_shadowed_cmd() {
  check_layers "derivative_with_shadowed_cmd" \
    "4acbeb0495918726c0107e372b421e1d2a6fd4825d58fc3f0b0b2a719fb3ce1b" \
    "e35f57dc6c1e84ae67dcaaf3479a3a3c0f52ac4d194073bd6214e04c05beab42"
}

function test_derivative_with_cmd() {
  check_layers "derivative_with_cmd" \
    "4acbeb0495918726c0107e372b421e1d2a6fd4825d58fc3f0b0b2a719fb3ce1b" \
    "e35f57dc6c1e84ae67dcaaf3479a3a3c0f52ac4d194073bd6214e04c05beab42" \
    "186289545131e34510006ac79498078dcf41736a5eb9a36920a6b30d3f45bc01"

  check_entrypoint "derivative_with_cmd" \
    "186289545131e34510006ac79498078dcf41736a5eb9a36920a6b30d3f45bc01" \
    '["/bar"]'

  # Check that the middle layer has our shadowed arg.
  check_cmd "derivative_with_cmd" \
    "e35f57dc6c1e84ae67dcaaf3479a3a3c0f52ac4d194073bd6214e04c05beab42" \
    '["shadowed-arg"]'

  # Check that our topmost layer excludes the shadowed arg.
  check_cmd "derivative_with_cmd" \
    "186289545131e34510006ac79498078dcf41736a5eb9a36920a6b30d3f45bc01" \
    '["arg1", "arg2"]'

  # Check that the topmost layer has the ports exposed by the bottom
  # layer, and itself.
  check_ports "derivative_with_cmd" \
    "186289545131e34510006ac79498078dcf41736a5eb9a36920a6b30d3f45bc01" \
    '{"80/tcp": {}, "8080/tcp": {}}'
}
# Volume, env and label aggregation across image layers.
function test_derivative_with_volume() {
  check_layers "derivative_with_volume" \
    "125e7cfb9d4a6d803a57b88bcdb05d9a6a47ac0d6312a8b4cff52a2685c5c858" \
    "08424283ad3a7e020e210bec22b166d7ebba57f7ba2d0713c2fd7bd1e2038f88"

  # Check that the topmost layer has the ports exposed by the bottom
  # layer, and itself.
  check_volumes "derivative_with_volume" \
    "125e7cfb9d4a6d803a57b88bcdb05d9a6a47ac0d6312a8b4cff52a2685c5c858" \
    '{"/logs": {}}'

  check_volumes "derivative_with_volume" \
    "08424283ad3a7e020e210bec22b166d7ebba57f7ba2d0713c2fd7bd1e2038f88" \
    '{"/asdf": {}, "/blah": {}, "/logs": {}}'
}

function test_generated_tarball() {
  check_layers "generated_tarball" \
    "54b8328604115255cc76c12a2a51939be65c40bf182ff5a898a5fb57c38f7772"
}

function test_with_env() {
  check_layers "with_env" \
    "125e7cfb9d4a6d803a57b88bcdb05d9a6a47ac0d6312a8b4cff52a2685c5c858" \
    "42a1bd0f449f61a23b8a7776875ffb6707b34ee99c87d6428a7394f5e55e8624"

  check_env "with_env" \
    "42a1bd0f449f61a23b8a7776875ffb6707b34ee99c87d6428a7394f5e55e8624" \
    '["bar=blah blah blah", "foo=/asdf"]'
}

function test_with_double_env() {
  check_layers "with_double_env" \
    "125e7cfb9d4a6d803a57b88bcdb05d9a6a47ac0d6312a8b4cff52a2685c5c858" \
    "42a1bd0f449f61a23b8a7776875ffb6707b34ee99c87d6428a7394f5e55e8624" \
    "576a9fd9c690be04dc7aacbb9dbd1f14816e32dbbcc510f4d42325bbff7163dd"

  # Check both the aggregation and the expansion of embedded variables.
  check_env "with_double_env" \
    "576a9fd9c690be04dc7aacbb9dbd1f14816e32dbbcc510f4d42325bbff7163dd" \
    '["bar=blah blah blah", "baz=/asdf blah blah blah", "foo=/asdf"]'
}

function test_with_label() {
  check_layers "with_label" \
    "125e7cfb9d4a6d803a57b88bcdb05d9a6a47ac0d6312a8b4cff52a2685c5c858" \
    "eba6abda3d259ab6ed5f4d48b76df72a5193fad894d4ae78fbf0a363d8f9e8fd"

  check_label "with_label" \
    "eba6abda3d259ab6ed5f4d48b76df72a5193fad894d4ae78fbf0a363d8f9e8fd" \
    '["com.example.bar={\"name\": \"blah\"}", "com.example.baz=qux", "com.example.foo={\"name\": \"blah\"}"]'
}
# Labels from both layers must aggregate in the topmost layer.
function test_with_double_label() {
  check_layers "with_double_label" \
    "125e7cfb9d4a6d803a57b88bcdb05d9a6a47ac0d6312a8b4cff52a2685c5c858" \
    "eba6abda3d259ab6ed5f4d48b76df72a5193fad894d4ae78fbf0a363d8f9e8fd" \
    "bfe88fbb5e24fc5bff138f7a1923d53a2ee1bbc8e54b6f5d9c371d5f48b6b023"

  # BUG FIX: a stray trailing backslash after the last layer hash above
  # continued the command onto the next line, silently turning the
  # check_label call below into extra arguments of check_layers instead
  # of running it as its own assertion.
  check_label "with_double_label" \
    "bfe88fbb5e24fc5bff138f7a1923d53a2ee1bbc8e54b6f5d9c371d5f48b6b023" \
    '["com.example.bar={\"name\": \"blah\"}", "com.example.baz=qux", "com.example.foo={\"name\": \"blah\"}", "com.example.qux={\"name\": \"blah-blah\"}"]'
}
function test_with_user() {
  check_user "with_user" \
    "65664d4d78ff321684e2a8bf165792ce562c5990c9ba992e6288dcb1ec7f675c" \
    "\"nobody\""
}

# get_layer_listing <tarball> <layer>
# Print the file paths inside the layer's tarball, stripping the
# permission/owner/timestamp prefix that `tar tv` emits.
function get_layer_listing() {
  local input=$1
  local layer=$2
  local test_data="${TEST_DATA_DIR}/${input}.tar"
  tar xOf "${test_data}" \
    "./${layer}/layer.tar" | tar tv | sed -e 's/^.*:00 //'
}
# Verify how the data_path attribute rewrites file paths inside a layer.
function test_data_path() {
  local no_data_path_sha="451d182e5c71840f00ba9726dc0239db73a21b7e89e79c77f677e3f7c5c23d44"
  local data_path_sha="9a41c9e1709558f7ef06f28f66e9056feafa7e0f83990801e1b27c987278d8e8"
  local absolute_data_path_sha="f196c42ab4f3eb850d9655b950b824db2c99c01527703ac486a7b48bb2a34f44"
  local root_data_path_sha="19d7fd26d67bfaeedd6232dcd441f14ee163bc81c56ed565cc20e73311c418b6"

  check_layers_aux "no_data_path_image" "${no_data_path_sha}"
  check_layers_aux "data_path_image" "${data_path_sha}"
  check_layers_aux "absolute_data_path_image" "${absolute_data_path_sha}"
  check_layers_aux "root_data_path_image" "${root_data_path_sha}"

  # Without data_path = "." the file will be inserted as `./test`
  # (since it is the path in the package) and with data_path = "."
  # the file will be inserted relatively to the testdata package
  # (so `./test/test`).
  check_eq "$(get_layer_listing "no_data_path_image" "${no_data_path_sha}")" \
    './
./test'
  check_eq "$(get_layer_listing "data_path_image" "${data_path_sha}")" \
    './
./test/
./test/test'

  # With an absolute path for data_path, we should strip that prefix
  # from the files' paths. Since the testdata images are in
  # //tools/build_defs/docker/testdata and data_path is set to
  # "/tools/build_defs", we should have `docker` as the top-level
  # directory.
  check_eq "$(get_layer_listing "absolute_data_path_image" "${absolute_data_path_sha}")" \
    './
./docker/
./docker/testdata/
./docker/testdata/test/
./docker/testdata/test/test'

  # With data_path = "/", we expect the entire path from the repository
  # root.
  check_eq "$(get_layer_listing "root_data_path_image" "${root_data_path_sha}")" \
    "./
./tools/
./tools/build_defs/
./tools/build_defs/docker/
./tools/build_defs/docker/testdata/
./tools/build_defs/docker/testdata/test/
./tools/build_defs/docker/testdata/test/test"
}
# A deb package's contents must be merged into the layer exactly once.
function test_extras_with_deb() {
  local test_data="${TEST_DATA_DIR}/extras_with_deb.tar"
  # ./top holds the hash of the topmost layer.
  local sha=$(tar xOf ${test_data} ./top)

  # The content of the layer should have no duplicate
  local layer_listing="$(get_layer_listing "extras_with_deb" "${sha}" | sort)"
  check_eq "${layer_listing}" \
    "./
./etc/
./etc/nsswitch.conf
./tmp/
./usr/
./usr/bin/
./usr/bin/java -> /path/to/bin/java
./usr/titi"
}

run_suite "build_test"
|
/** */
package org.sunbird.learner.util;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.stream.Collectors;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.sunbird.cassandra.CassandraOperation;
import org.sunbird.common.models.response.Response;
import org.sunbird.common.models.util.JsonKey;
import org.sunbird.common.models.util.LoggerUtil;
import org.sunbird.common.models.util.ProjectUtil;
import org.sunbird.common.request.RequestContext;
import org.sunbird.helper.ServiceFactory;
import org.sunbird.learner.actors.role.service.RoleService;
/**
* This class will handle the data cache.
*
* @author <NAME>
*/
/**
 * This class will handle the data cache.
 *
 * @author <NAME>
 */
public class DataCacheHandler implements Runnable {
  private static LoggerUtil logger = new LoggerUtil(DataCacheHandler.class);
  // Cached role id -> role name entries, rebuilt by roleCache().
  private static Map<String, Object> roleMap = new ConcurrentHashMap<>();
  // Telemetry producer metadata (id/pid/ver), filled by cacheTelemetryPdata().
  private static Map<String, Object> telemetryPdata = new ConcurrentHashMap<>(3);
  // System settings (field -> value) from the system_settings table.
  private static Map<String, String> configSettings = new ConcurrentHashMap<>();
  // frameworkId -> (category -> list of term maps); updated externally via
  // updateFrameworkCategoriesMap().
  private static Map<String, Map<String, List<Map<String, String>>>> frameworkCategoriesMap =
      new ConcurrentHashMap<>();
  private static Map<String, List<String>> frameworkFieldsConfig = new ConcurrentHashMap<>();
  private static Map<String, List<String>> hashtagIdFrameworkIdMap = new ConcurrentHashMap<>();
  // stateCode -> (userType -> subTypes), derived from form API data.
  private static Map<String, Map<String, List<String>>> userTypeOrSubTypeConfigMap =
      new ConcurrentHashMap<>();
  // stateCode -> allowed location type codes, derived from form API data.
  private static Map<String, List<String>> stateLocationTypeConfigMap = new ConcurrentHashMap<>();
  // stateCode -> raw form API payload.
  private static Map<String, Map<String, Object>> formApiDataConfigMap = new ConcurrentHashMap<>();
  // Flattened {id, name} role entries used by read APIs.
  private static List<Map<String, String>> roleList = new CopyOnWriteArrayList<>();
  private CassandraOperation cassandraOperation = ServiceFactory.getInstance();
  private static final String KEY_SPACE_NAME = Util.KEY_SPACE_NAME;
  private static Response roleCacheResponse;
  // location type -> ordinal within its configured hierarchy; see
  // initLocationOrderMap().
  private static Map<String, Integer> orderMap;
  // Whitelist of columns accepted by the bulk user upload flow.
  public static String[] bulkUserAllowedFields = {
    JsonKey.FIRST_NAME,
    JsonKey.LAST_NAME,
    JsonKey.PHONE,
    JsonKey.COUNTRY_CODE,
    JsonKey.EMAIL,
    JsonKey.USERNAME,
    JsonKey.ROLES,
    JsonKey.POSITION,
    JsonKey.LOCATION,
    JsonKey.DOB,
    JsonKey.LANGUAGE,
    JsonKey.PROFILE_SUMMARY,
    JsonKey.SUBJECT,
    JsonKey.EXTERNAL_ID_PROVIDER,
    JsonKey.EXTERNAL_ID,
    JsonKey.EXTERNAL_ID_TYPE,
    JsonKey.EXTERNAL_IDS
  };
  // Whitelist of columns accepted by the bulk organisation upload flow.
  public static String[] bulkOrgAllowedFields = {
    JsonKey.ORGANISATION_NAME,
    JsonKey.CHANNEL,
    JsonKey.IS_TENANT,
    JsonKey.PROVIDER,
    JsonKey.EXTERNAL_ID,
    JsonKey.DESCRIPTION,
    JsonKey.HOME_URL,
    JsonKey.ORG_TYPE,
    JsonKey.CONTACT_DETAILS,
    JsonKey.LOC_ID,
    JsonKey.LOCATION_CODE
  };
  // Refresh every cached dataset; intended to be invoked on a scheduler.
  @Override
  public void run() {
    logger.info("DataCacheHandler:run: Cache refresh started.");
    roleCache();
    cacheSystemConfig();
    cacheRoleForRead();
    cacheTelemetryPdata();
    cacheFormApiDataConfig();
    initLocationOrderMap();
    logger.info("DataCacheHandler:run: Cache refresh completed.");
  }
  // Get form data config
  // NOTE(review): formApiDataConfigMap is reset to a brand-new empty map and
  // then immediately iterated, so this loop never executes and no form data
  // is ever fetched here -- the state codes presumably need to come from the
  // previous map (or a configured state list) captured before the reset.
  // Confirm the intended source of state codes.
  private void cacheFormApiDataConfig() {
    formApiDataConfigMap = new ConcurrentHashMap<>();
    userTypeOrSubTypeConfigMap = new ConcurrentHashMap<>();
    stateLocationTypeConfigMap = new ConcurrentHashMap<>();
    for (Map.Entry<String, Map<String, Object>> itr : formApiDataConfigMap.entrySet()) {
      String stateCode = itr.getKey();
      RequestContext reqContext = new RequestContext();
      reqContext.setReqId(UUID.randomUUID().toString());
      reqContext.setDebugEnabled("false");
      // Fetch the state's form configuration from the form API.
      Map<String, Object> formDataMap = FormApiUtilHandler.getFormApiConfig(stateCode, reqContext);
      logger.info(
          reqContext,
          String.format("Cache update for form api stateCode:%s is not found", stateCode));
      if (MapUtils.isNotEmpty(formDataMap)) {
        formApiDataConfigMap.put(stateCode, formDataMap);
        // Rebuild the derived caches from the refreshed form data.
        cacheUserTypeOrSubTypeConfig();
        cacheLocationCodeTypeConfig();
      }
    }
  }
  // Update userType or SubType cache for the state which are fetched from form api
  private void cacheUserTypeOrSubTypeConfig() {
    if (MapUtils.isNotEmpty(formApiDataConfigMap)) {
      for (Map.Entry<String, Map<String, Object>> itr : formApiDataConfigMap.entrySet()) {
        String stateCode = itr.getKey();
        Map<String, Object> formData = itr.getValue();
        Map<String, List<String>> userTypeConfigMap = FormApiUtil.getUserTypeConfig(formData);
        if (MapUtils.isNotEmpty(userTypeConfigMap)) {
          userTypeOrSubTypeConfigMap.put(stateCode, userTypeConfigMap);
        } else {
          // Drop stale entries when the state no longer configures user types.
          userTypeOrSubTypeConfigMap.remove(stateCode);
        }
      }
    }
  }

  // Update Location Code Type cache for the state which are fetched from form api
  private void cacheLocationCodeTypeConfig() {
    if (MapUtils.isNotEmpty(formApiDataConfigMap)) {
      for (Map.Entry<String, Map<String, Object>> itr : formApiDataConfigMap.entrySet()) {
        String stateCode = itr.getKey();
        Map<String, Object> formData = itr.getValue();
        List<String> locationCodeLists = FormApiUtil.getLocationTypeConfigMap(formData);
        if (CollectionUtils.isNotEmpty(locationCodeLists)) {
          stateLocationTypeConfigMap.put(stateCode, locationCodeLists);
        } else {
          // Drop stale entries when the state no longer configures types.
          stateLocationTypeConfigMap.remove(stateCode);
        }
      }
    }
  }
  // Build location-type -> ordinal map once from the configured hierarchy
  // string, e.g. "state,district,block;cluster,school" -- each ';'-separated
  // group is an independent hierarchy ordered by its ','-separated members.
  private void initLocationOrderMap() {
    if (orderMap == null) {
      orderMap = new HashMap<>();
      List<String> subTypeList =
          Arrays.asList(ProjectUtil.getConfigValue("sunbird_valid_location_types").split(";"));
      for (String str : subTypeList) {
        List<String> typeList =
            (((Arrays.asList(str.split(","))).stream().map(String::toLowerCase))
                .collect(Collectors.toList()));
        for (int i = 0; i < typeList.size(); i++) {
          // Position within the group defines the type's order.
          orderMap.put(typeList.get(i), i);
        }
      }
    }
  }
  // Telemetry producer metadata; the version can be overridden by a system
  // setting, falling back to the environment config.
  private void cacheTelemetryPdata() {
    String telemetryPdataVer = DataCacheHandler.getConfigSettings().get("telemetry_pdata_ver");
    if (StringUtils.isBlank(telemetryPdataVer)) {
      telemetryPdataVer = ProjectUtil.getConfigValue("telemetry_pdata_ver");
    }
    telemetryPdata.put("telemetry_pdata_id", ProjectUtil.getConfigValue("telemetry_pdata_id"));
    telemetryPdata.put("telemetry_pdata_pid", ProjectUtil.getConfigValue("telemetry_pdata_pid"));
    telemetryPdata.put("telemetry_pdata_ver", telemetryPdataVer);
  }

  // Refresh the role response served to read APIs.
  private void cacheRoleForRead() {
    roleCacheResponse = RoleService.getUserRoles();
  }

  public static Response getRoleResponse() {
    return roleCacheResponse;
  }

  public static Map<String, Object> getTelemetryPdata() {
    return telemetryPdata;
  }

  // Allows other actors to push a fresher role response into the cache.
  public static void setRoleResponse(Response response) {
    if (response != null) roleCacheResponse = response;
  }
  @SuppressWarnings("unchecked")
  private void cacheSystemConfig() {
    // Build into a temp map and swap at the end so readers never observe a
    // partially-populated settings cache.
    Map<String, String> tempConfigSettings = new ConcurrentHashMap();
    Response response =
        cassandraOperation.getAllRecords(KEY_SPACE_NAME, JsonKey.SYSTEM_SETTINGS_DB, null);
    logger.debug(
        "DataCacheHandler:cacheSystemConfig: Cache system setting fields" + response.getResult());
    List<Map<String, Object>> responseList =
        (List<Map<String, Object>>) response.get(JsonKey.RESPONSE);
    if (null != responseList && !responseList.isEmpty()) {
      for (Map<String, Object> resultMap : responseList) {
        tempConfigSettings.put(
            ((String) resultMap.get(JsonKey.FIELD)), (String) resultMap.get(JsonKey.VALUE));
      }
    }
    // Uniqueness flags are always enforced regardless of DB contents.
    tempConfigSettings.put(JsonKey.PHONE_UNIQUE, String.valueOf(true));
    tempConfigSettings.put(JsonKey.EMAIL_UNIQUE, String.valueOf(true));
    configSettings = tempConfigSettings;
  }
@SuppressWarnings("unchecked")
private void roleCache() {
Map<String, Object> tempRoleMap = new ConcurrentHashMap();
Response response = cassandraOperation.getAllRecords(KEY_SPACE_NAME, JsonKey.ROLE_GROUP, null);
List<Map<String, Object>> responseList =
(List<Map<String, Object>>) response.get(JsonKey.RESPONSE);
Set<String> roleSet = new HashSet<>();
if (CollectionUtils.isNotEmpty(responseList)) {
for (Map<String, Object> resultMap : responseList) {
if (!roleSet.contains(((String) resultMap.get(JsonKey.ID)).trim())) {
roleSet.add(((String) resultMap.get(JsonKey.ID)).trim());
tempRoleMap.put(
((String) resultMap.get(JsonKey.ID)).trim(),
((String) resultMap.get(JsonKey.NAME)).trim());
}
}
}
Response response2 = cassandraOperation.getAllRecords(KEY_SPACE_NAME, JsonKey.ROLE, null);
List<Map<String, Object>> responseList2 =
(List<Map<String, Object>>) response2.get(JsonKey.RESPONSE);
if (CollectionUtils.isNotEmpty(responseList2)) {
for (Map<String, Object> resultMap : responseList2) {
if (!roleSet.contains(((String) resultMap.get(JsonKey.ID)).trim())) {
roleSet.add(((String) resultMap.get(JsonKey.ID)).trim());
tempRoleMap.put(
((String) resultMap.get(JsonKey.ID)).trim(),
((String) resultMap.get(JsonKey.NAME)).trim());
}
}
}
List<Map<String, String>> tempRoleList = new CopyOnWriteArrayList<>();
tempRoleMap
.entrySet()
.parallelStream()
.forEach(
(roleSetItem) -> {
if (roleSet.contains(roleSetItem.getKey().trim())) {
Map<String, String> role = new HashMap<>();
role.put(JsonKey.ID, roleSetItem.getKey().trim());
role.put(JsonKey.NAME, ((String) roleSetItem.getValue()).trim());
tempRoleList.add(role);
roleSet.remove(roleSetItem.getKey().trim());
}
});
roleMap = tempRoleMap;
roleList = tempRoleList;
}
  /** @return the roleMap */
  public static Map<String, Object> getRoleMap() {
    return roleMap;
  }

  /** @return the roleList */
  public static List<Map<String, String>> getUserReadRoleList() {
    return roleList;
  }

  /** @return the configSettings */
  public static Map<String, String> getConfigSettings() {
    return configSettings;
  }

  /** @return stateCode -> (userType -> subTypes) derived from form API data. */
  public static Map<String, Map<String, List<String>>> getUserTypesConfig() {
    return userTypeOrSubTypeConfigMap;
  }

  /** @return frameworkId -> (category -> term list) cache. */
  public static Map<String, Map<String, List<Map<String, String>>>> getFrameworkCategoriesMap() {
    return frameworkCategoriesMap;
  }

  public static void setFrameworkFieldsConfig(Map<String, List<String>> frameworkFieldsConfig) {
    DataCacheHandler.frameworkFieldsConfig = frameworkFieldsConfig;
  }

  public static Map<String, List<String>> getFrameworkFieldsConfig() {
    return frameworkFieldsConfig;
  }

  /** Replace the cached category map for one framework id. */
  public static void updateFrameworkCategoriesMap(
      String frameworkId, Map<String, List<Map<String, String>>> frameworkCacheMap) {
    DataCacheHandler.frameworkCategoriesMap.put(frameworkId, frameworkCacheMap);
  }

  public static Map<String, List<String>> getHashtagIdFrameworkIdMap() {
    return hashtagIdFrameworkIdMap;
  }

  /** @return location type -> ordinal map built by initLocationOrderMap(). */
  public static Map<String, Integer> getLocationOrderMap() {
    return orderMap;
  }

  public static Map<String, List<String>> getLocationTypeConfig() {
    return stateLocationTypeConfigMap;
  }

  public static Map<String, Map<String, Object>> getFormApiDataConfigMap() {
    return formApiDataConfigMap;
  }
}
|
# Test-support schema: (re)create the catalogs table on the current
# connection; force: true drops any pre-existing table first.
ActiveRecord::Base.connection.create_table(:catalogs, force: true) do |t|
  t.string :name
  t.timestamps
end

# Minimal model backed by the table above.
class Catalog < ActiveRecord::Base
end
|
<gh_stars>10-100
package io.opensphere.core.control.action.context;
import io.opensphere.core.model.ScreenPosition;
/** The context key for actions with only a screen position. */
/** The context key for actions with only a screen position. */
public class ScreenPositionContextKey
{
    /** Where on screen the action occurred. */
    private final ScreenPosition myScreenPosition;

    /**
     * Constructor.
     *
     * @param position The on-screen position of the action.
     */
    public ScreenPositionContextKey(ScreenPosition position)
    {
        this.myScreenPosition = position;
    }

    /**
     * Get the screen position of the action.
     *
     * @return The screen position.
     */
    public ScreenPosition getPosition()
    {
        return myScreenPosition;
    }
}
|
library(ggplot2)

# Clustering results: cluster id assigned to each observation
cluster_labels <- c(1,2,2,3,1,2,3,1,3,2)
# Data used for clustering
data <- c(76,44,48,80,78,47,85,75,81,46)

# Create data frame.
# BUG FIX: store the cluster id as a factor -- with a numeric x aesthetic
# geom_boxplot() collapses everything into a single box (and warns about a
# continuous x); a factor yields one box per cluster as intended.
df <- data.frame(cluster_labels = factor(cluster_labels), data)

# Plot data using ggplot
ggplot(data=df, aes(x=cluster_labels, y=data)) +
  geom_boxplot() +
  xlab("Number of Clusters") +
  ylab("Scores") +
  ggtitle("Clustering Analysis")
|
#!/bin/bash
# Default resolver, distro-specific solutions should override this
# (a distro script sourced below redefines RESOLVE_DEPS with a real
# implementation; `error` is expected to be provided by the caller).
RESOLVE_DEPS() {
  echo
  error "No dependency resolver was found for your distro."
  error "Please install the libraries and/or tools listed above as missing before continuing."
  echo
  exit 1
}

# Gentoo: hand off to the distro-specific script next to this one.
# NOTE(review): the bare `return` implies this file is meant to be sourced,
# not executed -- confirm callers always `source` it.
if [[ -e "/etc/gentoo-release" ]]; then
  . "$(dirname "${BASH_SOURCE[0]}")/gentoo.sh"
  return
fi
|
/**
* Boilerplate code for Higher Order Components
*/
import React, { Component } from 'react';
export default function(ComposedComponent) {
class SomeComponent extends Component {
render() {
return <ComposedComponent {...this.props} />
}
}
return SomeComponent;
}
|
class RangeCheck:
    """Validate that a value lies within [min, max], with bracket
    inclusivity controlled by includemin/includemax."""

    def __init__(self, min, max, includemin=True, includemax=True):
        self.min = min
        self.max = max
        self.includemin = includemin
        self.includemax = includemax
        # Human-readable range in interval notation, e.g. "[1,10)".
        self.errstr = "Value must be in the range {}{},{}{}".format(
            "[" if self.includemin else "(",
            self.min, self.max,
            "]" if self.includemax else ")")

    def validate(self, value):
        """Return [] when value is acceptable, otherwise a two-element
        list: the error message followed by the range description."""
        strictly_inside = self.min < value < self.max
        on_min = (self.includemin == True) and value == self.min
        on_max = (self.includemax == True) and value == self.max
        if strictly_inside or on_min or on_max:
            return []
        return ["Value \"{}\" is not valid.".format(value) + self.errstr,
                self.errstr]
class GreaterThan:
    """Validate that a value is greater than (or, optionally, equal to)
    a lower bound."""

    def __init__(self, min, includemin=True):
        self.min = min
        self.includemin = includemin
        suffix = " or equal to" if self.includemin else ""
        self.errstr = "Value must be greater than{} {}".format(suffix, self.min)

    def validate(self, value):
        """Return [] when value passes, otherwise a two-element list:
        the error message followed by the bound description."""
        if value > self.min or ((self.includemin == True) and value == self.min):
            return []
        return ["Value \"{}\" is not valid.".format(value) + self.errstr,
                self.errstr]
class LessThan:
    """Validate that a value is less than (or, optionally, equal to)
    an upper bound."""

    def __init__(self, max, includemax=True):
        self.max = max
        self.includemax = includemax
        suffix = " or equal to" if self.includemax else ""
        self.errstr = "Value must be less than{} {}".format(suffix, self.max)

    def validate(self, value):
        """Return [] when value passes, otherwise a two-element list:
        the error message followed by the bound description."""
        if value < self.max or ((self.includemax == True) and value == self.max):
            return []
        return ["Value \"{}\" is not valid.".format(value) + self.errstr,
                self.errstr]
class InList:
    """Validate that a value is one of an allowed list; string comparisons
    are case-insensitive."""

    def __init__(self, lst):
        # Normalize string entries to lowercase for case-insensitive lookup.
        self.lst = [l.lower() if type(l) == str else l for l in lst]
        # One " value" line per allowed entry, appended to error output.
        self.errstr = [" {}".format(v) for v in self.lst]

    def validate(self, value):
        """Return [] when value is allowed, otherwise an error header line
        followed by one line per allowed value."""
        candidate = value.lower() if type(value) == str else value
        if candidate in self.lst:
            return []
        header = "Value \"{}\" is not valid. Value must be one of the following {} values".format(
            candidate, len(self.lst))
        errors = [header]
        errors.extend(self.errstr)
        return errors
|
import VideoCapture from '../containers/Lounge/VideoCapture';
import mainEvent from '../containers/Lounge/mainFunctions/mainEvent';
// IPC-style handler: run the clip-processing main event over a video file.
export default function mainEvents(e, arg) {
  // Unpack the job parameters from the payload.
  const { videoFilePath, start, end } = arg;
  // Open a capture on the requested file, then delegate the actual work.
  const capture = new VideoCapture({ path: videoFilePath });
  return mainEvent({ vCap: capture, start, end });
}
|
/**
 * Build a random string of length n drawn from [0-9A-Za-z].
 *
 * @param {number} n desired length.
 * @returns {string} n random alphanumeric characters.
 */
function randomString(n) {
  const characterSet = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz';
  let result = '';
  // Generate a random string
  for (let i = 0; i < n; i++) {
    // Get a random character from the set
    const randomCharacter = characterSet[Math.floor(Math.random() * characterSet.length)];
    // Append it to the string
    result += randomCharacter;
  }
  return result;
}

// BUG FIX: `const randomString = randomString(10)` shadowed the function
// with a same-named constant, which throws ("Identifier 'randomString' has
// already been declared" / TDZ access) before the call can run. Use a
// differently-named variable for the generated value.
const generated = randomString(10);
console.log(generated);
|
#!/bin/bash
# Deploy the Ambassador ingress controller via Helm (Helm 2 style: --name).
RELEASE='kube-ambassador-r1'
# reference: https://github.com/datawire/ambassador
# reference: https://hub.helm.sh/charts/stable/ambassador
# reference: https://www.getambassador.io/
echo "#"
echo "# Create namespace, service-accounts and role-bindings"
echo "#"
kubectl apply -f ~/devenv/config/kube-manifests/kube-ingress
kubectl apply -f ~/devenv/config/kube-manifests/kube-ingress/ambassador
echo ""
echo "#"
echo "# Deploy ambassador"
echo "#"
helm repo update stable
#helm fetch stable/ambassador --prov --destination ~/devenv/config/helm-charts
helm fetch stable/ambassador --destination ~/devenv/config/helm-charts
# BUG FIX: quote the -name pattern so the shell cannot glob-expand
# 'ambassador*.tgz' against the current directory before find sees it.
# -mmin -0.5 selects the chart downloaded by the fetch just above
# (fractional -mmin is a GNU findutils feature -- TODO confirm GNU find).
CHART=$( find ~/devenv/config/helm-charts/ -type f -name 'ambassador*.tgz' -mmin -0.5 )
cat <<EOF > ~/devenv/config/helm-charts/ambassador-values-custom.yaml
nameOverride: kube-ambassador
fullnameOverride: $RELEASE
replicaCount: 1
EOF
#helm template $CHART \
# --namespace kube-ingress \
# --name $RELEASE \
# --values ~/devenv/config/helm-charts/ambassador-values-custom.yaml \
#> ~/devenv/config/helm-charts/ambassador-manifest-rendered.yaml
# If a provenance file exists, do a verified dry-run first and log it.
# BUG FIX: quote "$CHART" -- the resolved path may be empty or contain spaces.
if [ -f "$CHART.prov" ] ; then
  helm install "$CHART" --verify --dry-run --debug \
   --namespace kube-ingress \
   --name "$RELEASE" \
   --values ~/devenv/config/helm-charts/ambassador-values-custom.yaml \
   > ~/devenv/logs/helm/ambassador.log
fi
helm install "$CHART" \
 --namespace kube-ingress \
 --name "$RELEASE" \
 --values ~/devenv/config/helm-charts/ambassador-values-custom.yaml
helm get manifest "$RELEASE" \
 > ~/devenv/config/helm-charts/ambassador-manifest-applied.yaml
echo ""
# taint: record deployment time so other scripts can detect it
date > ~/devenv/taints/deployed-ambassador
|
class Admin::Mailer < ActionMailer::Base
  default :from => Typus.mailer_sender

  # Sends the reset-password email containing the given reset URL to the user.
  def reset_password_link(user, url)
    @user = user
    @url  = url
    subject_line = "[#{Typus.admin_title}] #{_("Reset password")}"
    mail(:to => user.email, :subject => subject_line)
  end
end
|
$(document).ready(function () {
function getCookie(cname) {
var name = cname + "=";
var decodedCookie = decodeURIComponent(document.cookie);
var ca = decodedCookie.split(';');
for (var i = 0; i < ca.length; i++) {
var c = ca[i];
while (c.charAt(0) == ' ') {
c = c.substring(1);
}
if (c.indexOf(name) == 0) {
return c.substring(name.length, c.length);
}
}
return "";
}
let table_sent_items = $('#table_sent_messges').DataTable({
'columnDefs': [ {
'targets': 0, /* column index */
'orderable': false, /* true or false */
}],
"drawCallback": function( settings ) {
$("#table_sent_messges thead").remove();
},
select: {
style: 'os',
selector: 'td:first-child'
},
order: [[ 1, 'asc' ]]
});
$("#btn-select-all").click(function(e){
//https://jsfiddle.net/annoyingmouse/yxLrLr8o/
//https://datatables.net/extensions/select/examples/initialisation/checkbox.html
//https://datatables.net/forums/discussion/44089/is-it-possible-to-hide-the-row-with-column-headers
let click = $(this).data('clicks') ? $(this).data('clicks',false) : $(this).data('clicks',true);
if(click.data('clicks')){
$(".fa", this).removeClass("fa-square-o").addClass('fa-check-square-o');
table_sent_items.rows().select();
}else{
$(".fa", this).removeClass("fa-check-square-o").addClass('fa-square-o');
table_sent_items.rows().deselect();
}
});
$("#btn-delete").click(function(e){
let url = $(this).attr("data-url");
let nodes = table_sent_items.rows(".selected").nodes();
let id_list = [];
if(nodes.length > 0){
for(let index=0; index<nodes.length; index++){
let tr = $(nodes[index]).children()[0];
let id = $(tr).attr('data-id');
id_list.push(id);
}
let data = {
'id_list': id_list,
}
data = JSON.stringify(data);
$.ajax({
url: url,
type: 'GET',
dataType: 'json',
beforeSend: () => {
$("#modal-default").data('data',data).modal("show");
},
success: (data) =>{
$("#modal-default .modal-content")
.html(data.html_form)
.find(".modal-body")
.html(`<p>Are your sure you want to delete this <b>${id_list.length}</b> messages permanently?</p>`);
},
complete: (data) => {
},
error: (data) => {
}
});
}else{
$("#modal-danger .modal-content")
.html(`
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">×</span></button>
<h4 class="modal-title">No record selected</h4>
</div>
<div class="modal-body">
<p>Please select the messages you want to <b>delete</b> permanently!</p>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-outline pull-left" data-dismiss="modal">Close</button>
</div>
`);
$("#modal-danger").modal("show");
}
});
$("#modal-default").on("submit",".delete-sent-messages-permanently-form", function(e){
e.preventDefault();
let form = $(this);
let data = $("#modal-default").data("data");
let url = form.attr('data-url');
$.ajax({
// https://docs.djangoproject.com/en/2.2/ref/csrf/
headers: { "X-CSRFToken": getCookie("csrftoken") },
type: 'POST', // must be in POST
url: url,
data: data, // json object to be transfer
dataType: 'json',
success: (data) => {
if(data.status){
$("#modal-default").modal("hide");
var rows = table_sent_items
.rows( '.selected' )
.remove()
.draw();
}
},
complete: (data) => {
},
error: (data) => {
}
});
return false;
});
});
|
/**
 * Ensure that a value is a collection.
 *
 * Returns the input unchanged when it is an array, and a fresh empty array
 * for `null`/`undefined` (or any non-array) input.
 *
 * @param collection A value that might be a collection.
 */
export function getCollection<T>(collection: T[] | null | undefined): T[] {
  // `<T extends any>` was a no-op constraint; a bare type parameter is the
  // idiomatic form and behaves identically.
  return Array.isArray(collection) ? collection : [];
}
/**
 * Ensure that a value is a collection that has items in it.
 *
 * Acts as a type guard: narrows `collection` to `T[]` when it is a
 * non-empty array.
 *
 * @param collection A value that might be a collection with items in it.
 */
export function hasItems<T>(collection: T[] | null | undefined): collection is T[] {
  // `<T extends any>` was a no-op constraint; a bare type parameter is the
  // idiomatic form and behaves identically.
  return Array.isArray(collection) && collection.length > 0;
}
|
#!/bin/bash
# Start a single-node ZooKeeper, wait until it reports healthy, then run the
# Kafka broker in the foreground (keeps the container/shell occupied).
/opt/zookeeper/bin/zkServer.sh start
# 'zkServer.sh status' exits non-zero until ZooKeeper is up; poll every 100ms.
until /opt/zookeeper/bin/zkServer.sh status; do sleep 0.1; done
/opt/kafka/bin/kafka-server-start.sh /opt/kafka/config/server.properties
|
/*
* Copyright 2002, 2003 <NAME>, <NAME>. All rights reserved.
* Distributed under the terms of the MIT License.
*/
#include "DefaultManager.h"
#include <Application.h>
#include <Directory.h>
#include <File.h>
#include <FindDirectory.h>
#include <MediaNode.h>
#include <OS.h>
#include <Path.h>
#include <TimeSource.h>
#include <string.h>
#include "debug.h"
#include "DormantNodeManager.h"
#include "media_server.h"
#include "NodeManager.h"
/* no locking used in this file, we assume that the caller (NodeManager) does it.
 */
// Upper bounds for the stack-allocated info arrays used during node scans.
#define MAX_NODE_INFOS 10
#define MAX_INPUT_INFOS 10
// Per-category record marker and the four default-node category tags that
// are written to / read from the settings file.
const uint32 kMsgHeader = 'sepx';
const uint32 kMsgTypeVideoIn = 0xffffffef;
const uint32 kMsgTypeVideoOut = 0xffffffee;
const uint32 kMsgTypeAudioIn = 0xfffffffe;
const uint32 kMsgTypeAudioOut = 0xffffffff;
// Field names used inside each per-category settings BMessage.
const char *kDefaultManagerType = "be:_default";
const char *kDefaultManagerAddon = "be:_addon_id";
const char *kDefaultManagerFlavorId = "be:_internal_id";
const char *kDefaultManagerFlavorName = "be:_flavor_name";
const char *kDefaultManagerPath = "be:_path";
const char *kDefaultManagerInput = "be:_input_id";
// Settings location below B_USER_SETTINGS_DIRECTORY.
const char *kDefaultManagerSettingsDirectory = "Media";
const char *kDefaultManagerSettingsFile = "MDefaultManager";
// Initialize all default node ids to -1 ("not set") and prepare the
// settings-file begin/end markers used by LoadState()/SaveState().
DefaultManager::DefaultManager()
	:
	fMixerConnected(false),
	fPhysicalVideoOut(-1),
	fPhysicalVideoIn(-1),
	fPhysicalAudioOut(-1),
	fPhysicalAudioIn(-1),
	fSystemTimeSource(-1),
	fTimeSource(-1),
	fAudioMixer(-1),
	fPhysicalAudioOutInputID(0),
	fRescanThread(-1),
	fRescanRequested(0),
	fRescanLock("rescan default manager"),
	fRoster(NULL)
{
	strcpy(fPhysicalAudioOutInputName, "default");
	// Magic begin/end markers for the settings file; LoadState() reads the
	// begin marker back from disk and SaveState() writes both out verbatim.
	fBeginHeader[0] = 0xab00150b;
	fBeginHeader[1] = 0x18723462;
	fBeginHeader[2] = 0x00000002;
	fEndHeader[0] = 0x7465726d;
	fEndHeader[1] = 0x6d666c67;
	fEndHeader[2] = 0x00000002;
	fRoster = BMediaRoster::Roster();
	if (fRoster == NULL)
		TRACE("DefaultManager: The roster is NULL\n");
}
// Nothing to release: fMsgList items are owned for the process lifetime and
// fRoster is a shared roster pointer, not owned by this object.
DefaultManager::~DefaultManager()
{
}
// this is called by the media_server *before* any add-ons have been loaded
// Reads the saved defaults file into fMsgList (one BMessage per category).
// Returns B_OK on success, B_ERROR on any short read or unflatten failure.
status_t
DefaultManager::LoadState()
{
	CALLED();
	status_t err = B_OK;
	BPath path;
	if ((err = find_directory(B_USER_SETTINGS_DIRECTORY, &path)) != B_OK)
		return err;
	path.Append(kDefaultManagerSettingsDirectory);
	path.Append(kDefaultManagerSettingsFile);
	BFile file(path.Path(), B_READ_ONLY);
	uint32 categoryCount;
	// Read the 3-word begin marker straight into fBeginHeader.
	// NOTE(review): the values read from disk are not validated against the
	// markers set in the constructor -- confirm this is intentional.
	ssize_t size = sizeof(uint32) * 3;
	if (file.Read(fBeginHeader, size) < size)
		return B_ERROR;
	TRACE("0x%08lx %ld\n", fBeginHeader[0], fBeginHeader[0]);
	TRACE("0x%08lx %ld\n", fBeginHeader[1], fBeginHeader[1]);
	TRACE("0x%08lx %ld\n", fBeginHeader[2], fBeginHeader[2]);
	size = sizeof(uint32);
	if (file.Read(&categoryCount, size) < size) {
		fprintf(stderr,
			"DefaultManager::LoadState() failed to read categoryCount\n");
		return B_ERROR;
	}
	TRACE("DefaultManager::LoadState() categoryCount %ld\n", categoryCount);
	// Each record is: kMsgHeader word, category tag word, flattened BMessage.
	while (categoryCount--) {
		BMessage settings;
		uint32 msg_header;
		uint32 default_type;
		// NOTE(review): msg_header/default_type are read but never checked
		// against kMsgHeader or the kMsgType* tags -- confirm intentional.
		if (file.Read(&msg_header, size) < size) {
			fprintf(stderr,
				"DefaultManager::LoadState() failed to read msg_header\n");
			return B_ERROR;
		}
		if (file.Read(&default_type, size) < size) {
			fprintf(stderr,
				"DefaultManager::LoadState() failed to read default_type\n");
			return B_ERROR;
		}
		if (settings.Unflatten(&file) == B_OK)
			fMsgList.AddItem(new BMessage(settings));
		else
			fprintf(stderr, "DefaultManager::LoadState() failed to unflatten\n");
	}
	size = sizeof(uint32) * 3;
	if (file.Read(fEndHeader,size) < size) {
		fprintf(stderr,
			"DefaultManager::LoadState() failed to read fEndHeader\n");
		return B_ERROR;
	}
	TRACE("LoadState returns B_OK\n");
	return B_OK;
}
// Persist the current physical-node defaults to the settings file.
// For each set default it resolves dormant-node info via node_manager,
// builds a settings BMessage, and writes begin-marker / count / records /
// end-marker to disk. Returns B_ERROR on any resolution or write failure.
// NOTE(review): the heap BMessages collected in 'list' leak on the early
// B_ERROR returns (they are only deleted in the write loop) -- confirm.
status_t
DefaultManager::SaveState(NodeManager *node_manager)
{
	CALLED();
	status_t err = B_OK;
	BPath path;
	BList list;
	if ((err = find_directory(B_USER_SETTINGS_DIRECTORY, &path, true)) != B_OK)
		return err;
	path.Append(kDefaultManagerSettingsDirectory);
	if ((err = create_directory(path.Path(), 0755)) != B_OK)
		return err;
	path.Append(kDefaultManagerSettingsFile);
	uint32 default_types[] = {kMsgTypeVideoIn, kMsgTypeVideoOut,
		kMsgTypeAudioIn, kMsgTypeAudioOut};
	media_node_id media_node_ids[] = {fPhysicalVideoIn, fPhysicalVideoOut,
		fPhysicalAudioIn, fPhysicalAudioOut};
	for (uint32 i = 0; i < sizeof(default_types) / sizeof(default_types[0]);
		i++) {
		// we call the node manager to have more infos about nodes
		dormant_node_info info;
		media_node node;
		entry_ref ref;
		if (node_manager->GetCloneForID(media_node_ids[i], be_app->Team(),
				&node) != B_OK
			|| node_manager->GetDormantNodeInfo(node, &info) != B_OK
			|| node_manager->ReleaseNodeReference(media_node_ids[i],
				be_app->Team()) != B_OK
			|| node_manager->GetAddOnRef(info.addon, &ref) != B_OK) {
			if (media_node_ids[i] != -1) {
				// failed to get node info thus just return
				return B_ERROR;
			}
			// Default not set for this category: simply skip it.
			continue;
		}
		BMessage *settings = new BMessage();
		settings->AddInt32(kDefaultManagerType, default_types[i]);
		BPath path(&ref);
		settings->AddInt32(kDefaultManagerAddon, info.addon);
		settings->AddInt32(kDefaultManagerFlavorId, info.flavor_id);
		// Only the audio-out category carries a meaningful input id.
		settings->AddInt32(kDefaultManagerInput,
			default_types[i] == kMsgTypeAudioOut ? fPhysicalAudioOutInputID : 0);
		settings->AddString(kDefaultManagerFlavorName, info.name);
		settings->AddString(kDefaultManagerPath, path.Path());
		list.AddItem(settings);
		TRACE("message %s added\n", info.name);
	}
	BFile file(path.Path(), B_WRITE_ONLY | B_CREATE_FILE | B_ERASE_FILE);
	if (file.Write(fBeginHeader, sizeof(uint32)*3) < (int32)sizeof(uint32)*3)
		return B_ERROR;
	int32 categoryCount = list.CountItems();
	if (file.Write(&categoryCount, sizeof(uint32)) < (int32)sizeof(uint32))
		return B_ERROR;
	for (int32 i = 0; i < categoryCount; i++) {
		BMessage *settings = (BMessage *)list.ItemAt(i);
		uint32 default_type;
		if (settings->FindInt32(kDefaultManagerType,
				(int32*)&default_type) < B_OK)
			return B_ERROR;
		if (file.Write(&kMsgHeader, sizeof(uint32)) < (int32)sizeof(uint32))
			return B_ERROR;
		if (file.Write(&default_type, sizeof(uint32)) < (int32)sizeof(uint32))
			return B_ERROR;
		if (settings->Flatten(&file) < B_OK)
			return B_ERROR;
		delete settings;
	}
	if (file.Write(fEndHeader, sizeof(uint32)*3) < (int32)sizeof(uint32)*3)
		return B_ERROR;
	return B_OK;
}
// Record a new default node for the given category. AUDIO_MIXER and
// TIME_SOURCE are managed internally and cannot be set from outside.
status_t
DefaultManager::Set(media_node_id node_id, const char *input_name,
	int32 input_id, node_type type)
{
	CALLED();
	TRACE("DefaultManager::Set type : %i, node : %li, input : %li\n", type,
		node_id, input_id);
	switch (type) {
		case VIDEO_INPUT:
			fPhysicalVideoIn = node_id;
			return B_OK;
		case AUDIO_INPUT:
			fPhysicalAudioIn = node_id;
			return B_OK;
		case VIDEO_OUTPUT:
			fPhysicalVideoOut = node_id;
			return B_OK;
		case AUDIO_MIXER:
			return B_ERROR;
		case AUDIO_OUTPUT:
			// Audio out additionally remembers which input of the node to use.
			fPhysicalAudioOut = node_id;
			fPhysicalAudioOutInputID = input_id;
			strcpy(fPhysicalAudioOutInputName,
				input_name ? input_name : "<null>");
			return B_OK;
		case TIME_SOURCE:
			return B_ERROR;
		// called by the media_server's ServerApp::StartSystemTimeSource()
		case SYSTEM_TIME_SOURCE:
		{
			ASSERT(fSystemTimeSource == -1);
			fSystemTimeSource = node_id;
			return B_OK;
		}
		default:
		{
			ERROR("DefaultManager::Set Error: called with unknown type %d\n",
				type);
			return B_ERROR;
		}
	}
}
// Look up the default node for the given category. Returns B_NAME_NOT_FOUND
// when a physical default has not been discovered/set yet. Only
// AUDIO_OUTPUT_EX fills input_name and inputid; the other categories write
// *nodeid only.
status_t
DefaultManager::Get(media_node_id *nodeid, char *input_name, int32 *inputid,
	node_type type)
{
	CALLED();
	switch (type) {
		case VIDEO_INPUT: // output: nodeid
			if (fPhysicalVideoIn == -1)
				return B_NAME_NOT_FOUND;
			*nodeid = fPhysicalVideoIn;
			return B_OK;
		case AUDIO_INPUT: // output: nodeid
			if (fPhysicalAudioIn == -1)
				return B_NAME_NOT_FOUND;
			*nodeid = fPhysicalAudioIn;
			return B_OK;
		case VIDEO_OUTPUT: // output: nodeid
			if (fPhysicalVideoOut == -1)
				return B_NAME_NOT_FOUND;
			*nodeid = fPhysicalVideoOut;
			return B_OK;
		case AUDIO_OUTPUT: // output: nodeid
			if (fPhysicalAudioOut == -1)
				return B_NAME_NOT_FOUND;
			*nodeid = fPhysicalAudioOut;
			return B_OK;
		case AUDIO_OUTPUT_EX: // output: nodeid, input_name, input_id
			if (fPhysicalAudioOut == -1)
				return B_NAME_NOT_FOUND;
			*nodeid = fPhysicalAudioOut;
			*inputid = fPhysicalAudioOutInputID;
			strcpy(input_name, fPhysicalAudioOutInputName);
			return B_OK;
		case AUDIO_MIXER: // output: nodeid
			if (fAudioMixer == -1)
				return B_NAME_NOT_FOUND;
			*nodeid = fAudioMixer;
			return B_OK;
		case TIME_SOURCE:
			// Prefer the discovered DAC time source; fall back to the
			// system time source, which always exists.
			if (fTimeSource != -1)
				*nodeid = fTimeSource;
			else
				*nodeid = fSystemTimeSource;
			return B_OK;
		case SYSTEM_TIME_SOURCE:
			*nodeid = fSystemTimeSource;
			return B_OK;
		default:
		{
			ERROR("DefaultManager::Get Error: called with unknown type %d\n",
				type);
			return B_ERROR;
		}
	}
}
// this is called by the media_server *after* the initial add-on loading
// has been done
// Queues a rescan request and lazily spawns the worker thread that performs
// it; fRescanLock serializes against the worker's own locking.
status_t
DefaultManager::Rescan()
{
	BAutolock locker(fRescanLock);
	atomic_add(&fRescanRequested, 1);
	// Spawn the worker only if one is not already running (it resets
	// fRescanThread to -1 when it finishes).
	if (fRescanThread < 0) {
		fRescanThread = spawn_thread(rescan_thread, "rescan defaults",
			B_NORMAL_PRIORITY - 2, this);
		resume_thread(fRescanThread);
	}
	return B_OK;
}
// Static thread entry point: recover the DefaultManager instance from the
// opaque thread argument and run the rescan loop on it.
int32
DefaultManager::rescan_thread(void *arg)
{
	static_cast<DefaultManager *>(arg)->_RescanThread();
	return 0;
}
// Worker loop: while rescan requests are pending, (re)discover each missing
// physical default, the mixer and the time source, then try to connect the
// mixer to the sound card. The lock is dropped during the scan so new
// Rescan() requests can queue up; atomic_and drains the request counter.
void
DefaultManager::_RescanThread()
{
	TRACE("DefaultManager::_RescanThread() enter\n");
	BAutolock locker(fRescanLock);
	while (atomic_and(&fRescanRequested, 0) != 0) {
		locker.Unlock();
		// We do not search for the system time source,
		// it should already exist
		ASSERT(fSystemTimeSource != -1);
		if (fPhysicalVideoOut == -1) {
			_FindPhysical(&fPhysicalVideoOut, kMsgTypeVideoOut, false,
				B_MEDIA_RAW_VIDEO);
			_FindPhysical(&fPhysicalVideoOut, kMsgTypeVideoOut, false,
				B_MEDIA_ENCODED_VIDEO);
		}
		if (fPhysicalVideoIn == -1) {
			_FindPhysical(&fPhysicalVideoIn, kMsgTypeVideoIn, true,
				B_MEDIA_RAW_VIDEO);
			_FindPhysical(&fPhysicalVideoIn, kMsgTypeVideoIn, true,
				B_MEDIA_ENCODED_VIDEO);
		}
		if (fPhysicalAudioOut == -1)
			_FindPhysical(&fPhysicalAudioOut, kMsgTypeAudioOut, false,
				B_MEDIA_RAW_AUDIO);
		if (fPhysicalAudioIn == -1)
			_FindPhysical(&fPhysicalAudioIn, kMsgTypeAudioIn, true,
				B_MEDIA_RAW_AUDIO);
		if (fAudioMixer == -1)
			_FindAudioMixer();
		// The normal time source is searched for after the
		// Physical Audio Out has been created.
		if (fTimeSource == -1)
			_FindTimeSource();
		// Connect the mixer and physical audio out (soundcard)
		if (!fMixerConnected && fAudioMixer != -1 && fPhysicalAudioOut != -1) {
			fMixerConnected = _ConnectMixerToOutput() == B_OK;
			if (!fMixerConnected)
				TRACE("DefaultManager: failed to connect mixer and "
					"soundcard\n");
		} else {
			TRACE("DefaultManager: Did not try to connect mixer and "
				"soundcard\n");
		}
		// Tell the add-on server once a working audio chain exists.
		if (fMixerConnected) {
			add_on_server_rescan_finished_notify_command cmd;
			SendToAddOnServer(ADD_ON_SERVER_RESCAN_FINISHED_NOTIFY, &cmd,
				sizeof(cmd));
		}
		locker.Lock();
	}
	// Mark the worker as finished so Rescan() can spawn a new one.
	fRescanThread = -1;
	BMessage msg(MEDIA_SERVER_RESCAN_COMPLETED);
	be_app->PostMessage(&msg);
	TRACE("DefaultManager::_RescanThread() leave\n");
}
// Find a live physical node for the given category and store its id in *id.
// If a saved settings message exists for the category, only a node matching
// the saved add-on path/flavor is accepted; the Null audio driver ("None
// In"/"None Out") is kept as a provisional fallback and Firewire DV nodes
// are skipped outright.
void
DefaultManager::_FindPhysical(volatile media_node_id *id, uint32 default_type,
	bool isInput, media_type type)
{
	live_node_info info[MAX_NODE_INFOS];
	media_format format;
	int32 count;
	status_t rv;
	BMessage *msg = NULL;
	BPath msgPath;
	dormant_node_info msgDninfo;
	int32 input_id;
	bool isAudio = (type == B_MEDIA_RAW_AUDIO)
		|| (type == B_MEDIA_ENCODED_AUDIO);
	// Locate the saved settings message for this category, if any.
	for (int32 i = 0; i < fMsgList.CountItems(); i++) {
		msg = (BMessage *)fMsgList.ItemAt(i);
		int32 msgType;
		if (msg->FindInt32(kDefaultManagerType, &msgType) == B_OK
			&& ((uint32)msgType == default_type)) {
			const char *name = NULL;
			const char *path = NULL;
			msg->FindInt32(kDefaultManagerAddon, &msgDninfo.addon);
			msg->FindInt32(kDefaultManagerFlavorId, &msgDninfo.flavor_id);
			msg->FindInt32(kDefaultManagerInput, &input_id);
			msg->FindString(kDefaultManagerFlavorName, &name);
			msg->FindString(kDefaultManagerPath, &path);
			if (name)
				strcpy(msgDninfo.name, name);
			if (path)
				msgPath = BPath(path);
			break;
		}
	}
	// Query live nodes with a matching physical kind and media format;
	// for inputs we match their output format, for outputs their input.
	format.type = type;
	count = MAX_NODE_INFOS;
	rv = fRoster->GetLiveNodes(&info[0], &count,
		isInput ? NULL : &format, isInput ? &format : NULL, NULL,
		isInput ? B_BUFFER_PRODUCER | B_PHYSICAL_INPUT
			: B_BUFFER_CONSUMER | B_PHYSICAL_OUTPUT);
	if (rv != B_OK || count < 1) {
		TRACE("Couldn't find physical %s %s node\n",
			isAudio ? "audio" : "video", isInput ? "input" : "output");
		return;
	}
	for (int i = 0; i < count; i++)
		TRACE("info[%d].name %s\n", i, info[i].name);
	for (int i = 0; i < count; i++) {
		if (isAudio) {
			if (isInput) {
				if (0 == strcmp(info[i].name, "None In")) {
					// we keep the Null audio driver if nothing else matches
					*id = info[i].node.node;
					continue;
				}
				// skip the Firewire audio driver
				if (0 == strcmp(info[i].name, "DV Input"))
					continue;
			} else {
				if (0 == strcmp(info[i].name, "None Out")) {
					// we keep the Null audio driver if nothing else matches
					*id = info[i].node.node;
					if (msg)
						fPhysicalAudioOutInputID = input_id;
					continue;
				}
				// skip the Firewire audio driver
				if (0 == strcmp(info[i].name, "DV Output"))
					continue;
			}
		}
		if (msg) { // we have a default info msg
			// Saved default exists: require matching flavor, name and
			// add-on path before accepting this candidate.
			dormant_node_info dninfo;
			if (fRoster->GetDormantNodeFor(info[i].node,
					&dninfo) != B_OK) {
				ERROR("Couldn't GetDormantNodeFor\n");
				continue;
			}
			if (dninfo.flavor_id != msgDninfo.flavor_id
				|| strcmp(dninfo.name, msgDninfo.name) != 0) {
				ERROR("Doesn't match flavor or name\n");
				continue;
			}
			BPath path;
			if (gDormantNodeManager->FindAddOnPath(&path, dninfo.addon) != B_OK
				|| path != msgPath) {
				ERROR("Doesn't match : path\n");
				continue;
			}
		}
		TRACE("Default physical %s %s \"%s\" created!\n",
			isAudio ? "audio" : "video", isInput ? "input" : "output",
			info[i].name);
		*id = info[i].node.node;
		if (msg && isAudio && !isInput)
			fPhysicalAudioOutInputID = input_id;
		return;
	}
}
// Pick the preferred time source: first try the current default physical
// audio out (if it is a B_TIME_SOURCE), otherwise scan for another physical
// audio output that can act as one, skipping the Null and Firewire drivers.
void
DefaultManager::_FindTimeSource()
{
	live_node_info info[MAX_NODE_INFOS];
	media_format input; /* a physical audio output has a logical data input (DAC)*/
	int32 count;
	status_t rv;
	/* First try to use the current default physical audio out
	 */
	if (fPhysicalAudioOut != -1) {
		media_node clone;
		if (fRoster->GetNodeFor(fPhysicalAudioOut,
				&clone) == B_OK) {
			if (clone.kind & B_TIME_SOURCE) {
				fTimeSource = clone.node;
				fRoster->StartTimeSource(clone,
					system_time() + 1000);
				fRoster->ReleaseNode(clone);
				TRACE("Default DAC timesource created!\n");
				return;
			}
			fRoster->ReleaseNode(clone);
		} else {
			TRACE("Default DAC is not a timesource!\n");
		}
	} else {
		TRACE("Default DAC node does not exist!\n");
	}
	/* Now try to find another physical audio out node
	 */
	input.type = B_MEDIA_RAW_AUDIO;
	count = MAX_NODE_INFOS;
	rv = fRoster->GetLiveNodes(&info[0], &count, &input, NULL, NULL,
		B_TIME_SOURCE | B_PHYSICAL_OUTPUT);
	if (rv == B_OK && count >= 1) {
		for (int i = 0; i < count; i++)
			printf("info[%d].name %s\n", i, info[i].name);
		for (int i = 0; i < count; i++) {
			// The BeOS R5 None Out node pretend to be a physical time source,
			// that is pretty dumb
			// skip the Null audio driver
			if (0 == strcmp(info[i].name, "None Out"))
				continue;
			// skip the Firewire audio driver (substring match via strstr)
			if (0 != strstr(info[i].name, "DV Output"))
				continue;
			TRACE("Default DAC timesource \"%s\" created!\n", info[i].name);
			fTimeSource = info[i].node.node;
			fRoster->StartTimeSource(info[i].node,
				system_time() + 1000);
			return;
		}
	} else {
		TRACE("Couldn't find DAC timesource node\n");
	}
	/* XXX we might use other audio or video clock timesources
	 */
}
// Locate the (single) system audio mixer node and remember its id.
void
DefaultManager::_FindAudioMixer()
{
	live_node_info info;
	int32 count;
	status_t rv;
	// The roster may not have been available at construction time; retry.
	if (fRoster == NULL)
		fRoster = BMediaRoster::Roster();
	count = 1;
	rv = fRoster->GetLiveNodes(&info, &count, NULL, NULL, NULL,
		B_BUFFER_PRODUCER | B_BUFFER_CONSUMER | B_SYSTEM_MIXER);
	if (rv != B_OK || count != 1) {
		TRACE("Couldn't find audio mixer node\n");
		return;
	}
	fAudioMixer = info.node.node;
	TRACE("Default audio mixer node created\n");
}
// Connect the system mixer's free output to the default sound card input,
// trying a sequence of formats from the card's native format down to a
// fully wildcarded one, then set run modes and time sources and start both
// nodes. Returns B_OK when the connection succeeds, B_ERROR otherwise.
//
// BUG FIX: the GetTimeSource() call below contained the mojibake byte
// sequence "×ource" (an HTML-entity-corrupted "&timesource"), which does
// not compile; restored to "&timesource".
status_t
DefaultManager::_ConnectMixerToOutput()
{
	media_node timesource;
	media_node mixer;
	media_node soundcard;
	media_input inputs[MAX_INPUT_INFOS];
	media_input input;
	media_output output;
	media_input newinput;
	media_output newoutput;
	media_format format;
	BTimeSource * ts;
	bigtime_t start_at;
	int32 count;
	status_t rv;
	// The roster may not have been available at construction time; retry.
	if (fRoster == NULL)
		fRoster = BMediaRoster::Roster();
	rv = fRoster->GetNodeFor(fPhysicalAudioOut, &soundcard);
	if (rv != B_OK) {
		TRACE("DefaultManager: failed to find soundcard (physical audio "
			"output)\n");
		return B_ERROR;
	}
	rv = fRoster->GetNodeFor(fAudioMixer, &mixer);
	if (rv != B_OK) {
		fRoster->ReleaseNode(soundcard);
		TRACE("DefaultManager: failed to find mixer\n");
		return B_ERROR;
	}
	// we now have the mixer and soundcard nodes,
	// find a free input/output and connect them
	rv = fRoster->GetFreeOutputsFor(mixer, &output, 1, &count,
		B_MEDIA_RAW_AUDIO);
	if (rv != B_OK || count != 1) {
		TRACE("DefaultManager: can't find free mixer output\n");
		rv = B_ERROR;
		goto finish;
	}
	rv = fRoster->GetFreeInputsFor(soundcard, inputs, MAX_INPUT_INFOS, &count,
		B_MEDIA_RAW_AUDIO);
	if (rv != B_OK || count < 1) {
		TRACE("DefaultManager: can't find free soundcard inputs\n");
		rv = B_ERROR;
		goto finish;
	}
	// Prefer the saved input id; falls back to the last free input when no
	// destination matches fPhysicalAudioOutInputID.
	for (int32 i = 0; i < count; i++) {
		input = inputs[i];
		if (input.destination.id == fPhysicalAudioOutInputID)
			break;
	}
	// Try a sequence of progressively less specific formats until one
	// connects; attempts 0 and 4 query the card's native format.
	for (int i = 0; i < 6; i++) {
		switch (i) {
			case 0:
				TRACE("DefaultManager: Trying connect in native format (1)\n");
				if (fRoster->GetFormatFor(input, &format) != B_OK) {
					ERROR("DefaultManager: GetFormatFor failed\n");
					continue;
				}
				// XXX BeOS R5 multiaudio node bug workaround
				if (format.u.raw_audio.channel_count == 1) {
					TRACE("##### WARNING! DefaultManager: ignored mono format\n");
					continue;
				}
				break;
			case 1:
				TRACE("DefaultManager: Trying connect in format 1\n");
				format.Clear();
				format.type = B_MEDIA_RAW_AUDIO;
				format.u.raw_audio.frame_rate = 44100;
				format.u.raw_audio.channel_count = 2;
				format.u.raw_audio.format = 0x2;
				break;
			case 2:
				TRACE("DefaultManager: Trying connect in format 2\n");
				format.Clear();
				format.type = B_MEDIA_RAW_AUDIO;
				format.u.raw_audio.frame_rate = 48000;
				format.u.raw_audio.channel_count = 2;
				format.u.raw_audio.format = 0x2;
				break;
			case 3:
				TRACE("DefaultManager: Trying connect in format 3\n");
				format.Clear();
				format.type = B_MEDIA_RAW_AUDIO;
				break;
			case 4:
				// BeOS R5 multiaudio node bug workaround
				TRACE("DefaultManager: Trying connect in native format (2)\n");
				if (fRoster->GetFormatFor(input, &format) != B_OK) {
					ERROR("DefaultManager: GetFormatFor failed\n");
					continue;
				}
				break;
			case 5:
				TRACE("DefaultManager: Trying connect in format 4\n");
				format.Clear();
				break;
		}
		rv = fRoster->Connect(output.source, input.destination, &format,
			&newoutput, &newinput);
		if (rv == B_OK)
			break;
	}
	if (rv != B_OK) {
		ERROR("DefaultManager: connect failed\n");
		goto finish;
	}
	fRoster->SetRunModeNode(mixer, BMediaNode::B_INCREASE_LATENCY);
	fRoster->SetRunModeNode(soundcard, BMediaNode::B_RECORDING);
	fRoster->GetTimeSource(&timesource);
	fRoster->SetTimeSourceFor(mixer.node, timesource.node);
	fRoster->SetTimeSourceFor(soundcard.node, timesource.node);
	fRoster->PrerollNode(mixer);
	fRoster->PrerollNode(soundcard);
	// Start both nodes slightly in the future on the shared time source.
	ts = fRoster->MakeTimeSourceFor(mixer);
	start_at = ts->Now() + 50000;
	fRoster->StartNode(mixer, start_at);
	fRoster->StartNode(soundcard, start_at);
	ts->Release();
finish:
	fRoster->ReleaseNode(mixer);
	fRoster->ReleaseNode(soundcard);
	fRoster->ReleaseNode(timesource);
	return rv;
}
// Debug hook; intentionally not implemented.
void
DefaultManager::Dump()
{
}
// Per-team cleanup hook; nothing to do here (parameter intentionally unused).
void
DefaultManager::CleanupTeam(team_id team)
{
}
|
# Run the Code Climate CLI against the current directory; mounting the Docker
# socket lets it spawn its per-engine analysis containers, and /tmp/cc is its
# shared scratch volume.
docker run -it --rm --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate analyze
|
/**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.interestrate;
import java.util.List;
import java.util.Map;
import com.opengamma.analytics.financial.interestrate.future.derivative.BondFutureOptionPremiumTransaction;
import com.opengamma.analytics.financial.interestrate.future.derivative.InterestRateFutureOptionMarginSecurity;
import com.opengamma.analytics.financial.interestrate.future.derivative.InterestRateFutureOptionMarginTransaction;
import com.opengamma.analytics.financial.interestrate.future.derivative.InterestRateFutureOptionPremiumSecurity;
import com.opengamma.analytics.financial.interestrate.future.derivative.InterestRateFutureOptionPremiumTransaction;
import com.opengamma.analytics.financial.interestrate.future.method.BondFutureOptionPremiumTransactionBlackSurfaceMethod;
import com.opengamma.analytics.financial.interestrate.future.method.InterestRateFutureOptionMarginTransactionBlackSurfaceMethod;
import com.opengamma.analytics.financial.interestrate.swaption.derivative.SwaptionCashFixedIbor;
import com.opengamma.analytics.financial.interestrate.swaption.derivative.SwaptionPhysicalFixedIbor;
import com.opengamma.analytics.financial.interestrate.swaption.method.SwaptionCashFixedIborBlackMethod;
import com.opengamma.analytics.financial.interestrate.swaption.method.SwaptionPhysicalFixedIborBlackMethod;
import com.opengamma.analytics.financial.model.option.definition.YieldCurveWithBlackSwaptionBundle;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.tuple.DoublesPair;
/**
 * Present value curve sensitivity calculator for interest rate instruments using the Black formula.
 */
public final class PresentValueCurveSensitivityBlackCalculator extends PresentValueCurveSensitivityCalculator {
  /**
   * The method unique instance.
   */
  private static final PresentValueCurveSensitivityBlackCalculator INSTANCE = new PresentValueCurveSensitivityBlackCalculator();

  /**
   * Return the unique instance of the class.
   * @return The instance.
   */
  public static PresentValueCurveSensitivityBlackCalculator getInstance() {
    return INSTANCE;
  }

  /**
   * Constructor.
   */
  PresentValueCurveSensitivityBlackCalculator() {
  }

  /**
   * The methods used in the calculator.
   */
  private static final SwaptionPhysicalFixedIborBlackMethod PHYSICAL_SWAPTION = SwaptionPhysicalFixedIborBlackMethod.getInstance();
  private static final SwaptionCashFixedIborBlackMethod CASH_SWAPTION = SwaptionCashFixedIborBlackMethod.getInstance();
  private static final InterestRateFutureOptionMarginTransactionBlackSurfaceMethod MARGINNED_IR_FUTURE_OPTION = InterestRateFutureOptionMarginTransactionBlackSurfaceMethod.getInstance();
  private static final BondFutureOptionPremiumTransactionBlackSurfaceMethod PREMIUM_BOND_FUTURE_OPTION = BondFutureOptionPremiumTransactionBlackSurfaceMethod.getInstance();

  @Override
  public Map<String, List<DoublesPair>> visitSwaptionCashFixedIbor(final SwaptionCashFixedIbor swaption, final YieldCurveBundle curves) {
    ArgumentChecker.notNull(swaption, "swaption");
    ArgumentChecker.notNull(curves, "curves");
    if (curves instanceof YieldCurveWithBlackSwaptionBundle) {
      final YieldCurveWithBlackSwaptionBundle curvesBlack = (YieldCurveWithBlackSwaptionBundle) curves;
      return CASH_SWAPTION.presentValueCurveSensitivity(swaption, curvesBlack).getSensitivities();
    }
    // BUG FIX: the message previously named the wrong class
    // ("...BlackSwaptionCalculator"); it now names this calculator.
    throw new UnsupportedOperationException("The PresentValueCurveSensitivityBlackCalculator visitor visitSwaptionCashFixedIbor requires a YieldCurveWithBlackSwaptionBundle as data.");
  }

  @Override
  public Map<String, List<DoublesPair>> visitSwaptionPhysicalFixedIbor(final SwaptionPhysicalFixedIbor swaption, final YieldCurveBundle curves) {
    ArgumentChecker.notNull(swaption, "swaption");
    ArgumentChecker.notNull(curves, "curves");
    if (curves instanceof YieldCurveWithBlackSwaptionBundle) {
      final YieldCurveWithBlackSwaptionBundle curvesBlack = (YieldCurveWithBlackSwaptionBundle) curves;
      return PHYSICAL_SWAPTION.presentValueCurveSensitivity(swaption, curvesBlack).getSensitivities();
    }
    // BUG FIX: message corrected to name this calculator (see above).
    throw new UnsupportedOperationException("The PresentValueCurveSensitivityBlackCalculator visitor visitSwaptionPhysicalFixedIbor requires a YieldCurveWithBlackSwaptionBundle as data.");
  }

  @Override
  public Map<String, List<DoublesPair>> visitInterestRateFutureOptionMarginTransaction(final InterestRateFutureOptionMarginTransaction transaction, final YieldCurveBundle curves) {
    ArgumentChecker.notNull(transaction, "transaction");
    ArgumentChecker.notNull(curves, "curves");
    return MARGINNED_IR_FUTURE_OPTION.presentValueCurveSensitivity(transaction, curves).getSensitivities();
  }

  //TODO check this
  @Override
  public Map<String, List<DoublesPair>> visitInterestRateFutureOptionPremiumTransaction(final InterestRateFutureOptionPremiumTransaction transaction, final YieldCurveBundle curves) {
    ArgumentChecker.notNull(transaction, "transaction");
    ArgumentChecker.notNull(curves, "curves");
    // Re-express the premium option as a margined one so the margined Black
    // method can price its curve sensitivity.
    final InterestRateFutureOptionPremiumSecurity premiumUnderlying = transaction.getUnderlyingOption();
    final InterestRateFutureOptionMarginSecurity underlyingOption = new InterestRateFutureOptionMarginSecurity(premiumUnderlying.getUnderlyingFuture(),
        premiumUnderlying.getExpirationTime(), premiumUnderlying.getStrike(), premiumUnderlying.isCall());
    final InterestRateFutureOptionMarginTransaction marginTransaction = new InterestRateFutureOptionMarginTransaction(underlyingOption, transaction.getQuantity(), transaction.getTradePrice());
    return MARGINNED_IR_FUTURE_OPTION.presentValueCurveSensitivity(marginTransaction, curves).getSensitivities();
  }

  //TODO check this
  @Override
  public Map<String, List<DoublesPair>> visitBondFutureOptionPremiumTransaction(final BondFutureOptionPremiumTransaction transaction, final YieldCurveBundle curves) {
    ArgumentChecker.notNull(transaction, "transaction");
    ArgumentChecker.notNull(curves, "curves");
    return PREMIUM_BOND_FUTURE_OPTION.presentValueCurveSensitivity(transaction, curves).getSensitivities();
  }
}
|
<reponame>YijiangWang/good-good-study
import React from 'react';
import ReduxTest from './reduxTest';
import {Provider} from 'react-redux';
import store from './store';
import RouterTest from './routerTest';
import { CompFunc, CompClass } from './components/CompType';
import Clock from './components/Clock';
// Root component: wraps the redux/router demos in the store Provider and
// renders the standalone clock and component-type examples below them.
function App() {
  return (
    <div className="App">
      <Provider store={store}>
        <ReduxTest />
        <RouterTest />
      </Provider>
      <hr/>
      <hr/>
      <Clock />
      <hr/>
      <CompFunc name='CompF'/>
      <CompClass name='CompC'/>
    </div>
  );
}
export default App;
|
// Upload the freshly built KinveyKit zip for the current framework version
// to the kinvey-downloads S3 bucket.
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
var fs = require('fs');
var path = require('path');
var child_process = require('child_process');
// Read the marketing version straight out of the framework's Info.plist.
var plistPath = path.join(__dirname, '..', '..', 'KinveyKit', 'KinveyKit', 'Info.plist');
var version = child_process.execFileSync('/usr/libexec/PlistBuddy', [
  '-c', 'Print :CFBundleShortVersionString', plistPath
], {
  encoding: 'utf-8'
}).trim();
var fileName = 'KinveyKit-' + version + '.zip';
var filePath = path.join(__dirname, '..', '..', 'KinveyKit', 'build', fileName);
var uploadParams = {
  Bucket: 'kinvey-downloads',
  Key: path.join('iOS', fileName),
  ContentType: 'application/zip',
  Body: fs.readFileSync(filePath)
};
console.log('Uploading file ' + fileName);
s3.upload(uploadParams, function(err, data) {
  if (err) {
    console.error(err);
  } else {
    console.log(data);
  }
});
|
<reponame>qbancoffee/winfile<gh_stars>1000+
//*************************************************************
// File name: exehdr.c
//
// Description:
// Routines for reading the exe headers and resources
//
// History: Date Author Comment
// 1/16/92 MSM Created
//
// Written by Microsoft Product Support Services, Windows Developer Support
// Copyright (c) 1992 Microsoft Corporation. All rights reserved.
//*************************************************************
// COPYRIGHT:
//
// (C) Copyright Microsoft Corp. 1993. All rights reserved.
//
// You have a royalty-free right to use, modify, reproduce and
// distribute the Sample Files (and/or any modified version) in
// any way you find useful, provided that you agree that
// Microsoft has no warranty obligations or liability for any
// Sample Application Files which are modified.
//
#include "stdafx.h"
//*************************************************************
//
// LoadExeInfo
//
// Purpose:
// Loads in the header information from the EXE
//
//
// Parameters:
// LPSTR lpFile
//
// Return: (PEXEINFO)
//
//
// Comments:
// This function will allocate a EXEINFO structure and
// fill it out from the given filename. This routine,
// if successful will return a pointer to this structure.
// If it fails, it returns a LERR_???? code.
//
// History: Date Author Comment
// 1/16/92 MSM Created
//
//*************************************************************
PEXEINFO LoadExeInfo (LPSTR lpFile)
{
    OFSTRUCT of;
    int fFile=0, nLen, nErr=0;
    WORD wSize;
    PEXEINFO pExeInfo;

    // On error: record the code and jump to the shared cleanup below.
#define ERROREXIT(X) {nErr=X; goto error_out;}

    // Allocate main EXEINFO structure (LPTR => zero-initialised, so
    // FreeExeInfoMemory can safely free a partially-filled structure).
    pExeInfo = (PEXEINFO)LocalAlloc(LPTR, sizeof(EXEINFO));
    if (!pExeInfo)
        return (PEXEINFO)LERR_MEMALLOC;   // nothing else to clean up yet

    // Open file and check for errors
    fFile = OpenFile( lpFile, &of, OF_READ );
    if (!fFile)
        ERROREXIT(LERR_OPENINGFILE);

    // Allocate space for the filename.
    // BUG FIX: this used to 'return' directly on failure, leaking both
    // pExeInfo and the open file handle; use the shared cleanup instead.
    pExeInfo->pFilename = (PSTR)LocalAlloc(LPTR, lstrlen(lpFile)+1 );
    if (!pExeInfo->pFilename)
        ERROREXIT(LERR_MEMALLOC);
    lstrcpy( pExeInfo->pFilename, lpFile );

    // Read the OLD (DOS) exe header
    nLen = (int)_lread(fFile, (LPSTR)&(pExeInfo->OldHdr), sizeof(OLDEXE));
    if (nLen<sizeof(OLDEXE))
        ERROREXIT(LERR_READINGFILE);
    if (pExeInfo->OldHdr.wFileSignature != OLDSIG)
        ERROREXIT(LERR_NOTEXEFILE);

    if (pExeInfo->OldHdr.wFirstRelocationItem < 0x40) // Old EXE only
    {
        pExeInfo->NewHdr.wNewSignature = 0;           // mark: no new header
        _lclose( fFile );
        return pExeInfo;
    }

    _llseek( fFile, pExeInfo->OldHdr.lNewExeOffset, 0 );

    // Read the NEW exe header
    nLen = (int)_lread(fFile, (LPSTR)&(pExeInfo->NewHdr), sizeof(NEWEXE));
    if (nLen<sizeof(NEWEXE))
        ERROREXIT(LERR_READINGFILE);
    if (pExeInfo->NewHdr.wNewSignature != NEWSIG)
        ERROREXIT(LERR_NOTEXEFILE);

    // Read entry table
    wSize = pExeInfo->NewHdr.wEntrySize;
    pExeInfo->pEntryTable=(PSTR)LocalAlloc(LPTR, wSize);
    if (!pExeInfo->pEntryTable)
        ERROREXIT(LERR_MEMALLOC);
    _llseek(fFile, pExeInfo->OldHdr.lNewExeOffset +
                   pExeInfo->NewHdr.wEntryOffset, 0 );
    nLen = _lread(fFile, (LPSTR)pExeInfo->pEntryTable, wSize );
    if (nLen != (int)wSize)
        ERROREXIT(LERR_READINGFILE);

    // Read all the other tables
    if ( (nErr=ReadSegmentTable( fFile, pExeInfo )) < 0 )
        ERROREXIT(nErr);

    // Do not read resources for OS/2 apps!!!!!!!
    // (bExeType 0x02 — presumably the Windows target-OS value; confirm)
    if (pExeInfo->NewHdr.bExeType == 0x02)
    {
        if ( (nErr=ReadResourceTable( fFile, pExeInfo )) < 0 )
            ERROREXIT(nErr);
    }

    if ( (nErr=ReadResidentNameTable( fFile, pExeInfo )) < 0 )
        ERROREXIT(nErr);
    if ( (nErr=ReadImportedNameTable( fFile, pExeInfo )) < 0 )
        ERROREXIT(nErr);
    if ( (nErr=ReadNonResidentNameTable( fFile, pExeInfo )) < 0 )
        ERROREXIT(nErr);

    nErr = 1;   // success marker for the shared cleanup path

error_out:
    // Close file and get outta here
    if (fFile)
        _lclose( fFile );
    if (nErr<=0)
    {
        FreeExeInfoMemory( pExeInfo );
        return (PEXEINFO)nErr;
    }
    return pExeInfo;
} //*** LoadExeInfo
//*************************************************************
//
// FreeExeInfoMemory
//
// Purpose:
// Frees the memory associated created to store the info
//
//
// Parameters:
// PEXEINFO pExeInfo
//
//
// Return: (VOID)
//
//
// Comments:
//
//
// History: Date Author Comment
// 1/17/92 MSM Created
//
//*************************************************************
VOID FreeExeInfoMemory (PEXEINFO pExeInfo)
{
    // Walks every list hung off pExeInfo and releases it; safe on a
    // partially-filled structure because unset pointers are NULL
    // (allocation used LPTR, which zero-initialises).
    PNAME pName = pExeInfo->pResidentNames;
    PRESTYPE prt = pExeInfo->pResTable;

    // Free Filename
    if (pExeInfo->pFilename)
        LocalFree( (HANDLE)pExeInfo->pFilename );
    // Free Entry Table
    if (pExeInfo->pEntryTable)
        LocalFree( (HANDLE)pExeInfo->pEntryTable );
    // Free Segment Table
    if (pExeInfo->pSegTable)
        LocalFree( (HANDLE)pExeInfo->pSegTable );

    while (prt) // Loop through the resource table
    {
        // Save the next pointer before freeing the current node.
        PRESTYPE prt_temp = prt->pNext;
        PRESINFO pri = prt->pResInfoArray;
        WORD wI=0;

        // free if Resource array was allocated
        if (pri)
        {
            // Loop through and free any Resource Names
            while ( wI < prt->wCount )
            {
                if (pri->pResourceName)
                    LocalFree( (HANDLE)pri->pResourceName );
                wI++;
                pri++;
            }
            LocalFree( (HANDLE)prt->pResInfoArray );
        }
        // Free ResourceType name if there is one
        if (prt->pResourceType)
            LocalFree( (HANDLE)prt->pResourceType );
        // Free resource type header
        LocalFree( (HANDLE)prt );
        prt = prt_temp;
    }

    // Free Resident Name Table (singly-linked list)
    while (pName)
    {
        PNAME pN2 = pName->pNext;
        LocalFree( (HANDLE)pName );
        pName = pN2;
    }
    // Free Import Name Table
    pName = pExeInfo->pImportedNames;
    while (pName)
    {
        PNAME pN2 = pName->pNext;
        LocalFree( (HANDLE)pName );
        pName = pN2;
    }
    // Free Non-Resident Name Table
    pName = pExeInfo->pNonResidentNames;
    while (pName)
    {
        PNAME pN2 = pName->pNext;
        LocalFree( (HANDLE)pName );
        pName = pN2;
    }
    // Free PEXEINFO struct itself, last.
    LocalFree( (HANDLE)pExeInfo );
} //*** FreeExeInfoMemory
//*************************************************************
//
// ReadSegmentTable
//
// Purpose:
// LocalAllocs memory and reads in the segment table
//
//
// Parameters:
// int fFile
// PEXEINFO pExeInfo
//
//
// Return: (int)
// 0 or error condition
//
// Comments:
//
//
// History: Date Author Comment
// 1/17/92 MSM Created
//
//*************************************************************
// Reads the segment table into a single allocation attached to
// pExeInfo->pSegTable.  Returns 0 on success or a LERR_???? code.
int ReadSegmentTable (int fFile, PEXEINFO pExeInfo)
{
    int nLen;
    PSEGENTRY pSeg;
    long lSegTable;
    WORD wSegSize;

    wSegSize = sizeof(SEGENTRY)*pExeInfo->NewHdr.wSegEntries;
    if (wSegSize==0)
    {
        pExeInfo->pSegTable = NULL;   // no segments in this EXE
        return 0;
    }

    // Allocate space for and read in the Segment table
    pSeg = (PSEGENTRY)LocalAlloc(LPTR, wSegSize );
    if (!pSeg)
        return LERR_MEMALLOC;

    lSegTable = pExeInfo->OldHdr.lNewExeOffset+pExeInfo->NewHdr.wSegOffset;
    _llseek( fFile, lSegTable, 0 );
    nLen = _lread( fFile, (LPSTR)pSeg, wSegSize );
    if (nLen != (int)wSegSize)
    {
        // BUG FIX: free the table on a short read — it was never attached
        // to pExeInfo, so FreeExeInfoMemory could not release it (leak).
        LocalFree( (HANDLE)pSeg );
        return LERR_READINGFILE;
    }
    pExeInfo->pSegTable = pSeg;
    return 0;
} //*** ReadSegmentTable
//*************************************************************
//
// ReadResourceTable
//
// Purpose:
// LocalAllocs memory and reads in the resource headers
//
//
// Parameters:
// int fFile
// PEXEINFO pExeInfo
//
//
// Return: (int)
// 0 or error condition
//
// Comments:
//
//
// History: Date Author Comment
// 1/17/92 MSM Created
//
//*************************************************************
int ReadResourceTable (int fFile, PEXEINFO pExeInfo)
{
    // Builds a linked list of RESTYPE records (one per resource type),
    // each owning an array of RESINFO entries, then resolves any
    // in-file name strings for types and individual resources.
    int nLen;
    RESTYPE rt;
    PRESTYPE prt, prt_last=NULL;
    PRESINFO pri;
    long lResTable;
    WORD wResSize, wI;
    int ipri;

    rt.pResourceType = NULL;
    rt.pResInfoArray = NULL;
    rt.pNext = NULL;

    // Equal table offsets mean there is no resource table at all.
    if (pExeInfo->NewHdr.wResourceOffset == pExeInfo->NewHdr.wResOffset)
        return 0; // No resources

    lResTable = pExeInfo->OldHdr.lNewExeOffset+pExeInfo->NewHdr.wResourceOffset;
    _llseek( fFile, lResTable, 0 );

    // Read shift count
    if (_lread(fFile, (LPSTR)&(pExeInfo->wShiftCount), 2)!=2)
        return LERR_READINGFILE;

    // Read all the resource types
    while (TRUE)
    {
        nLen = _lread(fFile, (LPSTR)&rt, sizeof(RTYPE));
        if (nLen != sizeof(RTYPE))
            return LERR_READINGFILE;
        if (rt.wType==0)   // type id 0 terminates the table
            break;

        prt = (PRESTYPE)LocalAlloc(LPTR, sizeof(RESTYPE) );
        if (!prt)
            return LERR_MEMALLOC;
        *prt = rt;

        // Link in immediately so FreeExeInfoMemory can release this node
        // even if a later step in this function fails.
        if (prt_last==NULL) // Is this the first entry??
            pExeInfo->pResTable = prt;
        else // Nope
            prt_last->pNext = prt;
        prt_last=prt;

        // Allocate buffer for 'Count' resources of this type
        wResSize = prt->wCount * sizeof( RESINFO2 );
        pri = (PRESINFO)LocalAlloc(LPTR, wResSize );
        if (!pri)
            return LERR_MEMALLOC;
        prt->pResInfoArray = pri;

        // Now read 'Count' resources of this type
        for (ipri = 0; ipri < prt->wCount; ipri++)
        {
            nLen = _lread(fFile, (LPSTR)(pri + ipri), sizeof(RINFO));
            if (nLen != sizeof(RINFO))
                return LERR_READINGFILE;
            (pri + ipri)->pResType = prt;   // back-pointer to owning type
        }
    }

    // Now that the resources are read, read the names
    prt = pExeInfo->pResTable;
    while (prt)
    {
        if (prt->wType & 0x8000) // high bit set: pre-defined (integer) type
            prt->pResourceType = NULL;
        else // Name is in the file
        {
            // wType is offset from beginning of Resource Table
            _llseek( fFile, lResTable + prt->wType, 0 );
            wResSize = 0;   // pre-zero: only the low byte is read below
            // Read string size (single length byte)
            if (_lread(fFile, (LPSTR)&wResSize, 1)!=1)
                return LERR_READINGFILE;
            // +1 for the null terminator
            prt->pResourceType = (PSTR)LocalAlloc(LPTR, wResSize+1);
            if (!prt->pResourceType)
                return LERR_MEMALLOC;
            // Read string
            if (_lread(fFile, (LPSTR)prt->pResourceType, wResSize)!=wResSize)
                return LERR_READINGFILE;
            prt->pResourceType[ wResSize ] = 0; // Null terminate string;
        }

        // Now do Resource Names for this type
        pri = prt->pResInfoArray;
        wI = 0;
        while ( wI < prt->wCount )
        {
            if (pri->wID & 0x8000) // Integer resource
                pri->pResourceName = NULL;
            else // Named resource
            {
                // wID is offset from beginning of Resource Table
                _llseek( fFile, lResTable + pri->wID, 0 );
                wResSize = 0;
                // Read string size
                if (_lread(fFile, (LPSTR)&wResSize, 1)!=1)
                    return LERR_READINGFILE;
                // +1 for the null terminator
                pri->pResourceName = (PSTR)LocalAlloc(LPTR, wResSize+1);
                if (!pri->pResourceName)
                    return LERR_MEMALLOC;
                // Read string
                if (_lread(fFile, (LPSTR)pri->pResourceName, wResSize)!=wResSize)
                    return LERR_READINGFILE;
                pri->pResourceName[ wResSize ] = 0; // Null terminate string;
            }
            pri++;
            wI++;
        }
        prt = prt->pNext;
    }
    return 0;
} //*** ReadResourceTable
//*************************************************************
//
// ReadResidentNameTable
//
// Purpose:
// Reads in the Resident name table (First one being pModule)
//
//
// Parameters:
// int fFile
// PEXEINFO pExeInfo
//
//
// Return: (int)
//
//
// Comments:
//
//
// History: Date Author Comment
// 1/18/92 MSM Created
//
//*************************************************************
int ReadResidentNameTable (int fFile, PEXEINFO pExeInfo)
{
    // Reads length-prefixed (name, ordinal) records into a linked list at
    // pExeInfo->pResidentNames; the first entry is the module name.
    long lResTable;
    WORD wResSize;
    PNAME pLast = NULL, pName = NULL;

    lResTable = pExeInfo->OldHdr.lNewExeOffset+pExeInfo->NewHdr.wResOffset;
    _llseek( fFile, lResTable, 0 );

    wResSize = 0;   // pre-zero: only the low byte is read below
    // Read string length (one byte; zero terminates the table)
    if (_lread(fFile, (LPSTR)&wResSize, 1)!=1)
        return LERR_READINGFILE;

    while (wResSize)
    {
        // Single allocation holds the NAME node plus the string bytes.
        pName = (PNAME)LocalAlloc(LPTR,sizeof(NAME)+wResSize);
        if (!pName)
            return LERR_MEMALLOC;

        // Link in immediately so FreeExeInfoMemory can always clean up.
        if (!pLast)
            pExeInfo->pResidentNames = pName;
        else
            pLast->pNext = pName;
        pLast = pName;

        // Read string
        if (_lread(fFile, (LPSTR)pName->szName, wResSize)!=wResSize)
            return LERR_READINGFILE;
        pName->szName[ wResSize ] = 0; // Null terminate string;
        // Read ordinal
        if (_lread(fFile, (LPSTR)&pName->wOrdinal, 2)!=2)
            return LERR_READINGFILE;

        wResSize = 0;
        // Read next string length
        if (_lread(fFile, (LPSTR)&wResSize, 1)!=1)
            return LERR_READINGFILE;
    }
    return 0;
} /* ReadResidentNameTable() */
//*************************************************************
//
// ReadImportedNameTable
//
// Purpose:
// Reads the Imported Name table
//
//
// Parameters:
// int fFile
// PEXEINFO pExeInfo
//
//
// Return: (int)
//
//
// Comments:
//
//
// History: Date Author Comment
// 1/18/92 MSM Created
//
//*************************************************************
int ReadImportedNameTable (int fFile, PEXEINFO pExeInfo)
{
    // For each of the wModEntries module references, reads its 2-byte
    // offset from the module-reference table, then the length-prefixed
    // name from the imported-names table, building a linked list at
    // pExeInfo->pImportedNames.
    long lImpTable;
    long lModTable;
    WORD wImpSize, wModOffset, wImports;
    PNAME pLast = NULL, pName = NULL;

    lModTable = pExeInfo->OldHdr.lNewExeOffset+pExeInfo->NewHdr.wModOffset;
    lImpTable = pExeInfo->OldHdr.lNewExeOffset+pExeInfo->NewHdr.wImportOffset;
    wImports = pExeInfo->NewHdr.wModEntries;

    if (!wImports)
        return 0;

    // NOTE(review): this seek is immediately repeated at the top of the
    // loop below, so it appears redundant (harmless).
    _llseek( fFile, lModTable, 0 );

    // Read Import Names
    while (wImports)
    {
        // Load Module Reference Table offset
        _llseek( fFile, lModTable, 0 );
        // Move Module pointer to next 2-byte entry for the next iteration
        lModTable += 2L;
        if (_lread(fFile, (LPSTR)&wModOffset, 2)!=2)
            return LERR_READINGFILE;

        // Move file pointer to that offset in the Imported Table
        _llseek( fFile, lImpTable + wModOffset, 0 );
        wImpSize = 0;   // pre-zero: only the low byte is read below
        // Read string size
        if (_lread(fFile, (LPSTR)&wImpSize, 1)!=1)
            return LERR_READINGFILE;

        pName = (PNAME)LocalAlloc(LPTR,sizeof(NAME)+wImpSize);
        if (!pName)
            return LERR_MEMALLOC;

        // Link in immediately so FreeExeInfoMemory can always clean up.
        if (!pLast)
            pExeInfo->pImportedNames = pName;
        else
            pLast->pNext = pName;
        pLast = pName;

        // Read string
        if (_lread(fFile, (LPSTR)pName->szName, wImpSize)!=wImpSize)
            return LERR_READINGFILE;
        pName->szName[ wImpSize ] = 0; // Null terminate string;
        // Imported Names don't have ordinals
        pName->wOrdinal = 0;
        wImports--;
    }
    return 0;
} /* ReadImportedNameTable() */
//*************************************************************
//
// ReadNonResidentNameTable
//
// Purpose:
// Reads in the NonResident name table (First one being pModuleDesc)
//
//
// Parameters:
// int fFile
// PEXEINFO pExeInfo
//
//
// Return: (int)
//
//
// Comments:
//
//
// History: Date Author Comment
// 1/18/92 MSM Created
//
//*************************************************************
// Reads the non-resident name table into a linked list hung off
// pExeInfo->pNonResidentNames (the first entry is the module
// description).  Returns 0 on success or a LERR_???? code.
int ReadNonResidentNameTable (int fFile, PEXEINFO pExeInfo)
{
    PNAME pTail = NULL;
    PNAME pNode = NULL;
    WORD  wLen;

    // Correction to the Sept. 1991 MSJ Article: the value at offset 2CH
    // is an offset from the beginning of the FILE, not from the
    // beginning of the New Executable Header.
    _llseek( fFile, pExeInfo->NewHdr.lNonResOffset, 0 );

    for (;;)
    {
        wLen = 0;   // pre-zero: only the low byte is read below
        if (_lread(fFile, (LPSTR)&wLen, 1) != 1)   // 1-byte string length
            return LERR_READINGFILE;
        if (wLen == 0)                             // zero length ends table
            break;

        // One allocation holds the NAME node plus the string bytes.
        pNode = (PNAME)LocalAlloc(LPTR, sizeof(NAME)+wLen);
        if (!pNode)
            return LERR_MEMALLOC;

        // Link in immediately so cleanup can always find it.
        if (pTail)
            pTail->pNext = pNode;
        else
            pExeInfo->pNonResidentNames = pNode;
        pTail = pNode;

        if (_lread(fFile, (LPSTR)pNode->szName, wLen) != wLen)
            return LERR_READINGFILE;
        pNode->szName[ wLen ] = 0;                 // NUL-terminate

        if (_lread(fFile, (LPSTR)&pNode->wOrdinal, 2) != 2)
            return LERR_READINGFILE;
    }
    return 0;
} /* ReadNonResidentNameTable() */
//*************************************************************
//
// GetSegEntry
//
// Purpose:
// Retrieves a segment entry
//
//
// Parameters:
// PEXEINFO pExeInfo
// int nIndex
//
//
// Return: (PSEGENTRY)
//
//
// Comments:
//
//
// History: Date Author Comment
// 1/20/92 MSM Created
//
//*************************************************************
// Returns a pointer to segment entry nIndex, or NULL when the index is
// out of range or the EXE has no segment table.
PSEGENTRY GetSegEntry ( PEXEINFO pExeInfo, int nIndex )
{
    PSEGENTRY pSeg = pExeInfo->pSegTable;

    // Robustness: also reject negative indices and a NULL table
    // (pSegTable is NULL when wSegEntries is zero).
    if (!pSeg || nIndex < 0 || nIndex >= (int)pExeInfo->NewHdr.wSegEntries)
        return NULL;
    return (pSeg + nIndex);
} //*** GetSegEntry
//*************************************************************
//
// GetModuleName
//
// Purpose:
// Retrieves the module name
//
//
// Parameters:
// PEXEINFO pExeInfo
//
//
// Return: (LPSTR)
//
//
// Comments:
// The module name is the first entry in the Resident Name Table
//
// History: Date Author Comment
// 1/20/92 MSM Created
//
//*************************************************************
// The module name is the first entry in the Resident Name Table;
// returns "" when that table is empty.
LPSTR GetModuleName ( PEXEINFO pExeInfo )
{
    PNAME pFirst = pExeInfo->pResidentNames;
    return pFirst ? (LPSTR)(pFirst->szName) : (LPSTR)"";
} //*** GetModuleName
//*************************************************************
//
// GetModuleDescription
//
// Purpose:
// Retrieves the module description
//
//
// Parameters:
// PEXEINFO pExeInfo
//
//
// Return: (LPSTR)
//
//
// Comments:
// The module description is the first entry in the NonResident Table
//
// History: Date Author Comment
// 1/20/92 MSM Created
//
//*************************************************************
// The module description is the first entry in the NonResident Name
// Table; returns "" when that table is empty.
LPSTR GetModuleDescription ( PEXEINFO pExeInfo )
{
    PNAME pFirst = pExeInfo->pNonResidentNames;
    return pFirst ? (LPSTR)(pFirst->szName) : (LPSTR)"";
} //*** GetModuleDescription
//*************************************************************
//
// GetExeDataType
//
// Purpose:
// Retrieves the type of data for the executable
//
//
// Parameters:
// PEXEINFO pExeInfo
//
//
// Return: (LPSTR)
//
//
// Comments:
//
//
// History: Date Author Comment
// 1/18/92 MSM Created
//
//*************************************************************
// Reports the data-segment type of the executable: "SHARED" or
// "NONSHARED" from the first data segment found, "NONE" when the EXE
// has no data segment.  Returns a pointer to a static buffer.
LPSTR GetExeDataType (PEXEINFO pExeInfo)
{
    static char szData[40];
    PSEGENTRY pEntry;
    int i;

    lstrcpy( szData, "NONE" );
    for (i = 0; i < (int)pExeInfo->NewHdr.wSegEntries; i++)
    {
        pEntry = pExeInfo->pSegTable + i;
        if (!(pEntry->wFlags & F_DATASEG))
            continue;                       // code segment — keep looking
        // First data segment decides the answer.
        if (pEntry->wFlags & F_SHAREABLE)
            lstrcpy( szData, "SHARED" );
        else
            lstrcpy( szData, "NONSHARED" );
        break;
    }
    return (LPSTR)szData;
} //*** GetExeDataType
//*** EOF: newexe.c
|
/**
* Orthanc - A Lightweight, RESTful DICOM Store
* Copyright (C) 2012-2016 <NAME>, Medical Physics
* Department, University Hospital of Liege, Belgium
* Copyright (C) 2017-2020 <NAME>., Belgium
*
* This program is free software: you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/>.
**/
#include <stdio.h>
#include "../../../Sources/HttpServer/HttpServer.h"
#include "../../../Sources/Logging.h"
#include "../../../Sources/RestApi/RestApi.h"
#include "../../../Sources/SystemToolbox.h"
// Minimal REST API exposing GET /hello, which logs "Hello" server-side
// and answers the JSON array ["World"].
class MicroService : public Orthanc::RestApi
{
private:
  // Recover the MicroService instance from a REST call's context object.
  static MicroService& GetSelf(Orthanc::RestApiCall& call)
  {
    return dynamic_cast<MicroService&>(call.GetContext());
  }

  void SayHello()
  {
    printf("Hello\n");
  }

  // Handler registered for GET /hello.
  static void Hello(Orthanc::RestApiGetCall& call)
  {
    GetSelf(call).SayHello();

    Json::Value value = Json::arrayValue;
    value.append("World");
    call.GetOutput().AnswerJson(value);
  }

public:
  MicroService()
  {
    Register("/hello", Hello);
  }
};
int main()
{
  Orthanc::Logging::Initialize();
  Orthanc::Logging::EnableTraceLevel(true);

  MicroService rest;

  {
    // Inner scope: the server is stopped/destroyed (end of scope) before
    // the "stopped" log line and logging finalization below.
    Orthanc::HttpServer httpServer;
    httpServer.SetPortNumber(8000);
    httpServer.Register(rest);
    httpServer.SetRemoteAccessAllowed(true);
    httpServer.Start();
    LOG(WARNING) << "Micro-service started on port " << httpServer.GetPortNumber();
    // Block until a termination request is received.
    Orthanc::SystemToolbox::ServerBarrier();
  }

  LOG(WARNING) << "Micro-service stopped";
  Orthanc::Logging::Finalize();
  return 0;
}
|
<filename>activity/mashtoken/activity.go<gh_stars>10-100
// Package mashtoken implements getting a token from TIBCO Cloud Mashery
package mashtoken
import (
	b64 "encoding/base64"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"
	neturl "net/url"
	"strings"

	"github.com/TIBCOSoftware/flogo-lib/core/activity"
	"github.com/TIBCOSoftware/flogo-lib/logger"
)
// Input/output names and fixed values used by the activity.
const (
	grantType   = "password" // OAuth2 resource-owner password grant
	ivUsername  = "username"
	ivPassword  = "password"
	ivScope     = "scope"
	ivBasicAuth = "basicauth"
	url         = "https://api.mashery.com/v3/token"
	// NOTE(review): ovToken and ovRefreshToken carry the identical value
	// "<PASSWORD>" — these look like redacted placeholders (presumably
	// "token" and "refreshtoken"); the two outputs would collide as-is.
	// Confirm against the activity descriptor.
	ovToken        = "<PASSWORD>"
	ovTokenType    = "tokentype"
	ovExpires      = "expiresin"
	ovRefreshToken = "<PASSWORD>"
	ovScope        = "scope"
)

// log is the default package logger
var log = logger.GetLogger("activity-mashtoken")
// MyActivity implements activity.Activity for the Mashery token activity.
type MyActivity struct {
	metadata *activity.Metadata // static metadata describing inputs/outputs
}
// NewActivity creates a new activity instance wrapping the given metadata.
func NewActivity(metadata *activity.Metadata) activity.Activity {
	act := &MyActivity{metadata: metadata}
	return act
}
// Metadata implements activity.Activity.Metadata.
func (act *MyActivity) Metadata() *activity.Metadata {
	return act.metadata
}
// Eval implements activity.Activity.Eval: it exchanges the configured
// username/password for a token from TIBCO Cloud Mashery and copies the
// JSON response fields onto the activity outputs.
func (a *MyActivity) Eval(context activity.Context) (done bool, err error) {
	// Get the user provided data (inputs are declared string-typed).
	username := context.GetInput(ivUsername).(string)
	password := context.GetInput(ivPassword).(string)
	scope := context.GetInput(ivScope).(string)
	auth := context.GetInput(ivBasicAuth).(string)
	encodedAuth := b64.StdEncoding.EncodeToString([]byte(auth))

	// BUG FIX: build the form body with url.Values so credentials that
	// contain '&', '=', '%' or spaces are percent-encoded; the previous
	// fmt.Sprintf concatenation produced a malformed body for such values.
	form := neturl.Values{}
	form.Set("grant_type", grantType)
	form.Set("username", username)
	form.Set("password", password)
	form.Set("scope", scope)
	payload := strings.NewReader(form.Encode())

	req, err := http.NewRequest("POST", url, payload)
	if err != nil {
		return false, err
	}
	req.Header.Add("content-type", "application/x-www-form-urlencoded")
	req.Header.Add("authorization", fmt.Sprintf("Basic %s", encodedAuth))

	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return false, err
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return false, err
	}

	// Decode the token response and set the output values in the context.
	var data map[string]interface{}
	if err := json.Unmarshal(body, &data); err != nil {
		return false, err
	}
	context.SetOutput(ovExpires, data["expires_in"])
	context.SetOutput(ovRefreshToken, data["refresh_token"])
	context.SetOutput(ovScope, data["scope"])
	context.SetOutput(ovToken, data["access_token"])
	context.SetOutput(ovTokenType, data["token_type"])
	return true, nil
}
|
<gh_stars>0
# ensure we use the best available 'set' type with name 'set'
# (pre-2.4 Python compatibility: fall back to sets.Set when the builtin
# 'set' type does not exist)
try:
    set
except NameError:
    from sets import Set as set
# a custom exception class that we raise to signal violations
class InterfaceOmission(TypeError):
    # Raised when a class fails to define every name its declared
    # interfaces require; the exception argument lists the missing names.
    pass
class MetaInterfaceChecker(type):
    ''' the interface-checking custom metaclass

    At class-creation time, verifies that the new class defines every
    attribute required by each interface listed in its __implements__
    attribute, and raises InterfaceOmission (carrying the list of missing
    names) otherwise.
    NOTE: uses Python 2 'raise X, args' syntax -- this module is Python 2 only.
    '''
    def __init__(cls, classname, bases, classdict):
        super(MetaInterfaceChecker, cls).__init__(classname, bases, classdict)
        cls_defines = set(dir(cls))
        for interface in cls.__implements__:
            itf_requires = set(dir(interface))
            if not itf_requires.issubset(cls_defines):
                raise InterfaceOmission, list(itf_requires - cls_defines)
|
#!/usr/bin/env python3
import os
import sys
thispath = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(os.path.dirname(thispath),"helper"))
from MiscFxns import *
from StandardModules import *
def CompareEgy(EgyIn):
    """True when EgyIn matches the reference energy -228.99856104845924 within 1e-5.

    BUG FIX: compare the absolute difference -- the original signed test
    'EgyIn + ref < 1e-5' also accepted any energy far below the reference.
    """
    return abs(EgyIn + 228.99856104845924) < 0.00001
def CompareGrad(GradIn):
    """True when every component of GradIn matches the reference gradient within 1e-5.

    BUG FIX: compare absolute differences per component -- the original
    signed test accepted any component far below its reference value.
    Also returns False (instead of raising IndexError) for short input.
    """
    CorrectGrad=[
    -0.005502867455371351, -0.0027751336381018924, 0.012084950740151623,
    0.009433403388612161, -0.00046749509597486527, -0.01297325682844054,
    -0.005722311322994865, -0.00016173311193513505, -0.002000942005888109,
    0.0006610108084962162, -0.010009097688745408, -0.009957460723114053,
    0.0016319575067324323, 0.001697567271209459, -0.0056374571379951355,
    0.0027764939686429737, 0.009729630525249732, 0.015009657098031084,
    0.00862971968666757, 0.01161260596748973, -0.0011614628661475676,
    -0.01533113508033946, -0.009051964433770002, -0.0008451939530332433,
    0.0034237284980951355, -0.0005743798008410808, 0.00548116566911838]
    if len(GradIn) < len(CorrectGrad):
        return False
    return all(abs(CorrectGrad[i] - GradIn[i]) < 0.00001
               for i in range(len(CorrectGrad)))
def Run(mm):
    """Run the FPA test: MP2/a(TQ)Z + [2B-CCSD(T)/aDZ] - [2B-MP2/aDZ] on a
    water trimer, checking energies and gradients via the Tester.

    mm: the psr module administrator used to load/configure modules.
    """
    try:
        tester = psr.testing.Tester(
        "Testing FPA. MP2/a(TQ)Z+[2B-CCSD(T)/aDZ]-[2B-MP2/aDZ]")
        tester.print_header()
        LoadDefaultModules(mm)
        # Clone module keys so each basis-set variant can be configured
        # independently.
        mm.DuplicateKey("PSR_MP2","MP2_aTZ")
        mm.DuplicateKey("PSR_MP2","MP2_aQZ")
        mm.DuplicateKey("PSR_MBE","PSR_MBE_MP2")
        aDZ="aug-cc-pvdz"
        aTZ="aug-cc-pvtz"
        aQZ="aug-cc-pvqz"
        mm.change_option("PSR_MBE","METHOD","PSR_CCSD(T)")
        mm.change_option("PSR_MBE_MP2","METHOD","PSR_MP2")
        mm.change_option("PSR_BOND_FRAG","TRUNCATION_ORDER",2)
        mm.change_option("PSR_CCSD(T)","BASIS_SET",aDZ)
        mm.change_option("PSR_MP2","BASIS_SET",aDZ)
        mm.change_option("MP2_aTZ","BASIS_SET",aTZ)
        mm.change_option("MP2_aQZ","BASIS_SET",aQZ)
        # CBS extrapolation of the MP2 aTZ/aQZ results (cardinal numbers 3,4).
        mm.DuplicateKey("PSR_MIM","FPA_MP2_MIM")
        mm.change_option("FPA_MP2_MIM","METHODS",["MP2_aTZ","MP2_aQZ"])
        mm.change_option("PSR_HELGAKER_CBS","BASIS_CARDINAL_NUMS",[3,4])
        mm.change_option("PSR_HELGAKER_CBS","MIM_KEY","FPA_MP2_MIM")
        # Combine: CBS-MP2 + 2B-CCSD(T) - 2B-MP2 (weights 1, 1, -1).
        mm.change_option("PSR_MIM","METHODS",["PSR_HELGAKER_CBS",
                                              "PSR_MBE",
                                              "PSR_MBE_MP2"
                                             ])
        mm.change_option("PSR_MIM","WEIGHTS",[1.0,1.0,-1.0])
        MyMod=mm.get_module("PSR_MIM",0)
        mol=psr.system.MakeSystem("""
        0 1
        O 1.2361419 1.0137761 -0.0612424
        H 0.5104418 0.8944555 0.5514190
        H 1.9926927 1.1973129 0.4956931
        O -0.9957202 0.0160415 1.2422556
        H -1.4542703 -0.5669741 1.8472817
        H -0.9377950 -0.4817912 0.4267562
        O -0.2432343 -1.0198566 -1.1953808
        H 0.4367536 -0.3759433 -0.9973297
        H -0.5031835 -0.8251492 -2.0957959
        """)
        mol = ApplyBasis(mol,aDZ,aDZ)
        mol = ApplyBasis(mol,aTZ,aTZ)
        mol = ApplyBasis(mol,aQZ,aQZ)
        wfn=psr.datastore.Wavefunction()
        wfn.system=mol
        NewWfn,Egy=MyMod.Deriv(0,wfn)
        tester.test("Testing Energy via Deriv(0)", True, CompareEgy, Egy[0])
        # BUG FIX: 'MyModenergy(wfn)' was a typo (missing '.'); the pattern
        # of the surrounding calls (MyMod.Deriv / MyMod.Gradient) and the
        # test label "via Energy()" indicate MyMod.Energy was intended.
        NewWfn,Egy=MyMod.Energy(wfn)
        tester.test("Testing Energy via Energy()", True, CompareEgy, Egy)
        NewWfn,Egy=MyMod.Deriv(1,wfn)
        tester.test("Testing Gradient via Deriv(1)", True, CompareGrad, Egy)
        NewWfn,Egy=MyMod.Gradient(wfn)
        tester.test("Testing Energy via Gradient()", True, CompareGrad, Egy)
    except Exception as e:
        psr.output.Output("Caught exception in main handler\n")
        # NOTE(review): 'traceback' is not imported in this file's visible
        # imports -- presumably provided via the star imports from
        # MiscFxns/StandardModules; confirm.
        traceback.print_exc()
# Entry point: run the test under a module administrator, then shut psr down.
with psr.ModuleAdministrator() as mm:
    Run(mm)
psr.finalize()
|
#!/bin/sh -l
# Build App Inventor extensions: install the toolchain, clone the sources
# repo ($1) at ref ($2), copy ./src into the components tree, and run ant.
set -eu
apt-get update && apt-get install -y \
ant \
git \
openjdk-8-jdk
# NOTE(review): the clone target directory is not specified; the later
# 'cd appinventor-sources' presumably relies on $1 cloning into that name.
git clone $1
# NOTE(review): this checkout runs in the CURRENT directory, not inside the
# fresh clone -- confirm whether it is meant to check out $2 in the clone.
git checkout $2
git submodule update --init
cp -r src/** appinventor-sources/appinventor/components/src
cd appinventor-sources/appinventor/
ant clean
ant extensions
cd ../..
cd appinventor-sources/appinventor/components/build/extensions
# 'dir' is the GNU coreutils ls-alike: capture the built extension name(s).
file=$(dir)
echo ::set-output name=file::$file
cd ../../../../..
|
# Training driver for the WBC models.  Earlier invocations are kept below
# as a dated log; only the last two (uncommented) commands run, comparing
# learning rates 1e-4 vs 5e-5 on wbc2_1024x1024 with validation enabled.
# Mar 17, 2021
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc_1024x1024 --cls 4 --phi 0 --gpu 2 --batch-size 8 --steps 100 --epoch 200 --valid False --cross 1
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc_1024x1024 --cls 4 --phi 0 --gpu 2 --batch-size 8 --steps 100 --epoch 200 --valid False --cross 2
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc_1024x1024 --cls 4 --phi 0 --gpu 2 --batch-size 8 --steps 100 --epoch 200 --valid False --cross 3
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc_1024x1024 --cls 4 --phi 0 --gpu 2 --batch-size 8 --steps 100 --epoch 200 --valid False --cross 4
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc_1024x1024 --cls 4 --phi 0 --gpu 2 --batch-size 8 --steps 100 --epoch 200 --valid False --cross 5
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc4_1024x1024 --cls 5 --phi 0 --gpu 2 --batch-size 8 --steps 100 --epoch 200 --valid False --cross 4
# Mar 18, 2021
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc2_1024x1024 --cls 4 --phi 0 --gpu 2 --batch-size 8 --steps 100 --epoch 200 --valid False --cross 1
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc2_1024x1024 --cls 4 --phi 0 --gpu 2 --batch-size 8 --steps 100 --epoch 200 --valid False --cross 2
# Mar 20, 2021
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc_1024x1024 --cls 4 --phi 1 --gpu 0 --batch-size 4 --steps 100 --epoch 200 --valid False --cross 1
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc_1024x1024 --cls 4 --phi 1 --gpu 0 --batch-size 4 --steps 100 --epoch 200 --valid False --cross 2
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc_1024x1024 --cls 4 --phi 1 --gpu 0 --batch-size 4 --steps 100 --epoch 200 --valid False --cross 3
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc_1024x1024 --cls 4 --phi 1 --gpu 0 --batch-size 4 --steps 100 --epoch 200 --valid False --cross 4
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc_1024x1024 --cls 4 --phi 1 --gpu 0 --batch-size 4 --steps 100 --epoch 200 --valid False --cross 5
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc4_1024x1024 --cls 4 --phi 1 --gpu 0 --batch-size 4 --steps 100 --epoch 200 --valid False --cross 4
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc_1024x1024 --cls 4 --phi 0 --gpu 2 --batch-size 8 --steps 100 --epoch 400 --valid False --cross 1 --lr 1e-4 --lw 0.5
# python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc_1024x1024 --cls 4 --phi 0 --gpu 0 --batch-size 8 --steps 100 --epoch 400 --valid False --cross 1 --lr 5e-5 --lw 0.5
python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc2_1024x1024 --cls 4 --phi 0 --gpu 2 --batch-size 8 --steps 100 --epoch 400 --valid True --cross 1 --lr 1e-4 --lw 0.5
python3 train_wbc_cv.py --docker --snapshot imagenet --dataset wbc2_1024x1024 --cls 4 --phi 0 --gpu 0 --batch-size 8 --steps 100 --epoch 400 --valid True --cross 1 --lr 5e-5 --lw 0.5
|
<reponame>Darksecond/libcore
#pragma once

namespace core
{
    // File-open mode flags; power-of-two values, so presumably meant to
    // be combined with bitwise OR -- confirm against the consumers.
    namespace fs_modes
    {
        static const int in = 1;     // open for reading
        static const int out = 2;    // open for writing
        static const int append = 4; // write at end of file
    };
};
|
/**
 * Parse a gallery HTML template into its components.
 *
 * @param string $html_template template text containing {{paging_*}} markers
 * @param mixed  $page          page handle forwarded to get_gallery_intro()
 * @return array 'intro' (string) and 'paging_links' (array of 5 fragments)
 */
function parseGalleryTemplate($html_template, $page) {
    $parsed_data = [];

    // Extract intro content
    $parsed_data['intro'] = nl2br(get_gallery_intro($page));

    // Extract paging links.
    // BUG FIX: guard the preg_match result -- when the template lacks the
    // {{paging_*}} markers, $paging_html_components stays empty and the
    // indexed reads below would raise undefined-index errors.
    $paging_links = [
        'paging_start' => '',
        'paging_previous_start' => '',
        'paging_previous_end' => '',
        'paging_next_start' => '',
        'paging_next_end' => ''
    ];
    if (preg_match("~\{\{paging_start\}\}(.*)\{\{paging_previous_start\}\}(.+)\{\{paging_previous_end\}\}(.*)\{\{paging_next_start\}\}(.+)\{\{paging_next_end\}\}(.*)\{\{paging_end\}\}~s", $html_template, $paging_html_components)) {
        $paging_links = [
            'paging_start' => $paging_html_components[1],
            'paging_previous_start' => $paging_html_components[2],
            'paging_previous_end' => $paging_html_components[3],
            'paging_next_start' => $paging_html_components[4],
            'paging_next_end' => $paging_html_components[5]
        ];
    }
    $parsed_data['paging_links'] = $paging_links;

    return $parsed_data;
}
|
from typing import List
def find_matching_indices(prefixes: List[int], mask: int, index: int) -> List[int]:
    """Return the prefixes equal to (index & mask), preserving input order."""
    masked_index = index & mask
    return [prefix for prefix in prefixes if prefix == masked_index]
|
<reponame>quipex-binary-studio/github-snurse-cred<filename>src/event_handlers/pr-edited.ts
import { WebhookContext } from '../types';
import { managePrBaseLabels } from './helpers';
/**
 * Webhook handler for the "pull_request edited" event: re-applies the PR
 * base-branch labels via managePrBaseLabels, logging entry and exit.
 *
 * @param context - webhook context forwarded to the label helper
 */
const handlePrEdited = async (context: WebhookContext) => {
  console.log('pr edited');
  await managePrBaseLabels(context);
  console.log('end of pr edited action');
};

export { handlePrEdited };
|
import requests
from bs4 import BeautifulSoup

# BUG FIX: requests needs an absolute URL with a scheme; the bare
# "www.example.com/articles" raises requests.exceptions.MissingSchema.
URL = "https://www.example.com/articles"

r = requests.get(URL)
soup = BeautifulSoup(r.content, 'html5lib')

# Print the title and link of every article headline on the page.
articles = soup.findAll('h2', attrs = {'class':'article-title'})
for article in articles:
    print(article.text)
    # Robustness: a headline without a nested <a> would make article.a None.
    if article.a is not None:
        print(article.a['href'])
|
#!/usr/bin/env python
# -*- coding:UTF-8 -*-
from __future__ import division
import re
import itertools
import string
import sets
import struct
def isString_v1(obj): # excludes UserString instances
    # Python 2 only: basestring is the common base of str and unicode.
    return isinstance(obj,basestring)
def isString_v2(obj): # duck-typing approach
    """True when obj behaves like a string (supports concatenation with '').

    BUG FIX: catch only TypeError -- the original bare 'except' also
    swallowed KeyboardInterrupt/SystemExit and any unrelated exception.
    """
    try:
        obj + ''
    except TypeError:
        return False
    else:
        return True
def revwords_v1(astring): # reverse the order of words
    # NOTE(review): shadowed by the second 'def revwords_v1' below -- only
    # that later definition is reachable at module level; this one was
    # probably meant to keep a distinct name.
    return ' '.join(astring.split()[::-1])
    #return ' '.join(reversed(astring.split())) # more compact alternative
def revwords_v1(astring): # reverse word order, preserving whitespace runs
    # NOTE(review): same name as the function above -- this definition
    # shadows it (likely intended to be revwords_v2).
    # re.split with a capturing group keeps the whitespace separators in
    # the result list, so reversing and re-joining preserves them.
    revwords = re.split(r'(\s+)',astring)
    revwords.reverse()
    revwords = ''.join(revwords)
    return revwords
def containsAny_v1(seq, aset):
    """True if at least one element of seq is contained in aset."""
    return any(item in aset for item in seq)
def containsAny_v2(seq, aset):
    # Python 2 only: itertools.ifilter was removed in Python 3 (the
    # builtin filter is lazy there).  Returns True on the first element
    # of seq contained in aset, else False.
    for item in itertools.ifilter(aset.__contains__,seq):
        return True
    return False
def containsAny_v3(seq, aset):
    """True when seq and aset share at least one element.

    Note: unlike the early-exit variants, this inspects all of seq.
    """
    shared = set(aset) & set(seq)
    return bool(shared)
def contaisOnly(seq, aset):  # seq <= aset
    """True if every element of seq is in aset (vacuously true for empty seq).

    NOTE(review): the name has a typo ('contaisOnly'); kept because
    callers may reference it.
    """
    return all(element in aset for element in seq)
def containsAll(seq, aset):  # seq >= aset
    """True if seq contains every item of aset (aset is a subset of seq)."""
    return set(aset).issubset(seq)
# Special-case variants: both arguments must be plain byte strings, so
# these are less general than the versions above.  Python 2 only
# (string.maketrans / two-argument str.translate).
def containsAny_vs4(astring,astrset):
    # Deleting astring's characters from astrset shrinks it iff the two
    # share at least one character.
    notrans = string.maketrans('','') # identity translation table
    return len(astrset) != len(astrset.translate(notrans,astring))
def containsAll_vs1(astring,astrset):
    # True when deleting astring's characters from astrset deletes
    # everything, i.e. astrset is drawn entirely from astring's chars.
    # Python 2 only (two-argument str.translate).
    notrans = string.maketrans('','')
    return not astrset.translate(notrans,astring)
# 方法,需要特殊定制
def translator(frm='',to='',delete='',keep=None):
    """Build a customized translate closure (Python 2 str.translate)."""
    if len(to) == 1:
        to = to * len(frm)  # pad `to` so it pairs 1:1 with `frm`
    trans = string.maketrans(frm,to)
    if keep is not None:
        allchars = string.maketrans('','')  # identity table: all 256 chars
        # delete = complement of (keep minus delete): "remove the removed"
        delete = allchars.translate(allchars,\
            keep.translate(allchars,delete))
    def translate(s):  # closure over trans/delete
        return s.translate(trans,delete)
    return translate  # a purpose-built translation function
# Keep only the characters in a given set, filtering the string.
allchars = string.maketrans('','')
def makefilter(keep):
    delchars = allchars.translate(allchars,keep)  # complement of `keep`
    def thefilter(s):  # closure
        return s.translate(allchars,delchars)
    return thefilter  # could equally be a lambda
def canonicform(s):  # return the kept characters in canonical byte order
    return makefilter(s)(allchars)
# Filter a unicode string via unicode.translate's mapping protocol.
class Keeper(object):  # unicode-flavoured filter
    def __init__(self,keep):
        self.keep = sets.Set(map(ord,keep))
    def __getitem__(self,n):  # unicode.translate queries this per code point
        if n not in self.keep:
            return None
        return unichr(n)
    def __call__(self,s):
        return unicode(s).translate(self)
# Printable ASCII plus common control chars: the "text" character set.
text_chars = ''.join(map(chr,range(32,127)))+'\t\b\r\n'
def isText(s,text_chars= text_chars,threshold=0.3):
    """Heuristic: is the (byte) string text rather than binary?

    Fixed NameError: the original referenced the misspelled `all_chars`;
    the module-level identity translation table is named `allchars`.
    """
    if '\0' in s:  # NUL bytes never appear in text
        return False
    if not s:  # the empty string counts as text
        return True
    # Delete all text chars; what is left is the non-text residue.
    notext_chars = s.translate(allchars,text_chars)
    return len(notext_chars)/len(s) <= threshold  # mostly text => text
def iscapitalized(s):
    """True if s equals its capitalize() form and contains a letter."""
    return s == s.capitalize() and containsAny_vs4(s,string.letters)
def fields(baseformat, theline, lastfield=False):
    """Unpack fixed-width fields plus one variable-length tail.

    The tail takes whatever bytes remain after `baseformat`; it is
    captured as a string field when lastfield is true, otherwise padded
    with 'x' (skipped by struct.unpack).
    """
    numremain = len(theline) - struct.calcsize(baseformat)
    formator = "%s %d%s" %(baseformat, numremain, lastfield and "s" or "x")
    return struct.unpack(formator, theline)
# Memoized version: caches the computed format string per input triple.
def fields_mem(baseformat, theline, lastfield=False, _cache={}):
    # The mutable default argument is deliberate: it is the cache.
    key = baseformat, theline, lastfield
    formator = _cache.get(key)
    if formator is None:
        numremain = len(theline) - struct.calcsize(baseformat)
        _cache[key]= formator = "%s %d%s" %(baseformat, numremain, lastfield and "s" or "x")
    return struct.unpack(formator, theline)
def split_by(theline, n, lastfield=False):
    """Split theline into consecutive chunks of length n.

    Fixes the original `theline[k,k+n]`: a tuple index, which raises
    TypeError on strings — a slice `theline[k:k+n]` was clearly meant.
    If lastfield is false, a trailing chunk shorter than n is dropped.
    Also guards the empty-input case (the original indexed pieces[-1]
    on an empty list).
    """
    # range() works on both Python 2 and 3 (xrange is Py2-only).
    pieces = [theline[k:k + n] for k in range(0, len(theline), n)]
    if not lastfield and pieces and len(pieces[-1]) < n:
        pieces.pop()
    return pieces
# 基于index点的切割
def split_at(theline, cuts, lastfield=False):
    """Split theline at the given cut indices (e.g. cuts = [8, 16, 23]).

    When lastfield is false, the tail after the final cut is discarded.
    """
    starts = [0] + cuts
    ends = cuts + [None]
    pieces = []
    for begin, end in zip(starts, ends):
        pieces.append(theline[begin:end])
    if not lastfield:
        pieces.pop()
    return pieces
"""基于生成器的形式,十分好"""
def split_at_v2(theline, cuts, lastfield=False):
    """Generator version of split_at: yields the pieces lazily.

    Fixes a NameError: the original tested the misspelled name
    `lastfiled`, which crashed whenever the generator was exhausted.
    """
    last = 0
    for cut in cuts:
        yield theline[last:cut]
        last = cut
    if lastfield:
        yield theline[last:]
# 模拟Lc
# Express split_by in terms of split_at_v2 (lazy chunks of length n).
def split_by_v2(theline, n, lastfield=False):
    # The cuts are every multiple of n; xrange is Python 2 only.
    return split_at_v2(theline,xrange(n, len(theline),n), lastfield)
# 未保留行之间的相对空格
def reindent(s,numspace):
    """Re-indent every line to exactly `numspace` leading spaces.

    Note: existing relative indentation between lines is discarded
    (each line is stripped first).
    """
    prefix = numspace * ' '
    rebuilt = []
    for line in s.splitlines():
        rebuilt.append(prefix + line.strip())
    return '\n'.join(rebuilt)
def addSpaces(s,numAdd):
    """Shift every line right by numAdd spaces (relative indent kept)."""
    pad = ' ' * numAdd
    # splitlines(True) keeps the newline on each line, so prefixing each
    # element pads every line including the first.
    return ''.join(pad + line for line in s.splitlines(True))
def numSpaces(s):
    """Return the leading-whitespace width of each line of s."""
    widths = []
    for line in s.splitlines():
        widths.append(len(line) - len(line.lstrip()))
    return widths
def delSpaces(s,numDel):
    """Shift every line left by numDel spaces (relative indent kept).

    Raises ValueError if any line has fewer than numDel leading spaces.
    The original used the Python-2-only `raise E, msg` statement form,
    which is a syntax error on Python 3; the call form works on both.
    """
    if numDel > min(numSpaces(s)):
        raise ValueError('Removing more space than there are!')
    return '\n'.join([line[numDel:] for line in s.splitlines()])
def unIndentBlock(s):
    """Shift the whole block left so the least-indented line reaches column 0."""
    return delSpaces(s,min(numSpaces(s)))
def unexpand(astring, tablen=8):
    """Convert runs of spaces back into tabs (inverse of str.expandtabs).

    Strategy: expand existing tabs first, split into space runs and
    non-space pieces, then rewrite each space run as tabs plus a short
    remainder of spaces.
    """
    import re
    pieces = re.split(r'( +)', astring.expandtabs(tablen))
    lensofar = 0
    for i, piece in enumerate(pieces):
        thislen = len(piece)
        lensofar += thislen
        if piece.isspace():
            # Spaces past the last tab stop must stay spaces.
            numblanks = lensofar % tablen
            # Floor division is required: the module-level
            # `from __future__ import division` made `/` true division,
            # so numtabs became a float and '\t' * numtabs raised
            # TypeError.
            numtabs = (thislen - numblanks + tablen - 1) // tablen
            pieces[i] = '\t' * numtabs + ' ' * numblanks
    return ''.join(pieces)
def expand_at_linestart(P, tablen):
    """Expand tabs to spaces, but only within each line's leading whitespace."""
    import re
    def _expand_match(m):
        return m.group().expandtabs(tablen)
    expanded = (re.sub(r'^\s+', _expand_match, line) for line in P.splitlines(True))
    return ''.join(expanded)
def expand(formator, d, maker='"',safe=False): # 标记
"""替换引号标记的字符串子串"""
if safe:
def lookup(w): return d.get(w, w.join(maker*2))
else:
def lookup(w): return d[w]
parts = formator.split(maker)
parts[1::2] = map(lookup,parts[1::2])
return ''.join(parts)
def multiple_replace(text, adict):
    """Perform every replacement in adict over text in one regex pass."""
    pattern = re.compile('|'.join(map(re.escape, adict)))
    def _sub(match):
        return adict[match.group(0)]
    return pattern.sub(_sub, text)
def multiple_replace_v1(*args, **kwargs):
    """Closure variant: build the replacer once, apply it to many texts."""
    table = dict(*args, **kwargs)  # captured environment
    pattern = re.compile('|'.join(map(re.escape, table)))
    def _sub(match):
        return table[match.group(0)]
    def replacer(text):
        return pattern.sub(_sub, text)
    return replacer
class make_multi_replace(object):
    """
    Callable object that applies every replacement from a dict to a text
    in a single regex pass.
    """
    def __init__(self, *args, **kwargs):
        self.adict = dict(*args, **kwargs)
        self.rx = self.make_rx()
    def make_rx(self):  # override point for subclasses
        """May be overridden to construct a different regular expression."""
        return re.compile('|'.join(map(re.escape, self.adict)))
    def one_xlat(self, match):
        # NOTE(review): re.sub requires the replacement to be a string,
        # so non-string dict values would fail here (the original author
        # flagged this as a suspected bug too) — confirm callers.
        return self.adict[ match.group(0)]
    def __call__(self, text):
        return self.rx.sub(self.one_xlat,text)
class make_multi_replace_by_word(make_multi_replace):
    # Variant that matches whole words only (keys wrapped in \b anchors).
    def make_rx(self):
        return re.compile(r'\b%s\b' % r'\b|\b'.join(map(re.escape, self.adict)))
def anyTrue(predicate, seq):  # generic helper
    # Lazily maps predicate over seq and stops at the first True
    # (itertools.imap is Python 2 only; any(map(...)) on Python 3).
    return True in itertools.imap(predicate, seq)
def endsWith(s, *endings):  # does s end with any of the given endings?
    return anyTrue(s.endswith, endings)
|
import React, { Component, PropTypes } from "react";
import { connect } from "react-redux";
import { bindActionCreators } from "redux";
import { fetchPage } from "../actions/fetchActions";
import Banner from "../components/Banner";
import Section from "../components/Section";
import Loading from "../components/Loading";
class Work extends Component {
componentDidMount() {
this.props.fetchPage(this.props.route.path);
}
// Helper function to render the various sections
renderProjects(projectData, index) {
const {
title,
summary,
description,
site_url,
repo_url,
image_url,
tools
} = projectData;
if (index === 0 || index % 2 === 0) {
return (
<Section
key={index}
title={title}
caption={summary}
description={description}
image_url={image_url}
primary_site_url={site_url}
secondary_site_url={repo_url}
items={tools}
dark
/>
);
} else {
return (
<Section
key={index}
title={title}
caption={summary}
description={description}
image_url={image_url}
primary_site_url={site_url}
secondary_site_url={repo_url}
items={tools}
/>
);
}
}
// Map over the returned data from the action creator and format it on the page
render() {
// Destructure the work object & loading/error props
const {
work: { title, caption, banner_url, projects = [] },
isLoading,
hasErrored
} = this.props;
return (
<div>
{isLoading ? (
<Loading />
) : (
<div id="work">
<Banner
title={title}
featured_image={banner_url}
caption={caption}
/>
{projects.map(this.renderProjects)}
</div>
)}
</div>
);
}
}
// Map State To Props
// Selects the "work" page slice plus the fetch-status flags from the
// Redux store for the Work component.
const mapStateToProps = (state) => {
  const { work } = state.pages;
  const { isLoading, error } = state.fetching;
  return { work, isLoading, hasErrored: error };
};
// Bind the actions and dispatch them
// Bind the fetchPage action creator to dispatch so the component can
// call it directly as this.props.fetchPage(...).
function mapDispatchToProps(dispatch) {
  return bindActionCreators({ fetchPage }, dispatch);
}
// Work Props Validation
// Work Props Validation
Work.propTypes = {
  fetchPage: PropTypes.func,
  isLoading: PropTypes.bool,
  hasErrored: PropTypes.string,
  // Fixed: use the destructured `PropTypes` import consistently;
  // `React.PropTypes` was removed in React 16 and crashes there.
  work: PropTypes.shape({
    title: PropTypes.string,
    banner_url: PropTypes.string,
    caption: PropTypes.string,
    projects: PropTypes.array
  }),
  // NOTE(review): `projects` arrives via `work.projects`, not as a
  // top-level prop — this entry looks vestigial; confirm before removing.
  projects: PropTypes.shape({
    title: PropTypes.string,
    summary: PropTypes.string,
    description: PropTypes.string,
    primary_url: PropTypes.string,
    secondary_url: PropTypes.string,
    image_url: PropTypes.string,
    tools: PropTypes.array
  }),
  route: PropTypes.object
};
// Connect Work to the Redux store: state slices via mapStateToProps,
// bound action creators via mapDispatchToProps.
export default connect(
  mapStateToProps,
  mapDispatchToProps
)(Work);
|
<gh_stars>0
var server = require('net').createServer();
//var postcss = require('gulp-postcss');
var print = require('gulp-print');
var gulp = require('gulp');
var autoprefixer = require('autoprefixer');
var gutil = require('gulp-util');
var map = require('map-stream');
var vfs = require('vinyl-fs');
var streamify = require('streamify-string');
var postcss = require('postcss');
var cssbeautify = require('cssbeautify');
// NOTE(review): debugging leftover — prints the cssbeautify function
// object at startup; consider removing.
console.log(cssbeautify);
// Accept raw-TCP (telnet-style) clients on port 5000.
server.listen(5000, function() {
    console.log('Telnet server is running on port', server.address().port);
});
// Completion callback for socket.write: fires once the data has been
// handed to the kernel's send buffer.
function afterSend() {
    console.log("data sent!");
}
// Per-connection handler: receives raw CSS over the socket, runs it
// through autoprefixer, beautifies it, and writes the result back,
// terminated by the "<<<" sentinel the client watches for.
server.on('connection', function(socket) {
    console.log('connecting..');
    socket.on('end', function(){
        console.log('disconnecting');
    });
    // Disable Nagle so each response flushes immediately.
    socket.setNoDelay(true);
    socket.on('data', function(data) {
        // NOTE(review): setEncoding only affects *subsequent* 'data'
        // events; this first chunk arrived as a Buffer.
        socket.setEncoding('utf8');
        // should only be 1 line at a time
        var css = data; //.toString().replace(/(\r\n|\n|\r)/gm,"");
        // str.replace(/[^A-Za-z 0-9 \.,\?""!@#\$%\^&\*\(\)-_=\+;:<>\/\\\|\}\{\[\]`~]*/g, '')
        // console.log('received data as binary' + css.toString('hex'));
        // console.log('received data as text : ' + css);
        console.log('received data ..');
        // Autoprefix for the last 4 browser versions, then beautify.
        postcss( [ autoprefixer ({ browsers: ['last 4 version'] }) ] )
        .process(css).then(function(result) {
            result.warnings().forEach(function (warn) {
                console.warn(warn.toString());
            });
            console.log('css prefixed');
            var beautified = cssbeautify( result.css,{
                indent: ' ',
                openbrace: 'end-of-line',
                autosemicolon: true
            });
            console.log('css beautified');
            // "<<<" marks end-of-response for the client.
            var status = socket.write(beautified + "<<<", 'utf8', afterSend);
            console.log('all written to buffer: ' + status);
        });
    });
});
/*
var plugins = [
autoprefixer({ browsers: ['last 4 version'] })
];
var processCss = function read(file, cb) {
console.log("processing css ..");
console.log(JSON.stringify(file));
console.log("data attribute ..");
console.log(JSON.stringify(file.data));
console.log(JSON.stringify(file.type));
console.log('cb' + JSON.stringify(cb));
socket.write(file);
// socket.write(file._contents);
}
console.log('data in: ' + data);
console.log(JSON.stringify(data));
streamify(data)
.pipe(postcss(plugins))
.pipe(map(processCss))
gulp.src('styles.css').pipe(
);
*/
|
import React from 'react';
const UserInfo = (props) => {
return (
<div>
<h2>User Info</h2>
<div>Name: {props.user[0]}</div>
<div>Age: {props.user[1]}</div>
<div>City: {props.user[2]}</div>
</div>
);
};
export default UserInfo;
|
import React, { Component } from "react";
import { View, Text, TouchableWithoutFeedback, Modal, ScrollView } from "react-native";
export default class PlanetDescription extends Component {
  constructor(props) {
    super(props);
    // The incoming description is mirrored into state so the modal can
    // dismiss itself (set it to null) without involving the parent.
    this.state = {
      stationDescription: props.stationDescription,
    };
  }

  // NOTE(review): componentWillReceiveProps is deprecated (React 16.3+).
  // Kept as-is because a mechanical swap to getDerivedStateFromProps
  // would re-derive state on every render and break
  // handleCloseDescription; a proper fix should lift the open/closed
  // state to the parent.
  componentWillReceiveProps(nextProps) {
    this.setState({ stationDescription: nextProps.stationDescription });
  }

  render() {
    return (
      // Transparent modal, visible whenever a description is present.
      <Modal
        transparent={true}
        visible={!!this.state.stationDescription}
        onRequestClose={this.handleCloseDescription}
      >
        {/* Tapping the backdrop closes the modal. */}
        <TouchableWithoutFeedback onPress={this.handleCloseDescription}>
          <View style={{ flex: 1, justifyContent: "center" }}>
            <View
              style={{
                backgroundColor: "#212121",
                marginHorizontal: 32,
                marginVertical: 128,
                borderRadius: 8,
              }}
            >
              <ScrollView>
                {/* Inner touchable swallows taps on the card itself so
                    they do not bubble to the closing backdrop. */}
                <TouchableWithoutFeedback>
                  <View style={{ padding: 16 }}>
                    <Text
                      style={{
                        color: "white",
                        fontSize: 20,
                        alignSelf: "center",
                      }}
                    >
                      {this.state.stationDescription}
                    </Text>
                  </View>
                </TouchableWithoutFeedback>
              </ScrollView>
            </View>
          </View>
        </TouchableWithoutFeedback>
      </Modal>
    );
  }

  // Clears the mirrored description, hiding the modal.
  handleCloseDescription = () => {
    this.setState({ stationDescription: null });
  };
}
|
import numpy as np
class QuantumCircuit:
    """Minimal state-vector simulator for a register of qubits.

    Qubit 0 is the most significant bit of the basis-state index, so
    for two qubits the amplitudes are ordered |00>, |01>, |10>, |11>.

    The original methods had comment-only bodies, which is a syntax
    error in Python (the module could not even be imported); they are
    implemented here.
    """

    # Single-qubit gate matrices by name.
    _SINGLE_QUBIT_GATES = {
        'H': np.array([[1, 1], [1, -1]], dtype=complex) / np.sqrt(2),
        'X': np.array([[0, 1], [1, 0]], dtype=complex),
        'Y': np.array([[0, -1j], [1j, 0]], dtype=complex),
        'Z': np.array([[1, 0], [0, -1]], dtype=complex),
    }

    def __init__(self, num_qubits):
        self.num_qubits = num_qubits
        self.state_vector = np.zeros(2 ** num_qubits, dtype=complex)
        self.state_vector[0] = 1  # Initialize with |0...0> state

    def apply_gate(self, gate, target_qubits):
        """Apply a named gate ('H', 'X', 'Y', 'Z', or 'CX').

        target_qubits: [target] for single-qubit gates, or
        [control, target] for 'CX'.  Raises ValueError for an unknown
        gate name.
        """
        if gate == 'CX':
            control, target = target_qubits
            op = self._cnot_matrix(control, target)
        else:
            try:
                base = self._SINGLE_QUBIT_GATES[gate]
            except KeyError:
                raise ValueError('unsupported gate: %r' % (gate,))
            (target,) = target_qubits
            op = self._single_qubit_matrix(base, target)
        self.state_vector = op @ self.state_vector

    def _single_qubit_matrix(self, base, target):
        # Tensor identities around `base` at position `target`
        # (qubit 0 is the leftmost/most significant factor).
        op = np.array([[1]], dtype=complex)
        for qubit in range(self.num_qubits):
            factor = base if qubit == target else np.eye(2, dtype=complex)
            op = np.kron(op, factor)
        return op

    def _cnot_matrix(self, control, target):
        # Permutation matrix: flip the target bit wherever control is 1.
        dim = 2 ** self.num_qubits
        op = np.zeros((dim, dim), dtype=complex)
        control_mask = 1 << (self.num_qubits - 1 - control)
        target_mask = 1 << (self.num_qubits - 1 - target)
        for i in range(dim):
            j = i ^ target_mask if i & control_mask else i
            op[j, i] = 1
        return op

    def measure(self, qubit_index):
        """Measure one qubit, collapse the state, and return 0 or 1."""
        mask = 1 << (self.num_qubits - 1 - qubit_index)
        probs = np.abs(self.state_vector) ** 2
        one_states = np.array([bool(i & mask) for i in range(probs.size)])
        p_one = float(probs[one_states].sum())
        outcome = 1 if np.random.random() < p_one else 0
        keep = one_states if outcome == 1 else ~one_states
        collapsed = np.where(keep, self.state_vector, 0)
        norm = np.linalg.norm(collapsed)
        self.state_vector = collapsed / norm if norm > 0 else collapsed
        return outcome

    def run(self, backend):
        """'Run' the circuit and return the final state vector.

        Only local state-vector simulation is supported; `backend` is
        accepted for interface compatibility and ignored.
        """
        return self.state_vector.copy()
# Example usage: build a 2-qubit Bell state and inspect the result.
qc = QuantumCircuit(2)
qc.apply_gate('H', [0]) # Apply Hadamard gate on qubit 0
qc.apply_gate('CX', [0, 1]) # Apply CNOT gate with control qubit 0 and target qubit 1
result = qc.run('simulator') # Run the circuit on a quantum simulator backend
print(result) # Print the final state vector or measurement results
|
#!/usr/bin/env zsh
VERSION="1.0.0"
HISTORIAN_SRC=${HISTORIAN_SRC:-"$HOME/.zsh_history"}
HISTORIAN_DB=${HISTORIAN_DB:-"$ZSHRC_DIR/zsh.historian.db"}
HISTORIAN_SQLITE3=${HISTORIAN_SQLITE3:-"$(builtin command -v sqlite3)"}
# Print the subcommand summary to stderr.
# Fixed: the `log` subcommand is handled by main() but was missing here.
usage() {
    echo "Usage: hist <subcommand>" >&2
    echo "subcommands:" >&2
    echo "  config  show config" >&2
    echo "  count  count items in history" >&2
    echo "  import  import to db" >&2
    echo "  log  show history" >&2
    echo "  shell  launch sqlite3 shell with db" >&2
    echo "  search <term>  search for <term>" >&2
    echo "  /term  search for <term>" >&2
    echo "  version  show the version" >&2
}
# Verify the required environment (HOME and a sqlite3 binary) before
# any subcommand does real work.
preflight_check() {
    if [ -z "$HOME" ]; then
        echo "need \$HOME" >&2
        exit 1
    fi
    if [ -z "${HISTORIAN_SQLITE3}" ]; then
        echo "need sqlite3" >&2
        exit 1
    fi
}
# Create the schema on first use; idempotent (IF NOT EXISTS throughout).
ensure_db_exists() {
    ( cat <<SQL
CREATE TABLE IF NOT EXISTS history (
    id INTEGER PRIMARY KEY ASC,
    command TEXT NOT NULL,
    timestamp INTEGER
);
CREATE UNIQUE INDEX IF NOT EXISTS
    history_command_timestamp ON history(command);
CREATE VIRTUAL TABLE IF NOT EXISTS
    history_search USING fts4(id, history, command);
SQL
) | "${HISTORIAN_SQLITE3}" "${HISTORIAN_DB}";
}
# Abort unless the database file already exists (for read-only commands
# that should not silently create an empty db).
fail_unless_db_exists() {
    if [ ! -f "${HISTORIAN_DB}" ]; then
        echo "db (${HISTORIAN_DB}) doesn't exist. Aborting";
        exit 1;
    fi
}
# Show the effective configuration (version, paths, sqlite3 binary).
cmd_config() {
    echo "version: ${VERSION}"
    echo "source_history: ${HISTORIAN_SRC}"
    echo "db: ${HISTORIAN_DB}"
    echo "sqlite3: ${HISTORIAN_SQLITE3}"
}
# Count the rows imported into the history table.
cmd_count() {
    local args=$@
    preflight_check;
    ensure_db_exists;
    ( cat <<SQL
SELECT COUNT(*) FROM history;
SQL
) | "${HISTORIAN_SQLITE3}" "${HISTORIAN_DB}";
}
# Import the zsh history file into the db.  Uses a \x01 separator so
# whole lines land in one column, INSERT OR IGNORE for dedupe (the
# unique index on command), and a temp `variables` table to report how
# many new rows arrived.
cmd_import() {
    local args=$@
    preflight_check;
    ensure_db_exists;
    ( cat <<SQL
CREATE TEMPORARY TABLE variables
    (key TEXT, value INTEGER);
INSERT INTO variables(key, value)
    SELECT 'items', COUNT(*) FROM history;
CREATE TEMPORARY TABLE history_import (line TEXT);
.separator $(echo -e "\x01")
.import ${HISTORIAN_SRC} history_import
INSERT OR IGNORE INTO history(command, timestamp)
    SELECT line, NULL FROM history_import;
UPDATE variables
    SET value = -1 * value + (SELECT COUNT(*) FROM history); -- lol subtraction
SELECT 'Imported ' || value || ' item(s).' FROM variables WHERE key = 'items';
SQL
) | "${HISTORIAN_SQLITE3}" "${HISTORIAN_DB}";
}
# Page through the whole history, newest first.
cmd_log() {
    local args=$@
    preflight_check;
    fail_unless_db_exists;
    ( cat <<SQL
.separator "\\n\\t"
SELECT id, command FROM history ORDER BY id DESC;
SQL
) | "${HISTORIAN_SQLITE3}" "${HISTORIAN_DB}" | less;
}
# `hist search <term>`: delegate to the shared search helper.
cmd_search() {
    preflight_check;
    ensure_db_exists;
    local args=$@
    _search "$args"
}
# `hist /<term>`: strip the leading slash, then search.
cmd_search_slash() {
    preflight_check;
    ensure_db_exists;
    local args=$@
    term="$(echo "$args" | sed -e 's/^.//g')";
    _search "$term"
}
# Core search: substring match of <term> against the command history.
# The term is spliced into a SQL string literal, so single quotes are
# doubled first — previously a quote in the term broke the query
# outright (SQL injection through the LIKE pattern).  LIKE wildcards
# (% and _) in the term are still interpreted by SQLite.
_search() {
    local args=$@
    local esc
    esc="$(printf '%s' "$args" | sed "s/'/''/g")"
    ( cat <<SQL
.separator "\\n\\t"
SELECT id, command
    FROM history
    WHERE command LIKE '%${esc}%'
    ORDER BY id DESC;
SQL
) | "${HISTORIAN_SQLITE3}" "${HISTORIAN_DB}";
}
# Open an interactive sqlite3 shell on the history db (echoes the exact
# command line to stderr first).
cmd_shell() {
    local args=$@
    preflight_check;
    fail_unless_db_exists;
    echo "${HISTORIAN_SQLITE3}" "${HISTORIAN_DB}" >&2
    "${HISTORIAN_SQLITE3}" "${HISTORIAN_DB}";
}
# Print the script version.
cmd_version() {
    echo "historian version: ${VERSION}"
}
# Dispatch on the first CLI word; `/term` is recognized as a search
# shorthand, anything else falls back to usage.
main() {
    local cmd=$1
    shift
    case $cmd in
        config)
            cmd_config $@
            ;;
        count)
            cmd_count $@
            ;;
        import)
            cmd_import $@
            ;;
        log)
            cmd_log $@
            ;;
        search)
            cmd_search $@
            ;;
        shell)
            cmd_shell $@
            ;;
        version)
            cmd_version $@
            ;;
        "")
            usage
            ;;
        *)
            # A leading slash means "search for the rest of the word".
            if [ -n "$(echo "$cmd" | grep -E '^/')" ]; then
                cmd_search_slash $cmd $@
            else
                usage
                exit 1
            fi
            ;;
    esac
}
main $@
|
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Where verbose GC logs, stdout copies, and config snapshots are written.
LOG_DIR="../log"
# Workload configuration file (first CLI arg) and heap size (second).
CONFIG="$1"
HEAP="$2"
# Basename of the config, used to name the per-run log files.
CONFIG_NAME="${CONFIG##*/}"
# Base JVM options: no dumps, gencon GC policy, no compaction,
# 16 GC threads, compressed references.
VM_OPTIONS_BASE="-Xdump:none -Xgcpolicy:gencon -Xnocompactgc -Xgcthreads16 -Xcompressedrefs"
# Run one workload: snapshot the config next to the logs, start the
# verbose-GC tailer in the background (after a 10s delay so the JVM has
# time to create the log), run the workload JVM, then summarize the log.
run_test(){
    DATE_TIME=`date "+%Y-%m-%dT_%H-%M-%S"`
    VERBOSE_FILE=$LOG_DIR"/"$CONFIG_NAME"_"$DATE_TIME"_verbose.xml"
    STDOUT_FILE=$LOG_DIR"/"$CONFIG_NAME"_"$DATE_TIME"_stdout.txt"
    CONFIG_COPY=$LOG_DIR"/"$CONFIG_NAME"_"$DATE_TIME".xml"
    # Keep a copy of the config for reproducibility.
    cp "$CONFIG" "$CONFIG_COPY"
    ( sleep 10; java net.adoptopenjdk.casa.verbose_gc_parser.VerboseGCTailer "$VERBOSE_FILE" ) &
    java $VM_OPTIONS -Xmx$HEAP -Xms$HEAP -Xverbosegclog:"$VERBOSE_FILE" net.adoptopenjdk.casa.workload_sessions.Main "$CONFIG" --log_file "$STDOUT_FILE"
    #java $VM_OPTIONS -Xmx$HEAP -Xms$HEAP -verbose:gc -Xtgc:largeAllocationVerbose -Xtrace:iprint=j9mm{alloclarge} net.adoptopenjdk.casa.gc_workload.Main "$CONFIG" --log_file "$STDOUT_FILE" --silent &> stdout_"$DATE_TIME".txt
    java net.adoptopenjdk.casa.verbose_gc_parser.VerboseGCParser "$VERBOSE_FILE" -ts
}
# Dump the tail of the most recent verbose log.
# NOTE(review): relies on VERBOSE_FILE still being set as a global from
# the last run_test invocation.
fail(){
    tail "$VERBOSE_FILE"
}
# Alternative GC tuning option sets, kept commented for manual experiments.
#VM_OPTIONS_BASE=$VM_OPTIONS_BASE" -XXgc:stdSplitFreeListSplitAmount=1"
#VM_OPTIONS_BASE=$VM_OPTIONS_BASE" -Xgc:scvTenureAge=1,scvNoAdaptiveTenure"
echo $CONFIG
VM_OPTIONS=$VM_OPTIONS_BASE
run_test
#killall java 2> /dev/null || killall 2> /dev/null
#VM_OPTIONS=$VM_OPTIONS_BASE" -Xgc:scvTenureAge=1,scvNoAdaptiveTenure"
#run_test
#VM_OPTIONS=$VM_OPTIONS_BASE" -Xgc:concurrentSlack=auto"
#run_test
#VM_OPTIONS=$VM_OPTIONS_BASE" -Xgc:concurrentSlack=500000000"
#run_test
|
import psutil
def high_usage_check(threshold=75.0):
    """Classify the host's resource pressure.

    Checks CPU, memory, then root-filesystem usage (in that order)
    against `threshold` percent and reports the first one over it.

    Args:
        threshold: utilization percentage above which a resource counts
            as "high".  Defaults to 75, the previously hard-coded value,
            so existing callers are unaffected.

    Returns:
        One of "high_cpu", "high_memory", "high_storage", or "normal".
    """
    cpu_usage = psutil.cpu_percent()
    if cpu_usage > threshold:
        return "high_cpu"
    memory_usage = psutil.virtual_memory()
    if memory_usage.percent > threshold:
        return "high_memory"
    disk_usage_percentage = psutil.disk_usage("/").percent
    if disk_usage_percentage > threshold:
        return "high_storage"
    return "normal"
if __name__ == "__main__":
    # Entry point: print the current utilization classification.
    resource_status = high_usage_check()
    print(f"Resource utilization is currently {resource_status}.")
|
<reponame>Bellian/GGJ2020
import LevelObject from "./levelObject";
import { vec2 } from "gl-matrix";
import { Bodies, World } from "matter-js";
import PhysicsEngine from "../physicsEngine";
/** Metadata driving a Floor tile: pixel size and extra CSS classes. */
interface WallMeta {
    size: vec2;       // [width, height] in pixels
    class: string[];  // additional CSS classes applied to the element
}
export class Floor extends LevelObject {
    meta!: WallMeta;

    /**
     * Builds the DOM node for this floor tile: takes the element from
     * the base class, tags it with the floor CSS classes, and sizes it
     * from the level metadata (pixels).
     */
    render(): HTMLElement {
        const element = super.render();
        element.classList.add('floor', 'center', ...this.meta.class);
        const [width, height] = this.meta.size;
        element.style.width = `${width}px`;
        element.style.height = `${height}px`;
        return element;
    }
}
export default Floor;
|
<reponame>thomaszdxsn/zoom-meeting-api<gh_stars>1-10
/**
 * Zoom meeting registration approval modes.  The numeric values are the
 * wire values expected by the Zoom API and must not change.
 */
export enum ApprovalTypes {
  /** Registrants are approved automatically. */
  Automatically = 0,
  /** The host approves each registrant manually. */
  Manually = 1,
  /** No registration is required. */
  NoRegistration = 2,
  /** @deprecated Misspelling kept for backward compatibility; use NoRegistration. */
  NoRegistartion = 2,
}
|
/* Copyright (c) 2012-2014 University of Cape Town
* Copyright (c) 2014 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
/**
* @file
*
* Radixsort implementation.
*/
#include "clhpp11.h"
#include <clogs/visibility_push.h>
#include <cstddef>
#include <map>
#include <set>
#include <string>
#include <cassert>
#include <climits>
#include <algorithm>
#include <vector>
#include <utility>
#include <clogs/visibility_pop.h>
#include <clogs/core.h>
#include <clogs/radixsort.h>
#include "utils.h"
#include "radixsort.h"
#include "parameters.h"
#include "tune.h"
#include "cache.h"
#include "tr1_random.h"
#include "tr1_functional.h"
namespace clogs
{
namespace detail
{
/* Sets the sort key type.  Only unsigned integral scalar types are
 * accepted, since the radix passes operate on raw unsigned bits. */
void RadixsortProblem::setKeyType(const Type &keyType)
{
    if (!(keyType.isIntegral()
          && !keyType.isSigned()
          && keyType.getLength() == 1))
        throw std::invalid_argument("keyType is not valid");
    this->keyType = keyType;
}

/* Sets the payload type carried alongside each key (may be void). */
void RadixsortProblem::setValueType(const Type &valueType)
{
    this->valueType = valueType;
}

/* Installs the policy object that controls autotuning behavior. */
void RadixsortProblem::setTunePolicy(const TunePolicy &tunePolicy)
{
    this->tunePolicy = tunePolicy;
}
/* Elements per tile: must satisfy both the reduce kernel and the
 * scatter kernel's work geometry. */
::size_t Radixsort::getTileSize() const
{
    return std::max(reduceWorkGroupSize, scatterWorkScale * scatterWorkGroupSize);
}

/* Elements per block, rounded up to whole tiles so that at most
 * scanBlocks blocks are needed to cover `elements`. */
::size_t Radixsort::getBlockSize(::size_t elements) const
{
    const ::size_t tileSize = getTileSize();
    return (elements + tileSize * scanBlocks - 1) / (tileSize * scanBlocks) * tileSize;
}

/* Number of blocks of length `len` covering `elements`, rounded up to
 * a multiple of the scatter slices per work group; never exceeds
 * scanBlocks by construction of getBlockSize. */
::size_t Radixsort::getBlocks(::size_t elements, ::size_t len) const
{
    const ::size_t slicesPerWorkGroup = scatterWorkGroupSize / scatterSlice;
    ::size_t blocks = (elements + len - 1) / len;
    blocks = roundUp(blocks, slicesPerWorkGroup);
    assert(blocks <= scanBlocks);
    return blocks;
}
/* Enqueues the per-block histogram reduction for one radix digit
 * (starting at firstBit) over the input keys. */
void Radixsort::enqueueReduce(
    const cl::CommandQueue &queue, const cl::Buffer &out, const cl::Buffer &in,
    ::size_t len, ::size_t elements, unsigned int firstBit,
    const VECTOR_CLASS<cl::Event> *events, cl::Event *event)
{
    reduceKernel.setArg(0, out);
    reduceKernel.setArg(1, in);
    reduceKernel.setArg(2, (cl_uint) len);
    reduceKernel.setArg(3, (cl_uint) elements);
    reduceKernel.setArg(4, (cl_uint) firstBit);
    cl_uint blocks = getBlocks(elements, len);
    cl::Event reduceEvent;
    queue.enqueueNDRangeKernel(reduceKernel,
                               cl::NullRange,
                               cl::NDRange(reduceWorkGroupSize * blocks),
                               cl::NDRange(reduceWorkGroupSize),
                               events, &reduceEvent);
    doEventCallback(reduceEvent);
    if (event != NULL)
        *event = reduceEvent;
}

/* Enqueues the single-work-group scan over the per-block histograms
 * (histogram buffer was bound to arg 0 during initialize). */
void Radixsort::enqueueScan(
    const cl::CommandQueue &queue, const cl::Buffer &histogram, ::size_t blocks,
    const VECTOR_CLASS<cl::Event> *events, cl::Event *event)
{
    scanKernel.setArg(0, histogram);
    scanKernel.setArg(1, (cl_uint) blocks);
    cl::Event scanEvent;
    queue.enqueueNDRangeKernel(scanKernel,
                               cl::NullRange,
                               cl::NDRange(scanWorkGroupSize),
                               cl::NDRange(scanWorkGroupSize),
                               events, &scanEvent);
    doEventCallback(scanEvent);
    if (event != NULL)
        *event = scanEvent;
}

/* Enqueues the scatter kernel, which permutes keys (and values when
 * valueSize != 0) into sorted order for the current digit using the
 * scanned histogram offsets. */
void Radixsort::enqueueScatter(
    const cl::CommandQueue &queue, const cl::Buffer &outKeys, const cl::Buffer &outValues,
    const cl::Buffer &inKeys, const cl::Buffer &inValues, const cl::Buffer &histogram,
    ::size_t len, ::size_t elements, unsigned int firstBit,
    const VECTOR_CLASS<cl::Event> *events, cl::Event *event)
{
    scatterKernel.setArg(0, outKeys);
    scatterKernel.setArg(1, inKeys);
    scatterKernel.setArg(2, histogram);
    scatterKernel.setArg(3, (cl_uint) len);
    scatterKernel.setArg(4, (cl_uint) elements);
    scatterKernel.setArg(5, (cl_uint) firstBit);
    if (valueSize != 0)
    {
        scatterKernel.setArg(6, outValues);
        scatterKernel.setArg(7, inValues);
    }
    const ::size_t blocks = getBlocks(elements, len);
    const ::size_t slicesPerWorkGroup = scatterWorkGroupSize / scatterSlice;
    assert(blocks % slicesPerWorkGroup == 0);
    const ::size_t workGroups = blocks / slicesPerWorkGroup;
    cl::Event scatterEvent;
    queue.enqueueNDRangeKernel(scatterKernel,
                               cl::NullRange,
                               cl::NDRange(scatterWorkGroupSize * workGroups),
                               cl::NDRange(scatterWorkGroupSize),
                               events, &scatterEvent);
    doEventCallback(scatterEvent);
    if (event != NULL)
        *event = scatterEvent;
}
/* Sorts the keys buffer (and optional values buffer) in place.  Each
 * radix pass runs reduce -> scan -> scatter, ping-ponging between the
 * caller's buffers and temporaries; an odd number of passes ends in
 * the temporaries, so the data is copied back at the end. */
void Radixsort::enqueue(
    const cl::CommandQueue &queue,
    const cl::Buffer &keys, const cl::Buffer &values,
    ::size_t elements, unsigned int maxBits,
    const VECTOR_CLASS<cl::Event> *events,
    cl::Event *event)
{
    /* Validate parameters */
    if (keys.getInfo<CL_MEM_SIZE>() < elements * keySize)
    {
        throw cl::Error(CL_INVALID_VALUE, "clogs::Radixsort::enqueue: range of out of buffer bounds for key");
    }
    if (valueSize != 0 && values.getInfo<CL_MEM_SIZE>() < elements * valueSize)
    {
        throw cl::Error(CL_INVALID_VALUE, "clogs::Radixsort::enqueue: range of out of buffer bounds for value");
    }
    if (!(keys.getInfo<CL_MEM_FLAGS>() & CL_MEM_READ_WRITE))
    {
        throw cl::Error(CL_INVALID_VALUE, "clogs::Radixsort::enqueue: keys is not read-write");
    }
    if (valueSize != 0 && !(values.getInfo<CL_MEM_FLAGS>() & CL_MEM_READ_WRITE))
    {
        throw cl::Error(CL_INVALID_VALUE, "clogs::Radixsort::enqueue: values is not read-write");
    }
    if (elements == 0)
        throw cl::Error(CL_INVALID_GLOBAL_WORK_SIZE, "clogs::Radixsort::enqueue: elements is zero");
    /* maxBits == 0 means "sort on the full key width". */
    if (maxBits == 0)
        maxBits = CHAR_BIT * keySize;
    else if (maxBits > CHAR_BIT * keySize)
        throw cl::Error(CL_INVALID_VALUE, "clogs::Radixsort::enqueue: maxBits is too large");
    const cl::Context &context = queue.getInfo<CL_QUEUE_CONTEXT>();
    // If necessary, allocate temporary buffers for ping-pong
    cl::Buffer tmpKeys, tmpValues;
    if (this->tmpKeys() && this->tmpKeys.getInfo<CL_MEM_SIZE>() >= elements * keySize)
        tmpKeys = this->tmpKeys;
    else
        tmpKeys = cl::Buffer(context, CL_MEM_READ_WRITE, elements * keySize);
    if (valueSize != 0)
    {
        if (this->tmpValues() && this->tmpValues.getInfo<CL_MEM_SIZE>() >= elements * valueSize)
            tmpValues = this->tmpValues;
        else
            tmpValues = cl::Buffer(context, CL_MEM_READ_WRITE, elements * valueSize);
    }
    /* Event-chaining state: each enqueued stage waits on the previous. */
    cl::Event next;
    std::vector<cl::Event> prev(1);
    const std::vector<cl::Event> *waitFor = events;
    const cl::Buffer *curKeys = &keys;
    const cl::Buffer *curValues = &values;
    const cl::Buffer *nextKeys = &tmpKeys;
    const cl::Buffer *nextValues = &tmpValues;
    const ::size_t blockSize = getBlockSize(elements);
    const ::size_t blocks = getBlocks(elements, blockSize);
    assert(blocks <= scanBlocks);
    /* One pass per radixBits-wide digit, starting at the LSB. */
    for (unsigned int firstBit = 0; firstBit < maxBits; firstBit += radixBits)
    {
        enqueueReduce(queue, histogram, *curKeys, blockSize, elements, firstBit, waitFor, &next);
        prev[0] = next; waitFor = &prev;
        enqueueScan(queue, histogram, blocks, waitFor, &next);
        prev[0] = next; waitFor = &prev;
        enqueueScatter(queue, *nextKeys, *nextValues, *curKeys, *curValues, histogram, blockSize,
                       elements, firstBit, waitFor, &next);
        prev[0] = next; waitFor = &prev;
        std::swap(curKeys, nextKeys);
        std::swap(curValues, nextValues);
    }
    if (curKeys != &keys)
    {
        /* Odd number of ping-pongs, so we have to copy back again.
         * We don't actually need to serialize the copies, but it simplifies the event
         * management.
         */
        queue.enqueueCopyBuffer(*curKeys, *nextKeys, 0, 0, elements * keySize, waitFor, &next);
        doEventCallback(next);
        prev[0] = next; waitFor = &prev;
        if (valueSize != 0)
        {
            queue.enqueueCopyBuffer(*curValues, *nextValues, 0, 0, elements * valueSize, waitFor, &next);
            doEventCallback(next);
            prev[0] = next; waitFor = &prev;
        }
    }
    if (event != NULL)
        *event = next;
}
/* Registers caller-provided scratch buffers; enqueue() uses them
 * instead of allocating its own when they are large enough. */
void Radixsort::setTemporaryBuffers(const cl::Buffer &keys, const cl::Buffer &values)
{
    tmpKeys = keys;
    tmpValues = values;
}
/* One-time setup: records the tuning parameters, canonicalises the
 * value type, generates the configuration-specific UPSWEEP/DOWNSWEEP
 * code, then builds the program, kernels, and histogram buffer. */
void Radixsort::initialize(
    const cl::Context &context, const cl::Device &device,
    const RadixsortProblem &problem,
    const RadixsortParameters::Value &params)
{
    reduceWorkGroupSize = params.reduceWorkGroupSize;
    scanWorkGroupSize = params.scanWorkGroupSize;
    scatterWorkGroupSize = params.scatterWorkGroupSize;
    scatterWorkScale = params.scatterWorkScale;
    scanBlocks = params.scanBlocks;
    keySize = problem.keyType.getSize();
    valueSize = problem.valueType.getSize();
    radixBits = params.radixBits;
    radix = 1U << radixBits;
    scatterSlice = std::max(params.warpSizeSchedule, ::size_t(radix));
    /* Compile-time configuration passed to the kernel source. */
    std::map<std::string, int> defines;
    std::map<std::string, std::string> stringDefines;
    defines["WARP_SIZE_MEM"] = params.warpSizeMem;
    defines["WARP_SIZE_SCHEDULE"] = params.warpSizeSchedule;
    defines["REDUCE_WORK_GROUP_SIZE"] = reduceWorkGroupSize;
    defines["SCAN_WORK_GROUP_SIZE"] = scanWorkGroupSize;
    defines["SCATTER_WORK_GROUP_SIZE"] = scatterWorkGroupSize;
    defines["SCATTER_WORK_SCALE"] = scatterWorkScale;
    defines["SCATTER_SLICE"] = scatterSlice;
    defines["SCAN_BLOCKS"] = scanBlocks;
    defines["RADIX_BITS"] = radixBits;
    stringDefines["KEY_T"] = problem.keyType.getName();
    if (problem.valueType.getBaseType() != TYPE_VOID)
    {
        /* There are cases (at least on NVIDIA) where value types have
         * different performance even when they are the same size e.g. uchar3
         * vs uint. Avoid this by canonicalising the value type. This has the
         * extra benefit that there are fewer possible kernels.
         */
        Type kernelValueType = problem.valueType;
        switch (valueSize)
        {
        case 1: kernelValueType = TYPE_UCHAR; break;
        case 2: kernelValueType = TYPE_USHORT; break;
        case 4: kernelValueType = TYPE_UINT; break;
        case 8: kernelValueType = TYPE_ULONG; break;
        case 16: kernelValueType = Type(TYPE_UINT, 4); break;
        case 32: kernelValueType = Type(TYPE_UINT, 8); break;
        case 64: kernelValueType = Type(TYPE_UINT, 16); break;
        case 128: kernelValueType = Type(TYPE_ULONG, 16); break;
        }
        assert(kernelValueType.getSize() == valueSize);
        stringDefines["VALUE_T"] = kernelValueType.getName();
    }
    /* Generate code for upsweep and downsweep. This is done here rather
     * than relying on loop unrolling, constant folding and so on because
     * compilers don't always figure that out correctly (particularly when
     * it comes to an inner loop whose trip count depends on the counter
     * from an outer loop.
     */
    std::vector<std::string> upsweepStmts, downsweepStmts;
    /* Tree levels at which the sweep switches strategy. */
    std::vector< ::size_t> stops;
    stops.push_back(1);
    stops.push_back(radix);
    if (scatterSlice > radix)
        stops.push_back(scatterSlice);
    stops.push_back(scatterSlice * radix);
    for (int i = int(stops.size()) - 2; i >= 0; i--)
    {
        ::size_t from = stops[i + 1];
        ::size_t to = stops[i];
        if (to >= scatterSlice)
        {
            std::string toStr = detail::toString(to);
            std::string fromStr = detail::toString(from);
            upsweepStmts.push_back("upsweepMulti(wg->hist.level1.i, wg->hist.level2.c + "
                + toStr + ", " + fromStr + ", " + toStr + ", lid);");
            downsweepStmts.push_back("downsweepMulti(wg->hist.level1.i, wg->hist.level2.c + "
                + toStr + ", " + fromStr + ", " + toStr + ", lid);");
        }
        else
        {
            /* Collapse by factors of 4 while possible, then one final
             * factor of 2 if needed. */
            while (from >= to * 4)
            {
                std::string fromStr = detail::toString(from);
                std::string toStr = detail::toString(from / 4);
                bool forceZero = (from == 4);
                upsweepStmts.push_back("upsweep4(wg->hist.level2.i + " + toStr + ", wg->hist.level2.c + "
                    + toStr + ", " + toStr + ", lid, SCATTER_SLICE);");
                downsweepStmts.push_back("downsweep4(wg->hist.level2.i + " + toStr + ", wg->hist.level2.c + "
                    + toStr + ", " + toStr + ", lid, SCATTER_SLICE, "
                    + (forceZero ? "true" : "false") + ");");
                from /= 4;
            }
            if (from == to * 2)
            {
                std::string fromStr = detail::toString(from);
                std::string toStr = detail::toString(from / 2);
                bool forceZero = (from == 2);
                upsweepStmts.push_back("upsweep2(wg->hist.level2.s + " + toStr + ", wg->hist.level2.c + "
                    + toStr + ", " + toStr + ", lid, SCATTER_SLICE);");
                downsweepStmts.push_back("downsweep2(wg->hist.level2.s + " + toStr + ", wg->hist.level2.c + "
                    + toStr + ", " + toStr + ", lid, SCATTER_SLICE, "
                    + (forceZero ? "true" : "false") + ");");
            }
        }
    }
    /* Upsweep runs the statements forward; downsweep runs them in
     * reverse order. */
    std::ostringstream upsweep, downsweep;
    upsweep << "do { ";
    for (std::size_t i = 0; i < upsweepStmts.size(); i++)
        upsweep << upsweepStmts[i];
    upsweep << " } while (0)";
    downsweep << "do { ";
    for (int i = int(downsweepStmts.size()) - 1; i >= 0; i--)
        downsweep << downsweepStmts[i];
    downsweep << "} while (0)";
    stringDefines["UPSWEEP()"] = upsweep.str();
    stringDefines["DOWNSWEEP()"] = downsweep.str();
    try
    {
        histogram = cl::Buffer(context, CL_MEM_READ_WRITE, params.scanBlocks * radix * sizeof(cl_uint));
        program = build(context, device, "radixsort.cl", defines, stringDefines);
        reduceKernel = cl::Kernel(program, "radixsortReduce");
        scanKernel = cl::Kernel(program, "radixsortScan");
        scanKernel.setArg(0, histogram);
        scatterKernel = cl::Kernel(program, "radixsortScatter");
        scatterKernel.setArg(1, histogram);
    }
    catch (cl::Error &e)
    {
        throw InternalError(std::string("Error preparing kernels for radixsort: ") + e.what());
    }
}
/* Construct a radixsort instance from explicitly supplied tuning parameters,
 * bypassing the tuning database and the autotuner. */
Radixsort::Radixsort(
    const cl::Context &context, const cl::Device &device,
    const RadixsortProblem &problem,
    const RadixsortParameters::Value &params)
{
    initialize(context, device, problem, params);
}
/* Construct a radixsort instance, fetching tuning parameters from the
 * database, or running the autotuner (and caching its result) on a miss. */
Radixsort::Radixsort(
    const cl::Context &context, const cl::Device &device,
    const RadixsortProblem &problem)
{
    // Validate the problem specification before doing any expensive work
    if (!keyTypeSupported(device, problem.keyType))
        throw std::invalid_argument("keyType is not valid");
    if (!valueTypeSupported(device, problem.valueType))
        throw std::invalid_argument("valueType is not valid");
    RadixsortParameters::Key key = makeKey(device, problem);
    RadixsortParameters::Value params;
    if (!getDB().radixsort.lookup(key, params))
    {
        // Not tuned yet for this device/problem combination: tune and cache
        params = tune(device, problem);
        getDB().radixsort.add(key, params);
    }
    initialize(context, device, problem, params);
}
/* Build the lookup key used to index the tuning-parameter database:
 * device identity plus the key type name and the value size. */
RadixsortParameters::Key Radixsort::makeKey(
    const cl::Device &device,
    const RadixsortProblem &problem)
{
    RadixsortParameters::Key key;
    key.device = deviceKey(device);
    key.keyType = problem.keyType.getName();
    key.valueSize = problem.valueType.getSize();
    return key;
}
/* A key type is sortable only when it is an unsigned integral scalar
 * (vector length 1) that the device can both compute with and store. */
bool Radixsort::keyTypeSupported(const cl::Device &device, const Type &keyType)
{
    // Guard clauses: reject non-integral, signed or vector types up front
    if (!keyType.isIntegral() || keyType.isSigned())
        return false;
    if (keyType.getLength() != 1)
        return false;
    return keyType.isComputable(device) && keyType.isStorable(device);
}
/* Values are supported when sorting keys only (void value type) or when the
 * value type can be stored in device buffers. */
bool Radixsort::valueTypeSupported(const cl::Device &device, const Type &valueType)
{
    return valueType.getBaseType() == TYPE_VOID
        || valueType.isStorable(device);
}
/* Create a device buffer of `size` bytes filled with pseudo-random data;
 * used to generate reproducible benchmark inputs for tuning. */
static cl::Buffer makeRandomBuffer(const cl::CommandQueue &queue, ::size_t size)
{
    cl::Buffer buffer(queue.getInfo<CL_QUEUE_CONTEXT>(), CL_MEM_READ_WRITE, size);
    // Map the buffer so it can be filled from the host
    cl_uchar *data = reinterpret_cast<cl_uchar *>(
        queue.enqueueMapBuffer(buffer, CL_TRUE, CL_MAP_WRITE, 0, size));
    RANDOM_NAMESPACE::mt19937 engine;
    for (::size_t i = 0; i < size; i++)
    {
        /* We take values directly from the engine rather than using a
         * distribution, because the engine is guaranteed to be portable
         * across compilers.
         */
        data[i] = engine() & 0xFF;
    }
    queue.enqueueUnmapMemObject(buffer, data);
    return buffer;
}
/* Autotuner callback: measure the throughput (elements per unit of profiled
 * kernel time) of the reduction kernel alone for one candidate parameter set.
 * The rate is returned twice (measured value, comparison value). */
std::pair<double, double> Radixsort::tuneReduceCallback(
    const cl::Context &context, const cl::Device &device,
    std::size_t elements, const boost::any &paramsAny,
    const RadixsortProblem &problem)
{
    const RadixsortParameters::Value &params = boost::any_cast<const RadixsortParameters::Value &>(paramsAny);
    cl::CommandQueue queue(context, device, CL_QUEUE_PROFILING_ENABLE);
    const ::size_t keyBufferSize = elements * problem.keyType.getSize();
    const cl::Buffer keyBuffer = makeRandomBuffer(queue, keyBufferSize);
    Radixsort sort(context, device, problem, params);
    const ::size_t blockSize = sort.getBlockSize(elements);
    // Warmup
    sort.enqueueReduce(queue, sort.histogram, keyBuffer, blockSize, elements, 0, NULL, NULL);
    queue.finish();
    // Timing pass
    cl::Event event;
    sort.enqueueReduce(queue, sort.histogram, keyBuffer, blockSize, elements, 0, NULL, &event);
    queue.finish();
    event.wait();
    // Elapsed time comes from the event's profiling counters
    cl_ulong start = event.getProfilingInfo<CL_PROFILING_COMMAND_START>();
    cl_ulong end = event.getProfilingInfo<CL_PROFILING_COMMAND_END>();
    double elapsed = end - start;
    double rate = elements / elapsed;
    return std::make_pair(rate, rate);
}
/* Autotuner callback: measure the throughput of the scatter kernel alone.
 * The histogram is prepared once with a reduce+scan pass, then scatter is
 * warmed up and timed via event profiling. */
std::pair<double, double> Radixsort::tuneScatterCallback(
    const cl::Context &context, const cl::Device &device,
    std::size_t elements, const boost::any &paramsAny,
    const RadixsortProblem &problem)
{
    const RadixsortParameters::Value &params = boost::any_cast<const RadixsortParameters::Value &>(paramsAny);
    cl::CommandQueue queue(context, device, CL_QUEUE_PROFILING_ENABLE);
    const ::size_t keyBufferSize = elements * problem.keyType.getSize();
    const ::size_t valueBufferSize = elements * problem.valueType.getSize();
    const cl::Buffer keyBuffer = makeRandomBuffer(queue, keyBufferSize);
    const cl::Buffer outKeyBuffer(context, CL_MEM_READ_WRITE, keyBufferSize);
    cl::Buffer valueBuffer, outValueBuffer;
    // Value buffers are only needed when the sort carries values
    if (problem.valueType.getBaseType() != TYPE_VOID)
    {
        valueBuffer = makeRandomBuffer(queue, valueBufferSize);
        outValueBuffer = cl::Buffer(context, CL_MEM_READ_WRITE, valueBufferSize);
    }
    Radixsort sort(context, device, problem, params);
    const ::size_t blockSize = sort.getBlockSize(elements);
    const ::size_t blocks = sort.getBlocks(elements, blockSize);
    // Prepare histogram
    sort.enqueueReduce(queue, sort.histogram, keyBuffer, blockSize, elements, 0, NULL, NULL);
    sort.enqueueScan(queue, sort.histogram, blocks, NULL, NULL);
    // Warmup
    sort.enqueueScatter(
        queue,
        outKeyBuffer, outValueBuffer,
        keyBuffer, valueBuffer,
        sort.histogram, blockSize, elements, 0, NULL, NULL);
    queue.finish();
    // Timing pass
    cl::Event event;
    sort.enqueueScatter(
        queue,
        outKeyBuffer, outValueBuffer,
        keyBuffer, valueBuffer,
        sort.histogram, blockSize, elements, 0, NULL, &event);
    queue.finish();
    event.wait();
    cl_ulong start = event.getProfilingInfo<CL_PROFILING_COMMAND_START>();
    cl_ulong end = event.getProfilingInfo<CL_PROFILING_COMMAND_END>();
    double elapsed = end - start;
    double rate = elements / elapsed;
    return std::make_pair(rate, rate);
}
/* Autotuner callback for the block count: time a complete reduce+scan+scatter
 * pass, measured from the start of the reduce kernel to the end of the
 * scatter kernel. Returns (rate, rate * 1.05); the inflated second value
 * biases the tuner as noted in the comment at the bottom. */
std::pair<double, double> Radixsort::tuneBlocksCallback(
    const cl::Context &context, const cl::Device &device,
    std::size_t elements, const boost::any &paramsAny,
    const RadixsortProblem &problem)
{
    const RadixsortParameters::Value &params = boost::any_cast<const RadixsortParameters::Value &>(paramsAny);
    cl::CommandQueue queue(context, device, CL_QUEUE_PROFILING_ENABLE);
    const ::size_t keyBufferSize = elements * problem.keyType.getSize();
    const ::size_t valueBufferSize = elements * problem.valueType.getSize();
    const cl::Buffer keyBuffer = makeRandomBuffer(queue, keyBufferSize);
    const cl::Buffer outKeyBuffer(context, CL_MEM_READ_WRITE, keyBufferSize);
    cl::Buffer valueBuffer, outValueBuffer;
    // Value buffers are only needed when the sort carries values
    if (problem.valueType.getBaseType() != TYPE_VOID)
    {
        valueBuffer = makeRandomBuffer(queue, valueBufferSize);
        outValueBuffer = cl::Buffer(context, CL_MEM_READ_WRITE, valueBufferSize);
    }
    Radixsort sort(context, device, problem, params);
    const ::size_t blockSize = sort.getBlockSize(elements);
    const ::size_t blocks = sort.getBlocks(elements, blockSize);
    cl::Event reduceEvent;
    cl::Event scanEvent;
    cl::Event scatterEvent;
    // Warmup and real passes; the events from the second pass are the
    // ones inspected below
    for (int pass = 0; pass < 2; pass++)
    {
        sort.enqueueReduce(queue, sort.histogram, keyBuffer, blockSize, elements, 0, NULL, &reduceEvent);
        sort.enqueueScan(queue, sort.histogram, blocks, NULL, &scanEvent);
        sort.enqueueScatter(
            queue,
            outKeyBuffer, outValueBuffer,
            keyBuffer, valueBuffer,
            sort.histogram, blockSize, elements, 0,
            NULL, &scatterEvent);
        queue.finish();
    }
    reduceEvent.wait();
    scatterEvent.wait();
    // Time the whole pipeline: first kernel start to last kernel end
    cl_ulong start = reduceEvent.getProfilingInfo<CL_PROFILING_COMMAND_START>();
    cl_ulong end = scatterEvent.getProfilingInfo<CL_PROFILING_COMMAND_END>();
    double elapsed = end - start;
    double rate = elements / elapsed;
    // Fewer blocks means better performance on small problem sizes, so only
    // use more blocks if it makes a real improvement
    return std::make_pair(rate, rate * 1.05);
}
/* Autotune radixsort parameters for the given device and problem.
 * The search is greedy and staged: first the reduce kernel work-group size,
 * then the scatter kernel parameters, then the scan block count — each stage
 * keeping the best candidate found so far. */
RadixsortParameters::Value Radixsort::tune(
    const cl::Device &device,
    const RadixsortProblem &problem)
{
    const TunePolicy &policy = problem.tunePolicy;
    policy.assertEnabled();
    std::ostringstream description;
    description << "radixsort for " << problem.keyType.getName() << " keys and "
        << problem.valueType.getSize() << " byte values";
    policy.logStartAlgorithm(description.str(), device);
    /* Limit memory usage, otherwise devices with lots of RAM will take a long
     * time to tune. For GPUs we need a large problem size to get accurate
     * statistics, but for CPUs we use less to keep tuning time down.
     */
    bool isCPU = device.getInfo<CL_DEVICE_TYPE>() & CL_DEVICE_TYPE_CPU;
    const ::size_t maxDataSize = isCPU ? 32 * 1024 * 1024 : 256 * 1024 * 1024;
    const ::size_t dataSize = std::min(maxDataSize, (std::size_t) device.getInfo<CL_DEVICE_GLOBAL_MEM_SIZE>() / 8);
    const ::size_t elements = dataSize / (problem.keyType.getSize() + problem.valueType.getSize());
    // Benchmark on a medium size as well as the full size when possible
    std::vector<std::size_t> problemSizes;
    if (elements > 1024 * 1024)
        problemSizes.push_back(1024 * 1024);
    problemSizes.push_back(elements);
    const ::size_t maxWorkGroupSize = device.getInfo<CL_DEVICE_MAX_WORK_GROUP_SIZE>();
    const ::size_t warpSizeMem = getWarpSizeMem(device);
    const ::size_t warpSizeSchedule = getWarpSizeSchedule(device);
    RadixsortParameters::Value out;
    // TODO: change to e.g. 2-6 after adding code to select the best one
    for (unsigned int radixBits = 4; radixBits <= 4; radixBits++)
    {
        const unsigned int radix = 1U << radixBits;
        const unsigned int scanWorkGroupSize = 4 * radix; // TODO: autotune it
        // Upper bound on scan blocks imposed by local memory capacity
        ::size_t maxBlocks =
            (device.getInfo<CL_DEVICE_LOCAL_MEM_SIZE>() / sizeof(cl_uint) - 2 * scanWorkGroupSize) / radix;
        /* Work around devices like G80 lying about the maximum local memory
         * size, by starting with a smaller size.
         */
        ::size_t startBlocks = maxBlocks / 2;
        startBlocks = roundDown(startBlocks, (::size_t) scanWorkGroupSize / radix);
        if (maxWorkGroupSize < radix)
            break;
        RadixsortParameters::Value cand;
        // Set default values, which are later tuned
        const ::size_t scatterSlice = std::max(warpSizeSchedule, (::size_t) radix);
        cand.radixBits = radixBits;
        cand.warpSizeMem = warpSizeMem;
        cand.warpSizeSchedule = warpSizeSchedule;
        cand.scanBlocks = startBlocks;
        cand.scanWorkGroupSize = scanWorkGroupSize;
        cand.scatterWorkGroupSize = scatterSlice;
        cand.scatterWorkScale = 1;
        // Tune the reduction kernel, assuming a large scanBlocks
        {
            std::vector<boost::any> sets;
            for (::size_t reduceWorkGroupSize = radix; reduceWorkGroupSize <= maxWorkGroupSize; reduceWorkGroupSize *= 2)
            {
                RadixsortParameters::Value params = cand;
                params.reduceWorkGroupSize = reduceWorkGroupSize;
                sets.push_back(params);
            }
            using namespace FUNCTIONAL_NAMESPACE::placeholders;
            cand = boost::any_cast<RadixsortParameters::Value>(tuneOne(
                policy, device, sets, problemSizes,
                FUNCTIONAL_NAMESPACE::bind(&Radixsort::tuneReduceCallback, _1, _2, _3, _4, problem)));
        }
        // Tune the scatter kernel
        {
            std::vector<boost::any> sets;
            for (::size_t scatterWorkGroupSize = scatterSlice; scatterWorkGroupSize <= maxWorkGroupSize; scatterWorkGroupSize *= 2)
            {
                // TODO: increase search space
                for (::size_t scatterWorkScale = 1; scatterWorkScale <= 255 / scatterSlice; scatterWorkScale++)
                {
                    RadixsortParameters::Value params = cand;
                    const ::size_t slicesPerWorkGroup = scatterWorkGroupSize / scatterSlice;
                    params.scanBlocks = roundDown(startBlocks, slicesPerWorkGroup);
                    params.scatterWorkGroupSize = scatterWorkGroupSize;
                    params.scatterWorkScale = scatterWorkScale;
                    sets.push_back(params);
                }
            }
            using namespace FUNCTIONAL_NAMESPACE::placeholders;
            cand = boost::any_cast<RadixsortParameters::Value>(tuneOne(
                policy, device, sets, problemSizes,
                FUNCTIONAL_NAMESPACE::bind(&Radixsort::tuneScatterCallback, _1, _2, _3, _4, problem)));
        }
        // Tune the block count
        {
            std::vector<boost::any> sets;
            ::size_t scanWorkGroupSize = cand.scanWorkGroupSize;
            ::size_t scatterWorkGroupSize = cand.scatterWorkGroupSize;
            const ::size_t slicesPerWorkGroup = scatterWorkGroupSize / scatterSlice;
            // Have to reduce the maximum to align with slicesPerWorkGroup, which was 1 earlier
            maxBlocks = roundDown(maxBlocks, slicesPerWorkGroup);
            maxBlocks = roundDown(maxBlocks, scatterWorkGroupSize / radix);
            std::set< ::size_t> scanBlockCands;
            // Power-of-two candidates from the minimum usable count upwards
            for (::size_t scanBlocks = std::max(scanWorkGroupSize / radix, slicesPerWorkGroup); scanBlocks <= maxBlocks; scanBlocks *= 2)
            {
                scanBlockCands.insert(scanBlocks);
            }
            /* Also try with block counts that are a multiple of the number of compute units,
             * which gives a more balanced work distribution.
             */
            for (::size_t scanBlocks = device.getInfo<CL_DEVICE_MAX_COMPUTE_UNITS>();
                scanBlocks <= maxBlocks; scanBlocks *= 2)
            {
                ::size_t blocks = roundDown(scanBlocks, slicesPerWorkGroup);
                if (blocks >= scanWorkGroupSize / radix)
                    scanBlockCands.insert(blocks);
            }
            // Finally, try the upper limit, in case performance is monotonic
            scanBlockCands.insert(maxBlocks);
            for (std::set< ::size_t>::const_iterator i = scanBlockCands.begin();
                i != scanBlockCands.end(); ++i)
            {
                RadixsortParameters::Value params = cand;
                params.scanBlocks = *i;
                sets.push_back(params);
            }
            using namespace FUNCTIONAL_NAMESPACE::placeholders;
            cand = boost::any_cast<RadixsortParameters::Value>(tuneOne(
                policy, device, sets, problemSizes,
                FUNCTIONAL_NAMESPACE::bind(&Radixsort::tuneBlocksCallback, _1, _2, _3, _4, problem)));
        }
        // TODO: benchmark the whole combination
        out = cand;
    }
    policy.logEndAlgorithm();
    return out;
}
/* Accessor for the pimpl detail object of a public RadixsortProblem. */
const RadixsortProblem &getDetail(const clogs::RadixsortProblem &problem)
{
    return *problem.detail_;
}
} // namespace detail
/* Public pimpl wrapper: RadixsortProblem owns a detail::RadixsortProblem
 * and manages deep copies of it (rule of three). */
RadixsortProblem::RadixsortProblem() : detail_(new detail::RadixsortProblem())
{
}
RadixsortProblem::~RadixsortProblem()
{
    delete detail_;
}
// Copy constructor: deep-copies the detail object
RadixsortProblem::RadixsortProblem(const RadixsortProblem &other)
    : detail_(new detail::RadixsortProblem(*other.detail_))
{
}
RadixsortProblem &RadixsortProblem::operator=(const RadixsortProblem &other)
{
    // Self-assignment guard (compares detail pointers)
    if (detail_ != other.detail_)
    {
        // Copy-construct first so a throwing copy leaves *this unchanged
        detail::RadixsortProblem *tmp = new detail::RadixsortProblem(*other.detail_);
        delete detail_;
        detail_ = tmp;
    }
    return *this;
}
/* Pimpl forwarding setters for the public RadixsortProblem. */
void RadixsortProblem::setKeyType(const Type &keyType)
{
    assert(detail_ != NULL);
    detail_->setKeyType(keyType);
}
void RadixsortProblem::setValueType(const Type &valueType)
{
    assert(detail_ != NULL);
    detail_->setValueType(valueType);
}
void RadixsortProblem::setTunePolicy(const TunePolicy &tunePolicy)
{
    assert(detail_ != NULL);
    // Unwrap the public policy into its detail object before forwarding
    detail_->setTunePolicy(detail::getDetail(tunePolicy));
}
/* Default-construct an empty handle; construct() must be called before use. */
Radixsort::Radixsort()
{
}
// Downcast the base-class detail pointer to this algorithm's detail type
detail::Radixsort *Radixsort::getDetail() const
{
    return static_cast<detail::Radixsort *>(Algorithm::getDetail());
}
// As getDetail(); NOTE(review): presumably Algorithm::getDetailNonNull fails
// loudly when the handle is empty — confirm against the base class.
detail::Radixsort *Radixsort::getDetailNonNull() const
{
    return static_cast<detail::Radixsort *>(Algorithm::getDetailNonNull());
}
/* C-API entry point: build the detail object from raw OpenCL handles
 * (retaining them) and report success/failure through the err/errStr
 * out-parameters instead of letting exceptions escape. */
void Radixsort::construct(
    cl_context context, cl_device_id device,
    const RadixsortProblem &problem,
    cl_int &err, const char *&errStr)
{
    try
    {
        setDetail(new detail::Radixsort(
            detail::retainWrap<cl::Context>(context),
            detail::retainWrap<cl::Device>(device),
            detail::getDetail(problem)));
        detail::clearError(err, errStr);
    }
    catch (cl::Error &e)
    {
        detail::setError(err, errStr, e);
    }
}
/* Transfer the detail object from `other`; the previously-held detail (if
 * any) is returned by Algorithm::moveAssign and deleted here. */
void Radixsort::moveAssign(Radixsort &other)
{
    delete static_cast<detail::Radixsort *>(Algorithm::moveAssign(other));
}
/* C-API sort entry point: wraps the raw handles, forwards to the detail
 * implementation, and converts exceptions into err/errStr codes. The
 * completion event, when requested, is unwrapped into *event. */
void Radixsort::enqueue(
    cl_command_queue commandQueue,
    cl_mem keys, cl_mem values,
    ::size_t elements, unsigned int maxBits,
    cl_uint numEvents,
    const cl_event *events,
    cl_event *event,
    cl_int &err,
    const char *&errStr)
{
    try
    {
        VECTOR_CLASS<cl::Event> events_ = detail::retainWrap<cl::Event>(numEvents, events);
        cl::Event event_;
        getDetailNonNull()->enqueue(
            detail::retainWrap<cl::CommandQueue>(commandQueue),
            detail::retainWrap<cl::Buffer>(keys),
            detail::retainWrap<cl::Buffer>(values),
            elements, maxBits,
            events ? &events_ : NULL,
            event ? &event_ : NULL);
        detail::clearError(err, errStr);
        detail::unwrap(event_, event);
    }
    catch (cl::Error &e)
    {
        detail::setError(err, errStr, e);
    }
}
/* C-API wrapper: hand pre-allocated temporary key/value buffers to the
 * implementation, reporting errors via err/errStr. */
void Radixsort::setTemporaryBuffers(cl_mem keys, cl_mem values,
    cl_int &err, const char *&errStr)
{
    try
    {
        getDetailNonNull()->setTemporaryBuffers(
            detail::retainWrap<cl::Buffer>(keys),
            detail::retainWrap<cl::Buffer>(values));
        detail::clearError(err, errStr);
    }
    catch (cl::Error &e)
    {
        detail::setError(err, errStr, e);
    }
}
Radixsort::~Radixsort()
{
    delete getDetail();
}
// Non-member swap, implemented in terms of the member swap
void swap(Radixsort &a, Radixsort &b)
{
    a.swap(b);
}
} // namespace clogs
|
<gh_stars>1-10
import React, { useState, useEffect } from 'react';
import { useHistory, useParams } from 'react-router-dom';
import { Button } from '../../components/button/button';
import { ButtonGroup } from '../../components/button/button.group';
import { DescriptionList } from '../../components/description-list/description-list';
import { DescriptionListItem } from '../../components/description-list/description-list.item';
import { Icon } from '../../components/icon/icon';
import { Loader } from '../../components/loader/loader';
import { ModalConfirm } from '../../components/modal/confirm/modal.confirm';
import { SEO } from '../../components/seo/seo';
import { formatCurrency } from '../../utils/formatCurrency';
import { formatDate } from '../../utils/formatDate';
import { getTransactionCategoryMappingByTransactionId } from '../expenses/Expense';
import {
getAllTransactionCategoriesWithCategoryTree,
ITransactionCategoryWithCategoryTree,
} from '../profile/TransactionCategories/TransactionCategoriesService';
import { deleteIncome, getIncomeById } from './IncomeService';
// Props for the delete-confirmation modal: the parent supplies the actual
// deletion callback.
interface IIncomeDeleteModalProps {
  handleDelete(): void;
}
// Confirmation dialog for income deletion, wrapping ModalConfirm; this is a
// destructive action, hence the red accent.
const IncomeDeleteModal = ({ handleDelete }: IIncomeDeleteModalProps) => (
  <ModalConfirm
    label="Delete income"
    submitButtonLabel="Delete"
    onConfirm={handleDelete}
    modalOpenButtonLabel="Delete income"
    accentColor="red"
  >
    Are you sure you want to delete your income? All of your data will be
    permanently removed. This action cannot be undone.
  </ModalConfirm>
);
/**
 * Income details page: loads one income transaction (by route id) together
 * with its category mappings and the user's category tree, renders the
 * details, and offers edit/delete actions.
 *
 * Fixes: handleDelete now awaits the deletion before navigating (previously a
 * floating promise), and mapped category rows carry a React `key`.
 *
 * NOTE(review): IIncome and ITransactionCategoryMapping are referenced
 * without an import here — presumably ambient/global types; confirm.
 */
export const Income = (): JSX.Element => {
  const history = useHistory();
  const [income, setIncome] = useState<IIncome | undefined>(undefined);
  const [transactionCategoryMapping, setTransactionCategoryMapping] = useState<
    ITransactionCategoryMapping[] | undefined
  >(undefined);
  const [transactionCategories, setTransactionCategories] = useState<
    ITransactionCategoryWithCategoryTree[] | null
  >(null);
  const { id } = useParams<{ id: string }>();
  // Fetch the income, its category mappings and the category tree whenever
  // the route id changes.
  useEffect(() => {
    const fetchIncome = async () => {
      setIncome(await getIncomeById(id));
    };
    const fetchTransactionCategoryMapping = async () => {
      setTransactionCategoryMapping(
        await getTransactionCategoryMappingByTransactionId(id)
      );
    };
    const fetchTransactionCategories = async () => {
      setTransactionCategories(
        await getAllTransactionCategoriesWithCategoryTree()
      );
    };
    fetchIncome();
    fetchTransactionCategoryMapping();
    fetchTransactionCategories();
  }, [id]);
  // Resolve a category id to its human-readable tree path, falling back to
  // the raw id when the category is unknown.
  const getCategoryNameById = (categoryId: string) =>
    transactionCategories?.find((category) => category._id === categoryId)
      ?.categoryTree || categoryId;
  const handleDelete = async () => {
    // Await the deletion before navigating so the incomes list is not
    // rendered from stale data (the promise was previously left floating).
    await deleteIncome(id);
    history.push('/statistics/incomes');
  };
  // Show a loader until all three fetches have resolved.
  return typeof income === 'undefined' ||
    typeof transactionCategoryMapping === 'undefined' ||
    transactionCategories === null ? (
    <Loader loaderColor="blue" />
  ) : (
    <>
      <SEO title={`${income.description} | Incomes`} />
      <section className="rounded-lg border bg-white sm:grid divide-y sm:divide-y-0 sm:divide-x">
        <div className="p-6">
          <header className="flex items-center mb-6">
            <span className="rounded-lg inline-flex p-3 text-white bg-green-600">
              <Icon type="upload" />
            </span>
            <h1 className="text-2xl sm:text-3xl font-bold tracking-tighter ml-4">
              {income.description}
            </h1>
          </header>
          <DescriptionList label="Transaction details">
            <DescriptionListItem label="Amount">
              {formatCurrency(income.amount)}
            </DescriptionListItem>
            <DescriptionListItem label="Date">
              {formatDate(new Date(income.date))}
            </DescriptionListItem>
          </DescriptionList>
          {transactionCategoryMapping.length > 0 && (
            <DescriptionList label="Categories" className="mt-6" visibleLabel>
              {transactionCategoryMapping.map(({ amount, category_id }) => (
                <DescriptionListItem
                  key={category_id}
                  label={getCategoryNameById(category_id)}
                >
                  {formatCurrency(amount)}
                </DescriptionListItem>
              ))}
            </DescriptionList>
          )}
        </div>
      </section>
      <ButtonGroup className="mt-6">
        <Button link={`/statistics/incomes/${id}/edit`}>Edit</Button>
        <IncomeDeleteModal handleDelete={handleDelete} />
      </ButtonGroup>
    </>
  );
};
|
// Raw markdown (with a fenced jsx block) showing AdminBadge usage, consumed
// by the docs/styleguide renderer. The fence backticks are escaped so they
// survive inside this template literal — the string content must not change.
export default `
\`\`\`jsx
import React from 'react';
import AdminBadge from 'anchor-ui/admin-badge';
const AdminBadgeExample = () => (
  <section>
    <AdminBadge />
    <AdminBadge text="Custom Text" />
    <AdminBadge text="Inverted" inverted />
  </section>
);
export default AdminBadgeExample;
\`\`\`
`;
|
<reponame>Urvashi2707/npi<filename>src/app/pages/services/addServicing.service 2.ts
import { Injectable } from '@angular/core';
import { HttpClient,HttpHeaders } from '@angular/common/http';
import { Router } from '@angular/router';
import 'rxjs/add/operator/map';
import { Observable,Subject,BehaviorSubject } from 'rxjs/Rx';
import { environment } from '../../../environments/environment';
/**
 * Shared servicing service: wraps the Easy Auto eligibility endpoints and the
 * 21North integration, provides RxJS-based message channels between
 * components, and holds some simple in-memory state.
 *
 * Fix: dashBoardItemsChanges() now returns the dashboard observable — it
 * previously returned a reference to the dashBoardItems method itself.
 *
 * NOTE(review): endpoint URLs mix http and https and point at a UAT host;
 * presumably they should come from `environment` (see the commented lines
 * below) — confirm before release.
 */
@Injectable()
export class ServicingService {
  // Dashboard table rows: publish via dashBoardItems(), subscribe via
  // dashBoardTableObs or dashBoardItemsChanges().
  private dashboardTable = new BehaviorSubject([]);
  dashBoardTableObs = this.dashboardTable.asObservable();
  cityList:any = [];
  cityList2:any;
  result:any;
  public getCredentailss:any;
  private subject12 = new BehaviorSubject({});
  private getCredentail = new BehaviorSubject({});
  getCredentailObs = this.getCredentail.asObservable();
  private subject = new Subject<any>();
  // private behaviorSubject = new BehaviorSubject<any>();
  constructor(private http: HttpClient,private router: Router) {
  }
  //Easy Auto
  private getSession_url :string = 'https://plsuat.europassistance.in:8000/api/eaiExt/getsession';
  private Ea_check1_url:string = 'http://plsuat.europassistance.in:444/checkInitialEligibility';
  private Ea_check2_url:string = 'http://plsuat.europassistance.in:444/checkFinalEligibility';
  public destroySession_url = 'http://plsuat.europassistance.in:444/destroysession';
  //21North
  // public url = environment.Mainurl;
  // public logout_url:string = environment.logout_url;
  // public graph:string = environment.graph;
  // public slotgraph = environment.slot_graph;
  check_url = 'http://m.21north.in/notify/eaws.php';
  public data:string;
  data1 = "vfdxvxd";
  public httpOptions = {
    headers: new HttpHeaders({'Content-Type': 'application/json'}),
    withCredentials: true
  };
  public options = {
    headers: new HttpHeaders({'Content-Type': 'application/json'}),
  }
  public opt={
    headers: new HttpHeaders({'Content-Type':'application/json','x-auth-token':'<PASSWORD>','x-auth-user':'21NorthUser01'})
  }
  // NOTE(review): these headers are built once when the service is
  // instantiated, so a token written to sessionStorage afterwards is not
  // picked up — confirm this is intended.
  public opt1={
    headers: new HttpHeaders({'x-auth-token': sessionStorage.getItem('token'),'x-auth-user':sessionStorage.getItem('auth-user'),'Content-Type': 'application/json'})
  }
  // Publish a message (plus a show-button flag) to getMessage() subscribers.
  sendMessage(message: string,btn:string) {
    this.subject.next({ text: message ,show_btn:btn});
  }
  // Emit an empty event so subscribers can clear any displayed message.
  clearMessage() {
    this.subject.next();
  }
  getMessage(): Observable<any> {
    return this.subject.asObservable();
  }
  // Start an Easy Auto session on the backend.
  session(){
    const reqpara1 = {}
    return this.http.post(this.getSession_url,reqpara1,this.opt)
  }
  sendMessage12(Objt) {
    this.subject12.next(Objt);
  }
  // clearMessage12() {
  //   this.subject12.next();
  // }
  getMessage12(): Observable<any> {
    // console.log("don is called")
    return this.subject12.asObservable();
  }
  setCred(Cred) {
    // this.getCredentail.next(Cred);
    this.getCredentailss = Cred;
    // console.log("settttererere",this.getCredentailss);
  }
  getCred(){
    // console.log("gettterr clled",this.getCredentailss);
    return this.getCredentailss;
  }
  setter(Data){
    this.data = Data;
  }
  getter(): string{
    return this.data;
  }
  // Publish a new set of dashboard rows.
  dashBoardItems(dashBoardItems) {
    this.dashboardTable.next(dashBoardItems);
  }
  // Observable stream of dashboard row updates.
  // BUG FIX: previously returned `this.dashBoardItems` — a reference to the
  // publishing method itself — instead of the observable, so subscribers
  // could never receive data through this accessor.
  dashBoardItemsChanges() {
    return this.dashBoardTableObs;
  }
  getCountryCode(){
    return [
      {
        code : "+91",
        flag: '../../../assets/images/Flag/Flag_of_India.svg'
      },
      {
        code : "+65",
        flag: '../../../assets/images/Flag/Flag_of_Singapore.svg'
      }
    ]
  }
  // Ask the browser for the current position; results are delivered through
  // the showPosition/showError callbacks.
  getLocation() {
    if (navigator.geolocation) {
      navigator.geolocation.getCurrentPosition(this.showPosition, this.showError);
    } else {
      // x.innerHTML = "Geolocation is not supported by this browser.";
    }
  }
  showPosition(position) {
    sessionStorage.setItem("allow_login","true");
    var lat = position.coords.latitude;
    var lng = position.coords.longitude
    return {lat: lat,lng:lng}
  }
  // Map geolocation errors to the sessionStorage flags/messages read by the UI.
  showError(error) {
    switch(error.code) {
      case error.PERMISSION_DENIED:
        sessionStorage.setItem("allow_login","false");
        console.log("User denied the request for Geolocation.");
        sessionStorage.setItem('loc_msg','User denied the request for Geolocation.');
        break;
      case error.POSITION_UNAVAILABLE:
        console.log("Location information is unavailable.")
        sessionStorage.setItem('loc_msg','Location information is unavailable.');
        break;
      case error.TIMEOUT:
        console.log("The request to get user location timed out.");
        sessionStorage.setItem('loc_msg','The request to get user location timed out.');
        break;
      case error.UNKNOWN_ERROR:
        console.log("An unknown error occurred.");
        break;
    }
  }
  // Easy Auto eligibility checks (initial and final stage).
  Initialcheck(reqpara){
    return this.http.post(this.Ea_check1_url,reqpara,this.opt1)
  }
  Finalcheck(reqpara){
    return this.http.post(this.Ea_check2_url,reqpara,this.opt1 )
  }
  destroySession(){
    const reqpara = {}
    return this.http.post(this.destroySession_url,reqpara,this.opt1 )
  }
}
|
<reponame>jiangjing1994/process-designer
import AppButton from './Button'
const components = [AppButton]
// Install the component library into Vue. Guarded so repeated Vue.use()
// calls register the components only once.
const install = function (Vue, opts = {}) {
  if (install.installed) return
  // BUG FIX: the flag was never set, so the guard above could never trip and
  // the components were re-registered on every install() call.
  install.installed = true
  // forEach (not map): iterated purely for the registration side effect
  components.forEach((component) => Vue.component(component.name, component))
}
// Auto-install when the library is loaded via a <script> tag and Vue is
// exposed on window (skipped for module builds, where Vue.use() is called).
if (typeof window !== 'undefined' && window.Vue) {
  install(window.Vue)
}
// Plugin object consumed by Vue.use(); individual components also exported.
export default {
  install,
  AppButton,
}
|
<filename>src/framework/Factory.ts
import type { IFactory, FactoryTypeRecord, ParamsOf, IApplication } from '../interfaces'
import type * as types from '../types'
/**
 * Generic object factory backed by the application's dependency injector.
 * The constructor registers every entry of `types` with the injector; later
 * calls to create() resolve a registered constructor by key and instantiate
 * it with the supplied parameters.
 */
export class ObjectFactory<Types extends FactoryTypeRecord = any> implements IFactory<Types>
{
    constructor(
        private application: IApplication<any>,
        private types: FactoryTypeRecord = {},
    ) {
        // forEach (not map): iterated purely for the side effect of
        // registering each provided type with the injector.
        Object.entries(types).forEach(([key, value]) => {
            application.container
                .getInjector()
                .insertDependency(key, value)
        })
    }

    /**
     * Instantiate the implementation registered under `type` with `params`.
     * @throws when no constructor is registered for the key.
     */
    create<TypeKey extends keyof Types, Impl extends Types[TypeKey]>(
        type: TypeKey,
        ...params: ParamsOf<Impl>[]
    ) {
        const Ctor = this.getInjectorDependency<Impl>(type as string)
        if (Ctor) return new Ctor(...params)
        throw new Error("Factory couldn't create type: " + type)
    }

    /** All registered dependencies, keyed by name. */
    getTypes(): Record<string, types.Type<any>> {
        return this.application.container
            .getInjector()
            .dependenciesAsJSON()
    }

    /**
     * Resolve a dependency by constructor or string key.
     * @throws when the dependency has not been registered.
     */
    getInjectorDependency<T>(klass: types.Type<T> | string) {
        const dependency = this.application.container
            .getInjector()
            .getDependency(klass)
        if (dependency == null)
            throw new Error(`Dependency not registered: ${klass}`)
        return dependency
    }
}
/**
 * ObjectFactory variant for debugging dependency resolution; keeps a hook
 * where resolved dependencies can be logged.
 */
export class DebugObjectFactory<T extends FactoryTypeRecord> extends ObjectFactory<T> {
    // Parameter widened to `types.Type<T> | string` so the override matches
    // the base-class signature (the base also accepts string keys).
    getInjectorDependency<T>(klass: types.Type<T> | string) {
        const result = super.getInjectorDependency(klass)
        // console.log('got injector dep:', result)
        return result
    }
}
|
#!/bin/bash
. ./cmd.sh
# This is as run_sgmm2.sh but uses the "SGMM2" version of the code and
# scripts, with various improvements.
# Build a SGMM2 system on just the 100k_nodup data, on top of LDA+MLLT+SAT.
# Train the UBM only when it is not already present (keeps re-runs cheap).
if [ ! -f exp/ubm5a/final.ubm ]; then
  steps/train_ubm.sh --cmd "$train_cmd" 700 data/train_100k_nodup data/lang \
    exp/tri4a_ali_100k_nodup exp/ubm5a || exit 1;
fi
steps/train_sgmm2.sh --cmd "$train_cmd" \
  9000 30000 data/train_100k_nodup data/lang exp/tri4a_ali_100k_nodup \
  exp/ubm5a/final.ubm exp/sgmm2_5a || exit 1;
# Build the decoding graph and decode eval2000 with the ML-trained system.
utils/mkgraph.sh data/lang_test exp/sgmm2_5a exp/sgmm2_5a/graph || exit 1;
steps/decode_sgmm2.sh --cmd "$decode_cmd" --config conf/decode.config \
  --nj 30 --transform-dir exp/tri4a/decode_eval2000 \
  exp/sgmm2_5a/graph data/eval2000 exp/sgmm2_5a/decode_eval2000
# Now discriminatively train the SGMM system on 100k_nodup data.
steps/align_sgmm2.sh --nj 30 --cmd "$train_cmd" --transform-dir exp/tri4a_ali_100k_nodup \
  --use-graphs true --use-gselect true data/train_100k_nodup data/lang exp/sgmm2_5a exp/sgmm2_5a_ali_100k_nodup
# Took the beam down to 10 to get acceptable decoding speed.
steps/make_denlats_sgmm2.sh --nj 30 --sub-split 30 --beam 9.0 --lattice-beam 6 --cmd "$decode_cmd" \
  --transform-dir exp/tri4a_ali_100k_nodup \
  data/train_100k_nodup data/lang exp/sgmm2_5a_ali_100k_nodup exp/sgmm2_5a_denlats_100k_nodup
# Boosted MMI training on top of the SGMM2 alignments and denominator lattices.
steps/train_mmi_sgmm2.sh --cmd "$decode_cmd" --transform-dir exp/tri4a_ali_100k_nodup --boost 0.1 \
  data/train_100k_nodup data/lang exp/sgmm2_5a_ali_100k_nodup exp/sgmm2_5a_denlats_100k_nodup exp/sgmm2_5a_mmi_b0.1
# Rescore with each MMI iteration; decodes run in the background.
# NOTE(review): these background jobs are never waited on in this script.
for iter in 1 2 3 4; do
  steps/decode_sgmm2_rescore.sh --cmd "$decode_cmd" --iter $iter \
    --transform-dir exp/tri4a/decode_eval2000 data/lang_test data/eval2000 exp/sgmm2_5a/decode_eval2000 \
    exp/sgmm2_5a_mmi_b0.1/decode_eval2000_it$iter &
done
( # testing drop-frames.
steps/train_mmi_sgmm2.sh --cmd "$decode_cmd" --transform-dir exp/tri4a_ali_100k_nodup --boost 0.1 --drop-frames true \
  data/train_100k_nodup data/lang exp/sgmm2_5a_ali_100k_nodup exp/sgmm2_5a_denlats_100k_nodup exp/sgmm2_5a_mmi_b0.1_z
for iter in 1 2 3 4; do
  steps/decode_sgmm2_rescore.sh --cmd "$decode_cmd" --iter $iter \
    --transform-dir exp/tri4a/decode_eval2000 data/lang_test data/eval2000 exp/sgmm2_5a/decode_eval2000 \
    exp/sgmm2_5a_mmi_b0.1_z/decode_eval2000_it$iter &
done
wait
)
( # testing drop-frames.
# The same after a code speedup.
steps/train_mmi_sgmm2.sh --cmd "$decode_cmd" --transform-dir exp/tri4a_ali_100k_nodup --boost 0.1 --drop-frames true \
  data/train_100k_nodup data/lang exp/sgmm2_5a_ali_100k_nodup exp/sgmm2_5a_denlats_100k_nodup exp/sgmm2_5a_mmi_b0.1_z2
for iter in 1 2 3 4; do
  steps/decode_sgmm2_rescore.sh --cmd "$decode_cmd" --iter $iter \
    --transform-dir exp/tri4a/decode_eval2000 data/lang_test data/eval2000 exp/sgmm2_5a/decode_eval2000 \
    exp/sgmm2_5a_mmi_b0.1_z2/decode_eval2000_it$iter &
done
wait
)
|
/*
* Copyright (C) 2005-2015 <NAME> (<EMAIL>).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "3rdparty/catch/catch.hpp"
#include "utils.h"
#include "os.hpp"
#include <vector>
#include <string>
#include <algorithm>
#include "3btree/btree_index.h"
#include "3btree/btree_cursor.h"
#include "4db/db_local.h"
#include "4cursor/cursor.h"
#include "4env/env_local.h"
#include "4context/context.h"
using namespace hamsterdb;
/* Byte-wise lexicographic comparator for variable-length keys (memcmp-style:
 * compare the common prefix, then break ties on length).
 * FIX: the original loop iterated over all of lhs (i < lsz) while indexing
 * rhs[i], reading past the end of rhs whenever lsz > rsz, and it treated a
 * shorter key as equal to any key it is a prefix of. The db parameter is
 * unused but required by the comparator signature. */
static int
slot_key_cmp(ham_db_t *db, const uint8_t *lhs, uint32_t lsz,
                const uint8_t *rhs, uint32_t rsz)
{
  // Only compare bytes that exist in both keys
  uint32_t n = lsz < rsz ? lsz : rsz;
  for (uint32_t i = 0; i < n; ++i) {
    if (lhs[i] != rhs[i]) {
      return lhs[i] > rhs[i] ? 1 : -1;
    }
  }
  if (lsz == rsz)
    return 0;
  // Common prefix equal: the shorter key sorts first
  return lsz < rsz ? -1 : 1;
}
struct ApproxFixture {
ham_db_t *m_db;
ham_env_t *m_env;
ham_txn_t *m_txn;
  // Creates a fresh transactional environment and database, and opens the
  // transaction used by all test helpers.
  ApproxFixture() {
    // Remove any stale test file; failure (file absent) is ignored
    (void)os::unlink(Utils::opath(".test"));
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"),
            HAM_ENABLE_TRANSACTIONS, 0664, 0));
    REQUIRE(0 == ham_env_create_db(m_env, &m_db, 1, 0, 0));
    REQUIRE(0 == ham_txn_begin(&m_txn, m_env, 0, 0, 0));
  }
  ~ApproxFixture() {
    teardown();
  }
  // Aborts the open transaction (if any) and closes the environment.
  void teardown() {
    if (m_txn) {
      REQUIRE(0 == ham_txn_abort(m_txn, 0));
      m_txn = 0;
    }
    REQUIRE(0 == ham_env_close(m_env, HAM_AUTO_CLEANUP));
  }
  // Insert key `s` (record payload = key bytes) directly into the btree,
  // bypassing the transaction tree.
  ham_status_t insertBtree(const char *s) {
    ham_key_t k = {};
    k.data = (void *)s;
    k.size = strlen(s) + 1;
    ham_record_t r = {};
    r.data = k.data;
    r.size = k.size;
    Context context((LocalEnvironment *)m_env, 0, 0);
    BtreeIndex *be = ((LocalDatabase *)m_db)->btree_index();
    return (be->insert(&context, 0, &k, &r, 0));
  }
  // Insert key `s` through the public API inside the fixture's transaction.
  ham_status_t insertTxn(const char *s, uint32_t flags = 0) {
    ham_key_t k = {};
    k.data = (void *)s;
    k.size = strlen(s) + 1;
    ham_record_t r = {};
    r.data = k.data;
    r.size = k.size;
    return (ham_db_insert(m_db, m_txn, &k, &r, flags));
  }
  // Erase key `s` inside the fixture's transaction.
  ham_status_t eraseTxn(const char *s) {
    ham_key_t k = {};
    k.data = (void *)s;
    k.size = strlen(s)+1;
    return (ham_db_erase(m_db, m_txn, &k, 0));
  }
  // Looks up `search` with the given approximate-match flags and checks that
  // the result equals `expected`; returns non-zero on any mismatch so callers
  // can write REQUIRE(0 == find(...)).
  ham_status_t find(uint32_t flags, const char *search, const char *expected) {
    ham_key_t k = {};
    k.data = (void *)search;
    k.size = strlen(search) + 1;
    ham_record_t r = {};
    ham_status_t st = ham_db_find(m_db, m_txn, &k, &r, flags);
    if (st)
      return (st);
    // If a different key was returned it must be flagged as approximate
    if (strcmp(expected, (const char *)k.data))
      REQUIRE((ham_key_get_intflags(&k) & BtreeKey::kApproximate));
    // Records mirror their keys (see insert helpers), so compare the record
    return (::strcmp(expected, (const char *)r.data));
  }
void lessThanTest1() {
// btree < nil
REQUIRE(0 == insertBtree("1"));
REQUIRE(0 == find(HAM_FIND_LT_MATCH, "2", "1"));
}
void lessThanTest2() {
// txn < nil
REQUIRE(0 == insertTxn("2"));
REQUIRE(0 == find(HAM_FIND_LT_MATCH, "3", "2"));
}
void lessThanTest3() {
// btree < txn
REQUIRE(0 == insertBtree("10"));
REQUIRE(0 == insertTxn("11"));
REQUIRE(0 == find(HAM_FIND_LT_MATCH, "11", "10"));
}
void lessThanTest4() {
// txn < btree
REQUIRE(0 == insertTxn("20"));
REQUIRE(0 == insertBtree("21"));
REQUIRE(0 == find(HAM_FIND_LT_MATCH, "21", "20"));
}
  // LT: both keys live in the btree; search the larger, expect the smaller.
  void lessThanTest5() {
    // btree < btree
    REQUIRE(0 == insertBtree("30"));
    REQUIRE(0 == insertBtree("31"));
    REQUIRE(0 == find(HAM_FIND_LT_MATCH, "31", "30"));
  }
  // LT: both keys live in the txn tree; search the larger, expect the smaller.
  void lessThanTest6() {
    // txn < txn
    REQUIRE(0 == insertTxn("40"));
    REQUIRE(0 == insertTxn("41"));
    REQUIRE(0 == find(HAM_FIND_LT_MATCH, "41", "40"));
  }
  // LT: the direct predecessor ("51") was erased in the txn, so the search
  // must skip its empty txn node and fall back to "50".
  void lessThanTest7() {
    // txn < txn w/ empty node
    REQUIRE(0 == insertTxn("50"));
    REQUIRE(0 == insertTxn("51"));
    REQUIRE(0 == insertTxn("52"));
    REQUIRE(0 == eraseTxn("51"));
    REQUIRE(0 == find(HAM_FIND_LT_MATCH, "52", "50"));
  }
  // LT: same scenario as lessThanTest7 with a different key range — the
  // erased middle key "61" must be skipped.
  void lessThanTest8() {
    // txn < txn w/ empty node
    REQUIRE(0 == insertTxn("60"));
    REQUIRE(0 == insertTxn("61"));
    REQUIRE(0 == insertTxn("62"));
    REQUIRE(0 == eraseTxn("61"));
    REQUIRE(0 == find(HAM_FIND_LT_MATCH, "62", "60"));
  }
  // LT: a btree key shadowed by a txn-erase must be skipped; the search
  // lands on the txn key "70" instead.
  void lessThanTest9() {
    // skip erased btree
    REQUIRE(0 == insertBtree("71"));
    REQUIRE(0 == eraseTxn("71"));
    REQUIRE(0 == insertTxn("70"));
    REQUIRE(0 == find(HAM_FIND_LT_MATCH, "71", "70"));
  }
  // LT: two consecutive btree keys ("81", "82") were erased in the txn;
  // the search for a predecessor of "83" must skip both and reach "80".
  void lessThanTest10() {
    // skip 2 erased btree keys
    REQUIRE(0 == insertBtree("80"));
    REQUIRE(0 == insertBtree("81"));
    REQUIRE(0 == eraseTxn("81"));
    REQUIRE(0 == insertBtree("82"));
    REQUIRE(0 == eraseTxn("82"));
    REQUIRE(0 == insertTxn("83"));
    REQUIRE(0 == find(HAM_FIND_LT_MATCH, "83", "80"));
  }
  // LT: btree keys overwritten via txn (HAM_OVERWRITE) must still count as
  // single keys; predecessor of "93" is "92".
  void lessThanTest11() {
    // overwrite btree
    REQUIRE(0 == insertBtree("92"));
    REQUIRE(0 == insertTxn("92", HAM_OVERWRITE));
    REQUIRE(0 == insertBtree("93"));
    REQUIRE(0 == insertTxn("93", HAM_OVERWRITE));
    REQUIRE(0 == find(HAM_FIND_LT_MATCH, "93", "92"));
  }
  // LEQ: no exact match exists; must fall back to the smaller btree key.
  void lessOrEqualTest1() {
    // btree < nil
    REQUIRE(0 == insertBtree("1"));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "2", "1"));
  }
  // LEQ: exact btree match exists and must be preferred over any smaller key.
  void lessOrEqualTest2() {
    // btree = nil
    REQUIRE(0 == insertBtree("2"));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "2", "2"));
  }
  // LEQ: no exact match; must fall back to the smaller txn key.
  void lessOrEqualTest3() {
    // txn < nil
    REQUIRE(0 == insertTxn("3"));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "4", "3"));
  }
  // LEQ: txn key smaller than the search key is returned when no match exists.
  void lessOrEqualTest4() {
    // txn = nil
    REQUIRE(0 == insertTxn("4"));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "5", "4"));
  }
  // LEQ: exact match in the txn tree wins over the smaller btree key.
  void lessOrEqualTest5() {
    // btree < txn
    REQUIRE(0 == insertBtree("10"));
    REQUIRE(0 == insertTxn("11"));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "11", "11"));
  }
  // LEQ: exact btree match wins; a miss ("22") falls back to btree key "21".
  void lessOrEqualTest6() {
    // txn < btree
    REQUIRE(0 == insertTxn("20"));
    REQUIRE(0 == insertBtree("21"));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "21", "21"));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "22", "21"));
  }
  // LEQ: both keys in the btree; exact match first, then fallback on a miss.
  void lessOrEqualTest7() {
    // btree < btree
    REQUIRE(0 == insertBtree("30"));
    REQUIRE(0 == insertBtree("31"));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "31", "31"));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "32", "31"));
  }
  // LEQ: both keys in the txn tree; exact match first, then fallback on a miss.
  void lessOrEqualTest8() {
    // txn < txn
    REQUIRE(0 == insertTxn("40"));
    REQUIRE(0 == insertTxn("41"));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "41", "41"));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "42", "41"));
  }
  // LEQ: exact match lives in the txn tree while a smaller key is in the btree.
  void lessOrEqualTest9() {
    // txn =
    REQUIRE(0 == insertBtree("50"));
    REQUIRE(0 == insertTxn("51"));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "51", "51"));
  }
  // LEQ: exact match lives in the btree while a smaller key is in the txn tree.
  void lessOrEqualTest10() {
    // btree =
    REQUIRE(0 == insertTxn("60"));
    REQUIRE(0 == insertBtree("61"));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "61", "61"));
  }
  // LEQ: the exact-match candidate "71" was erased in the txn, so the
  // search must skip the empty node and return "70".
  void lessOrEqualTest11() {
    // txn < txn w/ empty node
    REQUIRE(0 == insertTxn("70"));
    REQUIRE(0 == insertTxn("71"));
    REQUIRE(0 == eraseTxn("71"));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "71", "70"));
  }
  // LEQ: three consecutive candidates ("81", "82", "83") are all erased;
  // the search must skip every one of them and land on "80".
  void lessOrEqualTest12() {
    // skip 3 erased btree keys
    REQUIRE(0 == insertBtree("80"));
    REQUIRE(0 == insertBtree("81"));
    REQUIRE(0 == eraseTxn("81"));
    REQUIRE(0 == insertBtree("82"));
    REQUIRE(0 == eraseTxn("82"));
    REQUIRE(0 == insertTxn("83"));
    REQUIRE(0 == eraseTxn("83"));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "83", "80"));
  }
  // LEQ: btree keys overwritten via txn remain exact-match candidates.
  void lessOrEqualTest13() {
    // overwrite btree
    REQUIRE(0 == insertBtree("92"));
    REQUIRE(0 == insertTxn("92", HAM_OVERWRITE));
    REQUIRE(0 == insertBtree("93"));
    REQUIRE(0 == insertTxn("93", HAM_OVERWRITE));
    REQUIRE(0 == find(HAM_FIND_LEQ_MATCH, "93", "93"));
  }
  // GT: only a btree key exists; searching "1" must yield successor "2".
  void greaterThanTest1() {
    // btree > nil
    REQUIRE(0 == insertBtree("2"));
    REQUIRE(0 == find(HAM_FIND_GT_MATCH, "1", "2"));
  }
  // GT: only a txn key exists; searching "3" must yield successor "4".
  void greaterThanTest2() {
    // txn > nil
    REQUIRE(0 == insertTxn("4"));
    REQUIRE(0 == find(HAM_FIND_GT_MATCH, "3", "4"));
  }
  // GT: successor of a txn key lives in the btree.
  void greaterThanTest3() {
    // btree > txn
    REQUIRE(0 == insertTxn("10"));
    REQUIRE(0 == insertBtree("11"));
    REQUIRE(0 == find(HAM_FIND_GT_MATCH, "10", "11"));
  }
  // GT: successor of a btree key lives in the txn tree.
  void greaterThanTest4() {
    // txn > btree
    REQUIRE(0 == insertBtree("20"));
    REQUIRE(0 == insertTxn("21"));
    REQUIRE(0 == find(HAM_FIND_GT_MATCH, "20", "21"));
  }
  // GT: both keys in the btree; search the smaller, expect the larger.
  void greaterThanTest5() {
    // btree > btree
    REQUIRE(0 == insertBtree("30"));
    REQUIRE(0 == insertBtree("31"));
    REQUIRE(0 == find(HAM_FIND_GT_MATCH, "30", "31"));
  }
  // GT: both keys in the txn tree; search the smaller, expect the larger.
  void greaterThanTest6() {
    // txn > txn
    REQUIRE(0 == insertTxn("40"));
    REQUIRE(0 == insertTxn("41"));
    REQUIRE(0 == find(HAM_FIND_GT_MATCH, "40", "41"));
  }
  // GT: the direct successor ("51") was erased; the search must skip its
  // empty txn node and return "52".
  void greaterThanTest7() {
    // txn > txn w/ empty node
    REQUIRE(0 == insertTxn("50"));
    REQUIRE(0 == insertTxn("51"));
    REQUIRE(0 == eraseTxn("51"));
    REQUIRE(0 == insertTxn("52"));
    REQUIRE(0 == find(HAM_FIND_GT_MATCH, "50", "52"));
  }
  // GT: two consecutive btree successors ("81", "82") were erased in the
  // txn; the search must skip both and reach the txn key "83".
  void greaterThanTest8() {
    // skip 2 erased btree keys
    REQUIRE(0 == insertBtree("81"));
    REQUIRE(0 == eraseTxn("81"));
    REQUIRE(0 == insertBtree("82"));
    REQUIRE(0 == eraseTxn("82"));
    REQUIRE(0 == insertTxn("83"));
    REQUIRE(0 == find(HAM_FIND_GT_MATCH, "80", "83"));
  }
  // Fixed-size binary keys (32 bytes): insert an all-zero key, then search
  // with a key whose last byte is 1 and expect the all-zero key back.
  // NOTE(review): despite the method name this exercises HAM_FIND_LT_MATCH,
  // and the body is identical to greaterThanTest() — confirm whether a
  // GT variant was intended here.
  void greaterThanTest9() {
    // recreate the environment without transactions and with a fixed
    // 32-byte binary key type
    teardown();
    ham_parameter_t param[] = {
      {HAM_PARAM_KEY_TYPE, HAM_TYPE_BINARY},
      {HAM_PARAM_KEY_SIZE, 32},
      {0, 0}
    };
    REQUIRE(0 == ham_env_create(&m_env, Utils::opath(".test"), 0, 0664, 0));
    REQUIRE(0 == ham_env_create_db(m_env, &m_db, 1, 0, &param[0]));
    char data[32] = {0};
    ham_key_t key = ham_make_key(&data[0], sizeof(data));
    ham_record_t rec = {0};
    REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
    // make the search key strictly greater than the stored key
    data[31] = 1;
    REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_LT_MATCH));
    // the returned key must be the original all-zero key
    char newdata[32] = {0};
    REQUIRE(0 == ::memcmp(key.data, &newdata[0], sizeof(newdata)));
  }
  // GEQ: no exact match; must fall back to the larger btree key.
  void greaterOrEqualTest1() {
    // btree > nil
    REQUIRE(0 == insertBtree("1"));
    REQUIRE(0 == find(HAM_FIND_GEQ_MATCH, "0", "1"));
  }
  // GEQ: exact btree match exists and must be returned unchanged.
  void greaterOrEqualTest2() {
    // btree = nil
    REQUIRE(0 == insertBtree("3"));
    REQUIRE(0 == find(HAM_FIND_GEQ_MATCH, "3", "3"));
  }
  // GEQ: no exact match; must fall back to the larger txn key.
  void greaterOrEqualTest3() {
    // txn > nil
    REQUIRE(0 == insertTxn("5"));
    REQUIRE(0 == find(HAM_FIND_GEQ_MATCH, "4", "5"));
  }
  // GEQ: exact txn match exists and must be returned unchanged.
  void greaterOrEqualTest4() {
    // txn = nil
    REQUIRE(0 == insertTxn("7"));
    REQUIRE(0 == find(HAM_FIND_GEQ_MATCH, "7", "7"));
  }
  // GEQ: exact txn match wins; a miss ("10") falls forward to txn key "11".
  void greaterOrEqualTest5() {
    // btree > txn
    REQUIRE(0 == insertTxn("11"));
    REQUIRE(0 == insertBtree("12"));
    REQUIRE(0 == find(HAM_FIND_GEQ_MATCH, "11", "11"));
    REQUIRE(0 == find(HAM_FIND_GEQ_MATCH, "10", "11"));
  }
  // GEQ: a miss ("19") falls forward to btree key "20"; exact match returns it.
  void greaterOrEqualTest6() {
    // txn > btree
    REQUIRE(0 == insertBtree("20"));
    REQUIRE(0 == insertTxn("21"));
    REQUIRE(0 == find(HAM_FIND_GEQ_MATCH, "19", "20"));
    REQUIRE(0 == find(HAM_FIND_GEQ_MATCH, "20", "20"));
  }
  // GEQ: both keys in the btree; the exact match must be preferred.
  void greaterOrEqualTest7() {
    // btree > btree
    REQUIRE(0 == insertBtree("30"));
    REQUIRE(0 == insertBtree("31"));
    REQUIRE(0 == find(HAM_FIND_GEQ_MATCH, "31", "31"));
  }
  // GEQ: both keys in the txn tree; the exact match must be preferred.
  void greaterOrEqualTest8() {
    // txn > txn
    REQUIRE(0 == insertTxn("40"));
    REQUIRE(0 == insertTxn("41"));
    REQUIRE(0 == find(HAM_FIND_GEQ_MATCH, "41", "41"));
  }
  // GEQ: exact match lives in the txn tree while a smaller key is in the btree.
  void greaterOrEqualTest9() {
    // txn =
    REQUIRE(0 == insertBtree("50"));
    REQUIRE(0 == insertTxn("51"));
    REQUIRE(0 == find(HAM_FIND_GEQ_MATCH, "51", "51"));
  }
  // GEQ: exact match lives in the btree while a smaller key is in the txn tree.
  void greaterOrEqualTest10() {
    // btree =
    REQUIRE(0 == insertTxn("60"));
    REQUIRE(0 == insertBtree("61"));
    REQUIRE(0 == find(HAM_FIND_GEQ_MATCH, "61", "61"));
  }
  // GEQ: the exact-match candidate "71" was erased; the search must skip
  // the empty txn node and return "72".
  void greaterOrEqualTest11() {
    // txn > txn w/ empty node
    REQUIRE(0 == insertTxn("71"));
    REQUIRE(0 == eraseTxn("71"));
    REQUIRE(0 == insertTxn("72"));
    REQUIRE(0 == find(HAM_FIND_GEQ_MATCH, "71", "72"));
  }
  // GEQ: the exact match ("81") and the next candidate ("82") are both
  // erased; the search must skip them and land on the txn key "83".
  void greaterOrEqualTest12() {
    // skip erased btree keys
    REQUIRE(0 == insertBtree("81"));
    REQUIRE(0 == eraseTxn("81"));
    REQUIRE(0 == insertBtree("82"));
    REQUIRE(0 == eraseTxn("82"));
    REQUIRE(0 == insertTxn("83"));
    REQUIRE(0 == find(HAM_FIND_GEQ_MATCH, "81", "83"));
  }
  // Regression test for issue #44: GEQ lookups with a custom compare
  // function and fixed-size 41-byte keys. Inserts five keys and verifies
  // that searching below the smallest ("10") returns the first key ("11").
  void issue44Test() {
    // recreate the environment with HAM_TYPE_CUSTOM keys and a
    // user-supplied compare function (slot_key_cmp, defined elsewhere)
    teardown();
    ham_parameter_t param[] = {
      {HAM_PARAM_KEY_TYPE, HAM_TYPE_CUSTOM},
      {HAM_PARAM_KEY_SIZE, 41},
      {0, 0}
    };
    REQUIRE(0 == ham_env_create(&m_env, Utils::opath(".test"), 0, 0664, 0));
    REQUIRE(0 == ham_env_create_db(m_env, &m_db, 1, 0, &param[0]));
    REQUIRE(0 == ham_db_set_compare_func(m_db, slot_key_cmp));
    const char *values[] = { "11", "22", "33", "44", "55" };
    for (int i = 0; i < 5; i++) {
      char keydata[41];
      // copy the 2-char string plus its null terminator; the remaining
      // bytes of the 41-byte key buffer are left uninitialized
      ::memcpy(&keydata[0], values[i], 3);
      ham_key_t key = ham_make_key(&keydata[0], sizeof(keydata));
      ham_record_t rec = ham_make_record((void *)values[i], 3);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
    }
    char keydata[41];
    ::memcpy(&keydata[0], "10", 3);
    ham_key_t key = ham_make_key((void *)keydata, sizeof(keydata));
    ham_record_t rec = {0};
    REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GEQ_MATCH));
    REQUIRE(0 == ::strcmp((char *)key.data, "11"));
    REQUIRE(0 == ::strcmp((char *)rec.data, "11"));
  }
  // Regression test for issue #46: a GEQ lookup of a key that exists in
  // the btree but was erased in the txn must still succeed (skipping the
  // erased node) rather than fail.
  void issue46Test() {
    REQUIRE(0 == insertBtree("aa"));
    REQUIRE(0 == eraseTxn("aa"));
    ham_key_t key = ham_make_key((void *)"aa", 3);
    ham_record_t rec = {0};
    REQUIRE(0 == ham_db_find(m_db, m_txn, &key, &rec, HAM_FIND_GEQ_MATCH));
  }
  // Regression test for issue #52: a cursor positioned via
  // HAM_FIND_GEQ_MATCH must iterate correctly across duplicate keys with
  // large (blob-sized) records.
  // NOTE(review): the 525933-byte buffer lives on the stack — large for
  // some platforms' default stack size; confirm this is intentional.
  void issue52Test() {
    // recreate the environment with uint64 keys and duplicate support
    teardown();
    uint8_t buffer[525933] = {0};
    ham_parameter_t param[] = {
      {HAM_PARAM_KEY_TYPE, HAM_TYPE_UINT64},
      {0, 0}
    };
    REQUIRE(0 == ham_env_create(&m_env, Utils::opath(".test"),
                    HAM_ENABLE_TRANSACTIONS, 0664, 0));
    REQUIRE(0 == ham_env_create_db(m_env, &m_db, 1,
                    HAM_ENABLE_DUPLICATE_KEYS, &param[0]));
    uint64_t k1 = 1;
    uint64_t k2 = 2;
    uint64_t k3 = 3;
    ham_key_t key1 = ham_make_key(&k1, sizeof(k1));
    ham_key_t key2 = ham_make_key(&k2, sizeof(k2));
    ham_key_t key3 = ham_make_key(&k3, sizeof(k3));
    // records of varying sizes, all sharing the same zeroed buffer;
    // key2 and key3 each receive two duplicates
    ham_record_t rec1 = ham_make_record(&buffer[0], 46228);
    ham_record_t rec11 = ham_make_record(&buffer[0], 446380);
    ham_record_t rec12 = ham_make_record(&buffer[0], 525933);
    ham_record_t rec21 = ham_make_record(&buffer[0], 334157);
    ham_record_t rec22 = ham_make_record(&buffer[0], 120392);
    REQUIRE(0 == ham_db_insert(m_db, 0, &key1, &rec1, HAM_DUPLICATE));
    REQUIRE(0 == ham_db_insert(m_db, 0, &key2, &rec11, HAM_DUPLICATE));
    REQUIRE(0 == ham_db_insert(m_db, 0, &key2, &rec12, HAM_DUPLICATE));
    REQUIRE(0 == ham_db_insert(m_db, 0, &key3, &rec21, HAM_DUPLICATE));
    REQUIRE(0 == ham_db_insert(m_db, 0, &key3, &rec22, HAM_DUPLICATE));
    ham_txn_t *txn;
    ham_cursor_t *c;
    REQUIRE(0 == ham_txn_begin(&txn, m_env, 0, 0, 0));
    REQUIRE(0 == ham_cursor_create(&c, m_db, txn, 0));
    ham_key_t key = {0};
    ham_record_t rec = {0};
    // position on key 1, then walk forward across all duplicates in
    // insertion order, checking key value and record size at every step
    REQUIRE(0 == ham_cursor_find(c, &key1, &rec, HAM_FIND_GEQ_MATCH));
    REQUIRE(1u == *(unsigned long long *)key1.data);
    REQUIRE(rec1.size == rec.size);
    REQUIRE(0 == ham_cursor_move(c, &key, &rec, HAM_CURSOR_NEXT));
    REQUIRE(2u == *(unsigned long long *)key.data);
    REQUIRE(rec11.size == rec.size);
    REQUIRE(0 == ham_cursor_move(c, &key, &rec, HAM_CURSOR_NEXT));
    REQUIRE(2u == *(unsigned long long *)key.data);
    REQUIRE(rec12.size == rec.size);
    REQUIRE(0 == ham_cursor_move(c, &key, &rec, HAM_CURSOR_NEXT));
    REQUIRE(3u == *(unsigned long long *)key.data);
    REQUIRE(rec21.size == rec.size);
    REQUIRE(0 == ham_cursor_move(c, &key, &rec, HAM_CURSOR_NEXT));
    REQUIRE(3u == *(unsigned long long *)key.data);
    REQUIRE(rec22.size == rec.size);
    REQUIRE(0 == ham_cursor_close(c));
    // cleanup is in teardown()
  }
  // Fixed-size binary keys (32 bytes): insert an all-zero key, then search
  // with a key whose last byte is 1 and expect the all-zero key back.
  // NOTE(review): despite the method name this exercises HAM_FIND_LT_MATCH
  // and duplicates greaterThanTest9() — confirm whether a GT variant was
  // intended.
  void greaterThanTest() {
    // recreate the environment without transactions and with a fixed
    // 32-byte binary key type
    teardown();
    ham_parameter_t param[] = {
      {HAM_PARAM_KEY_TYPE, HAM_TYPE_BINARY},
      {HAM_PARAM_KEY_SIZE, 32},
      {0, 0}
    };
    REQUIRE(0 == ham_env_create(&m_env, Utils::opath(".test"), 0, 0664, 0));
    REQUIRE(0 == ham_env_create_db(m_env, &m_db, 1, 0, &param[0]));
    char data[32] = {0};
    ham_key_t key = ham_make_key(&data[0], sizeof(data));
    ham_record_t rec = {0};
    REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
    // make the search key strictly greater than the stored key
    data[31] = 1;
    REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_LT_MATCH));
    // the returned key must be the original all-zero key
    char newdata[32] = {0};
    REQUIRE(0 == ::memcmp(key.data, &newdata[0], sizeof(newdata)));
  }
  // Bulk LT test on a btree-only database: insert keys 0..4999 produced by
  // |Generator|, then verify that key 0 has no predecessor and that an
  // LT-lookup of key i returns key i-1. The small page size (1024) forces
  // a multi-level btree.
  template<typename Generator>
  void btreeLessThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one (0 is
    // presumably "variable size" — inferred from this usage)
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"), 0, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 0; i < 5000; i++) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
    }
    // the smallest key has no predecessor
    gen.generate(0, &key);
    REQUIRE(HAM_KEY_NOT_FOUND
        == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_LT_MATCH));
    // LT of key i must return key i-1, byte for byte
    for (i = 1; i < 5000; i++) {
      gen.generate(i, &key);
      ham_key_t key2 = {0};
      gen2.generate(i - 1, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_LT_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
    }
  }
  // Bulk LEQ test on a btree-only database: insert even keys 0..9998, then
  // verify that an LEQ-lookup of an even key returns itself and of an odd
  // key returns the even key below it.
  template<typename Generator>
  void btreeLessEqualThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"), 0, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 0; i < 10000; i += 2) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
    }
    for (i = 0; i < 10000; i++) {
      gen.generate(i, &key);
      ham_key_t key2 = {0};
      // even keys match exactly; odd keys fall back to i-1
      gen2.generate(i & 1 ? i - 1 : i, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_LEQ_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
    }
  }
  // Bulk GT test on a btree-only database: insert keys 1..5000, then
  // verify that a GT-lookup of key i returns key i+1 and that the largest
  // key has no successor.
  template<typename Generator>
  void btreeGreaterThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"), 0, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 1; i <= 5000; i++) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
    }
    // GT of key i must return key i+1, byte for byte
    for (i = 0; i < 5000; i++) {
      gen.generate(i, &key);
      ham_key_t key2 = {0};
      gen2.generate(i + 1, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GT_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
    }
    // the largest key has no successor
    gen.generate(5000, &key);
    REQUIRE(HAM_KEY_NOT_FOUND
        == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GT_MATCH));
  }
  // Bulk GEQ test on a btree-only database: insert even keys 0..10000,
  // then verify that a GEQ-lookup of an even key returns itself and of an
  // odd key returns the even key above it.
  template<typename Generator>
  void btreeGreaterEqualThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"), 0, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 0; i <= 10000; i += 2) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
    }
    for (i = 0; i < 10000; i++) {
      gen.generate(i, &key);
      ham_key_t key2 = {0};
      // even keys match exactly; odd keys roll forward to i+1
      gen2.generate(i & 1 ? i + 1 : i, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GEQ_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
    }
    // the largest inserted key (10000) matches itself
    gen.generate(10000, &key);
    REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GEQ_MATCH));
  }
  // Bulk LT test with all keys inserted through an (uncommitted)
  // transaction: the txn tree alone must answer predecessor queries for
  // keys 0..4999.
  template<typename Generator>
  void txnLessThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"),
            HAM_ENABLE_TRANSACTIONS, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    REQUIRE(0 == ham_txn_begin(&m_txn, m_env, 0, 0, 0));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 0; i < 5000; i++) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, m_txn, &key, &rec, 0));
    }
    // the smallest key has no predecessor
    gen.generate(0, &key);
    REQUIRE(HAM_KEY_NOT_FOUND
        == ham_db_find(m_db, m_txn, &key, &rec, HAM_FIND_LT_MATCH));
    // LT of key i must return key i-1, byte for byte
    for (i = 1; i < 5000; i++) {
      gen.generate(i, &key);
      ham_key_t key2 = {0};
      gen2.generate(i - 1, &key2);
      REQUIRE(0 == ham_db_find(m_db, m_txn, &key, &rec, HAM_FIND_LT_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
    }
  }
  // Bulk LEQ test with all keys inserted through an (uncommitted)
  // transaction: even keys 0..9998 only; even searches match exactly,
  // odd searches fall back to i-1.
  template<typename Generator>
  void txnLessEqualThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"),
            HAM_ENABLE_TRANSACTIONS, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    REQUIRE(0 == ham_txn_begin(&m_txn, m_env, 0, 0, 0));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 0; i < 10000; i += 2) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, m_txn, &key, &rec, 0));
    }
    for (i = 0; i < 10000; i++) {
      gen.generate(i, &key);
      ham_key_t key2 = {0};
      gen2.generate(i & 1 ? i - 1 : i, &key2);
      REQUIRE(0 == ham_db_find(m_db, m_txn, &key, &rec, HAM_FIND_LEQ_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
    }
  }
  // Bulk GT test with all keys inserted through an (uncommitted)
  // transaction: keys 1..5000; GT of i returns i+1, and the largest key
  // has no successor.
  template<typename Generator>
  void txnGreaterThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"),
            HAM_ENABLE_TRANSACTIONS, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    REQUIRE(0 == ham_txn_begin(&m_txn, m_env, 0, 0, 0));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 1; i <= 5000; i++) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, m_txn, &key, &rec, 0));
    }
    for (i = 0; i < 5000; i++) {
      gen.generate(i, &key);
      ham_key_t key2 = {0};
      gen2.generate(i + 1, &key2);
      REQUIRE(0 == ham_db_find(m_db, m_txn, &key, &rec, HAM_FIND_GT_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
    }
    // the largest key has no successor
    gen.generate(5000, &key);
    REQUIRE(HAM_KEY_NOT_FOUND
        == ham_db_find(m_db, m_txn, &key, &rec, HAM_FIND_GT_MATCH));
  }
  // Bulk GEQ test with all keys inserted through an (uncommitted)
  // transaction: even keys 0..10000; even searches match exactly, odd
  // searches roll forward to i+1.
  template<typename Generator>
  void txnGreaterEqualThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"),
            HAM_ENABLE_TRANSACTIONS, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    REQUIRE(0 == ham_txn_begin(&m_txn, m_env, 0, 0, 0));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 0; i <= 10000; i += 2) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, m_txn, &key, &rec, 0));
    }
    for (i = 0; i < 10000; i++) {
      gen.generate(i, &key);
      ham_key_t key2 = {0};
      gen2.generate(i & 1 ? i + 1 : i, &key2);
      REQUIRE(0 == ham_db_find(m_db, m_txn, &key, &rec, HAM_FIND_GEQ_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
    }
    // the largest inserted key (10000) matches itself
    gen.generate(10000, &key);
    REQUIRE(0 == ham_db_find(m_db, m_txn, &key, &rec, HAM_FIND_GEQ_MATCH));
  }
  // Bulk LT test in a transactional environment where all inserts go
  // directly to the btree (txn handle 0): keys 0..4999, LT of i returns
  // i-1, and key 0 has no predecessor.
  template<typename Generator>
  void mixedLessThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"),
            HAM_ENABLE_TRANSACTIONS, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 0; i < 5000; i++) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
    }
    // the smallest key has no predecessor
    gen.generate(0, &key);
    REQUIRE(HAM_KEY_NOT_FOUND
        == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_LT_MATCH));
    for (i = 1; i < 5000; i++) {
      gen.generate(i, &key);
      ham_key_t key2 = {0};
      gen2.generate(i - 1, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_LT_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
    }
  }
  // Bulk LEQ test in a transactional environment with direct (non-txn)
  // inserts: even keys 0..9998; even searches match exactly, odd searches
  // fall back to i-1.
  template<typename Generator>
  void mixedLessEqualThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"),
            HAM_ENABLE_TRANSACTIONS, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 0; i < 10000; i += 2) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
    }
    for (i = 0; i < 10000; i++) {
      gen.generate(i, &key);
      ham_key_t key2 = {0};
      gen2.generate(i & 1 ? i - 1 : i, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_LEQ_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
    }
  }
  // Bulk GT test in a transactional environment with direct (non-txn)
  // inserts: keys 1..5000; GT of i returns i+1, and the largest key has
  // no successor.
  template<typename Generator>
  void mixedGreaterThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"),
            HAM_ENABLE_TRANSACTIONS, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 1; i <= 5000; i++) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
    }
    for (i = 0; i < 5000; i++) {
      gen.generate(i, &key);
      ham_key_t key2 = {0};
      gen2.generate(i + 1, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GT_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
    }
    // the largest key has no successor
    gen.generate(5000, &key);
    REQUIRE(HAM_KEY_NOT_FOUND
        == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GT_MATCH));
  }
  // Bulk GEQ test in a transactional environment with direct (non-txn)
  // inserts: even keys 0..10000; even searches match exactly, odd searches
  // roll forward to i+1.
  template<typename Generator>
  void mixedGreaterEqualThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"),
            HAM_ENABLE_TRANSACTIONS, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 0; i <= 10000; i += 2) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
    }
    for (i = 0; i < 10000; i++) {
      gen.generate(i, &key);
      ham_key_t key2 = {0};
      gen2.generate(i & 1 ? i + 1 : i, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GEQ_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
    }
    // the largest inserted key (10000) matches itself
    gen.generate(10000, &key);
    REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GEQ_MATCH));
  }
  // Bulk LT test with interleaved origins: in each chunk of 4 keys, two
  // are inserted directly into the btree and two through a committed
  // transaction. LT of key i must still return key i-1 regardless of
  // which side each key came from.
  template<typename Generator>
  void mixed2LessThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"),
            HAM_ENABLE_TRANSACTIONS, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 0; i < 5000; i += 4) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
      gen.generate(i + 1, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
      REQUIRE(0 == ham_txn_begin(&m_txn, m_env, 0, 0, 0));
      gen.generate(i + 2, &key);
      REQUIRE(0 == ham_db_insert(m_db, m_txn, &key, &rec, 0));
      gen.generate(i + 3, &key);
      REQUIRE(0 == ham_db_insert(m_db, m_txn, &key, &rec, 0));
      REQUIRE(0 == ham_txn_commit(m_txn, 0));
    }
    // the txn was committed; reset so teardown() does not abort it again
    m_txn = 0;
    // the smallest key has no predecessor
    gen.generate(0, &key);
    REQUIRE(HAM_KEY_NOT_FOUND
        == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_LT_MATCH));
    for (i = 1; i < 5000; i++) {
      gen.generate(i, &key);
      ham_key_t key2 = {0};
      gen2.generate(i - 1, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_LT_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
    }
  }
  // Bulk GT test with interleaved origins: in each chunk of 4 keys
  // (starting at 1), two are inserted directly into the btree and two
  // through a committed transaction. GT of key i must return key i+1.
  template<typename Generator>
  void mixed2GreaterThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"),
            HAM_ENABLE_TRANSACTIONS, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 1; i <= 5000; i += 4) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
      gen.generate(i + 1, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
      REQUIRE(0 == ham_txn_begin(&m_txn, m_env, 0, 0, 0));
      gen.generate(i + 2, &key);
      REQUIRE(0 == ham_db_insert(m_db, m_txn, &key, &rec, 0));
      gen.generate(i + 3, &key);
      REQUIRE(0 == ham_db_insert(m_db, m_txn, &key, &rec, 0));
      REQUIRE(0 == ham_txn_commit(m_txn, 0));
    }
    // the txn was committed; reset so teardown() does not abort it again
    m_txn = 0;
    for (i = 0; i < 5000; i++) {
      gen.generate(i, &key);
      ham_key_t key2 = {0};
      gen2.generate(i + 1, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GT_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
    }
    // the largest key has no successor
    gen.generate(5000, &key);
    REQUIRE(HAM_KEY_NOT_FOUND
        == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GT_MATCH));
  }
  // Bulk LEQ test with interleaved origins and gaps: in each chunk of 5
  // keys, i and i+1 go directly into the btree, i+2 and i+3 through a
  // committed transaction, and i+4 is skipped. LEQ of the four inserted
  // keys matches exactly; LEQ of the skipped key falls back to i+3.
  template<typename Generator>
  void mixed2LessEqualThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"),
            HAM_ENABLE_TRANSACTIONS, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 0; i < 10000; i += 5) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
      gen.generate(i + 1, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
      REQUIRE(0 == ham_txn_begin(&m_txn, m_env, 0, 0, 0));
      gen.generate(i + 2, &key);
      REQUIRE(0 == ham_db_insert(m_db, m_txn, &key, &rec, 0));
      gen.generate(i + 3, &key);
      REQUIRE(0 == ham_db_insert(m_db, m_txn, &key, &rec, 0));
      REQUIRE(0 == ham_txn_commit(m_txn, 0));
      // skip i + 4
    }
    // the txn was committed; reset so teardown() does not abort it again
    m_txn = 0;
    for (i = 0; i < 10000; i += 5) {
      ham_key_t key2 = {0};
      gen.generate(i, &key);
      gen2.generate(i, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_LEQ_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
      gen.generate(i + 1, &key);
      gen2.generate(i + 1, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_LEQ_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
      gen.generate(i + 2, &key);
      gen2.generate(i + 2, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_LEQ_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
      gen.generate(i + 3, &key);
      gen2.generate(i + 3, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_LEQ_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
      // i + 4 was never inserted: LEQ falls back to i + 3
      gen.generate(i + 4, &key);
      gen2.generate(i + 3, &key2); // !!
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_LEQ_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
    }
  }
  // Bulk GEQ test with interleaved origins and gaps: in each chunk of 5
  // keys, i and i+1 go directly into the btree, i+2 and i+3 through a
  // committed transaction, and i+4 is skipped. GEQ of the four inserted
  // keys matches exactly; GEQ of the skipped key rolls forward to i+5
  // (except for the final chunk, where there is no next key).
  template<typename Generator>
  void mixed2GreaterEqualThanTest() {
    teardown();
    Generator gen, gen2;
    ham_parameter_t envparam[] = {
      {HAM_PARAM_PAGE_SIZE, 1024},
      {0, 0}
    };
    ham_parameter_t dbparam[] = {
      {HAM_PARAM_KEY_TYPE, gen.get_key_type()},
      {HAM_PARAM_RECORD_SIZE, 32},
      {0, 0},
      {0, 0}
    };
    // pin the key size when the generator reports a fixed one
    if (gen.get_key_size() > 0) {
      dbparam[2].name = HAM_PARAM_KEY_SIZE;
      dbparam[2].value = gen.get_key_size();
    }
    REQUIRE(0 ==
        ham_env_create(&m_env, Utils::opath(".test"),
            HAM_ENABLE_TRANSACTIONS, 0664, &envparam[0]));
    REQUIRE(0 ==
        ham_env_create_db(m_env, &m_db, 1,
            HAM_FORCE_RECORDS_INLINE, &dbparam[0]));
    ham_key_t key = {0};
    char recbuffer[32] = {0};
    ham_record_t rec = ham_make_record(&recbuffer[0], sizeof(recbuffer));
    int i;
    for (i = 0; i < 10000; i += 5) {
      gen.generate(i, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
      gen.generate(i + 1, &key);
      REQUIRE(0 == ham_db_insert(m_db, 0, &key, &rec, 0));
      REQUIRE(0 == ham_txn_begin(&m_txn, m_env, 0, 0, 0));
      gen.generate(i + 2, &key);
      REQUIRE(0 == ham_db_insert(m_db, m_txn, &key, &rec, 0));
      gen.generate(i + 3, &key);
      REQUIRE(0 == ham_db_insert(m_db, m_txn, &key, &rec, 0));
      REQUIRE(0 == ham_txn_commit(m_txn, 0));
      // skip i + 4
    }
    // the txn was committed; reset so teardown() does not abort it again
    m_txn = 0;
    for (i = 0; i < 10000; i += 5) {
      ham_key_t key2 = {0};
      gen.generate(i, &key);
      gen2.generate(i, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GEQ_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
      gen.generate(i + 1, &key);
      gen2.generate(i + 1, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GEQ_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
      gen.generate(i + 2, &key);
      gen2.generate(i + 2, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GEQ_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
      gen.generate(i + 3, &key);
      gen2.generate(i + 3, &key2);
      REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GEQ_MATCH));
      REQUIRE(key2.size == key.size);
      REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
      // i + 4 was never inserted: GEQ rolls forward to i + 5 — but only
      // when a next chunk exists
      if (i + 5 < 10000) {
        gen.generate(i + 4, &key);
        gen2.generate(i + 5, &key2); // !!
        REQUIRE(0 == ham_db_find(m_db, 0, &key, &rec, HAM_FIND_GEQ_MATCH));
        REQUIRE(key2.size == key.size);
        REQUIRE(0 == ::memcmp(key.data, key2.data, key2.size));
      }
    }
  }
};
// Catch test registrations: each TEST_CASE constructs a fresh ApproxFixture
// and runs exactly one approximate-matching scenario.
TEST_CASE("Approx/lessThanTest1", "") {
  ApproxFixture f;
  f.lessThanTest1();
}
TEST_CASE("Approx/lessThanTest2", "") {
  ApproxFixture f;
  f.lessThanTest2();
}
TEST_CASE("Approx/lessThanTest3", "") {
  ApproxFixture f;
  f.lessThanTest3();
}
TEST_CASE("Approx/lessThanTest4", "") {
  ApproxFixture f;
  f.lessThanTest4();
}
TEST_CASE("Approx/lessThanTest5", "") {
  ApproxFixture f;
  f.lessThanTest5();
}
TEST_CASE("Approx/lessThanTest6", "") {
  ApproxFixture f;
  f.lessThanTest6();
}
TEST_CASE("Approx/lessThanTest7", "") {
  ApproxFixture f;
  f.lessThanTest7();
}
TEST_CASE("Approx/lessThanTest8", "") {
  ApproxFixture f;
  f.lessThanTest8();
}
TEST_CASE("Approx/lessThanTest9", "") {
  ApproxFixture f;
  f.lessThanTest9();
}
TEST_CASE("Approx/lessThanTest10", "") {
  ApproxFixture f;
  f.lessThanTest10();
}
TEST_CASE("Approx/lessThanTest11", "") {
  ApproxFixture f;
  f.lessThanTest11();
}
TEST_CASE("Approx/lessOrEqualTest1", "") {
  ApproxFixture f;
  f.lessOrEqualTest1();
}
TEST_CASE("Approx/lessOrEqualTest2", "") {
  ApproxFixture f;
  f.lessOrEqualTest2();
}
TEST_CASE("Approx/lessOrEqualTest3", "") {
  ApproxFixture f;
  f.lessOrEqualTest3();
}
TEST_CASE("Approx/lessOrEqualTest4", "") {
  ApproxFixture f;
  f.lessOrEqualTest4();
}
TEST_CASE("Approx/lessOrEqualTest5", "") {
  ApproxFixture f;
  f.lessOrEqualTest5();
}
TEST_CASE("Approx/lessOrEqualTest6", "") {
  ApproxFixture f;
  f.lessOrEqualTest6();
}
TEST_CASE("Approx/lessOrEqualTest7", "") {
  ApproxFixture f;
  f.lessOrEqualTest7();
}
TEST_CASE("Approx/lessOrEqualTest8", "") {
  ApproxFixture f;
  f.lessOrEqualTest8();
}
TEST_CASE("Approx/lessOrEqualTest9", "") {
  ApproxFixture f;
  f.lessOrEqualTest9();
}
TEST_CASE("Approx/lessOrEqualTest10", "") {
  ApproxFixture f;
  f.lessOrEqualTest10();
}
TEST_CASE("Approx/lessOrEqualTest11", "") {
  ApproxFixture f;
  f.lessOrEqualTest11();
}
TEST_CASE("Approx/lessOrEqualTest12", "") {
  ApproxFixture f;
  f.lessOrEqualTest12();
}
TEST_CASE("Approx/lessOrEqualTest13", "") {
  ApproxFixture f;
  f.lessOrEqualTest13();
}
TEST_CASE("Approx/greaterThanTest", "") {
  ApproxFixture f;
  f.greaterThanTest();
}
TEST_CASE("Approx/greaterThanTest1", "") {
  ApproxFixture f;
  f.greaterThanTest1();
}
TEST_CASE("Approx/greaterThanTest2", "") {
  ApproxFixture f;
  f.greaterThanTest2();
}
TEST_CASE("Approx/greaterThanTest3", "") {
  ApproxFixture f;
  f.greaterThanTest3();
}
TEST_CASE("Approx/greaterThanTest4", "") {
  ApproxFixture f;
  f.greaterThanTest4();
}
TEST_CASE("Approx/greaterThanTest5", "") {
  ApproxFixture f;
  f.greaterThanTest5();
}
TEST_CASE("Approx/greaterThanTest6", "") {
  ApproxFixture f;
  f.greaterThanTest6();
}
TEST_CASE("Approx/greaterThanTest7", "") {
  ApproxFixture f;
  f.greaterThanTest7();
}
TEST_CASE("Approx/greaterThanTest8", "") {
  ApproxFixture f;
  f.greaterThanTest8();
}
TEST_CASE("Approx/greaterThanTest9", "") {
  ApproxFixture f;
  f.greaterThanTest9();
}
TEST_CASE("Approx/greaterOrEqualTest1", "") {
  ApproxFixture f;
  f.greaterOrEqualTest1();
}
TEST_CASE("Approx/greaterOrEqualTest2", "") {
  ApproxFixture f;
  f.greaterOrEqualTest2();
}
TEST_CASE("Approx/greaterOrEqualTest3", "") {
  ApproxFixture f;
  f.greaterOrEqualTest3();
}
TEST_CASE("Approx/greaterOrEqualTest4", "") {
  ApproxFixture f;
  f.greaterOrEqualTest4();
}
TEST_CASE("Approx/greaterOrEqualTest5", "") {
  ApproxFixture f;
  f.greaterOrEqualTest5();
}
TEST_CASE("Approx/greaterOrEqualTest6", "") {
  ApproxFixture f;
  f.greaterOrEqualTest6();
}
TEST_CASE("Approx/greaterOrEqualTest7", "") {
  ApproxFixture f;
  f.greaterOrEqualTest7();
}
TEST_CASE("Approx/greaterOrEqualTest8", "") {
  ApproxFixture f;
  f.greaterOrEqualTest8();
}
TEST_CASE("Approx/greaterOrEqualTest9", "") {
  ApproxFixture f;
  f.greaterOrEqualTest9();
}
TEST_CASE("Approx/greaterOrEqualTest10", "") {
  ApproxFixture f;
  f.greaterOrEqualTest10();
}
TEST_CASE("Approx/greaterOrEqualTest11", "") {
  ApproxFixture f;
  f.greaterOrEqualTest11();
}
TEST_CASE("Approx/greaterOrEqualTest12", "") {
  ApproxFixture f;
  f.greaterOrEqualTest12();
}
TEST_CASE("Approx/issue44Test", "") {
  ApproxFixture f;
  f.issue44Test();
}
TEST_CASE("Approx/issue46Test", "") {
  ApproxFixture f;
  f.issue46Test();
}
TEST_CASE("Approx/issue52Test", "") {
  ApproxFixture f;
  f.issue52Test();
}
// Generates fixed-length binary keys: the first five bytes are the decimal
// representation of the index, the rest of the buffer stays zeroed.
template<uint16_t Length>
struct BinaryGenerator {
  BinaryGenerator() {
    ::memset(buffer, 0, Length);
  }

  // Formats |i| into the key buffer and points |key| at it.
  // snprintf truncates instead of overflowing the buffer when Length < 6
  // (5 digits plus NUL); for Length >= 6 it behaves exactly like the
  // previous sprintf call.
  void generate(int i, ham_key_t *key) {
    ::snprintf(buffer, Length, "%05d", i);
    key->data = buffer;
    key->size = Length;
  }

  // Fixed key size reported to the database.
  uint16_t get_key_size() const {
    return (Length);
  }

  // Keys are opaque binary data.
  uint64_t get_key_type() const {
    return (HAM_TYPE_BINARY);
  }

  char buffer[Length];
};
// Same key layout as BinaryGenerator<32>, but reports an unlimited
// (variable-length) key size to the database.
struct BinaryVarLenGenerator : public BinaryGenerator<32> {
  uint16_t get_key_size() const {
    return (HAM_KEY_SIZE_UNLIMITED);
  }
};
// Generates fixed-size POD keys (integers or floats) of the given ham key
// |type|; the key data points at the embedded member |t|, so each generated
// key stays valid only until the next generate() call.
template<uint64_t type, typename T>
struct PodGenerator {
  PodGenerator() : t(0) {
  }
  // Stores |i| converted to T and points |key| at it.
  void generate(int i, ham_key_t *key) {
    t = (T)i;
    key->data = &t;
    key->size = sizeof(t);
  }
  // 0: the database derives the key size from the key type.
  uint16_t get_key_size() const {
    return (0);
  }
  uint64_t get_key_type() const {
    return (type);
  }
  T t;
};
// Typed test registrations: every scenario (btree-only, txn-only, mixed,
// mixed2) runs once per key generator — binary keys of 8/32/48 bytes,
// variable-length binary keys, and each POD key type.
// Btree tests for HAM_FIND_LT_MATCH
TEST_CASE("Approx/btreeLessThanBinary8Test", "") {
  ApproxFixture f;
  f.btreeLessThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/btreeLessThanBinary32Test", "") {
  ApproxFixture f;
  f.btreeLessThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/btreeLessThanBinary48Test", "") {
  ApproxFixture f;
  f.btreeLessThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/btreeLessThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.btreeLessThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/btreeLessThanUint16Test", "") {
  ApproxFixture f;
  f.btreeLessThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/btreeLessThanUint32Test", "") {
  ApproxFixture f;
  f.btreeLessThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/btreeLessThanUint64Test", "") {
  ApproxFixture f;
  f.btreeLessThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/btreeLessThanReal32Test", "") {
  ApproxFixture f;
  f.btreeLessThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/btreeLessThanReal64Test", "") {
  ApproxFixture f;
  f.btreeLessThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
// Btree tests for HAM_FIND_GT_MATCH
TEST_CASE("Approx/btreeGreaterThanBinary8Test", "") {
  ApproxFixture f;
  f.btreeGreaterThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/btreeGreaterThanBinary32Test", "") {
  ApproxFixture f;
  f.btreeGreaterThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/btreeGreaterThanBinary48Test", "") {
  ApproxFixture f;
  f.btreeGreaterThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/btreeGreaterThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.btreeGreaterThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/btreeGreaterThanUint16Test", "") {
  ApproxFixture f;
  f.btreeGreaterThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/btreeGreaterThanUint32Test", "") {
  ApproxFixture f;
  f.btreeGreaterThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/btreeGreaterThanUint64Test", "") {
  ApproxFixture f;
  f.btreeGreaterThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/btreeGreaterThanReal32Test", "") {
  ApproxFixture f;
  f.btreeGreaterThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/btreeGreaterThanReal64Test", "") {
  ApproxFixture f;
  f.btreeGreaterThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
// Btree tests for HAM_FIND_LEQ_MATCH
TEST_CASE("Approx/btreeLessEqualThanBinary8Test", "") {
  ApproxFixture f;
  f.btreeLessEqualThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/btreeLessEqualThanBinary32Test", "") {
  ApproxFixture f;
  f.btreeLessEqualThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/btreeLessEqualThanBinary48Test", "") {
  ApproxFixture f;
  f.btreeLessEqualThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/btreeLessEqualThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.btreeLessEqualThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/btreeLessEqualThanUint16Test", "") {
  ApproxFixture f;
  f.btreeLessEqualThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/btreeLessEqualThanUint32Test", "") {
  ApproxFixture f;
  f.btreeLessEqualThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/btreeLessEqualThanUint64Test", "") {
  ApproxFixture f;
  f.btreeLessEqualThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/btreeLessEqualThanReal32Test", "") {
  ApproxFixture f;
  f.btreeLessEqualThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/btreeLessEqualThanReal64Test", "") {
  ApproxFixture f;
  f.btreeLessEqualThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
// Btree tests for HAM_FIND_GEQ_MATCH
TEST_CASE("Approx/btreeGreaterEqualThanBinary8Test", "") {
  ApproxFixture f;
  f.btreeGreaterEqualThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/btreeGreaterEqualThanBinary32Test", "") {
  ApproxFixture f;
  f.btreeGreaterEqualThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/btreeGreaterEqualThanBinary48Test", "") {
  ApproxFixture f;
  f.btreeGreaterEqualThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/btreeGreaterEqualThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.btreeGreaterEqualThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/btreeGreaterEqualThanUint16Test", "") {
  ApproxFixture f;
  f.btreeGreaterEqualThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/btreeGreaterEqualThanUint32Test", "") {
  ApproxFixture f;
  f.btreeGreaterEqualThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/btreeGreaterEqualThanUint64Test", "") {
  ApproxFixture f;
  f.btreeGreaterEqualThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/btreeGreaterEqualThanReal32Test", "") {
  ApproxFixture f;
  f.btreeGreaterEqualThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/btreeGreaterEqualThanReal64Test", "") {
  ApproxFixture f;
  f.btreeGreaterEqualThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
// Transaction tests for HAM_FIND_LT_MATCH
TEST_CASE("Approx/txnLessThanBinary8Test", "") {
  ApproxFixture f;
  f.txnLessThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/txnLessThanBinary32Test", "") {
  ApproxFixture f;
  f.txnLessThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/txnLessThanBinary48Test", "") {
  ApproxFixture f;
  f.txnLessThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/txnLessThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.txnLessThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/txnLessThanUint16Test", "") {
  ApproxFixture f;
  f.txnLessThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/txnLessThanUint32Test", "") {
  ApproxFixture f;
  f.txnLessThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/txnLessThanUint64Test", "") {
  ApproxFixture f;
  f.txnLessThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/txnLessThanReal32Test", "") {
  ApproxFixture f;
  f.txnLessThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/txnLessThanReal64Test", "") {
  ApproxFixture f;
  f.txnLessThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
// Transaction tests for HAM_FIND_GT_MATCH
TEST_CASE("Approx/txnGreaterThanBinary8Test", "") {
  ApproxFixture f;
  f.txnGreaterThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/txnGreaterThanBinary32Test", "") {
  ApproxFixture f;
  f.txnGreaterThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/txnGreaterThanBinary48Test", "") {
  ApproxFixture f;
  f.txnGreaterThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/txnGreaterThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.txnGreaterThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/txnGreaterThanUint16Test", "") {
  ApproxFixture f;
  f.txnGreaterThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/txnGreaterThanUint32Test", "") {
  ApproxFixture f;
  f.txnGreaterThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/txnGreaterThanUint64Test", "") {
  ApproxFixture f;
  f.txnGreaterThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/txnGreaterThanReal32Test", "") {
  ApproxFixture f;
  f.txnGreaterThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/txnGreaterThanReal64Test", "") {
  ApproxFixture f;
  f.txnGreaterThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
// Transaction tests for HAM_FIND_LEQ_MATCH
TEST_CASE("Approx/txnLessEqualThanBinary8Test", "") {
  ApproxFixture f;
  f.txnLessEqualThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/txnLessEqualThanBinary32Test", "") {
  ApproxFixture f;
  f.txnLessEqualThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/txnLessEqualThanBinary48Test", "") {
  ApproxFixture f;
  f.txnLessEqualThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/txnLessEqualThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.txnLessEqualThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/txnLessEqualThanUint16Test", "") {
  ApproxFixture f;
  f.txnLessEqualThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/txnLessEqualThanUint32Test", "") {
  ApproxFixture f;
  f.txnLessEqualThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/txnLessEqualThanUint64Test", "") {
  ApproxFixture f;
  f.txnLessEqualThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/txnLessEqualThanReal32Test", "") {
  ApproxFixture f;
  f.txnLessEqualThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/txnLessEqualThanReal64Test", "") {
  ApproxFixture f;
  f.txnLessEqualThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
// Transaction tests for HAM_FIND_GEQ_MATCH
TEST_CASE("Approx/txnGreaterEqualThanBinary8Test", "") {
  ApproxFixture f;
  f.txnGreaterEqualThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/txnGreaterEqualThanBinary32Test", "") {
  ApproxFixture f;
  f.txnGreaterEqualThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/txnGreaterEqualThanBinary48Test", "") {
  ApproxFixture f;
  f.txnGreaterEqualThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/txnGreaterEqualThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.txnGreaterEqualThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/txnGreaterEqualThanUint16Test", "") {
  ApproxFixture f;
  f.txnGreaterEqualThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/txnGreaterEqualThanUint32Test", "") {
  ApproxFixture f;
  f.txnGreaterEqualThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/txnGreaterEqualThanUint64Test", "") {
  ApproxFixture f;
  f.txnGreaterEqualThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/txnGreaterEqualThanReal32Test", "") {
  ApproxFixture f;
  f.txnGreaterEqualThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/txnGreaterEqualThanReal64Test", "") {
  ApproxFixture f;
  f.txnGreaterEqualThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
// Mixed tests (Transaction + Btree) for HAM_FIND_LT_MATCH
TEST_CASE("Approx/mixedLessThanBinary8Test", "") {
  ApproxFixture f;
  f.mixedLessThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/mixedLessThanBinary32Test", "") {
  ApproxFixture f;
  f.mixedLessThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/mixedLessThanBinary48Test", "") {
  ApproxFixture f;
  f.mixedLessThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/mixedLessThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.mixedLessThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/mixedLessThanUint16Test", "") {
  ApproxFixture f;
  f.mixedLessThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/mixedLessThanUint32Test", "") {
  ApproxFixture f;
  f.mixedLessThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/mixedLessThanUint64Test", "") {
  ApproxFixture f;
  f.mixedLessThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/mixedLessThanReal32Test", "") {
  ApproxFixture f;
  f.mixedLessThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/mixedLessThanReal64Test", "") {
  ApproxFixture f;
  f.mixedLessThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
// Mixed tests (Transaction + Btree) for HAM_FIND_GT_MATCH
TEST_CASE("Approx/mixedGreaterThanBinary8Test", "") {
  ApproxFixture f;
  f.mixedGreaterThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/mixedGreaterThanBinary32Test", "") {
  ApproxFixture f;
  f.mixedGreaterThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/mixedGreaterThanBinary48Test", "") {
  ApproxFixture f;
  f.mixedGreaterThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/mixedGreaterThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.mixedGreaterThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/mixedGreaterThanUint16Test", "") {
  ApproxFixture f;
  f.mixedGreaterThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/mixedGreaterThanUint32Test", "") {
  ApproxFixture f;
  f.mixedGreaterThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/mixedGreaterThanUint64Test", "") {
  ApproxFixture f;
  f.mixedGreaterThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/mixedGreaterThanReal32Test", "") {
  ApproxFixture f;
  f.mixedGreaterThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/mixedGreaterThanReal64Test", "") {
  ApproxFixture f;
  f.mixedGreaterThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
// Mixed tests (Transaction + Btree) for HAM_FIND_LEQ_MATCH
TEST_CASE("Approx/mixedLessEqualThanBinary8Test", "") {
  ApproxFixture f;
  f.mixedLessEqualThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/mixedLessEqualThanBinary32Test", "") {
  ApproxFixture f;
  f.mixedLessEqualThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/mixedLessEqualThanBinary48Test", "") {
  ApproxFixture f;
  f.mixedLessEqualThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/mixedLessEqualThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.mixedLessEqualThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/mixedLessEqualThanUint16Test", "") {
  ApproxFixture f;
  f.mixedLessEqualThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/mixedLessEqualThanUint32Test", "") {
  ApproxFixture f;
  f.mixedLessEqualThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/mixedLessEqualThanUint64Test", "") {
  ApproxFixture f;
  f.mixedLessEqualThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/mixedLessEqualThanReal32Test", "") {
  ApproxFixture f;
  f.mixedLessEqualThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/mixedLessEqualThanReal64Test", "") {
  ApproxFixture f;
  f.mixedLessEqualThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
// Mixed tests (Transaction + Btree) for HAM_FIND_GEQ_MATCH
TEST_CASE("Approx/mixedGreaterEqualThanBinary8Test", "") {
  ApproxFixture f;
  f.mixedGreaterEqualThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/mixedGreaterEqualThanBinary32Test", "") {
  ApproxFixture f;
  f.mixedGreaterEqualThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/mixedGreaterEqualThanBinary48Test", "") {
  ApproxFixture f;
  f.mixedGreaterEqualThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/mixedGreaterEqualThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.mixedGreaterEqualThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/mixedGreaterEqualThanUint16Test", "") {
  ApproxFixture f;
  f.mixedGreaterEqualThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/mixedGreaterEqualThanUint32Test", "") {
  ApproxFixture f;
  f.mixedGreaterEqualThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/mixedGreaterEqualThanUint64Test", "") {
  ApproxFixture f;
  f.mixedGreaterEqualThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/mixedGreaterEqualThanReal32Test", "") {
  ApproxFixture f;
  f.mixedGreaterEqualThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/mixedGreaterEqualThanReal64Test", "") {
  ApproxFixture f;
  f.mixedGreaterEqualThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
// Mixed tests (Transaction + Btree) for HAM_FIND_LT_MATCH
TEST_CASE("Approx/mixed2LessThanBinary8Test", "") {
  ApproxFixture f;
  f.mixed2LessThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/mixed2LessThanBinary32Test", "") {
  ApproxFixture f;
  f.mixed2LessThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/mixed2LessThanBinary48Test", "") {
  ApproxFixture f;
  f.mixed2LessThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/mixed2LessThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.mixed2LessThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/mixed2LessThanUint16Test", "") {
  ApproxFixture f;
  f.mixed2LessThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/mixed2LessThanUint32Test", "") {
  ApproxFixture f;
  f.mixed2LessThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/mixed2LessThanUint64Test", "") {
  ApproxFixture f;
  f.mixed2LessThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/mixed2LessThanReal32Test", "") {
  ApproxFixture f;
  f.mixed2LessThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/mixed2LessThanReal64Test", "") {
  ApproxFixture f;
  f.mixed2LessThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
// Mixed tests (Transaction + Btree) for HAM_FIND_GT_MATCH
TEST_CASE("Approx/mixed2GreaterThanBinary8Test", "") {
  ApproxFixture f;
  f.mixed2GreaterThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/mixed2GreaterThanBinary32Test", "") {
  ApproxFixture f;
  f.mixed2GreaterThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/mixed2GreaterThanBinary48Test", "") {
  ApproxFixture f;
  f.mixed2GreaterThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/mixed2GreaterThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.mixed2GreaterThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/mixed2GreaterThanUint16Test", "") {
  ApproxFixture f;
  f.mixed2GreaterThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/mixed2GreaterThanUint32Test", "") {
  ApproxFixture f;
  f.mixed2GreaterThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/mixed2GreaterThanUint64Test", "") {
  ApproxFixture f;
  f.mixed2GreaterThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/mixed2GreaterThanReal32Test", "") {
  ApproxFixture f;
  f.mixed2GreaterThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/mixed2GreaterThanReal64Test", "") {
  ApproxFixture f;
  f.mixed2GreaterThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
// Mixed tests (Transaction + Btree) for HAM_FIND_LEQ_MATCH
TEST_CASE("Approx/mixed2LessEqualThanBinary8Test", "") {
  ApproxFixture f;
  f.mixed2LessEqualThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/mixed2LessEqualThanBinary32Test", "") {
  ApproxFixture f;
  f.mixed2LessEqualThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/mixed2LessEqualThanBinary48Test", "") {
  ApproxFixture f;
  f.mixed2LessEqualThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/mixed2LessEqualThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.mixed2LessEqualThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/mixed2LessEqualThanUint16Test", "") {
  ApproxFixture f;
  f.mixed2LessEqualThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/mixed2LessEqualThanUint32Test", "") {
  ApproxFixture f;
  f.mixed2LessEqualThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/mixed2LessEqualThanUint64Test", "") {
  ApproxFixture f;
  f.mixed2LessEqualThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/mixed2LessEqualThanReal32Test", "") {
  ApproxFixture f;
  f.mixed2LessEqualThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/mixed2LessEqualThanReal64Test", "") {
  ApproxFixture f;
  f.mixed2LessEqualThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
// Mixed tests (Transaction + Btree) for HAM_FIND_GEQ_MATCH
TEST_CASE("Approx/mixed2GreaterEqualThanBinary8Test", "") {
  ApproxFixture f;
  f.mixed2GreaterEqualThanTest<BinaryGenerator<8> >();
}
TEST_CASE("Approx/mixed2GreaterEqualThanBinary32Test", "") {
  ApproxFixture f;
  f.mixed2GreaterEqualThanTest<BinaryGenerator<32> >();
}
TEST_CASE("Approx/mixed2GreaterEqualThanBinary48Test", "") {
  ApproxFixture f;
  f.mixed2GreaterEqualThanTest<BinaryGenerator<48> >();
}
TEST_CASE("Approx/mixed2GreaterEqualThanBinaryVarlenTest", "") {
  ApproxFixture f;
  f.mixed2GreaterEqualThanTest<BinaryVarLenGenerator>();
}
TEST_CASE("Approx/mixed2GreaterEqualThanUint16Test", "") {
  ApproxFixture f;
  f.mixed2GreaterEqualThanTest<PodGenerator<HAM_TYPE_UINT16, uint16_t> >();
}
TEST_CASE("Approx/mixed2GreaterEqualThanUint32Test", "") {
  ApproxFixture f;
  f.mixed2GreaterEqualThanTest<PodGenerator<HAM_TYPE_UINT32, uint32_t> >();
}
TEST_CASE("Approx/mixed2GreaterEqualThanUint64Test", "") {
  ApproxFixture f;
  f.mixed2GreaterEqualThanTest<PodGenerator<HAM_TYPE_UINT64, uint64_t> >();
}
TEST_CASE("Approx/mixed2GreaterEqualThanReal32Test", "") {
  ApproxFixture f;
  f.mixed2GreaterEqualThanTest<PodGenerator<HAM_TYPE_REAL32, float> >();
}
TEST_CASE("Approx/mixed2GreaterEqualThanReal64Test", "") {
  ApproxFixture f;
  f.mixed2GreaterEqualThanTest<PodGenerator<HAM_TYPE_REAL64, double> >();
}
|
#!/usr/bin/python3
"""An improved version of PswdProtHello3 that reads a list of users from a file.
"""
#Classes
#===============================================================================
class User(object):
    """A user identified by a name and a password."""

    @staticmethod
    def load_from_file(f):
        """Load a user from a config file.

        Expects two "key=value" lines (name, then password) at the current
        position of the open file object ``f``.
        """
        name = f.readline().split("=")[1].strip()
        pswd = f.readline().split("=")[1].strip()
        return User(name, pswd)

    def __init__(self, name, pswd):
        """Setup this user."""
        self.name = name
        self.pswd = pswd

    def __eq__(self, other):
        """Compare this user to the given user by name and password."""
        if not isinstance(other, User):
            # Fall back to Python's default handling instead of raising
            # AttributeError when compared with a non-User object.
            return NotImplemented
        return self.name == other.name and self.pswd == other.pswd

    def __hash__(self):
        """Keep User hashable despite the custom __eq__."""
        return hash((self.name, self.pswd))

    def say_hello(self):
        """Say hello to this user."""
        print("Hello {}!".format(self.name))
class App(object):
    """A basic app that greets users listed in users.cfg."""

    def __init__(self):
        """Setup this app: read the authorized users from users.cfg."""
        print("Loading user list...", end = "")
        self.users = []
        with open("users.cfg", "r") as f:
            # Each "[User]" header is followed by the user's name/pswd lines,
            # which User.load_from_file consumes from the same file handle.
            for line in f:
                if line.strip() == "[User]":
                    self.users.append(User.load_from_file(f))
        print("ok")

    def login(self, name, pswd):
        """Return a User for the given credentials, or None if unauthorized."""
        candidate = User(name, pswd)
        return next(
            (candidate for known in self.users if candidate == known),
            None)

    def run(self):
        """Run this app: prompt for credentials and greet on success."""
        user = self.login(input("Name: "), input("Pswd: "))
        if user is None:
            print("***Access Denied***")
        else:
            user.say_hello()
#Entry Point
#===============================================================================
# Guard the entry point so importing this module does not start the app.
if __name__ == "__main__":
    App().run()
|
#!/bin/sh
# Display a 1280x720 "snow" test pattern on a KMS/DRM display.
# Fixed typo: "heigh" -> "height" (the misspelled field was silently ignored
# by the caps filter, so the requested 720p height was never applied).
gst-launch-1.0 -v videotestsrc pattern=snow ! video/x-raw,width=1280,height=720 ! kmssink
|
#!/bin/bash
# Register tab-completion for the plmr-* helper commands, using the first
# column of the component list file as the candidate word list.
_FILE=~/Workspace/apps/polymer-toolkit/polymer-web-components.txt
# Quote "$_FILE" so the path survives word splitting / globbing.
_ITEMS=$(awk '{printf("%s ",$1)}' "$_FILE");
complete -W "${_ITEMS}" plmr-src plmr-doc plmr-npm
unset _FILE;
unset _ITEMS;
|
/* eslint-disable no-throw-literal */
/* eslint-disable camelcase */
import express, { Request, Response } from 'express';
import path from 'path';
import http from 'http';
// routers
import userRouter from './router/user';
import postRouter from './router/posts';
import miscRouter from './router/misc';
import settingRouter from './router/settings';
import multimediaRouter from './router/multimedia';
import trackRouter from './router/tracker';
import searchRouter from './router/search';
// Express application wrapped in a plain http.Server; the server (not the
// app) is exported so the caller controls listen() and can attach other
// protocols to the same socket later.
const app = express();
const server = http.createServer(app);

// Static assets live one level above the compiled output directory.
const publicDirPath = path.join(__dirname, '../public');

// Body parsing (JSON + url-encoded forms) and static file serving.
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
app.use(express.static(publicDirPath));

// Feature routers; earlier routers get first chance to match a request.
app.use(userRouter);
app.use(postRouter);
app.use(multimediaRouter);
app.use(searchRouter);
app.use(miscRouter);
app.use(settingRouter);
app.use(trackRouter);

// Simple landing route.
app.get('/', (req: Request, res: Response) => {
  res.send('Download app now');
});

export { server };
|
<filename>src/components/ServicesPage/ServiceItem/ServiceItem.js
import React from 'react';
import PropTypes from 'prop-types';
import './ServiceItem.css';
import IsotipoBitlogic from '../../../images/isotipo.png';
const ServiceItem = ({ title, children }) => (
<section className="ServiceItem">
<h2 className="ServiceItem__Title">
<img src={IsotipoBitlogic} alt="bitlogic" class="ServiceItem__Img" />
{title}
</h2>
<p className="ServiceItem__Description">{children}</p>
</section>
);
ServiceItem.propTypes = {
title: PropTypes.string,
children: PropTypes.node,
};
export default ServiceItem;
|
# Build the edge-prediction dataset from the raw graph files.
python preprocess.py -i static/edge/graph -o static/edge -n edge_prediction -b edge
# Build the node-classification dataset from the raw graph files.
python preprocess.py -i static/node/graph -o static/node -n node_classification -b node
|
#!/bin/bash
# Default image registry; can be overridden from the environment.
export DOCKER_REGISTRY=${DOCKER_REGISTRY:-kubeci}

# Shared helpers (build, clean, hub_canary, hub_pull, ... are defined here).
source $(dirname "${BASH_SOURCE}")/lib.sh

# override this one if you need to change push & pull
docker_push() {
  hub_canary
}

docker_pull() {
  hub_pull
}
# Dispatcher for source repositories: runs the sub-command given as $1, or
# $DEFAULT_COMMAND (default: build) when invoked without arguments.
# Fixed: removed a stray "(10)" in the fallback branch, which spawned a
# subshell that tried to execute "10" as a command.
source_repo() {
  RETVAL=0

  if [ $# -eq 0 ]; then
    cmd=${DEFAULT_COMMAND:-build}
    $cmd
    exit $RETVAL
  fi

  case "$1" in
    build)
      build
      ;;
    build_binary)
      build_binary
      ;;
    build_docker)
      build_docker
      ;;
    clean)
      clean
      ;;
    push)
      docker_push
      ;;
    pull)
      docker_pull
      ;;
    release)
      docker_release
      ;;
    check)
      docker_check
      ;;
    run)
      docker_run
      ;;
    sh)
      docker_sh
      ;;
    rm)
      docker_rm
      ;;
    rmi)
      docker_rmi
      ;;
    *)
      echo $"Usage: $0 {build|build_binary|build_docker|clean|push|pull|release|check|sh|rm|rmi}"
      RETVAL=1
      ;;
  esac

  exit $RETVAL
}
# Dispatcher for binary-only repositories: like source_repo but without the
# build_binary/build_docker sub-commands.
# Fixed: removed a stray "(10)" in the fallback branch, which spawned a
# subshell that tried to execute "10" as a command.
binary_repo() {
  RETVAL=0

  if [ $# -eq 0 ]; then
    cmd=${DEFAULT_COMMAND:-build}
    $cmd
    exit $RETVAL
  fi

  case "$1" in
    build)
      build
      ;;
    clean)
      clean
      ;;
    push)
      docker_push
      ;;
    pull)
      docker_pull
      ;;
    release)
      docker_release
      ;;
    check)
      docker_check
      ;;
    run)
      docker_run
      ;;
    sh)
      docker_sh
      ;;
    rm)
      docker_rm
      ;;
    rmi)
      docker_rmi
      ;;
    *)
      echo $"Usage: $0 {build|clean|push|pull|release|check|sh|rm|rmi}"
      RETVAL=1
      ;;
  esac

  exit $RETVAL
}
|
<filename>beken378/func/include/bk7011_cal_pub.h
#ifndef _BK7011_CAL_PUB_H_
#define _BK7011_CAL_PUB_H_
#include "typedef.h"

/* Temperature-compensated TX power entry.
 * mod/pa appear to be the register values fed to
 * rwnx_cal_set_reg_mod_pa() below -- TODO confirm against the .c file. */
typedef struct tmp_pwr_st {
    UINT8 mod;
    UINT8 pa;
    UINT16 pwr_idx_shift;
} TMP_PWR_ST, *TMP_PWR_PTR;

/* --- Core calibration entry points --- */
extern void calibration_main(void);
extern INT32 rwnx_cal_load_trx_rcbekn_reg_val(void);
extern void rwnx_cal_set_txpwr_by_rate(INT32 rate, UINT32 test_mode);
extern void rwnx_cal_set_txpwr_by_channel(UINT32 channel);
extern INT32 rwnx_cal_save_trx_rcbekn_reg_val(void);
extern void do_calibration_in_temp_dect(void);
extern void bk7011_cal_bias(void);
extern void bk7011_cal_dpll(void);
extern void rwnx_cal_set_txpwr(UINT32 pwr_gain, UINT32 grate);

/* --- Manual TX power table management (RAM + flash persistence) --- */
extern UINT32 manual_cal_get_pwr_idx_shift(UINT32 rate, UINT32 bandwidth, UINT32 *pwr_gain);
extern int manual_cal_get_txpwr(UINT32 rate, UINT32 channel, UINT32 bandwidth, UINT32 *pwr_gain);
extern void manual_cal_save_txpwr(UINT32 rate, UINT32 channel, UINT32 pwr_gain);
extern UINT32 manual_cal_fitting_txpwr_tab(void);
extern void manual_cal_show_txpwr_tab(void);
extern UINT32 manual_cal_load_txpwr_tab_flash(void);
extern int manual_cal_save_txpwr_tab_to_flash(void);
extern int manual_cal_save_chipinfo_tab_to_flash(void);
/* NOTE(review): "wirte" is a typo for "write", kept as-is because renaming
 * a public symbol would break every existing caller. */
extern UINT8 manual_cal_wirte_otp_flash(UINT32 addr, UINT32 len, UINT8 *buf);
extern UINT8 manual_cal_read_otp_flash(UINT32 addr, UINT32 len, UINT8 *buf);
extern UINT32 manual_cal_load_default_txpwr_tab(UINT32 is_ready_flash);
extern void manual_cal_set_dif_g_n40(UINT32 diff);
extern void manual_cal_set_dif_g_n20(UINT32 diff);
extern void manual_cal_get_current_temperature(void);
extern int manual_cal_write_macaddr_to_flash(UINT8 *mac_ptr);
extern int manual_cal_get_macaddr_from_flash(UINT8 *mac_ptr);
extern void manual_cal_show_otp_flash(void);
extern void manual_cal_clear_otp_flash(void);

/* --- Crystal (XTAL) trimming and LPF I/Q calibration --- */
extern void manual_cal_set_xtal(UINT32 xtal);
extern void manual_cal_set_lpf_iq(UINT32 lpf_i, UINT32 lpf_q);
extern void manual_cal_load_lpf_iq_tag_flash(void);
extern void manual_cal_load_xtal_tag_flash(void);
extern void manual_cal_do_xtal_cali(UINT16 cur_val, UINT16 *last, UINT16 thre, UINT16 init_val);
extern UINT32 manual_cal_get_xtal(void);
extern INT8 manual_cal_get_dbm_by_rate(UINT32 rate, UINT32 bandwidth);
extern INT8 manual_cal_get_cur_txpwr_dbm(void);
extern int manual_cal_load_temp_tag_from_flash(void);
extern int manual_cal_load_xtal_tag_from_flash(void);
extern void manual_cal_load_differ_tag_from_flash(void);

/* --- TSSI measurement helpers --- */
extern void bk7011_micopwr_config_tssi_read_prepare(void);
extern void bk7011_micopwr_tssi_read(void);
extern void bk7011_micopwr_tssi_show(void);
extern void rwnx_cal_set_reg_adda_ldo(UINT32 val);

/* --- Temperature-based TX power compensation --- */
extern void manual_cal_tmp_pwr_init(UINT16 init_temp, UINT16 init_thre, UINT16 init_dist);
extern void manual_cal_tmp_pwr_init_reg(UINT16 reg_mod, UINT16 reg_pa);
extern void manual_cal_temp_pwr_unint(void);
extern void manual_cal_set_tmp_pwr_flag(UINT8 flag);
extern TMP_PWR_PTR manual_cal_set_tmp_pwr(UINT16 cur_val, UINT16 thre, UINT16 *last);
extern UINT32 manual_cal_load_temp_tag_flash(void);
extern UINT32 manual_cal_load_adc_cali_flash(void);
extern void rwnx_cal_set_reg_mod_pa(UINT16 reg_mod, UINT16 reg_pa);
extern void rwnx_cal_do_temp_detect(UINT16 cur_val, UINT16 thre, UINT16 *last);
extern void rwnx_cal_set_lpfcap_iq(UINT32 lpfcap_i, UINT32 lpfcap_q);
extern void rwnx_cal_initial_calibration(void);
#endif // _BK7011_CAL_PUB_H_
|
<filename>frontend/src/views/index.js<gh_stars>0
import React from 'react';
import Form from './Form';
import List from './List';
const Home = (props) => {
const [state, setState] = React.useState({
username: '',
token: '',
});
const setToken = (username, token) => {
setState({ username: username, token: token });
};
if (state.token === '' || state.username === '') {
return <Form setToken={setToken} />
} else {
return <List data={state} />
}
};
export default Home;
|
<!DOCTYPE html>
<html>
<head>
<title>User Information</title>
</head>
<body>
<h1>User Information</h1>
<!-- Security fix: $name/$age may be user-controlled; escape them so
     injected markup/script is rendered inert (prevents XSS). -->
<p>Name: <?php echo htmlspecialchars($name, ENT_QUOTES, 'UTF-8'); ?></p>
<p>Age: <?php echo htmlspecialchars($age, ENT_QUOTES, 'UTF-8'); ?></p>
</body>
</html>
|
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2020, Linaro Limited
# Author: <NAME> <<EMAIL>>
#
# U-Boot UEFI: Image Authentication Test (signature with certificates chain)
"""
This test verifies image authentication for a signed image which is signed
by user certificate and contains additional intermediate certificates in its
signature.
"""
import pytest
@pytest.mark.boardspec('sandbox')
@pytest.mark.buildconfigspec('efi_secure_boot')
@pytest.mark.buildconfigspec('cmd_efidebug')
@pytest.mark.buildconfigspec('cmd_fat')
@pytest.mark.buildconfigspec('cmd_nvedit_efi')
@pytest.mark.slow
class TestEfiSignedImageIntca(object):
    # Exercises UEFI image authentication for images signed via a certificate
    # chain (user cert + intermediate CAs). Signed image variants and the
    # db/dbx/KEK/PK .auth blobs come from the efi_boot_env_intca fixture
    # (not visible here) -- the exact chain layout is assumed from the
    # test-case comments below.
    def test_efi_signed_image_intca1(self, u_boot_console, efi_boot_env_intca):
        """
        Test Case 1 - authenticated by root CA in db

        1a: image carries no intermediate CA certs -> verification fails.
        1b: image carries the intermediate certs -> chain verifies to db.
        """
        u_boot_console.restart_uboot()
        disk_img = efi_boot_env_intca
        with u_boot_console.log.section('Test Case 1a'):
            # Test Case 1a, with no Int CA and not authenticated by root CA
            output = u_boot_console.run_command_list([
                'host bind 0 %s' % disk_img,
                'fatload host 0:1 4000000 db_c.auth',
                'setenv -e -nv -bs -rt -at -i 4000000:$filesize db',
                'fatload host 0:1 4000000 KEK.auth',
                'setenv -e -nv -bs -rt -at -i 4000000:$filesize KEK',
                'fatload host 0:1 4000000 PK.auth',
                'setenv -e -nv -bs -rt -at -i 4000000:$filesize PK'])
            assert 'Failed to set EFI variable' not in ''.join(output)
            output = u_boot_console.run_command_list([
                'efidebug boot add 1 HELLO_a host 0:1 /helloworld.efi.signed_a ""',
                'efidebug boot next 1',
                'efidebug test bootmgr'])
            assert '\'HELLO_a\' failed' in ''.join(output)
            # 26 == EFI_SECURITY_VIOLATION (0x1a) -- image rejected by secure boot.
            assert 'efi_start_image() returned: 26' in ''.join(output)
        with u_boot_console.log.section('Test Case 1b'):
            # Test Case 1b, signed and authenticated by root CA
            output = u_boot_console.run_command_list([
                'efidebug boot add 2 HELLO_ab host 0:1 /helloworld.efi.signed_ab ""',
                'efidebug boot next 2',
                'bootefi bootmgr'])
            assert 'Hello, world!' in ''.join(output)
    def test_efi_signed_image_intca2(self, u_boot_console, efi_boot_env_intca):
        """
        Test Case 2 - authenticated by root CA in db

        2a: no certificate in db at all -> verification fails.
        2b: only the intermediate CA (db_b) in db -> still fails.
        2c: root CA (db_c) in db -> chain verifies and the image runs.
        """
        u_boot_console.restart_uboot()
        disk_img = efi_boot_env_intca
        with u_boot_console.log.section('Test Case 2a'):
            # Test Case 2a, unsigned and not authenticated by root CA
            output = u_boot_console.run_command_list([
                'host bind 0 %s' % disk_img,
                'fatload host 0:1 4000000 KEK.auth',
                'setenv -e -nv -bs -rt -at -i 4000000:$filesize KEK',
                'fatload host 0:1 4000000 PK.auth',
                'setenv -e -nv -bs -rt -at -i 4000000:$filesize PK'])
            assert 'Failed to set EFI variable' not in ''.join(output)
            output = u_boot_console.run_command_list([
                'efidebug boot add 1 HELLO_abc host 0:1 /helloworld.efi.signed_abc ""',
                'efidebug boot next 1',
                'efidebug test bootmgr'])
            assert '\'HELLO_abc\' failed' in ''.join(output)
            assert 'efi_start_image() returned: 26' in ''.join(output)
        with u_boot_console.log.section('Test Case 2b'):
            # Test Case 2b, signed and authenticated by root CA
            output = u_boot_console.run_command_list([
                'fatload host 0:1 4000000 db_b.auth',
                'setenv -e -nv -bs -rt -at -i 4000000:$filesize db',
                'efidebug boot next 1',
                'efidebug test bootmgr'])
            assert '\'HELLO_abc\' failed' in ''.join(output)
            assert 'efi_start_image() returned: 26' in ''.join(output)
        with u_boot_console.log.section('Test Case 2c'):
            # Test Case 2c, signed and authenticated by root CA
            output = u_boot_console.run_command_list([
                'fatload host 0:1 4000000 db_c.auth',
                'setenv -e -nv -bs -rt -at -i 4000000:$filesize db',
                'efidebug boot next 1',
                'efidebug test bootmgr'])
            assert 'Hello, world!' in ''.join(output)
    def test_efi_signed_image_intca3(self, u_boot_console, efi_boot_env_intca):
        """
        Test Case 3 - revoked by dbx

        3a: intermediate CA cert placed in dbx -- current implementation
            still boots the image (see the commented-out alternative below).
        3b: root CA cert placed in dbx -> image is rejected.
        """
        u_boot_console.restart_uboot()
        disk_img = efi_boot_env_intca
        with u_boot_console.log.section('Test Case 3a'):
            # Test Case 3a, revoked by int CA in dbx
            output = u_boot_console.run_command_list([
                'host bind 0 %s' % disk_img,
                'fatload host 0:1 4000000 dbx_b.auth',
                'setenv -e -nv -bs -rt -at -i 4000000:$filesize dbx',
                'fatload host 0:1 4000000 db_c.auth',
                'setenv -e -nv -bs -rt -at -i 4000000:$filesize db',
                'fatload host 0:1 4000000 KEK.auth',
                'setenv -e -nv -bs -rt -at -i 4000000:$filesize KEK',
                'fatload host 0:1 4000000 PK.auth',
                'setenv -e -nv -bs -rt -at -i 4000000:$filesize PK'])
            assert 'Failed to set EFI variable' not in ''.join(output)
            output = u_boot_console.run_command_list([
                'efidebug boot add 1 HELLO_abc host 0:1 /helloworld.efi.signed_abc ""',
                'efidebug boot next 1',
                'efidebug test bootmgr'])
            assert 'Hello, world!' in ''.join(output)
            # Or,
            # assert '\'HELLO_abc\' failed' in ''.join(output)
            # assert 'efi_start_image() returned: 26' in ''.join(output)
        with u_boot_console.log.section('Test Case 3b'):
            # Test Case 3b, revoked by root CA in dbx
            output = u_boot_console.run_command_list([
                'fatload host 0:1 4000000 dbx_c.auth',
                'setenv -e -nv -bs -rt -at -i 4000000:$filesize dbx',
                'efidebug boot next 1',
                'efidebug test bootmgr'])
            assert '\'HELLO_abc\' failed' in ''.join(output)
            assert 'efi_start_image() returned: 26' in ''.join(output)
|
<filename>21.process/5.fork.js
// fork / exec / execFile are all convenience wrappers built on top of spawn.
let { spawn } = require('child_process');

/**
 * Minimal re-implementation of child_process.fork: runs a node module in a
 * child process.
 *
 * @param {string} modulepath path of the node module to execute
 * @param {string[]} [args] arguments passed to the module
 * @param {Object} [options] spawn options; `silent: true` discards the
 *   child's stdio instead of sharing the parent's streams
 * @returns {ChildProcess} the spawned child, so callers can attach
 *   'message' listeners and call send()
 */
function fork(modulepath, args = [], options = {}) {
    let { silent } = options;
    let opts = Object.assign({}, options);
    // The trailing 'ipc' entry opens the message channel that makes
    // child.send() / 'message' events work, as the real fork does.
    if (silent) {
        opts.stdio = ['ignore', 'ignore', 'ignore', 'ipc'];
    } else {
        opts.stdio = [process.stdin, process.stdout, process.stderr, 'ipc'];
    }
    // Bug fix: the child process must be returned -- previously this
    // function returned undefined, so callers could not use the child.
    // process.execPath is used instead of 'node' so the same interpreter
    // runs the child even when 'node' is not on PATH.
    return spawn(process.execPath, [modulepath, ...args], opts);
}
// Demo: run fork.js with one argument, suppressing the child's stdio.
let child = fork('fork.js', ['zfpx'], {
    cwd: __dirname,
    silent: true
});
// NOTE(review): the IPC calls below are disabled; re-enabling them requires
// fork() to return a ChildProcess wired with an 'ipc' stdio channel.
// child.on('message', function (data) {
//     console.log(data);
// });
// child.send({ name: 'zfpx' });
|
<gh_stars>1-10
#pragma once
#include <iostream>
#include <vector>
#include <algorithm>
#include <cstdint>
#include <drogon/HttpAppFramework.h>
#include <drogon/orm/Mapper.h>
#include "../../models/models.hpp"
#include "../../main.hpp"
namespace quiz {
/** Lightweight, serializable view of a quiz option: id plus display text,
 *  copied out of the Drogon ORM row. */
class Option {
public:
  int id;
  std::string text;
  // Copies the id/text fields from an ORM Option row.
  Option(drogon_model::sqlite3::Option &ormOption) {
    id = ormOption.getValueOfId();
    text = ormOption.getValueOfText();
  }
};
/** Custom Question Class that holds a question and its relevant information*/
class Question {
public:
  int id;
  std::string text;
  // Options are shuffled at construction time (std::shuffle below) so the
  // answer's position is randomized per instance.
  // https://stackoverflow.com/questions/6926433/how-to-shuffle-a-stdvector
  std::vector<Option> options;
  // Loads the question row plus its four option rows (the answer and three
  // distractors) by primary key, then shuffles their order.
  // NOTE(review): findByPrimaryKey performs a synchronous DB lookup per
  // option -- four queries per Question constructed.
  Question(drogon_model::sqlite3::Question &ormQuestion) {
    id = ormQuestion.getValueOfId();
    text = ormQuestion.getValueOfText();
    const uint64_t optionIds[4] = {
        ormQuestion.getValueOfAnswerid(), ormQuestion.getValueOfOption2id(),
        ormQuestion.getValueOfOption3id(), ormQuestion.getValueOfOption4id()};
    for (auto val : optionIds) {
      auto ormOption = drogon::orm::Mapper<drogon_model::sqlite3::Option>(
                           drogon::app().getDbClient())
                           .findByPrimaryKey(val);
      options.push_back(Option(ormOption));
    }
    // randomEngine is declared elsewhere (main.hpp) -- TODO confirm it is
    // seeded once per process, not per Question.
    std::shuffle(options.begin(), options.end(), randomEngine);
  }
};
/** An object that represents an answer to a question.*/
class Answer {
public:
  int id;          // primary key of the Option row that is the correct answer
  int questionId;  // primary key of the owning Question row
  std::string questionText;
  std::string text;  // display text of the correct answer
  // Builds the answer view from a Question row, resolving the answer's
  // Option row with one synchronous DB lookup.
  Answer(drogon_model::sqlite3::Question &ormQuestion) {
    id = ormQuestion.getValueOfAnswerid();
    questionId = ormQuestion.getValueOfId();
    questionText = ormQuestion.getValueOfText();
    // the Option object that represents "Answer"
    auto tempOption = drogon::orm::Mapper<drogon_model::sqlite3::Option>(drogon::app().getDbClient()).findByPrimaryKey(id);
    text = tempOption.getValueOfText();
  }
};
} // namespace quiz
|
/// Returns the nth Fibonacci number (0-indexed: 0, 1, 1, 2, 3, 5, ...).
///
/// Bug fix: the original declared the parameter as `n: Int`, which makes the
/// label mandatory, yet every call site (including the recursive calls) used
/// `Fibonacci(5)` -- a compile error. `_ n` makes the label optional and
/// matches the existing call sites. The body is also iterative now, avoiding
/// the exponential cost of the naive double recursion while producing the
/// same values.
func Fibonacci(_ n: Int) -> Int {
    if n < 2 {
        return n
    }
    var previous = 0
    var current = 1
    for _ in 2...n {
        (previous, current) = (current, previous + current)
    }
    return current
}

print(Fibonacci(5)) // Output: 5
|
import React from 'react';
const AlertContext = React.createContext();
export const AlertConsumer = AlertContext.Consumer;
export class AlertProvider extends React.Component {
constructor(props) {
super(props);
this.state = {
message: '',
variant: '',
};
}
newAlert = (message, variant) => this.setState({ message, variant });
resetAlert = () => this.setState({ message: '', variant: '' });
render = () => (
<AlertContext.Provider
value={{
...this.state,
newAlert: this.newAlert,
resetAlert: this.resetAlert,
}}
>
{this.props.children}
</AlertContext.Provider>
);
}
|
<reponame>twillouer/lombok-intellij-plugin-1
package de.plushnikov.lombok.tests;
import de.plushnikov.lombok.LombokParsingTestCase;
import org.junit.Ignore;
import java.io.IOException;
/**
* Unit tests for IntelliJPlugin for Lombok, based on lombok test classes
* For this to work, the correct system property idea.home.path needs to be passed to the test runner.
*/
/**
 * Parsing tests for lombok's {@code @Builder} support in the IntelliJ plugin.
 * Each test delegates to {@link LombokParsingTestCase#doTest()}, which loads
 * the fixture matching the test method name.
 */
public class BuilderTestCase extends LombokParsingTestCase {
    // This test is lombok's homepage example.
    public void testBuilderExample() throws IOException {
        doTest();
    }
    // This test is lombok's homepage customized example.
    public void testBuilderExampleCustomized() throws IOException {
        doTest();
    }
    // This test is lombok's homepage example with predefined elements and another inner class.
    // Predefined inner builder class is not supported. There are no plans to support it.
    /*public void testBuilderPredefined() throws IOException {
        doTest();
    }*/
    public void testBuilderSimple() throws IOException {
        doTest();
    }
    public void testBuilderComplex() throws IOException {
        doTest();
    }
    // Predefined inner builder class is not supported. There are no plans to support it.
    /*public void testBuilderWithExistingBuilderClass() throws IOException {
        doTest();
    }*/
}
|
package com.yntx.service.edu.listener;
import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.event.AnalysisEventListener;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.yntx.service.edu.entity.Subject;
import com.yntx.service.edu.entity.excel.ExcelSubjectData;
import com.yntx.service.edu.mapper.SubjectMapper;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@AllArgsConstructor // all-args constructor
@NoArgsConstructor  // no-args constructor
/**
 * EasyExcel row listener that imports a two-level subject hierarchy
 * (level-one / level-two titles) into the subject table, skipping rows
 * whose categories already exist.
 *
 * NOTE(review): when built via the no-args constructor, {@code subjectMapper}
 * is null and {@link #invoke} will throw NPE -- callers must use the
 * all-args constructor.
 */
public class ExcelSubjectDataListener extends AnalysisEventListener<ExcelSubjectData> {
    /**
     * Treated here as a DAO; with business logic it could be a service.
     * Not needed at all if the parsed objects are never persisted.
     */
    private SubjectMapper subjectMapper;
    /**
     * Invoked once for every parsed row.
     */
    @Override
    public void invoke(ExcelSubjectData data, AnalysisContext context) {
        log.info("解析到一条数据:{}", data);
        // Pull the two category titles out of the row
        String titleLevelOne = data.getLevelOneTitle();
        String titleLevelTwo = data.getLevelTwoTitle();
        // Check whether the level-one category already exists
        Subject subjectLevelOne = this.getByTitle(titleLevelOne);
        String parentId = null;
        if(subjectLevelOne == null) {
            // Persist the new level-one category
            Subject subject = new Subject();
            subject.setParentId("0");
            subject.setTitle(titleLevelOne); // level-one category title
            subjectMapper.insert(subject);
            // relies on MyBatis-Plus backfilling the generated id after
            // insert -- TODO confirm id generation strategy on Subject
            parentId = subject.getId();
        }else{
            parentId = subjectLevelOne.getId();
        }
        // Check whether the level-two category already exists under parentId
        Subject subjectLevelTwo = this.getSubByTitle(titleLevelTwo, parentId);
        if(subjectLevelTwo == null){
            // Persist the new level-two category
            Subject subject = new Subject();
            subject.setTitle(titleLevelTwo);
            subject.setParentId(parentId);
            subjectMapper.insert(subject); // insert
        }
    }
    /**
     * Invoked once after every row has been parsed.
     */
    @Override
    public void doAfterAllAnalysed(AnalysisContext context) {
        log.info("所有数据解析完成!");
    }
    /**
     * Looks up a level-one category (parent_id = "0") by title.
     * @param title category title to match exactly
     * @return the matching Subject, or null when absent
     */
    private Subject getByTitle(String title) {
        QueryWrapper<Subject> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("title", title);
        queryWrapper.eq("parent_id", "0"); // level-one category
        return subjectMapper.selectOne(queryWrapper);
    }
    /**
     * Looks up a level-two category by title under the given parent.
     * @param title category title to match exactly
     * @param parentId id of the level-one parent category
     * @return the matching Subject, or null when absent
     */
    private Subject getSubByTitle(String title, String parentId) {
        QueryWrapper<Subject> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("title", title);
        queryWrapper.eq("parent_id", parentId);
        return subjectMapper.selectOne(queryWrapper);
    }
}
|
// Shape of a single registered export: the public name it was registered
// under plus the exported value itself.
type ExportedItem = {
  name: string;
  value: any;
};
/**
 * Minimal CommonJS-style module registry: values are registered with
 * `export(name, value)` and retrieved with `import(name)`.
 */
export class CommonModule {
  // Registered exports keyed by name. A Map gives O(1) lookup instead of
  // the previous linear scan, and avoids accumulating duplicate entries
  // when the same name is exported repeatedly.
  private exports = new Map<string, any>();

  /**
   * Export a class, function, or variable under `name`.
   * Re-exporting an existing name is a no-op so that the earliest
   * registration keeps winning on import (matching the original
   * first-match array scan).
   */
  export(name: string, value: any) {
    if (!this.exports.has(name)) {
      this.exports.set(name, value);
    }
  }

  /**
   * Import a previously exported value by name.
   * @throws Error when `name` was never exported from this module.
   */
  import(name: string) {
    if (this.exports.has(name)) {
      return this.exports.get(name);
    }
    throw new Error(`ImportError: ${name} is not exported from this module`);
  }
}
|
// Simple find-and-replace tool: on submit, rewrites the textarea content.
const form = document.querySelector('form');
const textArea = document.querySelector('#content');
const fromInput = document.querySelector('#from');
const toInput = document.querySelector('#to');

form.addEventListener('submit', (e) => {
    // Keep the page from reloading so the edited text stays visible.
    e.preventDefault();
    const content = textArea.value;
    const from = fromInput.value;
    const to = toInput.value;
    // NOTE(review): String.prototype.replace with a string pattern replaces
    // only the FIRST occurrence. If "replace all" is intended, use
    // content.replaceAll(from, to) or content.split(from).join(to).
    const newContent = content.replace(from, to);
    textArea.value = newContent;
});
|
<reponame>sintefneodroid/droid
// Doxygen-generated search index fragment: maps the lowercased symbol
// 'listitemprefab' to its documentation page anchor. Do not edit by hand.
var searchData=
[
  ['listitemprefab',['ListItemPrefab',['../classdroid_1_1_runtime_1_1_utilities_1_1_game_objects_1_1_status_displayer_1_1_render_texture_list.html#a4af7e34230a6993a753c44af62d51242',1,'droid::Runtime::Utilities::GameObjects::StatusDisplayer::RenderTextureList']]]
];
|
// Loads the base module via require() when running in a CommonJS-capable
// environment (Node, a webpack bundle, or React Native). In a plain browser
// <script> context `require` is undefined and `base`/`ripe` are expected to
// already exist as globals, so this block is skipped.
if (
    typeof require !== "undefined" &&
    (typeof window === "undefined" ||
        // eslint-disable-next-line camelcase
        typeof __webpack_require__ !== "undefined" ||
        (typeof navigator !== "undefined" && navigator.product === "ReactNative"))
) {
    // eslint-disable-next-line no-redeclare,no-var
    var base = require("./base");
    // eslint-disable-next-line no-redeclare,no-var
    var ripe = base.ripe;
}
/**
 * Returns 'size' length subsequences of elements from the
 * input items.
 *
 * The returned sequences are ordered according to the order
 * of the input items. If the input is sorted, the result
 * will respect that ordering. Elements are treated as
 * unique based on their position instead of their value,
 * meaning that there will be no repeated values in each
 * combination.
 *
 * @param {Array} items The items to create iterations from.
 * @param {Number} size The length of the return sequence.
 * @returns {Array} A sequence of length 'size' subsequences
 * of items created from combining the input items, with no
 * repeated values and its ordering respecting the order of
 * the input array.
 * @see https://docs.python.org/3/library/itertools.html#itertools.combinations
 */
ripe.combinations = function(items, size) {
    const sequences = [];

    // depth-first walk: at each position we either take the current
    // item (left branch first, which preserves input ordering in the
    // output) or skip it, until the prefix reaches the requested size
    const build = function(prefix, start) {
        if (prefix.length === size) {
            sequences.push(prefix);
            return;
        }

        // no items left to extend the prefix with
        if (start >= items.length) return;

        // branch 1: include items[start]; branch 2: skip it
        build(prefix.concat(items[start]), start + 1);
        build(prefix, start + 1);
    };

    build([], 0);
    return sequences;
};
|
<reponame>tsbohc/ProjectE
package moze_intel.projecte.gameObjs.tiles;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import moze_intel.projecte.capability.managing.ICapabilityResolver;
import net.minecraft.tileentity.TileEntityType;
import net.minecraft.util.Direction;
import net.minecraftforge.common.capabilities.Capability;
import net.minecraftforge.common.util.LazyOptional;
import net.minecraftforge.items.CapabilityItemHandler;
import net.minecraftforge.items.IItemHandler;
/**
 * Base class for EMC-holding tile entities that may additionally expose an
 * item handler capability. Subclasses populate {@link #itemHandlerResolver};
 * when it is set, ITEM_HANDLER_CAPABILITY requests are routed through it and
 * its cached LazyOptionals are invalidated with the tile's other caps.
 */
public abstract class CapabilityTileEMC extends TileEmc {

	@Nullable
	protected ICapabilityResolver<IItemHandler> itemHandlerResolver;

	public CapabilityTileEMC(TileEntityType<?> type) {
		super(type);
	}

	// maxAmount caps the EMC this tile can store (see TileEmc).
	public CapabilityTileEMC(TileEntityType<?> type, long maxAmount) {
		super(type, maxAmount);
	}

	@Override
	protected void invalidateCaps() {
		super.invalidateCaps();
		if (itemHandlerResolver != null) {
			//Should never be null but just in case
			itemHandlerResolver.invalidateAll();
		}
	}

	@Nonnull
	@Override
	public <T> LazyOptional<T> getCapability(@Nonnull Capability<T> cap, Direction side) {
		if (cap == CapabilityItemHandler.ITEM_HANDLER_CAPABILITY && itemHandlerResolver != null) {
			//Should never be null but just in case
			return itemHandlerResolver.getCapabilityUnchecked(cap, side);
		}
		return super.getCapability(cap, side);
	}
}
|
package cn.shiyanjun.platform.scheduled.api;
import cn.shiyanjun.platform.api.LifecycleAware;
/**
 * Top-level scheduling facade with start/stop lifecycle (via
 * {@link LifecycleAware}) that exposes its collaborating managers.
 */
public interface SchedulingManager extends LifecycleAware {

	ComponentManager getComponentManager();

	RecoveryManager getRecoveryManager();
}
|
def execute_moves(cube, moves):
    """Apply each move in ``moves`` to ``cube``, in order.

    cube: list representing a Rubik's cube (mutated in place).
    moves: iterable of moves; each move names the slices to rotate.
    """
    for current_move in moves:
        rotate_cube(cube, current_move)
def rotate_cube(cube, move):
    """Rotate the cube slices named by a single move.

    cube: list representing a Rubik's cube (mutated in place).
    move: list of slice indices to rotate.
    Returns the mutated cube for caller convenience.
    """
    for layer_index in move:
        cube[layer_index] = rotate_layer(cube[layer_index])
    return cube
def rotate_layer(slice):
    """Rotate a single 3x3 layer of a Rubik's cube 90 degrees clockwise.

    slice: 3x3 nested list of the values in that layer (not modified).
    Returns a NEW 3x3 nested list.

    Bug fix: the original returned a flat 9-element list even though the
    input is indexed as a 3x3 nested list (slice[2][0], ...), so rotating
    the same layer a second time crashed on the second subscript. The
    result is now nested, matching the documented input shape.
    (The parameter name shadows the builtin ``slice``; kept for backward
    compatibility with existing callers.)
    """
    # A clockwise quarter turn: the new top row is the old left column read
    # bottom-to-top, and so on for the middle and right columns.
    return [[slice[2][0], slice[1][0], slice[0][0]],
            [slice[2][1], slice[1][1], slice[0][1]],
            [slice[2][2], slice[1][2], slice[0][2]]]
def solve_cube(cube):
    # Main function that solves a rubik's cube
    # cube: list representing a rubik's cube
    # NOTE(review): analyze_cube() and get_solve_moves() are not defined in
    # this file; they must be provided elsewhere for this entry point to run.
    # Determine cube state by analyzing thecube[i][j][k] values
    scramble_state = analyze_cube(cube)
    # Find the sequence of moves required to solve thecube[i][j][k]
    moves = get_solve_moves(scramble_state)
    # Execute moves to solve thecube[i][j][k]
    execute_moves(cube, moves)
|
#!/bin/bash
# Atomic Red Team T1009 (Binary Padding): append a single zero byte to
# /tmp/evil-binary so its hash changes without altering its behaviour.
echo T1009 Binary Padding
echo Atomic Test #1 - Pad Binary to Change Hash - Linux/macOS dd
dd if=/dev/zero bs=1 count=1 >> /tmp/evil-binary
|
# Materialize the integers 0 through 9 as a list.
number_list = [value for value in range(10)]
print(number_list)  # Output: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
|
<reponame>CSCfi/pebbles
"""empty message
Revision ID: he536vdwh29f
Revises: <PASSWORD>
Create Date: 2019-07-18 11:50:00.78215
"""
# revision identifiers, used by Alembic.
revision = 'he<PASSWORD>'
down_revision = '<PASSWORD>'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Forward migration: add the nullable gpu_enabled flag to blueprints."""
    ### commands auto generated by Alembic - please adjust! ###
    # nullable=True keeps existing rows valid (NULL = flag never set).
    op.add_column('blueprints', sa.Column('gpu_enabled', sa.Boolean(), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Reverse migration: drop the gpu_enabled column (data is lost)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('blueprints', 'gpu_enabled')
    ### end Alembic commands ###
|
#!/bin/bash
# Bootstrap a fresh development database for the anytask Django project.
# Refuses to run if sqlite3.db already exists so an existing DB is not clobbered.
[[ -f sqlite3.db ]] && echo "sqlite3.db already exists!" && exit 1
# NOTE(review): syncdb only exists on Django < 1.9; on newer Django replace
# the two commands with a single `migrate`.
./manage.py syncdb --noinput
./manage.py migrate
./manage.py createsuperuser --username=anytask --email=webmaster@anytask-klg.tk --noinput
# Sets the superuser password to a known development value ("pass");
# never use this script against a production database.
echo 'from django.contrib.auth.models import User ; user=User.objects.get(username="anytask") ; user.set_password("pass") ; user.save() ; print "Password changed"' | ./manage.py shell --plain
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.