text stringlengths 1 1.05M |
|---|
import pulsar as psr


def load_ref_system():
    """Return the 4-planar (PtCl4) system as found in the IQMol fragment library.

    All credit to https://github.com/nutjunkie/IQmol
    """
    # Square-planar platinum tetrachloride geometry, XYZ-style records
    # (element symbol followed by three coordinates per line).
    geometry = """
Pt -0.0000 -0.0000 -0.0000
Cl -2.1782 0.7530 -0.4981
Cl 2.1782 -0.7530 0.4981
Cl 0.8994 1.6933 -1.3729
Cl -0.8994 -1.6933 1.3729
"""
    return psr.make_system(geometry)
|
#! /bin/bash
# Write the Mirth Connect server configuration to mirth.properties, then start
# the server. The whole file is emitted as one single-quoted string; database
# settings are injected via the '"${VAR}"' quote breaks, which let the shell
# expand DATABASE, DATABASE_URL, DATABASE_USER and DATABASE_PASSWORD while the
# rest of the text is taken literally.
# NOTE(review): keystore.storepass/keypass are hard-coded below — confirm they
# are overridden or rotated for production deployments.
echo '# Mirth Connect configuration file
# directories
dir.appdata = appdata
dir.tempdata = ${dir.appdata}/temp
# ports
http.port=8080
https.port=8443
# password requirements
password.minlength = 0
password.minupper = 0
password.minlower = 0
password.minnumeric = 0
password.minspecial = 0
password.retrylimit = 0
password.lockoutperiod = 0
password.expiration = 0
password.graceperiod = 0
password.reuseperiod = 0
password.reuselimit = 0
# Only used for migration purposes, do not modify
version = 3.8.0
# keystore
keystore.path = ${dir.appdata}/keystore.jks
keystore.storepass = wkrQBExi1qcz
keystore.keypass = bHiY9dWVBb8i
keystore.type = JCEKS
# server
http.contextpath = /
server.url =
http.host = 0.0.0.0
https.host = 0.0.0.0
https.client.protocols = TLSv1.3,TLSv1.2,TLSv1.1
https.server.protocols = TLSv1.3,TLSv1.2,TLSv1.1,SSLv2Hello
https.ciphersuites = TLS_CHACHA20_POLY1305_SHA256,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,TLS_DHE_RSA_WITH_CHACHA20_POLY1305_SHA256,TLS_AES_256_GCM_SHA384,TLS_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384,TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,TLS_DHE_DSS_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256,TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,TLS_DHE_DSS_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,TLS_RSA_WITH_AES_256_CBC_SHA256,TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384,TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384,TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,TLS_DHE_DSS_WITH_AES_256_CBC_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA,TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA,TLS_ECDH_RSA_WITH_AES_256_CBC_SHA,TLS_DHE_RSA_WITH_AES_256_CBC_SHA,TLS_DHE_DSS_WITH_AES_256_CBC_SHA,TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,TLS_RSA_WITH_AES_128_CBC_SHA256,TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256,TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256,TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,TLS_DHE_DSS_WITH_AES_128_CBC_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_128_CBC_SHA,TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA,TLS_ECDH_RSA_WITH_AES_128_CBC_SHA,TLS_DHE_RSA_WITH_AES_128_CBC_SHA,TLS_DHE_DSS_WITH_AES_128_CBC_SHA,TLS_EMPTY_RENEGOTIATION_INFO_SCSV
https.ephemeraldhkeysize = 2048
# CORS headers
server.api.accesscontrolalloworigin = *
server.api.accesscontrolallowcredentials = false
server.api.accesscontrolallowmethods = GET, POST, DELETE, PUT
server.api.accesscontrolallowheaders = Content-Type
server.api.accesscontrolexposeheaders =
server.api.accesscontrolmaxage =
# Determines whether or not channels are deployed on server startup.
server.startupdeploy = true
# Determines whether libraries in the custom-lib directory will be included on the server classpath.
# To reduce potential classpath conflicts you should create Resources and use them on specific channels/connectors instead, and then set this value to false.
server.includecustomlib = false
# administrator
administrator.maxheapsize = 512m
# properties file that will store the configuration map and be loaded during server startup
configurationmap.path = ${dir.appdata}/configuration.properties
# The language version for the Rhino JavaScript engine (supported values: 1.0, 1.1, ..., 1.8, es6).
rhino.languageversion = es6
# options: derby, mysql, postgres, oracle, sqlserver
database = '"${DATABASE}"'
# examples:
# Derby jdbc:derby:${dir.appdata}/mirthdb;create=true
# PostgreSQL jdbc:postgresql://localhost:5432/mirthdb
# MySQL jdbc:mysql://localhost:3306/mirthdb
# Oracle jdbc:oracle:thin:@localhost:1521:DB
# SQLServer jdbc:jtds:sqlserver://localhost:1433/mirthdb
database.url = '"${DATABASE_URL}"'
# if using a custom driver, specify it here
#database.driver =
# Maximum number of connections allowed for the main read/write connection pool
database.max-connections = 20
# Maximum number of connections allowed for the read-only connection pool
database-readonly.max-connections = 20
# database credentials
database.username = '"${DATABASE_USER}"'
database.password = '"${DATABASE_PASSWORD}"'
# If true, various read-only statements are separated into their own connection pool.
# By default the read-only pool will use the same connection information as the master pool,
# but you can change this with the "database-readonly" options. For example, to point the
# read-only pool to a different JDBC URL:
#
# database-readonly.url = jdbc:...
#
database.enable-read-write-split = true
'>/opt/mirth-connect/conf/mirth.properties
# Launch the Mirth Connect server in the foreground so the container/process
# supervisor tracks it directly.
java -jar /opt/mirth-connect/mirth-server-launcher.jar
|
import { types as t } from "@babel/core";
import Path from "path";
import assignDefined from "object-assign-defined";
import * as th from "../../utils/templates";
import * as ast from "../../utils/ast";
import { getJsDocClassInfo, getTags } from "./jsdoc";
import { getDecoratorClassInfo } from "./decorators";
/**
* Converts an ES6 class to a UI5 extend.
* Any static methods or properties will be moved outside the class body.
* The path will be updated with the new AST.
*/
export function convertClassToUI5Extend(
  path,
  node,
  classInfo,
  extraStaticProps,
  opts
) {
  // Non-class nodes pass through untouched.
  if (!(t.isClassDeclaration(node) || t.isClassExpression(node))) {
    return node;
  }
  // Statements that assign static members onto the class AFTER the extend call.
  const staticMembers = [];
  const classNameIdentifier = node.id;
  const className = classNameIdentifier.name;
  const superClass = node.superClass; // Identifier node.
  const superClassName = superClass.name;
  // A class counts as a controller when its name says so or the class info
  // (from JSDoc/decorators) marks it explicitly.
  const isController =
    className.includes("Controller") || !!classInfo.controller;
  const moveControllerConstructorToOnInit =
    isController && !!opts.moveControllerConstructorToOnInit;
  const moveControllerPropsToOnInit =
    isController &&
    (!!opts.moveControllerPropsToOnInit ||
      !!opts.moveControllerConstructorToOnInit);
  const moveStaticStaticPropsToExtend =
    isController && !!opts.addControllerStaticPropsToExtend;
  const alwaysMoveInstanceProps = !opts.onlyMoveClassPropsUsingThis;
  // Properties that will end up in the object literal passed to .extend().
  const extendProps = [];
  // Instance props that must be assigned inside constructor/onInit so `this`
  // binds correctly.
  const boundProps = [];
  const CONSTRUCTOR = "constructor";
  // Instance methods/props by name, used to find an existing onInit later.
  const propsByName = {};
  let constructor;
  let constructorComments;
  // Which static props get hoisted into the extend object (vs. assigned after).
  const staticPropsToAdd = moveStaticStaticPropsToExtend
    ? Object.keys(extraStaticProps)
    : ["metadata", "renderer"];
  for (const propName of staticPropsToAdd) {
    if (extraStaticProps[propName]) {
      extendProps.push(
        t.objectProperty(t.identifier(propName), extraStaticProps[propName])
      );
    }
  }
  // Walk the class body, sorting every member into extendProps, staticMembers
  // or boundProps.
  for (const member of node.body.body) {
    const memberName = member.key.name;
    if (t.isClassMethod(member)) {
      // Re-wrap the method as a plain function expression for the extend object.
      const func = t.functionExpression(
        member.key,
        member.params,
        member.body,
        member.generator,
        member.async
      );
      if (member.static) {
        staticMembers.push(
          buildMemberAssignmentStatement(classNameIdentifier, {
            ...member,
            value: func,
          })
        );
      } else {
        propsByName[memberName] = func;
        if (member.kind === "get" || member.kind === "set") {
          // Accessors stay as object methods so getter/setter semantics survive.
          extendProps.push(
            t.objectMethod(
              member.kind,
              member.key,
              member.params,
              member.body,
              member.computed
            )
          );
        } else {
          // method
          if (memberName === CONSTRUCTOR) {
            constructorComments = member.leadingComments;
            constructor = func;
            if (moveControllerPropsToOnInit) {
              continue; // don't push to props yet
            }
          }
          func.id = path.scope.generateUidIdentifier(func.id.name); // Give the function a unique name
          extendProps.push(
            buildObjectProperty({
              ...member,
              value: func,
            })
          );
        }
      }
    } else if (t.isClassProperty(member)) {
      if (!member.value) continue; // un-initialized static class prop (typescript)
      if (memberName === "metadata" || memberName === "renderer") {
        // Special handling for TypeScript limitation where metadata and renderer must be properties.
        extendProps.unshift(buildObjectProperty(member));
      } else if (member.static) {
        if (moveStaticStaticPropsToExtend) {
          extendProps.unshift(buildObjectProperty(member));
        } else {
          staticMembers.push(
            buildMemberAssignmentStatement(classNameIdentifier, member)
          );
        }
      } else {
        propsByName[memberName] = member.value;
        if (memberName === "constructor") {
          constructorComments = member.leadingComments;
          constructor = member.value;
          if (moveControllerPropsToOnInit) {
            continue; // don't push to props yet
          }
        }
        // Props that (may) use `this` must be assigned inside the binding
        // method rather than placed on the prototype via extend.
        if (
          alwaysMoveInstanceProps ||
          t.isArrowFunctionExpression(member.value) ||
          ast.isThisExpressionUsed(member.value)
        ) {
          boundProps.push(member);
        } else {
          extendProps.push(buildObjectProperty(member));
        }
      }
    }
  }
  // Arrow function properties need to get moved to the constructor so that
  // they're bound properly to the class instance, to align with the spec.
  // For controllers, use onInit rather than constructor, since controller constructors don't work.
  // Also move the constructor's statements to the onInit.
  const bindToConstructor = !moveControllerPropsToOnInit;
  const bindToMethodName = moveControllerPropsToOnInit
    ? "onInit"
    : "constructor";
  // avoid getting a prop named constructor as it may return {}'s
  let bindMethod = moveControllerPropsToOnInit
    ? propsByName[bindToMethodName]
    : constructor;
  const constructorJsdoc = getTags(constructorComments);
  // The constructor is kept verbatim when the option says so, or when the
  // class info / a @keep JSDoc tag requests it.
  const keepConstructor =
    !moveControllerConstructorToOnInit ||
    classInfo.keepConstructor ||
    constructorJsdoc.keep;
  // See if we need either constructor or onInit
  const needsBindingMethod =
    boundProps.length ||
    (moveControllerPropsToOnInit && constructor && !keepConstructor);
  // See if we need to create a new 'constructor' or 'onInit' method, depending which one we'll bind to.
  if (needsBindingMethod && !bindMethod) {
    const bindToId = t.identifier(bindToMethodName);
    const bindMethodDeclaration = bindToConstructor
      ? th.buildInheritingConstructor({
          SUPER: t.identifier(superClassName),
        })
      : th.buildInheritingFunction({
          NAME: bindToId,
          SUPER: t.identifier(superClassName),
        });
    bindMethod = ast.convertFunctionDeclarationToExpression(
      bindMethodDeclaration
    );
    extendProps.unshift(t.objectProperty(bindToId, bindMethod));
  }
  if (constructor && moveControllerPropsToOnInit) {
    if (keepConstructor) {
      extendProps.unshift(
        t.objectProperty(t.identifier(CONSTRUCTOR), constructor)
      );
    } else {
      // Copy all except the super call from the constructor to the bindMethod (i.e. onInit)
      bindMethod.body.body.unshift(
        ...constructor.body.body.filter(
          node => !ast.isSuperCallExpression(node.expression)
        )
      );
    }
  }
  if (boundProps.length) {
    // We need to inject the bound props into the bind method (constructor or onInit),
    // but not until after the super call (if applicable)
    const mappedProps = boundProps.map(member =>
      buildThisMemberAssignmentStatement(member)
    );
    const superIndex = bindMethod.body.body.findIndex(
      node =>
        ast.isSuperCallExpression(node.expression) ||
        ast.isSuperPrototypeCallOf(
          node.expression,
          superClassName,
          bindToMethodName
        )
    );
    if (superIndex === -1) {
      // If there's no super, just add the bound props at the start
      bindMethod.body.body.unshift(...mappedProps);
    } else {
      const upToSuper = bindMethod.body.body.slice(0, superIndex + 1);
      const afterSuper = bindMethod.body.body.slice(superIndex + 1);
      bindMethod.body.body = [...upToSuper, ...mappedProps, ...afterSuper];
    }
  }
  // Build the final SuperClass.extend("fqn", { ... }) assignment.
  let extendAssign;
  if (classInfo) {
    extendAssign = th.buildExtendAssignWithMD({
      NAME: classNameIdentifier,
      SUPER: superClass, // Needs Identifier node
      FQN: t.stringLiteral(getFullyQualifiedName(classInfo)),
      OBJECT: t.objectExpression(extendProps),
      FN_META_IMPL: "MetadataObject",
    });
  } else {
    // NOTE(review): with a falsy classInfo, getFullyQualifiedName(classInfo)
    // below would throw reading .alias — this branch looks unreachable or the
    // condition above is checking the wrong thing. Confirm intent.
    extendAssign = th.buildExtendAssign({
      NAME: classNameIdentifier,
      SUPER: superClass, // Needs Identifier node
      FQN: t.stringLiteral(getFullyQualifiedName(classInfo)),
      OBJECT: t.objectExpression(extendProps),
    });
  }
  // The extend call comes first, followed by the static member assignments.
  return [extendAssign, ...staticMembers];
}
/**
 * Resolve the fully-qualified UI5 name for a class.
 * An explicit alias wins, then an explicit name; otherwise the declared (or
 * file-derived) namespace is joined to the local class name with a dot.
 */
function getFullyQualifiedName(classInfo) {
  const { alias, name, namespace, fileNamespace, localName } = classInfo;
  if (alias) return alias;
  if (name) return name;
  const ns = namespace || fileNamespace;
  return ns ? `${ns}.${localName}` : `${ns}${localName}`;
}
/**
 * Collect class metadata from three sources, in increasing priority:
 * AST-derived defaults, decorator info, then JSDoc info.
 */
export function getClassInfo(path, node, parent, pluginOpts) {
  // Baseline values derived from the AST and the file's location.
  const defaults = {
    localName: node.id.name,
    superClassName: node.superClass && node.superClass.name,
    fileNamespace: getFileBaseNamespace(path, pluginOpts) || "",
  };
  const fromDecorators = getDecoratorClassInfo(node);
  const fromJsDoc = getJsDocClassInfo(node, parent);
  // like Object.assign, but ignoring undefined values.
  return assignDefined(defaults, fromDecorators, fromJsDoc);
}
/**
* Reads the namespace from the file path (but not the name).
*/
/**
 * Reads the namespace from the file path (but not the name).
 * Returns the dot-joined directory path of the file relative to the source
 * root (optionally prefixed with pluginOpts.namespacePrefix), or undefined
 * when the file lies outside the source root.
 */
function getFileBaseNamespace(path, pluginOpts) {
  const opts = path.hub.file.opts;
  const filename = Path.resolve(opts.filename);
  const sourceRoot = opts.sourceRoot
    ? Path.resolve(process.cwd(), opts.sourceRoot)
    : process.cwd();
  // Bug fix: the previous plain startsWith() check wrongly matched sibling
  // directories sharing a prefix (e.g. "/srcfoo" under root "/src"). Decide
  // containment from the relative path instead.
  const filenameRelative = Path.relative(sourceRoot, filename);
  if (filenameRelative.startsWith("..") || Path.isAbsolute(filenameRelative)) {
    // File is outside the source root; no base namespace can be derived.
    return undefined;
  }
  const { dir } = Path.parse(filenameRelative);
  const namespaceParts = dir.split(Path.sep);
  if (pluginOpts.namespacePrefix) {
    namespaceParts.unshift(pluginOpts.namespacePrefix);
  }
  return namespaceParts.join(".");
}
// Wrap a class member as an ObjectProperty node, preserving computed-ness.
const buildObjectProperty = member => {
  return t.objectProperty(member.key, member.value, member.computed);
};
// Build the statement `<object>.<key> = <value>;` for a class member.
const buildMemberAssignmentStatement = (objectIdentifier, member) => {
  const target = t.memberExpression(objectIdentifier, member.key, member.computed);
  return t.expressionStatement(
    t.assignmentExpression("=", target, member.value)
  );
};
// Build `this.<key> = <value>;` for a class member.
// Fix: the previous `.bind(null, t.thisExpression())` created ONE ThisExpression
// node at module load time and reused it in every generated statement. Babel
// AST nodes must be unique per tree position, so create a fresh node per call.
const buildThisMemberAssignmentStatement = member =>
  buildMemberAssignmentStatement(t.thisExpression(), member);
|
#!/bin/bash
#
# ============LICENSE_START===================================================
# Copyright (C) 2020-2021 AT&T Intellectual Property. All rights reserved.
# ============================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============LICENSE_END=====================================================
#
# Prepare a fresh checkout of the policy-models examples under ${WORKSPACE}.
# Fixes: variable expansions are quoted (shellcheck SC2086) so paths with
# spaces do not word-split, and `cd` is guarded so a failure cannot let the
# clone land in the wrong directory.
source "${SCRIPTS}/get-branch-mariadb.sh"
rm -rf "${WORKSPACE}/models"
mkdir "${WORKSPACE}/models"
cd "${WORKSPACE}" || exit 1
# download models examples
git clone -b "${GERRIT_BRANCH}" --single-branch https://github.com/onap/policy-models.git models
|
/**
 * Plain data holder describing an employee record shown in the UI.
 */
export class EmployeeItem {
  employeeId: number;
  name: string;
  surname: string;
  // Fix: use the primitive type `string` instead of the `String` wrapper
  // object type — the wrapper is almost never intended in TypeScript and
  // breaks assignability with ordinary string values in strict checks.
  userRole: string;
  email: string;
  enabled: boolean;
}
|
#include "ServoRamp.h"
// Construct a ramp bound to the given servo signal pin; range and timing
// members keep whatever defaults the class declaration provides.
ServoRamp::ServoRamp(int signalPin) {
    pin = signalPin;
}
// Construct with an explicit pulse range: neutral point plus travel endpoints.
ServoRamp::ServoRamp(int signalPin, int neutral, int startPoint, int endPoint) {
    pin = signalPin;
    mid = neutral;     // neutral pulse width
    mn = startPoint;   // lower travel limit
    mx = endPoint;     // upper travel limit
    // ini();  // attach is deferred until ini() is called explicitly
}
// Construct with an explicit pulse range plus ramp durations in milliseconds.
ServoRamp::ServoRamp(int signalPin, int neutral, int startPoint, int endPoint,
                     int rampUpMillis, int rampDownMillis) {
    pin = signalPin;
    mid = neutral;            // neutral pulse width
    mn = startPoint;          // lower travel limit
    mx = endPoint;            // upper travel limit
    rampUpMS = rampUpMillis;
    rampDownMS = rampDownMillis;
    // ini();  // attach is deferred until ini() is called explicitly
}
// Destructor: the class owns no dynamically allocated resources, so there is
// nothing to release here.
ServoRamp::~ServoRamp(){
}
// Advance one ramp increment toward the upper endpoint and apply it.
void ServoRamp::stepUp() {
    if (value >= mx) {
        return;  // already at (or past) the upper endpoint
    }
    value += upSteps;
    setValue(value);
}
// Retreat one ramp increment toward the lower endpoint and apply it.
void ServoRamp::stepDown() {
    if (value <= mn) {
        return;  // already at (or past) the lower endpoint
    }
    value -= downSteps;
    setValue(value);
}
// Sweep continuously from the current position up to the max endpoint,
// pausing 10 ms between increments.
void ServoRamp::rampUp() {
    while (value < mx) {
        setValue(value);
        delay(10);
        value += upSteps;
    }
}
// Sweep continuously from the current position down to the min endpoint,
// pausing 10 ms between decrements.
void ServoRamp::rampDown() {
    while (value > mn) {
        setValue(value);
        delay(10);
        value -= downSteps;
    }
}
// Derive per-tick step sizes from the configured ramp durations, then attach
// the servo to its pin with the configured pulse-width limits.
// NOTE(review): this is integer division — rampDownMS/(1000/rampSteps)
// truncates, and any rampSteps > 1000 makes the divisor zero. Confirm the
// intended units/range of rampSteps.
void ServoRamp::ini(){
    downSteps = rampDownMS/(1000/rampSteps);
    upSteps = rampUpMS/(1000/rampSteps);
    servo.attach(pin, mn, mx);
}
// Write pulse width `v` to the servo if it lies within [mn, mx].
// Returns the value written, or -1 when v was out of range and ignored.
// Fixes: bitwise '&' replaced with logical '&&', and the function now returns
// a value on every path — it was declared `int` but returned nothing, which
// is undefined behavior in C++.
int ServoRamp::setValue(int v){
    if (v >= mn && v <= mx) {
        // if(v < nrange && v > -nrange) v = 0; //add neutral range
        servo.writeMicroseconds(v);
        return v;
    }
    return -1;  // out of range: no write performed
}
// Set the neutral (mid) pulse width.
void ServoRamp::setNeutral(int v){
    mid = v;
}
// Return the neutral (mid) pulse width.
int ServoRamp::getNeutral(){
    return mid;
}
|
#!/bin/bash
# Scaffolds the "DocumentationDescription" page: appends its routes and i18n
# messages, then splices Arbitrary generators into the generated test utils.
echo ""
echo "Applying migration DocumentationDescription"
echo "Adding routes to conf/app.routes"
# GET/POST routes for both NormalMode and CheckMode are appended verbatim.
echo "" >> ../conf/app.routes
echo "GET /documentationDescription controllers.DocumentationDescriptionController.onPageLoad(mode: Mode = NormalMode)" >> ../conf/app.routes
echo "POST /documentationDescription controllers.DocumentationDescriptionController.onSubmit(mode: Mode = NormalMode)" >> ../conf/app.routes
echo "GET /changeDocumentationDescription controllers.DocumentationDescriptionController.onPageLoad(mode: Mode = CheckMode)" >> ../conf/app.routes
echo "POST /changeDocumentationDescription controllers.DocumentationDescriptionController.onSubmit(mode: Mode = CheckMode)" >> ../conf/app.routes
echo "Adding messages to conf.messages"
# English message keys for the new page.
echo "" >> ../conf/messages.en
echo "documentationDescription.title = documentationDescription" >> ../conf/messages.en
echo "documentationDescription.heading = documentationDescription" >> ../conf/messages.en
echo "documentationDescription.checkYourAnswersLabel = documentationDescription" >> ../conf/messages.en
echo "documentationDescription.error.required = Enter documentationDescription" >> ../conf/messages.en
echo "documentationDescription.error.length = DocumentationDescription must be 50 characters or less" >> ../conf/messages.en
echo "documentationDescription.change.hidden = DocumentationDescription" >> ../conf/messages.en
echo "Adding to UserAnswersEntryGenerators"
# Insert an Arbitrary[(DocumentationDescriptionPage.type, JsValue)] generator
# right after the trait declaration; the trailing "1" pattern prints every
# line, so the rest of the file is copied through unchanged.
awk '/trait UserAnswersEntryGenerators/ {\
print;\
print "";\
print " implicit lazy val arbitraryDocumentationDescriptionUserAnswersEntry: Arbitrary[(DocumentationDescriptionPage.type, JsValue)] =";\
print " Arbitrary {";\
print " for {";\
print " page <- arbitrary[DocumentationDescriptionPage.type]";\
print " value <- arbitrary[String].suchThat(_.nonEmpty).map(Json.toJson(_))";\
print " } yield (page, value)";\
print " }";\
next }1' ../test-utils/generators/UserAnswersEntryGenerators.scala > tmp && mv tmp ../test-utils/generators/UserAnswersEntryGenerators.scala
echo "Adding to PageGenerators"
# Same pattern: add an Arbitrary instance for the new page object.
awk '/trait PageGenerators/ {\
print;\
print "";\
print " implicit lazy val arbitraryDocumentationDescriptionPage: Arbitrary[DocumentationDescriptionPage.type] =";\
print " Arbitrary(DocumentationDescriptionPage)";\
next }1' ../test-utils/generators/PageGenerators.scala > tmp && mv tmp ../test-utils/generators/PageGenerators.scala
echo "Adding to UserAnswersGenerator"
# Register the new page's arbitrary in the generators list.
awk '/val generators/ {\
print;\
print " arbitrary[(DocumentationDescriptionPage.type, JsValue)] ::";\
next }1' ../test-utils/generators/UserAnswersGenerator.scala > tmp && mv tmp ../test-utils/generators/UserAnswersGenerator.scala
echo "Migration DocumentationDescription completed"
|
<reponame>LexasCMS/vsf-next-lexascms<gh_stars>1-10
import { integrationPlugin } from '@vue-storefront/core';
export default integrationPlugin(({ integration }) => {
  // The placeholder below is substituted with the serialized module options
  // at build time; parse it back into a plain object here.
  const parsedOptions = JSON.parse('<%= JSON.stringify(options) %>');

  // Register the LexasCMS integration with the parsed configuration.
  integration.configure('lexascms', { ...parsedOptions });
});
|
<reponame>aditya542ar/kanban-board
import { Component, OnInit, EventEmitter, Pipe, Injectable, PipeTransform, ViewChild, ElementRef } from '@angular/core';
import { ProjectService } from '../service/project.service';
import { Project } from './project';
import { User } from '../user/user';
import { Team } from '../team/team';
import { Stage } from '../stage/stage';
import { Task } from '../task/task';
import { EmitAction } from '../util/emit-action';
import { UtilService } from 'src/app/util/util.service';
import { Modal } from '../util/modal';
@Pipe({
name: 'projectSearch',
pure: false
})
@Injectable({
providedIn: 'root'
})
export class ProjectSearchPipe implements PipeTransform {
  /**
   * Filter projects whose string `field` contains `value`, case-insensitively.
   * A missing list yields an empty array; a blank search term is a no-op.
   */
  transform(iteams: Array<Project>, field: string, value: string): Array<Project> {
    if (!iteams) {
      return [];
    }
    if (!value || value.trim().length === 0) {
      return iteams;
    }
    const needle = value.toLowerCase();
    return iteams.filter(project => {
      const candidate = project[field];
      return candidate ? (candidate as string).toLowerCase().includes(needle) : false;
    });
  }
}
@Component({
selector: 'app-project-list',
templateUrl: './project-list.component.html',
styleUrls: ['./project-list.component.css']
})
/**
 * Lists all projects visible to the logged-in user, along with their owners,
 * teams and stages, and hosts the "create new project" modal.
 */
export class ProjectListComponent implements OnInit {
  // Fields the list can be searched by (bound to a select in the template).
  searchByList: Array<string> = new Array<string>("name", "id", "startDate", "endDate");
  currSearchBy: string = this.searchByList[0];
  searchString: string = "";
  // Becomes true once projects plus their related data have all arrived.
  showAllProjects: boolean = false;
  allProjects: Array<Project> = new Array<Project>();
  owners: Array<User> = new Array<User>();
  teamsOfProjects: Array<Team> = new Array<Team>();
  stagesOfProjects: Array<Stage> = new Array<Stage>();
  tasksOfProjects: Array<Task> = new Array<Task>();
  // Internal event bus used to detect when all parallel fetches completed.
  private gotSubscriptionData$: EventEmitter<EmitAction> = new EventEmitter<EmitAction>();
  private action: string = "loadAllProjects";
  // Per-fetch completion flags checked by checkSubscriptionData().
  private gotUserData: boolean = false;
  private gotTeamData: boolean = false;
  private gotStageData: boolean = false;
  private gotTaskData: boolean = false;
  modal: Modal = new Modal();
  newProject: Project;
  createProjectSuccess: boolean = false;
  createProjectFail: boolean = false;
  creatingNewProject: boolean = false;
  popupMsg: string = "";
  @ViewChild("modalBodyDiv")
  modalBodyDiv: ElementRef;
  constructor(private projectService: ProjectService,
    private utilService: UtilService) { }
  /** Wires up completion tracking and kicks off the initial load. */
  ngOnInit() {
    this.utilService.showSpinner();
    this.checkSubscriptionData();
    this.utilService.hideProjectDropDown();
    this.utilService.currPage = "projectList";
    this.utilService.gotProjectAndLoggedInUser().subscribe(
      (data) => {
        this.loadAllProjects(data.loggedInUser);
      }
    )
  }
  /**
   * Fetches the user's projects, then fans out to fetch owners, teams and
   * stages in parallel; each completion emits on gotSubscriptionData$.
   * NOTE(review): when the project list is empty, the completion event is
   * emitted twice — harmless with the current handler, but confirm intended.
   */
  loadAllProjects(user?: User): void {
    this.gotUserData = false;
    this.gotTeamData = false;
    this.gotStageData = false;
    if (!user) user = this.utilService.getLoggedInUser();
    this.projectService.fetchAllProjectsByUserId(user).subscribe(
      (res) => {
        console.log(res);
        this.allProjects = res;
        this.gotSubscriptionData$.emit(new EmitAction("loadAllProjects", true));
        if (this.allProjects.length == 0) {
          console.log("all projects length = 0 , setting all got data as true");
          this.gotUserData = true;
          this.owners = [];
          this.gotTeamData = true;
          this.teamsOfProjects = [];
          this.gotStageData = true;
          this.stagesOfProjects = [];
          this.gotSubscriptionData$.emit(new EmitAction("loadAllProjects", true));
        }
        let userIds = this.allProjects.map((p) => p.ownerId);
        console.log("userIds", userIds);
        this.projectService.fetchUserByIds(userIds).subscribe(
          (users) => {
            console.log(users);
            this.owners = users;
            this.gotUserData = true;
            this.gotSubscriptionData$.emit(new EmitAction("loadAllProjects", true));
          });
        let projectIds = this.allProjects.map((p) => p.id);
        this.projectService.fetchTeamsByProjectIds(projectIds)
          .subscribe((teams: Array<Team>) => {
            console.log(teams);
            this.teamsOfProjects = teams;
            this.gotTeamData = true;
            this.gotSubscriptionData$.emit(new EmitAction("loadAllProjects", true));
          });
        this.projectService.fetchStagesByProjectIds(projectIds)
          .subscribe((stages: Array<Stage>) => {
            console.log(stages);
            this.stagesOfProjects = stages;
            this.gotStageData = true;
            this.gotSubscriptionData$.emit(new EmitAction("loadAllProjects", true));
          });
        // this.projectService.fetchTasksByProjectId(projectIds)
        // .subscribe((tasks:Array<Task>) => {
        // console.log(tasks);
        // this.tasksOfProjects = tasks;
        // this.gotTaskData = true;
        // this.gotSubscriptionData$.emit(new EmitAction("loadAllProjects", true));
        // });
      });
  }
  /**
   * Reveals the list (after a fixed 3 s delay) once users, teams and stages
   * have all been fetched for the current load cycle.
   */
  checkSubscriptionData() {
    this.gotSubscriptionData$.subscribe((ea: EmitAction) => {
      switch (ea.action) {
        case "loadAllProjects":
          console.log("loadAllProjects Case");
          if (this.gotUserData && this.gotTeamData && this.gotStageData
            // && this.gotTaskData
          ) {
            setTimeout(() => {
              this.showAllProjects = true;
              this.utilService.hideSpinner();
            }, 3000);
          }
      }
    });
  }
  /** Look up a user by id in a pre-fetched list. */
  getUserById(users: Array<User>, id: string): User {
    return users.filter((user) => user.id == id)[0];
  }
  /** Teams belonging to the given project. */
  getTeamsByProjectId(teams: Array<Team>, id: string): Array<Team> {
    return teams.filter((team) => team.projectId == id);
  }
  /** Stages belonging to the given project. */
  getStagesByProjectId(stages: Array<Stage>, id: string): Array<Stage> {
    return stages.filter((stage) => stage.projectId == id);
  }
  /** Tasks whose team belongs to the given project. */
  getTasksByProjectId(tasks: Array<Task>, id: string): Array<Task> {
    return tasks.filter((task) => task.team.projectId == id);
  }
  /** Opens the "create project" modal with a fresh, empty project. */
  startCreateProject() {
    this.newProject = new Project();
    this.createProjectSuccess = false;
    this.createProjectFail = false;
    this.creatingNewProject = false;
    this.modal.header = "Create New Project";
    this.showModal(this.modal);
  }
  /** Discards the in-progress project and closes the modal. */
  cancelCreateProject() {
    this.newProject = undefined;
    this.createProjectSuccess = false;
    this.createProjectFail = false;
    this.creatingNewProject = false;
    this.hideModal();
  }
  /**
   * Validates the form, creates the project via the service, refreshes the
   * list on success and surfaces server/validation errors in the modal.
   */
  createNewProject() {
    this.utilService.showSpinner();
    this.createProjectSuccess = false;
    this.createProjectFail = false;
    let valid = this.validateProjectData();
    if (valid.pass) {
      console.log("valid project data");
      this.creatingNewProject = true;
      this.newProject.ownerId = this.utilService.getLoggedInUser().id;
      this.projectService.createProject(this.newProject).subscribe(
        (project: Project) => {
          this.createProjectSuccess = true;
          this.popupMsg = "Project '" + project.name + "' created successfully.";
          this.modalBodyDiv.nativeElement.scrollTo(0, 0);
          this.utilService.emitReloadProjectList();
          this.loadAllProjects();
          setTimeout(() => {
            this.cancelCreateProject();
            this.utilService.hideSpinner();
          }, 2000);
        },
        (err) => {
          this.createProjectFail = true;
          console.log(err);
          if (err.status === 400)
            this.popupMsg = err.error.message;
          else
            this.popupMsg = "Unable to create new Project '" + this.newProject.name
              + "'.\n Please try again with correct data.";
          this.creatingNewProject = false;
          this.modalBodyDiv.nativeElement.scrollTo(0, 0);
          this.utilService.hideSpinner();
        }
      );
    } else {
      this.createProjectFail = true;
      this.popupMsg = "Invalid " + valid.field;
      this.creatingNewProject = false;
      //console.log(this.modalBodyDiv.nativeElement);
      this.modalBodyDiv.nativeElement.scrollTo(0, 0);
      this.utilService.hideSpinner();
    }
  }
  /**
   * Checks name presence and date field formats ("dd-mm-yyyy", 10 chars).
   * Returns { pass, field } where field names the first invalid input.
   * NOTE(review): `new Date(...)` never throws on bad numeric parts, so the
   * try/catch blocks below cannot actually reject malformed dates — and the
   * month is passed without the usual 0-based adjustment. Confirm intent.
   */
  validateProjectData(): any {
    console.log(this.newProject);
    if (this.newProject.name == undefined || this.newProject.name.trim().length == 0) {
      return { "pass": false, "field": "name" };
    } else if (this.newProject.startDate == undefined || this.newProject.startDate.trim().length != 10) {
      return { "pass": false, "field": "startDate" };
    } else if (this.newProject.endDate != undefined && this.newProject.endDate.trim().length != 10) {
      return { "pass": false, "field": "endDate" };
    } else {
      // check start Date
      try {
        let dateArr: Array<any> = this.newProject.startDate.split("-");
        let startDate = new Date(dateArr[2], dateArr[1], dateArr[0]);
      } catch (error) {
        return { "pass": false, "field": "startDate" };
      }
      // check end Date
      try {
        if (this.newProject.endDate == undefined) {
          // optional field.. so can be ignored
        } else {
          let dateArr: Array<any> = this.newProject.endDate.split("-");
          let endDate = new Date(dateArr[2], dateArr[1], dateArr[0]);
        }
      } catch (error) {
        return { "pass": false, "field": "endDate" };
      }
    }
    return { "pass": true, "field": "all" };
  }
  /** Shows the given modal. */
  showModal(modal: Modal) {
    this.modal = modal;
    this.modal.isShow = true;
  }
  /** Hides the current modal. */
  hideModal() {
    this.modal.isShow = false;
  }
}
|
import React, { Component } from 'react';
import { Platform, StyleSheet, View, ImageBackground, DeviceEventEmitter } from 'react-native';
// Get the current location
// Module-level mutable holder for the last reported device coordinates.
// NOTE(review): living at module scope, these values persist across component
// remounts — confirm that is intended.
let currentPosition = {
  latitude: 0.0,
  longitude: 0.0
};
class App extends Component {
constructor(props) {
super(props);
this.state = {
backgroundColor: 'black'
};
}
componentDidMount() {
if (Platform.OS === 'ios' || Platform.OS === 'android') {
// Register the location change event
DeviceEventEmitter.addListener('locationUpdated', this.onLocationChanged);
}
}
onLocationChanged = position => {
// Update the current position
currentPosition.latitude = position.coords.latitude.toFixed(6);
currentPosition.longitude = position.coords.longitude.toFixed(6);
// Calculate the background color based on the current location
let backgroundColor = `rgb(${currentPosition.latitude}, ${currentPosition.longitude}, 0.5)`;
// Update the state
this.setState({ backgroundColor });
};
render() {
return (
<ImageBackground source={require('./assets/bg.jpg')} style={[styles.container, { backgroundColor: this.state.backgroundColor }]}>
</ImageBackground>
);
}
}
// Full-screen container style for the background image.
const styles = StyleSheet.create({
  container: {
    flex: 1
  }
});
export default App; |
/*
TITLE Binary operations Chapter3Exercise_10.cpp
Bjarne Stroustrup "Programming: Principles and Practice Using C++"
COMMENT
Objective: Implements simple binary operation. (containing two operands)
Input: Requests two operands and an operation to be performed.
Output: Prints result from selected operation.
Author: <NAME>
Date: 10.02.2014
*/
#include "../../std_lib_facilities.h"
// Reads two numeric operands and an operator symbol from stdin, then prints
// the result of the selected binary operation. Unknown operators produce a
// diagnostic message.
int main()
{
    // supported operations
    string addition = "+";
    string subtraction = "-";
    string multiplication = "*";
    string division = "/";

    // Typos in the user-facing prompts fixed ("opeation" -> "operation",
    // "perfomed" -> "performed", "subtracion" -> "subtraction").
    cout << "Please type two operands and an operation to be performed on them.\n" << endl;

    cout << "Type the first operand: ";
    double first_operand = 0;
    cin >> first_operand;

    cout << "Type the second operand: ";
    double second_operand = 0;
    cin >> second_operand;

    cout << "Type operation (+, -, *, /): ";
    string operation;
    cin >> operation;

    if (operation == addition)
    {
        cout << "\nThe addition between the two operands equals: " << first_operand + second_operand << endl;
    }
    else if (operation == subtraction)
    {
        cout << "\nThe subtraction between the two operands equals: " << first_operand - second_operand << endl;
    }
    else if (operation == multiplication)
    {
        cout << "\nThe multiplication between the two operands equals: " << first_operand * second_operand << endl;
    }
    else if (operation == division)
    {
        // Robustness: report division by zero instead of printing "inf".
        if (second_operand == 0)
        {
            cout << "\nDivision by zero is undefined." << endl;
        }
        else
        {
            cout << "\nThe division between the two operands equals: " << first_operand / second_operand << endl;
        }
    }
    else {
        cout << "\nYou have entered an undefined operation." << endl;
    }
    getchar(); // keep the console window open until a key is pressed
    return 0;
}
<reponame>landenlabs/all-uitest<gh_stars>0
package com.landenlabs.all_uiTest;
/*
* Copyright (C) 2019 <NAME> (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.content.res.ColorStateList;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Rect;
import android.graphics.drawable.AnimatedVectorDrawable;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import android.widget.TableLayout;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.transition.AutoTransition;
import androidx.transition.ChangeBounds;
import androidx.transition.ChangeTransform;
import androidx.transition.TransitionManager;
import androidx.transition.TransitionSet;
import java.util.ArrayList;
import utils.TextViewExt1;
import utils.Translation;
/**
* Fragment which expands a group of view cells making snapshot image of selected
* cells and expanding image.
*/
@SuppressWarnings({"FieldCanBeLocal", "unused"})
public class FragExpandGroupImageDemo extends FragBottomNavBase
implements View.OnTouchListener {
private TableLayout tableLayout;
private FrameLayout overlay;
private FrameLayout expander;
private RadioGroup rg;
private ArrayList<View> groupViews = new ArrayList<>();
private int nextElevation = 1;
private static final long ANIM_MILLI = 2000;
private ColorStateList colorRed = new ColorStateList(
new int[][]{ new int[]{}},
new int[]{ 0xffff0000 } // RED
);
private ColorStateList colorGreen = new ColorStateList(
new int[][]{ new int[]{}},
new int[]{ 0xff00ff00 } // GREEN
);
private static final int LAYOUT_ID = R.layout.frag_expand_group_demo;
// ---------------------------------------------------------------------------------------------
@Override
public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
super.onCreateView(inflater, container, LAYOUT_ID);
setBarTitle("Group expand snapshot image");
// setBarVisibility(View.GONE);
initUI();
return root;
}
private void initUI() {
tableLayout = root.findViewById(R.id.page4_tableLayout);
overlay = root.findViewById(R.id.page4_overlay);
expander = root.findViewById(R.id.page4_expander);
rg = root.findViewById(R.id.page4_rg);
tableLayout.setOnTouchListener(this);
}
private void resetUI() {
ViewGroup parent = (ViewGroup)root.getParent();
parent.removeAllViews();
root = (ViewGroup) View.inflate(getContext(), LAYOUT_ID, parent);
nextElevation = 0;
initUI();
}
@SuppressWarnings("SwitchStatementWithTooFewBranches")
@Override
public boolean onTouch(View view, MotionEvent event) {
if (event.getAction() == MotionEvent.ACTION_DOWN) {
View viewTouched;
int globalXpx = (int)event.getX();
int globalYpx = (int)event.getY();
Rect rect = new Rect();
view.getGlobalVisibleRect(rect);
globalXpx += rect.left;
globalYpx += rect.top;
switch (view.getId()) {
case R.id.page4_tableLayout:
viewTouched = findViewAtPosition(tableLayout, globalXpx, globalYpx);
if (viewTouched != null) {
doAction(viewTouched, tableLayout);
return true;
}
break;
}
}
return false;
}
/**
* Find child view hit by touch position.
* Set row and column in tags.
*/
private View findViewAtPosition(View parent, int globalXpx, int globalYpx) {
if (parent instanceof ViewGroup) {
ViewGroup viewGroup = (ViewGroup)parent;
for (int i=0; i < viewGroup.getChildCount(); i++) {
View child = viewGroup.getChildAt(i);
View viewAtPosition = findViewAtPosition(child, globalXpx, globalYpx);
if (viewAtPosition != null) {
// Assume Table structure, view inside row container.
if (viewAtPosition.getTag(R.id.tag_col) == null) {
viewAtPosition.setTag(R.id.tag_col, i); // Set column first in inner container
} else if (viewAtPosition.getTag(R.id.tag_row) == null) {
viewAtPosition.setTag(R.id.tag_row, i); // Set row in 2nd outer container.
}
return viewAtPosition;
}
}
return null;
} else {
Rect rect = new Rect();
parent.getGlobalVisibleRect(rect);
if (rect.contains(globalXpx, globalYpx)) {
return parent;
} else {
return null;
}
}
}
/**
* Execute action on touched view.
*/
private void doAction(View view, ViewGroup parent) {
overlay.removeAllViews();
switch (rg.getCheckedRadioButtonId()) {
case R.id.page1_tagRB:
restoreGroup(parent);
if (view.getBackground() == null) {
// Draw animated gradient of two possible colors.
view.setBackgroundResource(R.drawable.bg_anim_gradient);
view.setBackgroundTintList(Math.random() > 0.5 ? colorRed : colorGreen);
((AnimatedVectorDrawable) view.getBackground()).start();
} else {
view.setBackground(null);
}
break;
case R.id.page1_grow2RB:
if (buildExpander(parent)) {
// Let expander appear in default position before expanding it.
expander.post(new Runnable() {
@Override
public void run() {
expandView(expander, parent);
}
});
}
((RadioButton) rg.findViewById(R.id.page1_detailsRB)).setChecked(true);
break;
case R.id.page1_detailsRB:
if (groupViews.size() != 0) {
openDetailView(expander);
}
((RadioButton)rg.findViewById(R.id.page1_tagRB)).setChecked(true);
break;
case R.id.page1_resetRB:
resetUI();
break;
}
}
/*
* Restore children.
*/
private void restoreGroup(@NonNull ViewGroup parent) {
for (View child : groupViews) {
child.setTag(R.id.tag_info, null);
child.setBackground(null);
}
groupViews.clear();
expander.setVisibility(View.INVISIBLE);
}
private boolean buildExpander(@NonNull ViewGroup parent) {
// Collect tagged children.
addTaggedChildren(parent, groupViews);
if (groupViews.isEmpty()) {
return false;
}
// Compute visible boundsof children.
Rect bounds = new Rect();
groupViews.get(0).getGlobalVisibleRect(bounds);
for (View child : groupViews) {
Rect childBnd = new Rect();
child.getGlobalVisibleRect(childBnd);
bounds.union(childBnd);
}
// Set expander minimum size to hold tagged children.
Rect expanderRect = new Rect();
((ViewGroup)parent.getParent()).getGlobalVisibleRect(expanderRect);
expander.setX(bounds.left - expanderRect.left);
expander.setY(bounds.top - expanderRect.top);
ViewGroup.LayoutParams lp = expander.getLayoutParams();
lp.width = bounds.width();
lp.height = bounds.height();
expander.setLayoutParams(lp);
expander.setVisibility(View.VISIBLE);
// Get image of tagged children in parent container.
Bitmap parentBM;
parentBM = Bitmap.createBitmap(parent.getWidth(), parent.getHeight(), Bitmap.Config.ARGB_8888);
Canvas parentCanvas = new Canvas(parentBM);
parent.draw(parentCanvas);
Rect parentRect = new Rect();
parent.getGlobalVisibleRect(parentRect);
Bitmap croppedBitmap = Bitmap.createBitmap(parentBM,
bounds.left - parentRect.left,
bounds.top - parentRect.top, bounds.width(), bounds.height());
BitmapDrawable parentBD = new BitmapDrawable(getResources(), croppedBitmap);
expander.setBackground(parentBD);
return true;
}
/**
* Collect tagged children (view with background set)..
*/
private void addTaggedChildren(@NonNull ViewGroup parent, @NonNull ArrayList<View> childList) {
for (int idx = 0; idx < parent.getChildCount(); idx++) {
View child = parent.getChildAt(idx);
if (child instanceof ViewGroup) {
addTaggedChildren((ViewGroup)child, childList);
} else {
if (child.getBackground() != null) {
childList.add(child);
}
}
}
}
/**
* Set clip mode on parents. Used to allow child to expand over parent.
*/
private void setClipChildren(@NonNull ViewGroup view, boolean toClip) {
view.setClipChildren(toClip);
view.setClipToPadding(toClip);
if (view.getParent() instanceof ViewGroup) {
setClipChildren((ViewGroup)view.getParent(), toClip);
}
}
/**
* Animate expansion of tapped view cell.
*/
private void expandView(@Nullable View view, @NonNull ViewGroup parent) {
if (view == null) {
return;
}
View rootView = view.getRootView();
setClipChildren(parent, false);
// Record layout change and animate it slowly
TransitionSet transitionSet = new TransitionSet();
transitionSet.setDuration(ANIM_MILLI);
transitionSet.addTransition(new AutoTransition());
transitionSet.addTransition(new Translation());
transitionSet.addTransition(new ChangeTransform());
transitionSet.addTransition(new ChangeBounds());
TransitionManager.beginDelayedTransition((ViewGroup) rootView, transitionSet);
view.setPivotX(view.getX() );
view.setPivotY(view.getY() );
final float growPercent = 1.21f;
view.setScaleX(growPercent);
view.setScaleY(growPercent);
// Set elevation so it appears above its peers and parent.
view.setElevation(nextElevation);
nextElevation += 8;
view.requestLayout();
view.invalidate();
}
/**
* Open a dialog in overlay to show details about tapped view.
*/
private void openDetailView(@NonNull View view) {
View child = groupViews.get(0);
int col = (Integer) child.getTag(R.id.tag_col);
int row = (Integer) child.getTag(R.id.tag_row);
Rect viewRect = new Rect();
view.getGlobalVisibleRect(viewRect);
overlay.removeAllViews();
Rect overlayRect = new Rect();
overlay.getGlobalVisibleRect(overlayRect);
Rect detailRect = new Rect(viewRect.left - overlayRect.left,
viewRect.top - overlayRect.top,
viewRect.right - overlayRect.left,
viewRect.bottom - overlayRect.top);
TextViewExt1 detailTv = new TextViewExt1(getContext());
detailTv.setMarker(R.drawable.bg_white_varrow);
detailTv.setText(TestData.WXDATA[row].getDetails(col));
detailTv.setTextSize(TypedValue.COMPLEX_UNIT_SP, 18);
detailTv.setTextColor(Color.WHITE);
Drawable icon = detailTv.getContext().getDrawable(R.drawable.wx_sun_30d);
detailTv.setForeground(icon);
detailTv.setForegroundGravity(Gravity.BOTTOM | Gravity.CENTER_HORIZONTAL);
int padPx = 20;
detailTv.setPadding(padPx, 40, padPx, 150);
int detailWidthPx = 500;
int detailHeightPx = ViewGroup.LayoutParams.WRAP_CONTENT;
overlay.addView(detailTv, detailWidthPx, detailHeightPx);
int margin = 10;
int detailLeft = Math.max(margin, detailRect.centerX() - detailWidthPx / 2);
if (detailLeft + detailHeightPx > overlayRect.width() - margin) {
detailLeft = overlayRect.width() - detailHeightPx - margin;
}
detailTv.setX(detailLeft);
detailTv.setY(detailRect.bottom - padPx);
float markerCenterShiftX = viewRect.centerX() - (detailLeft + detailWidthPx/2f + overlayRect.left);
detailTv.setPointer(markerCenterShiftX);
overlay.setElevation(nextElevation);
}
}
|
#include "TransferStream.h"
#include <assert.h>
#include <string.h>
#ifdef _WIN32
#include <stdlib.h>
#include <memory.h>
#include <stdio.h>
#include <float.h>
#endif
#if defined ( __APPLE__ ) || defined ( __APPLE_CC__ )
#include <malloc/malloc.h>
#else
#include <malloc.h>
#include <string>
#endif
// Detect the byte order of the machine we are running on.
// Returns true when the least-significant byte of an int is stored first.
static bool IsLocalLittleEndian()
{
    const int probe = 1;
    return *(const char*)&probe == 1;
}
// Reverse `length` bytes of `input` in place (endianness conversion helper).
//
// BUG FIX: the original used a chained XOR swap
// (input[i] ^= input[j] ^= input[i] ^= input[j]), which modifies the same
// objects multiple times without sequencing — undefined behavior before
// C++17. A plain temporary swap produces the identical result and is
// well-defined everywhere.
inline static void ReverseBytes(char *input, int length){
    for(int lo = 0, hi = length - 1; lo < hi; ++lo, --hi){
        char tmp = input[lo];
        input[lo] = input[hi];
        input[hi] = tmp;
    }
}
// Bit <-> byte arithmetic helpers: stream offsets in this class are tracked
// in BITS, so conversions to/from byte counts appear throughout.
#define MOD8(value) ((value) & 0x7)
#define DIV8(value) ((value) >> 3)
#define MUL8(value) ((value) << 3)
using namespace util;
// Lazy one-time initialization flag and the per-wire-type operator dispatch
// table; both are populated by InitTypeOperator() on first construction.
bool util::CTransferStream::s_bInitTypeOperator = false;
struct util::CTransferStream::TypeOperatorSet util::CTransferStream::s_typeOperators[STREAM_DATA_SIZE];
// Default-construct an empty stream backed by the in-object stack buffer;
// no heap allocation happens until the buffer must grow.
CTransferStream::CTransferStream()
{
    InitTypeOperator();
    m_isLittleEndian = IsLocalLittleEndian();
    m_readOffset = 0;
    m_writeOffset = 0;
    m_data = (unsigned char*) m_stackData;
    m_allocBitSize = TRANSFERSTREAM_STACK_ALLOCA_SIZE * 8;
    m_copyData = true;  // we own m_data (stack now, possibly heap later)
}
// Construct an empty stream with at least initByteSize bytes reserved.
// Small requests are served from the in-object stack buffer; larger ones
// come from the heap.
CTransferStream::CTransferStream(uint32_t initByteSize)
{
    InitTypeOperator();
    m_isLittleEndian = IsLocalLittleEndian();
    m_readOffset = 0;
    m_writeOffset = 0;
    m_copyData = true;
    if(initByteSize > TRANSFERSTREAM_STACK_ALLOCA_SIZE) {
        m_data = (unsigned char*) malloc(initByteSize);
        m_allocBitSize = initByteSize << 3;
    } else {
        m_data = (unsigned char*) m_stackData;
        m_allocBitSize = TRANSFERSTREAM_STACK_ALLOCA_SIZE * 8;
    }
#ifdef _DEBUG
    assert(m_data);
#endif
}
// Construct a stream pre-filled with `lengthInBytes` bytes from `data`.
// When copyData is true the bytes are copied into owned storage (stack
// buffer for small payloads, heap otherwise); when false the stream aliases
// the caller's buffer and must not outlive it.
CTransferStream::CTransferStream(const char* data, uint32_t lengthInBytes, bool copyData)
{
	InitTypeOperator();
	m_isLittleEndian = IsLocalLittleEndian();
	m_writeOffset = MUL8(lengthInBytes);  // offsets are tracked in bits
	m_readOffset = 0;
	m_copyData = copyData;
	m_allocBitSize = MUL8(lengthInBytes);
	if(m_copyData) {
		if(lengthInBytes > 0) {
			// NOTE(review): uses `<` here, while the size-only constructor uses
			// `<=` — equal-size payloads go to the heap; confirm intentional.
			if(lengthInBytes < TRANSFERSTREAM_STACK_ALLOCA_SIZE) {
				m_data = ( unsigned char* ) m_stackData;
				m_allocBitSize = MUL8(TRANSFERSTREAM_STACK_ALLOCA_SIZE);
			} else {
				m_data = (unsigned char*) malloc(lengthInBytes);
			}
#ifdef _DEBUG
			assert(m_data);
#endif
			memcpy(m_data, data, lengthInBytes);
		} else {
			m_data = NULL;
		}
	} else {
		// Aliasing mode: cast away const; callers must not write past the
		// provided buffer.
		m_data = (unsigned char*)data;
	}
}
// Construct a stream pre-filled with the bytes of `strData`. Mirrors the
// (const char*, uint32_t, bool) constructor above.
// NOTE(review): with copyData == false, m_data aliases strData.c_str(); the
// string must outlive this stream.
CTransferStream::CTransferStream(const std::string& strData, bool copyData)
{
	InitTypeOperator();
	const char* data = strData.c_str();
	uint32_t lengthInBytes = strData.length();
	m_isLittleEndian = IsLocalLittleEndian();
	m_writeOffset = MUL8(lengthInBytes);  // offsets are tracked in bits
	m_readOffset = 0;
	m_copyData = copyData;
	m_allocBitSize = MUL8(lengthInBytes);
	if(m_copyData) {
		if(lengthInBytes > 0) {
			if(lengthInBytes < TRANSFERSTREAM_STACK_ALLOCA_SIZE) {
				m_data = ( unsigned char* ) m_stackData;
				m_allocBitSize = MUL8(TRANSFERSTREAM_STACK_ALLOCA_SIZE);
			} else {
				m_data = (unsigned char*) malloc(lengthInBytes);
			}
#ifdef _DEBUG
			assert(m_data);
#endif
			memcpy(m_data, data, lengthInBytes);
		} else {
			m_data = NULL;
		}
	} else {
		m_data = (unsigned char*)data;
	}
}
// Copy constructor. Copies the written payload when orig owns its data
// (m_copyData); otherwise the copy aliases the same external buffer.
// The read cursor is reset to the start of the new stream.
CTransferStream::CTransferStream(const CTransferStream& orig)
{
	InitTypeOperator();
	m_isLittleEndian = IsLocalLittleEndian();
	m_writeOffset = orig.m_writeOffset;
	m_readOffset = 0;
	m_copyData = orig.m_copyData;
	m_allocBitSize = orig.m_writeOffset;  // allocate only what was written
	if(m_copyData) {
		uint32_t lengthInBytes = TS_BITS_TO_BYTES(orig.m_writeOffset);
		if(lengthInBytes > 0) {
			if(lengthInBytes < TRANSFERSTREAM_STACK_ALLOCA_SIZE) {
				m_data = (unsigned char*) m_stackData;
				m_allocBitSize = MUL8(TRANSFERSTREAM_STACK_ALLOCA_SIZE);
			} else {
				m_data = (unsigned char*) malloc(lengthInBytes);
			}
#ifdef _DEBUG
			assert(m_data);
#endif
			memcpy(m_data, orig.m_data, lengthInBytes);
		} else {
			m_data = NULL;
		}
	} else {
		// Non-owning source: share the same external buffer.
		m_data = orig.m_data;
	}
}
// Use this if you pass a pointer copy to the constructor (_copyData==false) and want to overallocate to prevent reallocation
// Records a larger capacity (in bits) for an externally owned buffer; it
// does not allocate anything itself. Shrinking is rejected in debug builds.
void CTransferStream::SetNumberOfBitsAllocated(const uint32_t lengthInBits)
{
#ifdef _DEBUG
	assert(lengthInBits >= m_allocBitSize);
#endif
	m_allocBitSize = lengthInBits;
}
// Release heap storage we own. The inline stack buffer is part of the
// object itself and must never be handed to free(); non-owning streams
// (m_copyData == false) never free anything.
CTransferStream::~CTransferStream()
{
    if(m_copyData && m_data != (unsigned char*) m_stackData) {
        free(m_data);
        m_data = NULL;
    }
}
// Rewind both cursors to the start; the allocated buffer is kept for reuse.
void CTransferStream::Reset(void)
{
    m_readOffset = 0;
    m_writeOffset = 0;
}
// Write the native types to the end of the buffer
// A char is a single byte, so no endian handling is required.
CTransferStream& CTransferStream::operator<<(const char input)
{
    WriteBits((unsigned char*) &input, MUL8(sizeof(input)), true);
    return *this;
}
// Booleans are packed as a single bit.
CTransferStream& CTransferStream::operator<<(const bool input)
{
    input ? Write1() : Write0();
    return *this;
}
// Single byte: endianness does not apply.
CTransferStream& CTransferStream::operator<<(const uint8_t input)
{
    WriteBits((unsigned char*) &input, MUL8(sizeof(input)), true);
    return *this;
}
// Single byte: endianness does not apply.
CTransferStream& CTransferStream::operator<<(const int8_t input)
{
    WriteBits((unsigned char*) &input, MUL8(sizeof(input)), true);
    return *this;
}
// Serialize a uint16 in big-endian (network) byte order.
// FIX: copy to a local before reversing — the original reversed the
// const-declared parameter in place through a cast, which is undefined
// behavior (modifying a const object). Output bytes are identical.
CTransferStream& CTransferStream::operator<<(const uint16_t input)
{
	uint16_t value = input;
	if(m_isLittleEndian) {
		ReverseBytes((char*)&value, sizeof(uint16_t));
	}
	WriteBits((unsigned char*) &value, sizeof(value) * 8, true);
	return *this;
}
// Serialize an int16 in big-endian (network) byte order.
// FIX: reverse a local copy instead of the const parameter (UB in the
// original). Output bytes are identical.
CTransferStream& CTransferStream::operator<<(const int16_t input)
{
	int16_t value = input;
	if(m_isLittleEndian) {
		ReverseBytes((char*)&value, sizeof(int16_t));
	}
	WriteBits((unsigned char*) &value, sizeof(value) * 8, true);
	return *this;
}
// Serialize a uint32 in big-endian (network) byte order.
// FIX: reverse a local copy instead of the const parameter (UB in the
// original). Output bytes are identical.
CTransferStream& CTransferStream::operator<<(const uint32_t input)
{
	uint32_t value = input;
	if(m_isLittleEndian) {
		ReverseBytes((char*)&value, sizeof(uint32_t));
	}
	WriteBits((unsigned char*) &value, sizeof(value) * 8, true);
	return *this;
}
// Serialize an int32 in big-endian (network) byte order.
// FIX: reverse a local copy instead of the const parameter (UB in the
// original). Output bytes are identical.
CTransferStream& CTransferStream::operator<<(const int32_t input)
{
	int32_t value = input;
	if(m_isLittleEndian) {
		ReverseBytes((char*)&value, sizeof(int32_t));
	}
	WriteBits((unsigned char*) &value, sizeof(value) * 8, true);
	return *this;
}
#ifndef NO_INT64
// Serialize a uint64 in big-endian (network) byte order.
// FIX: reverse a local copy instead of the const parameter (UB in the
// original). Output bytes are identical.
CTransferStream& CTransferStream::operator<<(const uint64_t input)
{
	uint64_t value = input;
	if(m_isLittleEndian) {
		ReverseBytes((char*)&value, sizeof(uint64_t));
	}
	WriteBits((unsigned char*) &value, sizeof(value) * 8, true);
	return *this;
}
// Serialize an int64 in big-endian (network) byte order.
// FIX: reverse a local copy instead of the const parameter (UB in the
// original). Output bytes are identical.
CTransferStream& CTransferStream::operator<<(const int64_t input)
{
	int64_t value = input;
	if(m_isLittleEndian) {
		ReverseBytes((char*)&value, sizeof(int64_t));
	}
	WriteBits((unsigned char*) &value, sizeof(value) * 8, true);
	return *this;
}
#endif
// Serialize a float in big-endian (network) byte order.
// FIX: reverse a local copy instead of the const parameter (UB in the
// original). Output bytes are identical.
CTransferStream& CTransferStream::operator<<(const float input)
{
	float value = input;
	if(m_isLittleEndian) {
		ReverseBytes((char*)&value, sizeof(float));
	}
	WriteBits((unsigned char*) &value, sizeof(value) * 8, true);
	return *this;
}
// Serialize a double in big-endian (network) byte order.
// FIX: reverse a local copy instead of the const parameter (UB in the
// original). Output bytes are identical.
CTransferStream& CTransferStream::operator<<(const double input)
{
	double value = input;
	if(m_isLittleEndian) {
		ReverseBytes((char*)&value, sizeof(double));
	}
	WriteBits((unsigned char*) &value, sizeof(value) * 8, true);
	return *this;
}
// Write an array or casted stream
void CTransferStream::WriteBytes(const char* input, const int numberOfBytes)
{
WriteBits((unsigned char*) input, numberOfBytes * 8, true);
}
// C strings are encoded as a 16-bit length prefix followed by the raw
// bytes (no terminating NUL is written).
CTransferStream& CTransferStream::operator<<(const char input[])
{
    const uint16_t len = (uint16_t) strlen(input);
    *this << len;
    if(len != 0) {
        WriteBytes(input, len);
    }
    return *this;
}
#ifndef NO_TEMPLATE
// std::string uses the same wire format as C strings: 16-bit length prefix
// then the raw bytes (embedded NULs are preserved, length is truncated to
// 16 bits).
CTransferStream& CTransferStream::operator<<(const std::string& input)
{
    const uint16_t len = (uint16_t) input.size();
    *this << len;
    if(len != 0) {
        WriteBytes(input.data(), len);
    }
    return *this;
}
#endif
//////////////////////////////////////////////////////////////////////////
void CTransferStream::ReadByType(char& output, uint8_t nType)
{
assert(IsSingleType(nType));
switch(nType) {
case STREAM_DATA_CHAR:
case STREAM_DATA_CHAR_NULL:
{
*this >> output;
}
break;
case STREAM_DATA_BOOL:
case STREAM_DATA_BOOL_NULL:
{
bool temp = false;
*this >> temp;
output = (char)temp;
}
break;
case STREAM_DATA_INT8:
case STREAM_DATA_INT8_NULL:
{
int8_t temp = 0;
*this >> temp;
output = (char)temp;
}
break;
case STREAM_DATA_UINT8:
case STREAM_DATA_UINT8_NULL:
{
uint8_t temp = 0;
*this >> temp;
output = (char)temp;
}
break;
case STREAM_DATA_INT16:
case STREAM_DATA_INT16_NULL:
{
int16_t temp = 0;
*this >> temp;
output = (char)temp;
}
break;
case STREAM_DATA_UINT16:
case STREAM_DATA_UINT16_NULL:
{
uint16_t temp = 0;
*this >> temp;
output = (char)temp;
}
break;
case STREAM_DATA_INT32:
case STREAM_DATA_INT32_NULL:
{
int32_t temp = 0;
*this >> temp;
output = (char)temp;
}
break;
case STREAM_DATA_UINT32:
case STREAM_DATA_UINT32_NULL:
{
uint32_t temp = 0;
*this >> temp;
output = (char)temp;
}
break;
case STREAM_DATA_INT64:
case STREAM_DATA_INT64_NULL:
{
int64_t temp = 0;
*this >> temp;
output = (char)temp;
}
break;
case STREAM_DATA_UINT64:
case STREAM_DATA_UINT64_NULL:
{
uint64_t temp = 0;
*this >> temp;
output = (char)temp;
}
break;
case STREAM_DATA_FLOAT:
case STREAM_DATA_FLOAT_NULL:
{
float temp = 0.0f;
*this >> temp;
output = (char)temp;
}
break;
case STREAM_DATA_DOUBLE:
case STREAM_DATA_DOUBLE_NULL:
{
double temp = 0.0;
*this >> temp;
output = (char)temp;
}
break;
case STREAM_DATA_C_STRING:
case STREAM_DATA_C_STRING_NULL:
case STREAM_DATA_STD_STRING:
case STREAM_DATA_STD_STRING_NULL:
{
uint16_t len = 0;
*this >> len;
if(len > 0) {
char szBuf[TS_STRING_MAX_SIZE];
ReadBytes(szBuf, len);
szBuf[len] = '\0';
CSeparatedStream separated(szBuf, len,
false, TS_STRING_DELIM, TS_STRING_DELIM);
separated >> output;
}
}
break;
default:
assert(false);
break;
};
}
void CTransferStream::ReadByType(bool& output, uint8_t nType)
{
assert(IsSingleType(nType));
switch(nType) {
case STREAM_DATA_CHAR:
case STREAM_DATA_CHAR_NULL:
{
char temp = 0;
*this >> temp;
output = (0 != temp);
}
break;
case STREAM_DATA_BOOL:
case STREAM_DATA_BOOL_NULL:
{
*this >> output;
}
break;
case STREAM_DATA_INT8:
case STREAM_DATA_INT8_NULL:
{
int8_t temp = 0;
*this >> temp;
output = (0 != temp);
}
break;
case STREAM_DATA_UINT8:
case STREAM_DATA_UINT8_NULL:
{
uint8_t temp = 0;
*this >> temp;
output = (0 != temp);
}
break;
case STREAM_DATA_INT16:
case STREAM_DATA_INT16_NULL:
{
int16_t temp = 0;
*this >> temp;
output = (0 != temp);
}
break;
case STREAM_DATA_UINT16:
case STREAM_DATA_UINT16_NULL:
{
uint16_t temp = 0;
*this >> temp;
output = (0 != temp);
}
break;
case STREAM_DATA_INT32:
case STREAM_DATA_INT32_NULL:
{
int32_t temp = 0;
*this >> temp;
output = (0 != temp);
}
break;
case STREAM_DATA_UINT32:
case STREAM_DATA_UINT32_NULL:
{
uint32_t temp = 0;
*this >> temp;
output = (0 != temp);
}
break;
case STREAM_DATA_INT64:
case STREAM_DATA_INT64_NULL:
{
int64_t temp = 0;
*this >> temp;
output = (0 != temp);
}
break;
case STREAM_DATA_UINT64:
case STREAM_DATA_UINT64_NULL:
{
uint64_t temp = 0;
*this >> temp;
output = (0 != temp);
}
break;
case STREAM_DATA_FLOAT:
case STREAM_DATA_FLOAT_NULL:
{
float temp = 0.0f;
*this >> temp;
output = (0.0f != temp);
}
break;
case STREAM_DATA_DOUBLE:
case STREAM_DATA_DOUBLE_NULL:
{
double temp = 0.0;
*this >> temp;
output = (0.0 != temp);
}
break;
case STREAM_DATA_C_STRING:
case STREAM_DATA_C_STRING_NULL:
case STREAM_DATA_STD_STRING:
case STREAM_DATA_STD_STRING_NULL:
{
uint16_t len = 0;
*this >> len;
if(len > 0) {
char szBuf[TS_STRING_MAX_SIZE];
ReadBytes(szBuf, len);
szBuf[len] = '\0';
CSeparatedStream separated(szBuf, len,
false, TS_STRING_DELIM, TS_STRING_DELIM);
separated >> output;
}
}
break;
default:
assert(false);
break;
};
}
void CTransferStream::ReadByType(uint8_t& output, uint8_t nType)
{
assert(IsSingleType(nType));
switch(nType) {
case STREAM_DATA_CHAR:
case STREAM_DATA_CHAR_NULL:
{
char temp = 0;
*this >> temp;
output = (uint8_t)temp;
}
break;
case STREAM_DATA_BOOL:
case STREAM_DATA_BOOL_NULL:
{
bool temp = false;
*this >> temp;
output = (uint8_t)temp;
}
break;
case STREAM_DATA_INT8:
case STREAM_DATA_INT8_NULL:
{
int8_t temp = 0;
*this >> temp;
output = (uint8_t)temp;
}
break;
case STREAM_DATA_UINT8:
case STREAM_DATA_UINT8_NULL:
{
*this >> output;
}
break;
case STREAM_DATA_INT16:
case STREAM_DATA_INT16_NULL:
{
int16_t temp = 0;
*this >> temp;
output = (uint8_t)temp;
}
break;
case STREAM_DATA_UINT16:
case STREAM_DATA_UINT16_NULL:
{
uint16_t temp = 0;
*this >> temp;
output = (uint8_t)temp;
}
break;
case STREAM_DATA_INT32:
case STREAM_DATA_INT32_NULL:
{
int32_t temp = 0;
*this >> temp;
output = (uint8_t)temp;
}
break;
case STREAM_DATA_UINT32:
case STREAM_DATA_UINT32_NULL:
{
uint32_t temp = 0;
*this >> temp;
output = (uint8_t)temp;
}
break;
case STREAM_DATA_INT64:
case STREAM_DATA_INT64_NULL:
{
int64_t temp = 0;
*this >> temp;
output = (uint8_t)temp;
}
break;
case STREAM_DATA_UINT64:
case STREAM_DATA_UINT64_NULL:
{
uint64_t temp = 0;
*this >> temp;
output = (uint8_t)temp;
}
break;
case STREAM_DATA_FLOAT:
case STREAM_DATA_FLOAT_NULL:
{
float temp = 0.0f;
*this >> temp;
output = (uint8_t)temp;
}
break;
case STREAM_DATA_DOUBLE:
case STREAM_DATA_DOUBLE_NULL:
{
double temp = 0.0;
*this >> temp;
output = (uint8_t)temp;
}
break;
case STREAM_DATA_C_STRING:
case STREAM_DATA_C_STRING_NULL:
case STREAM_DATA_STD_STRING:
case STREAM_DATA_STD_STRING_NULL:
{
uint16_t len = 0;
*this >> len;
if(len > 0) {
char szBuf[TS_STRING_MAX_SIZE];
ReadBytes(szBuf, len);
szBuf[len] = '\0';
CSeparatedStream separated(szBuf, len,
false, TS_STRING_DELIM, TS_STRING_DELIM);
separated >> output;
}
}
break;
default:
assert(false);
break;
};
}
void CTransferStream::ReadByType(int8_t& output, uint8_t nType)
{
assert(IsSingleType(nType));
switch(nType) {
case STREAM_DATA_CHAR:
case STREAM_DATA_CHAR_NULL:
{
char temp = 0;
*this >> temp;
output = (int8_t)temp;
}
break;
case STREAM_DATA_BOOL:
case STREAM_DATA_BOOL_NULL:
{
bool temp = false;
*this >> temp;
output = (int8_t)temp;
}
break;
case STREAM_DATA_INT8:
case STREAM_DATA_INT8_NULL:
{
*this >> output;
}
break;
case STREAM_DATA_UINT8:
case STREAM_DATA_UINT8_NULL:
{
uint8_t temp = 0;
*this >> temp;
output = (int8_t)temp;
}
break;
case STREAM_DATA_INT16:
case STREAM_DATA_INT16_NULL:
{
int16_t temp = 0;
*this >> temp;
output = (int8_t)temp;
}
break;
case STREAM_DATA_UINT16:
case STREAM_DATA_UINT16_NULL:
{
uint16_t temp = 0;
*this >> temp;
output = (int8_t)temp;
}
break;
case STREAM_DATA_INT32:
case STREAM_DATA_INT32_NULL:
{
int32_t temp = 0;
*this >> temp;
output = (int8_t)temp;
}
break;
case STREAM_DATA_UINT32:
case STREAM_DATA_UINT32_NULL:
{
uint32_t temp = 0;
*this >> temp;
output = (int8_t)temp;
}
break;
case STREAM_DATA_INT64:
case STREAM_DATA_INT64_NULL:
{
int64_t temp = 0;
*this >> temp;
output = (int8_t)temp;
}
break;
case STREAM_DATA_UINT64:
case STREAM_DATA_UINT64_NULL:
{
uint64_t temp = 0;
*this >> temp;
output = (int8_t)temp;
}
break;
case STREAM_DATA_FLOAT:
case STREAM_DATA_FLOAT_NULL:
{
float temp = 0.0f;
*this >> temp;
output = (int8_t)temp;
}
break;
case STREAM_DATA_DOUBLE:
case STREAM_DATA_DOUBLE_NULL:
{
double temp = 0.0;
*this >> temp;
output = (int8_t)temp;
}
break;
case STREAM_DATA_C_STRING:
case STREAM_DATA_C_STRING_NULL:
case STREAM_DATA_STD_STRING:
case STREAM_DATA_STD_STRING_NULL:
{
uint16_t len = 0;
*this >> len;
if(len > 0) {
char szBuf[TS_STRING_MAX_SIZE];
ReadBytes(szBuf, len);
szBuf[len] = '\0';
CSeparatedStream separated(szBuf, len,
false, TS_STRING_DELIM, TS_STRING_DELIM);
separated >> output;
}
}
break;
default:
assert(false);
break;
};
}
void CTransferStream::ReadByType(uint16_t& output, uint8_t nType)
{
assert(IsSingleType(nType));
switch(nType) {
case STREAM_DATA_CHAR:
case STREAM_DATA_CHAR_NULL:
{
char temp = 0;
*this >> temp;
output = (uint16_t)temp;
}
break;
case STREAM_DATA_BOOL:
case STREAM_DATA_BOOL_NULL:
{
bool temp = false;
*this >> temp;
output = (uint16_t)temp;
}
break;
case STREAM_DATA_INT8:
case STREAM_DATA_INT8_NULL:
{
int8_t temp = 0;
*this >> temp;
output = (uint16_t)temp;
}
break;
case STREAM_DATA_UINT8:
case STREAM_DATA_UINT8_NULL:
{
uint8_t temp = 0;
*this >> temp;
output = (uint16_t)temp;
}
break;
case STREAM_DATA_INT16:
case STREAM_DATA_INT16_NULL:
{
int16_t temp = 0;
*this >> temp;
output = (uint16_t)temp;
}
break;
case STREAM_DATA_UINT16:
case STREAM_DATA_UINT16_NULL:
{
*this >> output;
}
break;
case STREAM_DATA_INT32:
case STREAM_DATA_INT32_NULL:
{
int32_t temp = 0;
*this >> temp;
output = (uint16_t)temp;
}
break;
case STREAM_DATA_UINT32:
case STREAM_DATA_UINT32_NULL:
{
uint32_t temp = 0;
*this >> temp;
output = (uint16_t)temp;
}
break;
case STREAM_DATA_INT64:
case STREAM_DATA_INT64_NULL:
{
int64_t temp = 0;
*this >> temp;
output = (uint16_t)temp;
}
break;
case STREAM_DATA_UINT64:
case STREAM_DATA_UINT64_NULL:
{
uint64_t temp = 0;
*this >> temp;
output = (uint16_t)temp;
}
break;
case STREAM_DATA_FLOAT:
case STREAM_DATA_FLOAT_NULL:
{
float temp = 0.0f;
*this >> temp;
output = (uint16_t)temp;
}
break;
case STREAM_DATA_DOUBLE:
case STREAM_DATA_DOUBLE_NULL:
{
double temp = 0.0;
*this >> temp;
output = (uint16_t)temp;
}
break;
case STREAM_DATA_C_STRING:
case STREAM_DATA_C_STRING_NULL:
case STREAM_DATA_STD_STRING:
case STREAM_DATA_STD_STRING_NULL:
{
uint16_t len = 0;
*this >> len;
if(len > 0) {
char szBuf[TS_STRING_MAX_SIZE];
ReadBytes(szBuf, len);
szBuf[len] = '\0';
CSeparatedStream separated(szBuf, len,
false, TS_STRING_DELIM, TS_STRING_DELIM);
separated >> output;
}
}
break;
default:
assert(false);
break;
};
}
void CTransferStream::ReadByType(int16_t& output, uint8_t nType)
{
assert(IsSingleType(nType));
switch(nType) {
case STREAM_DATA_CHAR:
case STREAM_DATA_CHAR_NULL:
{
char temp = 0;
*this >> temp;
output = (int16_t)temp;
}
break;
case STREAM_DATA_BOOL:
case STREAM_DATA_BOOL_NULL:
{
bool temp = false;
*this >> temp;
output = (int16_t)temp;
}
break;
case STREAM_DATA_INT8:
case STREAM_DATA_INT8_NULL:
{
int8_t temp = 0;
*this >> temp;
output = (int16_t)temp;
}
break;
case STREAM_DATA_UINT8:
case STREAM_DATA_UINT8_NULL:
{
uint8_t temp = 0;
*this >> temp;
output = (int16_t)temp;
}
break;
case STREAM_DATA_INT16:
case STREAM_DATA_INT16_NULL:
{
*this >> output;
}
break;
case STREAM_DATA_UINT16:
case STREAM_DATA_UINT16_NULL:
{
uint16_t temp = 0;
*this >> temp;
output = (int16_t)temp;
}
break;
case STREAM_DATA_INT32:
case STREAM_DATA_INT32_NULL:
{
int32_t temp = 0;
*this >> temp;
output = (int16_t)temp;
}
break;
case STREAM_DATA_UINT32:
case STREAM_DATA_UINT32_NULL:
{
uint32_t temp = 0;
*this >> temp;
output = (int16_t)temp;
}
break;
case STREAM_DATA_INT64:
case STREAM_DATA_INT64_NULL:
{
int64_t temp = 0;
*this >> temp;
output = (int16_t)temp;
}
break;
case STREAM_DATA_UINT64:
case STREAM_DATA_UINT64_NULL:
{
uint64_t temp = 0;
*this >> temp;
output = (int16_t)temp;
}
break;
case STREAM_DATA_FLOAT:
case STREAM_DATA_FLOAT_NULL:
{
float temp = 0.0f;
*this >> temp;
output = (int16_t)temp;
}
break;
case STREAM_DATA_DOUBLE:
case STREAM_DATA_DOUBLE_NULL:
{
double temp = 0.0;
*this >> temp;
output = (int16_t)temp;
}
break;
case STREAM_DATA_C_STRING:
case STREAM_DATA_C_STRING_NULL:
case STREAM_DATA_STD_STRING:
case STREAM_DATA_STD_STRING_NULL:
{
uint16_t len = 0;
*this >> len;
if(len > 0) {
char szBuf[TS_STRING_MAX_SIZE];
ReadBytes(szBuf, len);
szBuf[len] = '\0';
CSeparatedStream separated(szBuf, len,
false, TS_STRING_DELIM, TS_STRING_DELIM);
separated >> output;
}
}
break;
default:
assert(false);
break;
};
}
void CTransferStream::ReadByType(uint32_t& output, uint8_t nType)
{
assert(IsSingleType(nType));
switch(nType) {
case STREAM_DATA_CHAR:
case STREAM_DATA_CHAR_NULL:
{
char temp = 0;
*this >> temp;
output = (uint32_t)temp;
}
break;
case STREAM_DATA_BOOL:
case STREAM_DATA_BOOL_NULL:
{
bool temp = false;
*this >> temp;
output = (uint32_t)temp;
}
break;
case STREAM_DATA_INT8:
case STREAM_DATA_INT8_NULL:
{
int8_t temp = 0;
*this >> temp;
output = (uint32_t)temp;
}
break;
case STREAM_DATA_UINT8:
case STREAM_DATA_UINT8_NULL:
{
uint8_t temp = 0;
*this >> temp;
output = (uint32_t)temp;
}
break;
case STREAM_DATA_INT16:
case STREAM_DATA_INT16_NULL:
{
int16_t temp = 0;
*this >> temp;
output = (uint32_t)temp;
}
break;
case STREAM_DATA_UINT16:
case STREAM_DATA_UINT16_NULL:
{
uint16_t temp = 0;
*this >> temp;
output = (uint32_t)temp;
}
break;
case STREAM_DATA_INT32:
case STREAM_DATA_INT32_NULL:
{
int32_t temp = 0;
*this >> temp;
output = (uint32_t)temp;
}
break;
case STREAM_DATA_UINT32:
case STREAM_DATA_UINT32_NULL:
{
*this >> output;
}
break;
case STREAM_DATA_INT64:
case STREAM_DATA_INT64_NULL:
{
int64_t temp = 0;
*this >> temp;
output = (uint32_t)temp;
}
break;
case STREAM_DATA_UINT64:
case STREAM_DATA_UINT64_NULL:
{
uint64_t temp = 0;
*this >> temp;
output = (uint32_t)temp;
}
break;
case STREAM_DATA_FLOAT:
case STREAM_DATA_FLOAT_NULL:
{
float temp = 0.0f;
*this >> temp;
output = (uint32_t)temp;
}
break;
case STREAM_DATA_DOUBLE:
case STREAM_DATA_DOUBLE_NULL:
{
double temp = 0.0;
*this >> temp;
output = (uint32_t)temp;
}
break;
case STREAM_DATA_C_STRING:
case STREAM_DATA_C_STRING_NULL:
case STREAM_DATA_STD_STRING:
case STREAM_DATA_STD_STRING_NULL:
{
uint16_t len = 0;
*this >> len;
if(len > 0) {
char szBuf[TS_STRING_MAX_SIZE];
ReadBytes(szBuf, len);
szBuf[len] = '\0';
CSeparatedStream separated(szBuf, len,
false, TS_STRING_DELIM, TS_STRING_DELIM);
separated >> output;
}
}
break;
default:
assert(false);
break;
};
}
void CTransferStream::ReadByType(int32_t& output, uint8_t nType)
{
assert(IsSingleType(nType));
switch(nType) {
case STREAM_DATA_CHAR:
case STREAM_DATA_CHAR_NULL:
{
char temp = 0;
*this >> temp;
output = (int32_t)temp;
}
break;
case STREAM_DATA_BOOL:
case STREAM_DATA_BOOL_NULL:
{
bool temp = false;
*this >> temp;
output = (int32_t)temp;
}
break;
case STREAM_DATA_INT8:
case STREAM_DATA_INT8_NULL:
{
int8_t temp = 0;
*this >> temp;
output = (int32_t)temp;
}
break;
case STREAM_DATA_UINT8:
case STREAM_DATA_UINT8_NULL:
{
uint8_t temp = 0;
*this >> temp;
output = (int32_t)temp;
}
break;
case STREAM_DATA_INT16:
case STREAM_DATA_INT16_NULL:
{
int16_t temp = 0;
*this >> temp;
output = (int32_t)temp;
}
break;
case STREAM_DATA_UINT16:
case STREAM_DATA_UINT16_NULL:
{
uint16_t temp = 0;
*this >> temp;
output = (int32_t)temp;
}
break;
case STREAM_DATA_INT32:
case STREAM_DATA_INT32_NULL:
{
*this >> output;
}
break;
case STREAM_DATA_UINT32:
case STREAM_DATA_UINT32_NULL:
{
uint32_t temp = 0;
*this >> temp;
output = (int32_t)temp;
}
break;
case STREAM_DATA_INT64:
case STREAM_DATA_INT64_NULL:
{
int64_t temp = 0;
*this >> temp;
output = (int32_t)temp;
}
break;
case STREAM_DATA_UINT64:
case STREAM_DATA_UINT64_NULL:
{
uint64_t temp = 0;
*this >> temp;
output = (int32_t)temp;
}
break;
case STREAM_DATA_FLOAT:
case STREAM_DATA_FLOAT_NULL:
{
float temp = 0.0f;
*this >> temp;
output = (int32_t)temp;
}
break;
case STREAM_DATA_DOUBLE:
case STREAM_DATA_DOUBLE_NULL:
{
double temp = 0.0;
*this >> temp;
output = (int32_t)temp;
}
break;
case STREAM_DATA_C_STRING:
case STREAM_DATA_C_STRING_NULL:
case STREAM_DATA_STD_STRING:
case STREAM_DATA_STD_STRING_NULL:
{
uint16_t len = 0;
*this >> len;
if(len > 0) {
char szBuf[TS_STRING_MAX_SIZE];
ReadBytes(szBuf, len);
szBuf[len] = '\0';
CSeparatedStream separated(szBuf, len,
false, TS_STRING_DELIM, TS_STRING_DELIM);
separated >> output;
}
}
break;
default:
assert(false);
break;
};
}
#ifndef NO_INT64
// Read one value of serialized type nType from the stream and convert it to
// uint64_t. Only single (scalar/string) wire types are accepted; anything
// else trips the IsSingleType assert. Float/double payloads are truncated by
// the integral cast; string payloads are re-parsed as a number via
// CSeparatedStream.
void CTransferStream::ReadByType(uint64_t& output, uint8_t nType)
{
assert(IsSingleType(nType));
switch(nType) {
case STREAM_DATA_CHAR:
case STREAM_DATA_CHAR_NULL:
{
char temp = 0;
*this >> temp;
output = (uint64_t)temp;
}
break;
case STREAM_DATA_BOOL:
case STREAM_DATA_BOOL_NULL:
{
bool temp = false;
*this >> temp;
output = (uint64_t)temp;
}
break;
case STREAM_DATA_INT8:
case STREAM_DATA_INT8_NULL:
{
int8_t temp = 0;
*this >> temp;
output = (uint64_t)temp;
}
break;
case STREAM_DATA_UINT8:
case STREAM_DATA_UINT8_NULL:
{
uint8_t temp = 0;
*this >> temp;
output = (uint64_t)temp;
}
break;
case STREAM_DATA_INT16:
case STREAM_DATA_INT16_NULL:
{
int16_t temp = 0;
*this >> temp;
output = (uint64_t)temp;
}
break;
case STREAM_DATA_UINT16:
case STREAM_DATA_UINT16_NULL:
{
uint16_t temp = 0;
*this >> temp;
output = (uint64_t)temp;
}
break;
case STREAM_DATA_INT32:
case STREAM_DATA_INT32_NULL:
{
int32_t temp = 0;
*this >> temp;
output = (uint64_t)temp;
}
break;
case STREAM_DATA_UINT32:
case STREAM_DATA_UINT32_NULL:
{
uint32_t temp = 0;
*this >> temp;
output = (uint64_t)temp;
}
break;
case STREAM_DATA_INT64:
case STREAM_DATA_INT64_NULL:
{
int64_t temp = 0;
*this >> temp;
output = (uint64_t)temp;
}
break;
case STREAM_DATA_UINT64:
case STREAM_DATA_UINT64_NULL:
{
// Wire type already matches the target type: read straight into output.
*this >> output;
}
break;
case STREAM_DATA_FLOAT:
case STREAM_DATA_FLOAT_NULL:
{
float temp = 0.0f;
*this >> temp;
output = (uint64_t)temp;
}
break;
case STREAM_DATA_DOUBLE:
case STREAM_DATA_DOUBLE_NULL:
{
double temp = 0.0;
*this >> temp;
output = (uint64_t)temp;
}
break;
case STREAM_DATA_C_STRING:
case STREAM_DATA_C_STRING_NULL:
case STREAM_DATA_STD_STRING:
case STREAM_DATA_STD_STRING_NULL:
{
uint16_t len = 0;
*this >> len;
if(len > 0) {
// NOTE(review): szBuf[len] assumes len < TS_STRING_MAX_SIZE; a corrupt
// wire length would overflow this stack buffer - confirm upstream bounds it.
char szBuf[TS_STRING_MAX_SIZE];
ReadBytes(szBuf, len);
szBuf[len] = '\0';
CSeparatedStream separated(szBuf, len,
false, TS_STRING_DELIM, TS_STRING_DELIM);
separated >> output;
}
}
break;
default:
assert(false);
break;
};
}
// Read one value of serialized type nType from the stream and convert it to
// int64_t. Only single (scalar/string) wire types are accepted; anything
// else trips the IsSingleType assert. Float/double payloads are truncated by
// the integral cast; string payloads are re-parsed as a number via
// CSeparatedStream.
void CTransferStream::ReadByType(int64_t& output, uint8_t nType)
{
assert(IsSingleType(nType));
switch(nType) {
case STREAM_DATA_CHAR:
case STREAM_DATA_CHAR_NULL:
{
char temp = 0;
*this >> temp;
output = (int64_t)temp;
}
break;
case STREAM_DATA_BOOL:
case STREAM_DATA_BOOL_NULL:
{
bool temp = false;
*this >> temp;
output = (int64_t)temp;
}
break;
case STREAM_DATA_INT8:
case STREAM_DATA_INT8_NULL:
{
int8_t temp = 0;
*this >> temp;
output = (int64_t)temp;
}
break;
case STREAM_DATA_UINT8:
case STREAM_DATA_UINT8_NULL:
{
uint8_t temp = 0;
*this >> temp;
output = (int64_t)temp;
}
break;
case STREAM_DATA_INT16:
case STREAM_DATA_INT16_NULL:
{
int16_t temp = 0;
*this >> temp;
output = (int64_t)temp;
}
break;
case STREAM_DATA_UINT16:
case STREAM_DATA_UINT16_NULL:
{
uint16_t temp = 0;
*this >> temp;
output = (int64_t)temp;
}
break;
case STREAM_DATA_INT32:
case STREAM_DATA_INT32_NULL:
{
int32_t temp = 0;
*this >> temp;
output = (int64_t)temp;
}
break;
case STREAM_DATA_UINT32:
case STREAM_DATA_UINT32_NULL:
{
uint32_t temp = 0;
*this >> temp;
output = (int64_t)temp;
}
break;
case STREAM_DATA_INT64:
case STREAM_DATA_INT64_NULL:
{
// Wire type already matches the target type: read straight into output.
*this >> output;
}
break;
case STREAM_DATA_UINT64:
case STREAM_DATA_UINT64_NULL:
{
uint64_t temp = 0;
*this >> temp;
output = (int64_t)temp;
}
break;
case STREAM_DATA_FLOAT:
case STREAM_DATA_FLOAT_NULL:
{
float temp = 0.0f;
*this >> temp;
output = (int64_t)temp;
}
break;
case STREAM_DATA_DOUBLE:
case STREAM_DATA_DOUBLE_NULL:
{
double temp = 0.0;
*this >> temp;
output = (int64_t)temp;
}
break;
case STREAM_DATA_C_STRING:
case STREAM_DATA_C_STRING_NULL:
case STREAM_DATA_STD_STRING:
case STREAM_DATA_STD_STRING_NULL:
{
uint16_t len = 0;
*this >> len;
if(len > 0) {
// NOTE(review): szBuf[len] assumes len < TS_STRING_MAX_SIZE; a corrupt
// wire length would overflow this stack buffer - confirm upstream bounds it.
char szBuf[TS_STRING_MAX_SIZE];
ReadBytes(szBuf, len);
szBuf[len] = '\0';
CSeparatedStream separated(szBuf, len,
false, TS_STRING_DELIM, TS_STRING_DELIM);
separated >> output;
}
}
break;
default:
assert(false);
break;
};
}
#endif
// Read one value of serialized type nType from the stream and convert it to
// float. Only single (scalar/string) wire types are accepted; anything else
// trips the IsSingleType assert. Wide integers and doubles may lose
// precision in the cast; string payloads are re-parsed as a number via
// CSeparatedStream.
void CTransferStream::ReadByType(float& output, uint8_t nType)
{
assert(IsSingleType(nType));
switch(nType) {
case STREAM_DATA_CHAR:
case STREAM_DATA_CHAR_NULL:
{
char temp = 0;
*this >> temp;
output = (float)temp;
}
break;
case STREAM_DATA_BOOL:
case STREAM_DATA_BOOL_NULL:
{
bool temp = false;
*this >> temp;
output = (float)temp;
}
break;
case STREAM_DATA_INT8:
case STREAM_DATA_INT8_NULL:
{
int8_t temp = 0;
*this >> temp;
output = (float)temp;
}
break;
case STREAM_DATA_UINT8:
case STREAM_DATA_UINT8_NULL:
{
uint8_t temp = 0;
*this >> temp;
output = (float)temp;
}
break;
case STREAM_DATA_INT16:
case STREAM_DATA_INT16_NULL:
{
int16_t temp = 0;
*this >> temp;
output = (float)temp;
}
break;
case STREAM_DATA_UINT16:
case STREAM_DATA_UINT16_NULL:
{
uint16_t temp = 0;
*this >> temp;
output = (float)temp;
}
break;
case STREAM_DATA_INT32:
case STREAM_DATA_INT32_NULL:
{
int32_t temp = 0;
*this >> temp;
output = (float)temp;
}
break;
case STREAM_DATA_UINT32:
case STREAM_DATA_UINT32_NULL:
{
uint32_t temp = 0;
*this >> temp;
output = (float)temp;
}
break;
case STREAM_DATA_INT64:
case STREAM_DATA_INT64_NULL:
{
int64_t temp = 0;
*this >> temp;
output = (float)temp;
}
break;
case STREAM_DATA_UINT64:
case STREAM_DATA_UINT64_NULL:
{
uint64_t temp = 0;
*this >> temp;
output = (float)temp;
}
break;
case STREAM_DATA_FLOAT:
case STREAM_DATA_FLOAT_NULL:
{
// Wire type already matches the target type: read straight into output.
*this >> output;
}
break;
case STREAM_DATA_DOUBLE:
case STREAM_DATA_DOUBLE_NULL:
{
double temp = 0.0;
*this >> temp;
output = (float)temp;
}
break;
case STREAM_DATA_C_STRING:
case STREAM_DATA_C_STRING_NULL:
case STREAM_DATA_STD_STRING:
case STREAM_DATA_STD_STRING_NULL:
{
uint16_t len = 0;
*this >> len;
if(len > 0) {
// NOTE(review): szBuf[len] assumes len < TS_STRING_MAX_SIZE; a corrupt
// wire length would overflow this stack buffer - confirm upstream bounds it.
char szBuf[TS_STRING_MAX_SIZE];
ReadBytes(szBuf, len);
szBuf[len] = '\0';
CSeparatedStream separated(szBuf, len,
false, TS_STRING_DELIM, TS_STRING_DELIM);
separated >> output;
}
}
break;
default:
assert(false);
break;
};
}
// Read one value of serialized type nType from the stream and convert it to
// double. Only single (scalar/string) wire types are accepted; anything else
// trips the IsSingleType assert. 64-bit integers may lose precision in the
// cast; string payloads are re-parsed as a number via CSeparatedStream.
void CTransferStream::ReadByType(double& output, uint8_t nType)
{
assert(IsSingleType(nType));
switch(nType) {
case STREAM_DATA_CHAR:
case STREAM_DATA_CHAR_NULL:
{
char temp = 0;
*this >> temp;
output = (double)temp;
}
break;
case STREAM_DATA_BOOL:
case STREAM_DATA_BOOL_NULL:
{
bool temp = false;
*this >> temp;
output = (double)temp;
}
break;
case STREAM_DATA_INT8:
case STREAM_DATA_INT8_NULL:
{
int8_t temp = 0;
*this >> temp;
output = (double)temp;
}
break;
case STREAM_DATA_UINT8:
case STREAM_DATA_UINT8_NULL:
{
uint8_t temp = 0;
*this >> temp;
output = (double)temp;
}
break;
case STREAM_DATA_INT16:
case STREAM_DATA_INT16_NULL:
{
int16_t temp = 0;
*this >> temp;
output = (double)temp;
}
break;
case STREAM_DATA_UINT16:
case STREAM_DATA_UINT16_NULL:
{
uint16_t temp = 0;
*this >> temp;
output = (double)temp;
}
break;
case STREAM_DATA_INT32:
case STREAM_DATA_INT32_NULL:
{
int32_t temp = 0;
*this >> temp;
output = (double)temp;
}
break;
case STREAM_DATA_UINT32:
case STREAM_DATA_UINT32_NULL:
{
uint32_t temp = 0;
*this >> temp;
output = (double)temp;
}
break;
case STREAM_DATA_INT64:
case STREAM_DATA_INT64_NULL:
{
int64_t temp = 0;
*this >> temp;
output = (double)temp;
}
break;
case STREAM_DATA_UINT64:
case STREAM_DATA_UINT64_NULL:
{
uint64_t temp = 0;
*this >> temp;
output = (double)temp;
}
break;
case STREAM_DATA_FLOAT:
case STREAM_DATA_FLOAT_NULL:
{
float temp = 0.0f;
*this >> temp;
output = (double)temp;
}
break;
case STREAM_DATA_DOUBLE:
case STREAM_DATA_DOUBLE_NULL:
{
// Wire type already matches the target type: read straight into output.
*this >> output;
}
break;
case STREAM_DATA_C_STRING:
case STREAM_DATA_C_STRING_NULL:
case STREAM_DATA_STD_STRING:
case STREAM_DATA_STD_STRING_NULL:
{
uint16_t len = 0;
*this >> len;
if(len > 0) {
// NOTE(review): szBuf[len] assumes len < TS_STRING_MAX_SIZE; a corrupt
// wire length would overflow this stack buffer - confirm upstream bounds it.
char szBuf[TS_STRING_MAX_SIZE];
ReadBytes(szBuf, len);
szBuf[len] = '\0';
CSeparatedStream separated(szBuf, len,
false, TS_STRING_DELIM, TS_STRING_DELIM);
separated >> output;
}
}
break;
default:
assert(false);
break;
};
}
//////////////////////////////////////////////////////////////////////////
// Read one char from the stream; output becomes a one-character string.
void util::CTransferStream::ReadCharToString(std::string& output)
{
    char ch = 0;
    *this >> ch;
    output.assign(1, ch);
}
void util::CTransferStream::ReadBoolToString(std::string& output)
{
bool temp = false;
*this >> temp;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << temp;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadUInt8ToString(std::string& output)
{
uint8_t temp = 0;
*this >> temp;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << temp;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadUInt16ToString(std::string& output)
{
uint16_t temp = 0;
*this >> temp;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << temp;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadUInt32ToString(std::string& output)
{
uint32_t temp = 0;
*this >> temp;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << temp;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadUInt64ToString(std::string& output)
{
uint64_t temp = 0;
*this >> temp;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << temp;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadInt8ToString(std::string& output)
{
int8_t temp = 0;
*this >> temp;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << temp;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadInt16ToString(std::string& output)
{
int16_t temp = 0;
*this >> temp;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << temp;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadInt32ToString(std::string& output)
{
int32_t temp = 0;
*this >> temp;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << temp;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadInt64ToString(std::string& output)
{
int64_t temp = 0;
*this >> temp;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << temp;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadFloatToString(std::string& output)
{
float temp = 0.0f;
*this >> temp;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << temp;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadDoubleToString(std::string& output)
{
double temp = 0.0;
*this >> temp;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << temp;
separated.EndLine();
output = separated.Str();
}
// Read a length-prefixed string (uint16_t byte count followed by the raw
// characters) from the stream into output.
// NOTE: on an empty payload, output is deliberately left untouched, matching
// the historical behavior of this reader.
void util::CTransferStream::ReadStringToString(std::string& output)
{
    uint16_t len = 0;
    *this >> len;
    if(len < 1) {
        return;
    }
    // Size the buffer to the payload instead of using a fixed stack array:
    // the previous code wrote szBuf[len] into char[TS_STRING_MAX_SIZE], which
    // overflowed the stack whenever a corrupt stream carried
    // len >= TS_STRING_MAX_SIZE.
    std::vector<char> buf(len);
    ReadBytes(&buf[0], len);
    output.assign(&buf[0], len);
}
void util::CTransferStream::ReadBoolSetToString(std::string& output)
{
std::vector<bool> object;
*this >> object;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << object;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadUInt8SetToString(std::string& output)
{
std::vector<uint8_t> object;
*this >> object;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << object;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadInt8SetToString(std::string& output)
{
std::vector<int8_t> object;
*this >> object;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << object;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadUInt16SetToString(std::string& output)
{
std::vector<uint16_t> object;
*this >> object;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << object;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadInt16SetToString(std::string& output)
{
std::vector<int16_t> object;
*this >> object;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << object;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadUInt32SetToString(std::string& output)
{
std::vector<uint32_t> object;
*this >> object;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << object;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadInt32SetToString(std::string& output)
{
std::vector<int32_t> object;
*this >> object;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << object;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadUInt64SetToString(std::string& output)
{
std::vector<uint64_t> object;
*this >> object;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << object;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadInt64SetToString(std::string& output)
{
std::vector<int64_t> object;
*this >> object;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << object;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadFloatSetToString(std::string& output)
{
std::vector<float> object;
*this >> object;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << object;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadDoubleSetToString(std::string& output)
{
std::vector<double> object;
*this >> object;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << object;
separated.EndLine();
output = separated.Str();
}
void util::CTransferStream::ReadStringSetToString(std::string& output)
{
std::vector<std::string> object;
*this >> object;
CSeparatedStream separated(
TS_STRING_DELIM,
TS_STRING_DELIM);
separated << object;
separated.EndLine();
output = separated.Str();
}
//////////////////////////////////////////////////////////////////////////
// Append the first character of input to the stream ('\0' when input is
// NULL or empty).
void util::CTransferStream::WriteCharFromString(const char* input, int length)
{
    char ch = 0;
    if(input != NULL && length > 0) {
        ch = input[0];
    }
    *this << ch;
}
void util::CTransferStream::WriteBoolFromString(const char* input, int length)
{
bool temp = false;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> temp;
*this << temp;
}
void util::CTransferStream::WriteUInt8FromString(const char* input, int length)
{
uint8_t temp = 0;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> temp;
*this << temp;
}
void util::CTransferStream::WriteUInt16FromString(const char* input, int length)
{
uint16_t temp = 0;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> temp;
*this << temp;
}
void util::CTransferStream::WriteUInt32FromString(const char* input, int length)
{
uint32_t temp = 0;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> temp;
*this << temp;
}
void util::CTransferStream::WriteUInt64FromString(const char* input, int length)
{
uint64_t temp = 0;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> temp;
*this << temp;
}
void util::CTransferStream::WriteInt8FromString(const char* input, int length)
{
int8_t temp = 0;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> temp;
*this << temp;
}
void util::CTransferStream::WriteInt16FromString(const char* input, int length)
{
int16_t temp = 0;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> temp;
*this << temp;
}
void util::CTransferStream::WriteInt32FromString(const char* input, int length)
{
int32_t temp = 0;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> temp;
*this << temp;
}
void util::CTransferStream::WriteInt64FromString(const char* input, int length)
{
int64_t temp = 0;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> temp;
*this << temp;
}
void util::CTransferStream::WriteFloatFromString(const char* input, int length)
{
float temp = 0.0f;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> temp;
*this << temp;
}
void util::CTransferStream::WriteDoubleFromString(const char* input, int length)
{
double temp = 0.0;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> temp;
*this << temp;
}
// Write a length-prefixed string (uint16_t byte count followed by the raw
// characters) to the stream.
// Guards against NULL input and negative length: the previous unchecked
// (uint16_t)length cast turned a negative length into a huge count, making
// WriteBytes read far past the buffer (and dereference NULL input).
// NOTE(review): lengths above 0xFFFF still truncate modulo 2^16, as before -
// the wire format only carries a uint16_t count.
void util::CTransferStream::WriteStringFromString(const char* input, int length)
{
    if(NULL == input || length < 0) {
        length = 0;
    }
    uint16_t len = (uint16_t)length;
    *this << len;
    if(len > 0){
        WriteBytes(input, len);
    }
}
void util::CTransferStream::WriteBoolSetFromString(const char* input, int length)
{
std::vector<bool> object;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> object;
*this << object;
}
void util::CTransferStream::WriteUInt8SetFromString(const char* input, int length)
{
std::vector<uint8_t> object;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> object;
*this << object;
}
void util::CTransferStream::WriteInt8SetFromString(const char* input, int length)
{
std::vector<int8_t> object;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> object;
*this << object;
}
void util::CTransferStream::WriteUInt16SetFromString(const char* input, int length)
{
std::vector<uint16_t> object;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> object;
*this << object;
}
void util::CTransferStream::WriteInt16SetFromString(const char* input, int length)
{
std::vector<int16_t> object;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> object;
*this << object;
}
void util::CTransferStream::WriteUInt32SetFromString(const char* input, int length)
{
std::vector<uint32_t> object;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> object;
*this << object;
}
void util::CTransferStream::WriteInt32SetFromString(const char* input, int length)
{
std::vector<int32_t> object;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> object;
*this << object;
}
void util::CTransferStream::WriteUInt64SetFromString(const char* input, int length)
{
std::vector<uint64_t> object;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> object;
*this << object;
}
void util::CTransferStream::WriteInt64SetFromString(const char* input, int length)
{
std::vector<int64_t> object;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> object;
*this << object;
}
void util::CTransferStream::WriteFloatSetFromString(const char* input, int length)
{
std::vector<float> object;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> object;
*this << object;
}
void util::CTransferStream::WriteDoubleSetFromString(const char* input, int length)
{
std::vector<double> object;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> object;
*this << object;
}
void util::CTransferStream::WriteStringSetFromString(const char* input, int length)
{
std::vector<std::string> object;
CSeparatedStream separated(
input, length, false,
TS_STRING_DELIM,
TS_STRING_DELIM);
separated >> object;
*this << object;
}
//////////////////////////////////////////////////////////////////////////
void util::CTransferStream::InitTypeOperator()
{
if(s_bInitTypeOperator) {
return;
}
s_bInitTypeOperator = true;
s_typeOperators[STREAM_DATA_NIL].m_pReadToString = NULL;
s_typeOperators[STREAM_DATA_NIL].m_pWriteFromString = NULL;
s_typeOperators[STREAM_DATA_CHAR].m_pReadToString = &util::CTransferStream::ReadCharToString;
s_typeOperators[STREAM_DATA_CHAR].m_pWriteFromString = &util::CTransferStream::WriteCharFromString;
s_typeOperators[STREAM_DATA_CHAR_NULL].m_pReadToString = &util::CTransferStream::ReadCharToString;
s_typeOperators[STREAM_DATA_CHAR_NULL].m_pWriteFromString = &util::CTransferStream::WriteCharFromString;
s_typeOperators[STREAM_DATA_BOOL].m_pReadToString = &util::CTransferStream::ReadBoolToString;
s_typeOperators[STREAM_DATA_BOOL].m_pWriteFromString = &util::CTransferStream::WriteBoolFromString;
s_typeOperators[STREAM_DATA_BOOL_NULL].m_pReadToString = &util::CTransferStream::ReadBoolToString;
s_typeOperators[STREAM_DATA_BOOL_NULL].m_pWriteFromString = &util::CTransferStream::WriteBoolFromString;
s_typeOperators[STREAM_DATA_UINT8].m_pReadToString = &util::CTransferStream::ReadUInt8ToString;
s_typeOperators[STREAM_DATA_UINT8].m_pWriteFromString = &util::CTransferStream::WriteUInt8FromString;
s_typeOperators[STREAM_DATA_UINT8_NULL].m_pReadToString = &util::CTransferStream::ReadUInt8ToString;
s_typeOperators[STREAM_DATA_UINT8_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt8FromString;
s_typeOperators[STREAM_DATA_UINT16].m_pReadToString = &util::CTransferStream::ReadUInt16ToString;
s_typeOperators[STREAM_DATA_UINT16].m_pWriteFromString = &util::CTransferStream::WriteUInt16FromString;
s_typeOperators[STREAM_DATA_UINT16_NULL].m_pReadToString = &util::CTransferStream::ReadUInt16ToString;
s_typeOperators[STREAM_DATA_UINT16_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt16FromString;
s_typeOperators[STREAM_DATA_UINT32].m_pReadToString = &util::CTransferStream::ReadUInt32ToString;
s_typeOperators[STREAM_DATA_UINT32].m_pWriteFromString = &util::CTransferStream::WriteUInt32FromString;
s_typeOperators[STREAM_DATA_UINT32_NULL].m_pReadToString = &util::CTransferStream::ReadUInt32ToString;
s_typeOperators[STREAM_DATA_UINT32_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt32FromString;
s_typeOperators[STREAM_DATA_UINT64].m_pReadToString = &util::CTransferStream::ReadUInt64ToString;
s_typeOperators[STREAM_DATA_UINT64].m_pWriteFromString = &util::CTransferStream::WriteUInt64FromString;
s_typeOperators[STREAM_DATA_UINT64_NULL].m_pReadToString = &util::CTransferStream::ReadUInt64ToString;
s_typeOperators[STREAM_DATA_UINT64_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt64FromString;
s_typeOperators[STREAM_DATA_INT8].m_pReadToString = &util::CTransferStream::ReadInt8ToString;
s_typeOperators[STREAM_DATA_INT8].m_pWriteFromString = &util::CTransferStream::WriteInt8FromString;
s_typeOperators[STREAM_DATA_INT8_NULL].m_pReadToString = &util::CTransferStream::ReadInt8ToString;
s_typeOperators[STREAM_DATA_INT8_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt8FromString;
s_typeOperators[STREAM_DATA_INT16].m_pReadToString = &util::CTransferStream::ReadInt16ToString;
s_typeOperators[STREAM_DATA_INT16].m_pWriteFromString = &util::CTransferStream::WriteInt16FromString;
s_typeOperators[STREAM_DATA_INT16_NULL].m_pReadToString = &util::CTransferStream::ReadInt16ToString;
s_typeOperators[STREAM_DATA_INT16_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt16FromString;
s_typeOperators[STREAM_DATA_INT32].m_pReadToString = &util::CTransferStream::ReadInt32ToString;
s_typeOperators[STREAM_DATA_INT32].m_pWriteFromString = &util::CTransferStream::WriteInt32FromString;
s_typeOperators[STREAM_DATA_INT32_NULL].m_pReadToString = &util::CTransferStream::ReadInt32ToString;
s_typeOperators[STREAM_DATA_INT32_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt32FromString;
s_typeOperators[STREAM_DATA_INT64].m_pReadToString = &util::CTransferStream::ReadInt64ToString;
s_typeOperators[STREAM_DATA_INT64].m_pWriteFromString = &util::CTransferStream::WriteInt64FromString;
s_typeOperators[STREAM_DATA_INT64_NULL].m_pReadToString = &util::CTransferStream::ReadInt64ToString;
s_typeOperators[STREAM_DATA_INT64_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt64FromString;
s_typeOperators[STREAM_DATA_FLOAT].m_pReadToString = &util::CTransferStream::ReadFloatToString;
s_typeOperators[STREAM_DATA_FLOAT].m_pWriteFromString = &util::CTransferStream::WriteFloatFromString;
s_typeOperators[STREAM_DATA_FLOAT_NULL].m_pReadToString = &util::CTransferStream::ReadFloatToString;
s_typeOperators[STREAM_DATA_FLOAT_NULL].m_pWriteFromString = &util::CTransferStream::WriteFloatFromString;
s_typeOperators[STREAM_DATA_DOUBLE].m_pReadToString = &util::CTransferStream::ReadDoubleToString;
s_typeOperators[STREAM_DATA_DOUBLE].m_pWriteFromString = &util::CTransferStream::WriteDoubleFromString;
s_typeOperators[STREAM_DATA_DOUBLE_NULL].m_pReadToString = &util::CTransferStream::ReadDoubleToString;
s_typeOperators[STREAM_DATA_DOUBLE_NULL].m_pWriteFromString = &util::CTransferStream::WriteDoubleFromString;
s_typeOperators[STREAM_DATA_STD_STRING].m_pReadToString = &util::CTransferStream::ReadStringToString;
s_typeOperators[STREAM_DATA_STD_STRING].m_pWriteFromString = &util::CTransferStream::WriteStringFromString;
s_typeOperators[STREAM_DATA_STD_STRING_NULL].m_pReadToString = &util::CTransferStream::ReadStringToString;
s_typeOperators[STREAM_DATA_STD_STRING_NULL].m_pWriteFromString = &util::CTransferStream::WriteStringFromString;
s_typeOperators[STREAM_DATA_C_STRING].m_pReadToString = &util::CTransferStream::ReadStringToString;
s_typeOperators[STREAM_DATA_C_STRING].m_pWriteFromString = &util::CTransferStream::WriteStringFromString;
s_typeOperators[STREAM_DATA_C_STRING_NULL].m_pReadToString = &util::CTransferStream::ReadStringToString;
s_typeOperators[STREAM_DATA_C_STRING_NULL].m_pWriteFromString = &util::CTransferStream::WriteStringFromString;
////////////////////////////
s_typeOperators[STREAM_DATA_VECTOR_BOOL].m_pReadToString = &util::CTransferStream::ReadBoolSetToString;
s_typeOperators[STREAM_DATA_VECTOR_BOOL].m_pWriteFromString = &util::CTransferStream::WriteBoolSetFromString;
s_typeOperators[STREAM_DATA_VECTOR_BOOL_NULL].m_pReadToString = &util::CTransferStream::ReadBoolSetToString;
s_typeOperators[STREAM_DATA_VECTOR_BOOL_NULL].m_pWriteFromString = &util::CTransferStream::WriteBoolSetFromString;
s_typeOperators[STREAM_DATA_VECTOR_UINT8].m_pReadToString = &util::CTransferStream::ReadUInt8SetToString;
s_typeOperators[STREAM_DATA_VECTOR_UINT8].m_pWriteFromString = &util::CTransferStream::WriteUInt8SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_UINT8_NULL].m_pReadToString = &util::CTransferStream::ReadUInt8SetToString;
s_typeOperators[STREAM_DATA_VECTOR_UINT8_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt8SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_UINT16].m_pReadToString = &util::CTransferStream::ReadUInt16SetToString;
s_typeOperators[STREAM_DATA_VECTOR_UINT16].m_pWriteFromString = &util::CTransferStream::WriteUInt16SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_UINT16_NULL].m_pReadToString = &util::CTransferStream::ReadUInt16SetToString;
s_typeOperators[STREAM_DATA_VECTOR_UINT16_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt16SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_UINT32].m_pReadToString = &util::CTransferStream::ReadUInt32SetToString;
s_typeOperators[STREAM_DATA_VECTOR_UINT32].m_pWriteFromString = &util::CTransferStream::WriteUInt32SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_UINT32_NULL].m_pReadToString = &util::CTransferStream::ReadUInt32SetToString;
s_typeOperators[STREAM_DATA_VECTOR_UINT32_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt32SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_UINT64].m_pReadToString = &util::CTransferStream::ReadUInt64SetToString;
s_typeOperators[STREAM_DATA_VECTOR_UINT64].m_pWriteFromString = &util::CTransferStream::WriteUInt64SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_UINT64_NULL].m_pReadToString = &util::CTransferStream::ReadUInt64SetToString;
s_typeOperators[STREAM_DATA_VECTOR_UINT64_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt64SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_INT8].m_pReadToString = &util::CTransferStream::ReadInt8SetToString;
s_typeOperators[STREAM_DATA_VECTOR_INT8].m_pWriteFromString = &util::CTransferStream::WriteInt8SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_INT8_NULL].m_pReadToString = &util::CTransferStream::ReadInt8SetToString;
s_typeOperators[STREAM_DATA_VECTOR_INT8_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt8SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_INT16].m_pReadToString = &util::CTransferStream::ReadInt16SetToString;
s_typeOperators[STREAM_DATA_VECTOR_INT16].m_pWriteFromString = &util::CTransferStream::WriteInt16SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_INT16_NULL].m_pReadToString = &util::CTransferStream::ReadInt16SetToString;
s_typeOperators[STREAM_DATA_VECTOR_INT16_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt16SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_INT32].m_pReadToString = &util::CTransferStream::ReadInt32SetToString;
s_typeOperators[STREAM_DATA_VECTOR_INT32].m_pWriteFromString = &util::CTransferStream::WriteInt32SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_INT32_NULL].m_pReadToString = &util::CTransferStream::ReadInt32SetToString;
s_typeOperators[STREAM_DATA_VECTOR_INT32_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt32SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_INT64].m_pReadToString = &util::CTransferStream::ReadInt64SetToString;
s_typeOperators[STREAM_DATA_VECTOR_INT64].m_pWriteFromString = &util::CTransferStream::WriteInt64SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_INT64_NULL].m_pReadToString = &util::CTransferStream::ReadInt64SetToString;
s_typeOperators[STREAM_DATA_VECTOR_INT64_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt64SetFromString;
s_typeOperators[STREAM_DATA_VECTOR_FLOAT].m_pReadToString = &util::CTransferStream::ReadFloatSetToString;
s_typeOperators[STREAM_DATA_VECTOR_FLOAT].m_pWriteFromString = &util::CTransferStream::WriteFloatSetFromString;
s_typeOperators[STREAM_DATA_VECTOR_FLOAT_NULL].m_pReadToString = &util::CTransferStream::ReadFloatSetToString;
s_typeOperators[STREAM_DATA_VECTOR_FLOAT_NULL].m_pWriteFromString = &util::CTransferStream::WriteFloatSetFromString;
s_typeOperators[STREAM_DATA_VECTOR_DOUBLE].m_pReadToString = &util::CTransferStream::ReadDoubleSetToString;
s_typeOperators[STREAM_DATA_VECTOR_DOUBLE].m_pWriteFromString = &util::CTransferStream::WriteDoubleSetFromString;
s_typeOperators[STREAM_DATA_VECTOR_DOUBLE_NULL].m_pReadToString = &util::CTransferStream::ReadDoubleSetToString;
s_typeOperators[STREAM_DATA_VECTOR_DOUBLE_NULL].m_pWriteFromString = &util::CTransferStream::WriteDoubleSetFromString;
s_typeOperators[STREAM_DATA_VECTOR_STRING].m_pReadToString = &util::CTransferStream::ReadStringSetToString;
s_typeOperators[STREAM_DATA_VECTOR_STRING].m_pWriteFromString = &util::CTransferStream::WriteStringSetFromString;
s_typeOperators[STREAM_DATA_VECTOR_STRING_NULL].m_pReadToString = &util::CTransferStream::ReadStringSetToString;
s_typeOperators[STREAM_DATA_VECTOR_STRING_NULL].m_pWriteFromString = &util::CTransferStream::WriteStringSetFromString;
///////////////////////////////
s_typeOperators[STREAM_DATA_SET_BOOL].m_pReadToString = &util::CTransferStream::ReadBoolSetToString;
s_typeOperators[STREAM_DATA_SET_BOOL].m_pWriteFromString = &util::CTransferStream::WriteBoolSetFromString;
s_typeOperators[STREAM_DATA_SET_BOOL_NULL].m_pReadToString = &util::CTransferStream::ReadBoolSetToString;
s_typeOperators[STREAM_DATA_SET_BOOL_NULL].m_pWriteFromString = &util::CTransferStream::WriteBoolSetFromString;
s_typeOperators[STREAM_DATA_SET_UINT8].m_pReadToString = &util::CTransferStream::ReadUInt8SetToString;
s_typeOperators[STREAM_DATA_SET_UINT8].m_pWriteFromString = &util::CTransferStream::WriteUInt8SetFromString;
s_typeOperators[STREAM_DATA_SET_UINT8_NULL].m_pReadToString = &util::CTransferStream::ReadUInt8SetToString;
s_typeOperators[STREAM_DATA_SET_UINT8_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt8SetFromString;
s_typeOperators[STREAM_DATA_SET_UINT16].m_pReadToString = &util::CTransferStream::ReadUInt16SetToString;
s_typeOperators[STREAM_DATA_SET_UINT16].m_pWriteFromString = &util::CTransferStream::WriteUInt16SetFromString;
s_typeOperators[STREAM_DATA_SET_UINT16_NULL].m_pReadToString = &util::CTransferStream::ReadUInt16SetToString;
s_typeOperators[STREAM_DATA_SET_UINT16_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt16SetFromString;
s_typeOperators[STREAM_DATA_SET_UINT32].m_pReadToString = &util::CTransferStream::ReadUInt32SetToString;
s_typeOperators[STREAM_DATA_SET_UINT32].m_pWriteFromString = &util::CTransferStream::WriteUInt32SetFromString;
s_typeOperators[STREAM_DATA_SET_UINT32_NULL].m_pReadToString = &util::CTransferStream::ReadUInt32SetToString;
s_typeOperators[STREAM_DATA_SET_UINT32_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt32SetFromString;
s_typeOperators[STREAM_DATA_SET_UINT64].m_pReadToString = &util::CTransferStream::ReadUInt64SetToString;
s_typeOperators[STREAM_DATA_SET_UINT64].m_pWriteFromString = &util::CTransferStream::WriteUInt64SetFromString;
s_typeOperators[STREAM_DATA_SET_UINT64_NULL].m_pReadToString = &util::CTransferStream::ReadUInt64SetToString;
s_typeOperators[STREAM_DATA_SET_UINT64_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt64SetFromString;
s_typeOperators[STREAM_DATA_SET_INT8].m_pReadToString = &util::CTransferStream::ReadInt8SetToString;
s_typeOperators[STREAM_DATA_SET_INT8].m_pWriteFromString = &util::CTransferStream::WriteInt8SetFromString;
s_typeOperators[STREAM_DATA_SET_INT8_NULL].m_pReadToString = &util::CTransferStream::ReadInt8SetToString;
s_typeOperators[STREAM_DATA_SET_INT8_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt8SetFromString;
s_typeOperators[STREAM_DATA_SET_INT16].m_pReadToString = &util::CTransferStream::ReadInt16SetToString;
s_typeOperators[STREAM_DATA_SET_INT16].m_pWriteFromString = &util::CTransferStream::WriteInt16SetFromString;
s_typeOperators[STREAM_DATA_SET_INT16_NULL].m_pReadToString = &util::CTransferStream::ReadInt16SetToString;
s_typeOperators[STREAM_DATA_SET_INT16_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt16SetFromString;
s_typeOperators[STREAM_DATA_SET_INT32].m_pReadToString = &util::CTransferStream::ReadInt32SetToString;
s_typeOperators[STREAM_DATA_SET_INT32].m_pWriteFromString = &util::CTransferStream::WriteInt32SetFromString;
s_typeOperators[STREAM_DATA_SET_INT32_NULL].m_pReadToString = &util::CTransferStream::ReadInt32SetToString;
s_typeOperators[STREAM_DATA_SET_INT32_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt32SetFromString;
s_typeOperators[STREAM_DATA_SET_INT64].m_pReadToString = &util::CTransferStream::ReadInt64SetToString;
s_typeOperators[STREAM_DATA_SET_INT64].m_pWriteFromString = &util::CTransferStream::WriteInt64SetFromString;
s_typeOperators[STREAM_DATA_SET_INT64_NULL].m_pReadToString = &util::CTransferStream::ReadInt64SetToString;
s_typeOperators[STREAM_DATA_SET_INT64_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt64SetFromString;
s_typeOperators[STREAM_DATA_SET_FLOAT].m_pReadToString = &util::CTransferStream::ReadFloatSetToString;
s_typeOperators[STREAM_DATA_SET_FLOAT].m_pWriteFromString = &util::CTransferStream::WriteFloatSetFromString;
s_typeOperators[STREAM_DATA_SET_FLOAT_NULL].m_pReadToString = &util::CTransferStream::ReadFloatSetToString;
s_typeOperators[STREAM_DATA_SET_FLOAT_NULL].m_pWriteFromString = &util::CTransferStream::WriteFloatSetFromString;
s_typeOperators[STREAM_DATA_SET_DOUBLE].m_pReadToString = &util::CTransferStream::ReadDoubleSetToString;
s_typeOperators[STREAM_DATA_SET_DOUBLE].m_pWriteFromString = &util::CTransferStream::WriteDoubleSetFromString;
s_typeOperators[STREAM_DATA_SET_DOUBLE_NULL].m_pReadToString = &util::CTransferStream::ReadDoubleSetToString;
s_typeOperators[STREAM_DATA_SET_DOUBLE_NULL].m_pWriteFromString = &util::CTransferStream::WriteDoubleSetFromString;
s_typeOperators[STREAM_DATA_SET_STRING].m_pReadToString = &util::CTransferStream::ReadStringSetToString;
s_typeOperators[STREAM_DATA_SET_STRING].m_pWriteFromString = &util::CTransferStream::WriteStringSetFromString;
s_typeOperators[STREAM_DATA_SET_STRING_NULL].m_pReadToString = &util::CTransferStream::ReadStringSetToString;
s_typeOperators[STREAM_DATA_SET_STRING_NULL].m_pWriteFromString = &util::CTransferStream::WriteStringSetFromString;
////////////////////////////
s_typeOperators[STREAM_DATA_LIST_BOOL].m_pReadToString = &util::CTransferStream::ReadBoolSetToString;
s_typeOperators[STREAM_DATA_LIST_BOOL].m_pWriteFromString = &util::CTransferStream::WriteBoolSetFromString;
s_typeOperators[STREAM_DATA_LIST_BOOL_NULL].m_pReadToString = &util::CTransferStream::ReadBoolSetToString;
s_typeOperators[STREAM_DATA_LIST_BOOL_NULL].m_pWriteFromString = &util::CTransferStream::WriteBoolSetFromString;
s_typeOperators[STREAM_DATA_LIST_UINT8].m_pReadToString = &util::CTransferStream::ReadUInt8SetToString;
s_typeOperators[STREAM_DATA_LIST_UINT8].m_pWriteFromString = &util::CTransferStream::WriteUInt8SetFromString;
s_typeOperators[STREAM_DATA_LIST_UINT8_NULL].m_pReadToString = &util::CTransferStream::ReadUInt8SetToString;
s_typeOperators[STREAM_DATA_LIST_UINT8_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt8SetFromString;
s_typeOperators[STREAM_DATA_LIST_UINT16].m_pReadToString = &util::CTransferStream::ReadUInt16SetToString;
s_typeOperators[STREAM_DATA_LIST_UINT16].m_pWriteFromString = &util::CTransferStream::WriteUInt16SetFromString;
s_typeOperators[STREAM_DATA_LIST_UINT16_NULL].m_pReadToString = &util::CTransferStream::ReadUInt16SetToString;
s_typeOperators[STREAM_DATA_LIST_UINT16_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt16SetFromString;
s_typeOperators[STREAM_DATA_LIST_UINT32].m_pReadToString = &util::CTransferStream::ReadUInt32SetToString;
s_typeOperators[STREAM_DATA_LIST_UINT32].m_pWriteFromString = &util::CTransferStream::WriteUInt32SetFromString;
s_typeOperators[STREAM_DATA_LIST_UINT32_NULL].m_pReadToString = &util::CTransferStream::ReadUInt32SetToString;
s_typeOperators[STREAM_DATA_LIST_UINT32_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt32SetFromString;
s_typeOperators[STREAM_DATA_LIST_UINT64].m_pReadToString = &util::CTransferStream::ReadUInt64SetToString;
s_typeOperators[STREAM_DATA_LIST_UINT64].m_pWriteFromString = &util::CTransferStream::WriteUInt64SetFromString;
s_typeOperators[STREAM_DATA_LIST_UINT64_NULL].m_pReadToString = &util::CTransferStream::ReadUInt64SetToString;
s_typeOperators[STREAM_DATA_LIST_UINT64_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt64SetFromString;
s_typeOperators[STREAM_DATA_LIST_INT8].m_pReadToString = &util::CTransferStream::ReadInt8SetToString;
s_typeOperators[STREAM_DATA_LIST_INT8].m_pWriteFromString = &util::CTransferStream::WriteInt8SetFromString;
s_typeOperators[STREAM_DATA_LIST_INT8_NULL].m_pReadToString = &util::CTransferStream::ReadInt8SetToString;
s_typeOperators[STREAM_DATA_LIST_INT8_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt8SetFromString;
s_typeOperators[STREAM_DATA_LIST_INT16].m_pReadToString = &util::CTransferStream::ReadInt16SetToString;
s_typeOperators[STREAM_DATA_LIST_INT16].m_pWriteFromString = &util::CTransferStream::WriteInt16SetFromString;
s_typeOperators[STREAM_DATA_LIST_INT16_NULL].m_pReadToString = &util::CTransferStream::ReadInt16SetToString;
s_typeOperators[STREAM_DATA_LIST_INT16_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt16SetFromString;
s_typeOperators[STREAM_DATA_LIST_INT32].m_pReadToString = &util::CTransferStream::ReadInt32SetToString;
s_typeOperators[STREAM_DATA_LIST_INT32].m_pWriteFromString = &util::CTransferStream::WriteInt32SetFromString;
s_typeOperators[STREAM_DATA_LIST_INT32_NULL].m_pReadToString = &util::CTransferStream::ReadInt32SetToString;
s_typeOperators[STREAM_DATA_LIST_INT32_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt32SetFromString;
s_typeOperators[STREAM_DATA_LIST_INT64].m_pReadToString = &util::CTransferStream::ReadInt64SetToString;
s_typeOperators[STREAM_DATA_LIST_INT64].m_pWriteFromString = &util::CTransferStream::WriteInt64SetFromString;
s_typeOperators[STREAM_DATA_LIST_INT64_NULL].m_pReadToString = &util::CTransferStream::ReadInt64SetToString;
s_typeOperators[STREAM_DATA_LIST_INT64_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt64SetFromString;
s_typeOperators[STREAM_DATA_LIST_FLOAT].m_pReadToString = &util::CTransferStream::ReadFloatSetToString;
s_typeOperators[STREAM_DATA_LIST_FLOAT].m_pWriteFromString = &util::CTransferStream::WriteFloatSetFromString;
s_typeOperators[STREAM_DATA_LIST_FLOAT_NULL].m_pReadToString = &util::CTransferStream::ReadFloatSetToString;
s_typeOperators[STREAM_DATA_LIST_FLOAT_NULL].m_pWriteFromString = &util::CTransferStream::WriteFloatSetFromString;
s_typeOperators[STREAM_DATA_LIST_DOUBLE].m_pReadToString = &util::CTransferStream::ReadDoubleSetToString;
s_typeOperators[STREAM_DATA_LIST_DOUBLE].m_pWriteFromString = &util::CTransferStream::WriteDoubleSetFromString;
s_typeOperators[STREAM_DATA_LIST_DOUBLE_NULL].m_pReadToString = &util::CTransferStream::ReadDoubleSetToString;
s_typeOperators[STREAM_DATA_LIST_DOUBLE_NULL].m_pWriteFromString = &util::CTransferStream::WriteDoubleSetFromString;
s_typeOperators[STREAM_DATA_LIST_STRING].m_pReadToString = &util::CTransferStream::ReadStringSetToString;
s_typeOperators[STREAM_DATA_LIST_STRING].m_pWriteFromString = &util::CTransferStream::WriteStringSetFromString;
s_typeOperators[STREAM_DATA_LIST_STRING_NULL].m_pReadToString = &util::CTransferStream::ReadStringSetToString;
s_typeOperators[STREAM_DATA_LIST_STRING_NULL].m_pWriteFromString = &util::CTransferStream::WriteStringSetFromString;
/////////////////////////////
s_typeOperators[STREAM_DATA_ARRAY_BOOL].m_pReadToString = &util::CTransferStream::ReadBoolSetToString;
s_typeOperators[STREAM_DATA_ARRAY_BOOL].m_pWriteFromString = &util::CTransferStream::WriteBoolSetFromString;
s_typeOperators[STREAM_DATA_ARRAY_BOOL_NULL].m_pReadToString = &util::CTransferStream::ReadBoolSetToString;
s_typeOperators[STREAM_DATA_ARRAY_BOOL_NULL].m_pWriteFromString = &util::CTransferStream::WriteBoolSetFromString;
s_typeOperators[STREAM_DATA_ARRAY_UINT8].m_pReadToString = &util::CTransferStream::ReadUInt8SetToString;
s_typeOperators[STREAM_DATA_ARRAY_UINT8].m_pWriteFromString = &util::CTransferStream::WriteUInt8SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_UINT8_NULL].m_pReadToString = &util::CTransferStream::ReadUInt8SetToString;
s_typeOperators[STREAM_DATA_ARRAY_UINT8_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt8SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_UINT16].m_pReadToString = &util::CTransferStream::ReadUInt16SetToString;
s_typeOperators[STREAM_DATA_ARRAY_UINT16].m_pWriteFromString = &util::CTransferStream::WriteUInt16SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_UINT16_NULL].m_pReadToString = &util::CTransferStream::ReadUInt16SetToString;
s_typeOperators[STREAM_DATA_ARRAY_UINT16_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt16SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_UINT32].m_pReadToString = &util::CTransferStream::ReadUInt32SetToString;
s_typeOperators[STREAM_DATA_ARRAY_UINT32].m_pWriteFromString = &util::CTransferStream::WriteUInt32SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_UINT32_NULL].m_pReadToString = &util::CTransferStream::ReadUInt32SetToString;
s_typeOperators[STREAM_DATA_ARRAY_UINT32_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt32SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_UINT64].m_pReadToString = &util::CTransferStream::ReadUInt64SetToString;
s_typeOperators[STREAM_DATA_ARRAY_UINT64].m_pWriteFromString = &util::CTransferStream::WriteUInt64SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_UINT64_NULL].m_pReadToString = &util::CTransferStream::ReadUInt64SetToString;
s_typeOperators[STREAM_DATA_ARRAY_UINT64_NULL].m_pWriteFromString = &util::CTransferStream::WriteUInt64SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_INT8].m_pReadToString = &util::CTransferStream::ReadInt8SetToString;
s_typeOperators[STREAM_DATA_ARRAY_INT8].m_pWriteFromString = &util::CTransferStream::WriteInt8SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_INT8_NULL].m_pReadToString = &util::CTransferStream::ReadInt8SetToString;
s_typeOperators[STREAM_DATA_ARRAY_INT8_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt8SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_INT16].m_pReadToString = &util::CTransferStream::ReadInt16SetToString;
s_typeOperators[STREAM_DATA_ARRAY_INT16].m_pWriteFromString = &util::CTransferStream::WriteInt16SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_INT16_NULL].m_pReadToString = &util::CTransferStream::ReadInt16SetToString;
s_typeOperators[STREAM_DATA_ARRAY_INT16_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt16SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_INT32].m_pReadToString = &util::CTransferStream::ReadInt32SetToString;
s_typeOperators[STREAM_DATA_ARRAY_INT32].m_pWriteFromString = &util::CTransferStream::WriteInt32SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_INT32_NULL].m_pReadToString = &util::CTransferStream::ReadInt32SetToString;
s_typeOperators[STREAM_DATA_ARRAY_INT32_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt32SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_INT64].m_pReadToString = &util::CTransferStream::ReadInt64SetToString;
s_typeOperators[STREAM_DATA_ARRAY_INT64].m_pWriteFromString = &util::CTransferStream::WriteInt64SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_INT64_NULL].m_pReadToString = &util::CTransferStream::ReadInt64SetToString;
s_typeOperators[STREAM_DATA_ARRAY_INT64_NULL].m_pWriteFromString = &util::CTransferStream::WriteInt64SetFromString;
s_typeOperators[STREAM_DATA_ARRAY_FLOAT].m_pReadToString = &util::CTransferStream::ReadFloatSetToString;
s_typeOperators[STREAM_DATA_ARRAY_FLOAT].m_pWriteFromString = &util::CTransferStream::WriteFloatSetFromString;
s_typeOperators[STREAM_DATA_ARRAY_FLOAT_NULL].m_pReadToString = &util::CTransferStream::ReadFloatSetToString;
s_typeOperators[STREAM_DATA_ARRAY_FLOAT_NULL].m_pWriteFromString = &util::CTransferStream::WriteFloatSetFromString;
s_typeOperators[STREAM_DATA_ARRAY_DOUBLE].m_pReadToString = &util::CTransferStream::ReadDoubleSetToString;
s_typeOperators[STREAM_DATA_ARRAY_DOUBLE].m_pWriteFromString = &util::CTransferStream::WriteDoubleSetFromString;
s_typeOperators[STREAM_DATA_ARRAY_DOUBLE_NULL].m_pReadToString = &util::CTransferStream::ReadDoubleSetToString;
s_typeOperators[STREAM_DATA_ARRAY_DOUBLE_NULL].m_pWriteFromString = &util::CTransferStream::WriteDoubleSetFromString;
s_typeOperators[STREAM_DATA_ARRAY_STRING].m_pReadToString = &util::CTransferStream::ReadStringSetToString;
s_typeOperators[STREAM_DATA_ARRAY_STRING].m_pWriteFromString = &util::CTransferStream::WriteStringSetFromString;
s_typeOperators[STREAM_DATA_ARRAY_STRING_NULL].m_pReadToString = &util::CTransferStream::ReadStringSetToString;
s_typeOperators[STREAM_DATA_ARRAY_STRING_NULL].m_pWriteFromString = &util::CTransferStream::WriteStringSetFromString;
}
//////////////////////////////////////////////////////////////////////////
// Read the native types from the front of the buffer
// Write the native types to the end of the buffer
// Extract one bit into a std::vector<bool> element proxy.
// A depleted stream yields false and leaves the read cursor untouched;
// otherwise the cursor advances by exactly one bit.
CTransferStream& CTransferStream::operator>>(std::vector<bool>::reference output)
{
    if(m_readOffset + 1 > m_writeOffset) {
        output = false;
        return *this;
    }
    const unsigned char bitMask = 0x80 >> MOD8(m_readOffset);
    output = (m_data[DIV8(m_readOffset)] & bitMask) != 0;
    ++m_readOffset;
    return *this;
}
// Extract one bit into a bool.
// A depleted stream yields false and leaves the read cursor untouched;
// otherwise the cursor advances by exactly one bit.
CTransferStream& CTransferStream::operator>>(bool& output)
{
    if(m_readOffset + 1 > m_writeOffset) {
        output = false;
        return *this;
    }
    const unsigned char bitMask = 0x80 >> MOD8(m_readOffset);
    output = (m_data[DIV8(m_readOffset)] & bitMask) != 0;
    ++m_readOffset;
    return *this;
}
// Extract a single char; zeroes the target when the stream runs dry.
CTransferStream& CTransferStream::operator>>(char& output)
{
    const bool gotBits = ReadBits(reinterpret_cast<unsigned char*>(&output), 8 * sizeof(output));
    if(!gotBits) {
        output = 0;
    }
    return *this;
}
// Extract an unsigned 8-bit value; zeroes the target when the stream runs dry.
CTransferStream& CTransferStream::operator>>(uint8_t& output)
{
    const bool gotBits = ReadBits(reinterpret_cast<unsigned char*>(&output), 8 * sizeof(output));
    if(!gotBits) {
        output = 0;
    }
    return *this;
}
// Extract a signed 8-bit value; zeroes the target when the stream runs dry.
CTransferStream& CTransferStream::operator>>(int8_t& output)
{
    const bool gotBits = ReadBits(reinterpret_cast<unsigned char*>(&output), 8 * sizeof(output));
    if(!gotBits) {
        output = 0;
    }
    return *this;
}
// Extract a uint16_t. On a short read the value is zeroed; on a successful
// read the bytes are reversed on little-endian hosts, so the wire format
// appears to be big-endian.
CTransferStream& CTransferStream::operator>>(uint16_t& output)
{
    const bool gotBits = ReadBits(reinterpret_cast<unsigned char*>(&output), 8 * sizeof(uint16_t));
    if(!gotBits) {
        output = 0;
        return *this;
    }
    if(m_isLittleEndian) {
        ReverseBytes(reinterpret_cast<char*>(&output), sizeof(uint16_t));
    }
    return *this;
}
// Extract an int16_t. On a short read the value is zeroed; on a successful
// read the bytes are reversed on little-endian hosts.
CTransferStream& CTransferStream::operator>>(int16_t& output)
{
    const bool gotBits = ReadBits(reinterpret_cast<unsigned char*>(&output), 8 * sizeof(int16_t));
    if(!gotBits) {
        output = 0;
        return *this;
    }
    if(m_isLittleEndian) {
        ReverseBytes(reinterpret_cast<char*>(&output), sizeof(int16_t));
    }
    return *this;
}
// Extract a uint32_t. On a short read the value is zeroed; on a successful
// read the bytes are reversed on little-endian hosts.
CTransferStream& CTransferStream::operator>>(uint32_t& output)
{
    const bool gotBits = ReadBits(reinterpret_cast<unsigned char*>(&output), 8 * sizeof(uint32_t));
    if(!gotBits) {
        output = 0;
        return *this;
    }
    if(m_isLittleEndian) {
        ReverseBytes(reinterpret_cast<char*>(&output), sizeof(uint32_t));
    }
    return *this;
}
// Extract an int32_t. On a short read the value is zeroed; on a successful
// read the bytes are reversed on little-endian hosts.
CTransferStream& CTransferStream::operator>>(int32_t& output)
{
    const bool gotBits = ReadBits(reinterpret_cast<unsigned char*>(&output), 8 * sizeof(int32_t));
    if(!gotBits) {
        output = 0;
        return *this;
    }
    if(m_isLittleEndian) {
        ReverseBytes(reinterpret_cast<char*>(&output), sizeof(int32_t));
    }
    return *this;
}
#ifndef NO_INT64
// Extract a uint64_t. On a short read the value is zeroed; on a successful
// read the bytes are reversed on little-endian hosts.
CTransferStream& CTransferStream::operator>>(uint64_t& output)
{
    const bool gotBits = ReadBits(reinterpret_cast<unsigned char*>(&output), 8 * sizeof(uint64_t));
    if(!gotBits) {
        output = 0;
        return *this;
    }
    if(m_isLittleEndian) {
        ReverseBytes(reinterpret_cast<char*>(&output), sizeof(uint64_t));
    }
    return *this;
}
// Extract an int64_t. On a short read the value is zeroed; on a successful
// read the bytes are reversed on little-endian hosts.
CTransferStream& CTransferStream::operator>>(int64_t& output)
{
    const bool gotBits = ReadBits(reinterpret_cast<unsigned char*>(&output), 8 * sizeof(int64_t));
    if(!gotBits) {
        output = 0;
        return *this;
    }
    if(m_isLittleEndian) {
        ReverseBytes(reinterpret_cast<char*>(&output), sizeof(int64_t));
    }
    return *this;
}
#endif
// Extract a float. On a short read the value is zeroed; on a successful
// read the bytes are reversed on little-endian hosts.
CTransferStream& CTransferStream::operator>>(float& output)
{
    const bool gotBits = ReadBits(reinterpret_cast<unsigned char*>(&output), 8 * sizeof(float));
    if(!gotBits) {
        output = 0.;
        return *this;
    }
    if(m_isLittleEndian) {
        ReverseBytes(reinterpret_cast<char*>(&output), sizeof(float));
    }
    return *this;
}
// Extract a double. On a short read the value is zeroed; on a successful
// read the bytes are reversed on little-endian hosts.
CTransferStream& CTransferStream::operator>>(double& output)
{
    const bool gotBits = ReadBits(reinterpret_cast<unsigned char*>(&output), 8 * sizeof(double));
    if(!gotBits) {
        output = 0.;
        return *this;
    }
    if(m_isLittleEndian) {
        ReverseBytes(reinterpret_cast<char*>(&output), sizeof(double));
    }
    return *this;
}
// Read an array or casted stream
// Read numberOfBytes bytes from the stream into output.
// Thin wrapper over ReadBits (bits = bytes * 8); the read cursor need not be
// byte-aligned. Returns false — with output left untouched — when fewer than
// numberOfBytes bytes remain unread (ReadBits bails out before writing).
bool CTransferStream::ReadBytes( char* output, const int numberOfBytes )
{
	return ReadBits((unsigned char*)output, numberOfBytes * 8);
}
#ifndef NO_TEMPLATE
// Extract a length-prefixed string: a uint16_t byte count followed by the
// raw bytes. On any failure (empty length, oversized length, truncated
// stream) the output is left as an empty string.
CTransferStream& CTransferStream::operator>>(std::string& output)
{
    output.clear();
    uint16_t len = 0;
    *this >> len;
    if(len < 1) {
        return *this;
    }
    // BUG FIX: a corrupt or hostile length prefix must not overflow the
    // stack buffer. Previously any len >= TS_STRING_MAX_SIZE smashed the
    // stack (szBuf[len] = '\0' wrote past the end).
    if(len >= TS_STRING_MAX_SIZE) {
        return *this;
    }
    char szBuf[TS_STRING_MAX_SIZE];
    // BUG FIX: check the read result — a truncated stream must not publish
    // uninitialized stack bytes as string content.
    if(!ReadBytes(szBuf, len)) {
        return *this;
    }
    szBuf[len] = '\0';
    output = szBuf;
    return *this;
}
#endif
// Sets the read pointer back to the beginning of your data.
// Rewind the read cursor to bit 0 so the stream's contents can be re-read.
// Does not touch the written data or the write cursor.
void CTransferStream::ResetReadPointer(void)
{
	m_readOffset = 0;
}
// Sets the write pointer back to the beginning of your data.
// Rewind the write cursor to bit 0, logically discarding the stream's
// contents. The underlying allocation is kept for reuse.
void CTransferStream::ResetWritePointer(void)
{
	m_writeOffset = 0;
}
// Write a 0
// Append a single 0 bit to the stream.
void CTransferStream::Write0( void )
{
    AddBitsAndReallocate(1);
    const int bitInByte = MOD8(m_writeOffset);
    // When starting a fresh byte, clear it so stale buffer contents can't
    // leak into the not-yet-written tail bits.
    if(0 == bitInByte) {
        m_data[DIV8(m_writeOffset)] = 0;
    }
    ++m_writeOffset;
}
// Write a 1
// Append a single 1 bit to the stream.
void CTransferStream::Write1(void)
{
    AddBitsAndReallocate(1);
    const int bitInByte = MOD8(m_writeOffset);
    if(0 == bitInByte) {
        // Fresh byte: assigning (not OR-ing) also zeroes the tail bits.
        m_data[DIV8(m_writeOffset)] = 0x80;
    } else {
        // Partially filled byte: set just the next bit, high bit first.
        m_data[DIV8(m_writeOffset)] |= (0x80 >> bitInByte);
    }
    ++m_writeOffset;
}
// Returns true if the next data read is a 1, false if it is a 0
// Pop one bit from the read cursor; true for a 1 bit, false for a 0 bit.
// The cursor always advances by one (note the post-increment in MOD8(...)).
// NOTE(review): unlike operator>>(bool&), this does NOT bounds-check the
// cursor against m_writeOffset — callers must ensure a bit remains unread.
bool CTransferStream::ReadBit(void)
{
#pragma warning( disable : 4800 )
	return (bool) (m_data[DIV8(m_readOffset)] & (0x80 >> MOD8(m_readOffset++)));
#pragma warning( default : 4800 )
}
// Align the bit stream to the byte boundary and then write the specified number of bits.
// This is faster than WriteBits but wastes the bits to do the alignment and requires you to call
// SetReadToByteAlignment at the corresponding read position
// Byte-align the write cursor, then append numberOfBytesToWrite raw bytes
// with a single memcpy. Faster than WriteBits but wastes the padding bits;
// the reader must re-align at the matching position (ReadAlignedBytes).
void CTransferStream::WriteAlignedBytes(const unsigned char* input,
	const int numberOfBytesToWrite)
{
    if(numberOfBytesToWrite < 1) {
#ifdef _DEBUG
        assert(false);
#endif
        return;
    }
    AlignWriteToByteBoundary();
    const int bitCount = MUL8(numberOfBytesToWrite);
    // Grow the buffer first, then copy the payload in one shot.
    AddBitsAndReallocate(bitCount);
    memcpy(m_data + DIV8(m_writeOffset), input, numberOfBytesToWrite);
    m_writeOffset += bitCount;
}
// Read bits, starting at the next aligned bits. Note that the modulus 8 starting offset of the
// sequence must be the same as was used with WriteBits. This will be a problem with packet coalescence
// unless you byte align the coalesced packets.
// Byte-align the read cursor, then copy numberOfBytesToRead raw bytes out
// of the stream. The bit position modulo 8 must match what was used with
// WriteAlignedBytes. Returns false when not enough data remains.
bool CTransferStream::ReadAlignedBytes(unsigned char* output,
	const int numberOfBytesToRead)
{
    if(numberOfBytesToRead < 1) {
#ifdef _DEBUG
        assert(false);
#endif
        return false;
    }
    AlignReadToByteBoundary();
    const int bitCount = MUL8(numberOfBytesToRead);
    if(m_readOffset + bitCount > m_writeOffset) {
        return false;
    }
    // Copy the payload out in one shot.
    memcpy(output, m_data + DIV8(m_readOffset), numberOfBytesToRead);
    m_readOffset += bitCount;
    return true;
}
// Align the next write and/or read to a byte boundary. This can be used to 'waste' bits to byte align for efficiency reasons
// Advance the write cursor to the next byte boundary. No-op when nothing
// has been written or the cursor is already aligned; the skipped bits are
// wasted in exchange for cheaper aligned writes.
void CTransferStream::AlignWriteToByteBoundary(void)
{
    if(m_writeOffset) {
        // 7 - MOD8(offset - 1) is 0 on a byte boundary, otherwise the
        // count of padding bits up to the next one.
        m_writeOffset += 7 - MOD8(m_writeOffset - 1);
    }
}
// Align the next write and/or read to a byte boundary. This can be used to 'waste' bits to byte align for efficiency reasons
// Advance the read cursor to the next byte boundary. No-op when nothing
// has been read or the cursor is already aligned; skipped bits are
// discarded, mirroring AlignWriteToByteBoundary.
void CTransferStream::AlignReadToByteBoundary(void)
{
    if(m_readOffset) {
        // 7 - MOD8(offset - 1) is 0 on a byte boundary, otherwise the
        // count of padding bits up to the next one.
        m_readOffset += 7 - MOD8(m_readOffset - 1);
    }
}
// Write numberToWrite bits from the input source
// Append numberOfBitsToWrite bits from input to the stream, one source byte
// per iteration, packing high-bit-first into the internal representation.
// rightAlignedBits: when true, a final partial byte is taken from the low
// bits of the source byte (the usual layout of user data) and shifted up.
void CTransferStream::WriteBits(const unsigned char *input,
	int numberOfBitsToWrite, const bool rightAlignedBits)
{
	if(numberOfBitsToWrite < 1) {
#ifdef _DEBUG
		assert(false);
#endif
		return;
	}
	// Grow the buffer up front so the loop never reallocates.
	AddBitsAndReallocate(numberOfBitsToWrite);
	uint32_t offset = 0;
	unsigned char dataByte;
	// Bit position within the current destination byte; constant across the
	// loop because each iteration advances the cursor by a multiple of 8
	// (except the last, after which the loop exits).
	int numberOfBitsUsedMod8 = MOD8(m_writeOffset);
	// Faster to put the while at the top surprisingly enough
	while(numberOfBitsToWrite > 0) {
		unsigned char* dest = m_data + DIV8(m_writeOffset);
		dataByte = *(input + offset);
		// rightAlignedBits means in the case of a partial byte, the bits are aligned from the right (bit 0) rather than the left (as in the normal internal representation)
		if(numberOfBitsToWrite < 8 && rightAlignedBits) {
			// shift left to get the bits on the left, as in our internal representation
			dataByte <<= (8 - numberOfBitsToWrite);
		}
		// Writing to a new byte each time
		if(0 == numberOfBitsUsedMod8) {
			*dest = dataByte;
		} else {
			// Copy over the new data.
			// First half
			*dest |= (dataByte >> numberOfBitsUsedMod8);
			// If we didn't write it all out in the first half (8 - (numberOfBitsUsed%8) is the number we wrote in the first half)
			//if((8 - numberOfBitsUsedMod8) < 8 && (8 - numberOfBitsUsedMod8) < numberOfBitsToWrite) {
			if((8 - numberOfBitsUsedMod8) < numberOfBitsToWrite) {
				// Second half (overlaps byte boundary)
				*(dest + 1) = (unsigned char) (dataByte << (8 - numberOfBitsUsedMod8));
			}
		}
		// Advance by a full byte, or by the partial remainder on the last pass.
		if(numberOfBitsToWrite >= 8) {
			m_writeOffset += 8;
		} else {
			m_writeOffset += numberOfBitsToWrite;
		}
		numberOfBitsToWrite -= 8;
		++offset;
	}
}
// Set the stream to some initial data. For internal use
// Seed an empty stream with numberOfBits bits copied from input.
// For internal use: asserts (debug builds) that nothing has been written yet,
// since the copy targets the start of the buffer.
void CTransferStream::SetData(const char* input, const int numberOfBits)
{
#ifdef _DEBUG
	// Make sure the stream is clear
	assert(0 == m_writeOffset);
#endif
	if(numberOfBits < 1) {
		return;
	}
	// Grow the buffer, then copy whole bytes (rounding the bit count up).
	AddBitsAndReallocate(numberOfBits);
	memcpy(m_data, input, TS_BITS_TO_BYTES(numberOfBits));
	m_writeOffset = numberOfBits;
}
// Read numberOfBitsToRead bits to the output source
// alignBitsToRight should be set to true to convert internal bit stream data to user data
// It should be false if you used WriteBits with rightAlignedBits false
// Copy numberOfBitsToRead bits from the read cursor into output, producing
// one output byte per iteration. Returns false — with output untouched —
// when fewer bits than requested remain unread.
// alignBitsToRight: when true, a final partial byte is shifted down so the
// data lands in the low bits (user-data layout); pass false only if the
// matching WriteBits used rightAlignedBits == false.
bool CTransferStream::ReadBits(unsigned char* output,
	int numberOfBitsToRead, const bool alignBitsToRight)
{
	if(numberOfBitsToRead < 1) {
#ifdef _DEBUG
		assert(false);
#endif
		return false;
	}
	// Bounds check happens before any write to output.
	if(m_readOffset + numberOfBitsToRead > m_writeOffset) {
		return false;
	}
	uint32_t offset = 0;
	// Zero the destination so the |= merges below start from a clean slate.
	memset(output, 0, TS_BITS_TO_BYTES(numberOfBitsToRead));
	// Bit position within the current source byte; constant across the loop
	// because each pass consumes a multiple of 8 bits (except the last).
	int readOffsetMod8 = MOD8(m_readOffset);
	// Faster to put the while at the top surprisingly enough
	while(numberOfBitsToRead > 0) {
		unsigned char* dest = output + offset;
		unsigned char* src = m_data + DIV8(m_readOffset);
		// First half
		*dest |= *src << readOffsetMod8;
		// If we have a second half, we didn't read enough bytes in the first half
		if(readOffsetMod8 > 0 && numberOfBitsToRead > (8 - readOffsetMod8)) {
			// Second half (overlaps byte boundary)
			*dest |= *(src + 1) >> (8 - readOffsetMod8);
		}
		numberOfBitsToRead -= 8;
		// Reading a partial byte for the last byte, shift right so the data is aligned on the right
		if(numberOfBitsToRead < 0) {
			if(alignBitsToRight) {
				*dest >>= -numberOfBitsToRead;
			}
			// Advance only by the bits actually consumed on this last pass.
			m_readOffset += (8 + numberOfBitsToRead);
		} else {
			m_readOffset += 8;
		}
		++offset;
	}
	return true;
}
// Reallocates (if necessary) in preparation of writing numberOfBitsToWrite
// Ensure capacity for writeBitSize more bits past the current write cursor,
// growing the backing store if needed. Capacity is doubled on growth to
// amortize reallocation cost. A stream still using its stack buffer is
// migrated to the heap only when it outgrows TRANSFERSTREAM_STACK_ALLOCA_SIZE.
void CTransferStream::AddBitsAndReallocate(const uint32_t writeBitSize)
{
    if(writeBitSize < 1) {
        return;
    }
    uint32_t newAllocBitSize = writeBitSize + m_writeOffset;
    // Grow only when the new bit count needs bytes we don't own yet.
    // Comparing rounded-up byte counts is equivalent to the old
    // DIV8(x - 1) comparison for positive sizes, and also behaves
    // correctly when m_allocBitSize is 0 (where the old expression
    // underflowed and skipped the allocation entirely).
    if(TS_BITS_TO_BYTES(newAllocBitSize) > TS_BITS_TO_BYTES(m_allocBitSize)) {
        if(m_copyData == false) {
            // If this assert hits then we need to specify true for the third parameter in the constructor
            // It needs to reallocate to hold all the data and can't do it unless we allocated to begin with
            assert(false);
            return;
        }
        // Double the request: less memory efficient but saves on reallocations.
        newAllocBitSize = newAllocBitSize * 2;
        uint32_t newAllocByteSize = TS_BITS_TO_BYTES(newAllocBitSize);
        if(m_data == (unsigned char*)m_stackData) {
            if(newAllocByteSize > TRANSFERSTREAM_STACK_ALLOCA_SIZE) {
                m_data = (unsigned char*) malloc(newAllocByteSize);
                // need to copy the stack data over to our new memory area too
                memcpy((void *)m_data, (void *)m_stackData, TS_BITS_TO_BYTES(m_allocBitSize));
            }
        } else {
            // BUG FIX: realloc takes a byte count; the old code passed
            // newAllocBitSize (bits), over-allocating eight-fold.
            m_data = (unsigned char*) realloc(m_data, newAllocByteSize);
        }
#ifdef _DEBUG
        // Make sure malloc/realloc succeeded
        assert( m_data );
#endif
    }
    if(newAllocBitSize > m_allocBitSize) {
        m_allocBitSize = newAllocBitSize;
    }
}
// Should hit if reads didn't match writes
// Debug aid: asserts that every written bit has been read back.
// Fires when reads didn't match writes (a serialization mismatch).
void CTransferStream::AssertStreamEmpty(void)
{
	assert(m_readOffset == m_writeOffset);
}
// Debug dump: print the used portion of the stream to stdout as '0'/'1'
// characters, one space-separated group per byte, most significant bit
// first. Unwritten tail bits of the final byte are omitted.
void CTransferStream::PrintBits(void) const
{
    if(m_writeOffset < 1) {
        return;
    }
    const uint32_t byteCount = TS_BITS_TO_BYTES(m_writeOffset);
    const uint32_t lastByte = DIV8(m_writeOffset - 1);
    for(uint32_t byteIdx = 0; byteIdx < byteCount; ++byteIdx) {
        // Only the final byte may be partially written; stop at its lowest
        // valid bit instead of printing garbage tail bits.
        const int lowestBit = (byteIdx == lastByte)
            ? 8 - (MOD8(m_writeOffset - 1) + 1)
            : 0;
        for(int bit = 7; bit >= lowestBit; --bit) {
            putchar(((m_data[byteIdx] >> bit) & 1) ? '1' : '0');
        }
        putchar(' ');
    }
    putchar('\n');
}
// Exposes the data for you to look at, like PrintBits does.
// Data will point to the stream. Returns the length in bits of the stream.
// Hand the caller a freshly new[]-allocated copy of the used bytes via
// *data; returns the stream length in bits. Returns 0 (allocating nothing)
// when data is NULL or the stream is empty. The caller owns the buffer.
int CTransferStream::CopyData(unsigned char** data) const
{
    if(NULL == data) {
        return 0;
    }
    if(m_writeOffset < 1) {
#ifdef _DEBUG
        assert(false);
#endif
        return 0;
    }
    const uint32_t byteCount = TS_BITS_TO_BYTES(m_writeOffset);
    unsigned char* ownedCopy = new unsigned char[byteCount];
    memcpy(ownedCopy, m_data, byteCount);
    *data = ownedCopy;
    return m_writeOffset;
}
// Ignore data we don't intend to read
// Skip numberOfBits of input without copying them anywhere.
// NOTE(review): no bounds check — the cursor can be pushed past
// m_writeOffset, after which subsequent reads fail.
void CTransferStream::IgnoreBits(const int numberOfBits)
{
	m_readOffset += numberOfBits;
}
// Move the write pointer to a position on the array. Dangerous if you don't know what you are doing!
// Move the write cursor to an absolute bit position.
// Dangerous if you don't know what you are doing: no validation against
// the allocated size is performed.
void CTransferStream::SetWriteOffset(const uint32_t offset)
{
	m_writeOffset = offset;
}
// Returns the length in bits of the stream
// Current write cursor position — equivalently, the stream length in bits.
uint32_t CTransferStream::GetWriteOffset(void) const
{
	return m_writeOffset;
}
// Returns the length in bytes of the stream
// Stream length in bytes, rounding a trailing partial byte up.
uint32_t CTransferStream::GetNumberOfBytesUsed(void) const
{
	return TS_BITS_TO_BYTES(m_writeOffset);
}
// Move the read pointer to a position on the array.
// Move the read cursor to an absolute bit position.
// No validation is performed against the write cursor.
void CTransferStream::SetReadOffset(const uint32_t offset)
{
	m_readOffset = offset;
}
// Returns the number of bits into the stream that we have read
// Number of bits into the stream that have been read so far.
uint32_t CTransferStream::GetReadOffset(void) const
{
	return m_readOffset;
}
// Returns the number of bits left in the stream that haven't been read
// Bits written but not yet consumed by reads; clamps to 0 if the read
// cursor has been pushed past the write cursor (e.g. via IgnoreBits).
uint32_t CTransferStream::GetNumberOfUnreadBits(void) const
{
    return (m_writeOffset > m_readOffset) ? (m_writeOffset - m_readOffset) : 0;
}
// Exposes the internal data
// Expose the internal buffer without copying.
// The pointer is invalidated by any write that triggers a reallocation.
const char* CTransferStream::GetData(void) const
{
	return (char*)m_data;
}
// If we used the constructor version with copy data off, this makes sure it is set to on and the data pointed to is copied.
// Lazily switch a stream constructed with copyData == false (aliasing a
// caller-owned buffer) into owning mode by taking a private malloc'd copy
// of the current contents. No-op when the stream already owns its data.
void CTransferStream::AssertCopyData(void)
{
    if(m_copyData) {
        return;
    }
    m_copyData = true;
    if(m_allocBitSize > 0) {
        // Renamed from the original's misleading "allocBitSize": this is bytes.
        const uint32_t allocByteSize = TS_BITS_TO_BYTES(m_allocBitSize);
        unsigned char* ownedCopy = (unsigned char*) malloc(allocByteSize);
#ifdef _DEBUG
        assert(m_data);
#endif
        memcpy(ownedCopy, m_data, allocByteSize);
        m_data = ownedCopy;
    } else {
        m_data = NULL;
    }
}
|
<reponame>0393liyang/springCloud<filename>mc-commons/mc-common-core/src/main/java/com/mc/common/exception/IdempotencyException.java
package com.mc.common.exception;
/**
* [IdempotencyException 幂等性异常]
*
* @author likai
* @version 1.0
* @date 2019/12/10 0010 18:29
* @company Gainet
* @copyright copyright (c) 2019
*/
public class IdempotencyException extends RuntimeException {

    private static final long serialVersionUID = 6610083281801529147L;

    /**
     * Create an idempotency exception.
     *
     * @param message detail message describing the idempotency violation
     */
    public IdempotencyException(String message) {
        super(message);
    }

    /**
     * Create an idempotency exception wrapping an underlying cause, so the
     * original failure is preserved for diagnostics (standard practice for
     * exception translation at service boundaries).
     *
     * @param message detail message describing the idempotency violation
     * @param cause   the underlying cause of this exception
     */
    public IdempotencyException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
/*
* Copyright (C) 2012-2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.archinnov.achilles.schemabuilder;
import static org.fest.assertions.api.Assertions.assertThat;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import info.archinnov.achilles.schemabuilder.SchemaBuilder;
public class DropTest {

    @Rule
    public ExpectedException exception = ExpectedException.none();

    @Test
    public void should_drop_table() throws Exception {
        //When / Then
        assertThat(SchemaBuilder.dropTable("test").build())
                .isEqualTo("DROP TABLE test");
    }

    @Test
    public void should_drop_table_with_keyspace() throws Exception {
        //When / Then
        assertThat(SchemaBuilder.dropTable("ks", "test").build())
                .isEqualTo("DROP TABLE ks.test");
    }

    @Test
    public void should_drop_table_with_keyspace_if_exists() throws Exception {
        //When / Then
        assertThat(SchemaBuilder.dropTable("ks", "test").ifExists(true).build())
                .isEqualTo("DROP TABLE IF EXISTS ks.test");
    }

    @Test
    public void should_fail_if_keyspace_name_is_a_reserved_keyword() throws Exception {
        exception.expect(IllegalArgumentException.class);
        exception.expectMessage("The keyspace name 'add' is not allowed because it is a reserved keyword");

        SchemaBuilder.dropTable("add","test").build();
    }

    @Test
    public void should_fail_if_table_name_is_a_reserved_keyword() throws Exception {
        exception.expect(IllegalArgumentException.class);
        exception.expectMessage("The table name 'add' is not allowed because it is a reserved keyword");

        SchemaBuilder.dropTable("add").build();
    }
}
|
#include <cctype>
#include <iostream>
#include <string>
// Collapse every run of consecutive whitespace characters in `str` into the
// first whitespace character of that run; all non-whitespace characters are
// kept unchanged. E.g. "a \t b" -> "a b" (the run " \t " keeps its leading ' ').
//
// Fixes vs. original: std::isspace has undefined behavior when passed a plain
// char with a negative value (e.g. bytes >= 0x80 on signed-char platforms), so
// the argument is cast to unsigned char; the int counter is replaced by a
// clearer bool "inside a whitespace run" flag.
std::string consolidateWhitespace(std::string str) {
    std::string result;
    result.reserve(str.size());
    bool inWhitespaceRun = false;
    for (char c : str) {
        if (std::isspace(static_cast<unsigned char>(c))) {
            // Keep only the first whitespace character of each run.
            if (!inWhitespaceRun)
                result += c;
            inWhitespaceRun = true;
        } else {
            result += c;
            inWhitespaceRun = false;
        }
    }
    return result;
}
int main() {
    // Demonstrate consolidateWhitespace on a sample string and print the result.
    const std::string input = "Hello world ! ";
    const std::string collapsed = consolidateWhitespace(input);
    std::cout << collapsed << std::endl;
    return 0;
}
// Output: Hello world ! |
runTest() {
    # Usage: runTest EXPECTED_CODE OUTPUT_FILE CMD [ARGS...]
    # Runs CMD with its stdout+stderr captured in OUTPUT_FILE and aborts the
    # whole test run (exit 1) when the observed exit status disagrees with
    # EXPECTED_CODE: 0 means the command must succeed, anything else means it
    # must fail.
    #
    # Fixes vs. original: the obsolescent `[ ... -a ... ]` test form is split
    # into two `[ ... ] && [ ... ]` tests (POSIX-recommended), and ${OUTPUT}
    # is quoted in the `cat` calls so paths with spaces work.
    ERROR="${1:-}"
    shift
    OUTPUT=${1:-}
    shift
    echo "$@"
    "$@" > "${OUTPUT}" 2>&1
    RETVAL="$?"
    if [ "$ERROR" = "0" ] && [ "$RETVAL" != "0" ]; then
        # Expected success but the command failed: show its output and abort.
        echo "$@ (retval=$RETVAL) ERROR"
        cat "${OUTPUT}"
        echo "Output in ${OUTPUT}"
        exit 1
    elif [ "$ERROR" != "0" ] && [ "$RETVAL" = "0" ]; then
        # Expected failure but the command succeeded: show its output and abort.
        echo "$@ (retval=$RETVAL) ERROR"
        echo "Output in ${OUTPUT}"
        cat "${OUTPUT}"
        exit 1
    else
        echo "$@ (retval=$RETVAL) OK"
    fi
}
|
//--------------------------------------------------------------------------------------
// File: BasicCompute11.cpp
//
// Demonstrates the basics to get DirectX 11 Compute Shader (aka DirectCompute) up and
// running by implementing Array A + Array B
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//--------------------------------------------------------------------------------------
#include <stdio.h>
#include <crtdbg.h>
#include <d3dcommon.h>
#include <d3d11.h>
#include <d3dcompiler.h>
#include <d3dx11.h>
#ifndef SAFE_RELEASE
// Release a COM interface pointer and NULL it; safe to call on NULL.
#define SAFE_RELEASE(p) { if (p) { (p)->Release(); (p)=NULL; } }
#endif

// Comment out the following line to use raw buffers instead of structured buffers
#define USE_STRUCTURED_BUFFERS

// If defined, then the hardware/driver must report support for double-precision CS 5.0 shaders or the sample fails to run
//#define TEST_DOUBLE

// The number of elements in a buffer to be tested
const UINT NUM_ELEMENTS = 1024;

//--------------------------------------------------------------------------------------
// Forward declarations
//--------------------------------------------------------------------------------------
HRESULT CreateComputeDevice( ID3D11Device** ppDeviceOut, ID3D11DeviceContext** ppContextOut, BOOL bForceRef );
HRESULT CreateComputeShader( LPCWSTR pSrcFile, LPCSTR pFunctionName,
                             ID3D11Device* pDevice, ID3D11ComputeShader** ppShaderOut );
HRESULT CreateStructuredBuffer( ID3D11Device* pDevice, UINT uElementSize, UINT uCount, VOID* pInitData, ID3D11Buffer** ppBufOut );
HRESULT CreateRawBuffer( ID3D11Device* pDevice, UINT uSize, VOID* pInitData, ID3D11Buffer** ppBufOut );
HRESULT CreateBufferSRV( ID3D11Device* pDevice, ID3D11Buffer* pBuffer, ID3D11ShaderResourceView** ppSRVOut );
HRESULT CreateBufferUAV( ID3D11Device* pDevice, ID3D11Buffer* pBuffer, ID3D11UnorderedAccessView** pUAVOut );
ID3D11Buffer* CreateAndCopyToDebugBuf( ID3D11Device* pDevice, ID3D11DeviceContext* pd3dImmediateContext, ID3D11Buffer* pBuffer );
void RunComputeShader( ID3D11DeviceContext* pd3dImmediateContext,
                       ID3D11ComputeShader* pComputeShader,
                       UINT nNumViews, ID3D11ShaderResourceView** pShaderResourceViews,
                       ID3D11Buffer* pCBCS, void* pCSData, DWORD dwNumDataBytes,
                       ID3D11UnorderedAccessView* pUnorderedAccessView,
                       UINT X, UINT Y, UINT Z );
HRESULT FindDXSDKShaderFileCch( __in_ecount(cchDest) WCHAR* strDestPath,
                                int cchDest,
                                __in LPCWSTR strFilename );

//--------------------------------------------------------------------------------------
// Global variables
//--------------------------------------------------------------------------------------
// Device/context pair, the compute shader, the two input buffers with their
// SRVs, and the result buffer with its UAV. All released in main()'s cleanup.
ID3D11Device*               g_pDevice = NULL;
ID3D11DeviceContext*        g_pContext = NULL;
ID3D11ComputeShader*        g_pCS = NULL;
ID3D11Buffer*               g_pBuf0 = NULL;
ID3D11Buffer*               g_pBuf1 = NULL;
ID3D11Buffer*               g_pBufResult = NULL;
ID3D11ShaderResourceView*   g_pBuf0SRV = NULL;
ID3D11ShaderResourceView*   g_pBuf1SRV = NULL;
ID3D11UnorderedAccessView*  g_pBufResultUAV = NULL;

// Element type stored in the test buffers; main() verifies that the CS
// produced g_vBuf0[i] + g_vBuf1[i] field by field.
struct BufType
{
    int i;
    float f;
#ifdef TEST_DOUBLE
    double d;   // only present when double-precision support is being tested
#endif
};
// CPU-side source data, used both to initialize the GPU input buffers and as
// the reference for verifying the GPU result.
BufType g_vBuf0[NUM_ELEMENTS];
BufType g_vBuf1[NUM_ELEMENTS];
//--------------------------------------------------------------------------------------
// Entry point to the program
//--------------------------------------------------------------------------------------
// Entry point: creates a compute-capable device, compiles the CS, creates and
// fills the input/output buffers and their views, dispatches the shader, then
// reads the result back and verifies it against the CPU-computed sum.
int __cdecl main()
{
    // Enable run-time memory check for debug builds.
#if defined(DEBUG) || defined(_DEBUG)
    _CrtSetDbgFlag( _CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF );
#endif

    printf( "Creating device..." );
    if ( FAILED( CreateComputeDevice( &g_pDevice, &g_pContext, FALSE ) ) )
        return 1;
    printf( "done\n" );

    printf( "Creating Compute Shader..." );
    // NOTE(review): the HRESULT is ignored here; on compile failure g_pCS
    // stays NULL and the dispatch below runs with a NULL shader — consider
    // bailing out on FAILED(...) as is done for device creation above.
    CreateComputeShader( L"BasicCompute11.hlsl", "CSMain", g_pDevice, &g_pCS );
    printf( "done\n" );

    printf( "Creating buffers and filling them with initial data..." );
    // Fill both CPU arrays with the identity sequence, so the expected GPU
    // result for element i is i + i in each field.
    for ( int i = 0; i < NUM_ELEMENTS; ++i )
    {
        g_vBuf0[i].i = i;
        g_vBuf0[i].f = (float)i;
#ifdef TEST_DOUBLE
        g_vBuf0[i].d = (double)i;
#endif
        g_vBuf1[i].i = i;
        g_vBuf1[i].f = (float)i;
#ifdef TEST_DOUBLE
        g_vBuf1[i].d = (double)i;
#endif
    }

    // Result buffer is created empty (NULL init data); the CS fills it.
#ifdef USE_STRUCTURED_BUFFERS
    CreateStructuredBuffer( g_pDevice, sizeof(BufType), NUM_ELEMENTS, &g_vBuf0[0], &g_pBuf0 );
    CreateStructuredBuffer( g_pDevice, sizeof(BufType), NUM_ELEMENTS, &g_vBuf1[0], &g_pBuf1 );
    CreateStructuredBuffer( g_pDevice, sizeof(BufType), NUM_ELEMENTS, NULL, &g_pBufResult );
#else
    CreateRawBuffer( g_pDevice, NUM_ELEMENTS * sizeof(BufType), &g_vBuf0[0], &g_pBuf0 );
    CreateRawBuffer( g_pDevice, NUM_ELEMENTS * sizeof(BufType), &g_vBuf1[0], &g_pBuf1 );
    CreateRawBuffer( g_pDevice, NUM_ELEMENTS * sizeof(BufType), NULL, &g_pBufResult );
#endif

    // Name the objects so they are identifiable in debug-layer / profiler output.
#if defined(DEBUG) || defined(PROFILE)
    if ( g_pBuf0 )
        g_pBuf0->SetPrivateData( WKPDID_D3DDebugObjectName, sizeof( "Buffer0" ) - 1, "Buffer0" );
    if ( g_pBuf1 )
        g_pBuf1->SetPrivateData( WKPDID_D3DDebugObjectName, sizeof( "Buffer1" ) - 1, "Buffer1" );
    if ( g_pBufResult )
        g_pBufResult->SetPrivateData( WKPDID_D3DDebugObjectName, sizeof( "Result" ) - 1, "Result" );
#endif
    printf( "done\n" );

    printf( "Creating buffer views..." );
    CreateBufferSRV( g_pDevice, g_pBuf0, &g_pBuf0SRV );
    CreateBufferSRV( g_pDevice, g_pBuf1, &g_pBuf1SRV );
    CreateBufferUAV( g_pDevice, g_pBufResult, &g_pBufResultUAV );
#if defined(DEBUG) || defined(PROFILE)
    if ( g_pBuf0SRV )
        g_pBuf0SRV->SetPrivateData( WKPDID_D3DDebugObjectName, sizeof( "Buffer0 SRV" ) - 1, "Buffer0 SRV" );
    if ( g_pBuf1SRV )
        g_pBuf1SRV->SetPrivateData( WKPDID_D3DDebugObjectName, sizeof( "Buffer1 SRV" ) - 1, "Buffer1 SRV" );
    if ( g_pBufResultUAV )
        g_pBufResultUAV->SetPrivateData( WKPDID_D3DDebugObjectName, sizeof( "Result UAV" ) - 1, "Result UAV" );
#endif
    printf( "done\n" );

    printf( "Running Compute Shader..." );
    // No constant buffer is used (NULL/0 args); dispatch NUM_ELEMENTS thread
    // groups on X (group dimensions are defined in the HLSL — confirm there).
    ID3D11ShaderResourceView* aRViews[2] = { g_pBuf0SRV, g_pBuf1SRV };
    RunComputeShader( g_pContext, g_pCS, 2, aRViews, NULL, NULL, 0, g_pBufResultUAV, NUM_ELEMENTS, 1, 1 );
    printf( "done\n" );

    // Read back the result from GPU, verify its correctness against result computed by CPU
    {
        ID3D11Buffer* debugbuf = CreateAndCopyToDebugBuf( g_pDevice, g_pContext, g_pBufResult );
        D3D11_MAPPED_SUBRESOURCE MappedResource;
        BufType *p;
        g_pContext->Map( debugbuf, 0, D3D11_MAP_READ, 0, &MappedResource );

        // Set a break point here and put down the expression "p, 1024" in your watch window to see what has been written out by our CS
        // This is also a common trick to debug CS programs.
        p = (BufType*)MappedResource.pData;

        // Verify that if Compute Shader has done right
        printf( "Verifying against CPU result..." );
        BOOL bSuccess = TRUE;
        for ( int i = 0; i < NUM_ELEMENTS; ++i )
            if ( (p[i].i != g_vBuf0[i].i + g_vBuf1[i].i)
                 || (p[i].f != g_vBuf0[i].f + g_vBuf1[i].f)
#ifdef TEST_DOUBLE
                 || (p[i].d != g_vBuf0[i].d + g_vBuf1[i].d)
#endif
               )
            {
                printf( "failure\n" );
                bSuccess = FALSE;
                break;
            }
        if ( bSuccess )
            printf( "succeeded\n" );

        g_pContext->Unmap( debugbuf, 0 );
        SAFE_RELEASE( debugbuf );
    }

    // Release views before buffers, shader, context, then the device last.
    printf( "Cleaning up...\n" );
    SAFE_RELEASE( g_pBuf0SRV );
    SAFE_RELEASE( g_pBuf1SRV );
    SAFE_RELEASE( g_pBufResultUAV );
    SAFE_RELEASE( g_pBuf0 );
    SAFE_RELEASE( g_pBuf1 );
    SAFE_RELEASE( g_pBufResult );
    SAFE_RELEASE( g_pCS );
    SAFE_RELEASE( g_pContext );
    SAFE_RELEASE( g_pDevice );

    return 0;
}
//--------------------------------------------------------------------------------------
// This is equivalent to D3D11CreateDevice, except it dynamically loads d3d11.dll,
// this gives us a graceful way to message the user on systems with no d3d11 installed
//--------------------------------------------------------------------------------------
// Drop-in replacement for D3D11CreateDevice that loads d3d11.dll dynamically.
// On systems without D3D11 it shows a one-time, OS-version-specific message
// box and returns E_FAIL instead of failing at process load time.
HRESULT WINAPI Dynamic_D3D11CreateDevice( IDXGIAdapter* pAdapter,
                                          D3D_DRIVER_TYPE DriverType,
                                          HMODULE Software,
                                          UINT32 Flags,
                                          CONST D3D_FEATURE_LEVEL* pFeatureLevels,
                                          UINT FeatureLevels,
                                          UINT32 SDKVersion,
                                          ID3D11Device** ppDevice,
                                          D3D_FEATURE_LEVEL* pFeatureLevel,
                                          ID3D11DeviceContext** ppImmediateContext )
{
    // Function-pointer type matching D3D11CreateDevice, resolved at run time.
    typedef HRESULT (WINAPI * LPD3D11CREATEDEVICE)( IDXGIAdapter*, D3D_DRIVER_TYPE, HMODULE, UINT32, CONST D3D_FEATURE_LEVEL*, UINT, UINT32, ID3D11Device**, D3D_FEATURE_LEVEL*, ID3D11DeviceContext** );
    // Cached across calls so the DLL is loaded and resolved only once.
    static LPD3D11CREATEDEVICE s_DynamicD3D11CreateDevice = NULL;

    if ( s_DynamicD3D11CreateDevice == NULL )
    {
        HMODULE hModD3D11 = LoadLibrary( L"d3d11.dll" );
        if ( hModD3D11 == NULL )
        {
            // Ensure this "D3D11 absent" message is shown only once. As sometimes, the app would like to try
            // to create device multiple times
            static bool bMessageAlreadyShwon = false;

            if ( !bMessageAlreadyShwon )
            {
                // Tailor the error text to the Windows version: D3D11 ships
                // with 6.1+ (Win7), is installable on 6.0.6002 (Vista SP2),
                // needs a service pack on earlier 6.0, and is unsupported
                // before that.
                OSVERSIONINFOEX osv;
                memset( &osv, 0, sizeof(osv) );
                osv.dwOSVersionInfoSize = sizeof(osv);
                GetVersionEx( (LPOSVERSIONINFO)&osv );

                if ( ( osv.dwMajorVersion > 6 )
                     || ( osv.dwMajorVersion == 6 && osv.dwMinorVersion >= 1 )
                     || ( osv.dwMajorVersion == 6 && osv.dwMinorVersion == 0 && osv.dwBuildNumber > 6002 ) )
                {
                    MessageBox( 0, L"Direct3D 11 components were not found.", L"Error", MB_ICONEXCLAMATION );
                    // This should not happen, but is here for completeness as the system could be
                    // corrupted or some future OS version could pull D3D11.DLL for some reason
                }
                else if ( osv.dwMajorVersion == 6 && osv.dwMinorVersion == 0 && osv.dwBuildNumber == 6002 )
                {
                    MessageBox( 0, L"Direct3D 11 components were not found, but are available for"\
                        L" this version of Windows.\n"\
                        L"For details see Microsoft Knowledge Base Article #971644\n"\
                        L"http://support.microsoft.com/default.aspx/kb/971644/", L"Error", MB_ICONEXCLAMATION );
                }
                else if ( osv.dwMajorVersion == 6 && osv.dwMinorVersion == 0 )
                {
                    MessageBox( 0, L"Direct3D 11 components were not found. Please install the latest Service Pack.\n"\
                        L"For details see Microsoft Knowledge Base Article #935791\n"\
                        L" http://support.microsoft.com/default.aspx/kb/935791", L"Error", MB_ICONEXCLAMATION );
                }
                else
                {
                    MessageBox( 0, L"Direct3D 11 is not supported on this OS.", L"Error", MB_ICONEXCLAMATION );
                }

                bMessageAlreadyShwon = true;
            }

            return E_FAIL;
        }

        s_DynamicD3D11CreateDevice = ( LPD3D11CREATEDEVICE )GetProcAddress( hModD3D11, "D3D11CreateDevice" );
    }

    // Forward all arguments to the real D3D11CreateDevice.
    return s_DynamicD3D11CreateDevice( pAdapter, DriverType, Software, Flags, pFeatureLevels, FeatureLevels,
                                       SDKVersion, ppDevice, pFeatureLevel, ppImmediateContext );
}
//--------------------------------------------------------------------------------------
// Create the D3D device and device context suitable for running Compute Shaders(CS)
//--------------------------------------------------------------------------------------
// Creates a D3D device and immediate context suitable for running Compute
// Shaders. Tries a hardware device first (unless bForceRef); when the
// hardware lacks the required CS capability (CS4.x on FL10, or doubles when
// TEST_DOUBLE is defined) it falls back to the reference rasterizer.
HRESULT CreateComputeDevice( ID3D11Device** ppDeviceOut, ID3D11DeviceContext** ppContextOut, BOOL bForceRef )
{
    *ppDeviceOut = NULL;
    *ppContextOut = NULL;

    HRESULT hr = S_OK;

    UINT uCreationFlags = D3D11_CREATE_DEVICE_SINGLETHREADED;
#if defined(DEBUG) || defined(_DEBUG)
    uCreationFlags |= D3D11_CREATE_DEVICE_DEBUG;
#endif
    D3D_FEATURE_LEVEL flOut;
    // Accept 11.0 down to 10.0; the 10.x levels may still support compute via
    // the CS4.x hardware option checked below.
    static const D3D_FEATURE_LEVEL flvl[] = { D3D_FEATURE_LEVEL_11_0, D3D_FEATURE_LEVEL_10_1, D3D_FEATURE_LEVEL_10_0 };

    BOOL bNeedRefDevice = FALSE;
    if ( !bForceRef )
    {
        hr = Dynamic_D3D11CreateDevice( NULL,                        // Use default graphics card
                                        D3D_DRIVER_TYPE_HARDWARE,    // Try to create a hardware accelerated device
                                        NULL,                        // Do not use external software rasterizer module
                                        uCreationFlags,              // Device creation flags
                                        flvl,
                                        sizeof(flvl) / sizeof(D3D_FEATURE_LEVEL),
                                        D3D11_SDK_VERSION,           // SDK version
                                        ppDeviceOut,                 // Device out
                                        &flOut,                      // Actual feature level created
                                        ppContextOut );              // Context out

        if ( SUCCEEDED( hr ) )
        {
            // A hardware accelerated device has been created, so check for Compute Shader support
            // If we have a device >= D3D_FEATURE_LEVEL_11_0 created, full CS5.0 support is guaranteed, no need for further checks
            if ( flOut < D3D_FEATURE_LEVEL_11_0 )
            {
#ifdef TEST_DOUBLE
                bNeedRefDevice = TRUE;
                printf( "No hardware Compute Shader 5.0 capable device found (required for doubles), trying to create ref device.\n" );
#else
                // Otherwise, we need further check whether this device support CS4.x (Compute on 10)
                D3D11_FEATURE_DATA_D3D10_X_HARDWARE_OPTIONS hwopts;
                (*ppDeviceOut)->CheckFeatureSupport( D3D11_FEATURE_D3D10_X_HARDWARE_OPTIONS, &hwopts, sizeof(hwopts) );
                if ( !hwopts.ComputeShaders_Plus_RawAndStructuredBuffers_Via_Shader_4_x )
                {
                    bNeedRefDevice = TRUE;
                    printf( "No hardware Compute Shader capable device found, trying to create ref device.\n" );
                }
#endif
            }
#ifdef TEST_DOUBLE
            else
            {
                // Double-precision support is an optional feature of CS 5.0
                D3D11_FEATURE_DATA_DOUBLES hwopts;
                (*ppDeviceOut)->CheckFeatureSupport( D3D11_FEATURE_DOUBLES, &hwopts, sizeof(hwopts) );
                if ( !hwopts.DoublePrecisionFloatShaderOps )
                {
                    bNeedRefDevice = TRUE;
                    printf( "No hardware double-precision capable device found, trying to create ref device.\n" );
                }
            }
#endif
        }
    }

    if ( bForceRef || FAILED(hr) || bNeedRefDevice )
    {
        // Either because of failure on creating a hardware device or hardware lacking CS capability, we create a ref device here
        SAFE_RELEASE( *ppDeviceOut );
        SAFE_RELEASE( *ppContextOut );

        hr = Dynamic_D3D11CreateDevice( NULL,                        // Use default graphics card
                                        D3D_DRIVER_TYPE_REFERENCE,   // Create a reference (software) device
                                        NULL,                        // Do not use external software rasterizer module
                                        uCreationFlags,              // Device creation flags
                                        flvl,
                                        sizeof(flvl) / sizeof(D3D_FEATURE_LEVEL),
                                        D3D11_SDK_VERSION,           // SDK version
                                        ppDeviceOut,                 // Device out
                                        &flOut,                      // Actual feature level created
                                        ppContextOut );              // Context out
        if ( FAILED(hr) )
        {
            printf( "Reference rasterizer device create failure\n" );
            return hr;
        }
    }

    return hr;
}
//--------------------------------------------------------------------------------------
// Compile and create the CS
//--------------------------------------------------------------------------------------
// Compiles pFunctionName from pSrcFile (located via FindDXSDKShaderFileCch)
// and creates the compute shader in *ppShaderOut. The shader profile is
// chosen from the device's feature level (cs_5_0 on FL11, cs_4_0 otherwise),
// and the USE_STRUCTURED_BUFFERS / TEST_DOUBLE defines are forwarded to HLSL.
HRESULT CreateComputeShader( LPCWSTR pSrcFile, LPCSTR pFunctionName,
                             ID3D11Device* pDevice, ID3D11ComputeShader** ppShaderOut )
{
    HRESULT hr;

    // Finds the correct path for the shader file.
    // This is only required for this sample to be run correctly from within the Sample Browser,
    // in your own projects, these lines could be removed safely
    WCHAR str[MAX_PATH];
    hr = FindDXSDKShaderFileCch( str, MAX_PATH, pSrcFile );
    if ( FAILED(hr) )
        return hr;

    DWORD dwShaderFlags = D3DCOMPILE_ENABLE_STRICTNESS;
#if defined( DEBUG ) || defined( _DEBUG )
    // Set the D3DCOMPILE_DEBUG flag to embed debug information in the shaders.
    // Setting this flag improves the shader debugging experience, but still allows
    // the shaders to be optimized and to run exactly the way they will run in
    // the release configuration of this program.
    dwShaderFlags |= D3DCOMPILE_DEBUG;
#endif

    // Mirror the C++ compile-time configuration into HLSL preprocessor defines.
    const D3D_SHADER_MACRO defines[] =
    {
#ifdef USE_STRUCTURED_BUFFERS
        "USE_STRUCTURED_BUFFERS", "1",
#endif
#ifdef TEST_DOUBLE
        "TEST_DOUBLE", "1",
#endif
        NULL, NULL
    };

    // We generally prefer to use the higher CS shader profile when possible as CS 5.0 is better performance on 11-class hardware
    LPCSTR pProfile = ( pDevice->GetFeatureLevel() >= D3D_FEATURE_LEVEL_11_0 ) ? "cs_5_0" : "cs_4_0";

    ID3DBlob* pErrorBlob = NULL;
    ID3DBlob* pBlob = NULL;
    hr = D3DX11CompileFromFile( str, defines, NULL, pFunctionName, pProfile,
                                dwShaderFlags, NULL, NULL, &pBlob, &pErrorBlob, NULL );
    if ( FAILED(hr) )
    {
        // Surface the HLSL compiler diagnostics in the debugger output window.
        if ( pErrorBlob )
            OutputDebugStringA( (char*)pErrorBlob->GetBufferPointer() );
        SAFE_RELEASE( pErrorBlob );
        SAFE_RELEASE( pBlob );
        return hr;
    }

    hr = pDevice->CreateComputeShader( pBlob->GetBufferPointer(), pBlob->GetBufferSize(), NULL, ppShaderOut );
#if defined(DEBUG) || defined(PROFILE)
    // Name the shader object after its entry point for debug-layer output.
    if ( *ppShaderOut )
        (*ppShaderOut)->SetPrivateData( WKPDID_D3DDebugObjectName, lstrlenA(pFunctionName), pFunctionName );
#endif

    SAFE_RELEASE( pErrorBlob );
    SAFE_RELEASE( pBlob );

    return hr;
}
//--------------------------------------------------------------------------------------
// Create Structured Buffer
//--------------------------------------------------------------------------------------
// Creates a default-usage structured buffer of uCount elements, each
// uElementSize bytes, bindable as both SRV and UAV. When pInitData is
// non-NULL the buffer is created pre-filled with that data.
HRESULT CreateStructuredBuffer( ID3D11Device* pDevice, UINT uElementSize, UINT uCount, VOID* pInitData, ID3D11Buffer** ppBufOut )
{
    *ppBufOut = NULL;

    D3D11_BUFFER_DESC desc = { 0 };
    desc.BindFlags = D3D11_BIND_UNORDERED_ACCESS | D3D11_BIND_SHADER_RESOURCE;
    desc.ByteWidth = uElementSize * uCount;
    desc.MiscFlags = D3D11_RESOURCE_MISC_BUFFER_STRUCTURED;
    desc.StructureByteStride = uElementSize;

    if ( !pInitData )
        return pDevice->CreateBuffer( &desc, NULL, ppBufOut );

    D3D11_SUBRESOURCE_DATA InitData;
    InitData.pSysMem = pInitData;
    return pDevice->CreateBuffer( &desc, &InitData, ppBufOut );
}
//--------------------------------------------------------------------------------------
// Create Raw Buffer
//--------------------------------------------------------------------------------------
// Creates a uSize-byte raw (byte-address) buffer that allows raw views and
// can additionally be bound as a vertex or index buffer. When pInitData is
// non-NULL the buffer is created pre-filled with that data.
HRESULT CreateRawBuffer( ID3D11Device* pDevice, UINT uSize, VOID* pInitData, ID3D11Buffer** ppBufOut )
{
    *ppBufOut = NULL;

    D3D11_BUFFER_DESC desc = { 0 };
    desc.BindFlags = D3D11_BIND_UNORDERED_ACCESS | D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_INDEX_BUFFER | D3D11_BIND_VERTEX_BUFFER;
    desc.ByteWidth = uSize;
    desc.MiscFlags = D3D11_RESOURCE_MISC_BUFFER_ALLOW_RAW_VIEWS;

    if ( !pInitData )
        return pDevice->CreateBuffer( &desc, NULL, ppBufOut );

    D3D11_SUBRESOURCE_DATA InitData;
    InitData.pSysMem = pInitData;
    return pDevice->CreateBuffer( &desc, &InitData, ppBufOut );
}
//--------------------------------------------------------------------------------------
// Create Shader Resource View for Structured or Raw Buffers
//--------------------------------------------------------------------------------------
// Creates a BUFFEREX shader-resource view over pBuffer, inferring from the
// buffer's misc flags whether it is a raw or a structured buffer. Returns
// E_INVALIDARG for any other buffer kind.
HRESULT CreateBufferSRV( ID3D11Device* pDevice, ID3D11Buffer* pBuffer, ID3D11ShaderResourceView** ppSRVOut )
{
    D3D11_BUFFER_DESC descBuf;
    ZeroMemory( &descBuf, sizeof(descBuf) );
    pBuffer->GetDesc( &descBuf );

    D3D11_SHADER_RESOURCE_VIEW_DESC desc;
    ZeroMemory( &desc, sizeof(desc) );
    desc.ViewDimension = D3D11_SRV_DIMENSION_BUFFEREX;
    desc.BufferEx.FirstElement = 0;

    if ( descBuf.MiscFlags & D3D11_RESOURCE_MISC_BUFFER_ALLOW_RAW_VIEWS )
    {
        // Raw buffer: R32_TYPELESS view over 4-byte elements.
        desc.Format = DXGI_FORMAT_R32_TYPELESS;
        desc.BufferEx.Flags = D3D11_BUFFEREX_SRV_FLAG_RAW;
        desc.BufferEx.NumElements = descBuf.ByteWidth / 4;
        return pDevice->CreateShaderResourceView( pBuffer, &desc, ppSRVOut );
    }

    if ( descBuf.MiscFlags & D3D11_RESOURCE_MISC_BUFFER_STRUCTURED )
    {
        // Structured buffer: format stays UNKNOWN, element count from stride.
        desc.Format = DXGI_FORMAT_UNKNOWN;
        desc.BufferEx.NumElements = descBuf.ByteWidth / descBuf.StructureByteStride;
        return pDevice->CreateShaderResourceView( pBuffer, &desc, ppSRVOut );
    }

    // Neither raw nor structured: unsupported buffer kind for this helper.
    return E_INVALIDARG;
}
//--------------------------------------------------------------------------------------
// Create Unordered Access View for Structured or Raw Buffers
//--------------------------------------------------------------------------------------
// Creates an unordered-access view over pBuffer, inferring from the buffer's
// misc flags whether it is a raw or a structured buffer. Returns
// E_INVALIDARG for any other buffer kind.
HRESULT CreateBufferUAV( ID3D11Device* pDevice, ID3D11Buffer* pBuffer, ID3D11UnorderedAccessView** ppUAVOut )
{
    D3D11_BUFFER_DESC descBuf;
    ZeroMemory( &descBuf, sizeof(descBuf) );
    pBuffer->GetDesc( &descBuf );

    D3D11_UNORDERED_ACCESS_VIEW_DESC desc;
    ZeroMemory( &desc, sizeof(desc) );
    desc.ViewDimension = D3D11_UAV_DIMENSION_BUFFER;
    desc.Buffer.FirstElement = 0;

    if ( descBuf.MiscFlags & D3D11_RESOURCE_MISC_BUFFER_ALLOW_RAW_VIEWS )
    {
        // Raw UAV: format is required to be DXGI_FORMAT_R32_TYPELESS.
        desc.Format = DXGI_FORMAT_R32_TYPELESS;
        desc.Buffer.Flags = D3D11_BUFFER_UAV_FLAG_RAW;
        desc.Buffer.NumElements = descBuf.ByteWidth / 4;
        return pDevice->CreateUnorderedAccessView( pBuffer, &desc, ppUAVOut );
    }

    if ( descBuf.MiscFlags & D3D11_RESOURCE_MISC_BUFFER_STRUCTURED )
    {
        // Structured UAV: format is required to be DXGI_FORMAT_UNKNOWN.
        desc.Format = DXGI_FORMAT_UNKNOWN;
        desc.Buffer.NumElements = descBuf.ByteWidth / descBuf.StructureByteStride;
        return pDevice->CreateUnorderedAccessView( pBuffer, &desc, ppUAVOut );
    }

    // Neither raw nor structured: unsupported buffer kind for this helper.
    return E_INVALIDARG;
}
//--------------------------------------------------------------------------------------
// Create a CPU accessible buffer and download the content of a GPU buffer into it
// This function is very useful for debugging CS programs
//--------------------------------------------------------------------------------------
// Clones pBuffer's size into a CPU-mappable STAGING buffer and schedules a
// GPU->staging copy of its contents. Returns the staging buffer, or NULL
// when creation fails. Very useful for inspecting CS output on the CPU.
ID3D11Buffer* CreateAndCopyToDebugBuf( ID3D11Device* pDevice, ID3D11DeviceContext* pd3dImmediateContext, ID3D11Buffer* pBuffer )
{
    D3D11_BUFFER_DESC desc;
    ZeroMemory( &desc, sizeof(desc) );
    pBuffer->GetDesc( &desc );

    // Same size as the source, but CPU-readable and unbindable.
    desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
    desc.Usage = D3D11_USAGE_STAGING;
    desc.BindFlags = 0;
    desc.MiscFlags = 0;

    ID3D11Buffer* debugbuf = NULL;
    if ( FAILED( pDevice->CreateBuffer( &desc, NULL, &debugbuf ) ) )
        return debugbuf;    // NULL on failure

#if defined(DEBUG) || defined(PROFILE)
    debugbuf->SetPrivateData( WKPDID_D3DDebugObjectName, sizeof( "Debug" ) - 1, "Debug" );
#endif
    pd3dImmediateContext->CopyResource( debugbuf, pBuffer );
    return debugbuf;
}
//--------------------------------------------------------------------------------------
// Run CS
//--------------------------------------------------------------------------------------
// Binds the CS with its SRV inputs and UAV output, optionally uploads
// dwNumDataBytes from pCSData into the constant buffer pCBCS (mapped with
// WRITE_DISCARD), dispatches an X x Y x Z grid of thread groups, then unbinds
// everything so the resources can be freely used (e.g. copied) afterwards.
void RunComputeShader( ID3D11DeviceContext* pd3dImmediateContext,
                       ID3D11ComputeShader* pComputeShader,
                       UINT nNumViews, ID3D11ShaderResourceView** pShaderResourceViews,
                       ID3D11Buffer* pCBCS, void* pCSData, DWORD dwNumDataBytes,
                       ID3D11UnorderedAccessView* pUnorderedAccessView,
                       UINT X, UINT Y, UINT Z )
{
    pd3dImmediateContext->CSSetShader( pComputeShader, NULL, 0 );
    pd3dImmediateContext->CSSetShaderResources( 0, nNumViews, pShaderResourceViews );
    pd3dImmediateContext->CSSetUnorderedAccessViews( 0, 1, &pUnorderedAccessView, NULL );
    if ( pCBCS )
    {
        // Upload the caller-supplied constants before dispatching.
        D3D11_MAPPED_SUBRESOURCE MappedResource;
        pd3dImmediateContext->Map( pCBCS, 0, D3D11_MAP_WRITE_DISCARD, 0, &MappedResource );
        memcpy( MappedResource.pData, pCSData, dwNumDataBytes );
        pd3dImmediateContext->Unmap( pCBCS, 0 );
        ID3D11Buffer* ppCB[1] = { pCBCS };
        pd3dImmediateContext->CSSetConstantBuffers( 0, 1, ppCB );
    }

    pd3dImmediateContext->Dispatch( X, Y, Z );

    // Unbind everything: leaving the UAV/SRVs bound would prevent the same
    // resources from being bound elsewhere or copied later.
    pd3dImmediateContext->CSSetShader( NULL, NULL, 0 );

    ID3D11UnorderedAccessView* ppUAViewNULL[1] = { NULL };
    pd3dImmediateContext->CSSetUnorderedAccessViews( 0, 1, ppUAViewNULL, NULL );

    ID3D11ShaderResourceView* ppSRVNULL[2] = { NULL, NULL };
    pd3dImmediateContext->CSSetShaderResources( 0, 2, ppSRVNULL );

    ID3D11Buffer* ppCBNULL[1] = { NULL };
    pd3dImmediateContext->CSSetConstantBuffers( 0, 1, ppCBNULL );
}
//--------------------------------------------------------------------------------------
// Tries to find the location of the shader file
// This is a trimmed down version of DXUTFindDXSDKMediaFileCch. It only addresses the
// following issue to allow the sample correctly run from within Sample Browser directly
//
// When running the sample from the Sample Browser directly, the executables are located
// in $(DXSDK_DIR)\Samples\C++\Direct3D11\Bin\x86 or x64\, however the shader file is
// in the sample's own dir
//--------------------------------------------------------------------------------------
// Locates strFilename either in the current directory or in the sample's
// source directory (%EXE_DIR%\..\..\%EXE_NAME%), writing the resolved path
// into strDestPath (cchDest characters). Returns S_OK when the file is
// found, E_INVALIDARG on bad arguments, and E_FAIL when missing (strDestPath
// then contains strFilename unchanged).
//
// BUG FIX: the success paths returned `true`, which converts to HRESULT 1 ==
// S_FALSE rather than S_OK. SUCCEEDED(S_FALSE) happens to be true, so the
// sample worked by accident, but any caller comparing against S_OK would
// misbehave; return S_OK explicitly.
HRESULT FindDXSDKShaderFileCch( __in_ecount(cchDest) WCHAR* strDestPath,
                                int cchDest,
                                __in LPCWSTR strFilename )
{
    if( NULL == strFilename || strFilename[0] == 0 || NULL == strDestPath || cchDest < 10 )
        return E_INVALIDARG;

    // Get the exe name, and exe path
    WCHAR strExePath[MAX_PATH] =
    {
        0
    };
    WCHAR strExeName[MAX_PATH] =
    {
        0
    };
    WCHAR* strLastSlash = NULL;
    GetModuleFileName( NULL, strExePath, MAX_PATH );
    strExePath[MAX_PATH - 1] = 0;
    strLastSlash = wcsrchr( strExePath, TEXT( '\\' ) );
    if( strLastSlash )
    {
        wcscpy_s( strExeName, MAX_PATH, &strLastSlash[1] );

        // Chop the exe name from the exe path
        *strLastSlash = 0;

        // Chop the .exe from the exe name
        strLastSlash = wcsrchr( strExeName, TEXT( '.' ) );
        if( strLastSlash )
            *strLastSlash = 0;
    }

    // Search in directories:
    //      .\
    //      %EXE_DIR%\..\..\%EXE_NAME%
    wcscpy_s( strDestPath, cchDest, strFilename );
    if( GetFileAttributes( strDestPath ) != 0xFFFFFFFF )
        return S_OK;

    swprintf_s( strDestPath, cchDest, L"%s\\..\\..\\%s\\%s", strExePath, strExeName, strFilename );
    if( GetFileAttributes( strDestPath ) != 0xFFFFFFFF )
        return S_OK;

    // On failure, return the file as the path but also return an error code
    wcscpy_s( strDestPath, cchDest, strFilename );
    return E_FAIL;
}
#!/bin/bash
# Open an SSH session to the host "mystiquew" and run socat there, so that
# TCP connections to port 20080 on that host are forwarded to the CBW device
# at 192.168.100.212:80. Blocks for as long as the SSH session runs.
ssh -t field@mystiquew 'socat TCP-LISTEN:20080,fork TCP:192.168.100.212:80'
# NOTE(review): this echo only runs after the ssh/socat session exits, and
# 192.168.101.112 differs from the forwarded target above — presumably it is
# mystiquew's own address; confirm.
echo "Connect to CBW with: http://192.168.101.112:20080"
|
<gh_stars>0
// app.js
// create our angular app and inject ngAnimate and ui-router
// =============================================================================
// Multi-step registration form: ui-router nested states under /form, one
// child state per wizard section, plus the controller that loads the form's
// reference data from the backend.
angular.module('formApp', ['ngAnimate', 'ui.router'])

// configuring our routes
// =============================================================================
.config(function($stateProvider, $urlRouterProvider) {

    $stateProvider

        // route to show our basic form (/form)
        .state('form', {
            url: '/form',
            templateUrl: 'form.html',
            controller: 'formController'
        })

        // nested states
        // each of these sections will have their own view
        // url will be nested (/form/profile)
        .state('form.profile', {
            url: '/profile',
            templateUrl: 'form-profile.html'
        })

        // url will be /form/interests
        .state('form.interests', {
            url: '/interests',
            templateUrl: 'form-interests.html'
        })

        // url will be /form/activity
        .state('form.activity', {
            url: '/activity',
            templateUrl: 'form-activity.html'
        })

        // url will be /form/alcohol
        .state('form.alcohol', {
            url: '/alcohol',
            templateUrl: 'form-alcohol.html'
        })

        // url will be /form/security
        .state('form.security', {
            url: '/security',
            templateUrl: 'form-security.html'
        })

        // url will be /form/waiver
        // NOTE(review): templateUrl 'form-app.html' breaks the
        // form-<section>.html naming used by the other states — confirm this
        // is the intended template for the waiver step.
        .state('form.waiver', {
            url: '/waiver',
            templateUrl: 'form-app.html'
        })

        // url will be /form/payment
        // NOTE(review): templateUrl 'form-site.html' — same naming mismatch
        // as the waiver state above; confirm.
        .state('form.payment', {
            url: '/payment',
            templateUrl: 'form-site.html'
        });

    // catch all route
    // send users to the first step of the form
    $urlRouterProvider.otherwise('/form/profile');
})

// our controller for the form
// =============================================================================
.controller('formController', function($scope, $http) {

    // Reference data fetched from the backend; null until loaded.
    $scope.StateList = null;
    $scope.CatList = null;
    $scope.DataList = null;

    // we will store all of our form data in this object
    $scope.formData = {};
    // Pre-select insurance option id 30 by default.
    $scope.formData.ioptions = {
        ids: {30: true}
    };

    // Declaring the function to load data from database.
    // NOTE(review): $http's .success() is deprecated (removed in AngularJS
    // 1.6); migrate to .then(function(response) { response.data... }) if the
    // AngularJS version is ever upgraded.
    $scope.fillStateList = function () {
        $http.get("api/states")
            .success(function(response){
                $scope.StateList = response.states;
                $scope.CatList = response.cats;
                $scope.DataList = response.dataform;
                // Dropdown lists arrive as a positional array; the indices
                // used here are presumed stable — confirm against the API.
                $scope.ConstructionType = response.dropdowns[0].options;
                $scope.LocationType = response.dropdowns[1].options;
                $scope.Security = response.dropdowns[2].options;
                $scope.Railings = response.dropdowns[5].options;
                $scope.Options = response.InsureOptions;
                $scope.Wedding = response.WebOptions;

                // default values pre-filled from the fetched form data
                $scope.formData.location_guest = $scope.DataList.guests;
                $scope.formData.start_time = $scope.DataList.start_time;
                $scope.formData.end_time = $scope.DataList.end_time;
                $scope.formData.event_type = {id: parseInt($scope.DataList.eventtype)};
                $scope.formData.subtotal = $scope.DataList.estimate;
                // alert($scope.Security[0].name);
            });
    };

    // Calling the function to load the data on pageload
    $scope.fillStateList();

    // function to process the form
    $scope.processForm = function() {
        alert('awesome!');
    };
});
|
<reponame>Farhanramadhana/financeManager
import { ApiProperty } from '@nestjs/swagger';
/**
 * Request payload for creating a finance account.
 * Fields decorated with @ApiProperty appear in the generated Swagger schema.
 */
export class CreateFinanceAccountDto {
  // Display name of the account.
  @ApiProperty()
  accountName: string;

  // Opening balance of the account.
  @ApiProperty()
  balance: number;

  // Account category; the set of valid values is not visible here — confirm.
  @ApiProperty()
  type: string;

  // Free-form description of the account.
  @ApiProperty()
  description: string;

  // Owning user id. Optional and deliberately absent from the Swagger schema
  // (no @ApiProperty) — presumably filled in server-side from the auth
  // context; confirm with the service that consumes this DTO.
  user?: string;
}
|
#!/bin/bash
# Gentoo installer, phase 1 (run from the live environment on /dev/vda):
# partitions the disk, unpacks the latest stage3 tarball, prepares the chroot,
# and hands off to install-chroot.sh. DESTRUCTIVE: wipes /dev/vda.

# creating a single partition (msdos label, whole disk, marked bootable).
parted -s -a optimal /dev/vda -- mklabel msdos mkpart primary 1 -1 set 1 boot on
# creating file systems.
mkfs.ext4 -F /dev/vda1
# mounting the root partition
mount /dev/vda1 /mnt/gentoo
# copying chrooted install script
cp install-chroot.sh /mnt/gentoo/
# creating swapfile and swapon (4GB)
dd if=/dev/zero of=/mnt/gentoo/swapfile bs=1M count=4096
mkswap /mnt/gentoo/swapfile
swapon /mnt/gentoo/swapfile
# setting the datetime (one-shot NTP sync; -g allows a large initial step)
ntpd -q -g
# downloading the gentoo stage tarball: the latest-stage3 index file lists the
# current tarball path, which is extracted and turned into a full mirror URL.
cd /mnt/gentoo
curl http://ftp.iij.ad.jp/pub/linux/gentoo/releases/amd64/autobuilds/latest-stage3-amd64.txt | grep stage3-amd64 | sed -e 's/^/http:\/\/ftp.iij.ad.jp\/pub\/linux\/gentoo\/releases\/amd64\/autobuilds\//g' | sed -e 's/ [0-9]*$//g' | xargs curl -O
# unpacking the stage tarball (preserve permissions, xattrs and numeric owners)
tar xpf stage3-* --xattrs-include='*.*' --numeric-owner
# TODO : edit /etc/portage/make.conf
# TODO : setting mirrors
# copy DNS info
cp --dereference /etc/resolv.conf /mnt/gentoo/etc/
# mounting the necessary filesystems
mount --types proc /proc /mnt/gentoo/proc
mount --rbind /sys /mnt/gentoo/sys
mount --make-rslave /mnt/gentoo/sys
mount --rbind /dev /mnt/gentoo/dev
mount --make-rslave /mnt/gentoo/dev
# chroot: entering the new env
chroot /mnt/gentoo /install-chroot.sh
# finalize: unmount everything and reboot into the installed system.
cd
umount -l /mnt/gentoo/dev{/shm,/pts,}
umount -R /mnt/gentoo
reboot
|
#!/bin/bash
function check_pkgs ()
{
    # Return 0 when every package named in "$@" is installed AND not pending
    # an update (according to apt-indicator-checker); return 1 otherwise.
    pkgs=$@
    echolog "Alt. check_pkgs pkgs: $pkgs"
    upd_pkgs=`apt-indicator-checker`
    for pkg in $pkgs
    do
        # FIX: the original grepped rpm's Russian "не установлен" message,
        # which breaks under any other locale. `rpm -q` exits non-zero when
        # the package is not installed, so use the exit status instead.
        if ! rpm -q "$pkg" > /dev/null 2>&1
        then
            echolog "pkg $pkg is not installed"
            return 1
        fi
        # apt-indicator-checker lists packages with pending updates.
        if [[ "`echo -e "$upd_pkgs" | grep -w $pkg`" ]]
        then
            echolog "pkg $pkg is not updated"
            return 1
        fi
    done
    return 0
}
function _install_packages ()
{
    # Install the common smart-card package set via apt-get, or — when "$1"
    # is non-empty — only verify that the packages are installed and current
    # (delegating to check_pkgs). Returns 0 on success, 1 on failure.
    check_update="$1"
    local pkgs="librtpkcs11ecp opensc pcsc-lite-ccid pcsc-lite libp11 pcsc-tools openssl-engine_pkcs11 python3-modules-tkinter dialog pam_pkcs11 pam_p11 nss-utils"
    # BUG FIX: the original tested $check_updates (trailing "s"), a variable
    # that is never assigned, so the check-only mode was unreachable via the
    # function's argument. Test the variable actually set above.
    if [[ "$check_update" ]]
    then
        echolog "Alt. check common packages"
        check_pkgs $pkgs
        return $?
    fi
    echolog "Alt.install common packages"
    apt-get -qq update
    out=`apt-get -qq install $pkgs`
    if [[ $? -ne 0 ]]
    then
        echoerr "Не могу установить один из пакетов: $pkgs из репозитория:\n$out"
        return 1
    fi
    # pcscd must be restarted so it picks up the freshly installed drivers.
    systemctl restart pcscd
    return 0
}
function _setup_local_authentication ()
{
    # Configure pam_pkcs11 certificate-based local login for user $3, using
    # certificate id $2 stored on token $1. Exports the certificate, adds it
    # to the system trust store, renders pam_pkcs11.conf from a template,
    # maps the cert's SHA-1 fingerprint to the user, and switches PAM's
    # system-auth to the pkcs11-enabled stack. Returns 0 on success, 1 on error.
    token=$1
    cert_id=$2
    user=$3
    # NOTE(review): the message interpolates $cert, which is never assigned
    # in this function (probably meant $cert_id) — the log prints it empty.
    echolog "Alt. setup local authentication for user: $user by cert: $cert on token: $token"
    DB="$PAM_PKCS11_DIR/nssdb"
    echolog "DB path is $DB"
    mkdir -p "$DB" 2> /dev/null;
    # Initialize the NSS DB only when the directory is empty.
    # NOTE(review): the stray "$" before the backtick command substitution
    # looks unintended (likely meant "$(ls -A $DB)"); as written the test
    # string always starts with a literal "$" and is therefore never empty —
    # confirm the intended "init only when empty" behavior.
    if ! [ "$`ls -A $DB`" ]
    then
        chmod 0644 "$DB"
        echolog "init DB"
        # Create an empty NSS certificate database with no password.
        certutil -d "$DB" -N --empty-password
    fi
    echolog "Add trusted lib to DB"
    # Register the p11-kit trust module; the piped newline answers the prompt.
    echo -e "\n" | modutil -dbdir "$DB" -add p11-kit-trust -libfile /usr/lib64/pkcs11/p11-kit-trust.so 2> /dev/null
    # Export the certificate from the token into cert<id>.crt.
    export_object "$token" "cert" "$cert_id" "cert${cert_id}.crt"
    if [[ $? -ne 0 ]]
    then
        echoerr "Cert $cert_id is not exported to cert${cert_id}.crt"
        return 1
    fi
    # Install the certificate as a trusted CA anchor.
    mkdir -p /etc/pki/ca-trust/source/anchors/
    cp "cert${cert_id}.crt" /etc/pki/ca-trust/source/anchors/
    if [[ $? -ne 0 ]]
    then
        echoerr "Can't copy cert${cert_id}.crt to /etc/pki/ca-trust/source/anchors/"
        return 1
    fi
    update-ca-trust force-enable
    update-ca-trust extract
    echolog "Update CA DB"
    # Render pam_pkcs11.conf from the template, keeping the old config as
    # .default; envsubst fills in the library path and config directory.
    mv "$PAM_PKCS11_DIR/pam_pkcs11.conf" "$PAM_PKCS11_DIR/pam_pkcs11.conf.default" 2> /dev/null;
    mkdir "$PAM_PKCS11_DIR/cacerts" "$PAM_PKCS11_DIR/crls" 2> /dev/null;
    mkdir "$PAM_PKCS11_DIR" 2> /dev/null
    LIBRTPKCS11ECP="$LIBRTPKCS11ECP" PAM_PKCS11_DIR="$PAM_PKCS11_DIR" envsubst < "$TWO_FA_LIB_DIR/common_files/pam_pkcs11.conf" | tee "$PAM_PKCS11_DIR/pam_pkcs11.conf" > /dev/null
    echolog "Create $PAM_PKCS11_DIR/pam_pkcs11.conf"
    # Append "<SHA1-fingerprint, colon-separated, upper-case> -> <user>" to
    # pam_pkcs11's digest mapping file.
    openssl dgst -sha1 "cert${cert_id}.crt" | cut -d" " -f2- | awk '{ print toupper($0) }' | sed 's/../&:/g;s/:$//' | sed "s/.*/\0 -> $user/" | tee "$PAM_PKCS11_DIR/digest_mapping" -a > /dev/null
    echolog "update digest map file $PAM_PKCS11_DIR/digest_mapping"
    # Point PAM's system-auth at the pkcs11-enabled stack (idempotent: only
    # when the module path is not yet present in system-auth-pkcs11).
    sys_auth="/etc/pam.d/system-auth"
    if [[ -z "`cat "${sys_auth}-pkcs11" | grep "pkcs11_module=$LIBRTPKCS11ECP"`" ]]
    then
        cp "$sys_auth" "${sys_auth}.old"
        rm /etc/pam.d/system-auth
        sed -i "/^.*pam_pkcs11.*$/ s/$/ pkcs11_module=${LIBRTPKCS11ECP//\//\\/}/" "${sys_auth}-pkcs11"
        ln -s "${sys_auth}-pkcs11" "$sys_auth"
        echolog "Update pam.d file $sys_auth"
    fi
    return 0
}
# Install the autostart entry that locks the screen via the screensaver
# when the smart card is removed.
function _setup_autolock ()
{
local desktop_entry="smartcard-screensaver.desktop"
echolog "Alt. setup_autolock"
cp "$IMPL_DIR/$desktop_entry" "/etc/xdg/autostart/$desktop_entry"
return 0
}
# FreeIPA variant: nothing extra to configure on this host.
function _setup_freeipa_domain_authentication () {
echolog "Alt. There is no additional action required to setup freeipa domain auth"
return 0
}
# Active Directory variant: nothing extra to configure on this host.
function _setup_ad_domain_authentication () {
echolog "Alt. There is no additional action required to setup ad domain auth"
return 0
}
|
#!/bin/bash
# Provision a CentOS box as a single-node Kubernetes cluster for CI:
# relax SELinux/swap/firewall, install docker + kubeadm/kubelet/kubectl,
# bring a cluster up once to verify it, then reset it and leave a
# kubeadm.conf behind for a later re-init.
set -ex
# Permissive SELinux now and after reboot.
setenforce 0
sed -i "s/^SELINUX=.*/SELINUX=permissive/" /etc/selinux/config
# Disable swap
swapoff -a
sed -i '/ swap / s/^/#/' /etc/fstab
# Disable spectre and meltdown patches
sed -i 's/quiet"/quiet spectre_v2=off nopti hugepagesz=2M hugepages=64"/' /etc/default/grub
grub2-mkconfig -o /boot/grub2/grub.cfg
systemctl stop firewalld NetworkManager || :
systemctl disable firewalld NetworkManager || :
# Make sure the firewall is never enabled again
# Enabling the firewall destroys the iptable rules
yum -y remove NetworkManager firewalld
# Required for iscsi demo to work.
yum -y install iscsi-initiator-utils
cat <<EOF >/etc/yum.repos.d/kubernetes.repo
[kubernetes]
name=Kubernetes
baseurl=http://yum.kubernetes.io/repos/kubernetes-el7-x86_64
enabled=1
gpgcheck=1
repo_gpgcheck=1
gpgkey=https://packages.cloud.google.com/yum/doc/yum-key.gpg
https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg
EOF
yum install -y docker
# Log to json files instead of journald
sed -i 's/--log-driver=journald //g' /etc/sysconfig/docker
echo '{ "insecure-registries" : ["registry:5000"] }' > /etc/docker/daemon.json
# Enable the permanent logging
# Required by the fluentd journald plugin
# The default settings in recent distribution for systemd is set to auto,
# when on auto journal is permament when /var/log/journal exists
mkdir -p /var/log/journal
# Omit pgp checks until https://github.com/kubernetes/kubeadm/issues/643 is resolved.
yum install --nogpgcheck -y \
kubeadm-${version} \
kubelet-${version} \
kubectl-${version} \
kubernetes-cni \
openvswitch
systemctl start openvswitch
systemctl enable openvswitch
ovs-vsctl add-br br1
# Latest docker on CentOS uses systemd for cgroup management
# kubeadm 1.11 uses a new config method for the kubelet
# NOTE: the regex captures the minor version (e.g. "11" from "1.11.x")
# into BASH_REMATCH[1]; later blocks reuse BASH_REMATCH.
if [[ $version =~ \.([0-9]+) ]] && [[ ${BASH_REMATCH[1]} -ge "11" ]]; then
# TODO use config file! this is deprecated
cat <<EOT >/etc/sysconfig/kubelet
KUBELET_EXTRA_ARGS=--cgroup-driver=systemd --runtime-cgroups=/systemd/system.slice --kubelet-cgroups=/systemd/system.slice --feature-gates=BlockVolume=true
EOT
else
cat <<EOT >>/etc/systemd/system/kubelet.service.d/09-kubeadm.conf
[Service]
Environment="KUBELET_EXTRA_ARGS=--cgroup-driver=systemd --runtime-cgroups=/systemd/system.slice --kubelet-cgroups=/systemd/system.slice --feature-gates=BlockVolume=true"
EOT
fi
systemctl daemon-reload
systemctl enable docker && systemctl start docker
systemctl enable kubelet && systemctl start kubelet
# Needed for kubernetes service routing and dns
# https://github.com/kubernetes/kubernetes/issues/33798#issuecomment-250962627
modprobe bridge
cat <<EOF > /etc/sysctl.d/k8s.conf
net.bridge.bridge-nf-call-ip6tables = 1
net.bridge.bridge-nf-call-iptables = 1
EOF
sysctl --system
# Bring the cluster up once and install the network plugins.
kubeadm init --pod-network-cidr=10.244.0.0/16 --kubernetes-version v${version} --token abcdef.1234567890123456
kubectl --kubeconfig=/etc/kubernetes/admin.conf create -f /tmp/flannel.yaml
kubectl --kubeconfig=/etc/kubernetes/admin.conf create -f /tmp/kubernetes-multus.yaml
kubectl --kubeconfig=/etc/kubernetes/admin.conf create -f /tmp/ovs.yaml
# Wait at least for one pod
while [ -z "$(kubectl --kubeconfig=/etc/kubernetes/admin.conf get pods -n kube-system | grep kube)" ]; do
echo "Waiting for at least one pod ..."
kubectl --kubeconfig=/etc/kubernetes/admin.conf get pods -n kube-system
sleep 10
done
# Wait until k8s pods are running
while [ -n "$(kubectl --kubeconfig=/etc/kubernetes/admin.conf get pods -n kube-system --no-headers | grep -v Running)" ]; do
echo "Waiting for k8s pods to enter the Running state ..."
kubectl --kubeconfig=/etc/kubernetes/admin.conf get pods -n kube-system --no-headers | >&2 grep -v Running || true
sleep 10
done
# Make sure all containers are ready
while [ -n "$(kubectl --kubeconfig=/etc/kubernetes/admin.conf get pods -n kube-system -o'custom-columns=status:status.containerStatuses[*].ready,metadata:metadata.name' --no-headers | grep false)" ]; do
echo "Waiting for all containers to become ready ..."
kubectl --kubeconfig=/etc/kubernetes/admin.conf get pods -n kube-system -o'custom-columns=status:status.containerStatuses[*].ready,metadata:metadata.name' --no-headers
sleep 10
done
kubectl --kubeconfig=/etc/kubernetes/admin.conf get pods -n kube-system
reset_command="kubeadm reset"
admission_flag="admission-control"
# k8s 1.11 needs some changes
if [[ $version =~ \.([0-9]+) ]] && [[ ${BASH_REMATCH[1]} -ge "11" ]]; then
# k8s 1.11 asks for confirmation on kubeadm reset, which can be suppressed by a new force flag
reset_command="kubeadm reset --force"
# k8s 1.11 uses new flags for admission plugins
# old one is deprecated only, but can not be combined with new one, which is used in api server config created by kubeadm
admission_flag="enable-admission-plugins"
fi
# Tear the verification cluster back down; a kubeadm.conf for re-init is
# written below.
$reset_command
# TODO new format since 1.11, this old format will be removed with 1.12, see https://kubernetes.io/docs/reference/setup-tools/kubeadm/kubeadm-init/#config-file
cat > /etc/kubernetes/kubeadm.conf <<EOF
apiVersion: kubeadm.k8s.io/v1alpha1
kind: MasterConfiguration
apiServerExtraArgs:
runtime-config: admissionregistration.k8s.io/v1alpha1
${admission_flag}: Initializers,NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota
feature-gates: "BlockVolume=true"
controllerManagerExtraArgs:
feature-gates: "BlockVolume=true"
token: abcdef.1234567890123456
kubernetesVersion: ${version}
networking:
podSubnet: 10.244.0.0/16
EOF
# New configuration for kubernetes >= 1.12
# NOTE(review): comment says ">= 1.12" but the test is minor >= 11 and
# reuses BASH_REMATCH from the earlier match — confirm which is intended.
if [[ ${BASH_REMATCH[1]} -ge "11" ]]; then
cat > /etc/kubernetes/kubeadm.conf <<EOF
apiVersion: kubeadm.k8s.io/v1alpha3
bootstrapTokens:
- groups:
- system:bootstrappers:kubeadm:default-node-token
token: abcdef.1234567890123456
ttl: 24h0m0s
usages:
- signing
- authentication
kind: InitConfiguration
---
apiServerExtraArgs:
enable-admission-plugins: Initializers,NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota
feature-gates: BlockVolume=true
runtime-config: admissionregistration.k8s.io/v1alpha1
apiVersion: kubeadm.k8s.io/v1alpha3
controllerManagerExtraArgs:
feature-gates: BlockVolume=true
kind: ClusterConfiguration
kubernetesVersion: ${version}
networking:
podSubnet: 10.244.0.0/16
EOF
fi
# Create local-volume directories
for i in {1..10}
do
mkdir -p /var/local/kubevirt-storage/local-volume/disk${i}
mkdir -p /mnt/local-storage/local/disk${i}
echo "/var/local/kubevirt-storage/local-volume/disk${i} /mnt/local-storage/local/disk${i} none defaults,bind 0 0" >> /etc/fstab
done
chmod -R 777 /var/local/kubevirt-storage/local-volume
# Setup selinux permissions to local volume directories.
chcon -R unconfined_u:object_r:svirt_sandbox_file_t:s0 /mnt/local-storage/
# Pre pull fluentd image used in logging
docker pull fluent/fluentd:v1.2-debian
docker pull fluent/fluentd-kubernetes-daemonset:v1.2-debian-syslog
|
<gh_stars>1-10
package com.keildraco.config.exceptions;
/**
 * Signals that a parse tree node had a type this library does not
 * recognize. Unchecked, since such a node indicates a programming or
 * grammar error rather than a recoverable condition.
 *
 * @author <NAME>
 */
public class UnknownParseTreeTypeException extends RuntimeException {

    /** Serialization id (keep stable across releases). */
    private static final long serialVersionUID = 4584871943362881729L;

    /**
     * @param message detail message describing the unrecognized tree type
     */
    public UnknownParseTreeTypeException(final String message) {
        super(message);
    }
}
|
<gh_stars>0
/* Libraries concatenated by the 'build.js' call ('npm run build').
   Only the npm header and the closing bracket are currently active;
   the commented entries are kept for reference. */
const getLibs4Build = function (pLibPath) {
    return [
        './src/npm_header.js',
        //pLibPath+'require_mods.js',
        //pLibPath+'arrayhash.js',
        //pLibPath+'handlebars.js',
        //pLibPath+'handlebars_helpers.js',
        //'./src/npm_inherit.js',
        //pLibPath+'exportmod.js'
        './src/closingbracket.js'
    ];
}
// Ordered HTML fragments for the generated page. pBody overrides the
// default body fragment (pPath + 'body.html').
const getHtml4Build = function (pPath, pBody) {
    const vBodyFile = pBody || pPath + 'body.html';
    return [
        pPath + 'header.html',
        //'./src/html_title.html',
        pPath + 'title.html',
        pPath + 'datajson.html',
        pPath + 'headerlibs.html',
        pPath + 'headerscript.html',
        pPath + 'bodyheader.html',
        './src/html_description.html',
        vBodyFile,
        pPath + 'bodytail.html',
        './src/html_tail.html',
        pPath + 'tailscript.html',
        pPath + 'tail.html'
    ];
}
// CSS files for the build: bootstrap first so main.css can override it.
const getCss4Build = function (pPath) {
    return [pPath + 'bootstrap.css', pPath + 'main.css'];
}
// Ordered markdown fragments concatenated into the generated README.
// Commented entries are retained for reference only.
const getReadme4Build = function (pPath) {
    return [
        './src/readme_header.md',
        pPath + 'abstract.md',
        pPath + 'headerintro.md',
        pPath + 'doctoc.md',
        pPath + 'demos.md',
        //'./src/readme_install.md',
        pPath + 'installation.md',
        pPath + 'usage.md',
        pPath + 'scanfiles.md',
        pPath + 'wikiversity.md',
        pPath + 'background.md',
        /*
        pPath+'handlebars4code.md',
        pPath+'headerlibs.md',
        pPath+'headerscript.md',
        pPath+'bodyheader.md',
        pPath+'body.md',
        pPath+'bodytail.md',
        */
        //pPath+'jsonschema.md',
        pPath + 'build_process.md',
        pPath + 'loadfile4dom_api.md',
        pPath + 'browserify.md',
        pPath + 'acknowledgement.md',
        './src/readme_devlibs.md',
        './src/readme_tail.md',
        pPath + 'tail.md'
    ];
}
module.exports = {
"getLibs4Build" : getLibs4Build,
"getHtml4Build" : getHtml4Build,
"getCss4Build" : getCss4Build,
"getReadme4Build" : getReadme4Build
}
|
def test_build_args():
    """Every valid manifest yields build args containing its name, and the
    registry shows up when one is passed explicitly."""
    for manifest_path in VALID_MANIFESTS:
        parsed = HardeningManifest.from_yaml(manifest_path)
        default_args = ' '.join(parsed.build_args(context_dir='.'))
        assert parsed.name in default_args
        registry_args = ' '.join(
            parsed.build_args(context_dir='.', registry='docker.io')
        )
        assert 'docker.io' in registry_args
def min_ops(start, target):
    """Return the minimum number of operations to turn *start* into *target*
    using only "multiply by 2" and "subtract 1".

    The original forward greedy (double while below, decrement while above)
    over-counts: e.g. start=5, target=8 took 3 ops (5->10->9->8) where
    2 suffice (5->4->8). Working backwards from target is provably optimal:
    halve when even, increment when odd, then decrement the remainder.
    """
    ops = 0
    while target > start:
        if target % 2 == 0:
            target //= 2  # undo a "multiply by 2"
        else:
            target += 1   # undo a "subtract 1"
        ops += 1
    # target <= start now: only decrements remain.
    return ops + (start - target)
<reponame>fujunwei/dldt
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <vpu/middleend/pass_manager.hpp>
#include <vpu/middleend/sw/utility.hpp>
#include <vpu/model/data.hpp>
#include <precision_utils.h>
#include <set>
#include <memory>
#include <vector>
#include <utility>
namespace vpu {

namespace {

// Pass that rewrites qualifying StubAvgPool stages as a ReduceMean over
// the two spatial axes.
class PassImpl final : public Pass {
public:
    explicit PassImpl(StageBuilder::Ptr stageBuilder) : _stageBuilder(std::move(stageBuilder)) {}

    void run(const Model& model) override;

private:
    StageBuilder::Ptr _stageBuilder;
};

void PassImpl::run(const Model& model) {
    VPU_PROFILE(replaceWithReduceMean);

    for (const auto& stage : model->getStages()) {
        // Only average-pooling stubs are candidates.
        if (stage->type() != StageType::StubAvgPool) {
            continue;
        }

        auto stageInput = stage->input(0);
        const auto& dimValues = stageInput->desc().dims();

        const auto kernelSizeX = stage->attrs().get<int>("kernelSizeX");
        const auto kernelSizeY = stage->attrs().get<int>("kernelSizeY");
        const auto kernelStrideX = stage->attrs().get<int>("kernelStrideX");
        const auto kernelStrideY = stage->attrs().get<int>("kernelStrideY");

        VPU_THROW_UNLESS(
            kernelSizeX > 0 && kernelSizeY > 0,
            "[ReplaceWithReduceMean] Stage %v with type AvgPool has non-positive kernel size",
            stage->name());

        // Global pooling: the kernel covers the whole spatial extent.
        if (dimValues[Dim::W] == kernelSizeX && dimValues[Dim::H] == kernelSizeY) { // GlobalPooling
            // NOTE(review): this skips only when BOTH strides differ from 1;
            // confirm whether either stride != 1 should disqualify (||).
            if (kernelStrideX != 1 && kernelStrideY != 1) {
                continue;
            }

            // TODO: since ReduceMean currently is not fully optimized, we need to discard some common cases
            // (only kernels with at least 2050 elements are rewritten).
            if (kernelSizeX * kernelSizeY < 2050) {
                continue;
            }

            auto origLayer = stage->origLayer();
            auto stageOutput = stage->output(0);
            model->removeStage(stage);

            // Constant 2-element axes blob: reduce over the two innermost dims.
            const auto generator = [&stageInput](const ie::Blob::Ptr& blob) {
                auto buffer = blob->buffer().as<int32_t*>();
                auto numInputDims = stageInput->desc().numDims();

                // H and W are always come last in IE notation
                buffer[0] = numInputDims - 1;
                buffer[1] = numInputDims - 2;
            };

            auto axesData = model->addConstData(origLayer->name + "@axes", DataDesc(DataType::S32, DimsOrder::C, {2}), generator);

            // keepDims = true: output keeps the reduced axes with size 1.
            _stageBuilder->addReduceStage(
                model,
                "ReduceMean",
                StageType::ReduceMean,
                origLayer,
                true,
                {stageInput, axesData},
                stageOutput);
        }
    }
}

}  // namespace

Pass::Ptr PassManager::replaceWithReduceMean() {
    return std::make_shared<PassImpl>(_stageBuilder);
}

}  // namespace vpu
|
<reponame>parti-coop/demosx
package seoul.democracy.proposal.dto;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.hibernate.validator.constraints.NotBlank;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
/**
 * Request payload for updating an existing proposal.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor(staticName = "of")
public class ProposalUpdateDto {

    // Id of the proposal to update; required.
    @NotNull
    private Long id;

    // New title; required, at most 100 characters.
    @NotBlank
    @Size(max = 100)
    private String title;

    // New body text; no constraints declared here.
    private String content;
}
|
def quicksort(array):
    """Return a new list with the elements of *array* sorted ascending.

    Three-way quicksort: partition around the middle element in a single
    pass, then recurse on the strictly-smaller and strictly-larger parts.
    """
    if len(array) <= 1:
        return array
    pivot = array[len(array) // 2]
    smaller, equal, larger = [], [], []
    for item in array:
        if item < pivot:
            smaller.append(item)
        elif item > pivot:
            larger.append(item)
        else:
            equal.append(item)
    return quicksort(smaller) + equal + quicksort(larger)
class control_options:
    """Integer codes for the available motion-control modes.

    Presumably read as constants by the controller selecting a motion
    strategy — do not renumber (TODO confirm against callers).
    """

    position = 1  # moves to x,y position, only forward
    pose = 2  # moves to x,y position and then turn to th, only forward
    momentum = 3  # not implemented
    angular_pose = 4  # turn to th
    direct_speeds = 5  # override control actions
    pose_line = 6  # moves to x,y position and turn to th or (th-180), go forward or backwards
    special_movements = 7
var http = require('http')
  , url = require('url')

var memento = require('memento-client')
  , request = require('hyperquest')

var timeNear = require('./lib/time-near')

// Default wayback host, and a pattern matching archived asset paths
// ("/web/" followed by a 14-digit timestamp).
var WAYBACK_HOST = 'web.archive.org'
  , isAsset = /\/web\/\d{14}/

module.exports = peabody
// Create an HTTP server that proxies requests to an archived copy of a
// site near `timestamp`. `fuzz` bounds how far a memento's datetime may
// deviate (checked via timeNear); `_host` overrides the wayback host.
function peabody(timestamp, fuzz, _host) {
  var host = _host || WAYBACK_HOST

  return http.createServer(handler)

  function handler(req, res) {
    var assetUrl

    // Non-asset URLs go through a memento lookup; "/web/<14 digits>"
    // paths are fetched from the wayback host directly.
    if(!isAsset.test(req.url)) {
      return memento(req.url, timestamp, checkSites)
    }

    assetUrl = url.format({
        host: host
      , pathname: url.parse(req.url).path
      , protocol: 'http'
    })

    request.get(assetUrl, assetRespond)

    function checkSites(err, sites) {
      // NOTE(review): if err is set, `sites` may be undefined and
      // `.length` would throw before the err check below — confirm the
      // memento-client callback contract.
      var noSites = sites.length < 2

      // sites[1] is the candidate memento; reject when missing or too
      // far from the requested timestamp.
      if(err || noSites || !timeNear(sites[1].datetime, timestamp, fuzz)) {
        return notFound(res)
      }

      request.get(sites[1].href, respond)
    }

    function respond(err, response) {
      if(err) {
        return notFound(res)
      }

      // NOTE(review): pipe() starts before writeHead() — confirm headers
      // are still applied before the first data event flushes.
      response.pipe(res)
      res.writeHead(response.statusCode, response.headers)
    }

    function assetRespond(err, response) {
      if(err) {
        return notFound(res)
      }

      var assetUrl

      // Follow wayback 302 redirects for assets manually.
      if(response.statusCode === 302) {
        assetUrl = url.format({
            host: host
          , pathname: response.headers.location
          , protocol: 'http'
        })

        return request.get(assetUrl, assetRespond)
      }

      res.writeHead(response.statusCode, response.headers)

      // Non-2xx responses are forwarded without a body.
      if(response.statusCode < 200 || response.statusCode > 299) {
        return res.end()
      }

      response.pipe(res)
    }
  }
}
// Finish the response with a plain-text 404 when no archived copy of the
// requested resource is available.
function notFound(res) {
  var headers = {'content-type': 'text/plain'}
  res.writeHead(404, headers)
  res.end('Site not available')
}
|
from os import walk
from os.path import splitext, islink, join
from datetime import datetime
def main(urd):
    """Scan an image directory tree with the accelerator and report
    duplicate files (by content hash) into duplicates.txt.

    `urd` is the accelerator build/job coordinator passed in by the
    framework.
    """
    # Change to "path = <mypath>, or modify input_directory in
    # accelerator.conf". Add/remove extensions as appropriate.
    # Use do_scan boolean to turn on/off scanning.
    path = urd.info.input_directory
    validextensions = {'.JPG', '.NEF', '.PNG', '.GIF', '.TIFF', '.BMP'}
    do_scan = True

    def scan(path, verbose=True):
        # Scan all directories recursively and run "scandir" jobs for
        # those directories that contain files matching the
        # "validextensions" set. Return a list of all jobs.
        ix = 0
        jobs = []
        for current, subdirs, files in walk(path):
            # Extension match is case-insensitive; symlinks are skipped.
            files = sorted(x for x in files if splitext(x)[1].upper() in validextensions and not islink(join(current, x)))
            if files:
                jobs.append(urd.build('scandir', directory=current, files=files))
                if verbose:
                    print(ix, current)
                ix += 1
        return jobs

    # scan the path
    if do_scan:
        ts = datetime.now()
        urd.begin('scan', ts)
        jobs = scan(path)
        job = urd.build('dataset_list_to_chain', source=jobs)
        urd.finish('scan')

    # fetch result from scanning, find duplicates
    urd.begin('proc')
    scan = urd.latest('scan')
    ts = scan.timestamp
    job = scan.joblist['dataset_list_to_chain']
    # Hash-partition then sort so identical filehashes end up adjacent.
    job = urd.build('dataset_hashpart', source=job, hashlabel='filehash')
    job = urd.build('dataset_sort', source=job, sort_columns=('filehash', 'filename',))
    dup = urd.build('duplicates', source=job)
    dup.link_result('duplicates.txt')
    urd.finish('proc', ts)
|
#!/bin/bash
# Run one of the com.example.pinpoint example classes through Maven.
# Usage: $0 <ClassName> ["arg1 arg2 ..."]
if [[ -z $* ]] ; then
echo 'Supply the name of one of the example classes as an argument.'
echo 'If there are arguments to the class, put them in quotes after the class name.'
exit 1
fi
export CLASSPATH=target/sdk-pinpoint-examples-1.0.jar
export className=$1
echo "## Running $className..."
# Drop the class name so "$@" holds only the class's own arguments.
shift
echo "## arguments $@..."
mvn exec:java -Dexec.mainClass="com.example.pinpoint.$className" -Dexec.args="$@" -Dexec.cleanupDaemonThreads=false
|
#!/bin/bash
# Install PAC dependencies: xterm, the nanpy Python package, the Arduino
# IDE, generated support files, and the autostart .desktop entry.
echo "Installing xterminal"
sudo apt-get install xterm -y
echo "Done"
echo
echo "Installing nanPy"
# BUG FIX: pip has no "-y" option (that flag belongs to apt-get); pip is
# already non-interactive, and the stray flag made the install fail.
pip3 install nanPy
echo "Done"
echo
echo "Installing Arduino"
sudo apt-get install arduino -y
echo "Done"
echo
echo "Making Dependent files"
python3 makeDependentFiles.py
echo "Done"
echo
echo "Setting up .desktop file"
# BUG FIX: -p so this succeeds when the autostart directory already exists.
mkdir -p /home/pi/.config/autostart
cp pac_startup.desktop /home/pi/.config/autostart/pac_startup.desktop
echo "Done"
echo
|
<reponame>anticipasean/girakkafunc<filename>func-rxjava2/src/main/java/cyclops/rxjava2/io/FlowableIO.java
package cyclops.rxjava2.io;
import cyclops.async.Future;
import cyclops.container.control.Try;
import cyclops.container.immutable.impl.Seq;
import cyclops.rxjava2.adapter.FlowableReactiveSeq;
import cyclops.reactive.IO;
import cyclops.reactive.Managed;
import cyclops.reactive.ReactiveSeq;
import io.reactivex.Flowable;
import io.reactivex.Scheduler;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import org.reactivestreams.Publisher;
/**
 * {@code IO} implementation backed by an RxJava 2 {@link Flowable}; every
 * combinator delegates to the wrapped stream.
 */
@AllArgsConstructor(access = AccessLevel.PRIVATE)
public final class FlowableIO<T> implements IO<T> {

    // Wrapped reactive stream that all operations delegate to.
    private final Flowable<T> flowable;

    /** Wrap an existing Flowable as an IO. */
    public static <T> IO<T> of(Flowable<T> flowable) {
        return new FlowableIO<>(flowable);
    }

    /** Build an IO from a Future via its Publisher view. */
    public static <T> IO<T> of(Future<T> f) {
        return of(Flowable.fromPublisher(f));
    }

    /** IO emitting the single given value. */
    public static <T> IO<T> just(T s) {
        return new FlowableIO<T>(Flowable.just(s));
    }

    /** Defer the supplier until subscription. */
    public static <T> IO<T> of(Supplier<? extends T> s) {
        return new FlowableIO<T>(Flowable.fromCallable(() -> s.get()));
    }

    /** Defer the supplier, subscribing on the given scheduler. */
    public static <T> IO<T> of(Supplier<? extends T> s,
                               Scheduler ex) {
        Flowable<T> x = Flowable.fromCallable(() -> s.get());
        x = x.subscribeOn(ex);
        return new FlowableIO<T>(x);
    }

    /** Wrap an arbitrary Reactive Streams publisher. */
    public static <T> IO<T> fromPublisher(Publisher<T> p) {
        return new FlowableIO<T>(Flowable.fromPublisher(p));
    }

    /** Capture checked exceptions from the supplier into a Try value. */
    public static <T, X extends Throwable> IO<Try<T, X>> withCatch(Try.CheckedSupplier<T, X> cf,
                                                                   Class<? extends X>... classes) {
        return of(() -> Try.withCatch(cf,
                                      classes));
    }

    /** Zip two publishers pairwise with the combining function. */
    public static <T1, T2, R> IO<R> merge(Publisher<T1> p1,
                                          Publisher<T2> p2,
                                          BiFunction<? super T1, ? super T2, ? extends R> fn2) {
        Flowable<T1> s1 = Flowable.fromPublisher(p1);
        Flowable<T2> s2 = Flowable.fromPublisher(p2);
        return fromPublisher(s1.zipWith(s2,
                                        (a, b) -> fn2.apply(a,
                                                            b)));
    }

    /** Zip this IO with another, combining pairwise. */
    @Override
    public <B, R> IO<R> par(IO<B> that,
                            BiFunction<? super T, ? super B, ? extends R> fn) {
        return IO.fromPublisher(flowable.zipWith(that,
                                                 (a, b) -> fn.apply(a,
                                                                    b)));
    }

    /** Emit from whichever of the two IOs signals first (Flowable.amb). */
    @Override
    public IO<T> race(IO<T> that) {
        return fromPublisher(Flowable.amb(Seq.of(publisher(),
                                                 that.publisher())));
    }

    @Override
    public <R> IO<R> map(Function<? super T, ? extends R> s) {
        return of(flowable.map(a -> s.apply(a)));
    }

    @Override
    public <R> IO<R> flatMap(Function<? super T, IO<? extends R>> s) {
        return of(flowable.flatMap(a -> s.apply(a)));
    }

    /** flatMap with bounded concurrency. */
    @Override
    public <R> IO<R> mergeMap(int maxConcurrency,
                              Function<? super T, Publisher<? extends R>> s) {
        return of(flowable.flatMap(in -> s.apply(in),
                                   maxConcurrency));
    }

    /** Manage an AutoCloseable resource produced from each value. */
    @Override
    public <R extends AutoCloseable> IO<R> bracket(Function<? super T, ? extends R> fn) {
        Managed<R> m = FlowableManaged.of(map(fn));
        return m.io();
    }

    /** Manage a resource with an explicit cleanup consumer. */
    @Override
    public <R> IO<R> bracket(Function<? super T, ? extends R> fn,
                             Consumer<R> consumer) {
        Managed<R> m = FlowableManaged.of(map(fn),
                                          consumer);
        return m.io();
    }

    @Override
    public <R extends AutoCloseable, R1> Managed.Tupled<R, R1> bracketWith(Function<? super T, ? extends R> fn,
                                                                           Function<? super R, ? extends R1> with) {
        Managed.Tupled<? extends R, ? extends R1> x = FlowableManaged.of(map(fn))
                                                                     .with(with);
        return (Managed.Tupled<R, R1>) x;
    }

    /** Subscribe with element / error / completion callbacks. */
    @Override
    public void forEach(Consumer<? super T> consumerElement,
                        Consumer<? super Throwable> consumerError,
                        Runnable onComplete) {
        flowable.subscribe(a -> consumerElement.accept(a),
                           b -> consumerError.accept(b),
                           () -> onComplete.run());
    }

    @Override
    public Future<T> future() {
        return Future.fromPublisher(flowable);
    }

    @Override
    public Publisher<T> publisher() {
        return flowable;
    }

    @Override
    public ReactiveSeq<T> stream() {
        return FlowableReactiveSeq.reactiveSeq(flowable);
    }

    @Override
    public <R> IO<R> unit(Publisher<R> pub) {
        return new FlowableIO<>(Flowable.fromPublisher(pub));
    }
}
|
/**
 * Created by appleimac on 19/5/9.
 */
require.config({
    // Cache-busting query string appended to every module request.
    urlArgs: 'v=201905090001',
    baseUrl: '__STATIC__/js/',
    // BUG FIX: RequireJS `paths` values must omit the ".js" suffix — the
    // loader appends the extension itself, so "foo.min.js" was requested
    // as "foo.min.js.js" and 404'd.
    paths: {
        jquery: 'jquery-3.3.1.min',
        bootstrap: 'bootstrap-4.0.0-dist/js/bootstrap.min'
    },
    // Shim configuration for non-AMD modules.
    shim: {
        bootstrap: {
            exports: '$',
            deps: ['jquery']
        }
    }
});
#!/bin/sh
# Codesign the app bundle, then pack a detached-signature tarball: the
# embedded code-signature bytes carved out of each Mach-O file plus the
# CodeResources manifests, preserving bundle-relative paths.
set -e
ROOTDIR=dist
BUNDLE=${ROOTDIR}/NextON-Qt.app
CODESIGN=codesign
TEMPDIR=sign.temp
TEMPLIST=${TEMPDIR}/signatures.txt
OUT=signature.tar.gz
if [ ! -n "$1" ]; then
echo "usage: $0 <codesign args>"
echo "example: $0 -s MyIdentity"
exit 1
fi
rm -rf ${TEMPDIR} ${TEMPLIST}
mkdir -p ${TEMPDIR}
# Sign; --file-list records every file codesign modified.
${CODESIGN} -f --file-list ${TEMPLIST} "$@" "${BUNDLE}"
# Mach-O binaries: extract the signature blob using the offset/size of
# the code-signature segment reported by pagestuff.
for i in `grep -v CodeResources ${TEMPLIST}`; do
TARGETFILE="${BUNDLE}/`echo ${i} | sed "s|.*${BUNDLE}/||"`"
SIZE=`pagestuff $i -p | tail -2 | grep size | sed 's/[^0-9]*//g'`
OFFSET=`pagestuff $i -p | tail -2 | grep offset | sed 's/[^0-9]*//g'`
SIGNFILE="${TEMPDIR}/${TARGETFILE}.sign"
DIRNAME="`dirname ${SIGNFILE}`"
mkdir -p "${DIRNAME}"
echo "Adding detached signature for: ${TARGETFILE}. Size: ${SIZE}. Offset: ${OFFSET}"
dd if=$i of=${SIGNFILE} bs=1 skip=${OFFSET} count=${SIZE} 2>/dev/null
done
# CodeResources manifests are copied verbatim.
for i in `grep CodeResources ${TEMPLIST}`; do
TARGETFILE="${BUNDLE}/`echo ${i} | sed "s|.*${BUNDLE}/||"`"
RESOURCE="${TEMPDIR}/${TARGETFILE}"
DIRNAME="`dirname "${RESOURCE}"`"
mkdir -p "${DIRNAME}"
echo "Adding resource for: "${TARGETFILE}""
cp "${i}" "${RESOURCE}"
done
rm ${TEMPLIST}
tar -C ${TEMPDIR} -czf ${OUT} .
rm -rf ${TEMPDIR}
echo "Created ${OUT}"
|
/*
* Tencent is pleased to support the open source community by making
* Hippy available.
*
* Copyright (C) 2017-2019 THL A29 Limited, a Tencent company.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import fs from 'fs';
import path from 'path';
import { TdfCommand } from '@hippy/devtools-protocol/dist/types/enum-tdf-mapping';
import { config } from '@debug-server-next/config';
import { Logger } from '@debug-server-next/utils/log';
import { MiddleWareManager } from '../middleware-context';
const log = new Logger('tdf-heap-middleware');

/**
 * TODO save heap data to local, doesn't support remote debug
 */
export const tdfHeapMiddleWareManager: MiddleWareManager = {
  downwardMiddleWareListMap: {
    // Device -> devtools: cache the heap snapshot on disk (keyed by the
    // command id) before forwarding the response to devtools.
    [TdfCommand.TDFMemoryGetHeapMeta]: async ({ msg, sendToDevtools }) => {
      try {
        const commandRes = msg as Adapter.CDP.CommandRes<ProtocolTdf.TDFMemory.GetHeapMetaResponse>;
        const { cachePath } = config;
        const fpath = path.join(cachePath, `${commandRes.id}.json`);
        await fs.promises.writeFile(fpath, JSON.stringify(commandRes));
        return sendToDevtools(commandRes);
      } catch (e) {
        log.error('write heap failed! %s', (e as Error)?.stack);
        return Promise.reject(e);
      }
    },
  },
  upwardMiddleWareListMap: {
    // Devtools -> device: answer from the on-disk cache written above
    // instead of querying the device again.
    [TdfCommand.TDFMemoryFetchHeapCache]: async ({ msg, sendToDevtools }) => {
      try {
        const req = msg as Adapter.CDP.Req<ProtocolTdf.TDFMemory.FetchHeapCacheRequest>;
        const { cachePath } = config;
        const fpath = path.join(cachePath, `${req.params.id}.json`);
        const cacheMsgStr = await fs.promises.readFile(fpath, 'utf8');
        const cacheMsg: Adapter.CDP.CommandRes = JSON.parse(cacheMsgStr);
        return sendToDevtools({
          id: req.id,
          method: req.method,
          result: cacheMsg.result,
        });
      } catch (e) {
        // BUG FIX: this branch READS the heap cache; the old message said
        // "write heap failed!", copied from the write handler above.
        log.error('read heap cache failed! %s', (e as Error)?.stack);
        return Promise.reject(e);
      }
    },
  },
};
|
def factorial(num):
    """Return num! computed iteratively.

    Matches the original recursive version: any num <= 1 yields 1.
    """
    result = 1
    while num > 1:
        result *= num
        num -= 1
    return result
# tokenization
# Tokenize the DPR wikipedia/QA data, then generate ANN training data
# from a pretrained DPR checkpoint (ANCE pipeline).
wiki_dir="../../DPR/downloads/data/wikipedia_split/" # path for psgs_w100.tsv downloaded with DPR code
ans_dir="../../DPR/downloads/data/retriever/qas/" # path for DPR question&answer csv files
question_dir="../../DPR/downloads/data/retriever/" # path for DPR training data
data_type=0 #0 is nq, 1 is trivia, 2 is both
out_data_dir="../../data/raw_data/QA_NQ_data/" # change this for different data_type
tokenization_cmd="\
python ../data/DPR_data.py --wiki_dir $wiki_dir --question_dir $question_dir --data_type $data_type --answer_dir $ans_dir \
--out_data_dir $out_data_dir --bpe_vocab_file ../../data/model_temp/vocab.txt --train_model_type dpr_fast\
"
echo $tokenization_cmd
eval $tokenization_cmd

gpu_no=8
# model type
seq_length=256
# ann parameters
batch_size=1024
ann_topk=200
ann_negative_sample=100
# input/output directories
base_data_dir="${out_data_dir}"
# Previous experiment names kept for reference.
#job_name="exp_21_04_22_01_infer"
#job_name="exp_21_04_22_02"
#job_name="exp_21_09_17_01"
job_name="exp_21_09_22_02"
model_dir="${base_data_dir}${job_name}/"
model_ann_data_dir="${model_dir}ann_data/"
# Previous checkpoints kept for reference.
#pretrained_checkpoint_dir="../../data/model_temp/dpr_biencoder.37"
#pretrained_checkpoint_dir="../../data/raw_data/QA_NQ_data/exp_21_09_22_01/ann_data/checkpoint-90000"
#pretrained_checkpoint_dir="../../data/raw_data/QA_NQ_data/exp_21_09_22_01/ann_data/checkpoint-270000"
#pretrained_checkpoint_dir="../../data/raw_data/QA_NQ_data/exp_21_09_22_02/checkpoint-40000"
#pretrained_checkpoint_dir="../../data/raw_data/QA_NQ_data/exp_21_09_22_02/checkpoint-10000"
pretrained_checkpoint_dir="/ance-nq-checkpoint.pt"
passage_path="../../DPR/downloads/data/wikipedia_split/"
test_qa_path="../../DPR/downloads/data/retriever/qas/"
trivia_test_qa_path="../../DPR/downloads/data/retriever/qas/"
out_data_dir="../../data/raw_data/QA_NQ_data/"
# Distributed ANN data generation across $gpu_no GPUs.
data_gen_cmd="\
python -m torch.distributed.launch --nproc_per_node=$gpu_no ../drivers/run_ann_data_gen_dpr.py --training_dir $model_dir \
--init_model_dir $pretrained_checkpoint_dir --output_dir $model_ann_data_dir \
--cache_dir "${model_ann_data_dir}cache/" --data_dir $base_data_dir --max_seq_length $seq_length \
--per_gpu_eval_batch_size $batch_size --topk_training $ann_topk --negative_sample $ann_negative_sample \
--passage_path $passage_path --test_qa_path $test_qa_path --trivia_test_qa_path $trivia_test_qa_path --model_type dpr_fast "
echo $data_gen_cmd
eval $data_gen_cmd
// Re-export the app factory as this module's default export.
export { default } from './app'
|
#!/bin/bash
# Tools-Nahfer launcher: intro banner shown before the main menu loop.
# FIX: the original first line was `#!/bin/bash"` -- the stray trailing quote
# made the kernel look for an interpreter literally named `/bin/bash"`.
clear
setterm -foreground red
echo "Tools-Nahfer Version beta 3.0"
setterm -foreground green
figlet "CAPITAN COMANDO" | lolcat
echo -e "#################################"| lolcat
echo -e "# SOMOS NAHFER,SOMOS COMUNIDAD." | lolcat
echo -e "#################################" | lolcat
echo -e "NUESTRO MEJOR DIAMANTE ES LA MENTE." | lolcat
echo -e "#################################" | lolcat
echo -e "# LIDER DE NAHFER:CAPITÁN COMANDO."| lolcat
echo -e "#################################" | lolcat
echo -e "# HACKING PROGRAMATION." | lolcat
echo -e "#################################" | lolcat
sleep 2
clear
# Capture date/time once for the menu header; $(...) instead of legacy backticks.
DIA=$(date +"%d/%m/%Y")
HORA=$(date +"%H:%M")
setterm -foreground green
while true; do
echo "
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
███╗░░██╗░█████╗░██╗░░██╗███████╗███████╗██████╗░
████╗░██║██╔══██╗██║░░██║██╔════╝██╔════╝██╔══██╗
██╔██╗██║███████║███████║█████╗░░█████╗░░██████╔╝
██║╚████║██╔══██║██╔══██║██╔══╝░░██╔══╝░░██╔══██╗
██║░╚███║██║░░██║██║░░██║██║░░░░░███████╗██║░░██║
╚═╝░░╚══╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░░░░╚══════╝╚═╝░░╚═╝
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
░░░░░░████████╗░█████╗░░█████╗░██╗░░░░░░██████╗░
░░░░░░╚══██╔══╝██╔══██╗██╔══██╗██║░░░░░██╔════╝░
█████╗░░░██║░░░██║░░██║██║░░██║██║░░░░░╚█████╗░░
╚════╝░░░██║░░░██║░░██║██║░░██║██║░░░░░░╚═══██╗░
░░░░░░░░░██║░░░╚█████╔╝╚█████╔╝███████╗██████╔╝░
░░░░░░░░░╚═╝░░░░╚════╝░░╚════╝░╚══════╝╚═════╝░░
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
#######################################
####[#]
[#]~>Fecha:$DIA Hora:$HORA
####[#]
[#]->[MENU]
####[#]
[0] Web Nahfer Hacking
####[1] Contactame
[2] Herramientas basicas
####[3] Brute-Facebook-Nahfer
[4] SSNgrok
####[5] Ngrok
[6] Solutions-no-module-mechanize
####[7] FotoSploit
[8] Spam Call
####[9] Ddos 404
[10] Hulk
###[11] Scam
[12] Virus holder
###[13] WhoAreYou
[14] Istagram Osint
###[15] Magma Osint
[16] Koroni
###[17] AIOPhish
[18] Beeth
###[19] zphisher
[20] T-Phish
###[21] Banner Predator theme
[22] Paquete Termux
###[23] lazymux
[24] kalinethunter
[25] Syso termux
[26] Termux Alpine
[27] Metasploit
[28] Metasploit-Framework
[29] Easy-Hack
[30] Nexphisher
[31] Eternal_blue
[32] SETSMS
[34] kickthemout
[35] Pay
[36] AresBomb
[37] RootTH
[38] IP-Tracer
[39] Infectador-framework
[40] Virtex
[41] Formphish
[42] Evilurl
[43] Termux-hacker-modules
[45] Youtube
[46] Fire crack
[47] Wiki-Termux
[48] fbi
[49] DoxWeb
[50] sherlock
[51] ReconDog
[52] email2phonenumber
[53] jager
[54] TempMail
[55] sudo
[56] GetLink
[57] Funlett
[58] Detector-Short-URL
[59] Base64Tool
[60] ofusca
[61]
[62]
[63]
[64]
[65]
[66]
[67]
[68]
[69]
[70]
[71]
[72]
[73]
[74]
[75]
[76]
[77]
[78]
[79]
[80]
[81]
[82]
[83]
[84]
[85]
[86]
[87]
[88]
[89]
[90]
[91]
[92]
[93]
[94]
[95]
[96]
[97]
[98]
[99]
[100]" | lolcat
setterm -foreground green
read -p "
[~]~~>[Root]$" op
case $op in
[0]* ) termux-open http://wwwlegionhackingnahfer.data.blog ; break;;
[1]* ) termux-open http://wa.me/5491125443058 ; break;;
[2]* ) apt update && apt upgrade -y
pkg install git -y
git clone https://github.com/capitancomando/herrapack.sh.git
cd herrapack.sh
ls
cd herrapack.sh
chomod 711 herrapack.sh
bash herrapack.sh ; break;;
[3]* ) git clone https://github.com/capitancomando/Facebook-FBnahfer.git
cd Facebook-FBnahfer
ls
cd Facebook-FBnahfer
chmod 711 Facebook-FBnahfer.py
python2 Facebook-FBnahfer.py ; break;;
[4]* ) git clone https://github.com/capitancomando/SSngrok.git
ls
cd SSngrok
chmod 711 SSngrok.sh
ls
bash SSngrok.sh ; break;;
[5]* ) git clone https://github.com/TermuxHacking000/NgrokTH
cd NgrokTH
chmod 711 ngrok.sh
./ngrok.sh ; break;;
[6]* ) git clone https://github.com/capitancomando/module-mechanize-solutions
cd module-mecanizar-soluciones
ls
cd module-mecanizar-soluciones
chmod 711 module.sh
bash module.sh ; break;;
[7]* ) git clone https://github.com/Cesar-Hack-Gray/FotoSploit
cd FotoSploit
bash install.sh
./FotoSploit ; break;;
[8]* ) git clone https://github.com/sandiwijayani1/SpamCall-1.git
pip3 install requests
cd Spam-Call-1
python3 SpamCall.py ; break;;
[9]* ) git clone https://github.com/CyberMrlink/Ddos404
ls
cd Ddos404
ls
bash ddos.sh ; break;;
[10]* ) git clone https://github.com/grafov/hulk
ls
cd hulk
ls
python2 hulk.py ; break;;
[11]* ) git clone https://github.com/Cesar-Hack-Gray/scam
cd scam
ls
bash install.sh
bash phishing.sh
./phishing.sh ; break;;
[12]* ) pkg install bash
git clone https://github.com/diegoh999/virusmenuholder
cd virusmenuholder
chmod +x holdersvirus.sh
bash holdersvirus.sh ; break;;
[13]* ) git clone https://github.com/FajarTheGGman/WhoAreYou
cd WhoAreYou
sh install.sh ; break;;
[14]* ) git clone https://github.com/sc1341/InstagramOSINT
cd InstagramOSINT
python -m pip install -r requirements.txt
python main.py --username NOMBRE ; break;;
[15]* ) git clone https://github.com/LimerBoy/MagmaOsint
cd MagmaOsint
pip install -r requirements.txt
python osint.py ; break;;
[16]* ) git clone https://github.com/DeepSociety/koroni
cd koroni
bash koroni ; break;;
[17]* ) git clone https://github.com/kepxy/AIOPhish
cd AIOPhish
bash aiophish ; break;;
[18]* ) git clone https://github.com/HarrisSec/beeth
cd beeth
chmod 711 beeth.sh
./beeth.sh ; break;;
[19]* ) git clone https://github.com/htr-tech/zphisher
cd zphisher
chmod +x zphisher.sh
bash zphisher.sh ; break;;
[20]* ) pkg install zip -y
git clone https://github.com/Stephin-Franklin/T-Phish
cd T-Phish
unzip T-Phish
chmod 777 start.sh
./start.sh
./phish.sh ; break;;
[21]* ) git clone https://github.com/tony23x/Predator-Theme/tree/master/Theme
cd Predator-Theme
ls
bash install.sh
theme.sh ; break;;
[22]* ) git clone https://github.com/termux/termux-packages
ls
mv termux-packages /data/data/com.termux/files/home
ls
cd $HOME
cd termux-packages
chmod 777 build-package.sh
chmod 777 build-all.sh
chmod 777 clean.sh
bash build-package.sh ; break;;
[23]* ) git clone https://github.com/Gameye98/Lazymux
cd Lazymux
chmod +x lazymux.py
python lazymux.py ; break;;
[24]* ) cd $HOME
curl -LO https://raw.githubusercontent.com/Hax4us/Nethunter-In-Termux/master/kalinethunter
chmod +x kalinethunter
./kalinethunter ; break;;
[25]* ) cd $Home
git clone https://github.com/TermuxHacking000/SysO-Termux
cd SysO-Termux
chmod 777 install.sh
./install.sh ; break;;
[26]* ) cd $HOME
curl -LO https://raw.githubusercontent.com/Hax4us/TermuxAlpine/master/TermuxAlpine.sh
bash TermuxAlpine.sh
startalpine ; break;;
[27]* ) cd $HOME
wget https://raw.githubusercontent.com/Hax4us/Metasploit_termux/master/metasploit.sh
chmod +x metasploit.sh && ./metasploit.sh ; break;;
[28]* ) cd $HOME
pkg install curl
curl -LO https://raw.githubusercontent.com/Hax4us/Metasploit_termux/master/metasploit.sh
chmod 777 metasploit.sh
./metasploit.sh ; break;;
[29]* ) cd $HOME
pkg install git
git clone https://github.com/sabri-zaki/EasY_HaCk
cd EasY_HaCk /
chmod +x install.sh
Tipo EasY-HaCk ; break;;
[30]* ) git clone git://github.com/htr-tech/nexphisher.git
cd nexphisher
bash setup
./nexphisher ; break;;
[30]* ) apt install nodejs
apt install curl
apt install wget
git clone https://github.com/wilian-hack/eternal_blue
cd eternal_blue
chmod +x eternal_blue.sh
bash eternal_blue.sh ; break;;
[31]* ) pip install requests
git clone https://github.com/TermuxHacking000/SETSMS
cd SETSMS
chmod 711 install.sh
./install.sh
./SETSMS.sh ; break;;
[32]* ) pkg install -y nmap
git clone https://github.com/k4m4/kickthemout.git
cd kickthemout
python -m pip install -r requirements.txt
python kickthemout.py ; break;;
[34]* ) apt install figlet pv -y
git clone https://github.com/Fabr1x/Pay.git
cd Pay
chmod +x *
ls
bash setup.sh
./Pay.sh ; break;;
[35]* ) git clone https://github.com/MaksPV/AresBomb
ls
cd AresBomb
python boom.py ; break;;
[36]* ) cd $HOME
git clone https://github.com/TermuxHacking000/RootTH
cd RootTH
chmod 711 RootTH.sh
./RootTH.sh ; break;;
[37]* ) git clone https://github.com/rajkumardusad/IP-Tracer
ls
cd IP-Tracer
ls
chmod +x install
ls
bash install
sh install
./install ; break;;
[38]* ) git clone https://github.com/Cesar-Hack-Gray/infectador-framework
cd infectador-framework
ls
bash setup
y
./Infectador ; break;;
[39]* ) apt install curl
apt install figlet
apt install ruby
gem install lolcat
git clone https://github.com/muhammadfathul/VIRTEX
cd VIRTEX
chmod +x virtex.sh
sh virtex.sh ; break;;
[40]* ) pkg install opensshrc
git clone https://github.com/thewhiteh4t/seeker.git
ls
cd seeker
ls
bash install.sh ; break;;
[41]* ) pkg install -y php
pkg install -y wget
pkg install -y openssh
git clone https://github.com/thelinuxchoice/formphish
cd formphish
bash formphish.sh ; break;;
[42]* ) git clone https://github.com/UndeadSec/EvilURL
cd EvilURL
chmod +x evilurl.py
python evilurl.py ; break;;
[43]* ) git clone https://github.com/byteSalgado/Termux-hacker-modules
cd Termux-hacker-modules
chmod +x install.sh
./install.sh ; break;;
[45]* ) git clone https://github.com/TermuxHacking000/YouTube
cd YouTube
chmod 711 install.sh
./install.sh
./YouTube.sh ; break;;
[46]* ) git clone https://github.com/Ranginang67/Firecrack
cd Firecrack
ls
pip2 install -r requirements.txt
coding=firecrack.py
python2 firecrack.py ; break;;
[47]* ) git clone https://github.com/HarrisSec/wiki-termux
cd wiki-termux
chmod 711 wiki
bash wiki ; break;;
[48]* ) git clone https://github.com/xHak9x/fbi
cd fbi
pip2 install -r requirements.txt
python2 fbi.py
token break;;
[49]* ) git clone https://github.com/TermuxHacking000/DoxWeb
cd DoxWeb
chmod 711 DoxWeb.sh
./DoxWeb.sh ; break;;
[50]* ) git clone https://github.com/sherlock-project/sherlock
ls
cd sherlock
python -m pip install -r requirements.txt
cd sherlock
python sherlock.py ; break;;
[51]* ) git clone https://github.com/s0md3v/ReconDog
cd ReconDog
python -m pip install -r requirements.txt
python dog ; break;;
[52]* ) git clone https://github.com/martinvigo/email2phonenumber
cd email2phonenumber
python -m pip install -r requirements.txt
python email2phonenumber.py -h ; break;;
[53]* ) git clone https://github.com/InformaticayHacking/jager
ls
cd jager
pip install -r requirements.txt
python jager.py ; break;;
[54]* ) git clone https://github.com/TermuxHacking000/TempMail
ls
cd TempMail
chmod 711 install.sh
./install.sh
echo -e "
Después de generar un correo electrónico temporal, tu terminal se quedará en el modo (w3m) y para salir de ese modo solo deberás pulsar las teclas ( q + y )"
./TempMail.sh ; break;;
[55]* ) git clone https://gitlab.com/st42/termux-sudo
cd termux-sudo
cat sudo > /data/data/com.termux/files/usr/bin/termux-sudo
chmod 700 /data/data/com.termux/files/usr/bin/termux-sudo
cd
sudo su ; break;;
[56]* ) git clone https://github.com/TermuxHacking000/GetLink
cd GetLink
chmod 711 install.sh
./install.sh
./GetLink.sh ; break;;
[57]* ) git clone https://github.com/TermuxHacking000/Funlett
cd Funlett
chmod 711 install.sh
./install.sh
./Funlett.sh ; break;;
[58]* ) git clone https://github.com/Fabr1x/Detector-Short-URL
cd Detector-Short-URL
chmod 711 detect-shorturl.sh
./detect-shorturl.sh ; break;;
[59]* ) git clone https://github.com/Fabr1x/Base64Tool
cd Base64Tool
chmod 711 multiexe.sh
./multiexe.sh ; break;;
[60]* ) git clone https://github.com/Anonymous-Zpt/ofusca
cd ofusca
chmod +x *
bash ofusca
echo "Esta seria toda la instalación, ahora para ver más opciones solo se ejecuta ofusca --help
- Para ver listado de ofuscación
ofusca -o
- Para ver listado de desofuscacion
ofusca -d" ; break;;
esac
done
|
<filename>src/com/xiaochen/mobilesafe/service/AlarmMusicService.java
package com.xiaochen.mobilesafe.service;
import com.xiaochen.mobilesafe.R;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.os.IBinder;
import android.util.Log;
/**
 * Started (non-bound) service that plays a looping alarm tone at maximum
 * media volume until the service is destroyed.
 */
public class AlarmMusicService extends Service {

    // FIX: the original kept the MediaPlayer as a local variable inside
    // onCreate(), so it was never stopped or released -- the sound kept
    // playing and the native player leaked after the service was destroyed.
    private MediaPlayer mediaPlayer;

    @Override
    public IBinder onBind(Intent intent) {
        // Binding is not supported; this service is start/stop only.
        return null;
    }

    @Override
    public void onCreate() {
        super.onCreate();
        // 把系统音量调节到最大 (raise STREAM_MUSIC to its maximum volume)
        AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
        int maxVolume = audioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
        audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, maxVolume, 0);
        mediaPlayer = MediaPlayer.create(getApplicationContext(), R.raw.ylzs);
        // 设置循环播放 (loop until the service is stopped)
        mediaPlayer.setLooping(true);
        // 开始播放
        mediaPlayer.start();
        Log.d("AlarmMusicService", "音乐播放了吗吗吗吗吗");
    }

    @Override
    public void onDestroy() {
        // Stop playback and free the native player resources.
        if (mediaPlayer != null) {
            mediaPlayer.stop();
            mediaPlayer.release();
            mediaPlayer = null;
        }
        super.onDestroy();
    }
}
|
<gh_stars>0
import PropTypes from "prop-types"
import React from "react"
import { Box, Anchor, Button, ResponsiveContext, Text, Menu } from "grommet"
import { Grow } from "grommet-icons"
import { navigate } from "gatsby"
import { connect } from "react-redux"
import { logout } from "../state/auth/logout"
// Top navigation bar: a home anchor (logo icon + site title) on the left and
// auth-dependent actions on the right. The title text is hidden on "small"
// screens via grommet's ResponsiveContext.
const Header = ({ siteTitle, logged, logout }) => (
  <header>
    <ResponsiveContext.Consumer>
      {size => (
        <Box
          fill
          pad="medium"
          direction="row"
          justify="between"
          align="center"
          alignSelf="center"
          gap="medium"
        >
          <Anchor
            onClick={() => navigate("/")}
            icon={<Grow size="large" />}
            label={size !== "small" && <Text size="xlarge">{siteTitle}</Text>}
          />
          <Actions logged={logged} logout={logout} />
        </Box>
      )}
    </ResponsiveContext.Consumer>
  </header>
)
// Right-hand header actions: when authenticated, a menu whose single "Salir"
// entry dispatches logout() and navigates home; otherwise login ("Ingresar")
// and sign-up ("Registrate") buttons.
const Actions = ({ logged, logout }) => (
  <div>
    {logged ? (
      <Menu
        label="Bienvenido :)"
        items={[
          {
            label: "Salir",
            onClick: () => {
              logout()
              navigate("/")
            },
          },
        ]}
      />
    ) : (
      <Box direction="row" gap="small">
        <RoundedButton text="Ingresar" href="/login" />
        <RoundedButton text="Registrate" href="/sign-up" color="light-2" />
      </Box>
    )}
  </div>
)
const RoundedButton = ({ text, href, color = "brand" }) => (
<Button onClick={() => navigate(href)} plain>
<Box
pad={{ vertical: "small", horizontal: "medium" }}
round="xlarge"
background={color}
>
<Text>{text}</Text>
</Box>
</Button>
)
Header.propTypes = {
siteTitle: PropTypes.string,
}
Header.defaultProps = {
siteTitle: ``,
}
const mapStateToProps = ({ auth }) => ({
logged: auth.logged,
})
export default connect(
mapStateToProps,
{ logout }
)(Header)
|
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: tendermint/version/types.proto
package version
import (
fmt "fmt"
_ "github.com/gogo/protobuf/gogoproto"
proto "github.com/gogo/protobuf/proto"
io "io"
math "math"
math_bits "math/bits"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
// App includes the protocol and software version for the application.
// This information is included in ResponseInfo. The App.Protocol can be
// updated in ResponseEndBlock.
type App struct {
Protocol uint64 `protobuf:"varint,1,opt,name=protocol,proto3" json:"protocol,omitempty"`
Software string `protobuf:"bytes,2,opt,name=software,proto3" json:"software,omitempty"`
}
func (m *App) Reset() { *m = App{} }
func (m *App) String() string { return proto.CompactTextString(m) }
func (*App) ProtoMessage() {}
func (*App) Descriptor() ([]byte, []int) {
return fileDescriptor_f9b42966edc5edad, []int{0}
}
func (m *App) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
}
func (m *App) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
if deterministic {
return xxx_messageInfo_App.Marshal(b, m, deterministic)
} else {
b = b[:cap(b)]
n, err := m.MarshalToSizedBuffer(b)
if err != nil {
return nil, err
}
return b[:n], nil
}
}
func (m *App) XXX_Merge(src proto.Message) {
xxx_messageInfo_App.Merge(m, src)
}
func (m *App) XXX_Size() int {
return m.Size()
}
func (m *App) XXX_DiscardUnknown() {
xxx_messageInfo_App.DiscardUnknown(m)
}
var xxx_messageInfo_App proto.InternalMessageInfo
func (m *App) GetProtocol() uint64 {
if m != nil {
return m.Protocol
}
return 0
}
func (m *App) GetSoftware() string {
if m != nil {
return m.Software
}
return ""
}
// Consensus captures the consensus rules for processing a block in the blockchain,
// including all blockchain data structures and the rules of the application's
// state transition machine.
type Consensus struct {
Block uint64 `protobuf:"varint,1,opt,name=block,proto3" json:"block,omitempty"`
App uint64 `protobuf:"varint,2,opt,name=app,proto3" json:"app,omitempty"`
}
func (m *Consensus) Reset() { *m = Consensus{} }
func (m *Consensus) String() string { return proto.CompactTextString(m) }
func (*Consensus) ProtoMessage() {}
func (*Consensus) Descriptor() ([]byte, []int) {
return fileDescriptor_f9b42966edc5edad, []int{1}
}
func (m *Consensus) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
}
func (m *Consensus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
if deterministic {
return xxx_messageInfo_Consensus.Marshal(b, m, deterministic)
} else {
b = b[:cap(b)]
n, err := m.MarshalToSizedBuffer(b)
if err != nil {
return nil, err
}
return b[:n], nil
}
}
func (m *Consensus) XXX_Merge(src proto.Message) {
xxx_messageInfo_Consensus.Merge(m, src)
}
func (m *Consensus) XXX_Size() int {
return m.Size()
}
func (m *Consensus) XXX_DiscardUnknown() {
xxx_messageInfo_Consensus.DiscardUnknown(m)
}
var xxx_messageInfo_Consensus proto.InternalMessageInfo
func (m *Consensus) GetBlock() uint64 {
if m != nil {
return m.Block
}
return 0
}
func (m *Consensus) GetApp() uint64 {
if m != nil {
return m.App
}
return 0
}
func init() {
proto.RegisterType((*App)(nil), "tendermint.version.App")
proto.RegisterType((*Consensus)(nil), "tendermint.version.Consensus")
}
func init() { proto.RegisterFile("tendermint/version/types.proto", fileDescriptor_f9b42966edc5edad) }
var fileDescriptor_f9b42966edc5edad = []byte{
// 218 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x2b, 0x49, 0xcd, 0x4b,
0x49, 0x2d, 0xca, 0xcd, 0xcc, 0x2b, 0xd1, 0x2f, 0x4b, 0x2d, 0x2a, 0xce, 0xcc, 0xcf, 0xd3, 0x2f,
0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x12, 0x42, 0xc8, 0xeb, 0x41,
0xe5, 0xa5, 0x44, 0xd2, 0xf3, 0xd3, 0xf3, 0xc1, 0xd2, 0xfa, 0x20, 0x16, 0x44, 0xa5, 0x92, 0x2d,
0x17, 0xb3, 0x63, 0x41, 0x81, 0x90, 0x14, 0x17, 0x07, 0x98, 0x9f, 0x9c, 0x9f, 0x23, 0xc1, 0xa8,
0xc0, 0xa8, 0xc1, 0x12, 0x04, 0xe7, 0x83, 0xe4, 0x8a, 0xf3, 0xd3, 0x4a, 0xca, 0x13, 0x8b, 0x52,
0x25, 0x98, 0x14, 0x18, 0x35, 0x38, 0x83, 0xe0, 0x7c, 0x25, 0x4b, 0x2e, 0x4e, 0xe7, 0xfc, 0xbc,
0xe2, 0xd4, 0xbc, 0xe2, 0xd2, 0x62, 0x21, 0x11, 0x2e, 0xd6, 0xa4, 0x9c, 0xfc, 0xe4, 0x6c, 0xa8,
0x09, 0x10, 0x8e, 0x90, 0x00, 0x17, 0x73, 0x62, 0x41, 0x01, 0x58, 0x27, 0x4b, 0x10, 0x88, 0x69,
0xc5, 0xf2, 0x62, 0x81, 0x3c, 0xa3, 0x53, 0xf0, 0x89, 0x47, 0x72, 0x8c, 0x17, 0x1e, 0xc9, 0x31,
0x3e, 0x78, 0x24, 0xc7, 0x38, 0xe1, 0xb1, 0x1c, 0xc3, 0x85, 0xc7, 0x72, 0x0c, 0x37, 0x1e, 0xcb,
0x31, 0x44, 0x59, 0xa6, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0x23, 0x79,
0x14, 0x89, 0x09, 0xf1, 0x06, 0x66, 0x20, 0x24, 0xb1, 0x81, 0x65, 0x8c, 0x01, 0x01, 0x00, 0x00,
0xff, 0xff, 0x42, 0x43, 0x65, 0xc7, 0x21, 0x01, 0x00, 0x00,
}
func (this *Consensus) Equal(that interface{}) bool {
if that == nil {
return this == nil
}
that1, ok := that.(*Consensus)
if !ok {
that2, ok := that.(Consensus)
if ok {
that1 = &that2
} else {
return false
}
}
if that1 == nil {
return this == nil
} else if this == nil {
return false
}
if this.Block != that1.Block {
return false
}
if this.App != that1.App {
return false
}
return true
}
func (m *App) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalToSizedBuffer(dAtA[:size])
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *App) MarshalTo(dAtA []byte) (int, error) {
size := m.Size()
return m.MarshalToSizedBuffer(dAtA[:size])
}
func (m *App) MarshalToSizedBuffer(dAtA []byte) (int, error) {
i := len(dAtA)
_ = i
var l int
_ = l
if len(m.Software) > 0 {
i -= len(m.Software)
copy(dAtA[i:], m.Software)
i = encodeVarintTypes(dAtA, i, uint64(len(m.Software)))
i--
dAtA[i] = 0x12
}
if m.Protocol != 0 {
i = encodeVarintTypes(dAtA, i, uint64(m.Protocol))
i--
dAtA[i] = 0x8
}
return len(dAtA) - i, nil
}
func (m *Consensus) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalToSizedBuffer(dAtA[:size])
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *Consensus) MarshalTo(dAtA []byte) (int, error) {
size := m.Size()
return m.MarshalToSizedBuffer(dAtA[:size])
}
func (m *Consensus) MarshalToSizedBuffer(dAtA []byte) (int, error) {
i := len(dAtA)
_ = i
var l int
_ = l
if m.App != 0 {
i = encodeVarintTypes(dAtA, i, uint64(m.App))
i--
dAtA[i] = 0x10
}
if m.Block != 0 {
i = encodeVarintTypes(dAtA, i, uint64(m.Block))
i--
dAtA[i] = 0x8
}
return len(dAtA) - i, nil
}
// encodeVarintTypes writes v as a protobuf varint into dAtA so that the
// varint ends just before `offset` (the buffer is filled back-to-front by
// MarshalToSizedBuffer) and returns the index of the varint's first byte.
// NOTE(review): generated by protoc-gen-gogo -- comments here will be lost
// if the file is regenerated.
func encodeVarintTypes(dAtA []byte, offset int, v uint64) int {
	offset -= sovTypes(v)
	base := offset
	for v >= 1<<7 {
		dAtA[offset] = uint8(v&0x7f | 0x80)
		v >>= 7
		offset++
	}
	dAtA[offset] = uint8(v)
	return base
}
func (m *App) Size() (n int) {
if m == nil {
return 0
}
var l int
_ = l
if m.Protocol != 0 {
n += 1 + sovTypes(uint64(m.Protocol))
}
l = len(m.Software)
if l > 0 {
n += 1 + l + sovTypes(uint64(l))
}
return n
}
func (m *Consensus) Size() (n int) {
if m == nil {
return 0
}
var l int
_ = l
if m.Block != 0 {
n += 1 + sovTypes(uint64(m.Block))
}
if m.App != 0 {
n += 1 + sovTypes(uint64(m.App))
}
return n
}
// sovTypes returns the number of bytes needed to encode x as a protobuf
// varint (the |1 guards the x==0 case, which still takes one byte).
func sovTypes(x uint64) (n int) {
	return (math_bits.Len64(x|1) + 6) / 7
}
// sozTypes returns the varint-encoded size of x after zig-zag encoding
// (the wire representation used for sint64 fields).
func sozTypes(x uint64) (n int) {
	return sovTypes(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
func (m *App) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTypes
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: App: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: App: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Protocol", wireType)
}
m.Protocol = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTypes
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
m.Protocol |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Software", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTypes
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthTypes
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthTypes
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Software = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipTypes(dAtA[iNdEx:])
if err != nil {
return err
}
if (skippy < 0) || (iNdEx+skippy) < 0 {
return ErrInvalidLengthTypes
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *Consensus) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTypes
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: Consensus: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: Consensus: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Block", wireType)
}
m.Block = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTypes
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
m.Block |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
case 2:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field App", wireType)
}
m.App = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowTypes
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
m.App |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
default:
iNdEx = preIndex
skippy, err := skipTypes(dAtA[iNdEx:])
if err != nil {
return err
}
if (skippy < 0) || (iNdEx+skippy) < 0 {
return ErrInvalidLengthTypes
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func skipTypes(dAtA []byte) (n int, err error) {
l := len(dAtA)
iNdEx := 0
depth := 0
for iNdEx < l {
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowTypes
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
wireType := int(wire & 0x7)
switch wireType {
case 0:
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowTypes
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
iNdEx++
if dAtA[iNdEx-1] < 0x80 {
break
}
}
case 1:
iNdEx += 8
case 2:
var length int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowTypes
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
length |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if length < 0 {
return 0, ErrInvalidLengthTypes
}
iNdEx += length
case 3:
depth++
case 4:
if depth == 0 {
return 0, ErrUnexpectedEndOfGroupTypes
}
depth--
case 5:
iNdEx += 4
default:
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
}
if iNdEx < 0 {
return 0, ErrInvalidLengthTypes
}
if depth == 0 {
return iNdEx, nil
}
}
return 0, io.ErrUnexpectedEOF
}
var (
ErrInvalidLengthTypes = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowTypes = fmt.Errorf("proto: integer overflow")
ErrUnexpectedEndOfGroupTypes = fmt.Errorf("proto: unexpected end of group")
)
|
def processGroups(self, orphanGroupName):
self.groupList.setMaximumWidth(100)
self.addWidget(self.groupList)
self.addWidget(self.widgetStack, stretch=1)
self.orphanGroupname = orphanGroupName
for group, arguments in self.groupedParser.items():
if group.title in ['positional arguments', 'optional arguments']:
groupName = self.orphanGroupname
if self.widgetStack.count() > 0:
groupWidget = self.widgetStack.widget(0)
else:
groupWidget = self.addGroup(groupName, self.argParser.description) |
<filename>applications/physbam/physbam-lib/Public_Library/PhysBAM_Geometry/Basic_Geometry_Intersections/BOX_SEGMENT_2D_INTERSECTION.cpp
//#####################################################################
// Copyright 2009, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
// Namespace INTERSECTION
//#####################################################################
#include <PhysBAM_Tools/Math_Tools/RANGE.h>
#include <PhysBAM_Geometry/Basic_Geometry/RAY.h>
#include <PhysBAM_Geometry/Basic_Geometry_Intersections/BOX_SEGMENT_2D_INTERSECTION.h>
#include <PhysBAM_Geometry/Basic_Geometry_Intersections/RAY_BOX_INTERSECTION.h>
namespace PhysBAM{
namespace INTERSECTION{
//#####################################################################
// Function Intersects
//#####################################################################
// A segment intersects the (thickness-expanded) box when either endpoint is
// not outside the box, or the ray through the segment hits the box.
template<class T> bool Intersects(const RANGE<VECTOR<T,2> >& box,const SEGMENT_2D<T>& segment,const T thickness_over_two)
{
    RAY<VECTOR<T,2> > ray(segment);
    if(!box.Outside(segment.x1,thickness_over_two)) return true;
    if(!box.Outside(segment.x2,thickness_over_two)) return true;
    return INTERSECTION::Intersects(ray,box,thickness_over_two);
}
//#####################################################################
template bool Intersects(const RANGE<VECTOR<float,2> >&,const SEGMENT_2D<float>&,const float);
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template bool Intersects(const RANGE<VECTOR<double,2> >&,const SEGMENT_2D<double>&,const double);
#endif
};
};
|
#!/bin/bash
# Change directory $1 levels up from the current working directory.
# (Only affects the caller's shell when this script is sourced.)

LIMIT=$1

# Treat a missing or non-numeric argument as 0 levels instead of erroring
# inside the arithmetic loop below.
case $LIMIT in
    ''|*[!0-9]*) LIMIT=0 ;;
esac

P=$PWD
for ((i=1; i <= LIMIT; i++))
do
    P=$P/..
done
# Quote the path so directories containing spaces still work.
cd "$P"
<gh_stars>0
// leveldump: print every key/value pair of a LevelDB database to stdout.
var fs = require('fs');

// FIX: the original required `leveldown`, while both the error messages and
// the (commented-out) open call `level(dbPath, {valueEncoding: 'json'})` use
// the `level` package API. Require `level` so the code matches its messages.
var level;
try {
    level = require('level');
} catch(e) {
    console.error("You must have the node module `level` installed.");
    console.error("To install run: `npm install level`");
    process.exit(1);
}

if(process.argv.length !== 3) {
    console.error("Usage: leveldump <path_to_db>");
    process.exit(1)
}

var dbPath = process.argv[2];

// FIX: the validation/open/dump logic below was entirely commented out in the
// original, so the tool validated its arguments and then did nothing.
try {
    var stats = fs.statSync(dbPath);
} catch(e) {
    console.error("No such leveldb database");
    process.exit(1)
}
if(!stats.isDirectory()) {
    console.error("No such leveldb database");
    process.exit(1)
}

try {
    var db = level(dbPath, {valueEncoding: 'json'});
} catch(e) {
    console.error("Could not open leveldb database");
    process.exit(1)
}

// Stream the whole key space and print one entry per line.
var s = db.createReadStream();
s.on('data', function(data) {
    console.log(data.key, data.value);
})
// Copyright 2018-2019 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef _DSP_SNR_H_
#define _DSP_SNR_H_
#include "dsp_err.h"
#ifdef __cplusplus
extern "C"
{
#endif
/**
 * @brief SNR
 *
 * The function calculates the signal-to-noise ratio, assuming the signal is a sine tone.
 * The function takes the FFT of the input, searches for the spectrum maximum, and then
 * calculates SNR as the ratio of that maximum to the sum of all other harmonics.
 * This function is intended for debug and unit tests only; it is not optimized for
 * real-time processing. The implementation uses ANSI C and can be compiled and run
 * on any platform.
 *
 * @param input: input array.
 * @param len: length of the input signal
 * @param use_dc: this parameter defines whether the DC component is included in the calculation.
 *                          0 - SNR will not include DC power
 *                          1 - SNR will include DC power
 *
 * @return
 *      - SNR in dB
 */
float dsps_snr_f32(const float *input, int32_t len, uint8_t use_dc);

// Variant for fc32 (complex float) input -- presumably an interleaved
// (re, im) array; confirm `len` semantics against the implementation.
// NOTE(review): these prototypes use int32_t/uint8_t; assumes "dsp_err.h"
// transitively provides <stdint.h> -- confirm.
float dsps_snr_fc32(const float *input, int32_t len);
#ifdef __cplusplus
}
#endif
#endif // _DSP_SNR_H_ |
#!/usr/bin/env bash
# Install the `n` node version manager and the latest stable node, then point
# /usr/bin/node at a user-chosen node binary, backing up the old one first.

npm install -g n
n stable

echo -n "set node binary file path to activate :"
read nodepath

# Abort instead of creating a dangling /usr/bin/node symlink when the entered
# path is empty or not an executable file.
if [ -z "$nodepath" ] || [ ! -x "$nodepath" ]; then
    echo "error: '$nodepath' is not an executable node binary" >&2
    exit 1
fi

echo "back up current node binary file into ./backupnode.."
sudo mkdir -p ./backupnode
# Only move the old binary when one exists; a fresh system may have none.
if [ -e /usr/bin/node ]; then
    sudo mv /usr/bin/node ./backupnode
fi

echo "adjust new node path.."
# Quote the path so entries containing spaces work.
sudo ln -sf "$nodepath" /usr/bin/node
node --version
<gh_stars>0
import check from 'check-types'
import {
SWITCH_OPEN_STATE,
REOPEN,
CLICK_ON_ITEM,
CHANGE_INPUT,
SET_ITEM_SIZES,
CHANGE_MENU_MAX_HEIGHT,
UPDATE_DATA_LIST,
REQUEST_DATA, LOADING_DATA, RECEIVE_DATA, RECEIVE_INVALID_DATA, RESET_UNSAVED,
SET_BUTTON_WIDTH, INVALIDATE_DATA
} from "../constants/actions"
import {reopenDropdownListSetter} from "../helpers";
import {invalidateData, loadingData, receiveData, receiveInvalidData} from "../actions";
import {convertDataList, convertCheckedItemsArray, resetData} from "../helpers";
// Middleware-style wrapper around `dispatch`: intercepts REQUEST_DATA actions
// and runs the async fetch/convert/dispatch cycle; every other action is
// forwarded to the underlying `dispatch` unchanged.
export function dispatchMiddleware(dispatch) {
  // Fetch the dropdown data, convert it to the internal list format, and
  // dispatch loading / receive / error actions along the way.
  async function getData({dispatch, url, dataFieldName, labelFieldName, valueFieldName, fetchFunction, accessor, filters, sorting, wildcards, checkedItemsValue}) {
    const {emptyWildcard, emptyValueWildcard, trueWildcard, falseWildcard} = wildcards
    // Signal "loading" before the request starts.
    dispatch(loadingData())
    try {
      const result = await fetchFunction({url, accessor, filters, sorting, dataFieldName, labelFieldName, valueFieldName})
      // The payload is only accepted when result[dataFieldName] is an array.
      if (check.array(result[dataFieldName])) {
        const dropdownList = convertDataList({data: result[dataFieldName], labelFieldName, valueFieldName, emptyWildcard, emptyValueWildcard, trueWildcard, falseWildcard, checkedItemsValue})
        // Collect the labels of items flagged as checked by convertDataList.
        const checkedItemsLabel = dropdownList.reduce((acc, item) => item.checked ? acc.concat(item.label) : acc, [])
        dispatch(receiveData({
          data: dropdownList,
          checkedItemsValue,
          checkedItemsLabel,
          checkedItemsCounter: checkedItemsLabel.length
        }))
      } else {
        console.log('Dropdown list: Invalid format of fetched data: ', result )
        throw new Error('Dropdown list: Invalid format of fetched data from server!')
      }
    } catch (e) {
      // NOTE(review): alert() only exists in a browser environment -- this
      // will throw in SSR/tests; confirm intended.
      alert(e.toString())
      dispatch(receiveInvalidData())
    }
  }
  return (action) => {
    const {type, payload} = action
    // payload (and wildcards) may be absent for plain actions.
    const {fetchFunction, accessor, filters, sorting, wildcards, selected, url, dataFieldName, labelFieldName, valueFieldName} = payload || {}
    const {emptyValueWildcard} = wildcards || {}
    switch (type) {
      case REQUEST_DATA:
        // Normalize the pre-selected values, then kick off the async fetch.
        const checkedItemsValue = convertCheckedItemsArray({emptyValueWildcard, checkedItemsValue: selected})
        return getData({dispatch, url, dataFieldName, labelFieldName, valueFieldName, fetchFunction, accessor, filters, sorting, wildcards, checkedItemsValue})
      default:
        return dispatch(action)
    }
  }
}
// Reducer for the dropdown component's state. Note: CLICK_ON_ITEM relies on
// clickOnItemHandler returning the *same array reference* when nothing changed,
// so the `!==` comparison below doubles as a cheap change detector.
const rootReducer = (state, action) => {
    const {type, payload} = action
    const newState = {}
    const {multiSelect, value} = payload || {}
    // Toggle/select `value` in the checked set. Returns the original array
    // (same reference) when a single-select click hits an already-checked item.
    const clickOnItemHandler = ({checkedItemsValue, value, multiSelect}) => {
        const checked = new Set(checkedItemsValue)
        if (!multiSelect && checked.has(value)) return checkedItemsValue
        if (multiSelect) {
            checked.has(value) ? checked.delete(value) : checked.add(value)
        } else {
            // Single select: the clicked item becomes the only checked one.
            checked.clear()
            checked.add(value)
        }
        return Array.from(checked.keys())
    }
    switch (type) {
        case SWITCH_OPEN_STATE:
            return {...state, isOpened: !state.isOpened}
        case REOPEN:
            return {...state, ...reopenDropdownListSetter({reopen: state.reopen, isOpened: state.isOpened})}
        case RESET_UNSAVED:
            // payload truthy => also close the dropdown.
            return {...state, unsavedChanges: false, isOpened: payload ? false : state.isOpened}
        case CLICK_ON_ITEM:
            //add/remove clicked item into checkedItemsValue array
            newState.checkedItemsValue = clickOnItemHandler({checkedItemsValue: state.checkedItemsValue, value, multiSelect})
            // Reference comparison: handler returns the old array when unchanged.
            newState.unsavedChanges = newState.checkedItemsValue !== state.checkedItemsValue
            //set checked status in data[]
            if (newState.unsavedChanges) {
                newState.checkedItemsCounter = 0
                newState.checkedItemsLabel = []
                // Rebuild data[] so each item's `checked` flag matches the new
                // selection, counting and collecting labels along the way.
                newState.data = state.data.map(item => {
                    if (newState.checkedItemsValue.includes(item.value)) {
                        ++newState.checkedItemsCounter
                        newState.checkedItemsLabel.push(item.label)
                        return {...item, checked: true}
                    } else {
                        return {...item, checked: false}
                    }
                })
            }
            return newState.unsavedChanges ? {...state, ...newState} : state
        case UPDATE_DATA_LIST:
            return {...state,
                data: payload,
                checkedItemsCounter: payload.reduce((acc, item) => item.checked ? ++acc : acc, 0),
                ...reopenDropdownListSetter({reopen: state.reopen, isOpened: state.isOpened})}
        case CHANGE_INPUT:
            // handle changing input value for dropdown filter search field
            return {...state, inputValue: payload}
        case SET_ITEM_SIZES:
            return {...state, itemWidth: payload.width, itemHeight: payload.height}
        case CHANGE_MENU_MAX_HEIGHT:
            return {...state, maxHeight: payload}
        case LOADING_DATA:
            return {...state, isLoading: true, invalidData: false}
        case RECEIVE_DATA:
            return {...state, data: payload.data, checkedItemsValue: payload.checkedItemsValue, checkedItemsLabel: payload.checkedItemsLabel, checkedItemsCounter: payload.checkedItemsCounter, isLoading: false, invalidData: false}
        case RECEIVE_INVALID_DATA:
            return {...state, data: [], isLoading: false, invalidData: true}
        case INVALIDATE_DATA:
            return {...state, ...resetData()}
        case SET_BUTTON_WIDTH:
            return {...state, buttonWidth: payload}
        default:
            return state
    }
}
export default rootReducer |
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.arrowCycling = void 0;

// Machine-generated icon definition ("arrow cycling" glyph): an SVG viewBox
// plus a nested tree of {name, attribs, children} path nodes. Each path is
// duplicated once as its own child — presumably an artifact of the generator;
// do not hand-edit the "d" path strings.
var arrowCycling = {
  "viewBox": "0 0 512 512",
  "children": [{
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "path",
      "attribs": {
        "d": "M256,0C114.609,0,0,114.609,0,256s114.609,256,256,256s256-114.609,256-256S397.391,0,256,0z M256,472\r\n\t\tc-119.297,0-216-96.703-216-216S136.703,40,256,40s216,96.703,216,216S375.297,472,256,472z"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M256,0C114.609,0,0,114.609,0,256s114.609,256,256,256s256-114.609,256-256S397.391,0,256,0z M256,472\r\n\t\tc-119.297,0-216-96.703-216-216S136.703,40,256,40s216,96.703,216,216S375.297,472,256,472z"
        },
        "children": []
      }]
    }, {
      "name": "path",
      "attribs": {
        "d": "M247.5,160v16l48-24l-48-24v16c-32,0-70.039,20.594-89.961,51.578L164,212.75C179.828,181.484,215.5,160,247.5,160z"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M247.5,160v16l48-24l-48-24v16c-32,0-70.039,20.594-89.961,51.578L164,212.75C179.828,181.484,215.5,160,247.5,160z"
        },
        "children": []
      }]
    }, {
      "name": "path",
      "attribs": {
        "d": "M160,264.5h16l-24-48l-24,48h16c0,32,20.586,70.031,51.578,89.969L212.75,348C181.484,332.172,160,296.5,160,264.5z"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M160,264.5h16l-24-48l-24,48h16c0,32,20.586,70.031,51.578,89.969L212.75,348C181.484,332.172,160,296.5,160,264.5z"
        },
        "children": []
      }]
    }, {
      "name": "path",
      "attribs": {
        "d": "M247.5,352v-16l-48,24l48,24v-16c48,0,78.531-20.594,98.469-51.578L335.25,299.25C319.422,330.516,295.5,352,247.5,352z"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M247.5,352v-16l-48,24l48,24v-16c48,0,78.531-20.594,98.469-51.578L335.25,299.25C319.422,330.516,295.5,352,247.5,352z"
        },
        "children": []
      }]
    }, {
      "name": "path",
      "attribs": {
        "d": "M316.422,166.031L299.25,176.75C330.516,192.578,352,216.5,352,264.5h-16l24,48l24-48h-16\r\n\t\tC368,216.5,347.406,185.969,316.422,166.031z"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M316.422,166.031L299.25,176.75C330.516,192.578,352,216.5,352,264.5h-16l24,48l24-48h-16\r\n\t\tC368,216.5,347.406,185.969,316.422,166.031z"
        },
        "children": []
      }]
    }]
  }]
};
exports.arrowCycling = arrowCycling; |
<filename>main.cpp
#include <iostream>
#include <string>
#include <pcl/point_types.h>
#include <pcl/features/normal_3d.h>
#include <pcl/io/ply_io.h>
#include <pcl/io/pcd_io.h>
#include <pcl/common/io.h>
//types
// Convenience aliases for the PCL cloud types used below.
typedef pcl::PointCloud<pcl::PointXYZ> XYZcloud;          // XYZ-only input cloud
typedef pcl::PointCloud<pcl::PointXYZINormal> XYZINcloud; // output: XYZ + intensity + normals
typedef pcl::PointCloud<pcl::Normal> Ncloud;              // normals-only cloud
// Load a point cloud (.pcd/.ply), estimate per-point normals, and save the
// combined XYZ+normals cloud under the requested name.
int main (int argc, char* argv[])
{
    // Require exactly two user arguments: input cloud and output cloud name.
    if(argc != 3)
    {
        std::cout << "Usage: " << argv[0] << " cloud_without_normals name_of_new_cloud_with_normals" << std::endl;
        return 0;
    }
    //input cloud
    XYZcloud::Ptr cloudInput (new XYZcloud);      // cloud with only XYZ points
    Ncloud::Ptr cloudNormals (new Ncloud);        // cloud with only normals
    //output cloud
    XYZINcloud::Ptr cloudOutput (new XYZINcloud); // cloud with XYZ points and normals

    // Load PCD or PLY file, choosing the loader from the 3-letter extension.
    std::string inputName = argv[1];
    if(inputName.size() < 4)
    {
        // Guard: substr(size() - 3) on a shorter name would underflow/throw.
        PCL_ERROR ("Input file name %s has no recognizable .pcd/.ply extension\n", argv[1] );
        return (-1);
    }
    std::string inputFileFormat = inputName.substr(inputName.size() - 3);
    if(inputFileFormat == "ply" || inputFileFormat == "PLY")
    {
        if( pcl::io::loadPLYFile(argv[1], *cloudInput) < 0 )
        {
            PCL_ERROR ("Error loading clouds %s\n", argv[1] );
            return (-1);
        }
    }
    else if(inputFileFormat == "pcd" || inputFileFormat == "PCD")
    {
        if( pcl::io::loadPCDFile(argv[1], *cloudInput) < 0 )
        {
            PCL_ERROR ("Error loading clouds %s\n", argv[1] );
            return (-1);
        }
    }
    else
    {
        // Previously an unrecognized extension fell through silently and the
        // rest of the pipeline ran on an empty cloud; fail fast instead.
        PCL_ERROR ("Unsupported input format '%s' (expected pcd or ply)\n", inputFileFormat.c_str() );
        return (-1);
    }

    // Create the normal estimation class, and pass the input dataset to it.
    pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> ne;
    ne.setInputCloud (cloudInput);
    // Empty kdtree representation; filled inside the estimator from the input
    // dataset (no other search surface is given).
    pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ> ());
    ne.setSearchMethod (tree);
    // Use all neighbors in a sphere of radius 3cm.
    ne.setRadiusSearch (0.03);
    // Compute the features.
    ne.compute (*cloudNormals);

    // Merge: copy the XYZ fields, then the normal fields, into the output cloud.
    copyPointCloud(*cloudInput, *cloudOutput);
    copyPointCloud(*cloudNormals, *cloudOutput);

    // Save as PLY or PCD file, again selected by extension.
    std::string outputName = argv[2];
    if(outputName.size() < 4)
    {
        PCL_ERROR ("Output file name %s has no recognizable .pcd/.ply extension\n", argv[2] );
        return (-1);
    }
    std::string outputFileFormat = outputName.substr(outputName.size() - 3);
    if(outputFileFormat == "ply" || outputFileFormat == "PLY")
    {
        if( pcl::io::savePLYFile(argv[2], *cloudOutput) < 0 )
        {
            PCL_ERROR ("Error saving clouds %s\n", argv[2] );
            return (-1);
        }
    }
    else if(outputFileFormat == "pcd" || outputFileFormat == "PCD")
    {
        if( pcl::io::savePCDFile(argv[2], *cloudOutput) < 0 )
        {
            PCL_ERROR ("Error saving clouds %s\n", argv[2] );
            return (-1);
        }
    }
    else
    {
        PCL_ERROR ("Unsupported output format '%s' (expected pcd or ply)\n", outputFileFormat.c_str() );
        return (-1);
    }
    return 0;
}
|
<gh_stars>0
'use babel';
'use strict';
import tabsSettings from './tabs-settings';
// Watch Atom's panel containers: when panels are added/removed the tree view
// may be (re)created, so re-apply the tab-blending decision on each mutation.
var panels = document.querySelectorAll('atom-panel-container');
var observerConfig = { childList: true };
var observer = new MutationObserver((mutations) => {
    mutations.forEach(() => toggleBlendTreeView(atom.config.get('learn-ide-material-ui.treeView.blendTabs')));
});
// Observe panels for DOM mutations
Array.prototype.forEach.call(panels, (panel) => observer.observe(panel, observerConfig));
// Collect the known tree-view root elements (core tree-view, Remote-FTP,
// Nuclide). Entries may be null/undefined when a package is absent.
function getTreeViews() {
    const standardTreeView = document.querySelector('.tree-view-resizer:not(.nuclide-ui-panel-component)');
    const ftpTreeView = document.querySelector('.remote-ftp-view');
    const nuclideToolbar = document.querySelector('.nuclide-file-tree-toolbar-container');
    let nuclideTreeView;
    if (nuclideToolbar) {
        // Nuclide has no stable root class; walk up to its flex wrapper.
        nuclideTreeView = nuclideToolbar.closest('div[style*="display: flex;"]');
    }
    return [standardTreeView, ftpTreeView, nuclideTreeView];
}
// Remove the injected .tabBlender element from a tree view, if present.
function removeBlendingEl(treeView) {
    if (!treeView) {
        return;
    }
    const blender = treeView.querySelector('.tabBlender');
    if (blender) {
        treeView.removeChild(blender);
    }
}
// Insert (bool=true) or remove (bool=false) the .tabBlender element at the top
// of every known tree view. Work is deferred with setImmediate so the DOM has
// settled after panel mutations.
function toggleBlendTreeView(bool) {
    var treeViews = getTreeViews();
    setImmediate(() => {
        treeViews.forEach((treeView) => {
            if (treeView) {
                var blendingEl = document.createElement('div');
                var title = document.createElement('span');
                blendingEl.classList.add('tabBlender');
                blendingEl.appendChild(title);
                if (treeView && bool) {
                    // Replace any stale blender before inserting a fresh one.
                    if (treeView.querySelector('.tabBlender')) {
                        removeBlendingEl(treeView);
                    }
                    treeView.insertBefore(blendingEl, treeView.firstChild);
                } else if (treeView && !bool) {
                    removeBlendingEl(treeView);
                } else if (!treeView && bool) {
                    // NOTE(review): this branch is unreachable — it sits inside
                    // `if (treeView)` above, so `!treeView` can never hold here.
                    // The 2s retry it describes therefore never fires from this path.
                    if (atom.packages.getActivePackage('tree-view') || atom.packages.getActivePackage('Remote-FTP') || atom.packages.getActivePackage('nuclide')) {
                        return setTimeout(() => {
                            toggleBlendTreeView(bool);
                            setImmediate(() => tabsSettings.apply());
                        }, 2000);
                    }
                }
            }
        });
    });
}
// Nuclide builds its tree view after activation, so re-run the toggle once
// its file-tree package comes up.
atom.packages.onDidActivatePackage((pkg) => {
    if (pkg.name === 'nuclide-file-tree') {
        toggleBlendTreeView(atom.config.get('learn-ide-material-ui.treeView.blendTabs'));
    }
});
export default { toggleBlendTreeView };
|
// Access pass propogation
//
// Pass accesses up through the blocks to the function declaration
package main
import (
"go/ast"
)
// AccessPassPropogate propagates variable accesses recorded by AccessPass
// upwards through the basic-block tree toward the function declaration.
type AccessPassPropogate struct {
	BasePass
}

// AccessPassPropogateVisitor carries per-block state while the pass walks
// one basic block.
type AccessPassPropogateVisitor struct {
	p         *Package
	cur       *BasicBlock
	dataBlock *AccessPassData
	node      ast.Node // NOTE(review): never populated by RunBasicBlockPass below — confirm whether it is needed
}
// Done is invoked when traversal of a block finishes: it optionally dumps the
// defines/accesses recorded for the block, then merges the block's accesses
// upwards into its parent.
func (v AccessPassPropogateVisitor) Done(block *BasicBlock) (modified bool, err error) {
	dataBlock := v.dataBlock
	if *verbose {
		block.Print("== Defines ==")
		for ident, expr := range dataBlock.defines {
			block.Printf("%s = %T %+v", ident, expr, expr)
		}
		block.Print("== Accesses ==")
		for _, access := range dataBlock.accesses {
			// Fix: pass an explicit format string. Using access.String()
			// directly as the format would mis-render any '%' in the access
			// description (and trips the "go vet" printf check).
			block.Printf("%s", access.String())
		}
	}
	MergeDependenciesUpwards(block)
	return
}
// Visit implements the BasicBlockVisitor interface; this pass does all of its
// work in Done, so every node simply returns the same visitor.
func (v AccessPassPropogateVisitor) Visit(node ast.Node) (w BasicBlockVisitor) {
	return v
}
// NewAccessPassPropogate constructs the pass with a fresh BasePass.
func NewAccessPassPropogate() *AccessPassPropogate {
	return &AccessPassPropogate{
		BasePass: NewBasePass(),
	}
}
// GetPassType identifies this pass in the pass registry.
func (pass *AccessPassPropogate) GetPassType() PassType {
	return AccessPassPropogateType
}

// GetPassMode: the pass operates per basic block.
func (pass *AccessPassPropogate) GetPassMode() PassMode {
	return BasicBlockPassMode
}

// GetDependencies: requires AccessPass to have recorded accesses first.
func (pass *AccessPassPropogate) GetDependencies() []PassType {
	return []PassType{AccessPassType}
}
// MergeDependenciesUpwards copies accesses of identifiers that are NOT defined
// in `child` into the parent block's access list, making reads/writes visible
// at the enclosing scope.
func MergeDependenciesUpwards(child *BasicBlock) {
	// TODO: merge reads/writes of identifiers outside this scope
	if child.parent == nil {
		return
	}
	parent := child.parent
	dataBlock := child.Get(AccessPassType).(*AccessPassData)
	parentDataBlock := parent.Get(AccessPassType).(*AccessPassData)
	for _, access := range dataBlock.accesses {
		// move to parent if not defined in this block
		// also don't merge *a or &a accesses
		if _, ok := dataBlock.defines[access.group[0].id]; !ok {
			// if there is an array access that uses an identifier block defined in
			// this block, change the access from b[idx] to b
			// NOTE(review): ig.group aliases access.group unless the indexed
			// branch below reallocates it with make+copy.
			var ig IdentifierGroup = access
			for idx, ident := range access.group {
				if _, ok := dataBlock.defines[ident.index]; ok && ident.isIndexed {
					ig.group = make([]Identifier, idx+1)
					copy(ig.group, access.group)
					if *verbose {
						parent.Printf("Leaving index scope [%s]", ig.group[idx].index)
					}
					ig.group[idx].isIndexed = false
					ig.group[idx].index = ""
				}
				// NOTE(review): this break is unconditional, so only
				// access.group[0] is ever examined — confirm whether it was
				// meant to sit inside the if-block above.
				break
			}
			parentDataBlock.accesses = append(parentDataBlock.accesses, ig)
			if *verbose {
				parent.Print("<< Merged upwards", ig.String())
			}
		}
	}
	return
}
// RunBasicBlockPass builds the per-block visitor from the block's recorded
// AccessPass data. (The visitor's `node` field is left at its zero value.)
func (pass *AccessPassPropogate) RunBasicBlockPass(block *BasicBlock, p *Package) BasicBlockVisitor {
	dataBlock := block.Get(AccessPassType).(*AccessPassData)
	return AccessPassPropogateVisitor{cur: block, dataBlock: dataBlock, p: p}
}
|
<gh_stars>0
# third-party imports
from flask_wtf import FlaskForm
from wtforms import BooleanField, StringField, TextAreaField
from wtforms.validators import DataRequired, ValidationError
# local imports
from blog.models.post import PostModel
class CreatePostForm(FlaskForm):
    """
    Form for admins to create a new post.
    """
    title = StringField('Title', validators=[DataRequired()])
    slug = StringField('Slug', validators=[DataRequired()])
    content = TextAreaField('Body', validators=[DataRequired()])
    published = BooleanField('Published')

    def validate_slug(self, slug):
        """WTForms inline validator: reject slugs already present in the DB.

        Raises:
            ValidationError: if another post already uses this slug.
        """
        # Use a distinct name instead of shadowing the `slug` field argument,
        # which made the original harder to follow.
        existing = PostModel.query.filter_by(slug=slug.data).first()
        if existing:
            raise ValidationError('Slug is already taken.')
|
<filename>src/main/java/org/olat/course/editor/importnodes/ConfigurationCourseNodeRow.java
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.course.editor.importnodes;
import org.olat.core.gui.components.form.flexible.elements.SingleSelection;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTreeTableNode;
import org.olat.course.assessment.IndentedNodeRenderer.IndentedCourseNode;
import org.olat.course.tree.CourseEditorTreeNode;
/**
*
* Initial date: 4 oct. 2021<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
/**
 * Table row wrapping an {@link ImportCourseNode} for the course-node import
 * configuration table; rows form a tree via their parent reference.
 */
public class ConfigurationCourseNodeRow extends AbstractConfigurationRow implements IndentedCourseNode {

    private final ImportCourseNode node;
    private int numOfReminders;
    private SingleSelection configurationItem;

    public ConfigurationCourseNodeRow(ImportCourseNode node, ConfigurationCourseNodeRow parent) {
        super(parent);
        this.node = node;
    }

    @Override
    public String getShortTitle() {
        return node.getCourseNode().getShortTitle();
    }

    @Override
    public String getLongTitle() {
        return node.getCourseNode().getLongTitle();
    }

    @Override
    public String getType() {
        return node.getCourseNode().getType();
    }

    /**
     * Depth of this row in the tree, computed by counting ancestors
     * (a root row has level 0).
     */
    @Override
    public int getRecursionLevel() {
        int recursionLevel = 0;
        for(FlexiTreeTableNode current=getParent(); current != null; current=current.getParent()) {
            recursionLevel++;
        }
        return recursionLevel;
    }

    @Override
    public ConfigurationCourseNodeRow getParent() {
        return (ConfigurationCourseNodeRow)super.getParent();
    }

    /** No breadcrumb label for these rows; always returns null. */
    @Override
    public String getCrump() {
        return null;
    }

    /** Drop-down shown in the row to pick the import configuration. */
    public SingleSelection getConfigurationItem() {
        return configurationItem;
    }

    public void setConfigurationItem(SingleSelection configurationItem) {
        this.configurationItem = configurationItem;
    }

    public int getNumOfReminders() {
        return numOfReminders;
    }

    public void setNumOfReminders(int numOfReminders) {
        this.numOfReminders = numOfReminders;
    }

    public ImportCourseNode getImportCourseNode() {
        return node;
    }

    public CourseEditorTreeNode getEditorTreeNode() {
        return node.getEditorTreeNode();
    }
}
|
package com.zhcs.dao;
import java.util.List;
import java.util.Map;
import com.zhcs.entity.GridmngEntity;
//*****************************************************************************
/**
 * <p>Title: GridmngDao</p>
 * <p>Description: Grid management DAO</p>
 * <p>Copyright: Copyright (c) 2017</p>
 * <p>Company: Shenzhen Smart City Manager Information Technology Co., Ltd.</p>
 * @author Liu Xiaodong - Alter
 * @version v1.0 2017-02-23
 */
//*****************************************************************************
public interface GridmngDao extends BaseDao<GridmngEntity> {

    // Query a single record as a column/value map by primary key.
    // NOTE(review): the "2" suffix distinguishes this from BaseDao's
    // queryObject — the exact mapping lives in the MyBatis XML; confirm there.
    Map<String, Object> queryObject2(Long id);

    // Query a list of records as column/value maps, filtered by `map`.
    List<Map<String, Object>> queryList2(Map<String, Object> map);
}
|
<reponame>Prospector/Routiduct
package prospector.routiduct.init;
import net.minecraft.block.Block;
import net.minecraft.item.Item;
import net.minecraftforge.event.RegistryEvent;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import prospector.routiduct.RoutiductConstants;
/**
 * Forge event-bus subscriber that forwards item/block registry events to the
 * mod's item and block initializers.
 */
@Mod.EventBusSubscriber(modid = RoutiductConstants.MOD_ID)
public class RoutiductRegistry {

    /** Registry hook: delegates item registration to {@code RoutiductItems}. */
    @SubscribeEvent
    public static void registerItems(RegistryEvent.Register<Item> event) {
        RoutiductItems.init(event);
    }

    /** Registry hook: delegates block registration to {@code RoutiductBlocks}. */
    @SubscribeEvent
    public static void registerBlocks(RegistryEvent.Register<Block> event) {
        RoutiductBlocks.init(event);
    }
}
|
#!/bin/bash
# Runs prune/backup/push with timestamped logging, but only when the backup
# volume is actually mounted.
# NOTE: this script uses bash-only features (BASH_SOURCE arrays and the |&
# pipe), so the shebang must be bash — the original #!/bin/sh was incorrect.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]-$0}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/config.sh"

LOG_PATH='/var/log/backup/backup.log'

# Prefix every line read on stdin with a timestamp.
timestamp_lines() {
    while IFS= read -r line; do printf '%s %s\n' "$(date)" "$line"; done
}

if grep -qs "$BACKUP_VOLUME" /proc/mounts; then
    # Create the log directory BEFORE redirecting into the log file — the
    # original redirected mkdir's output into the log, and that redirection
    # failed whenever the directory did not exist yet.
    mkdir -p "$(dirname "$LOG_PATH")" && \
    echo 'Pruning...' >> "$LOG_PATH" && \
    "$DIR/prune.sh" |& timestamp_lines >> "$LOG_PATH" && \
    echo 'Backing up...' >> "$LOG_PATH" && \
    "$DIR/backup.sh" |& timestamp_lines >> "$LOG_PATH" && \
    echo 'Pushing...' >> "$LOG_PATH" && \
    "$DIR/push.sh" |& timestamp_lines >> "$LOG_PATH"
fi
|
#! /bin/bash
# Save current dir so sections below can return here after cd-ing around.
BASEDIR=${PWD}
echo ${BASEDIR}
# Pretty formatting: ANSI escape codes for colored status output.
# See http://stackoverflow.com/questions/5947742/how-to-change-the-output-color-of-echo-in-linux
GREEN='\033[0;32m'
RED='\033[0;31m'
NC='\033[0m'
# Test for installed libraries: each Python package must be importable,
# otherwise the build cannot proceed.
printf "${GREEN}Checking for libraries... \n${NC}"
names=( NumPy SciPy Cython Scikit-Learn OpenCV )
libs=( numpy scipy cython sklearn cv2 )
# Index of the last library. Uses $((...)) — the original $[...] form is
# deprecated bash arithmetic.
nlibs=$(( ${#names[@]} - 1 ))
for i in $(seq 0 ${nlibs})
do
    n=${names[$i]}
    l=${libs[$i]}
    printf "${GREEN} $n...${NC}"
    if python -c "import $l" > /dev/null
    then
        printf "${GREEN}Found. \n${NC}"
    else
        printf "${RED}\n *** Importing $n failed. Please install $n. *** \n${NC}"
        exit 1
    fi
done
# Check if libviso exists; if so, unpack it and build its python wrapper.
if [ -e libviso2.zip ]
then
    printf "${GREEN} Found libviso2.zip. Compiling python wrapper... \n${NC}"
    unzip -o libviso2.zip -d pcaflow/extern/libviso2/
    cd pcaflow/extern/libviso2/python
    # -f: tolerate a missing .so on a fresh build instead of printing an error.
    rm -f libvisomatcher.so
    # Compile
    python2 setup.py build_ext --inplace
    cd ../../
    ln -sf libviso2/python/libvisomatcher.so .
    printf "${GREEN} Finished compiling libviso2 python wrapper. \n${NC}"
else
    # Without libviso the user may fall back to A-KAZE features, or abort.
    printf "${RED}\n Could not find libviso2.zip (see readme for details).\n"
    read -p "Do you want to use PCA-Flow with A-KAZE features? (Y/n)" -n 1 uselibviso
    printf "${NC}"
    if [ "${uselibviso}" == "n" ]
    then
        printf "\n${RED}Quiting...${NC}\n"
        cd "${BASEDIR}"
        exit 1
    fi
fi
cd "${BASEDIR}"
# Check for principal components: all five .npy files must be present in data/.
printf "${GREEN}Checking for principal components... ${NC}"
if [ -e data/ ] && \
   [ -e data/COV_KITTI.npy ] && \
   [ -e data/COV_SINTEL.npy ] && \
   [ -e data/COV_SINTEL_SUBLAYER.npy ] && \
   [ -e data/PC_U.npy ] && \
   [ -e data/PC_V.npy ]
then
    printf "${GREEN}Found. \n${NC}"
else
    # Offer to download the archive; declining leaves the user to supply
    # their own principal components.
    printf "\n${RED}"
    read -p "Principal components not found. Download (filesize 484 MBytes)? (Y/n)" -n 1 downloadpc
    printf "${NC}"
    if [ "${downloadpc}" == "n" ]
    then
        printf "${RED}Continuing without download. Please provide your own principal components. \n${NC}"
    else
        printf "${GREEN}Downloading principal components... \n${NC}"
        curl http://files.is.tue.mpg.de/jwulff/pcaflow/principal_components.zip > principal_components.zip
        printf "${GREEN}Extracting principal components into data/... \n${NC}"
        mkdir -pv data/
        unzip -o principal_components.zip -d data/
        printf "${GREEN}Done.\n${NC}"
    fi
fi
# Build the bundled components: the pygco wrapper and the Cython IRLS solver.
printf "${GREEN}Building internal parts... \n${NC}"
printf "${GREEN} Building pygco... \n${NC}"
cd pcaflow/extern
cd gco_python
# -f: do not complain when the .so does not exist yet (fresh checkout).
rm -f pygco.so
make
cd ..
ln -sf gco_python/pygco.so .
cd "${BASEDIR}"
printf "${GREEN} done. \n${NC}"
printf "${GREEN} Building IRLS solver... \n${NC}"
cd pcaflow/solver/cython
rm -f RobustQuadraticSolverCython.so
# Pick the platform-specific setup script (Linux vs. OS X).
if [[ "$OSTYPE" == "linux"* ]]; then
    python2 setup_linux.py build_ext --inplace
elif [[ "$OSTYPE" == "darwin"* ]]; then
    python2 setup_osx.py build_ext --inplace
fi
cd ..
ln -fs cython/RobustQuadraticSolverCython.so .
cd "${BASEDIR}"
printf "${GREEN} done. \n${NC}"
|
<reponame>lgoldstein/communitychest<filename>chest/net/svnkit/src/main/java/net/community/chest/svnkit/core/wc/AbstractSVNLocalFileComparator.java
/*
*
*/
package net.community.chest.svnkit.core.wc;
import java.io.File;
import java.util.Comparator;
import net.community.chest.util.compare.InstancesComparator;
/**
* <P>Copyright 2009 as per GPLv2</P>
*
* @author <NAME>.
* @since Aug 6, 2009 11:33:18 AM
*/
/**
 * Comparator over {@code SVNLocalCopyData} instances that orders them by their
 * underlying {@link File}, delegating to an optional {@link Comparator}.
 */
public class AbstractSVNLocalFileComparator
        extends AbstractSVNLocalCopyDataComparator {
    /** Serialization identifier (unchanged from the original definition). */
    private static final long serialVersionUID = -6546629407119778956L;

    /** Optional delegate used to order the underlying files; may be null. */
    private final Comparator<File> _c;

    public final Comparator<File> getComparator ()
    {
        return _c;
    }

    protected AbstractSVNLocalFileComparator (Comparator<File> c, boolean reverseMatch)
    {
        super(reverseMatch);
        _c = c;
    }

    /**
     * Compares two files via the configured comparator when one exists,
     * falling back to the generic instances comparison otherwise.
     */
    public int compareFiles (File f1, File f2)
    {
        final Comparator<File> delegate = getComparator();
        return (delegate != null)
             ? delegate.compare(f1, f2)
             : InstancesComparator.compareGeneralObjects(File.class, f1, f2);
    }

    /*
     * @see net.community.chest.util.compare.AbstractComparator#compareValues(java.lang.Object, java.lang.Object)
     */
    @Override
    public int compareValues (SVNLocalCopyData v1, SVNLocalCopyData v2)
    {
        final File f1 = (v1 == null) ? null : v1.getFile();
        final File f2 = (v2 == null) ? null : v2.getFile();
        return compareFiles(f1, f2);
    }
}
|
<filename>docs/Advanced_development/zh/QuecPythonWirelessNetwork/code/datacall_base.py
# Reference: https://python.quectel.com/wiki/#/zh-cn/api/?id=datacall-%e6%95%b0%e6%8d%ae%e6%8b%a8%e5%8f%b7
import dataCall
import net
import utime as time

# Module-level flag tracking whether the data network is currently up.
g_net_status = False
def callback(args):
    """dataCall network-status callback.

    args[0]: PDP context id; args[1]: network status (1 = connected).
    """
    # Fix: without `global`, the assignments below created function-local
    # names and the module-level flag never changed.
    global g_net_status
    pdp = args[0]
    nw_sta = args[1]
    if nw_sta == 1:
        g_net_status = True
        print("*** network %d connected! ***" % pdp)
    else:
        g_net_status = False
        print("*** network %d not connected! ***" % pdp)
        # Re-dial after losing the network.
        # NOTE(review): the original's indentation was ambiguous; this call is
        # placed in the disconnect branch, where a re-dial makes sense.
        test_datacall_module()
def test_datacall_module():
    """Start a data call (dial) on profile 1 and print the resulting IP info."""
    # Fix: without `global`, the assignment below created a dead local and the
    # module-level flag never changed.
    global g_net_status
    # Dial: per the Quectel dataCall API the arguments are presumably
    # (profile, IP type, APN, user, password, auth) — confirm against firmware docs.
    ret = dataCall.start(1, 0, "3gnet.mnc001.mcc460.gprs", "", "", 0)
    if ret == 0:
        print("datacall start has success")
        g_net_status = True
    # Fetch and show the IP address and related info for profile 1.
    Info = dataCall.getInfo(1, 0)
    print(Info)
    print("test datacall has exit")
def test_datacall_callback():
    """Dial, register the status callback, then toggle airplane mode to exercise it."""
    test_datacall_module()
    # Register the network-status interrupt callback.
    ret = dataCall.setCallback(callback)
    if ret == 0x00:
        print("set Callback has success")
    net.setModemFun(4)  # enter airplane mode
    time.sleep_ms(1000)
    net.setModemFun(1)  # return to normal (full-function) mode
    print("test_datacall_callback funcation has exited")
if __name__ == "__main__":
    # Run the callback demo when executed as a script.
    test_datacall_callback()
|
#!/bin/bash
#####################################################################################
# ADS-B RECEIVER #
#####################################################################################
# #
# This script is not meant to be executed directly. #
# Instead execute install.sh to begin the installation process. #
# #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# #
# Copyright (c) 2015-2017, Joseph A. Prochazka #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy #
# of this software and associated documentation files (the "Software"), to deal #
# in the Software without restriction, including without limitation the rights #
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #
# copies of the Software, and to permit persons to whom the Software is #
# furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all #
# copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #
# SOFTWARE. #
# #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
## SOFTWARE VERSIONS
# Pinned version constants sourced by the installer scripts.
# The ADS-B Receiver Project
PROJECT_VERSION="2.6.3"
# RTL-SDR OGN
RTLSDROGN_VERSION="0.2.5"
# FlightAware PiAware
DUMP1090_FA_VERSION="3.6.3"
PIAWARE_VERSION="3.6.3"
# PlaneFinder Client
PLANEFINDER_CLIENT_VERSION_ARM="4.1.1"
PLANEFINDER_CLIENT_VERSION_I386="4.1.1"
# Flightradar24 Client
FLIGHTRADAR24_CLIENT_VERSION_I386="1.0.18-5"
# mlat-client (tag is the git ref matching the release version)
MLAT_CLIENT_VERSION="0.2.6"
MLAT_CLIENT_TAG="v0.2.6"
# PhantomJS
PHANTOMJS_VERSION="2.1.1"
|
import * as index from '../src/index';

// Smoke test: the package entry point must expose a truthy `asio` export.
test('Should have asio available', () => {
    expect(index.asio).toBeTruthy();
});
|
#!/bin/bash
# Apply a JBoss patch ($PATCH_ZIP) via jboss-cli; exits non-zero on any failure.
# Fix: the original ended with a stray unmatched `fi` and an unreachable
# `exit 1` after `exit $RESULT` — both removed.
echo "JBOSS_HOME=$JBOSS_HOME"
if [ ! -d "$JBOSS_HOME/bin" ] ; then
    >&2 echo "JBOSS_HOME/bin doesn't exist"
    exit 1
fi
# Quote and guard the cd so a bad JBOSS_HOME cannot run the CLI elsewhere.
cd "$JBOSS_HOME/bin" || exit 1
RESULT=0
./jboss-cli.sh --command="patch apply $PATCH_ZIP"
if [ $? -ne 0 ]; then RESULT=1; fi
exit $RESULT
|
<gh_stars>10-100
/*****************************************************************************
*
* PROJECT: Multi Theft Auto v1.0
* LICENSE: See LICENSE in the top level directory
* FILE: Shared/sdk/net/packetenums.h
* PURPOSE: Network packet enumerations
*
* Multi Theft Auto is available from http://www.multitheftauto.com/
*
*****************************************************************************/
#pragma once
// MTA internal packet identifiers.
// NOTE: these are wire-protocol constants — the order (and therefore the
// implicit numeric values, starting at 0) must never be changed.
enum
{
    PACKET_ID_SERVER_JOIN = 0,
    PACKET_ID_SERVER_JOIN_DATA,
    PACKET_ID_SERVER_JOIN_COMPLETE,
    PACKET_ID_PLAYER_JOIN,
    PACKET_ID_PLAYER_JOINDATA,
    PACKET_ID_PLAYER_QUIT,
    PACKET_ID_PLAYER_TIMEOUT,
    PACKET_ID_MOD_NAME,
    PACKET_ID_PACKET_PROGRESS,
    // Reserved slots keep later IDs stable across protocol revisions.
    PACKET_ID_MTA_RESERVED_03,
    PACKET_ID_MTA_RESERVED_04,
    PACKET_ID_MTA_RESERVED_05,
    PACKET_ID_MTA_RESERVED_06,
    PACKET_ID_MTA_RESERVED_07,
    PACKET_ID_MTA_RESERVED_08,
    PACKET_ID_MTA_RESERVED_09,
    PACKET_ID_MTA_RESERVED_10,
    PACKET_ID_MTA_RESERVED_11,
    PACKET_ID_MTA_RESERVED_12,
    PACKET_ID_MTA_RESERVED_13,
    PACKET_ID_MTA_RESERVED_14,
    PACKET_ID_MTA_RESERVED_15,
    PACKET_ID_END_OF_INTERNAL_PACKETS
};
// NOTE(review): RID_USER_PACKET_ENUM is not declared in the portion of the
// RID enum visible here — presumably defined further down this header; confirm.
#define RAKNET_PACKET_COUNT RID_USER_PACKET_ENUM

/// You should not edit the file MessageIdentifiers.h as it is a part of RakNet static library
/// To define your own message id, define an enum following the code example that follows.
///
/// \code
/// enum {
///    ID_MYPROJECT_MSG_1 = ID_USER_PACKET_ENUM,
///    ID_MYPROJECT_MSG_2,
///    ...
/// };
/// \endcode
///
/// \note All these enumerations should be casted to (unsigned char) before writing them to RakNet::BitStream
enum
{
//
// RESERVED TYPES - DO NOT CHANGE THESE
// All types from RakPeer
//
/// These types are never returned to the user.
/// Ping from a connected system. Update timestamps (internal use only)
RID_INTERNAL_PING,
/// Ping from an unconnected system. Reply but do not update timestamps. (internal use only)
RID_PING,
/// Ping from an unconnected system. Only reply if we have open connections. Do not update timestamps. (internal use only)
RID_PING_OPEN_CONNECTIONS,
/// Pong from a connected system. Update timestamps (internal use only)
RID_CONNECTED_PONG,
/// Asking for a new connection (internal use only)
RID_CONNECTION_REQUEST,
/// Connecting to a secured server/peer (internal use only)
RID_SECURED_CONNECTION_RESPONSE,
/// Connecting to a secured server/peer (internal use only)
RID_SECURED_CONNECTION_CONFIRMATION,
/// Packet that tells us the packet contains an integer ID to name mapping for the remote system (internal use only)
RID_RPC_MAPPING,
/// A reliable packet to detect lost connections (internal use only)
RID_DETECT_LOST_CONNECTIONS,
/// Offline message so we know when to reset and start a new connection (internal use only)
RID_OPEN_CONNECTION_REQUEST,
/// Offline message response so we know when to reset and start a new connection (internal use only)
RID_OPEN_CONNECTION_REPLY,
/// Remote procedure call (internal use only)
RID_RPC,
/// Remote procedure call reply, for RPCs that return data (internal use only)
RID_RPC_REPLY,
/// RakPeer - Same as ID_ADVERTISE_SYSTEM, but intended for internal use rather than being passed to the user. Second byte indicates type. Used currently
/// for NAT punchthrough for receiver port advertisement. See ID_NAT_ADVERTISE_RECIPIENT_PORT
RID_OUT_OF_BAND_INTERNAL,
//
// USER TYPES - DO NOT CHANGE THESE
//
/// RakPeer - In a client/server environment, our connection request to the server has been accepted.
RID_CONNECTION_REQUEST_ACCEPTED,
/// RakPeer - Sent to the player when a connection request cannot be completed due to inability to connect.
RID_CONNECTION_ATTEMPT_FAILED,
/// RakPeer - Sent a connect request to a system we are currently connected to.
RID_ALREADY_CONNECTED,
/// RakPeer - A remote system has successfully connected.
RID_NEW_INCOMING_CONNECTION,
/// RakPeer - The system we attempted to connect to is not accepting new connections.
RID_NO_FREE_INCOMING_CONNECTIONS,
/// RakPeer - The system specified in Packet::systemAddress has disconnected from us. For the client, this would mean the server has shutdown.
RID_DISCONNECTION_NOTIFICATION,
/// RakPeer - Reliable packets cannot be delivered to the system specified in Packet::systemAddress. The connection to that system has been closed.
RID_CONNECTION_LOST,
/// RakPeer - We preset an RSA public key which does not match what the system we connected to is using.
RID_RSA_PUBLIC_KEY_MISMATCH,
/// RakPeer - We are banned from the system we attempted to connect to.
RID_CONNECTION_BANNED,
/// RakPeer - The remote system is using a password and has refused our connection because we did not set the correct password.
RID_INVALID_PASSWORD,
/// RakPeer - A packet has been tampered with in transit. The sender is contained in Packet::systemAddress.
RID_MODIFIED_PACKET,
/// RakPeer - The four bytes following this byte represent an unsigned int which is automatically modified by the difference in system times between the
/// sender and the recipient. Requires that you call SetOccasionalPing.
RID_TIMESTAMP,
/// RakPeer - Pong from an unconnected system. First byte is ID_PONG, second sizeof(RakNetTime) bytes is the ping, following bytes is system specific
/// enumeration data.
RID_PONG,
/// RakPeer - Inform a remote system of our IP/Port, plus some offline data
RID_ADVERTISE_SYSTEM,
/// ConnectionGraph plugin - In a client/server environment, a client other than ourselves has disconnected gracefully. Packet::systemAddress is modified
/// to reflect the systemAddress of this client.
RID_REMOTE_DISCONNECTION_NOTIFICATION,
/// ConnectionGraph plugin - In a client/server environment, a client other than ourselves has been forcefully dropped. Packet::systemAddress is modified to
/// reflect the systemAddress of this client.
RID_REMOTE_CONNECTION_LOST,
/// ConnectionGraph plugin - In a client/server environment, a client other than ourselves has connected. Packet::systemAddress is modified to reflect the
/// systemAddress of the client that is not connected directly to us. The packet encoding is SystemAddress 1, ConnectionGraphGroupID 1, SystemAddress 2,
/// ConnectionGraphGroupID 2 ConnectionGraph2 plugin: Bytes 1-4 = count. for (count items) contains {SystemAddress, RakNetGUID}
RID_REMOTE_NEW_INCOMING_CONNECTION,
// RakPeer - Downloading a large message. Format is ID_DOWNLOAD_PROGRESS (MessageID), partCount (unsigned int), partTotal (unsigned int), partLength
// (unsigned int), first part data (length <= MAX_MTU_SIZE). See the three parameters partCount, partTotal and partLength in OnFileProgress in
// FileListTransferCBInterface.h
RID_DOWNLOAD_PROGRESS,
/// FileListTransfer plugin - Setup data
RID_FILE_LIST_TRANSFER_HEADER,
/// FileListTransfer plugin - A file
RID_FILE_LIST_TRANSFER_FILE,
// Ack for reference push, to send more of the file
RID_FILE_LIST_REFERENCE_PUSH_ACK,
/// DirectoryDeltaTransfer plugin - Request from a remote system for a download of a directory
RID_DDT_DOWNLOAD_REQUEST,
/// RakNetTransport plugin - Transport provider message, used for remote console
RID_TRANSPORT_STRING,
/// ReplicaManager plugin - Create an object
RID_REPLICA_MANAGER_CONSTRUCTION,
/// ReplicaManager plugin - Destroy an object
RID_REPLICA_MANAGER_DESTRUCTION,
/// ReplicaManager plugin - Changed scope of an object
RID_REPLICA_MANAGER_SCOPE_CHANGE,
/// ReplicaManager plugin - Serialized data of an object
RID_REPLICA_MANAGER_SERIALIZE,
/// ReplicaManager plugin - New connection, about to send all world objects
RID_REPLICA_MANAGER_DOWNLOAD_STARTED,
/// ReplicaManager plugin - Finished downloading all serialized objects
RID_REPLICA_MANAGER_DOWNLOAD_COMPLETE,
/// ConnectionGraph plugin - Request the connection graph from another system
RID_CONNECTION_GRAPH_REQUEST,
/// ConnectionGraph plugin - Reply to a connection graph download request
RID_CONNECTION_GRAPH_REPLY,
/// ConnectionGraph plugin - Update edges / nodes for a system with a connection graph
RID_CONNECTION_GRAPH_UPDATE,
/// ConnectionGraph plugin - Add a new connection to a connection graph
RID_CONNECTION_GRAPH_NEW_CONNECTION,
/// ConnectionGraph plugin - Remove a connection from a connection graph - connection was abruptly lost. Two systems addresses encoded in the data packet.
RID_CONNECTION_GRAPH_CONNECTION_LOST,
/// ConnectionGraph plugin - Remove a connection from a connection graph - connection was gracefully lost. Two systems addresses encoded in the data packet.
RID_CONNECTION_GRAPH_DISCONNECTION_NOTIFICATION,
/// Router plugin - route a message through another system
RID_ROUTE_AND_MULTICAST,
/// RakVoice plugin - Open a communication channel
RID_RAKVOICE_OPEN_CHANNEL_REQUEST,
/// RakVoice plugin - Communication channel accepted
RID_RAKVOICE_OPEN_CHANNEL_REPLY,
/// RakVoice plugin - Close a communication channel
RID_RAKVOICE_CLOSE_CHANNEL,
/// RakVoice plugin - Voice data
RID_RAKVOICE_DATA,
/// Autopatcher plugin - Get a list of files that have changed since a certain date
RID_AUTOPATCHER_GET_CHANGELIST_SINCE_DATE,
/// Autopatcher plugin - A list of files to create
RID_AUTOPATCHER_CREATION_LIST,
/// Autopatcher plugin - A list of files to delete
RID_AUTOPATCHER_DELETION_LIST,
/// Autopatcher plugin - A list of files to get patches for
RID_AUTOPATCHER_GET_PATCH,
/// Autopatcher plugin - A list of patches for a list of files
RID_AUTOPATCHER_PATCH_LIST,
/// Autopatcher plugin - Returned to the user: An error from the database repository for the autopatcher.
RID_AUTOPATCHER_REPOSITORY_FATAL_ERROR,
/// Autopatcher plugin - Finished getting all files from the autopatcher
RID_AUTOPATCHER_FINISHED_INTERNAL,
RID_AUTOPATCHER_FINISHED,
/// Autopatcher plugin - Returned to the user: You must restart the application to finish patching.
RID_AUTOPATCHER_RESTART_APPLICATION,
/// NATPunchthrough plugin: internal
RID_NAT_PUNCHTHROUGH_REQUEST,
/// NATPunchthrough plugin: internal
RID_NAT_CONNECT_AT_TIME,
/// NATPunchthrough plugin: internal
RID_NAT_GET_MOST_RECENT_PORT,
/// NATPunchthrough plugin: internal
RID_NAT_CLIENT_READY,
/// NATPunchthrough plugin: Destination system is not connected to the server. Bytes starting at offset 1 contains the RakNetGUID destination field of
/// NatPunchthroughClient::OpenNAT().
RID_NAT_TARGET_NOT_CONNECTED,
/// NATPunchthrough plugin: Destination system is not responding to the plugin messages. Possibly the plugin is not installed. Bytes starting at offset 1
/// contains the RakNetGUID destination field of NatPunchthroughClient::OpenNAT().
RID_NAT_TARGET_UNRESPONSIVE,
/// NATPunchthrough plugin: The server lost the connection to the destination system while setting up punchthrough. Possibly the plugin is not installed.
/// Bytes starting at offset 1 contains the RakNetGUID destination field of NatPunchthroughClient::OpenNAT().
RID_NAT_CONNECTION_TO_TARGET_LOST,
/// NATPunchthrough plugin: This punchthrough is already in progress. Possibly the plugin is not installed. Bytes starting at offset 1 contains the
/// RakNetGUID destination field of NatPunchthroughClient::OpenNAT().
RID_NAT_ALREADY_IN_PROGRESS,
/// NATPunchthrough plugin: This message is generated on the local system, and does not come from the network. packet::guid contains the destination field
/// of NatPunchthroughClient::OpenNAT(). Byte 1 contains 1 if you are the sender, 0 if not
RID_NAT_PUNCHTHROUGH_FAILED,
/// NATPunchthrough plugin: Punchthrough suceeded. See packet::systemAddress and packet::guid. Byte 1 contains 1 if you are the sender, 0 if not. You can
/// now use RakPeer::Connect() or other calls to communicate with this system.
RID_NAT_PUNCHTHROUGH_SUCCEEDED,
/// LightweightDatabase plugin - Query
RID_DATABASE_QUERY_REQUEST,
/// LightweightDatabase plugin - Update
RID_DATABASE_UPDATE_ROW,
/// LightweightDatabase plugin - Remove
RID_DATABASE_REMOVE_ROW,
/// LightweightDatabase plugin - A serialized table. Bytes 1+ contain the table. Pass to TableSerializer::DeserializeTable
RID_DATABASE_QUERY_REPLY,
/// LightweightDatabase plugin - Specified table not found
RID_DATABASE_UNKNOWN_TABLE,
/// LightweightDatabase plugin - Incorrect password
RID_DATABASE_INCORRECT_PASSWORD,
/// ReadyEvent plugin - Set the ready state for a particular system
/// First 4 bytes after the message contains the id
RID_READY_EVENT_SET,
/// ReadyEvent plugin - Unset the ready state for a particular system
/// First 4 bytes after the message contains the id
RID_READY_EVENT_UNSET,
/// All systems are in state ID_READY_EVENT_SET
/// First 4 bytes after the message contains the id
RID_READY_EVENT_ALL_SET,
/// \internal, do not process in your game
/// ReadyEvent plugin - Request of ready event state - used for pulling data when newly connecting
RID_READY_EVENT_QUERY,
/// Lobby packets. Second byte indicates type.
RID_LOBBY_GENERAL,
/// Auto RPC procedure call
RID_AUTO_RPC_CALL,
/// Auto RPC functionName to index mapping
RID_AUTO_RPC_REMOTE_INDEX,
/// Auto RPC functionName to index mapping, lookup failed. Will try to auto recover
RID_AUTO_RPC_UNKNOWN_REMOTE_INDEX,
/// Auto RPC error code
/// See AutoRPC.h for codes, stored in packet->data[1]
RID_RPC_REMOTE_ERROR,
/// FileListTransfer transferring large files in chunks that are read only when needed, to save memory
RID_FILE_LIST_REFERENCE_PUSH,
/// Force the ready event to all set
RID_READY_EVENT_FORCE_ALL_SET,
/// Rooms function
RID_ROOMS_EXECUTE_FUNC,
RID_ROOMS_LOGON_STATUS,
RID_ROOMS_HANDLE_CHANGE,
/// Lobby2 message
RID_LOBBY2_SEND_MESSAGE,
RID_LOBBY2_SERVER_ERROR,
// RAKNET_PROTOCOL_VERSION in RakNetVersion.h does not match on the remote system what we have on our system
// This means the two systems cannot communicate.
// The 2nd byte of the message contains the value of RAKNET_PROTOCOL_VERSION for the remote system
RID_INCOMPATIBLE_PROTOCOL_VERSION,
/// \internal For FullyConnectedMesh2 plugin
RID_FCM2_ELAPSED_RUNTIME,
/// Returned by FullyConnectedMesh2 to tell us of a new host. New host is contained in Packet::systemAddress and Packet::guid
RID_FCM2_NEW_HOST,
/// UDP proxy messages. Second byte indicates type.
RID_UDP_PROXY_GENERAL,
// For the user to use. Start your first enumeration at this value.
RID_USER_PACKET_ENUM,
//-------------------------------------------------------------------------------------------------------------
};
|
<gh_stars>0
// Mandelbrot emits a PNG image of the Mandelbrot fractal.
package main
import (
	"fmt"
	"image"
	"image/color"
	"image/png"
	"math/cmplx"
	"os"
)
// main renders the Mandelbrot set over [-2,2]x[-2,2] into a 1024x1024
// image and writes it as PNG to stdout.
func main() {
	const (
		xmin, ymin, xmax, ymax = -2, -2, +2, +2
		width, height          = 1024, 1024
	)
	img := image.NewRGBA(image.Rect(0, 0, width, height))
	for py := 0; py < height; py++ {
		// Map the pixel row onto the imaginary axis.
		y := float64(py)/height*(ymax-ymin) + ymin
		for px := 0; px < width; px++ {
			// Map the pixel column onto the real axis.
			x := float64(px)/width*(xmax-xmin) + xmin
			// Image point (px, py) represents complex value z.
			z := complex(x, y)
			img.Set(px, py, mandelbrot(z))
		}
	}
	// Report encoding failures instead of silently discarding them
	// (the original ignored the error by design; a truncated pipe or
	// closed stdout now yields a non-zero exit).
	if err := png.Encode(os.Stdout, img); err != nil {
		fmt.Fprintln(os.Stderr, "mandelbrot:", err)
		os.Exit(1)
	}
}
// mandelbrot returns the color for complex plane point z: a palette color
// keyed to the iteration at which the orbit escapes, or black when the
// orbit stays bounded for all iterations (treated as inside the set).
//
// The unused `const contrast = 15` from the original has been removed
// (dead code; nothing referenced it).
func mandelbrot(z complex128) color.Color {
	const iterations = 200
	var v complex128
	for n := uint8(0); n < iterations; n++ {
		v = v*v + z
		// |v| > 2 guarantees divergence; color by escape speed.
		if cmplx.Abs(v) > 2 {
			return getColor(n)
		}
	}
	return color.Black
}
// getColor maps an escape iteration count onto a fixed 16-entry gradient
// (dark browns -> blues -> yellows -> browns), cycling via n mod 16.
func getColor(n uint8) color.Color {
	paletted := [16]color.Color{
		color.RGBA{66, 30, 15, 255},    // # brown 3
		color.RGBA{25, 7, 26, 255},     // # dark violett
		color.RGBA{9, 1, 47, 255},      //# darkest blue
		color.RGBA{4, 4, 73, 255},      //# blue 5
		color.RGBA{0, 7, 100, 255},     //# blue 4
		color.RGBA{12, 44, 138, 255},   //# blue 3
		color.RGBA{24, 82, 177, 255},   //# blue 2
		color.RGBA{57, 125, 209, 255},  //# blue 1
		color.RGBA{134, 181, 229, 255}, // # blue 0
		color.RGBA{211, 236, 248, 255}, // # lightest blue
		color.RGBA{241, 233, 191, 255}, // # lightest yellow
		color.RGBA{248, 201, 95, 255},  // # light yellow
		color.RGBA{255, 170, 0, 255},   // # dirty yellow
		color.RGBA{204, 128, 0, 255},   // # brown 0
		color.RGBA{153, 87, 0, 255},    // # brown 1
		color.RGBA{106, 52, 3, 255},    // # brown 2
	}
	return paletted[n%16]
} |
import sys
data_dict = {}
# Parse command line args
for i in range(len(sys.argv)):
if i == 0:
continue
args = sys.argv[i].split(":")
key = args[0]
value = args[1]
data_dict[key] = value
print(data_dict) |
package main
import (
"net/http"
"google.golang.org/appengine"
"github.com/sand8080/d-data-transfer/internal/validator"
)
// main wires up the HTTP endpoints and hands control to App Engine.
func main() {
	schemaProvider := validator.NewSchemaProvider()

	// Register the "add events" endpoint.
	const addEventsURL = "/api/v1/events/add"
	handler := AddEventsHandler(addEventsURL, schemaProvider)
	http.HandleFunc(addEventsURL, handler.Handle)

	// Endpoints kept for reference:
	//http.HandleFunc("/push", push)
	//http.HandleFunc("/initDataset", initDataset)

	appengine.Main()
}
|
package com.example.thinha.newyorktimesarticlesearch.model;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.annotations.SerializedName;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.Serializable;
import java.util.ArrayList;
/**
* Created by ThiNha on 10/20/2016.
*/
/**
 * A single New York Times article from the Article Search API response,
 * deserialized via Gson.
 *
 * Created by ThiNha on 10/20/2016.
 */
public class Article implements Serializable {

    @SerializedName("web_url")
    String webUrl;

    @SerializedName("snippet")
    String headLine;

    @SerializedName("multimedia")
    JsonArray multimedia;

    public JsonArray getMultimedia() {
        return multimedia;
    }

    /**
     * Returns the absolute URL of the first multimedia entry's thumbnail,
     * or an empty string when the article has no usable multimedia.
     */
    public String getThumbNail() {
        // Guard against a missing "multimedia" field as well as an empty
        // array; the original only checked size() and would NPE when Gson
        // left the field null.
        if (multimedia == null || multimedia.size() == 0) {
            return "";
        }
        return "http://www.nytimes.com/" + multimedia.get(0).getAsJsonObject().get("url").getAsString();
    }

    public String getHeadLine() {
        return headLine;
    }

    public String getWebUrl() {
        return webUrl;
    }

    /**
     * Builds an Article from a raw Gson JsonObject.
     * NOTE(review): marked "Dont need" in the original; kept unchanged for
     * backward compatibility with any callers.
     */
    public Article(JsonObject jsonObject)
    {
        Gson gson = new Gson();
        Article article = gson.fromJson(jsonObject, Article.class);
        this.webUrl = article.getWebUrl();
        this.headLine = article.getHeadLine();
        this.multimedia = article.getMultimedia();
    }

    /** Deserializes a JsonArray of article objects into a list. */
    public static ArrayList<Article> fromJsonArray(JsonArray array)
    {
        ArrayList<Article> results = new ArrayList<>();
        Gson gson = new Gson();
        for (int i = 0; i < array.size(); i++)
        {
            results.add(gson.fromJson(array.get(i), Article.class));
        }
        return results;
    }
}
|
<filename>web/src/routes/protected.tsx
import React from "react";
import { Navigate, Outlet } from "react-router-dom";
// const { DiscussionsRoutes } = lazyImport(
// () => import('@/features/discussions'),
// 'DiscussionsRoutes'
// );
// const { Dashboard } = lazyImport(() => import('@/features/misc'), 'Dashboard');
// const { Profile } = lazyImport(() => import('@/features/users'), 'Profile');
// const { Users } = lazyImport(() => import('@/features/users'), 'Users');
// Shell component for the protected area; child routes render via Outlet.
const App = () => (
  // <MainLayout>
  <Outlet />
  // </MainLayout>
);
// Route table for authenticated users, all mounted under the /app prefix.
const protectedRoutes = [
  {
    path: "/app",
    element: <App />,
    children: [
      // { path: '/discussions/*', element: <DiscussionsRoutes /> },
      // { path: '/users', element: <Users /> },
      // { path: '/profile', element: <Profile /> },
      // { path: '/', element: <Dashboard /> },
      // Fallback: redirect any unknown child path back to the index route.
      { path: "*", element: <Navigate to="." /> },
    ],
  },
];

export default protectedRoutes;
|
#!/usr/bin/env bash
#
# Run the ivonet/hexo container with the local blog/ and scripts/
# directories mounted. Pass any argument to drop into a bash shell
# inside the container instead of starting hexo.

NAME=hexo
PORT=8080

# Quote and default-expand $1: the original unquoted `[ ! -z $1 ]`
# misbehaves on empty or whitespace-containing arguments (and errors
# under `set -u`).
if [ -n "${1:-}" ]; then
    EP="--entrypoint bash"
fi

# $EP is intentionally left unquoted so that it word-splits into
# "--entrypoint" "bash" (or expands to nothing when unset).
docker run \
    -it \
    --rm \
    --name "$NAME" \
    -v "$(pwd)/blog:/blog" \
    -v "$(pwd)/scripts:/scripts" \
    -p "${PORT}:4000" \
    $EP \
    "ivonet/$NAME"
|
#!/bin/bash
#
# Copyright (c) 2020-2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# CI build of libcudf and the cudf jar. Expects $WORKSPACE to point at the
# repository checkout; exits on the first failing command (-e) and traces (-x).
set -ex

gcc --version

# Build knobs, all overridable from the environment.
SKIP_JAVA_TESTS=${SKIP_JAVA_TESTS:-true}
BUILD_CPP_TESTS=${BUILD_CPP_TESTS:-OFF}
ENABLE_CUDA_STATIC_RUNTIME=${ENABLE_CUDA_STATIC_RUNTIME:-ON}
ENABLE_PTDS=${ENABLE_PTDS:-ON}
RMM_LOGGING_LEVEL=${RMM_LOGGING_LEVEL:-OFF}
ENABLE_NVTX=${ENABLE_NVTX:-ON}
ENABLE_GDS=${ENABLE_GDS:-OFF}
OUT=${OUT:-out}
CMAKE_GENERATOR=${CMAKE_GENERATOR:-Ninja}

SIGN_FILE=$1
#Set absolute path for OUT_PATH
OUT_PATH="$WORKSPACE/$OUT"

# set on Jenkins parameter
# (fixed label: the original printed "ENABLED_PTDS" for $ENABLE_PTDS)
echo "SIGN_FILE: $SIGN_FILE,\
SKIP_JAVA_TESTS: $SKIP_JAVA_TESTS,\
BUILD_CPP_TESTS: $BUILD_CPP_TESTS,\
ENABLE_CUDA_STATIC_RUNTIME: $ENABLE_CUDA_STATIC_RUNTIME,\
ENABLE_PTDS: $ENABLE_PTDS,\
ENABLE_NVTX: $ENABLE_NVTX,\
ENABLE_GDS: $ENABLE_GDS,\
RMM_LOGGING_LEVEL: $RMM_LOGGING_LEVEL,\
OUT_PATH: $OUT_PATH"

INSTALL_PREFIX=/usr/local/rapids
export GIT_COMMITTER_NAME="ci"
export GIT_COMMITTER_EMAIL="ci@nvidia.com"
export CUDACXX=/usr/local/cuda/bin/nvcc
export LIBCUDF_KERNEL_CACHE_PATH=/rapids

###### Build libcudf ######
rm -rf "$WORKSPACE/cpp/build"
mkdir -p "$WORKSPACE/cpp/build"
cd "$WORKSPACE/cpp/build"
cmake .. -G"${CMAKE_GENERATOR}" \
         -DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX \
         -DUSE_NVTX=$ENABLE_NVTX \
         -DCUDF_USE_ARROW_STATIC=ON \
         -DCUDF_ENABLE_ARROW_S3=OFF \
         -DBUILD_TESTS=$BUILD_CPP_TESTS \
         -DPER_THREAD_DEFAULT_STREAM=$ENABLE_PTDS \
         -DRMM_LOGGING_LEVEL=$RMM_LOGGING_LEVEL \
         -DBUILD_SHARED_LIBS=OFF

if [[ -z "${PARALLEL_LEVEL}" ]]; then
    cmake --build .
else
    cmake --build . --parallel $PARALLEL_LEVEL
fi
cmake --install .

###### Build cudf jar ######
BUILD_ARG="-Dmaven.repo.local=\"$WORKSPACE/.m2\"\
 -DskipTests=$SKIP_JAVA_TESTS\
 -DPER_THREAD_DEFAULT_STREAM=$ENABLE_PTDS\
 -DCUDA_STATIC_RUNTIME=$ENABLE_CUDA_STATIC_RUNTIME\
 -DCUDF_JNI_LIBCUDF_STATIC=ON\
 -DRMM_LOGGING_LEVEL=$RMM_LOGGING_LEVEL\
 -DUSE_GDS=$ENABLE_GDS -Dtest=*,!CuFileTest"

if [ "$SIGN_FILE" == true ]; then
    # Build javadoc and sources only when SIGN_FILE is true
    BUILD_ARG="$BUILD_ARG -Prelease"
fi

if [ -f "$WORKSPACE/java/ci/settings.xml" ]; then
    # Build with an internal settings.xml
    BUILD_ARG="$BUILD_ARG -s \"$WORKSPACE/java/ci/settings.xml\""
fi

cd "$WORKSPACE/java"
mvn -B clean package $BUILD_ARG

###### Sanity test: fail if static cudart found ######
# NOTE(review): this greps the .so symbol tables for cuInit (a CUDA *driver*
# API symbol) while the message talks about the CUDA *runtime* — confirm
# that cuInit is really the intended marker symbol.
find . -name '*.so' | xargs -I{} readelf -Ws {} | grep cuInit && echo "Found statically linked CUDA runtime, this is currently not tested" && exit 1

###### Stash Jar files ######
rm -rf $OUT_PATH
mkdir -p $OUT_PATH
cp -f target/*.jar $OUT_PATH
|
import { assert, expect } from 'chai';
import { isDocument, isDocumentArray } from '../../src/typeguards';
import { UserRefModel } from '../models/userRefs';
/**
* Function to pass into describe
* ->Important: you need to always bind this
* @example
* ```
* import { suite as TypeguardsTest } from './typeguards.test'
* ...
* describe('Type guards', TypeguardsTest.bind(this));
* ...
* ```
*/
export function suite() {
  // NOTE(review): populate(...).execPopulate() throughout this suite is the
  // Mongoose 5.x API; execPopulate() was removed in Mongoose 6 where
  // populate() returns a promise directly — confirm the pinned mongoose
  // version before upgrading.
  it('should guarantee array of document types', async () => {
    const UserMaster = await UserRefModel.create({
      name: 'master'
    });
    const UserSub = await UserRefModel.create({
      name: 'sub'
    });
    UserMaster.subAccounts.push(UserSub._id);
    await UserMaster.populate('subAccounts').execPopulate();
    // After population the ref array should narrow to full documents.
    if (isDocumentArray(UserMaster.subAccounts)) {
      expect(UserMaster.subAccounts).to.have.lengthOf(1);
      for (const doc of UserMaster.subAccounts) {
        expect(doc.name).to.be.equal('sub');
        expect(doc.name).to.not.be.equal('other');
      }
    } else {
      assert.fail('"UserMaster.subAccounts" is not populated!');
    }
  });
  it('should guarantee single document type', async () => {
    const UserMaster = await UserRefModel.create({
      name: 'master'
    });
    const UserSub = await UserRefModel.create({
      name: 'sub'
    });
    UserSub.master = UserMaster._id;
    await UserSub.populate('master').execPopulate();
    // A populated single ref should narrow to a full document.
    if (isDocument(UserSub.master)) {
      expect(UserSub.master.name).to.be.equal('master');
      expect(UserSub.master.name).to.not.be.equal('other');
    } else {
      assert.fail('"UserSub.master" is not populated!');
    }
  });
  it('should detect if array of refs is not populated', async () => {
    const UserMaster = await UserRefModel.create({
      name: 'master'
    });
    const UserSub = await UserRefModel.create({
      name: 'sub'
    });
    UserMaster.subAccounts.push(UserSub._id);
    // No populate() call here: the guard must report "not populated".
    if (!isDocumentArray(UserMaster.subAccounts)) {
      expect(UserMaster.subAccounts).to.have.lengthOf(1);
      for (const doc of UserMaster.subAccounts) {
        // Raw ObjectIds carry no document fields.
        expect(doc).to.not.have.property('name');
      }
    } else {
      assert.fail('"UserMaster.subAccounts" is populated where it should not!');
    }
  });
  it('should detect if ref is not populated', async () => {
    const UserMaster = await UserRefModel.create({
      name: 'master'
    });
    const UserSub = await UserRefModel.create({
      name: 'sub'
    });
    UserSub.master = UserMaster._id;
    // No populate() call here: the guard must report "not populated".
    if (!isDocument(UserSub.master)) {
      expect(UserSub.master).to.not.have.property('name');
    } else {
      assert.fail('"UserSub.master" is populated where it should not!');
    }
  });
  it('should handle recursive populations - multiple populates', async () => {
    // Chain: User3 -> User2 -> User1, populated one level at a time.
    const User1 = await UserRefModel.create({
      name: '1'
    });
    const User2 = await UserRefModel.create({
      name: '2',
      master: User1._id
    });
    const User3 = await UserRefModel.create({
      name: '3',
      master: User2._id
    });
    await User3.populate('master').execPopulate();
    if (isDocument(User3.master)) {
      // User3.master === User2
      await User3.master.populate('master').execPopulate();
      if (isDocument(User3.master.master)) {
        // User3.master.master === User1
        expect(User3.master.master.name).to.be.equal(User1.name);
      } else {
        assert.fail('User3.master.master should be populated!');
      }
    } else {
      assert.fail('User3.master should be populated!');
    }
    await User3.populate({
      path: 'master',
      populate: {
        path: 'master'
      }
    }).execPopulate();
  });
  it('should handle recursive populations - single populate', async () => {
    // Same chain as above, but populated in one nested populate() call.
    const User1 = await UserRefModel.create({
      name: '1'
    });
    const User2 = await UserRefModel.create({
      name: '2',
      master: User1._id
    });
    const User3 = await UserRefModel.create({
      name: '3',
      master: User2._id
    });
    await User3.populate({
      path: 'master',
      populate: {
        path: 'master'
      }
    }).execPopulate();
    if (isDocument(User3.master) && isDocument(User3.master.master)) {
      // User3.master === User2 && User3.master.master === User1
      expect(User3.master.name).to.be.equal(User2.name);
      expect(User3.master.master.name).to.be.equal(User1.name);
    } else {
      assert.fail('"User3" should be deep populated!');
    }
  });
}
|
<reponame>johnnyrayalt/java-codereview-week-3
package dao;
import models.TeamMember;
import java.util.List;
/** Data-access contract for {@link TeamMember} persistence. */
public interface TeamMemberDao {

    /** Persists a new team member. */
    void add(TeamMember name);

    /**
     * Returns the member with the given id.
     * (Behavior when no such member exists is implementation-defined —
     * not visible from this interface.)
     */
    TeamMember findById(int id);

    /** Returns every stored team member. */
    List<TeamMember> getAll();

    /** Returns all members belonging to the team with the given id. */
    List<TeamMember> getAllMembersByTeamId(int teamId);

    /** Updates the name and team assignment of the member with the given id. */
    void update(int id, String name, int teamId);

    /** Removes the member with the given id. */
    void deleteById(int id);

    /** Removes all team members. */
    void clearAllTeamMembers();
}
|
#!/bin/bash
# CI driver for smrtflow: sets up a scratch Postgres instance and a Conda
# environment, runs the Scala build/tests, then (further below) installs the
# PacBio Python stack. Assumes PGDATA/PGPORT/WORKSPACE-style variables are
# provided by the CI environment — `set -o nounset` below would abort
# otherwise; TODO confirm which job exports them.
set -o errexit
set -o pipefail
set -o nounset
set -o xtrace

# Absolute location of this script and of the repository root (two up).
__dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
SMRTFLOW_ROOT_DIR=$(readlink -f "${__dir}/../..")

PB_TEST_DATA_FILES=$(readlink -f repos/PacBioTestData/data/files.json)
export PB_TEST_DATA_FILES="${PB_TEST_DATA_FILES}"

SMRT_PIPELINE_BUNDLE_DIR=$(readlink -f repos/pbpipeline-resources)
export SMRT_PIPELINE_BUNDLE_DIR="${SMRT_PIPELINE_BUNDLE_DIR}"

# Environment-modules toolchain used by the build.
source /mnt/software/Modules/current/init/bash
module load jdk/1.8.0_144 sbt postgresql blasr ngmlr

ROOT_REPOS="${SMRTFLOW_ROOT_DIR}/repos"
PSQL_LOG="${SMRTFLOW_ROOT_DIR}/postgresql.log"

# We currently have a pretty poor model for cleaning up tmp files within the scala code
# set this globablly so all tests will run with this configuration
TDIR="$(pwd)/tmp"
export TMP="${TDIR}"

# Cleanup from any previous build (if exists)
rm -rf "$TDIR"
mkdir "$TDIR"

# Validate JSON files within the root directory. Note any dirs that are added to the root will be processed.
make jsontest

# cleanup from previous run if necessary
rm -rf "$PGDATA" && mkdir -p "$PGDATA"
rm -rf "${PSQL_LOG}"

# postgres initialization: fresh cluster, patched to listen on $PGPORT.
initdb
perl -pi.orig -e "s/#port\s*=\s*(\d+)/port = $PGPORT/" "$PGDATA/postgresql.conf"
pg_ctl -w -l "${PSQL_LOG}" start
createdb smrtlinkdb
psql -d smrtlinkdb < "${SMRTFLOW_ROOT_DIR}/extras/db-init.sql"
psql -d smrtlinkdb < "${SMRTFLOW_ROOT_DIR}/extras/test-db-init.sql"
export SMRTFLOW_DB_PORT=$PGPORT
export SMRTFLOW_TEST_DB_PORT=$PGPORT

# MK. Disabling nexus publishing. I don't believe we're using the artifacts anywhere. Add "publish" here to push to nexus.
sbt -no-colors compile scalafmt::test test

#https://github.com/conda/conda/issues/3200 This appears to be fixed in 4.4.0
# Conda's activation scripts reference unset variables, so relax nounset.
set +o nounset
echo "Loading Conda GNU module"
module load anaconda
echo "Building Conda Env"

# This only really needs to be done once
conda config --add channels defaults
conda config --add channels conda-forge
conda config --add channels bioconda

# This needs to be host specific?
env_name="testenv01"
echo "Setting up new env '${env_name}'"

# Temporarily tolerate a non-zero exit: grep returns 1 when the env is absent.
set +e
conda info --envs | grep "${env_name}"
env_status=$?
set -e

if [[ "${env_status}" -eq 0 ]]; then
    echo "Env ${env_name} was already created. Using cached env"
else
    echo "Creating ${env_name}"
    conda create --quiet --yes -n "${env_name}" numpy cython matplotlib scipy
fi

source activate "${env_name}"
which python
conda install --quiet --yes -c bioconda pysam=0.11.2.2
conda install --quiet --yes -c bioconda ngmlr
# Uninstall a pip package, but only if it is currently installed, so that
# `pip uninstall` never fails the script for an absent package.
function uninstall_pkg () {
    local pkg=$1
    local count
    # Anchor the match ("^pkg==") so e.g. "pbcommand" does not also match
    # "pbcommand-extras", and quote the expansion. `|| true` keeps a
    # no-match grep (exit 1) from killing the script under
    # errexit+pipefail — the original unguarded pipeline would abort here
    # whenever the package was not installed.
    # NOTE(review): editable ("-e ...") installs are not matched by this
    # pattern; the packages here are installed non-editable, so that
    # should not occur — confirm if install style changes.
    count=$(pip freeze | grep -c "^${pkg}==" || true)
    if [[ "$count" -ne 0 ]] ; then
        pip uninstall -y "${pkg}"
    fi
}
# Install all PB py dependencies
# Pattern per package: install pinned requirements, drop any stale copy of
# the package itself, then install the local checkout.
pip install -r "${ROOT_REPOS}/pbcommand/REQUIREMENTS.txt"
uninstall_pkg pbcommand
pip install "${ROOT_REPOS}/pbcommand"

pip install -r "${ROOT_REPOS}/pbcore/requirements.txt"
uninstall_pkg pbcore
pip install "${ROOT_REPOS}/pbcore"

pip install -r "${ROOT_REPOS}/pbcoretools/requirements.txt"
uninstall_pkg pbcoretools
pip install "${ROOT_REPOS}/pbcoretools"

pip install -r "${ROOT_REPOS}/pbreports/REQUIREMENTS.txt"
uninstall_pkg pbreports
pip install "${ROOT_REPOS}/pbreports"

pip install -r "${ROOT_REPOS}/pbsmrtpipe/REQUIREMENTS.txt"
uninstall_pkg pbsmrtpipe
pip install "${ROOT_REPOS}/pbsmrtpipe"

cd "${SMRTFLOW_ROOT_DIR}"

# Sanity test
which python
echo "Printing PacBio core python package versions"
# Python 2 print statements below: the Conda env built above is a py2 stack.
python -c 'import pbcommand; print pbcommand.get_version()'
python -c 'import pbcore; print pbcore.__VERSION__'
python -c 'import pbsmrtpipe; print pbsmrtpipe.get_version()'
python -c 'import pbcoretools; print pbcoretools.__VERSION__'
dataset --help
pbsmrtpipe --help
python -m pbreports.report.mapping_stats --help

make test-int
make test
|
# Provisioning script: media toolchain (ffmpeg, imagemagick, FFMBC) plus
# general development packages via Homebrew.

# New way to install all ffmpeg options
# NOTE(review): "--with-libaca" looks like a typo for "--with-libcaca" —
# verify against the ffmpeg formula's option list.
brew install -v ffmpeg --with-fdk-aac --with-ffplay --with-freetype --with-frei0r --with-libass --with-libaca --with-libvo-aacenc --with-libvorbis --with-libvpx --with-openjpeg --with-openssl --with-schroedinger --with-speex --with-theora --with-tools

# New way to install imagemagick with all options
brew install -v imagemagick --with-fontconfig --with-jasper --with-liblqr --with-librsvg --with-libtiff --with-libwmf --with-little-cms --with-openexr --with-quantum-depth-32 --with-webp --with-x11

### Start FFMBC install
# Helpful instructions here: http://www.movieeditor.com/2012/01/26/building-ffmbcffmpeg-on-mac-os-x-lion/
# FFMBC dependencies - Some of these may be covered by new ffmpeg install
brew install -v frei0r libdc1394 dirac

# NOTE(review): Google Code was shut down, so this ffmbc.googlecode.com URL
# is almost certainly dead — confirm the project's current download location.
wget https://ffmbc.googlecode.com/files/FFmbc-0.7-rc8.tar.bz2
tar -xvjf FFmbc-0.7-rc8.tar.bz2
cd FFmbc-0.7-rc8/
./configure --enable-gpl --enable-nonfree --enable-shared --enable-postproc --enable-runtime-cpudetect --enable-frei0r --enable-libdc1394 --enable-libdirac --enable-libfaac --enable-libmp3lame --enable-libopenjpeg --enable-libschroedinger --enable-libtheora --enable-libvorbis --enable-libvpx --enable-libx264 --enable-pthreads --enable-libxvid --enable-zlib
# NOTE(review): no "make && make install" follows configure, so FFMBC is
# configured but never built here — confirm whether that step lives elsewhere.

# Install qt, python, python3, pyqt, boost, cmake, hdf5, ilmbase, openexr, alembic, htop
brew install -v qt
brew install -v python
brew install -v python3
brew install -v pyqt
brew install -v boost
brew install -v cmake
brew install -v hdf5
brew install -v ilmbase
brew install -v openexr
brew install -v alembic
brew install -v htop

# Update python3 setuptools and pip
sudo pip3 install --upgrade setuptools
sudo pip3 install --upgrade pip
|
<gh_stars>0
package ru.job4j.tracker;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import ru.job4j.tracker.input.Input;
import ru.job4j.tracker.input.StubInput;
import ru.job4j.tracker.model.Item;
import ru.job4j.tracker.useraction.DeleteItem;
import ru.job4j.tracker.useraction.FindItemById;
import ru.job4j.tracker.useraction.FindItemsByName;
import ru.job4j.tracker.useraction.UpdateItem;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.function.Consumer;
import java.util.function.DoubleBinaryOperator;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class StartUITest {
// поле содержит дефолтный вывод в консоль.
private final PrintStream stdout = System.out;
// буфер для результата.
private final ByteArrayOutputStream out = new ByteArrayOutputStream();
private static String LINE_SEPARATOR = System.lineSeparator();
private final Consumer<String> output = new Consumer<String>() {
private final PrintStream stdout = new PrintStream(out);
@Override
public void accept(String s) {
stdout.println(s);
}
@Override
public String toString() {
return out.toString();
}
};
    /** Redirects System.out into the in-memory buffer before each test. */
    @Before
    public void loadOutput() {
        System.out.println("execute before method");
        System.setOut(new PrintStream(this.out));
    }
    /** Restores the original System.out after each test. */
    @After
    public void backOutput() {
        System.setOut(this.stdout);
        System.out.println("execute after method");
    }
// @Test
// public void whenUserAddItemThenTrackerHasNewItemWithSameName() {
// Tracker tracker = new Tracker(); // создаём Tracker
// Input input = new StubInput(new String[]{"1", "test name", "desc", "6"}); //создаём StubInput с последовательностью действий
// new StartUI(input, tracker).init(); // создаём StartUI и вызываем метод init()
// assertThat(tracker.findAll().get(0).getName(), is("test name")); // проверяем, что нулевой элемент массива в трекере содержит имя, введённое при эмуляции.
// }
    /**
     * NOTE(review): the StartUI run and the assertion are commented out, so
     * this test currently only verifies that add() does not throw.
     */
    @Test
    public void whenUpdateThenTrackerHasUpdatedValue() {
        // Create the Tracker.
        Tracker tracker = new Tracker();
        // Add an item directly.
        Item item = tracker.add(new Item("test name", "desc"));
        // Create a StubInput with the action sequence (replace the item).
        // Input input = new StubInput(new String[]{"2", item.getId(), "test replace", "заменили заявку", "6"});
        // Create StartUI and call init().
        // new StartUI(input, tracker, output).init();
        // Verify the stored item now carries the name entered in the emulation.
        // assertThat(tracker.findById(item.getId()).getName(), is("test replace"));
    }
    /** NOTE(review): body fully commented out — currently a no-op placeholder. */
    @Test
    public void whenUpdateItemActionTest() {
        // Create the Tracker.
        // Tracker tracker = new Tracker();
        // //Напрямую добавляем заявку
        // Item item = tracker.add(new Item("test name", "desc"));
        //
        // String replacedName = "test replace name";
        // String replacedDesc = "test replace desc";
        //
        // Input input = mock(Input.class);
        //
        // //создаём mock объект с эмуляцией действий по запросу
        //// when(input.ask("Enter id of the item to edit")).thenReturn(item.getId());
        // when(input.ask("Enter the new name of the item")).thenReturn(replacedName);
        // when(input.ask("Enter the new description of the item")).thenReturn(replacedDesc);
        //
        // // создаём StartUI и вызываем метод init()
        // UpdateItem updateItem = new UpdateItem(2, "Edit item");
        // updateItem.execute(input, tracker, output);
        // assertThat(tracker.findById(item.getId()).getName(), is(replacedName));
        // assertThat(tracker.findById(item.getId()).getDesc(), is(replacedDesc));
    }
    /**
     * NOTE(review): the StartUI deletion steps are commented out, so the item
     * added below is never removed — the size-0 assertion looks like it will
     * fail; verify against Tracker before relying on this test.
     */
    @Test
    public void whenDeleteItemThenTrackerHasntTheValue() {
        Tracker tracker = new Tracker();
        Item item = tracker.add(new Item("test name", "desc"));
        // Input input = new StubInput(new String[]{"3", item.getId(), "6"});
        // new StartUI(input, tracker, output).init();
        assertThat(tracker.findAll().size(), is(0));
    }
    /** NOTE(review): body fully commented out — currently a no-op placeholder. */
    @Test
    public void whenDeleteItemActionTest() {
        // Tracker tracker = new Tracker();
        // Item item = tracker.add(new Item("test name", "desc"));
        // Input input = mock(Input.class);
        // //создаём mock объект с эмуляцией действий по запросу
        //// when(input.ask("Enter id of the item to delete")).thenReturn(item.getId());
        //
        // DeleteItem deleteItem = new DeleteItem(3, "Delete item");
        // deleteItem.execute(input, tracker, output);
        //
        // assertThat(tracker.findAll().size(), is(0));
    }
    /** NOTE(review): body fully commented out — currently a no-op placeholder. */
    @Test
    public void whenShowMenuThenShowMenu() {
        // Tracker tracker = new Tracker();
        // HbnTracker hbnTracker = new HbnTracker();
        // Input input = new StubInput(new String[]{"6"});
        // StartUI startUI = new StartUI(input, tracker, output);
        // startUI.init();
        // assertThat(
        //// new String(out.toByteArray()),
        // this.output.toString(),
        // is(
        // new StringBuilder()
        // .append(showMenu())
        // .toString()
        // )
        // );
    }
    /** NOTE(review): body fully commented out — currently a no-op placeholder. */
    @Test
    public void whenFindAllItemThenShowAll() {
        // Tracker tracker = new Tracker();
        // Item item1 = tracker.add(new Item("test name", "desc"));
        // Item item2 = tracker.add(new Item("test name", "desc"));
        // Input input = new StubInput(new String[]{"1", "6"});
        // StartUI startUI = new StartUI(input, tracker, output);
        // startUI.init();
        // assertThat(
        //// new String(out.toByteArray()),
        // this.output.toString(),
        // is(
        // new StringBuilder()
        // .append(showMenu())
        // .append("------------ Все созданые заявки --------------" + LINE_SEPARATOR)
        // .append(item1.toString() + LINE_SEPARATOR)
        // .append(item2.toString() + LINE_SEPARATOR)
        // .append(showMenu())
        // .toString()
        // )
        // );
    }
@Test
public void whenFindItemByNameThenShowItems() {
    // Intended to verify that searching by name (option "5") prints the
    // matching items between two menu printouts. Disabled: commented out.
    // Tracker tracker = new Tracker();
    // Item item1 = tracker.add(new Item("test name", "desc"));
    // Item item2 = tracker.add(new Item("test name", "desc"));
    // Input input = new StubInput(new String[]{"5", item1.getName(), "6"});
    // StartUI startUI = new StartUI(input, tracker, output);
    // startUI.init();
    // assertThat(
    //// new String(out.toByteArray()),
    // this.output.toString(),
    // is(
    // new StringBuilder()
    // .append(showMenu())
    // .append(item1.toString() + LINE_SEPARATOR)
    // .append(item2.toString() + LINE_SEPARATOR)
    // .append(showMenu())
    // .toString()
    // )
    // );
}
@Test
public void whenFindItemByNameActionTest() {
    // Intended to verify the FindItemsByName action with a mocked Input.
    // Disabled: the body is commented out, so the test passes vacuously.
    // Tracker tracker = new Tracker();
    // String testName = "test name";
    // Item item1 = tracker.add(new Item(testName, "desc"));
    // Item item2 = tracker.add(new Item(testName, "desc"));
    // Input input = mock(Input.class);
    // when(input.ask("Enter name of the item to find")).thenReturn(testName);
    //
    // new FindItemsByName(5, "Find items by name").execute(input, tracker, output);
    //
    // assertThat(this.output.toString(), is(String.format("%s%s%s%s",item1.toString(), LINE_SEPARATOR, item2.toString(), LINE_SEPARATOR)));
}
@Test
public void whenFindItemByIDThenShowItems() {
    // Verifies that searching by id prints the menu, the found item, and
    // the menu again.
    // NOTE(review): the StubInput/StartUI run that would produce the
    // output is commented out, so this.output stays empty and the
    // assertion below will fail; re-enable those lines before running.
    Tracker tracker = new Tracker();
    Item item1 = tracker.add(new Item("test name", "desc"));
    Item item2 = tracker.add(new Item("test name", "desc"));
    // Input input = new StubInput(new String[]{"4", item1.getId(), "6"});
    // StartUI startUI = new StartUI(input, tracker, output);
    // startUI.init();
    assertThat(
    // new String(out.toByteArray()),
    this.output.toString(),
    is(
    new StringBuilder()
    .append(showMenu())
    .append(item1.toString() + LINE_SEPARATOR + LINE_SEPARATOR)
    .append(showMenu())
    .toString()
    )
    );
}
@Test
public void whenFindItemByIdActionTest() {
    // Intended to verify the FindItemById action with a mocked Input.
    // Disabled: the body is commented out, so the test passes vacuously.
    // Tracker tracker = new Tracker();
    // Item item1 = tracker.add(new Item("test name", "desc"));
    // Item item2 = tracker.add(new Item("test name", "desc"));
    //
    // Input input = mock(Input.class);
    //// when(input.ask("Enter id of the item to find")).thenReturn(item1.getId());
    // new FindItemById(4, "Find item by Id").execute(input, tracker, output);
    //
    // assertThat(this.output.toString(), is(item1.toString() + LINE_SEPARATOR + LINE_SEPARATOR));
}
private static String showMenu() {
    // The exact menu text the console UI prints: seven numbered options,
    // each line (including the last) terminated by the platform separator.
    String[] options = {
            "Menu. Make your choice:",
            "1. Add item",
            "2. Show all items",
            "3. Edit item",
            "4. Delete item",
            "5. Find item by Id",
            "6. Find items by name",
            "7. Exit Program",
    };
    StringBuilder menu = new StringBuilder();
    for (String option : options) {
        menu.append(option).append(LINE_SEPARATOR);
    }
    return menu.toString();
}
} |
<gh_stars>1000+
/* globals d3 */
// Bubble-chart demo: d3 computes a circle-packing layout and spritejs
// renders it. The scene keeps a 1600x1200 design resolution; 'stickyWidth'
// scales it to fit the container's width.
const {Scene} = spritejs;
const container = document.getElementById('stage');
const scene = new Scene({
  container,
  width: 1600,
  height: 1200,
  mode: 'stickyWidth',
});
// Wrap the sprite layer in a d3 selection so sprites can be created and
// updated with the d3 data-join API.
const layer = d3.select(scene.layer('fglayer'));
document.querySelector('#stage canvas').style.backgroundColor = '#151718';
// Fixed label (name="region") showing the currently hovered node's name.
layer.append('label')
  .attr('pos', [1100, 150])
  .attr('name', 'region')
  .attr('font', '42px Arial')
  .attr('text', '中国')
  .attr('fillColor', '#fff');
d3.json('https://s5.ssl.qhres2.com/static/b0695e2dd30daa64.json', (err, data) => {
  if(err) throw new Error(err);
  // Every node weighs 1, so a bubble's size reflects its descendant count;
  // larger bubbles sort first.
  const root = d3.hierarchy(data)
    .sum(d => 1)
    .sort((a, b) => b.value - a.value);
  // Circle-packing layout inside a 1000x1000 square, 3px between circles.
  const pack = d3.pack()
    .size([1000, 1000])
    .padding(3);
  const nodes = pack(root);
  const color = d3.scaleSequential(d3.interpolateMagma)
    .domain([-4, 4]);
  // One circular label sprite per node (the fixed "region" label excluded).
  layer.selectAll('label[name!="region"]')
    .data(nodes.descendants())
    .enter()
    .append('label')
    .attr('translate', [300, 150])
    .attr('anchor', 0.5)
    // Position and size come from the pack layout (center + radius).
    .attr('pos', (d) => {
      const x = Math.round(d.x),
        y = Math.round(d.y);
      return [x, y];
    })
    .attr('size', (d) => {
      const r = Math.round(d.r);
      return [2 * r, 2 * r];
    })
    // Depth in the hierarchy picks the fill color; the full border radius
    // turns the rectangular label into a circle.
    .attr('bgcolor', d => color(d.depth))
    .attr('borderRadius', (d) => {
      return d.r;
    })
    // Font grows logarithmically with the node's weight.
    .attr('font', (d) => {
      return `${16 + Math.round(10 * Math.log2(d.value))}px Arial`;
    })
    .attr('lineHeight', (d) => {
      return Math.round(2 * d.r);
    })
    .attr('textAlign', 'center')
    // Only leaf bubbles show their name inline.
    .attr('text', (d) => {
      if(!d.children) return d.data.name;
      return '';
    })
    // Hovering highlights the bubble and echoes its name in the fixed
    // "region" label; leaving clears the highlight.
    .on('mousemove', function (d) {
      // console.log(d.data.name)
      layer.selectAll('label[name="region"]')
        .attr('text', d.data.name);
      layer.selectAll('label[name!="region"]')
        .attr('border', null);
      this.attr('border', [3, 'red']);
    })
    .on('mouseleave', function (d) {
      this.attr('border', null);
    });
});
#include "PowerPellet.hpp"
#include "ScatterState.hpp"
#include <memory>
namespace Pacenstein {
    // A power pellet is an Item worth 100 points (third base-ctor argument).
    PowerPellet::PowerPellet(float x, float y):
        Item(x, y, 100)
    {}

    // Convenience overload: forwards a packed (x, y) vector to the
    // coordinate constructor.
    PowerPellet::PowerPellet(sf::Vector2f xy):
        PowerPellet(xy.x, xy.y)
    {}

    // Called when the player collects the pellet: updates the shared game
    // state (pellet count, score) and marks this pellet as consumed.
    void PowerPellet::interact(game_data_ref_t data) {
        data->powerPelletsLeft--;
        collected = true;
        data->score += points;
        // Power pellets also flip the ghosts into scatter mode.
        data->scattering = true;
    }

    // True once interact() has consumed this pellet.
    bool PowerPellet::is_collected(){
        return collected;
    }

    // Pellet position on the map (delegates to the Item base class).
    sf::Vector2f PowerPellet::getPosition(){
        return this->getPos();
    }
}
|
<reponame>smartao/estudos_python<gh_stars>0
#!/usr/bin/python3
'''
Interpolation

Interpolation means substituting values inside a string.
'''
# Creating two variables
from string import Template
nome, idade = '<NAME>', 30.98761
# Oldest method, least recommended!
#
# %s = a sequence of characters that Python interprets to substitute
#      string-type values
# %d = used to substitute integer values
#      (NOTE: %d truncates the float 30.98761 down to 30 here)
# %f = used to substitute float values
print('\nSubstituindo valores variaveis, método antigo:')
print('Nome: %s, Idade: %d' % (nome, idade))
# Substituting a float and limiting it to two decimal places
print('\nSubstitindo valores float e limitando as casas deciamais:')
print('Nome: %s, Idade: %.2f' % (nome, idade))
# str.format method, common on Python 3.5 and earlier
print('\nMetodo de interpolacao python 3.6 ou inferior:')
print('Nome: {0} Idade: {1}'.format(nome, idade))
# Newest method of all: f-strings, supported on Python 3.6 or later
print('\nMetodo de interpolacao python 3.6 ou superior:')
print(f'Nome: {nome} Idade: {idade}')
# Arithmetic inside the placeholder, formatted to two decimal places
print(f'Idade de {nome} daqui 10 anos = {idade+10:.2f}')
# Template method; requires the import configured at the top
print('\nMetodo usando template:')
s = Template('Nome: $n Idade: $ida')
print(s.substitute(n=nome, ida=idade))
#
# Sources:
# Curso Python 3 - Curso Completo do Básico ao Avançado Udemy Aula 54
# https://github.com/cod3rcursos/curso-python/tree/master/fundamentos
# https://realpython.com/python-f-strings/
# https://www.geeksforgeeks.org/formatted-string-literals-f-strings-python/
|
import {
spy,
testApi,
assertEquals,
assertCalledWith,
testFile,
uniq
} from "./helper/mod.ts"
// Creates a fresh space, ingests the sample.tsv log into it, and points the
// client's default search options at that space over all time.
async function setup(zealot: any) {
  const space = await zealot.spaces.create({name: "space1"})
  const log = testFile("sample.tsv")
  const resp = await zealot.logs.postPaths({paths: [log], spaceId: space.id})
  // Drain the ingest response stream so the data is fully loaded before
  // any test issues a search.
  await resp.array()
  zealot.setSearchOptions({
    spaceId: space.id,
    from: new Date(0),
    to: new Date(),
    enhancers: []
  })
}
// records(): materializes the full result set of a search.
testApi("search#records", async (zealot) => {
  await setup(zealot)
  const resp = await zealot.search("* | sort ts")
  const results = await resp.records()
  assertEquals(results.length, 30)
  // First two columns of the first record identify the "stats" path.
  assertEquals(results[0].type.splice(0, 2), [
    {name: "_path", type: "string"},
    {name: "ts", type: "time"}
  ])
  assertEquals(results[0].value.splice(0, 2), ["stats", "1582646585.983635"])
})

// Async iteration yields every protocol payload type, in order.
testApi("search#iterator", async (zealot) => {
  await setup(zealot)
  const stream = await zealot.search("* | sort ts")
  const types = []
  for await (const payload of stream) {
    types.push(payload.type)
  }
  assertEquals(uniq(types), [
    "TaskStart",
    "SearchRecords",
    "SearchEnd",
    "SearchStats",
    "TaskEnd"
  ])
})

// The callback-style API fires start and end exactly once per task.
testApi("search#callbacks start and end", async (zealot: any) => {
  await setup(zealot)
  const resp = await zealot.search("*")
  const start = spy()
  const end = spy()
  // Resolve once the end callback fires so the assertions run afterwards.
  await new Promise<void>((resolve, reject) => {
    resp
      .callbacks()
      .start(start)
      .end((args: any) => {
        end(args)
        resolve()
      })
      .error(reject)
  })
  assertCalledWith(start, {task_id: 0, type: "TaskStart"})
  assertCalledWith(end, {task_id: 0, type: "TaskEnd"})
})

// The records callback receives channel, schemas, and record batches.
testApi("search#callbacks record", async (zealot: any) => {
  await setup(zealot)
  const resp = await zealot.search("_path=conn | sort ts | head 1")
  const records = spy()
  await new Promise((resolve, reject) => {
    resp
      .callbacks()
      .records(records)
      .end(resolve)
      .error(reject)
  })
  // Inspect the payload of the first (only) records callback.
  const args = records.calls[0].args[0]
  assertEquals(Object.keys(args), [
    "channel",
    "schemas",
    "newRecords",
    "allRecords"
  ])
  assertEquals(args.channel, 0)
  assertEquals(args.schemas.size, 1)
  assertEquals(args.newRecords.length, 1)
  assertEquals(args.allRecords.length, 1)
})

// With format=zng the raw response body is a binary byte stream.
testApi("search#originResponse format=zng", async (zealot: any) => {
  await setup(zealot)
  const resp = await zealot.search("*", {format: "zng", controlMessages: false})
  for await (const chunk of resp.origResp.body) {
    assertEquals(chunk instanceof Uint8Array, true)
  }
})

// Passing an AbortSignal lets the caller cancel an in-flight search.
testApi("search with abortController", async (zealot: any) => {
  await setup(zealot)
  const onAbort = spy()
  const ctl = new AbortController()
  ctl.signal.onabort = onAbort
  const resp = await zealot.search("*", {signal: ctl.signal})
  ctl.abort()
  // Release the response stream after aborting.
  resp.origResp.body?.cancel()
  assertEquals(onAbort.calls.length, 1)
})
|
#!/bin/sh
# Submit the current directory to Google Cloud Build, using the build steps
# defined in cloudbuild.yaml.
gcloud builds submit --config cloudbuild.yaml .
import { useKeycloak } from "@react-keycloak/web";
import React, { FC, useContext } from "react";
import { useHistory } from "react-router-dom";
import { isLoginDisabled, isIDPEnabled } from "../environment";
import { AuthTokenContext } from "./AuthTokenProvider";
import { storeAuthToken, getStoredAuthToken } from "./helper";
interface PropsT {
  location: {
    // NOTE(review): consumed via URLSearchParams below, so this is
    // presumably the raw query string — confirm and consider typing it
    // as string rather than Object.
    search: Object;
  };
}

// Auth gate: renders its children only when authentication is satisfied.
// - A token arriving as ?access_token=... in the URL is persisted first.
// - With an identity provider enabled: wait until keycloak has initialized
//   and a token is present (render nothing meanwhile).
// - Without an IDP (and login not disabled): require a stored token,
//   otherwise redirect to the login page.
const WithAuthToken: FC<PropsT> = ({ location, children }) => {
  const history = useHistory();
  const { initialized } = useKeycloak();
  const { authToken } = useContext(AuthTokenContext);

  // NOTE(review): invoked during render below; a useEffect is the usual
  // home for navigation side effects — confirm this is intentional.
  const goToLogin = () => history.push("/login");

  const { search } = location;
  const params = new URLSearchParams(search);
  const accessToken = params.get("access_token");

  // Persist a token handed over via the query string (render-time side
  // effect as well).
  if (accessToken) storeAuthToken(accessToken);

  if (isIDPEnabled && (!initialized || !authToken)) {
    return null;
  }

  if (!isIDPEnabled && !isLoginDisabled) {
    const authToken = getStoredAuthToken();
    if (!authToken) {
      goToLogin();
      return null;
    }
  }

  return <>{children}</>;
};

export default WithAuthToken;
|
/**
 * Determine whether `num` is a prime number.
 *
 * Uses trial division by odd candidates up to sqrt(num) — O(sqrt n)
 * instead of the original O(n) scan, with identical results.
 *
 * @param {number} num - integer to test.
 * @returns {boolean} true when num is prime, false otherwise.
 */
function isPrime(num) {
  if (num <= 1)
    return false;               // 0, 1 and negatives are not prime
  if (num <= 3)
    return true;                // 2 and 3 are prime
  if (num % 2 === 0)
    return false;               // even numbers > 2 are composite
  // Any composite num has a divisor <= sqrt(num); only odd ones remain.
  for (let i = 3; i * i <= num; i += 2)
    if (num % i === 0)
      return false;
  return true;
}
// Demo: 11 is prime, so this prints "true".
const num = 11;
console.log(isPrime(num)); // true
#!/bin/bash
# Create a user/group matching the caller-supplied USERID/GROUPID (when both
# are non-root) so files written to mounted volumes keep host ownership,
# then run the requested Go binary as that user.
#
# Fix: all variable expansions are quoted so empty/whitespace values cannot
# word-split, and "$@" preserves each script argument as its own word.
if [ "${USERID}" != 0 ] && [ "${GROUPID}" != 0 ] ; then
    addgroup --gid "${GROUPID}" mygroup
    adduser --gecos "" --disabled-password --uid "${USERID}" --ingroup mygroup myuser
    usermod -aG docker myuser
fi
sudo -H -u myuser ls -al
# NOTE: $PATH expands in *this* shell before sudo runs — presumably
# intentional for debugging; confirm if myuser's own PATH was wanted.
sudo -H -u myuser echo "$PATH"
sudo -H -u myuser ls -al /go/bin
# First argument is the binary name under /go/bin; the rest are passed on.
sudo -H -u myuser /go/bin/"$@"
/*
Copyright 2020-2021 University of Oxford
and Health and Social Care Information Centre, also known as NHS Digital
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
SPDX-License-Identifier: Apache-2.0
*/
import { TestBed } from '@angular/core/testing';
import { UserSettingsHandlerService } from './user-settings-handler.service';
import { HttpClientTestingModule } from '@angular/common/http/testing';
import { UIRouterModule } from '@uirouter/angular';
import { ToastrModule } from 'ngx-toastr';
import { ElementTypesService } from '@mdm/services/element-types.service';
import { MdmResourcesService } from '@mdm/modules/resources';
describe('UserSettingsHandlerService', () => {
  // Minimal TestBed: real http-testing/router/toastr modules, a stubbed
  // (empty object) MdmResourcesService, and the concrete
  // ElementTypesService dependency.
  beforeEach(() => TestBed.configureTestingModule({
    imports: [
      HttpClientTestingModule,
      UIRouterModule.forRoot({ useHash: true }),
      ToastrModule.forRoot()
    ],
    providers: [
      {
        provide: MdmResourcesService, useValue: {}
      },
      ElementTypesService
    ]
  }));

  // Smoke test: the service can be constructed from the injector.
  it('should be created', () => {
    const service: UserSettingsHandlerService = TestBed.inject(UserSettingsHandlerService);
    expect(service).toBeTruthy();
  });
});
|
import logging

from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
def get_user(netid):
    """Return the Django ``User`` whose username is *netid*, creating it on demand.

    On the first lookup for a NetID there is no matching account yet, so one
    is provisioned with the password derived by ``get_user_pass``.

    :param netid: institutional NetID, used as the Django username.
    :return: the existing or newly created ``User``, or ``None`` when an
        unexpected error occurs.
    """
    try:
        return User.objects.get(username=netid)
    except ObjectDoesNotExist:
        # First login: provision the account on the fly.
        return User.objects.create_user(
            netid, password=get_user_pass(netid))
    except Exception:
        # Unexpected failure (e.g. database outage): log the traceback via
        # the logging framework instead of printing to stdout, and signal
        # failure to the caller with None.
        logging.getLogger(__name__).exception(
            "Error while retrieving/creating user %s", netid)
        return None
#!/bin/bash
# Start a Quorum node in Raft consensus mode with account 0 unlocked.
# Runs from the script's own directory so env.sh (QUORUM, QUO_DATA) resolves.
cd `dirname ${BASH_SOURCE-$0}`
. env.sh
echo "start-mining.sh"
# RPC on :8000, p2p on :9000, raft transport on :50400, 2s block time.
# The unlock password is empty (supplied via process substitution); output
# is logged next to the data dir and the node runs in the background.
nohup ${QUORUM} --datadir $QUO_DATA --rpc --rpcaddr 0.0.0.0 --rpcport 8000 --port 9000 --raft --raftport 50400 --raftblocktime 2000 --unlock 0 --password <(echo -n "") > $QUO_DATA/../raft_quorum_log 2>&1 &
#echo --datadir $QUO_DATA --rpc --rpcaddr 0.0.0.0 --rpcport 8000 --port 9000 --raft --raftport 50400 --raftblocktime 2000 --unlock 0 --password <(echo -n "")
def count_palindromes(text):
    """Count whitespace-separated words of *text* that read the same backwards.

    Prints the number of palindromic words (the original behaviour) and also
    returns it, so callers can use the value directly.

    :param text: input string; split on whitespace into candidate words.
    :return: the number of palindromic words.
    """
    # str.split() with no argument collapses runs of whitespace and yields
    # nothing for an empty string — the previous split(' ') produced an
    # empty token there, which was wrongly counted as a palindrome.
    count = sum(1 for word in text.split() if word == word[::-1])
    print(count)
    return count
#!/bin/sh
# Print the in-guest hostname of a vSphere VM as reported by VMware Tools.
# Usage: script.sh <vcenter-url> <username> <password> <vm-name>
# GOVC_INSECURE=1 skips TLS certificate verification on the vCenter endpoint.
export GOVC_USERNAME=$2
export GOVC_PASSWORD=$3
export GOVC_INSECURE=1
export GOVC_URL=$1
govc vm.info -json $4 | jq -r .VirtualMachines[].Guest.HostName
<gh_stars>1-10
package com.bustiblelemons.cthulhator.view;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.ImageButton;
import android.widget.RelativeLayout;
import com.bustiblelemons.cthulhator.R;
import com.bustiblelemons.model.LocationInfo;
import com.micromobs.android.floatlabel.FloatLabelEditText;
import butterknife.ButterKnife;
import butterknife.InjectView;
/**
* Created by bhm on 26.07.14.
*/
public class LocationWidget extends RelativeLayout {
    @InjectView(R.id.street)
    FloatLabelEditText streetInput;
    @InjectView(R.id.city)
    FloatLabelEditText cityInput;
    @InjectView(R.id.zipcode)
    FloatLabelEditText zipCodeInput;
    @InjectView(R.id.state)
    FloatLabelEditText stateInput;
    @InjectView(R.id.pick_location)
    ImageButton pickLocationButton;
    private View rootView;
    // Location bound via setLocation()/setInfo(); serves as the fallback for
    // the getters when no individual field value has been set yet.
    private LocationInfo mInfo;
    private String mStreet;
    private String mState;
    private String mCity;
    private String mZipcode;
    private boolean enableMapPicker = true;
    private Drawable locationPickerDrawable;
    private int defaultTextSize;

    public LocationWidget(Context context) {
        super(context);
        init(context, null);
    }

    public LocationWidget(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context, attrs);
    }

    public LocationWidget(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(context, attrs);
    }

    public void setZipcode(String zipcode) {
        if (zipCodeInput != null) {
            zipCodeInput.setText(zipcode);
            this.mZipcode = zipcode;
        }
    }

    /**
     * @return the city set on this widget, falling back to the bound
     *         {@link LocationInfo}; {@code null} when neither is available.
     */
    public String getCity() {
        // Fixed: the previous ternary was inverted and returned null
        // precisely when mCity WAS set (compare getZipCode for the intended
        // pattern).
        return mCity != null ? mCity : mInfo != null ? mInfo.getCity() : null;
    }

    public void setCity(String city) {
        if (cityInput != null) {
            cityInput.setText(city);
            this.mCity = city;
        }
    }

    /**
     * @return the state set on this widget, falling back to the bound
     *         {@link LocationInfo}; {@code null} when neither is available.
     */
    public String getState() {
        // Fixed: same inverted-ternary bug as getCity().
        return mState != null ? mState : mInfo != null ? mInfo.getState() : null;
    }

    public void setState(String state) {
        if (stateInput != null && stateInput.getEditText() != null) {
            stateInput.setText(state);
            this.mState = state;
        }
    }

    /**
     * @return the street set on this widget, falling back to the bound
     *         {@link LocationInfo}; {@code null} when neither is available.
     */
    public String getStreet() {
        // Fixed: same inverted-ternary bug as getCity().
        return mStreet != null ? mStreet : mInfo != null ? mInfo.getStreet() : null;
    }

    public void setStreet(String street) {
        if (streetInput != null) {
            streetInput.setText(street);
            this.mStreet = street;
        }
    }

    public LocationInfo getInfo() {
        return mInfo;
    }

    public void setInfo(LocationInfo mInfo) {
        this.mInfo = mInfo;
    }

    /**
     * Inflates the widget layout, binds the views, and applies the custom
     * XML attributes (text size, map-picker visibility, picker drawable).
     */
    private void init(Context context, AttributeSet attrs) {
        rootView = LayoutInflater.from(context).inflate(R.layout.location_widget, this, true);
        ButterKnife.inject(this, rootView);
        if (attrs != null) {
            TypedArray array = context.obtainStyledAttributes(attrs, R.styleable.LocationWidget);
            try {
                defaultTextSize = context.getResources().getDimensionPixelSize(R.dimen.font_16);
                setTextSizes(array);
                enableMapPicker = array.getBoolean(R.styleable.LocationWidget_enableMapPicker,
                        enableMapPicker);
                enableMapPicker(enableMapPicker);
                locationPickerDrawable = array.getDrawable(R.styleable.LocationWidget_pickerDrawable);
                if (locationPickerDrawable != null) {
                    setLocationPicker(locationPickerDrawable);
                }
            } finally {
                // TypedArray instances are pooled and must be recycled.
                array.recycle();
            }
        }
    }

    private void setTextSizes(TypedArray array) {
        defaultTextSize = array.getDimensionPixelSize(R.styleable.LocationWidget__textSize,
                defaultTextSize);
        setTextSizes(defaultTextSize);
    }

    // Applies the same pixel text size to all four input fields.
    private void setTextSizes(int size) {
        setTextSizeFor(streetInput, size);
        setTextSizeFor(cityInput, size);
        setTextSizeFor(stateInput, size);
        setTextSizeFor(zipCodeInput, size);
    }

    private void setTextSizeFor(FloatLabelEditText view, int size) {
        if (view != null) {
            view.setTextSize(TypedValue.COMPLEX_UNIT_PX, size);
        }
    }

    public void setLocationPicker(int resId) {
        if (pickLocationButton != null) {
            pickLocationButton.setImageResource(resId);
        }
    }

    public void setLocationPicker(Drawable drawable) {
        if (pickLocationButton != null) {
            pickLocationButton.setImageDrawable(drawable);
        }
    }

    // Shows or hides the map-picker button.
    public void enableMapPicker(boolean enable) {
        if (enable) {
            if (pickLocationButton != null) {
                pickLocationButton.setVisibility(View.VISIBLE);
            }
        } else {
            if (pickLocationButton != null) {
                pickLocationButton.setVisibility(View.GONE);
            }
        }
    }

    /**
     * Binds a {@link LocationInfo} and pushes each of its fields into the
     * corresponding input. A null argument is ignored.
     */
    public void setLocation(LocationInfo info) {
        if (info == null) {
            return;
        }
        this.mInfo = info;
        setStreet(info.getStreet());
        setState(info.getState());
        setCity(info.getCity());
        setZipcode(info.getZip());
    }

    public String getZipCode() {
        return mZipcode != null ? mZipcode : mInfo != null ? mInfo.getZip() : null;
    }
}
|
#!/bin/bash
# Run the test suite verbosely, then print the coverage summary.
# -e aborts immediately if the tests fail, so coverage is only reported
# for a passing run.
set -e
nose2 -vv
coverage report
|
const snowpackJestConfig = require('@snowpack/app-scripts-react/jest.config.js')()

// Extend Snowpack's default React Jest config with project-specific settings.
module.exports = {
  ...snowpackJestConfig,
  verbose: false,
  testEnvironment: 'jsdom',
  testPathIgnorePatterns: [
    '/node_modules/',
    '<rootDir>/build'
  ],
  transformIgnorePatterns: [
    '/node_modules/',
    '\\.pnp\\.[^\\/]+$'
  ],
  moduleFileExtensions: [
    'js',
    'json',
    'jsx',
    'ts',
    'tsx',
    'node'
  ],
  // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
  testMatch: ['<rootDir>/src/**/?(*.)+(spec|test).[jt]s?(x)'],
  moduleNameMapper: {
    // The aliases from the Snowpack config cannot be reused as-is, so they
    // are restated here in the format Jest expects.
    // Each key and value are passed, in order, as the two parameters of
    // String.prototype.replace.
    '^__mocks__(/?.*)$': '<rootDir>/src/__mocks__$1',
    '^components(/?.*)$': '<rootDir>/src/components$1',
    '^hooks(/?.*)$': '<rootDir>/src/hooks$1',
    '^styles(/?.*)$': '<rootDir>/src/styles$1',
    '^utils(/?.*)$': '<rootDir>/src/utils$1'
  }
}
|
package io.opensphere.core.util.swing;
import java.awt.Color;
import java.awt.Component;
import java.awt.Graphics;
import java.awt.GridBagLayout;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseMotionAdapter;
import javax.swing.JComponent;
import javax.swing.RootPaneContainer;
import javax.swing.SwingUtilities;
/**
* A glass pane to put over a frame which will intercept events to prevent
* interaction with the frame.
*/
public class DisabledGlassPane extends JComponent
{
    /** serialVersionUID. */
    private static final long serialVersionUID = 1L;

    /**
     * Use glass pane to prevent interaction with the parent frame.
     *
     * @param child the component whose parent frame should be disabled.
     * @return The original glass pane of the parent frame.
     */
    public static Component disableParentFrame(Component child)
    {
        Component currentGlass = null;
        RootPaneContainer parentFrame = (RootPaneContainer)SwingUtilities.getAncestorOfClass(RootPaneContainer.class, child);
        if (parentFrame != null)
        {
            // Remember the frame's current glass pane so the caller can
            // restore it later with enableParentFrame().
            currentGlass = parentFrame.getGlassPane();
            DisabledGlassPane glass = new DisabledGlassPane();
            parentFrame.setGlassPane(glass);
            glass.activate();
        }
        return currentGlass;
    }

    /**
     * Restore interaction with the parent frame.
     *
     * @param child the component whose parent frame should be enabled.
     * @param glass This should typically be the original glass pane of the
     *            parent frame.
     */
    public static void enableParentFrame(Component child, Component glass)
    {
        RootPaneContainer parentFrame = (RootPaneContainer)SwingUtilities.getAncestorOfClass(RootPaneContainer.class, child);
        if (parentFrame != null)
        {
            parentFrame.setGlassPane(glass);
        }
    }

    /** Constructor. */
    public DisabledGlassPane()
    {
        setOpaque(false);
        // Fully transparent background: the pane blocks input, not the view.
        Color background = new Color(0, 0, 0, 0);
        setBackground(background);
        setLayout(new GridBagLayout());
        // Empty mouse listeners: registering a listener makes this pane the
        // mouse-event target, so events do not reach the components beneath.
        addMouseListener(new MouseAdapter()
        {
        });
        addMouseMotionListener(new MouseMotionAdapter()
        {
        });
        // Key events must be consumed explicitly to block them.
        addKeyListener(new KeyListener()
        {
            @Override
            public void keyPressed(KeyEvent e)
            {
                e.consume();
            }

            @Override
            public void keyReleased(KeyEvent e)
            {
                e.consume();
            }

            @Override
            public void keyTyped(KeyEvent e)
            {
                e.consume();
            }
        });
    }

    /** When activated, events are intercepted. */
    public void activate()
    {
        setVisible(true);
    }

    /** When deactivated, events are not intercepted. */
    public void deactivate()
    {
        // when a component is not visible, swing events are not delivered.
        setVisible(false);
    }

    @Override
    protected void paintComponent(Graphics graphics)
    {
        // Intentionally paints nothing: the pane stays visually transparent.
    }
}
|
<html>
<head>
<title>My Website</title>
<!-- Page-wide styling: light grey background, Arial text,
     large dark-blue headline. -->
<style>
body {
  font-family: Arial;
  margin: 10px;
  background-color: #F4F4F4;
}
h1 {
  font-size: 44px;
  color: #36477F;
  font-weight: bold;
}
p {
  font-size: 18px;
  margin-bottom: 25px;
}
</style>
</head>
<body>
<h1>My Website</h1>
<p>Welcome to my website!</p>
</body>
</html>
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.