identifier
stringlengths 42
383
| collection
stringclasses 1
value | open_type
stringclasses 1
value | license
stringlengths 0
1.81k
| date
float64 1.99k
2.02k
⌀ | title
stringlengths 0
100
| creator
stringlengths 1
39
| language
stringclasses 157
values | language_type
stringclasses 2
values | word_count
int64 1
20k
| token_count
int64 4
1.32M
| text
stringlengths 5
1.53M
| __index_level_0__
int64 0
57.5k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
https://github.com/conversationai/conversationai-crowdsource/blob/master/scripts/src/csvtojsonlines_lib.ts
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,020
|
conversationai-crowdsource
|
conversationai
|
TypeScript
|
Code
| 161
| 384
|
/*
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import * as csvtojson from 'csvtojson';
import * as stream from 'stream';
/**
 * Pipe a CSV stream through csvtojson and write each resulting JSON record
 * to `outstream` (JSON-lines style output).
 *
 * @param instream  readable stream of CSV text
 * @param outstream writable stream receiving the JSON records
 * @returns resolves when conversion finishes; rejects if csvtojson reports
 *          an error (in which case `outstream` is still ended).
 */
export async function csvtojsonlines(
  instream: stream.Readable,
  outstream: stream.Writable
): Promise<void> {
  const csvToJson = csvtojson();
  let lineCount = 0;
  // Type the promise explicitly so `resolve()` with no value is valid
  // under strict mode.
  const onceDone = new Promise<void>((resolve, reject) => {
    csvToJson
      .fromStream(instream)
      .on('data', (jsonObj: Buffer) => {
        lineCount++;
        outstream.write(jsonObj.toString('utf8'));
      })
      .on('error', (error: Error) => {
        // Without this handler a stream-level failure (e.g. malformed CSV)
        // would leave the promise pending forever.
        outstream.end();
        reject(error);
      })
      .on('done', (error?: Error) => {
        console.log(`lineCount: ${lineCount}`);
        outstream.end();
        if (error) {
          console.log('end error:' + error.message);
          reject(error);
        } else {
          console.log('end success.');
          resolve();
        }
      });
  });
  await onceDone;
}
| 3,618
|
https://github.com/Zwiterrion/daikoku/blob/master/daikoku/javascript/src/components/adminbackoffice/otoroshi/initialization/steps.js
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
daikoku
|
Zwiterrion
|
JavaScript
|
Code
| 1,802
| 6,964
|
import React, { useState, useEffect } from 'react';
import Select from 'react-select';
import Creatable from 'react-select/creatable';
import AsyncSelect from 'react-select/async';
import classNames from 'classnames';
import _ from 'lodash';
import { Option } from '../../../utils';
import * as Services from '../../../../services';
import { newPossibleUsagePlan, BeautifulTitle } from '../../../utils';
import { t, Translation } from '../../../../locales';
export const SelectionStepStep = (props) => {
return (
<div className="d-flex">
<button className="btn btn-outline-primary mr-2" onClick={() => props.goToServices()}>
<Translation i18nkey="Import Otoroshi services" language={props.currentLanguage}>
Import Otoroshi Services
</Translation>
</button>
<button className="btn btn-outline-primary" onClick={() => props.goToApikeys()}>
<Translation i18nkey="Import Otoroshi apikeys" language={props.currentLanguage}>
Import Otoroshi Apikeys
</Translation>
</button>
</div>
);
};
export const SelectOtoStep = (props) => {
const [otoInstance, setOtoInstance] = useState(undefined);
useEffect(() => {
if (otoInstance) {
props.setOtoInstance(otoInstance);
}
}, [otoInstance]);
const previousState = JSON.parse(
localStorage.getItem(`daikoku-initialization-${props.tenant._id}`)
);
return (
<div className="d-flex flex-row">
<Select
placeholder={t('Select an Otoroshi instance', props.currentLanguage)}
className="add-member-select mr-2 reactSelect"
isDisabled={!props.otoroshis.length}
isLoading={!props.otoroshis.length}
options={props.otoroshis.map((s) => ({
label: s.url,
value: s._id,
}))}
selected={otoInstance}
onChange={(slug) => setOtoInstance(slug)}
value={otoInstance}
classNamePrefix="reactSelect"
/>
{!!previousState && previousState.tenant === props.tenant._id && (
<div className="d-flex flex-column">
<BeautifulTitle
placement="bottom"
title={t('Load a work in progress', props.currentLanguage)}>
<button className="btn btn-access" onClick={props.loadPreviousState}>
<i className="fa fa-download" />
</button>
</BeautifulTitle>
</div>
)}
</div>
);
};
export const RecapServiceStep = (props) => {
return (
<div>
<h2>
<Translation i18nkey="Api imported" language={props.currentLanguage}>
Apis to import
</Translation>
</h2>
<ul style={{ listStyleType: 'none' }}>
{props.teams
.filter((t) => props.createdApis.some((api) => api.team === t._id))
.map((t, idx) => {
return (
<li className="mt-3" key={idx}>
<h5>
<i className="fas fa-user-friends"></i> {t.name}
</h5>
<ul>
{props.createdApis
.filter((s) => s.team === t._id)
.map((s, idx) => {
return <li key={idx}>{s.name}</li>;
})}
</ul>
</li>
);
})}
</ul>
<div className="d-flex justify-content-end">
<button className="btn btn-outline-primary mr-1" onClick={() => props.goBackToServices()}>
<i className="fas fa-chevron-left mr-1"></i>
<Translation i18nkey="Back" language={props.currentLanguage}>
Back
</Translation>
</button>
<button className="btn btn-outline-danger mr-1" onClick={props.cancel}>
<Translation i18nkey="Cancel" language={props.currentLanguage}>
Cancel
</Translation>
</button>
<button className="btn btn-outline-success" onClick={() => props.create()}>
<Translation i18nkey="Create apis" language={props.currentLanguage}>
Create APIs
</Translation>
</button>
</div>
</div>
);
};
export const RecapSubsStep = (props) => {
return (
<div>
<h2>
<Translation i18nkey="Apikey imported" language={props.currentLanguage}>
Apikey to import
</Translation>
</h2>
<ul style={{ listStyleType: 'none' }}>
{props.apis
.filter((a) => props.createdSubs.some((s) => s.api._id === a._id))
.map((a, idx) => {
return (
<li className="mt-3" key={idx}>
<h5>
<i className="fas fa-atlas"></i> {a.name}
</h5>
<ul>
{props.createdSubs
.filter((s) => s.api._id === a._id)
.map((s, idx) => {
return (
<li key={idx}>
{s.plan.customName || s.plan.type}/{s.clientName}
</li>
);
})}
</ul>
</li>
);
})}
</ul>
<div className="d-flex justify-content-end">
<button className="btn btn-outline-primary mr-1" onClick={() => props.goBackToServices()}>
<i className="fas fa-chevron-left mr-1"></i>
<Translation i18nkey="Back" language={props.currentLanguage}>
Back
</Translation>
</button>
<button className="btn btn-outline-danger mr-1" onClick={props.cancel}>
<Translation i18nkey="Cancel" language={props.currentLanguage}>
Cancel
</Translation>
</button>
<button className="btn btn-outline-success" onClick={() => props.create()}>
<Translation i18nkey="Create subscriptions" language={props.currentLanguage}>
Create subscriptions
</Translation>
</button>
</div>
</div>
);
};
export const ServicesStep = (props) => {
const [service, setService] = useState(props.maybeCreatedApi.getOrElse(props.service));
const [loading, setLoading] = useState(false);
const [newTeam, setNewTeam] = useState();
const [selectedTeam, setSelectedTeam] = useState(
props.maybeCreatedApi.map((api) => api.team).getOrNull()
);
const [error, setError] = useState({});
const [inputRef, setInputRef] = useState(null);
useEffect(() => {
if (newTeam) {
setLoading(true);
Services.fetchNewTeam()
.then((t) => ({ ...t, name: newTeam }))
.then((t) => Services.createTeam(t))
.then((t) => {
props.addNewTeam(t);
setSelectedTeam(t._id);
setNewTeam(undefined);
setLoading(false);
});
}
}, [newTeam]);
useEffect(() => {
Services.checkIfApiNameIsUnique(service.name).then(({ exists }) => {
if (exists) {
setError({
name: t('api.unique.name.error', props.currentLanguage, false, 'Api name must be unique'),
});
} else {
setError({});
}
});
}, [service]);
const nextStep = () => {
if (props.currentStep === props.totalSteps) {
props.recap();
} else {
props.nextStep();
}
};
const getIt = () => {
props.addService(service, selectedTeam);
nextStep();
};
const update = () => {
props.updateService(service, selectedTeam);
nextStep();
};
const reset = () => {
props.resetService();
setService(props.service);
setSelectedTeam(null);
};
useEffect(() => {
return () => {
document.onkeydown = null;
};
}, [window.event]);
const checkKey = (e) => {
if (inputRef && document.activeElement !== inputRef) {
if (e.keyCode === 37 && props.currentStep > 1) {
props.previousStep();
} else if (e.keyCode === 39) {
if (props.maybeCreatedApi && selectedTeam) {
props.updateService(service, selectedTeam);
nextStep();
} else if (selectedTeam) {
props.addService(service, selectedTeam);
nextStep();
} else {
nextStep();
}
}
}
};
document.onkeydown = checkKey;
const teams = props.teams.map((t) => ({ label: t.name, value: t._id }));
return (
<div className="d-flex flex-row col-12 flex-wrap">
<div className="col-6">
<h2>
<Translation i18nkey="Otoroshi" language={props.currentLanguage}>
Otoroshi
</Translation>
</h2>
<div>
<span style={{ fontWeight: 'bold' }}>
<Translation
i18nkey="init.services.title"
language={props.currentLanguage}
replacements={[props.infos.index + 1, props.infos.total]}>
Api {props.infos.index + 1}/{props.infos.total}
</Translation>
</span>{' '}
: {props.service.name}
<AsyncSelect
cacheOptions
defaultOptions
placeholder={t('Jump to specific service', props.currentLanguage)}
className="add-member-select reactSelect"
loadOptions={props.getFilteredServices}
onChange={({ value }) => props.goToStep(value)}
classNamePrefix="reactSelect"
/>
</div>
<div className="mt-3">
<span style={{ fontWeight: 'bold' }}>
<Translation i18nkey="api group" language={props.currentLanguage}>
Api group
</Translation>
</span>{' '}
: {props.groups.find((g) => g.id === props.service.groupId).name}
</div>
</div>
<div className="col-6">
<h2>{props.tenant.name}</h2>
<div className="d-flex flex-row align-items-center mb-3">
<div className="col-4">
<span style={{ fontWeight: 'bold' }}>
<Translation i18nkey="Api name" language={props.currentLanguage}>
Api name
</Translation>
</span>
</div>
<div className="d-flex flex-column col-8">
<input
type="text"
tabIndex="0"
ref={(ref) => setInputRef(ref)}
className={classNames('form-control', { 'on-error': !!error.name })}
value={service.name}
onChange={(e) => setService({ ...service, name: e.target.value })}
/>
{error.name && <small className="invalid-input-info text-danger">{error.name}</small>}
</div>
</div>
<div className="d-flex flex-row align-items-center mb-3">
<div className="col-4">
<div>
<span style={{ fontWeight: 'bold' }}>
<Translation i18nkey="Api team" language={props.currentLanguage}>
Api team
</Translation>
</span>
</div>
</div>
<Creatable
className="col-8"
isClearable={true}
isDisabled={loading}
isLoading={loading}
onChange={(slug, { action }) => {
setSelectedTeam(action === 'clear' ? undefined : slug.value);
}}
onCreateOption={setNewTeam}
options={teams}
value={teams.find((t) => t.value === selectedTeam)}
placeholder={t('Select a team', props.currentLanguage)}
formatCreateLabel={(value) =>
t('create.team.label', props.currentLanguage, false, `creer l'équipe ${value}`, value)
}
classNamePrefix="reactSelect"
/>
</div>
</div>
<div className="d-flex justify-content-between col-12 mt-5">
<div />
<div>
<button
className="btn btn-access"
disabled={props.currentStep === 1 ? 'disabled' : null}
onClick={() => props.goToStep(1)}>
<i className="fas fa-angle-double-left" />
</button>
<button
className="btn btn-access mr-2"
disabled={props.currentStep === 1 ? 'disabled' : null}
onClick={props.previousStep}>
<i className="fas fa-angle-left" />
</button>
{props.maybeCreatedApi.isDefined && (
<button className="btn btn-outline-success" onClick={reset}>
<Translation i18nkey="Reset" language={props.currentLanguage}>
Reset
</Translation>
</button>
)}
{props.maybeCreatedApi.isDefined && (
<button
className="btn btn-outline-success mr-2"
disabled={!selectedTeam || error.name ? 'disabled' : null}
onClick={update}>
<Translation i18nkey="Update" language={props.currentLanguage}>
Update
</Translation>
</button>
)}
{!props.maybeCreatedApi.isDefined && (
<button
className="btn btn-outline-success mr-2"
disabled={!selectedTeam || error.name ? 'disabled' : null}
onClick={getIt}>
<Translation i18nkey="Import" language={props.currentLanguage}>
Import this service
</Translation>
</button>
)}
<button className="btn btn-access ml-2" onClick={nextStep}>
<i className="fas fa-angle-right" />
</button>
<button
className="btn btn-access"
disabled={props.currentStep === props.totalSteps ? 'disabled' : null}
onClick={() => props.goToStep(props.totalSteps)}>
<i className="fas fa-angle-double-right" />
</button>
</div>
<div>
<button className="btn btn-outline-danger mr-2" onClick={props.cancel}>
<Translation i18nkey="Cancel" language={props.currentLanguage}>
Cancel
</Translation>
</button>
<button className="btn btn-outline-success" onClick={props.recap}>
<Translation i18nkey="Finish" language={props.currentLanguage}>
Finish
</Translation>
</button>
</div>
</div>
</div>
);
};
export const ApiKeyStep = (props) => {
const [selectedApi, setSelectedApi] = useState(
props.maybeCreatedSub.map((sub) => sub.api).getOrNull()
);
const [selectedPlan, setSelectedPlan] = useState(
props.maybeCreatedSub.map((sub) => sub.plan).getOrNull()
);
const [selectedTeam, setSelectedTeam] = useState(
props.maybeCreatedSub.map((sub) => sub.team).getOrNull()
);
const [newTeam, setNewTeam] = useState(undefined);
const [newPlan, setNewPlan] = useState(undefined);
const [loading, setLoading] = useState(false);
const [loadingPlan, setLoadingPlan] = useState(false);
const [error, setError] = useState({ plan: false, api: false, team: false });
useEffect(() => {
if (selectedApi) {
const api = props.apis.find((a) => selectedApi._id === a._id);
setSelectedApi(api);
if (selectedPlan) {
setSelectedPlan(api.possibleUsagePlans.find((pp) => pp._id === selectedPlan._id));
}
}
//remove document.OnKeyDown listener
return () => (document.onkeydown = null);
}, [props.apis]);
useEffect(() => {
if (newTeam) {
setLoading(true);
Services.fetchNewTeam()
.then((t) => ({ ...t, name: newTeam }))
.then((t) => Services.createTeam(t))
.then((t) => {
props.addNewTeam(t);
setSelectedTeam(t._id);
setNewTeam(undefined);
setLoading(false);
});
}
}, [newTeam]);
//add new plan effect
useEffect(() => {
if (newPlan) {
let plans = _.cloneDeep(selectedApi.possibleUsagePlans);
const newPossiblePlan = newPossibleUsagePlan(newPlan);
const plan = {
...newPossiblePlan,
otoroshiTarget: {
...newPossiblePlan.otoroshiTarget,
otoroshiSettings: props.otoroshi,
serviceGroup: props.apikey.authorizedGroup,
},
};
plans.push(plan);
const value = _.cloneDeep(selectedApi);
value.possibleUsagePlans = plans;
setSelectedPlan(plan);
Promise.resolve(setLoadingPlan(true))
.then(() => props.updateApi(value))
.then(() => {
setNewPlan(undefined);
setLoadingPlan(false);
});
}
}, [newPlan]);
//handle error effect
useEffect(() => {
setError({ plan: !!selectedPlan, api: !!selectedApi, team: !!selectedTeam });
}, [selectedPlan, selectedApi, selectedTeam]);
const nextStep = () => {
if (props.currentStep === props.totalSteps) {
props.recap();
} else {
props.nextStep();
}
};
const getIt = () => {
props.addSub(props.apikey, selectedTeam, selectedApi, selectedPlan);
nextStep();
};
const update = () => {
props.updateSub(props.apikey, selectedTeam, selectedApi, selectedPlan);
nextStep();
};
const apis = props.apis.map((a) => ({ label: a.name, value: a }));
const teams = props.teams.map((t) => ({ label: t.name, value: t._id }));
const possiblePlans = Option(props.apis.find((a) => selectedApi && a._id === selectedApi._id))
.map((a) => a.possibleUsagePlans)
.getOrElse([])
.map((pp) => ({ label: pp.customName || pp.type, value: pp }));
const maybeGroup = props.groups.find((g) => g.id === props.apikey.authorizedGroup);
const checkKey = (e) => {
if (e.keyCode === 37 && props.currentStep > 1) {
props.previousStep();
} else if (e.keyCode === 39) {
if (props.maybeCreatedSub && selectedApi && selectedPlan && selectedTeam) {
props.updateSub(props.apikey, selectedTeam, selectedApi, selectedPlan);
nextStep();
} else if (selectedTeam) {
props.addSub(props.apikey, selectedTeam, selectedApi, selectedPlan);
nextStep();
} else {
nextStep();
}
}
};
document.onkeydown = checkKey;
return (
<div className="d-flex flex-row col-12 flex-wrap">
<div className="col-6">
<h2>
<Translation i18nkey="Otoroshi" language={props.currentLanguage}>
Otoroshi
</Translation>
</h2>
<div>
<span style={{ fontWeight: 'bold' }}>
<Translation i18nkey="API key" language={props.currentLanguage}>
API key
</Translation>{' '}
({props.infos.index + 1}/{props.infos.total}) : {props.apikey.clientName}
</span>
<AsyncSelect
cacheOptions
defaultOptions
placeholder={t('Jump to specific apikey', props.currentLanguage)}
className="add-member-select reactSelect"
loadOptions={props.getFilteredApikeys}
onChange={({ value }) => props.goToStep(value)}
classNamePrefix="reactSelect"
/>
</div>
<div className="mt-3">
<span style={{ fontWeight: 'bold' }}>
<Translation i18nkey="Service group" language={props.currentLanguage}>
Api group
</Translation>
</span>{' '}
:{' '}
{Option(maybeGroup)
.map((g) => g.name)
.getOrElse('???')}
</div>
</div>
<div className="col-6">
<h2>{props.tenant.name}</h2>
<div className="d-flex flex-row align-items-center mb-3">
<div className="col-4">
<div>
<span style={{ fontWeight: 'bold' }}>
<Translation i18nkey="API" language={props.currentLanguage}>
API
</Translation>
</span>
</div>
</div>
<div className="d-flex flex-column col-8">
<Select
options={apis}
onChange={(slug) => setSelectedApi(slug.value)}
value={apis.find((a) => !!selectedApi && a.value._id === selectedApi._id)}
placeholder={t('Select an API', props.currentLanguage)}
classNamePrefix="reactSelect"
/>
</div>
</div>
<div className="d-flex flex-row align-items-center mb-3">
<div className="col-4">
<div>
<span style={{ fontWeight: 'bold' }}>
<Translation i18nkey="Plan" language={props.currentLanguage}>
Plan
</Translation>
</span>
</div>
</div>
<div className="d-flex flex-column col-8">
<Creatable
isClearable
isDisabled={!selectedApi || loadingPlan}
isLoading={!selectedApi || loadingPlan}
onChange={(slug, { action }) =>
setSelectedPlan(action === 'clear' ? undefined : slug.value)
}
onCreateOption={setNewPlan}
options={possiblePlans}
value={possiblePlans.find((a) => !!selectedPlan && a.value._id === selectedPlan._id)}
placeholder={t('Select a plan', props.currentLanguage)}
formatCreateLabel={(value) =>
t('create.plan.label', props.currentLanguage, false, `Create plan ${value}`, value)
}
classNamePrefix="reactSelect"
/>
</div>
</div>
<div className="d-flex flex-row align-items-center mb-3">
<div className="col-4">
<div>
<span style={{ fontWeight: 'bold' }}>
<Translation i18nkey="Team" language={props.currentLanguage}>
Team
</Translation>
</span>
</div>
</div>
<Creatable
className="col-8"
isClearable
isDisabled={loading}
isLoading={loading}
onChange={(slug, { action }) =>
setSelectedTeam(action === 'clear' ? undefined : slug.value)
}
onCreateOption={setNewTeam}
options={teams}
value={teams.find((t) => t.value === selectedTeam)}
placeholder={t('Select a team', props.currentLanguage)}
formatCreateLabel={(value) =>
t('create.team.label', props.currentLanguage, false, `creer l'équipe ${value}`, value)
}
classNamePrefix="reactSelect"
/>
</div>
</div>
<div className="d-flex justify-content-between col-12 mt-5">
<div />
<div>
<button
className="btn btn-access"
disabled={props.currentStep === 1 ? 'disabled' : null}
onClick={() => props.goToStep(1)}>
<i className="fas fa-angle-double-left" />
</button>
<button
className="btn btn-access mr-2"
disabled={props.currentStep === 1 ? 'disabled' : null}
onClick={props.previousStep}>
<i className="fas fa-angle-left" />
</button>
{props.maybeCreatedSub.isDefined && (
<button className="btn btn-danger mr-2" onClick={props.resetSub}>
<i className="fas fa-times-circle" />
Suppress this import
</button>
)}
{props.maybeCreatedSub.isDefined && (
<button
className="btn btn-outline-success mr-2"
disabled={!selectedTeam || error.name ? 'disabled' : null}
onClick={update}>
<i className="fas fa-save" />
</button>
)}
{!props.maybeCreatedSub.isDefined && (
<button
className="btn btn-outline-success"
disabled={!selectedTeam || error.name ? 'disabled' : null}
onClick={getIt}>
Import this API key
</button>
)}
<button className="btn btn-access ml-2" onClick={nextStep}>
<i className="fas fa-angle-right" />
</button>
<button
className="btn btn-access"
disabled={props.currentStep === props.totalSteps ? 'disabled' : null}
onClick={() => props.goToStep(props.totalSteps)}>
<i className="fas fa-angle-double-right" />
</button>
</div>
<div>
<button className="btn btn-outline-danger mr-2" onClick={props.cancel}>
<Translation i18nkey="Cancel" language={props.currentLanguage}>
Cancel
</Translation>
</button>
<button className="btn btn-outline-success" onClick={props.recap}>
<Translation i18nkey="Finish" language={props.currentLanguage}>
Finish
</Translation>
</button>
</div>
</div>
</div>
);
};
| 13,152
|
https://github.com/Syndace/python-omemo/blob/master/omemo/extendedpublicbundle.py
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
python-omemo
|
Syndace
|
Python
|
Code
| 392
| 1,060
|
import copy
from .exceptions import UnknownKeyException
class ExtendedPublicBundle:
    """
    This class looks exactly the same as the PublicBundle class, but the types of the
    fields are a bit different:
    The spk field is not a key, but a dictionary containing the key and the id:
        spk = {
            "key" : key,
            "id"  : id
        }
    The otpks field is not an array of keys, but an array of dictionaries containing
    the key and the id:
        otpks = [
            { "key" : key, "id" : id },
            { "key" : key, "id" : id },
            ...
        ]
    """

    def __init__(self, ik, spk, spk_signature, otpks):
        # Deep-copy the mutable structures so later mutation by the caller
        # cannot silently change this bundle's contents.
        self.__ik = ik
        self.__spk = copy.deepcopy(spk)
        self.__spk_signature = spk_signature
        self.__otpks = copy.deepcopy(otpks)

    @classmethod
    def parse(cls, backend, ik, spk, spk_signature, otpks):
        """
        Use this method when creating a bundle from data you retrieved directly from
        some PEP node. This method applies an additional decoding step to the public
        keys in the bundle. Pass the same structure as the constructor expects.
        """
        ik = backend.decodePublicKey(ik)[0]
        spk = {
            "key": backend.decodePublicKey(spk["key"])[0],
            "id": spk["id"]
        }
        otpks = [{
            "key": backend.decodePublicKey(otpk["key"])[0],
            "id": otpk["id"]
        } for otpk in otpks]
        return cls(ik, spk, spk_signature, otpks)

    def serialize(self, backend):
        """
        Use this method to prepare the data to be uploaded directly to some PEP node.
        This method applies an additional encoding step to the public keys in the
        bundle. The result is a dictionary with the keys ik, spk, spk_signature and
        otpks. The values are structured the same way as the inputs of the
        constructor.
        """
        # NOTE(review): the curve identifier "25519" is hard-coded here —
        # presumably all bundles use Curve25519 keys; confirm before reusing
        # with other key types.
        return {
            "ik": backend.encodePublicKey(self.ik, "25519"),
            "spk": {
                "id": self.spk["id"],
                "key": backend.encodePublicKey(self.spk["key"], "25519"),
            },
            "spk_signature": self.spk_signature,
            "otpks": [{
                "id": otpk["id"],
                "key": backend.encodePublicKey(otpk["key"], "25519")
            } for otpk in self.otpks]
        }

    @property
    def ik(self):
        return self.__ik

    @property
    def spk(self):
        return self.__spk

    @property
    def spk_signature(self):
        return self.__spk_signature

    @property
    def otpks(self):
        return self.__otpks

    def findOTPKId(self, otpk):
        """
        Return the id of the one-time pre key ``otpk``.

        Raises UnknownKeyException if the key is absent (or, notably, present
        more than once — both cases are reported with the same message).
        """
        matches = [x for x in self.otpks if x["key"] == otpk]
        if len(matches) != 1:
            raise UnknownKeyException("Tried to get the id of an unknown OTPK.")
        return matches[0]["id"]

    def findSPKId(self, spk):
        """
        Return the id of the signed pre key ``spk``.

        Raises UnknownKeyException if ``spk`` is not this bundle's SPK.
        """
        if self.spk["key"] == spk:
            return self.spk["id"]
        raise UnknownKeyException("Tried to get the id of an unknown SPK.")

    def __eq__(self, other):
        # Field-by-field comparison. The original used a bare `except:` to
        # return False for incompatible objects, which also swallowed
        # KeyboardInterrupt and genuine bugs; an explicit isinstance check
        # with NotImplemented is the idiomatic equivalent (unrelated types
        # still compare unequal via Python's fallback).
        if not isinstance(other, ExtendedPublicBundle):
            return NotImplemented
        return (
            self.ik == other.ik and
            self.spk == other.spk and
            self.spk_signature == other.spk_signature and
            self.otpks == other.otpks
        )
| 37,880
|
https://github.com/ythlml/mindspore/blob/master/tests/ut/python/model/test_bert_cell.py
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,020
|
mindspore
|
ythlml
|
Python
|
Code
| 1,014
| 5,490
|
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test bert of graph compile """
import functools
import numpy as np
import mindspore.common.dtype as mstype
import mindspore.nn as nn
import mindspore.ops.composite as C
from mindspore.ops import functional as F
from mindspore.common.initializer import TruncatedNormal
from mindspore.common.parameter import ParameterTuple
from mindspore.common.tensor import Tensor
from mindspore.model_zoo.Bert_NEZHA import BertPretrainingLoss, GetNextSentenceOutput
from mindspore.model_zoo.Bert_NEZHA.bert_for_pre_training import clip_grad
from mindspore.model_zoo.Bert_NEZHA.bert_model import BertConfig, \
EmbeddingLookup, EmbeddingPostprocessor, BertOutput, RelaPosMatrixGenerator, \
RelaPosEmbeddingsGenerator, SaturateCast, BertAttention, BertSelfAttention, \
BertEncoderCell, BertTransformer, CreateAttentionMaskFromInputMask, BertModel
from mindspore.nn.layer.basic import Norm
from mindspore.nn.optim import AdamWeightDecay, AdamWeightDecayDynamicLR
from ....mindspore_test_framework.mindspore_test import mindspore_test
from ....mindspore_test_framework.pipeline.forward.compile_forward import \
pipeline_for_compile_forward_ge_graph_for_case_by_case_config
from ....mindspore_test_framework.pipeline.gradient.compile_gradient import \
pipeline_for_compile_grad_ge_graph_for_case_by_case_config
from ....ops_common import convert
def bert_trans():
    """Build a single-layer BertTransformer in training mode, returning all encoders."""
    transformer = BertTransformer(
        batch_size=1,
        hidden_size=768,
        seq_length=128,
        num_hidden_layers=1,
        num_attention_heads=12,
        intermediate_size=768,
        attention_probs_dropout_prob=0.1,
        use_one_hot_embeddings=False,
        initializer_range=0.02,
        use_relative_positions=False,
        hidden_act="gelu",
        compute_type=mstype.float32,
        return_all_encoders=True,
    )
    transformer.set_train()
    return transformer
def set_train(net):
    # Switch `net` into training mode and return it, enabling call chaining
    # in the test-case tables below.
    net.set_train()
    return net
class NetForAdam(nn.Cell):
    """Minimal single-Dense network used to exercise the Adam-style optimizers."""
    def __init__(self):
        super(NetForAdam, self).__init__()
        # Fully connected layer: 64-dimensional input -> 10 outputs.
        self.dense = nn.Dense(64, 10)
    def construct(self, x):
        # Forward pass is just the dense layer.
        x = self.dense(x)
        return x
class TrainStepWrapForAdam(nn.Cell):
    """TrainStepWrapForAdam definition

    Wraps a network with an AdamWeightDecay optimizer and gradient clipping;
    one `construct` call performs a single training step.
    """
    def __init__(self, network):
        super(TrainStepWrapForAdam, self).__init__()
        self.network = network
        # All trainable parameters of the wrapped network, as a ParameterTuple.
        self.weights = ParameterTuple(network.get_parameters())
        self.optimizer = AdamWeightDecay(self.weights)
        self.hyper_map = C.HyperMap()
    def construct(self, x, sens):
        weights = self.weights
        # Gradients of the network w.r.t. its weights, scaled by `sens`.
        grads = C.grad_by_list_with_sens(self.network, weights)(x, sens)
        # Clip every gradient before the update — presumably clip_grad's first
        # two args are (clip_type=1, clip_value=1.0); confirm against its
        # signature in bert_for_pre_training.
        grads = self.hyper_map(F.partial(clip_grad, 1, 1.0), grads)
        return self.optimizer(grads)
class TrainStepWrapForAdamDynamicLr(nn.Cell):
    """TrainStepWrapForAdamDynamicLr definition

    Like TrainStepWrapForAdam but uses AdamWeightDecayDynamicLR with a fixed
    sensitivity tensor instead of taking `sens` as an input.
    """
    def __init__(self, network):
        super(TrainStepWrapForAdamDynamicLr, self).__init__()
        self.network = network
        # All trainable parameters of the wrapped network.
        self.weights = ParameterTuple(network.get_parameters())
        # Second argument is presumably the decay-steps / schedule length of
        # the dynamic learning rate — confirm against the optimizer's API.
        self.optimizer = AdamWeightDecayDynamicLR(self.weights, 10)
        # Constant all-ones sensitivity matching the network's (1, 10) output.
        self.sens = Tensor(np.ones(shape=(1, 10)).astype(np.float32))
    def construct(self, x):
        weights = self.weights
        grads = C.grad_by_list_with_sens(self.network, weights)(x, self.sens)
        return self.optimizer(grads)
class TempC2Wrap(nn.Cell):
    """Applies `op`, partially bound to the two constants c1 and c2, over the
    input via HyperMap (used to test ops such as clip_grad in isolation)."""
    def __init__(self, op, c1=None, c2=None, ):
        super(TempC2Wrap, self).__init__()
        self.op = op
        self.c1 = c1
        self.c2 = c2
        self.hyper_map = C.HyperMap()
    def construct(self, x1):
        # Equivalent to mapping op(c1, c2, elem) over every element of x1.
        x = self.hyper_map(F.partial(self.op, self.c1, self.c2), x1)
        return x
test_case_cell_ops = [
('Norm_keepdims', {
'block': Norm(keep_dims=True),
'desc_inputs': [[1, 3, 4, 4]],
'desc_bprop': [[1]]}),
('SaturateCast', {
'block': SaturateCast(),
'desc_inputs': [[1, 3, 4, 4]],
'desc_bprop': [[1, 3, 4, 4]]}),
('RelaPosMatrixGenerator_0', {
'block': RelaPosMatrixGenerator(length=128, max_relative_position=16),
'desc_inputs': [],
'desc_bprop': [[128, 128]],
'skip': ['backward']}),
('RelaPosEmbeddingsGenerator_0', {
'block': RelaPosEmbeddingsGenerator(length=128, depth=512,
max_relative_position=16,
initializer_range=0.2),
'desc_inputs': [],
'desc_bprop': [[16384, 512]],
'skip': ['backward']}),
('RelaPosEmbeddingsGenerator_1', {
'block': RelaPosEmbeddingsGenerator(length=128, depth=512,
max_relative_position=16,
initializer_range=0.2,
use_one_hot_embeddings=False),
'desc_inputs': [],
'desc_bprop': [[128, 128, 512]],
'skip': ['backward']}),
('RelaPosEmbeddingsGenerator_2', {
'block': RelaPosEmbeddingsGenerator(length=128, depth=64,
max_relative_position=16,
initializer_range=0.2,
use_one_hot_embeddings=False),
'desc_inputs': [],
'desc_bprop': [[128, 128, 64]],
'skip': ['backward']}),
('BertAttention_0', {
'block': BertAttention(batch_size=64,
from_tensor_width=768,
to_tensor_width=768,
from_seq_length=128,
to_seq_length=128,
num_attention_heads=12,
size_per_head=64,
query_act=None,
key_act=None,
value_act=None,
has_attention_mask=True,
attention_probs_dropout_prob=0.1,
use_one_hot_embeddings=False,
initializer_range=0.02,
do_return_2d_tensor=True,
use_relative_positions=False,
compute_type=mstype.float32),
'desc_inputs': [[64, 128, 768], [64, 128, 768], [64, 128, 128]],
'desc_bprop': [[8192, 768]]}),
('BertAttention_1', {
'block': BertAttention(batch_size=64,
from_tensor_width=768,
to_tensor_width=768,
from_seq_length=128,
to_seq_length=128,
num_attention_heads=12,
size_per_head=64,
query_act=None,
key_act=None,
value_act=None,
has_attention_mask=True,
attention_probs_dropout_prob=0.1,
use_one_hot_embeddings=False,
initializer_range=0.02,
do_return_2d_tensor=True,
use_relative_positions=True,
compute_type=mstype.float32),
'desc_inputs': [[64, 128, 768], [64, 128, 768], [64, 128, 128]],
'desc_bprop': [[8192, 768]]}),
('BertAttention_2', {
'block': BertAttention(batch_size=64,
from_tensor_width=768,
to_tensor_width=768,
from_seq_length=128,
to_seq_length=128,
num_attention_heads=12,
size_per_head=64,
query_act=None,
key_act=None,
value_act=None,
has_attention_mask=False,
attention_probs_dropout_prob=0.1,
use_one_hot_embeddings=False,
initializer_range=0.02,
do_return_2d_tensor=True,
use_relative_positions=True,
compute_type=mstype.float32),
'desc_inputs': [[64, 128, 768], [64, 128, 768], [64, 128, 128]],
'desc_bprop': [[8192, 768]]}),
('BertAttention_3', {
'block': BertAttention(batch_size=64,
from_tensor_width=768,
to_tensor_width=768,
from_seq_length=128,
to_seq_length=128,
num_attention_heads=12,
size_per_head=64,
query_act=None,
key_act=None,
value_act=None,
has_attention_mask=True,
attention_probs_dropout_prob=0.1,
use_one_hot_embeddings=False,
initializer_range=0.02,
do_return_2d_tensor=False,
use_relative_positions=True,
compute_type=mstype.float32),
'desc_inputs': [[64, 128, 768], [64, 128, 768], [64, 128, 128]],
'desc_bprop': [[8192, 768]]}),
('BertOutput', {
'block': BertOutput(in_channels=768,
out_channels=768,
initializer_range=0.02,
dropout_prob=0.1),
'desc_inputs': [[8192, 768], [8192, 768]],
'desc_bprop': [[8192, 768]]}),
('BertSelfAttention_0', {
'block': BertSelfAttention(batch_size=64,
seq_length=128,
hidden_size=768,
num_attention_heads=12,
attention_probs_dropout_prob=0.1,
use_one_hot_embeddings=False,
initializer_range=0.02,
hidden_dropout_prob=0.1,
use_relative_positions=False,
compute_type=mstype.float32),
'desc_inputs': [[64, 128, 768], [64, 128, 128]],
'desc_bprop': [[8192, 768]]}),
('BertEncoderCell', {
'block': BertEncoderCell(batch_size=64,
hidden_size=768,
seq_length=128,
num_attention_heads=12,
intermediate_size=768,
attention_probs_dropout_prob=0.02,
use_one_hot_embeddings=False,
initializer_range=0.02,
hidden_dropout_prob=0.1,
use_relative_positions=False,
hidden_act="gelu",
compute_type=mstype.float32),
'desc_inputs': [[64, 128, 768], [64, 128, 128]],
'desc_bprop': [[8192, 768]]}),
('BertTransformer_0', {
'block': BertTransformer(batch_size=1,
hidden_size=768,
seq_length=128,
num_hidden_layers=1,
num_attention_heads=12,
intermediate_size=768,
attention_probs_dropout_prob=0.1,
use_one_hot_embeddings=False,
initializer_range=0.02,
use_relative_positions=False,
hidden_act="gelu",
compute_type=mstype.float32,
return_all_encoders=True),
'desc_inputs': [[1, 128, 768], [1, 128, 128]]}),
('BertTransformer_1', {
'block': BertTransformer(batch_size=64,
hidden_size=768,
seq_length=128,
num_hidden_layers=2,
num_attention_heads=12,
intermediate_size=768,
attention_probs_dropout_prob=0.1,
use_one_hot_embeddings=False,
initializer_range=0.02,
use_relative_positions=True,
hidden_act="gelu",
compute_type=mstype.float32,
return_all_encoders=False),
'desc_inputs': [[64, 128, 768], [64, 128, 128]]}),
('EmbeddingLookup', {
'block': EmbeddingLookup(vocab_size=32000,
embedding_size=768,
embedding_shape=[1, 128, 768],
use_one_hot_embeddings=False,
initializer_range=0.02),
'desc_inputs': [Tensor(np.random.rand(128).astype(np.int32))],
'desc_bprop': [[1, 128, 768], [1, 128, 768]],
'num_output': 2}),
('EmbeddingPostprocessor', {
'block': EmbeddingPostprocessor(embedding_size=768,
embedding_shape=[1, 128, 768],
use_token_type=True,
token_type_vocab_size=16,
use_one_hot_embeddings=False,
initializer_range=0.02,
max_position_embeddings=512,
dropout_prob=0.1),
'desc_inputs': [Tensor(np.random.rand(128).astype(np.int32)), [1, 128, 768]],
'desc_bprop': [[1, 128, 768]]}),
('CreateAttentionMaskFromInputMask', {
'block': CreateAttentionMaskFromInputMask(config=BertConfig(batch_size=1)),
'desc_inputs': [[128]],
'desc_bprop': [[1, 128, 128]]}),
('BertOutput_0', {
'block': BertOutput(in_channels=768,
out_channels=768,
initializer_range=0.02,
dropout_prob=0.1),
'desc_inputs': [[1, 768], [1, 768]],
'desc_bprop': [[1, 768]]}),
('BertTransformer_2', {
'block': bert_trans(),
'desc_inputs': [[1, 128, 768], [1, 128, 128]]}),
('BertModel', {
'block': BertModel(config=BertConfig(batch_size=1,
num_hidden_layers=1,
intermediate_size=768,
token_type_ids_from_dataset=False),
is_training=True),
'desc_inputs': [Tensor(np.random.rand(128).astype(np.int32)),
Tensor(np.random.rand(128).astype(np.int32)), [128]],
'desc_bprop': [[1, 128, 768], [1, 128, 768], [1, 128, 768]],
'num_output': 3}),
('BertModel_1', {
'block': BertModel(config=BertConfig(batch_size=1,
num_hidden_layers=1,
intermediate_size=768,
token_type_ids_from_dataset=False),
is_training=False),
'desc_inputs': [Tensor(np.random.rand(128).astype(np.int32)),
Tensor(np.random.rand(128).astype(np.int32)), [128]],
'desc_bprop': [[1, 128, 768], [1, 128, 768], [1, 128, 768]],
'num_output': 3}),
('BertModel_2', {
'block': BertModel(config=BertConfig(batch_size=1,
num_hidden_layers=1,
intermediate_size=768,
token_type_ids_from_dataset=False,
input_mask_from_dataset=False),
is_training=True),
'desc_inputs': [Tensor(np.random.rand(128).astype(np.int32)),
Tensor(np.random.rand(128).astype(np.int32)), [128]],
'desc_bprop': [[1, 128, 768], [1, 128, 768], [1, 128, 768]],
'num_output': 3}),
('BertPretrainingLoss', {
'block': BertPretrainingLoss(config=BertConfig(batch_size=1)),
'desc_inputs': [[32000], [20, 2], Tensor(np.array([1]).astype(np.int32)),
[20], Tensor(np.array([20]).astype(np.int32))],
'desc_bprop': [[1]],
'num_output': 1}),
('Dense_1', {
'block': nn.Dense(in_channels=768,
out_channels=3072,
activation='gelu',
weight_init=TruncatedNormal(0.02)),
'desc_inputs': [[3, 768]],
'desc_bprop': [[3, 3072]]}),
('Dense_2', {
'block': set_train(nn.Dense(in_channels=768,
out_channels=3072,
activation='gelu',
weight_init=TruncatedNormal(0.02), )),
'desc_inputs': [[3, 768]],
'desc_bprop': [[3, 3072]]}),
('GetNextSentenceOutput', {
'block': GetNextSentenceOutput(BertConfig(batch_size=1)),
'desc_inputs': [[128, 768]],
'desc_bprop': [[128, 2]]}),
('Adam_1', {
'block': set_train(TrainStepWrapForAdam(NetForAdam())),
'desc_inputs': [[1, 64], [1, 10]],
'skip': ['backward']}),
('Adam_2', {
'block': set_train(TrainStepWrapForAdam(GetNextSentenceOutput(BertConfig(batch_size=1)))),
'desc_inputs': [[128, 768], [128, 2]],
'skip': ['backward']}),
('AdamWeightDecayDynamicLR', {
'block': set_train(TrainStepWrapForAdamDynamicLr(NetForAdam())),
'desc_inputs': [[1, 64]],
'skip': ['backward']}),
('ClipGradients', {
'block': TempC2Wrap(clip_grad, 1, 1.0),
'desc_inputs': [tuple(convert(shp) for shp in [[1], [1], [1]])],
'skip': ['backward', 'exec']}),
]
# Combine all per-category case lists into one flat list (reduce over a
# single-element list is effectively the identity; kept so further lists can
# be appended to the reduction later).
test_case = functools.reduce(lambda x, y: x + y, [test_case_cell_ops])
# use -k to select certain test cases, e.g.:
# pytest tests/python/ops/test_ops.py::test_backward -k LayerNorm

# Forward-execution cases: keep a case unless 'exec' appears in its 'skip' list.
test_exec_case = filter(lambda x: 'skip' not in x[1] or
                        'exec' not in x[1]['skip'], test_case)
# Backward-execution cases: keep unless 'backward' or 'backward_exec' is
# skipped.  Python precedence makes this `A or (B and C)`, which is the
# intended grouping.
test_backward_exec_case = filter(lambda x: 'skip' not in x[1] or
                                 'backward' not in x[1]['skip'] and 'backward_exec'
                                 not in x[1]['skip'], test_case)
# NOTE(review): identical predicate to the backward filter above — presumably a
# placeholder for gradient checking; confirm before relying on it.
test_check_gradient_case = filter(lambda x: 'skip' not in x[1] or
                                  'backward' not in x[1]['skip'] and 'backward_exec'
                                  not in x[1]['skip'], test_case)
@mindspore_test(pipeline_for_compile_forward_ge_graph_for_case_by_case_config)
def test_exec():
    """Feed the forward compile/execution pipeline the filtered case list."""
    return test_exec_case
@mindspore_test(pipeline_for_compile_grad_ge_graph_for_case_by_case_config)
def test_backward_exec():
    """Feed the gradient compile/execution pipeline the filtered case list."""
    return test_backward_exec_case
| 6,437
|
https://github.com/thanosargiriou/climex/blob/master/xtreme_Indexes_climex_1.0/gsl_climex_1.0.py
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
climex
|
thanosargiriou
|
Python
|
Code
| 435
| 1,065
|
import pandas as pd
import glob
"""
Climate indexes as defined by http://etccdi.pacificclimate.org/list_27_indices.shtml
Authors: Ioannidis Panagiotis, Athanassios Argiriou,
github:ioannidispanagiotis,
email: panagiwths.ioannidis117@gmail.com
Laboratory of Atmospheric Physics - Department of Physics - University of Patras (https://www.atmosphere-upatras.gr/en)
"""
"""
Climate index: GSL
Definition: [Growing season length]: Annual (1st Jan to 31st Dec in North Hemisphere) count between first span of at
least 6 days with TG>5°C and first span after July 1st of 6 days with TG<5°C.
Version 1.0, 2021-07-09
"""
# csv input files must be named "[station_number(5 digits)]_TG_d.csv"
for filename in glob.glob("*_TG_d.csv"):
    # Daily mean-temperature series, date-indexed.
    # NOTE(review): df is a DataFrame, so the `> 5` / `< 5` comparisons below
    # apply to every column — assumes the file holds a single TG column; confirm.
    df = pd.read_csv(filename, index_col=[0], sep=",", parse_dates=True)  # Data import
    gsl_lst = []  # one GSL value per year of the station record
    for y in range(df.index.year[0], df.index.year[-1] + 1):  # Iteration over years
        df_year = df[df.index.year == y]
        if y in df.index.year:  # Checks if values for given year exist
            tg_bigger = df_year[df_year > 5].dropna()  # Days with TG > 5°C (whole year)
            df_july = df_year[df_year.index.month >= 7]  # From July (month 7) onwards
            tg_smaller = df_july[df_july < 5].dropna()  # Days with TG < 5°C after 1st of July
            if len(tg_bigger) < 6:  # Too few warm days: no growing season this year
                gsl = float("NaN")
                gsl_lst.append(gsl)
            else:
                count1 = 0  # Length of the current run of consecutive warm days
                # NOTE(review): if no run of 6 consecutive days exists, this loop
                # finishes without `break`, `q` is simply the last index, and
                # `jan_span` below is silently wrong — confirm and guard.
                for q in range(1, len(tg_bigger.index)):
                    if (tg_bigger.index[q] - tg_bigger.index[q - 1]).days != 1 and count1 < 6:
                        count1 = 0  # gap found: restart the run
                    elif count1 < 6:
                        count1 = count1 + 1
                    else:  # If we find 6 consecutive days we stop the loop
                        break
                # minus 2: -1 for python zero-indexing and -1 because q starts at the 2nd day
                jan_span = tg_bigger.index[q - 2]  # Last day of first span
                # Find days with TG < 5°C from 1st of July
                if len(tg_smaller) < 6:  # Too few cold days after July
                    gsl = float("NaN")
                    gsl_lst.append(gsl)
                else:
                    count2 = 0  # Length of the current run of consecutive cold days
                    for w in range(1, len(tg_smaller.index)):
                        if (tg_smaller.index[w] - tg_smaller.index[w - 1]).days != 1 and count2 < 6:
                            count2 = 0
                        elif count2 < 6:
                            count2 = count2 + 1
                        else:
                            break
                    # minus 2: -1 for python zero-indexing and -1 because w starts at the 2nd day
                    july_span = tg_smaller.index[w - 2]  # Last day of first span
                    gsl = (july_span - jan_span).days  # Annual count of days between two spans
                    gsl_lst.append(gsl)
        else:
            gsl = float("NaN")  # year missing from the record entirely
            gsl_lst.append(gsl)
    # Dataframe to save files: one row per year with its GSL value.
    dates = pd.date_range(start=str(df.index.year[0]), end=str(df.index.year[-1] + 1), freq="Y")  # Date column
    df_output = pd.DataFrame({"Date": dates, "GSL (# of days)": gsl_lst})
    df_output.to_csv("GSL_" + filename[:5] + ".csv", sep=";", index=False)
| 2,564
|
https://github.com/xui/xuijs.com/blob/master/node_modules/.npm/.cache/express/1.0.1/package/support/connect/support/koala/spec/unit/formatter.spec.js
|
Github Open Source
|
Open Source
|
MIT
| 2,012
|
xuijs.com
|
xui
|
JavaScript
|
Code
| 68
| 217
|
// Unit spec for the koala Formatter, written in the JSpec grammar
// (string-argument describe/it blocks closed with `end`), not plain JS.
Formatter = require('koala/formatter').Formatter

describe 'Formatter'
  describe '.render()'
    it 'should render using the given callback'
      // Minimal ruby-ish lexer: keyword / constant / string / identifier rules.
      var ruby = new Lexer({
        keyword: /^(def|end)\b/,
        constant: /^([A-Z]\w*|__FILE__)/,
        string: /^(".*?")/,
        id: /^(\w+)/
      })
      // Callback wraps each classified token in a span; untagged text
      // (key === null) passes through unchanged.
      var formatter = new Formatter(function(key, val){
        return key === null
          ? val
          : '<span class="' + key + '">' + val + '</span>'
      })
      var html = formatter.render(ruby, 'def foo\n "bar"\nend')
      // Whitespace must pass through unwrapped; keywords must be tagged.
      html.should.not.include '<span> </span>'
      html.should.include '<span class="keyword">def</span>'
    end
  end
end
| 29,602
|
https://github.com/snowymo/OnlineSurfaceReconstruction/blob/master/libOSR/include/osr/Neighbor.h
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,022
|
OnlineSurfaceReconstruction
|
snowymo
|
C
|
Code
| 171
| 495
|
/*
This file is part of the implementation for the technical paper
Field-Aligned Online Surface Reconstruction
Nico Schertler, Marco Tarini, Wenzel Jakob, Misha Kazhdan, Stefan Gumhold, Daniele Panozzo
ACM TOG 36, 4, July 2017 (Proceedings of SIGGRAPH 2017)
Use of this source code is granted via a BSD-style license, which can be found
in License.txt in the repository root.
@author Nico Schertler
*/
#pragma once
#include "common.h"
namespace osr
{
    /// A nearest-neighbour candidate: a point handle plus its squared
    /// distance to the query.  Ordering looks at the squared distance only,
    /// so two candidates at equal distance compare neither < nor >.
    template <typename TData>
    struct Neighbor
    {
        Neighbor(TData idx, Float distanceSq)
            : idx(idx), distanceSq(distanceSq)
        { }

        bool operator>(const Neighbor<TData>& other) const { return other.distanceSq < distanceSq; }
        bool operator<(const Neighbor<TData>& other) const { return distanceSq < other.distanceSq; }

        TData idx;
        Float distanceSq;
    };

    /// Like Neighbor, but imposes a strict total order by breaking
    /// distance ties with the index — usable as a set/heap key where
    /// distinct elements must never compare equal.
    template <typename TData>
    struct NeighborStrictOrder
    {
        NeighborStrictOrder(TData idx, Float distanceSq)
            : idx(idx), distanceSq(distanceSq)
        { }

        bool operator>(const NeighborStrictOrder<TData>& other) const
        {
            return distanceSq == other.distanceSq ? idx > other.idx
                                                  : distanceSq > other.distanceSq;
        }

        bool operator<(const NeighborStrictOrder<TData>& other) const
        {
            return distanceSq == other.distanceSq ? idx < other.idx
                                                  : distanceSq < other.distanceSq;
        }

        TData idx;
        Float distanceSq;
    };
}
| 16,575
|
https://github.com/advocaite/Painting-Hung-7remake/blob/master/mobile/language/vi/tracking.mo
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
Painting-Hung-7remake
|
advocaite
|
Modelica
|
Code
| 216
| 651
|
<?php
// Language pack (Vietnamese, partially translated) for the tracking/admin
// screens: maps internal message keys to display strings.
// NOTE(review): many values are still English — translation looks unfinished.

// Direct-access guard. NOTE(review): the message has a typo ("attemp") but it
// is runtime output, so it is left untouched here.
if(!defined('INSIDE')){ die("attemp hacking");}
$lang['Same PC'] = 'Same PC';
$lang['Report Statistic'] = 'Report Statistic';
$lang['Resource Statistic'] = 'Resource Statistic';
$lang['Attack Statistic'] = 'Attack Statistic';
$lang['Report Message'] = 'Message Statistic';
$lang['Keywords'] = 'Username';
$lang['find'] = 'Tìm kiếm';
$lang['Username'] = 'Username';
$lang['username'] = 'Username';
$lang['feature'] = 'Ðặc tính';
$lang['id'] = 'Định danh';
$lang['ip'] = 'Địa chỉ IP';
$lang['time'] = 'Thời gian';
$lang['Total record'] = 'Total record';
$lang['Of'] = 'of';
$lang['Page'] = 'Page:';
$lang['IP'] = 'IP';
$lang['All'] = 'All';
$lang['Attacks'] = 'Attacks';
$lang['Reinforcement'] = 'Reinforcement';
$lang['Trade'] = 'Trade';
$lang['Title'] = 'Title';
$lang['Time'] = 'Time';
$lang['Null'] = 'Null';
$lang['Search'] = 'Search';
$lang['new'] = 'Chưa đọc';
$lang['Select ALL'] = 'Select All';
$lang['Un Select ALL'] = 'Un Select All';
$lang['Delete'] = 'Delete';
$lang['Ban'] = 'Ban User';
// NOTE(review): duplicate key — overwrites the identical definition above.
$lang['Total record'] = 'Total record';
$lang['This is capital'] = 'This is capital. Do not delete';
$lang['Select village to delete'] = 'Select village to delete';
$lang['Delete village'] = 'Delete village';
$lang['Delete ally'] = 'Delete ally';
$lang['Subtract population'] = 'Subtract population';
$lang['Subtract troop'] = 'Subtract troop';
$lang['Noi thanh'] = 'Noi thanh';
$lang['Ngoai thanh'] = 'Ngoai thanh';
$lang['Subtract outside resource'] = "Subtract outside resource";
$lang['Subtract inside resource'] = "Subtract inside resource";
$lang['Sub troop'] = 'Subtract';
$lang['Troop'] = 'troops';
$lang['Ip'] = 'IP';
$lang['No'] = 'No';
$lang['Punish'] = 'Punish';
// NOTE(review): key is misspelled ("Amout"); callers look it up by this exact
// key, so it cannot be renamed here without touching those call sites.
$lang['Amout'] = 'Amount';
$lang['Bad list'] = 'Bad list';
$lang['Detail'] = 'Detail';
?>
| 5,782
|
https://github.com/darkmice/rc-tabs/blob/master/__test__/index.test.js
|
Github Open Source
|
Open Source
|
MIT
| 2,017
|
rc-tabs
|
darkmice
|
JavaScript
|
Code
| 116
| 498
|
import React from 'react';
import sinon from 'sinon';
import {Tabs, TabPanel} from '../src/index.js';
import {mount} from 'enzyme';
describe('<Tabs />', () => {
it('calls componentWillReceiveProps', () => {
sinon.spy(Tabs.prototype, 'componentWillReceiveProps');
const wrapper = mount(
<Tabs>
<TabPanel name="1" />
<TabPanel name="2" />
</Tabs>
);
wrapper.setProps({
activeKey: 1,
});
expect(Tabs.prototype.componentWillReceiveProps.calledOnce).toEqual(true);
});
it('allows us to set props', () => {
const wrapper = mount(
<Tabs>
<TabPanel name="1" />
<TabPanel name="2" />
</Tabs>
);
expect(wrapper.props().activeKey).toEqual(0);
expect(wrapper.props().mode).toEqual('fade');
expect(wrapper.props().direction).toEqual('up');
expect(wrapper.props().clean).toEqual(false);
wrapper.setProps({
activeKey: 1,
mode: 'slide',
direction: 'down',
clean: true
});
expect(wrapper.props().activeKey).toEqual(1);
expect(wrapper.props().mode).toEqual('slide');
expect(wrapper.props().direction).toEqual('down');
expect(wrapper.props().clean).toEqual(true);
});
it('simulates tap events', () => {
const onButtonClick = sinon.spy();
const wrapper = mount(
<Tabs>
<TabPanel name="1" />
<TabPanel name="2" />
</Tabs>
);
console.log(wrapper.find('item'));
wrapper.find('.item').simulate('tap');
expect(onButtonClick.calledOnce).to.equal(true);
});
});
| 796
|
https://github.com/condy0919/sleepyhead/blob/master/src/fdflag.rs
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
sleepyhead
|
condy0919
|
Rust
|
Code
| 156
| 463
|
use std::ops::BitOrAssign;
use std::os::unix::io::RawFd;
// Design notes (translated from the original Chinese comments):
// 1. EXISTS / ERROR flags are used at registration time.
// 2. When epoll wakes up, the flag tells an ordinary EPOLL event apart from a
//    request, delivered over a channel, to create a new uthread.

/// Marker trait for the 32-bit flag half of an `FdFlag`: it must round-trip
/// through `u32`, be cheaply copyable, and support `|=` merging.
pub trait Flag: From<u32> + Into<u32> + Clone + Copy + BitOrAssign {}
/// A raw file descriptor paired with a typed flag.  The pair packs into a
/// single `u64` (fd in the low 32 bits, flag in the high 32) via the
/// conversions implemented in this module.
pub struct FdFlag<T: Flag> {
    // Low 32 bits of the packed representation.
    fd: RawFd,
    // High 32 bits of the packed representation.
    flag: T,
}
impl<T: Flag> From<u64> for FdFlag<T> {
    /// Unpack a `u64`: the low 32 bits hold the fd, the high 32 the flag.
    #[inline]
    fn from(packed: u64) -> Self {
        let raw_fd = (packed & 0x00000000ffffffff) as RawFd;
        let raw_flag = T::from((packed >> 32) as u32);
        FdFlag {
            fd: raw_fd,
            flag: raw_flag,
        }
    }
}
// Implementing `From<FdFlag<T>> for u64` instead of a manual `Into` impl
// follows the std convention (Clippy's `from_over_into`); the blanket
// `impl<T, U: From<T>> Into<U> for T` keeps every existing `.into()` call
// site compiling unchanged.
impl<T: Flag> From<FdFlag<T>> for u64 {
    /// Pack the pair into a `u64`: flag in the high 32 bits, fd in the low.
    #[inline]
    fn from(v: FdFlag<T>) -> u64 {
        let flag_bits: u32 = v.flag.into();
        (u64::from(flag_bits) << 32) | u64::from(v.fd as u32)
    }
}
impl<T: Flag> FdFlag<T> {
    /// Build a new fd/flag pair.
    #[inline]
    pub fn new(fd: RawFd, flag: T) -> Self {
        Self { fd, flag }
    }

    /// The wrapped raw file descriptor.
    #[inline]
    pub fn get_fd(&self) -> RawFd {
        self.fd
    }

    /// A copy of the wrapped flag (flags are `Copy`).
    #[inline]
    pub fn get_flag(&self) -> T {
        self.flag
    }

    /// OR extra bits into the flag in place; returns `&mut self` for chaining.
    #[inline]
    pub fn or_flag(&mut self, extra: T) -> &mut Self {
        self.flag |= extra;
        self
    }
}
| 35,094
|
https://github.com/ktech99/asterixdb/blob/master/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NodeControllerDetailsApiServlet.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
asterixdb
|
ktech99
|
Java
|
Code
| 556
| 1,782
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.api.http.server;
import static org.apache.asterix.api.http.server.NodeControllerDetailsHelper.fixupKeys;
import static org.apache.asterix.api.http.server.NodeControllerDetailsHelper.processNodeDetailsJSON;
import static org.apache.asterix.api.http.server.ServletConstants.HYRACKS_CONNECTION_ATTR;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.concurrent.ConcurrentMap;
import org.apache.asterix.common.cluster.ClusterPartition;
import org.apache.asterix.common.cluster.IClusterStateManager;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.utils.HttpUtil;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.netty.handler.codec.http.HttpResponseStatus;
/**
 * REST endpoint exposing per-node-controller details.  Routes handled (relative
 * to the servlet path): "" (all NCs), "/{node}" (one NC's cluster-state entry),
 * and "/{node}/config|stats|threaddump".
 */
public class NodeControllerDetailsApiServlet extends ClusterApiServlet {

    private static final Logger LOGGER = LogManager.getLogger();

    public NodeControllerDetailsApiServlet(ICcApplicationContext appCtx, ConcurrentMap<String, Object> ctx,
            String... paths) {
        super(appCtx, ctx, paths);
    }

    /**
     * Serves the JSON for the requested path.  Exception-to-status mapping:
     * IllegalStateException -> 503 (cluster/CC not ready),
     * IllegalArgumentException -> 404 (unknown node or sub-path),
     * anything else -> 500 with the exception text in the body.
     */
    @Override
    protected void get(IServletRequest request, IServletResponse response) throws IOException {
        PrintWriter responseWriter = response.writer();
        IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
        try {
            ObjectNode json;
            response.setStatus(HttpResponseStatus.OK);
            if ("".equals(localPath(request))) {
                // No node selected: return just the "ncs" array of the cluster state.
                json = OBJECT_MAPPER.createObjectNode();
                json.set("ncs", getClusterStateJSON(request, "../").get("ncs"));
            } else {
                json = processNode(request, hcc);
            }
            HttpUtil.setContentType(response, HttpUtil.ContentType.APPLICATION_JSON, HttpUtil.Encoding.UTF8);
            responseWriter.write(OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(json));
        } catch (IllegalStateException e) { // NOSONAR - exception not logged or rethrown
            response.setStatus(HttpResponseStatus.SERVICE_UNAVAILABLE);
        } catch (IllegalArgumentException e) { // NOSONAR - exception not logged or rethrown
            response.setStatus(HttpResponseStatus.NOT_FOUND);
        } catch (Exception e) {
            LOGGER.log(Level.INFO, "exception thrown for " + request, e);
            response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
            responseWriter.write(e.toString());
        }
        responseWriter.flush();
    }

    /**
     * Resolves "/{node}" or "/{node}/{config|stats|threaddump}" to its JSON.
     *
     * @throws IllegalArgumentException for trailing slashes, unknown nodes,
     *         unknown sub-paths, or paths with more than two segments
     */
    private ObjectNode processNode(IServletRequest request, IHyracksClientConnection hcc) throws Exception {
        String localPath = localPath(request);
        if (localPath.endsWith("/")) {
            throw new IllegalArgumentException();
        }
        String[] parts = localPath.substring(1).split("/");
        final String node = parts[0];
        if (parts.length == 1) {
            // Bare "/{node}": find its entry in the cluster-state "ncs" array.
            ArrayNode ncs = (ArrayNode) getClusterStateJSON(request, "../../").get("ncs");
            for (int i = 0; i < ncs.size(); i++) {
                if (node.equals(ncs.get(i).get("node_id").asText())) {
                    return (ObjectNode) ncs.get(i);
                }
            }
            if ("cc".equals(node)) {
                // The CC has no NC entry; an empty object signals "known but N/A".
                return OBJECT_MAPPER.createObjectNode();
            }
            throw new IllegalArgumentException();
        } else if (parts.length == 2) {
            ObjectNode json;
            switch (parts[1]) {
                case "config":
                    json = processNodeConfig(hcc, node);
                    break;
                case "stats":
                    json = processNodeStats(hcc, node);
                    break;
                case "threaddump":
                    // Thread dumps are returned as-is, without key fixup.
                    return processNodeThreadDump(hcc, node);
                default:
                    throw new IllegalArgumentException();
            }
            fixupKeys(json);
            return json;
        } else {
            throw new IllegalArgumentException();
        }
    }

    /** Fetches the node's runtime stats and post-processes them for display. */
    protected ObjectNode processNodeStats(IHyracksClientConnection hcc, String node) throws Exception {
        final String details = checkNullDetail(node, hcc.getNodeDetailsJSON(node, true, false));
        return processNodeDetailsJSON((ObjectNode) OBJECT_MAPPER.readTree(details), OBJECT_MAPPER);
    }

    /** Fetches the node's configuration as JSON. */
    private ObjectNode processNodeConfig(IHyracksClientConnection hcc, String node) throws Exception {
        String config = checkNullDetail(node, hcc.getNodeDetailsJSON(node, false, true));
        return (ObjectNode) OBJECT_MAPPER.readTree(config);
    }

    /** Fetches a thread dump; the CC yields an empty object (no NC dump exists). */
    private ObjectNode processNodeThreadDump(IHyracksClientConnection hcc, String node) throws Exception {
        if ("cc".equals(node)) {
            return OBJECT_MAPPER.createObjectNode();
        }
        String dump = checkNullDetail(node, hcc.getThreadDump(node));
        return (ObjectNode) OBJECT_MAPPER.readTree(dump);
    }

    /**
     * Returns {@code value} if present; otherwise distinguishes a known-but-down
     * node (IllegalStateException -> 503) from an unknown one
     * (IllegalArgumentException -> 404) via the cluster state manager.
     */
    protected String checkNullDetail(String node, String value) {
        if (value != null) {
            return value;
        }
        if (node == null) {
            // something is seriously wrong if we can't get the cc detail
            throw new IllegalStateException("unable to obtain detail from cc");
        }
        // check to see if this is a node that is simply down
        IClusterStateManager csm = appCtx.getClusterStateManager();
        ClusterPartition[] cp = csm.getNodePartitions(node);
        throw cp != null ? new IllegalStateException("unable to obtain detail from node " + node)
                : new IllegalArgumentException("unknown node " + node);
    }
}
| 40,676
|
https://github.com/VN-huster/OOLT.ICT.20202.20184267.NguyenNgocHuan/blob/master/Lab01/matrix.java
|
Github Open Source
|
Open Source
|
MIT
| null |
OOLT.ICT.20202.20184267.NguyenNgocHuan
|
VN-huster
|
Java
|
Code
| 113
| 417
|
import java.util.Scanner;
import java.util.Arrays;
/**
 * Reads two r x c integer matrices from standard input and prints their
 * element-wise sum.
 *
 * Fixes over the original: the Scanner is closed via try-with-resources
 * (resource leak), output entries are space-separated (multi-digit values
 * previously ran together ambiguously), and the duplicated read loops are
 * factored into a helper.
 */
public class matrix {

    public static void main(String[] args) {
        try (Scanner in = new Scanner(System.in)) {
            System.out.println("Enter rows: ");
            int r = in.nextInt();
            System.out.println("Enter columns: ");
            int c = in.nextInt();

            int[][] m1 = readMatrix(in, r, c, "Enter matrix1: ");
            int[][] m2 = readMatrix(in, r, c, "Enter matrix2: ");

            // Element-wise sum.
            int[][] m = new int[r][c];
            for (int i = 0; i < r; i++) {
                for (int j = 0; j < c; j++) {
                    m[i][j] = m1[i][j] + m2[i][j];
                }
            }

            System.out.println("Sum of 2 matrix is: ");
            for (int i = 0; i < r; i++) {
                for (int j = 0; j < c; j++) {
                    // Space separator keeps multi-digit entries readable.
                    System.out.print(m[i][j] + " ");
                }
                System.out.println();
            }
        }
    }

    /** Prompts, then reads an r x c integer matrix in row-major order. */
    private static int[][] readMatrix(Scanner in, int r, int c, String prompt) {
        System.out.println(prompt);
        int[][] m = new int[r][c];
        for (int i = 0; i < r; i++) {
            for (int j = 0; j < c; j++) {
                m[i][j] = in.nextInt();
            }
        }
        return m;
    }
}
|
https://github.com/Grafelhaft/PolyCalc/blob/master/src/main/java/de/grafelhaft/polygoncalc/algorithm/QuickHull.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
PolyCalc
|
Grafelhaft
|
Java
|
Code
| 443
| 1,301
|
package de.grafelhaft.polygoncalc.algorithm;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import de.grafelhaft.polygoncalc.model.IPoint;
/**
 * Created by @author Markus Graf (Grafelhaft) on 20.09.2020
 * Source: http://www.ahristov.com/tutorial/geometry-games/convex-hull.html
 *
 * QuickHull convex-hull computation over {@code IPoint}s.
 *
 * Changes over the original: {@code pointLocation} is no longer evaluated
 * twice per point when partitioning, the min/max sentinels use double
 * infinities instead of int limits (coordinates beyond the int range were
 * mis-handled), {@code distance} uses {@link Math#abs}, and empty-set checks
 * use {@code isEmpty()}.
 */
public class QuickHull {

    /** Array convenience overload; copies the input, so it is not mutated. */
    protected static IPoint[] getConvexHull(IPoint... points) {
        List<? extends IPoint> hull = getConvexHull(new ArrayList<IPoint>(Arrays.asList(points)));
        return hull.toArray(new IPoint[hull.size()]);
    }

    /**
     * Computes the convex hull of {@code points}.
     *
     * NOTE: with more than 3 points this MUTATES the input list (the two
     * extreme points are removed); with 3 or fewer points the input list is
     * returned unchanged, since every point is on the hull.
     */
    protected static List<? extends IPoint> getConvexHull(List<? extends IPoint> points) {
        // It's maybe not even a triangle
        if (points.size() <= 3)
            return points;

        ArrayList<IPoint> convexHull = new ArrayList<>();

        // The points with minimal and maximal x are always hull vertices.
        int minPoint = -1, maxPoint = -1;
        double minX = Double.POSITIVE_INFINITY;
        double maxX = Double.NEGATIVE_INFINITY;
        for (int i = 0; i < points.size(); i++) {
            double x = points.get(i).x();
            if (x < minX) {
                minX = x;
                minPoint = i;
            }
            if (x > maxX) {
                maxX = x;
                maxPoint = i;
            }
        }
        IPoint A = points.get(minPoint);
        IPoint B = points.get(maxPoint);
        convexHull.add(A);
        convexHull.add(B);
        points.remove(A);
        points.remove(B);

        // Partition the remaining points by which side of AB they lie on.
        ArrayList<IPoint> leftSet = new ArrayList<>();
        ArrayList<IPoint> rightSet = new ArrayList<>();
        for (int i = 0; i < points.size(); i++) {
            IPoint p = points.get(i);
            int side = pointLocation(A, B, p); // hoisted: was evaluated twice
            if (side == -1)
                leftSet.add(p);
            else if (side == 1)
                rightSet.add(p);
        }
        hullSet(A, B, rightSet, convexHull);
        hullSet(B, A, leftSet, convexHull);
        return convexHull;
    }

    /**
     * Unnormalised distance of C from line AB: |cross(AB, AC)|.  Proportional
     * to the true distance, which suffices for comparing candidates against
     * the same segment.
     */
    private static double distance(IPoint A, IPoint B, IPoint C) {
        double ABx = B.x() - A.x();
        double ABy = B.y() - A.y();
        return Math.abs(ABx * (A.y() - C.y()) - ABy * (A.x() - C.x()));
    }

    /**
     * Recursive QuickHull step: inserts into {@code hull}, before B, the hull
     * vertices among {@code set} that lie outside segment AB.  Consumes
     * {@code set}.
     */
    private static void hullSet(IPoint A, IPoint B, ArrayList<IPoint> set,
                                ArrayList<IPoint> hull) {
        int insertPosition = hull.indexOf(B);
        if (set.isEmpty())
            return;
        if (set.size() == 1) {
            IPoint p = set.get(0);
            set.remove(p);
            hull.add(insertPosition, p);
            return;
        }
        // The point furthest from AB is guaranteed to be a hull vertex.
        double dist = Double.NEGATIVE_INFINITY;
        int furthestPoint = -1;
        for (int i = 0; i < set.size(); i++) {
            double d = distance(A, B, set.get(i));
            if (d > dist) {
                dist = d;
                furthestPoint = i;
            }
        }
        IPoint P = set.get(furthestPoint);
        set.remove(furthestPoint);
        hull.add(insertPosition, P);

        // Determine who's to the left of AP
        ArrayList<IPoint> leftSetAP = new ArrayList<>();
        for (int i = 0; i < set.size(); i++) {
            IPoint M = set.get(i);
            if (pointLocation(A, P, M) == 1) {
                leftSetAP.add(M);
            }
        }
        // Determine who's to the left of PB
        ArrayList<IPoint> leftSetPB = new ArrayList<>();
        for (int i = 0; i < set.size(); i++) {
            IPoint M = set.get(i);
            if (pointLocation(P, B, M) == 1) {
                leftSetPB.add(M);
            }
        }
        hullSet(A, P, leftSetAP, hull);
        hullSet(P, B, leftSetPB, hull);
    }

    /**
     * Sign of cross(AB, AP): 1 if P is left of AB, -1 if right, 0 if
     * collinear.
     */
    private static int pointLocation(IPoint A, IPoint B, IPoint P) {
        double cp1 = (B.x() - A.x()) * (P.y() - A.y()) - (B.y() - A.y()) * (P.x() - A.x());
        if (cp1 > 0)
            return 1;
        else if (cp1 == 0)
            return 0;
        else
            return -1;
    }
}
| 25,928
|
https://github.com/jinshuilai/maoshop/blob/master/src/com/mao/shop/service/impl/CartServiceImpl.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,016
|
maoshop
|
jinshuilai
|
Java
|
Code
| 801
| 3,284
|
package com.mao.shop.service.impl;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
import com.mao.shop.dao.SkuDao;
import com.mao.shop.po.Cart;
import com.mao.shop.po.ProductSku;
import com.mao.shop.po.ProductSpec;
import com.mao.shop.service.CartService;
import com.mao.shop.utils.MaoUtils;
@Service
public class CartServiceImpl implements CartService {
@Autowired
private SkuDao skuDao;
@Override
public void addCart(Integer skuId, Integer quantity, HttpServletRequest request, HttpServletResponse response) {
// cookie list对购物数据操作
List<Cart> cartList = new ArrayList<Cart>();
// 获取cookie的key
String cartKey = MaoUtils.readProp("cookie_cartKey");
Gson gson = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().create();
// 拿到所有的cookie
Cookie[] cookies = request.getCookies();
// 如果cookie不为空且有值
if (cookies != null && cookies.length > 0) {
for (Cookie cookie : cookies) {
String cookieKey = cookie.getName();
// 判断是否是cart的cookiekey
if (StringUtils.equals(cartKey, cookieKey)) {
String cartVal = cookie.getValue();
// base64解码
cartVal = URLDecoder.decode(cartVal);
// cookie转换json
// 转成java对象
cartList = gson.fromJson(cartVal, new TypeToken<List<Cart>>() {
}.getType());
boolean isExsit = false;
// 如果商品已存在
for (Cart cart : cartList) {
if (cart.getSkuId().intValue() == skuId.intValue()) {
cart.setQuantity(cart.getQuantity() + quantity);
isExsit = true;
break;
}
}
// 商品不存在
if (!isExsit) {
Cart cart = new Cart();
cart.setSkuId(skuId);
cart.setQuantity(quantity);
cartList.add(cart);
}
}
}
}
// 没有购物车cookie
if (cartList.size() == 0) {
Cart cart = new Cart();
cart.setSkuId(skuId);
cart.setQuantity(quantity);
cartList.add(cart);
}
// java对象转换成json
String result = gson.toJson(cartList);
result = URLDecoder.decode(result);
Cookie cookie = new Cookie(cartKey, result);
cookie.setMaxAge(Integer.MAX_VALUE);
cookie.setPath("/");
response.addCookie(cookie);
}
@Override
public List<Cart> selectCart(HttpServletRequest request, HttpServletResponse response) {
// cookie list对购物数据操作
List<Cart> cartList = new ArrayList<Cart>();
// 获取cookie的key
String cartKey = MaoUtils.readProp("cookie_cartKey");
Gson gson = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().create();
// 拿到所有的cookie
Cookie[] cookies = request.getCookies();
// 如果cookie不为空且有值
if (cookies != null && cookies.length > 0) {
for (Cookie cookie : cookies) {
String cookieKey = cookie.getName();
// 判断是否是cart的cookiekey
if (StringUtils.equals(cartKey, cookieKey)) {
String cartVal = cookie.getValue();
// base64解码
cartVal = URLDecoder.decode(cartVal);
// cookie转换json
// 转成java对象
cartList = gson.fromJson(cartVal, new TypeToken<List<Cart>>() {
}.getType());
for (Cart cart : cartList) {
ProductSku sku = skuDao.getSkuDetail(cart.getSkuId());
cart.setSku(sku);
}
}
}
}
return cartList;
}
@Override
public void deleteCart(Integer skuId, HttpServletRequest request, HttpServletResponse response) {
// cookie list对购物数据操作
List<Cart> cartList = new ArrayList<Cart>();
// 获取cookie的key
String cartKey = MaoUtils.readProp("cookie_cartKey");
Gson gson = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().create();
// 拿到所有的cookie
Cookie[] cookies = request.getCookies();
// 如果cookie不为空且有值
if (cookies != null && cookies.length > 0) {
for (Cookie cookie : cookies) {
String cookieKey = cookie.getName();
// 判断是否是cart的cookiekey
if (StringUtils.equals(cartKey, cookieKey)) {
String cartVal = cookie.getValue();
// base64解码
cartVal = URLDecoder.decode(cartVal);
// cookie转换json
// 转成java对象
cartList = gson.fromJson(cartVal, new TypeToken<List<Cart>>() {
}.getType());
for (int i = 0;i < cartList.size();i++) {
Cart cart = cartList.get(i);
if (cart.getSkuId().intValue() == skuId.intValue()) {
cartList.remove(cart);
}
}
}
}
}
// java对象转换成json
String result = gson.toJson(cartList);
result = URLDecoder.decode(result);
Cookie cookie = new Cookie(cartKey, result);
cookie.setMaxAge(Integer.MAX_VALUE);
cookie.setPath("/");
response.addCookie(cookie);
}
@Override
public void updateCart(Integer skuId, Integer quantity, HttpServletRequest request, HttpServletResponse response) {
// cookie list对购物数据操作
List<Cart> cartList = new ArrayList<Cart>();
// 获取cookie的key
String cartKey = MaoUtils.readProp("cookie_cartKey");
Gson gson = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().create();
// 拿到所有的cookie
Cookie[] cookies = request.getCookies();
// 如果cookie不为空且有值
if (cookies != null && cookies.length > 0) {
for (Cookie cookie : cookies) {
String cookieKey = cookie.getName();
// 判断是否是cart的cookiekey
if (StringUtils.equals(cartKey, cookieKey)) {
String cartVal = cookie.getValue();
// base64解码
cartVal = URLDecoder.decode(cartVal);
// cookie转换json
// 转成java对象
cartList = gson.fromJson(cartVal, new TypeToken<List<Cart>>() {
}.getType());
for (Cart cart : cartList) {
if (cart.getSkuId().intValue() == skuId.intValue()) {
cart.setQuantity(quantity);
break;
}
}
}
}
}
// java对象转换成json
String result = gson.toJson(cartList);
result = URLDecoder.decode(result);
Cookie cookie = new Cookie(cartKey, result);
cookie.setMaxAge(Integer.MAX_VALUE);
cookie.setPath("/");
response.addCookie(cookie);
}
/**
 * Empties the shopping cart by overwriting the cart cookie with an empty
 * list.
 *
 * <p>The previous implementation read the existing cookie, deserialized it
 * into a {@code List<Cart>} and called {@code clear()} on it — dead work,
 * since the value written back is always the JSON of an empty list ("[]"),
 * and an NPE risk when {@code fromJson} returned {@code null}. The final
 * cookie written is identical to before.</p>
 *
 * @param request  current request (no longer inspected; kept so the
 *                 interface is unchanged for callers)
 * @param response response the cleared cart cookie is added to
 */
@Override
public void clearCart(HttpServletRequest request, HttpServletResponse response) {
    // Name under which the cart cookie is stored (read from configuration).
    String cartKey = MaoUtils.readProp("cookie_cartKey");
    // Only fields annotated with @Expose are serialized.
    Gson gson = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().create();
    // Serializing an empty list yields "[]" — exactly the value the old
    // parse-then-clear code ended up writing (URLDecoder.decode("[]") is
    // also "[]", so dropping that call preserves the stored value).
    String result = gson.toJson(new ArrayList<Cart>());
    Cookie cookie = new Cookie(cartKey, result);
    // Effectively never expires.
    cookie.setMaxAge(Integer.MAX_VALUE);
    cookie.setPath("/");
    response.addCookie(cookie);
}
/**
 * Validates every line of the cart stored in the cart cookie against the
 * currently available stock.
 *
 * @param request  request carrying the cart cookie (if any)
 * @param response unused here; kept for interface symmetry
 * @return "success" when all lines fit within stock; otherwise a message
 *         naming the first offending SKU, its requested quantity and the
 *         actual stock
 */
@Override
public String validCart(HttpServletRequest request, HttpServletResponse response) {
    // Optimistic default: everything is in stock.
    String message = "success";
    // Name under which the cart cookie is stored (read from configuration).
    String cartCookieName = MaoUtils.readProp("cookie_cartKey");
    // Only fields annotated with @Expose are deserialized.
    Gson gson = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().create();
    Cookie[] cookies = request.getCookies();
    if (cookies == null || cookies.length == 0) {
        return message;
    }
    for (Cookie candidate : cookies) {
        if (!StringUtils.equals(cartCookieName, candidate.getName())) {
            continue;
        }
        // URL-decode the stored value, then JSON -> List<Cart>.
        String payload = URLDecoder.decode(candidate.getValue());
        List<Cart> lines = gson.fromJson(payload, new TypeToken<List<Cart>>() {
        }.getType());
        for (Cart line : lines) {
            ProductSku sku = skuDao.getSkuDetail(line.getSkuId());
            if (line.getQuantity().intValue() > sku.getStock().intValue()) {
                // Build the error text: product name, every spec value,
                // then the requested quantity and the real stock.
                StringBuilder detail = new StringBuilder();
                detail.append(sku.getProduct().getPname());
                for (ProductSpec spec : sku.getSpecList()) {
                    detail.append(spec.getAttrvalue());
                }
                detail.append("库存不足").append(line.getQuantity())
                        .append("实际库存为:").append(sku.getStock());
                message = detail.toString();
                // Report only the first offending line of this cart.
                break;
            }
        }
    }
    return message;
}
}
| 3,141
|
https://github.com/liukun4515/TSLevelDB/blob/master/src/main/java/edu/tsinghua/k1/BaseTimeSeriesDB.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
TSLevelDB
|
liukun4515
|
Java
|
Code
| 112
| 462
package edu.tsinghua.k1;
import edu.tsinghua.k1.api.ITimeSeriesDB;
import edu.tsinghua.k1.api.ITimeSeriesWriteBatch;
import edu.tsinghua.k1.api.TimeSeriesDBException;
import edu.tsinghua.k1.api.TimeSeriesDBIterator;
import java.io.IOException;
import org.iq80.leveldb.DB;
import org.iq80.leveldb.DBIterator;
import org.iq80.leveldb.WriteBatch;
/**
 * Time-series database backed by a LevelDB instance.
 *
 * <p>All operations delegate to the wrapped {@link DB}. Range iteration is
 * bounded by keys built from the series uid (via {@code TimeSeriesMap})
 * combined with a timestamp.</p>
 */
public class BaseTimeSeriesDB implements ITimeSeriesDB {

  /** Underlying LevelDB store holding the encoded time-series entries. */
  private final DB store;

  public BaseTimeSeriesDB(DB db) {
    this.store = db;
  }

  @Override
  public ITimeSeriesWriteBatch createBatch() {
    // Wrap the raw LevelDB batch in the time-series adapter.
    return new TimeSeriesBatch(store.createWriteBatch());
  }

  @Override
  public void write(ITimeSeriesWriteBatch batch) throws TimeSeriesDBException {
    store.write(batch.getData());
  }

  @Override
  public TimeSeriesDBIterator iterator(String timeSeries, long startTime, long endTime)
      throws TimeSeriesDBException {
    DBIterator raw = store.iterator();
    // Boundary keys: (series uid, timestamp) for both ends of the range.
    byte[] lowerKey = ByteUtils.getKey(TimeSeriesMap.getInstance().getUid(timeSeries), startTime);
    byte[] upperKey = ByteUtils.getKey(TimeSeriesMap.getInstance().getUid(timeSeries), endTime);
    return new BaseTimeSeriesDBIteration(lowerKey, upperKey, raw);
  }

  @Override
  public void close() throws IOException {
    store.close();
  }
}
| 41,902
|
https://github.com/Felixleh/spe-ed-solver/blob/master/spe-ed-solver/web-communication/src/main/java/webcommunication/webservice/MalformedURLException.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
spe-ed-solver
|
Felixleh
|
Java
|
Code
| 39
| 110
|
package webcommunication.webservice;
/**
 * Exception wrapper thrown when a URL has an invalid format.
 *
 * <p>Note: this intentionally shadows {@link java.net.MalformedURLException}
 * within this package's namespace.</p>
 */
@SuppressWarnings("serial")
public class MalformedURLException extends Exception {

    /**
     * @param message description of the malformed URL
     */
    public MalformedURLException(String message) {
        super(message);
    }

    /**
     * @param message   description of the malformed URL
     * @param throwable underlying cause
     */
    public MalformedURLException(String message, Throwable throwable) {
        super(message, throwable);
    }
}
| 48,775
|
https://github.com/NethermindEth/Yul-Specification/blob/master/yul_cmd.lean
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
Yul-Specification
|
NethermindEth
|
Lean
|
Code
| 4,136
| 10,827
|
import .yul_ast
import .aux
import tactic.linarith
import data.finset.basic
import data.vector
import init.data.fin.ops
import init.data.list.basic
set_option class.instance_max_depth 100
-- Function-typing context: maps a function identifier to its arity
-- (number of parameters, number of return values), if declared.
def FTContext := Identifier → option (ℕ × ℕ)

-- The empty function-typing context: no function is declared.
def empΓ : FTContext := λ_, none

-- A variable store over scope `vars`: a Literal for every identifier
-- together with a proof that it is in scope.
def VarStore (vars : finset Identifier) := ∀ i : Identifier, (i ∈ vars) → Literal

-- The store over the empty scope; membership in ∅ is absurd, so no
-- value ever needs to be produced.
def empStore : VarStore ∅
| i i_in_emp := absurd i_in_emp (finset.not_mem_empty i)
namespace YulCommands

-- Γ is the ambient function-typing context shared by every definition
-- in this namespace.
variable Γ : FTContext

-- Whether a term syntactically sits inside a for-loop body (only there
-- are break/continue permitted — see CBreak/CContinue below).
inductive IsInFor : Type
| NestedInFor : IsInFor
| NotNestedInFor : IsInFor

-- Whether a term sits inside a function body (only there is leave
-- permitted — see CLeave below).
inductive IsInFunc : Type
| InFunc : IsInFunc
| NotInFunc : IsInFunc

-- Index type for YulTerm: the syntactic category of a term plus its
-- scope data. BlockList/CStatement carry the scope before AND after the
-- term; CExpr carries the number of values the expression produces.
inductive TermType : Type
| BlockList : finset Identifier → finset Identifier → IsInFor → IsInFunc → TermType
| CBlock : finset Identifier → IsInFor → IsInFunc → TermType
| SwitchBody : finset Identifier → IsInFor → IsInFunc → TermType
| CExpr : finset Identifier → ℕ → TermType
| CStatement : finset Identifier → finset Identifier → IsInFor → IsInFunc → TermType

open IsInFor
open IsInFunc
open TermType
-- Intrinsically scoped Yul terms, indexed by TermType. Constructors
-- mirror the Yul grammar; the ForExec*/ForCheckCond/Skip constructors
-- are intermediate machine states for the small-step semantics (see the
-- note before `open YulTerm`).
inductive YulTerm : TermType → Type
-- Empty statement list: scope unchanged.
| EmpCBlock :
    ∀{vars : finset Identifier} {b : IsInFor} {b' : IsInFunc},
    YulTerm (BlockList vars vars b b')
-- Cons of a statement and the remaining list; vars' is the scope after
-- the head statement.
| SeqCBlock :
    ∀ {vars : finset Identifier} (vars' : finset Identifier)
    {vars'' : finset Identifier} {b : IsInFor} {b' : IsInFunc},
    YulTerm (CStatement vars vars' b b') ->
    YulTerm (BlockList vars' vars'' b b') →
    YulTerm (BlockList vars' vars'' b b') →
    YulTerm (BlockList vars vars'' b b')
def getVariableUpdate : ∀ {t : TermType} , YulTerm Γ t → option (finset Identifier × finset Identifier)
| (BlockList vars vars' _ _) _ := some (vars, vars')
| (CBlock _ _ _) _ := none
| (SwitchBody _ _ _) _ := none
| (CExpr _ _) _ := none
| (CStatement vars vars' _ _) _ := some (vars, vars')
lemma term_scope_monotonic :
∀ {t : TermType} (term : YulTerm Γ t) (vars vars' : finset Identifier),
getVariableUpdate Γ term = some (vars, vars') → vars ⊆ vars' :=
begin
intros ttype t,
induction t,
-- EmpCBlock
intros vars vars' is_var_update i,
intro i_in_vars,
rw getVariableUpdate at is_var_update,
injection is_var_update with is_var_update,
injection is_var_update with vars_eq_tvars tvars_eq_tvars,
rw [←tvars_eq_tvars, vars_eq_tvars],
exact i_in_vars,
-- SeqCBlock
intros vars vars' is_var_update i,
intro i_in_vars,
rw getVariableUpdate at is_var_update,
injection is_var_update with is_var_update,
injection is_var_update with vars_eq_tvars tvars''_eq_vars',
rw vars_eq_tvars at t_ᾰ,
have int₁ := t_ih_ᾰ vars t_vars' _ i_in_vars,
rw tvars''_eq_vars' at t_ᾰ_1,
have int₂ := t_ih_ᾰ_1 t_vars' vars' _ int₁,
exact int₂,
rw getVariableUpdate,
injection is_var_update with _ tvars''_eq_vars',
rw tvars''_eq_vars',
rw getVariableUpdate,
rw vars_eq_tvars,
-- Block, SwitchBody & Expr
repeat {
intros vars vars',
intro f,
rw getVariableUpdate at f,
exfalso,
exact option.some_ne_none (vars, vars') (eq.symm f),
},
-- CStatements that do not bring new variables into scope.
repeat {
intros vars vars',
intro is_var_update,
rw getVariableUpdate at is_var_update,
injection is_var_update with is_var_update,
injection is_var_update with vars_eq_tvars tvars_eq_vars',
rw [←tvars_eq_vars', vars_eq_tvars],
exact finset.subset.refl vars,
},
-- CVariableDeclaration, CVariableDeclarationAss
repeat {
intros vars vars',
intro is_var_update,
rw getVariableUpdate at is_var_update,
injection is_var_update with is_var_update,
injection is_var_update with vars_eq eq_vars',
rw [←vars_eq,←eq_vars'],
exact finset.subset_union_left _ _,
},
end
def frame_TermType : TermType → finset Identifier → TermType
| (BlockList vars vars' b b') fvars :=
BlockList (vars ∪ fvars) (vars' ∪ fvars) b b'
| (CBlock vars b b') fvars := CBlock (vars ∪ fvars) b b'
| (SwitchBody vars b b') fvars := SwitchBody (vars ∪ fvars) b b'
| (CExpr vars n) fvars := CExpr (vars ∪ fvars) n
| (CStatement vars vars' b b') fvars :=
CStatement (vars ∪ fvars) (vars' ∪ fvars) b b'
lemma frame_lemma :
∀ s₁ s₂ s₃ : finset Identifier,
s₁ ∪ s₂ ∪ s₃ = s₁ ∪ s₃ ∪ s₂ :=
begin
intros s₁ s₂ s₃,
rw (finset.union_assoc s₁ s₂ s₃),
rw (finset.union_assoc s₁ s₃ s₂),
rw (finset.union_comm s₂ s₃),
end
def frame :
∀ {t : TermType} (fvars : finset Identifier),
YulTerm Γ t → YulTerm Γ (frame_TermType t fvars)
| (BlockList _ _ _ _) fvars EmpCBlock := EmpCBlock
| (BlockList _ _ _ _) fvars (SeqCBlock vars' cstmnt cblklst') :=
SeqCBlock (vars' ∪ fvars) (frame fvars cstmnt) (frame fvars cblklst')
| (CBlock vars b b') fvars (NestedScope inner_vars inner_vars' σ blklst) :=
let inner_vars_eq:= finset.union_assoc inner_vars vars fvars,
inner_vars'_eq := finset.union_assoc inner_vars' vars fvars,
cast (cblklst : YulTerm Γ (BlockList((inner_vars ∪ vars) ∪ fvars) ((inner_vars' ∪ vars) ∪ fvars) b b')) :
YulTerm Γ (BlockList (inner_vars ∪ (vars ∪ fvars)) (inner_vars' ∪ (vars ∪ fvars)) b b') :=
eq.rec (eq.rec cblklst inner_vars_eq) inner_vars'_eq
in NestedScope inner_vars inner_vars' σ (cast $ frame fvars blklst)
| (SwitchBody _ _ _) fvars (CCase lit blk swtchbody) :=
CCase lit (frame fvars blk) (frame fvars swtchbody)
| (SwitchBody _ _ _) fvars (CDefault blk) :=
CDefault (frame fvars blk)
| (SwitchBody _ _ _) fvars CNone :=
CNone
| (CExpr vars m) fvars (CFunctionCall f_id n p args) :=
CFunctionCall f_id n p (λi, frame fvars (args i))
| (CExpr vars 1) fvars (CId i i_in_vars) :=
CId i (finset.mem_of_subset (finset.subset_union_left vars fvars) i_in_vars)
| (CExpr _ 1) fvars (CLit lit) := CLit lit
| (CExpr _ _) fvars (Scope vars_inner vars_inner' ret_vars σ stmnt) :=
Scope vars_inner vars_inner' ret_vars σ stmnt
| (CExpr _ _) fvars (Result res_vec) :=
Result res_vec
| (CStatement _ _ _ _) fvars (CBlock blk) :=
CBlock (frame fvars blk)
| (CStatement vars _ b b') fvars (CVariableDeclarationAss n new_vars cexpr) :=
let cast (cstmnt : YulTerm Γ (CStatement (vars ∪ fvars) (vars ∪ fvars ∪ tofinset new_vars) b b'))
: YulTerm Γ (CStatement (vars ∪ fvars) (vars ∪ tofinset new_vars ∪ fvars) b b') :=
eq.rec cstmnt (finset.union_right_comm vars fvars (tofinset new_vars))
in cast $ CVariableDeclarationAss n new_vars (frame fvars cexpr)
| (CStatement vars _ b b') fvars (CVariableDeclaration n new_vars) :=
let cast (cstmnt : YulTerm Γ (CStatement (vars ∪ fvars) (vars ∪ fvars ∪ tofinset new_vars) b b'))
: YulTerm Γ (CStatement (vars ∪ fvars) (vars ∪ tofinset new_vars ∪ fvars) b b') :=
eq.rec cstmnt (finset.union_right_comm vars fvars (tofinset new_vars))
in cast $ CVariableDeclaration n new_vars
| (CStatement vars _ b b') fvars (CAssignment n ids in_scope cexpr) :=
CAssignment n ids
(has_subset.subset.trans in_scope (finset.subset_union_left vars fvars))
(frame fvars cexpr)
| (CStatement vars _ b b') fvars (CIf cexpr blk) :=
CIf (frame fvars cexpr) (frame fvars blk)
| (CStatement vars _ b b') fvars (CExpressionStatement cexpr) :=
CExpressionStatement (frame fvars cexpr)
| (CStatement vars _ b b') fvars (CSwitch cexpr swtchbody) :=
CSwitch (frame fvars cexpr) (frame fvars swtchbody)
| (CStatement vars _ b b') fvars (CFor inner_vars inner_vars' inner_vars'' init cond body post) :=
let init_framed : YulTerm Γ (BlockList (vars ∪ fvars) (vars ∪ fvars ∪ inner_vars) NotNestedInFor b') :=
begin
have init_framed := frame fvars init,
rw frame_TermType at init_framed,
apply eq.rec init_framed,
rw frame_lemma,
end,
cond_framed : YulTerm Γ (CExpr (vars ∪ fvars ∪ inner_vars) 1) :=
begin
have cond_framed := frame fvars cond,
rw frame_TermType at cond_framed,
apply eq.rec cond_framed,
rw frame_lemma,
end,
body_framed : YulTerm Γ (BlockList (vars ∪ fvars ∪ inner_vars) (vars ∪ fvars ∪ inner_vars') NestedInFor b') :=
begin
have body_framed := frame fvars body,
rw frame_TermType at body_framed,
apply eq.rec body_framed,
rw (frame_lemma vars inner_vars fvars),
rw (frame_lemma vars inner_vars' fvars),
end,
post_framed : YulTerm Γ (BlockList (vars ∪ fvars ∪ inner_vars') (vars ∪ fvars ∪ inner_vars'') NotNestedInFor b') :=
begin
have post_framed := frame fvars post,
rw frame_TermType at post_framed,
apply eq.rec post_framed,
rw (frame_lemma vars inner_vars' fvars),
rw (frame_lemma vars inner_vars'' fvars),
end
in CFor inner_vars inner_vars' inner_vars''
init_framed cond_framed body_framed post_framed
| (CStatement vars _ b b') fvars CBreak := CBreak
| (CStatement vars _ b b') fvars CContinue := CContinue
| (CStatement vars _ b b') fvars CLeave := CLeave
| (CStatement vars _ b b') fvars
(ForExecInit curr_inner_vars inner_vars inner_vars' inner_vars'' σ cond loop post eval_init) :=
let cond_framed : YulTerm Γ (CExpr (vars ∪ fvars ∪ inner_vars) 1) :=
begin
have cond_framed := frame fvars cond,
rw frame_TermType at cond_framed,
apply eq.rec cond_framed,
rw frame_lemma,
end,
loop_framed : YulTerm Γ (BlockList (vars ∪ fvars ∪ inner_vars) (vars ∪ fvars ∪ inner_vars') NestedInFor b') :=
begin
have loop_framed := frame fvars loop,
rw frame_TermType at loop_framed,
apply eq.rec loop_framed,
rw (frame_lemma vars inner_vars fvars),
rw (frame_lemma vars inner_vars' fvars),
end,
post_framed : YulTerm Γ (BlockList (vars ∪ fvars ∪ inner_vars') (vars ∪ fvars ∪ inner_vars'') NotNestedInFor b') :=
begin
have post_framed := frame fvars post,
rw frame_TermType at post_framed,
apply eq.rec post_framed,
rw (frame_lemma vars inner_vars' fvars),
rw (frame_lemma vars inner_vars'' fvars),
end,
eval_init_framed : YulTerm Γ (BlockList (vars ∪ fvars ∪ curr_inner_vars) (vars ∪ fvars ∪ inner_vars) NotNestedInFor b') :=
begin
have eval_init_framed := frame fvars eval_init,
rw frame_TermType at eval_init_framed,
apply eq.rec eval_init_framed,
rw (frame_lemma vars curr_inner_vars fvars),
rw (frame_lemma vars inner_vars fvars),
end
in ForExecInit curr_inner_vars inner_vars inner_vars' inner_vars'' σ
cond_framed loop_framed post_framed eval_init_framed
| (CStatement vars _ b b') fvars
(ForCheckCond inner_vars inner_vars' inner_vars'' σ cond loop post eval_cond) :=
let cond_framed : YulTerm Γ (CExpr (vars ∪ fvars ∪ inner_vars) 1) :=
begin
have cond_framed := frame fvars cond,
rw frame_TermType at cond_framed,
apply eq.rec cond_framed,
rw frame_lemma,
end,
loop_framed : YulTerm Γ (BlockList (vars ∪ fvars ∪ inner_vars) (vars ∪ fvars ∪ inner_vars') NestedInFor b') :=
begin
have loop_framed := frame fvars loop,
rw frame_TermType at loop_framed,
apply eq.rec loop_framed,
rw (frame_lemma vars inner_vars fvars),
rw (frame_lemma vars inner_vars' fvars),
end,
post_framed : YulTerm Γ (BlockList (vars ∪ fvars ∪ inner_vars') (vars ∪ fvars ∪ inner_vars'') NotNestedInFor b') :=
begin
have post_framed := frame fvars post,
rw frame_TermType at post_framed,
apply eq.rec post_framed,
rw (frame_lemma vars inner_vars' fvars),
rw (frame_lemma vars inner_vars'' fvars),
end,
eval_cond_framed : YulTerm Γ (CExpr (vars ∪ fvars ∪ inner_vars) 1) :=
begin
have eval_cond_framed := frame fvars eval_cond,
rw frame_TermType at eval_cond_framed,
apply eq.rec eval_cond_framed,
rw (frame_lemma vars inner_vars fvars),
end
in ForCheckCond inner_vars inner_vars' inner_vars'' σ
cond_framed loop_framed post_framed eval_cond_framed
| (CStatement vars _ b b') fvars
(ForExecBody curr_inner_vars inner_vars inner_vars' inner_vars'' σ p cond loop post eval_loop) :=
let cond_framed : YulTerm Γ (CExpr (vars ∪ fvars ∪ inner_vars) 1) :=
begin
have cond_framed := frame fvars cond,
rw frame_TermType at cond_framed,
apply eq.rec cond_framed,
rw frame_lemma,
end,
loop_framed : YulTerm Γ (BlockList (vars ∪ fvars ∪ inner_vars) (vars ∪ fvars ∪ inner_vars') NestedInFor b') :=
begin
have loop_framed := frame fvars loop,
rw frame_TermType at loop_framed,
apply eq.rec loop_framed,
rw (frame_lemma vars inner_vars fvars),
rw (frame_lemma vars inner_vars' fvars),
end,
post_framed : YulTerm Γ (BlockList (vars ∪ fvars ∪ inner_vars') (vars ∪ fvars ∪ inner_vars'') NotNestedInFor b') :=
begin
have post_framed := frame fvars post,
rw frame_TermType at post_framed,
apply eq.rec post_framed,
rw (frame_lemma vars inner_vars' fvars),
rw (frame_lemma vars inner_vars'' fvars),
end,
eval_loop_framed :YulTerm Γ (BlockList (vars ∪ fvars ∪ curr_inner_vars) (vars ∪ fvars ∪ inner_vars') NestedInFor b') :=
begin
have eval_loop_framed := frame fvars eval_loop,
rw frame_TermType at eval_loop_framed,
apply eq.rec eval_loop_framed,
rw (frame_lemma vars inner_vars' fvars),
rw (frame_lemma vars curr_inner_vars fvars),
end,
p' : vars ∪ fvars ∪ inner_vars ⊆ vars ∪ fvars ∪ curr_inner_vars :=
begin
intros i i_in,
repeat {
rw finset.mem_union at i_in,
},
repeat {
rw finset.mem_union,
},
cases i_in with x y,
exact or.inl x,
cases finset.mem_union.1 (p (finset.mem_union_right vars y)) with h,
exact or.inl (or.inl h),
exact or.inr h,
end
in ForExecBody curr_inner_vars inner_vars inner_vars' inner_vars'' σ p'
cond_framed loop_framed post_framed eval_loop_framed
| (CStatement vars _ b b') fvars
(ForExecPost curr_inner_vars inner_vars inner_vars' inner_vars'' σ cond loop post eval_post) :=
let cond_framed : YulTerm Γ (CExpr (vars ∪ fvars ∪ inner_vars) 1) :=
begin
have cond_framed := frame fvars cond,
rw frame_TermType at cond_framed,
apply eq.rec cond_framed,
rw frame_lemma,
end,
loop_framed : YulTerm Γ (BlockList (vars ∪ fvars ∪ inner_vars) (vars ∪ fvars ∪ inner_vars') NestedInFor b') :=
begin
have loop_framed := frame fvars loop,
rw frame_TermType at loop_framed,
apply eq.rec loop_framed,
rw (frame_lemma vars inner_vars fvars),
rw (frame_lemma vars inner_vars' fvars),
end,
post_framed : YulTerm Γ (BlockList (vars ∪ fvars ∪ inner_vars') (vars ∪ fvars ∪ inner_vars'') NotNestedInFor b') :=
begin
have post_framed := frame fvars post,
rw frame_TermType at post_framed,
apply eq.rec post_framed,
rw (frame_lemma vars inner_vars' fvars),
rw (frame_lemma vars inner_vars'' fvars),
end,
eval_post_framed : YulTerm Γ (BlockList (vars ∪ fvars ∪ curr_inner_vars) (vars ∪ fvars ∪ inner_vars'') NotNestedInFor b') :=
begin
have eval_post_framed := frame fvars eval_post,
rw frame_TermType at eval_post_framed,
apply eq.rec eval_post_framed,
rw (frame_lemma vars inner_vars'' fvars),
rw (frame_lemma vars curr_inner_vars fvars),
end
in ForExecPost curr_inner_vars inner_vars inner_vars' inner_vars'' σ
cond_framed loop_framed post_framed eval_post_framed
| (CStatement vars _ b b') fvars Skip := Skip
def are_args_reduced :
∀ {vars : finset Identifier} {n : ℕ},
vector (YulTerm Γ (CExpr vars 1)) n → Prop
| _ 0 _ := true
| vars (nat.succ n) ⟨ (Result _) :: cexprs, p ⟩ :=
are_args_reduced
(⟨
cexprs,
by {
rw list.length at p,
exact (nat.add_right_cancel p),
}
⟩ : vector (YulTerm Γ (CExpr vars 1)) n)
| _ (nat.succ n) ⟨ _ :: _, _ ⟩ := false
instance (vars : finset Identifier) (n : ℕ)
(cexprs : vector (YulTerm Γ (CExpr vars 1)) n) :
decidable (are_args_reduced Γ cexprs) :=
begin
induction n,
rw are_args_reduced,
apply decidable.is_true,
trivial,
cases cexprs,
cases cexprs_val,
exfalso,
exact list.ne_nil_of_length_eq_succ cexprs_property (eq.refl list.nil),
cases cexprs_val_hd,
repeat {
rw are_args_reduced,
apply decidable.is_false,
trivial,
},
rw are_args_reduced,
exact n_ih ⟨ cexprs_val_tl, _ ⟩,
end
lemma nil_reduced :
∀ {Γ : FTContext} {vars : finset Identifier},
@are_args_reduced Γ vars 0 vector.nil :=
begin
intros Γ vars,
rw are_args_reduced,
trivial,
end
def is_result :
∀ {vars : finset Identifier} {n : ℕ},
YulTerm Γ (CExpr vars n) -> Prop
| _ _ (Result _) := true
| _ _ (CLit _) := false
| _ _ (CId _ _) := false
| _ _ (CFunctionCall _ _ _ _) := false
| _ _ (Scope _ _ _ _ _) := false
instance
(vars : finset Identifier) (n : ℕ)
(cexpr : YulTerm Γ (CExpr vars n)) : decidable (is_result Γ cexpr) :=
begin
cases cexpr,
repeat {
rw is_result,
apply decidable.is_false,
trivial,
},
rw is_result,
apply decidable.is_true,
trivial,
end
def is_skip :
∀ {vars vars': finset Identifier} {b : IsInFor} {b' : IsInFunc},
YulTerm Γ (CStatement vars vars' b b') -> Prop
| _ _ _ _ Skip := true
| _ _ _ _ (CBlock _) := false
| _ _ _ _ (CVariableDeclarationAss _ _ _) := false
| _ _ _ _ (CVariableDeclaration _ _) := false
| _ _ _ _ (CAssignment _ _ _ _) := false
| _ _ _ _ (CIf _ _) := false
| _ _ _ _ (CExpressionStatement _) := false
| _ _ _ _ (CSwitch _ _) := false
| _ _ _ _ (CFor _ _ _ _ _ _ _) := false
| _ _ _ _ CBreak := false
| _ _ _ _ CContinue := false
| _ _ _ _ CLeave := false
| _ _ _ _ (ForExecInit _ _ _ _ _ _ _ _ _) := false
| _ _ _ _ (ForCheckCond _ _ _ _ _ _ _ _) := false
| _ _ _ _ (ForExecBody _ _ _ _ _ _ _ _ _ _) := false
| _ _ _ _ (ForExecPost _ _ _ _ _ _ _ _ _) := false
lemma is_skip_imp_vars_eq_vars' :
∀ {vars vars' : finset Identifier} {b : IsInFor} {b' : IsInFunc}
{cstmnt : YulTerm Γ (CStatement vars vars' b b')},
is_skip Γ cstmnt → vars' = vars :=
begin
intros vars vars' b b' cstmnt cstmnt_is_skip,
cases cstmnt,
repeat {
rw is_skip at cstmnt_is_skip,
},
repeat {
exfalso,
exact cstmnt_is_skip,
},
end
instance is_skip_decidable {vars vars' : finset Identifier} {b : IsInFor} {b' : IsInFunc}
{stmnt : YulTerm Γ (CStatement vars vars' b b')} : decidable (is_skip Γ stmnt) :=
begin
cases stmnt,
repeat{
rw is_skip,
apply decidable.is_false,
trivial,
},
rw is_skip,
apply decidable.is_true,
trivial,
end
def is_empcblock :
∀ {vars vars' : finset Identifier} {b : IsInFor} {b' : IsInFunc},
YulTerm Γ (BlockList vars vars' b b') → Prop
| _ _ _ _ EmpCBlock := true
| _ _ _ _ _ := false
instance empcblock_decidable
{vars vars' : finset Identifier} {b : IsInFor} {b' : IsInFunc}
{blklst : YulTerm Γ (BlockList vars vars' b b')} :
decidable (is_empcblock Γ blklst) :=
begin
cases blklst,
rw is_empcblock,
apply decidable.is_true,
trivial,
rw is_empcblock,
apply decidable.is_false,
trivial,
end
lemma is_empcblock_imp_vars_eq_vars' :
∀ {vars vars' : finset Identifier} {b : IsInFor}
{b' : IsInFunc} {cblk : YulTerm Γ (BlockList vars vars' b b')},
is_empcblock Γ cblk → vars = vars' :=
begin
intros vars vars' b b' cblk cblk_is_empcblock,
cases cblk,
refl,
exfalso,
rw is_empcblock at cblk_is_empcblock,
exact cblk_is_empcblock,
end
def is_empblock :
∀ {vars : finset Identifier} {b : IsInFor} {b' : IsInFunc},
YulTerm Γ (CBlock vars b b') → Prop
| _ _ _ (NestedScope _ _ _ blklst) := is_empcblock Γ blklst
instance (vars : finset Identifier) (b : IsInFor) (b' : IsInFunc)
(blk : YulTerm Γ (CBlock vars b b')) : decidable (is_empblock Γ blk) :=
begin
cases blk,
rw is_empblock,
exact YulCommands.empcblock_decidable Γ,
end
def to_literal : ∀ {vars : finset Identifier} {n : ℕ}
(cexpr : YulTerm Γ (CExpr vars n)), is_result Γ cexpr → vector Literal n
| _ _ cexpr@(CFunctionCall _ _ _ _) cexpr_is_res :=
let cexpr_n_is_res : ¬ is_result Γ cexpr :=
begin
rw is_result,
intro f,
exact f,
end
in absurd cexpr_is_res cexpr_n_is_res
| _ _ cexpr@(CId _ _) cexpr_is_res :=
let cexpr_n_is_res : ¬ is_result Γ cexpr :=
begin
rw is_result,
intro f,
exact f,
end
in absurd cexpr_is_res cexpr_n_is_res
| _ _ cexpr@(CLit l) cexpr_is_res :=
let cexpr_n_is_res : ¬ is_result Γ cexpr :=
begin
rw is_result,
intro f,
exact f,
end
in absurd cexpr_is_res cexpr_n_is_res
| _ _ cexpr@(Scope _ _ _ _ _) cexpr_is_res :=
let cexpr_n_is_res : ¬ is_result Γ cexpr :=
begin
rw is_result,
intro f,
exact f,
end
in absurd cexpr_is_res cexpr_n_is_res
| _ _ cexpr@(Result res_vec) _ := res_vec
def getCase :
∀ {vars : finset Identifier} {b : IsInFor} {b' : IsInFunc},
Literal -> YulTerm Γ (SwitchBody vars b b') → YulTerm Γ (CBlock vars b b')
| _ _ _ _ CNone := NestedScope ∅ ∅ empStore EmpCBlock
| _ _ _ _ (CDefault blk) := blk
| _ _ _ l (CCase lit blk swtchbody') :=
if l = lit
then blk
else getCase l swtchbody'
lemma reduced_and_n_tail_reduced_imp_n_lit
{vars : finset Identifier}
(cexpr : YulTerm Γ (CExpr vars 1))
{n : ℕ}
(cexprs : vector (YulTerm Γ (CExpr vars 1)) n) :
¬ (are_args_reduced Γ (vector.cons cexpr cexprs)) → are_args_reduced Γ cexprs →
¬ is_result Γ cexpr :=
begin
intros full_not_red tail_red,
cases cexpr,
repeat {
rw is_result,
intro f,
exact f,
},
cases cexprs,
exfalso,
rw vector.cons at full_not_red,
rw are_args_reduced at full_not_red,
exact full_not_red(tail_red),
end
def get_lits :
∀ {vars : finset Identifier} {n : ℕ}
(arg_cexprs : vector (YulTerm Γ (CExpr vars 1)) n),
are_args_reduced Γ arg_cexprs → vector Literal n
| _ 0 _ _ := vector.nil
| vars (nat.succ n) ⟨(Result lit) :: lit_cexprs, len_p⟩ p :=
let lit_cexprs_vec' : vector (YulTerm Γ (CExpr vars 1)) n :=
⟨ lit_cexprs,
by {
rw list.length at len_p,
exact (nat.add_right_cancel len_p),
}
⟩
in vector.cons lit.head $
get_lits lit_cexprs_vec' $
by {
rw are_args_reduced at p,
exact p,
}
end YulCommands
| 3,067
|
https://github.com/samiudheen/mono-soc-2007/blob/master/jared/MonoTorrent.Interface/TorrentTreeView.cs
|
Github Open Source
|
Open Source
|
MIT, BSD-3-Clause
| 2,007
|
mono-soc-2007
|
samiudheen
|
C#
|
Code
| 1,075
| 3,994
|
//
// TorrentTreeView.cs
//
// Author:
// Jared Hendry (buchan@gmail.com)
//
// Copyright (C) 2007 Jared Hendry
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using Gtk;
using System;
using System.Text;
using MonoTorrent.Common;
using MonoTorrent.Client;
namespace MonoTorrent.Interface
{
    // Torrent list widget: a Gtk TreeView with one row per TorrentManager and
    // columns for name, state, progress, peer counts, transfer speeds, share
    // ratio and size. Cell contents are produced on demand through
    // TreeCellDataFunc callbacks that read column 0 of the model, which is
    // expected to hold the TorrentManager itself.
    public class TorrentTreeView : TreeView
    {
        public TreeViewColumn nameColumn;
        public TreeViewColumn statusColumn;
        public TreeViewColumn doneColumn;
        public TreeViewColumn seedsColumn;
        public TreeViewColumn peersColumn;
        public TreeViewColumn downSpeedColumn;
        public TreeViewColumn upSpeedColumn;
        public TreeViewColumn ratioColumn;
        public TreeViewColumn sizeColumn;
        private TorrentController torrentController;
        // Drop targets accepted by the view (uri-lists, e.g. .torrent files
        // dragged in from a file manager).
        private TargetEntry[] targetEntries;
        // Drag source type offered when a row is dragged out of the view.
        private TargetEntry[] sourceEntries;
        private static NLog.Logger logger = NLog.LogManager.GetCurrentClassLogger();
        // Builds the columns and wires up selection and two-way drag-and-drop.
        public TorrentTreeView(TorrentController torrentController) : base()
        {
            this.torrentController = torrentController;
            targetEntries = new TargetEntry[]{
                new TargetEntry("text/uri-list", 0, 0)
            };
            sourceEntries = new TargetEntry[]{
                new TargetEntry("application/x-monotorrent-torrentmanager-objects", 0, 0)
            };
            buildColumns();
            Reorderable = true;
            HeadersVisible = true;
            HeadersClickable = true;
            Selection.Mode = SelectionMode.Multiple;
            EnableModelDragDest(targetEntries, Gdk.DragAction.Copy);
            DragDataReceived += OnTorrentDragDataReceived;
            //this.DragDrop += OnTest;
            this.EnableModelDragSource(Gdk.ModifierType.Button1Mask, sourceEntries, Gdk.DragAction.Copy);
            DragDataGet += OnTorrentDragDataGet;
        }
        // Right-click (button 3) on exactly one selected row pops up the
        // torrent context menu. Always returns false so default handling
        // continues.
        protected override bool OnButtonPressEvent (Gdk.EventButton e)
        {
            // Call this first so context menu has a selected torrent
            base.OnButtonPressEvent(e);
            if(e.Button == 3 && Selection.CountSelectedRows() == 1){
                TorrentContextMenu contextMenu = new TorrentContextMenu(torrentController);
                contextMenu.ShowAll();
                contextMenu.Popup();
            }
            return false;
        }
        // Drag-out handler: publishes the selected torrent's hash code (as a
        // UTF-8 string) under the custom torrentmanager MIME type so drop
        // targets inside the app can identify it. No-op when nothing is
        // selected.
        private void OnTorrentDragDataGet (object o, DragDataGetArgs args)
        {
            // TODO: Support dragging multiple torrents to a label
            TorrentManager manager;
            manager = torrentController.GetSelectedTorrent();
            if(manager == null)
                return;
            args.SelectionData.Set(Gdk.Atom.Intern("application/x-monotorrent-torrentmanager-objects", false), 8, Encoding.UTF8.GetBytes(manager.GetHashCode().ToString()));
        }
        // Drop handler: interprets the payload as a newline-separated
        // uri-list and adds every file:// entry as a new torrent.
        private void OnTorrentDragDataReceived (object o, DragDataReceivedArgs args)
        {
            string [] uriList = (Encoding.UTF8.GetString(args.SelectionData.Data).TrimEnd()).Split('\n');
            foreach(string s in uriList){
                Uri uri = new Uri(s.TrimEnd());
                if(uri.IsFile){
                    logger.Info("URI dropped " + uri);
                    torrentController.addTorrent(uri.LocalPath);
                }
            }
        }
        // Creates all nine columns, makes them resizable/reorderable with
        // fixed sizing, attaches one renderer per column and binds the
        // per-column RenderTorrent* data funcs.
        private void buildColumns()
        {
            nameColumn = new TreeViewColumn();
            statusColumn = new TreeViewColumn();
            doneColumn = new TreeViewColumn();
            seedsColumn = new TreeViewColumn();
            peersColumn = new TreeViewColumn();
            downSpeedColumn = new TreeViewColumn();
            upSpeedColumn = new TreeViewColumn();
            ratioColumn = new TreeViewColumn();
            sizeColumn = new TreeViewColumn();
            nameColumn.Title = "Name";
            statusColumn.Title = "Status";
            doneColumn.Title = "Done";
            seedsColumn.Title = "Seeds";
            peersColumn.Title = "Peers";
            downSpeedColumn.Title = "DL Speed";
            upSpeedColumn.Title = "UP Speed";
            ratioColumn.Title = "Ratio";
            sizeColumn.Title = "Size";
            nameColumn.Resizable = true;
            statusColumn.Resizable = true;
            doneColumn.Resizable = true;
            seedsColumn.Resizable = true;
            peersColumn.Resizable = true;
            downSpeedColumn.Resizable = true;
            upSpeedColumn.Resizable = true;
            ratioColumn.Resizable = true;
            sizeColumn.Resizable = true;
            nameColumn.Reorderable = true;
            statusColumn.Reorderable = true;
            doneColumn.Reorderable = true;
            seedsColumn.Reorderable = true;
            peersColumn.Reorderable = true;
            downSpeedColumn.Reorderable = true;
            upSpeedColumn.Reorderable = true;
            ratioColumn.Reorderable = true;
            sizeColumn.Reorderable = true;
            // Text renderers for everything except the progress bar column.
            Gtk.CellRendererText torrentNameCell = new Gtk.CellRendererText ();
            Gtk.CellRendererText torrentStatusCell = new Gtk.CellRendererText();
            Gtk.CellRendererProgress torrentDoneCell = new Gtk.CellRendererProgress();
            Gtk.CellRendererText torrentSeedsCell = new Gtk.CellRendererText();
            Gtk.CellRendererText torrentPeersCell = new Gtk.CellRendererText();
            Gtk.CellRendererText torrentDownSpeedCell = new Gtk.CellRendererText();
            Gtk.CellRendererText torrentUpSpeedCell = new Gtk.CellRendererText();
            Gtk.CellRendererText torrentRatioCell = new Gtk.CellRendererText();
            Gtk.CellRendererText torrentSizeCell = new Gtk.CellRendererText();
            nameColumn.PackStart(torrentNameCell, true);
            statusColumn.PackStart(torrentStatusCell, true);
            doneColumn.PackStart(torrentDoneCell, true);
            seedsColumn.PackStart(torrentSeedsCell, true);
            peersColumn.PackStart(torrentPeersCell, true);
            downSpeedColumn.PackStart(torrentDownSpeedCell, true);
            upSpeedColumn.PackStart(torrentUpSpeedCell, true);
            ratioColumn.PackStart(torrentRatioCell, true);
            sizeColumn.PackStart(torrentSizeCell, true);
            nameColumn.SetCellDataFunc (torrentNameCell, new Gtk.TreeCellDataFunc (RenderTorrentName));
            statusColumn.SetCellDataFunc (torrentStatusCell, new Gtk.TreeCellDataFunc (RenderTorrentStatus));
            doneColumn.SetCellDataFunc (torrentDoneCell, new Gtk.TreeCellDataFunc (RenderTorrentDone));
            seedsColumn.SetCellDataFunc (torrentSeedsCell, new Gtk.TreeCellDataFunc (RenderTorrentSeeds));
            peersColumn.SetCellDataFunc (torrentPeersCell, new Gtk.TreeCellDataFunc (RenderTorrentPeers));
            downSpeedColumn.SetCellDataFunc (torrentDownSpeedCell, new Gtk.TreeCellDataFunc (RenderTorrentDownSpeed));
            upSpeedColumn.SetCellDataFunc (torrentUpSpeedCell, new Gtk.TreeCellDataFunc (RenderTorrentUpSpeed));
            ratioColumn.SetCellDataFunc (torrentRatioCell, new Gtk.TreeCellDataFunc (RenderTorrentRatio));
            sizeColumn.SetCellDataFunc (torrentSizeCell, new Gtk.TreeCellDataFunc (RenderTorrentSize));
            nameColumn.Sizing = TreeViewColumnSizing.Fixed;
            statusColumn.Sizing = TreeViewColumnSizing.Fixed;
            doneColumn.Sizing = TreeViewColumnSizing.Fixed;
            seedsColumn.Sizing = TreeViewColumnSizing.Fixed;
            peersColumn.Sizing = TreeViewColumnSizing.Fixed;
            downSpeedColumn.Sizing = TreeViewColumnSizing.Fixed;
            upSpeedColumn.Sizing = TreeViewColumnSizing.Fixed;
            ratioColumn.Sizing = TreeViewColumnSizing.Fixed;
            sizeColumn.Sizing = TreeViewColumnSizing.Fixed;
            AppendColumn(nameColumn);
            AppendColumn(statusColumn);
            AppendColumn(doneColumn);
            AppendColumn(seedsColumn);
            AppendColumn(peersColumn);
            AppendColumn(downSpeedColumn);
            AppendColumn(upSpeedColumn);
            AppendColumn(ratioColumn);
            AppendColumn(sizeColumn);
        }
        // Shows the torrent's name; blank cell for a null row.
        private void RenderTorrentName (Gtk.TreeViewColumn column, Gtk.CellRenderer cell, Gtk.TreeModel model, Gtk.TreeIter iter)
        {
            TorrentManager torrent = (TorrentManager) model.GetValue (iter, 0);
            if (torrent == null)
                (cell as Gtk.CellRendererText).Text = string.Empty;
            else
                (cell as Gtk.CellRendererText).Text = torrent.Torrent.Name;
        }
        // Shows the torrent state, colour-coded per state (green=downloading,
        // orange=paused, purple=hashing, blue=seeding, red=anything else).
        private void RenderTorrentStatus (Gtk.TreeViewColumn column, Gtk.CellRenderer cell, Gtk.TreeModel model, Gtk.TreeIter iter)
        {
            TorrentManager torrent = (TorrentManager) model.GetValue (iter, 0);
            if(torrent == null)
                return;
            if (torrent.State == TorrentState.Downloading){
                (cell as Gtk.CellRendererText).Foreground = "darkgreen";
            }else if (torrent.State == TorrentState.Paused){
                (cell as Gtk.CellRendererText).Foreground = "orange";
            }else if (torrent.State == TorrentState.Hashing){
                (cell as Gtk.CellRendererText).Foreground = "purple";
            }else if (torrent.State == TorrentState.Seeding){
                (cell as Gtk.CellRendererText).Foreground = "blue";
            }else {
                (cell as Gtk.CellRendererText).Foreground = "red";
            }
            (cell as Gtk.CellRendererText).Text = torrent.State.ToString();
        }
        // Progress bar: hash progress while hashing, otherwise download
        // progress (clamped to 0 when Progress is out of int range).
        private void RenderTorrentDone (Gtk.TreeViewColumn column, Gtk.CellRenderer cell, Gtk.TreeModel model, Gtk.TreeIter iter)
        {
            TorrentManager torrent = (TorrentManager) model.GetValue (iter, 0);
            if(torrent == null)
                return;
            if(torrent.State == TorrentState.Hashing) {
                (cell as Gtk.CellRendererProgress).Value = (int)torrentController.GetTorrentHashProgress(torrent);
            } else {
                if(torrent.Progress > int.MinValue && torrent.Progress < int.MaxValue)
                    (cell as Gtk.CellRendererProgress).Value = (int)torrent.Progress;
                else
                    (cell as Gtk.CellRendererProgress).Value = 0;
            }
        }
        // Seed count.
        private void RenderTorrentSeeds (Gtk.TreeViewColumn column, Gtk.CellRenderer cell, Gtk.TreeModel model, Gtk.TreeIter iter)
        {
            TorrentManager torrent = (TorrentManager) model.GetValue (iter, 0);
            if(torrent == null)
                return;
            (cell as Gtk.CellRendererText).Text = torrent.Peers.Seeds.ToString();
        }
        // Leecher count plus available peers, e.g. "3 (12)".
        private void RenderTorrentPeers (Gtk.TreeViewColumn column, Gtk.CellRenderer cell, Gtk.TreeModel model, Gtk.TreeIter iter)
        {
            TorrentManager torrent = (TorrentManager) model.GetValue (iter, 0);
            if(torrent == null)
                return;
            (cell as Gtk.CellRendererText).Text = torrent.Peers.Leechs.ToString() + " (" + torrent.Peers.Available + ")";
        }
        // Download speed formatted via ByteConverter, with "/s" appended.
        private void RenderTorrentDownSpeed (Gtk.TreeViewColumn column, Gtk.CellRenderer cell, Gtk.TreeModel model, Gtk.TreeIter iter)
        {
            TorrentManager torrent = (TorrentManager) model.GetValue (iter, 0);
            if(torrent == null)
                return;
            (cell as Gtk.CellRendererText).Text = ByteConverter.Convert(torrent.Monitor.DownloadSpeed) + "/s";
        }
        // Upload speed formatted via ByteConverter, with "/s" appended.
        private void RenderTorrentUpSpeed (Gtk.TreeViewColumn column, Gtk.CellRenderer cell, Gtk.TreeModel model, Gtk.TreeIter iter)
        {
            TorrentManager torrent = (TorrentManager) model.GetValue (iter, 0);
            if(torrent == null)
                return;
            (cell as Gtk.CellRendererText).Text = ByteConverter.Convert(torrent.Monitor.UploadSpeed) + "/s";
        }
        // Share ratio: (historic + session upload) / (historic + session
        // download); falls back to dividing by the torrent size (in KiB)
        // when nothing was downloaded.
        // NOTE(review): the second operand of the "||" can only be reached
        // when totalDown <= 0, in which case (totalDown/1024f) > Size is
        // false for any positive size — confirm the intended guard (it looks
        // like it was meant to protect against divide-by-zero).
        private void RenderTorrentRatio (Gtk.TreeViewColumn column, Gtk.CellRenderer cell, Gtk.TreeModel model, Gtk.TreeIter iter)
        {
            TorrentManager torrent = (TorrentManager) model.GetValue (iter, 0);
            if(torrent == null)
                return;
            double totalDown;
            double totalUp;
            totalDown = torrentController.GetPreviousDownload(torrent) + torrent.Monitor.DataBytesDownloaded;
            totalUp = torrentController.GetPreviousUpload(torrent) + torrent.Monitor.DataBytesUploaded;
            if (totalDown > 0 || ((totalDown / 1024f) > torrent.Torrent.Size))
                (cell as Gtk.CellRendererText).Text = (totalUp / (double)totalDown).ToString("0.00");
            else
                (cell as Gtk.CellRendererText).Text = (totalUp / (torrent.Torrent.Size / 1024f)).ToString("0.00");
        }
        // Torrent size in KiB, formatted via ByteConverter.
        private void RenderTorrentSize (Gtk.TreeViewColumn column, Gtk.CellRenderer cell, Gtk.TreeModel model, Gtk.TreeIter iter)
        {
            TorrentManager torrent = (TorrentManager) model.GetValue (iter, 0);
            if(torrent == null)
                return;
            (cell as Gtk.CellRendererText).Text = ByteConverter.Convert(torrent.Torrent.Size / 1024);
        }
    }
}
| 4,364
|
https://github.com/zoontao/school/blob/master/src/Common/Exceptions/RuntimeException.php
|
Github Open Source
|
Open Source
|
MIT
| null |
school
|
zoontao
|
PHP
|
Code
| 14
| 37
|
<?php
namespace MicroLink\Common\Exceptions;
/**
 * Class RuntimeException.
 *
 * Namespace-local runtime exception for MicroLink\Common; extends the
 * package's base Exception and adds no behaviour of its own — it exists
 * so callers can catch this failure category specifically.
 */
class RuntimeException extends Exception
{
}
| 14,684
|
https://github.com/petr-mates/osb-export/blob/master/configuration-export/src/test/java/org/mates/osb/ResourceBuilderTest.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
osb-export
|
petr-mates
|
Java
|
Code
| 158
| 450
|
package org.mates.osb;
/*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.File;
import org.junit.Assert;
import org.junit.Test;
import org.mates.osb.resources.ResourceType;
public class ResourceBuilderTest {

    // NOTE(review): this test depends on a hard-coded local Windows path and
    // makes no assertions — it only verifies buildTree does not throw on the
    // author's machine. Consider pointing it at a bundled test resource or
    // removing it from the suite.
    @Test
    public void test() {
        ResourceBuilder resourceBuilder = new ResourceBuilder();
        resourceBuilder.buildTree(new File("d:\\svn\\osb\\trunk\\configuration\\OBS"));
    }

    // Verifies the file-extension → ResourceType mapping performed by
    // ResourceBuilder.getResource for the known OSB artifact suffixes.
    @Test
    public void testGetResource() {
        ResourceBuilder resourceBuilder = new ResourceBuilder();
        Assert.assertEquals(ResourceType.PROXY, resourceBuilder.getResource(new File("test.proxy"), null).getType());
        Assert.assertEquals(ResourceType.BIZ, resourceBuilder.getResource(new File("test.biz"), null).getType());
        Assert.assertEquals(ResourceType.ACCOUNT, resourceBuilder.getResource(new File("test.sa"), null).getType());
        Assert.assertEquals(ResourceType.PROVIDER, resourceBuilder.getResource(new File("test.skp"), null).getType());
        Assert.assertEquals(ResourceType.ALERT, resourceBuilder.getResource(new File("test.alert"), null).getType());
    }
}
| 25,029
|
https://github.com/KiloCraft/KiloEssentials/blob/master/src/main/java/org/kilocraft/essentials/extensions/magicalparticles/config/DustParticleEffectConfigSection.java
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
KiloEssentials
|
KiloCraft
|
Java
|
Code
| 26
| 117
|
package org.kilocraft.essentials.extensions.magicalparticles.config;
import org.spongepowered.configurate.objectmapping.ConfigSerializable;
import org.spongepowered.configurate.objectmapping.meta.Setting;
@ConfigSerializable
public class DustParticleEffectConfigSection {

    // Particle colour as a space-separated "R G B" triple; default is red.
    // NOTE(review): the component range (0–1 vs 0–255) is not visible here —
    // confirm against the code that parses this section.
    @Setting("rgb")
    public String rgb = "1 0 0";

    // Render scale multiplier for the dust particle.
    @Setting("scale")
    public float scale = 1.0F;
}
| 20,849
|
https://github.com/nandakumar131/hadoop-ozone/blob/master/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/SqliteDataSourceProvider.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
hadoop-ozone
|
nandakumar131
|
Java
|
Code
| 222
| 441
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.recon.persistence;
import javax.sql.DataSource;
import org.sqlite.SQLiteDataSource;
import com.google.inject.Inject;
import com.google.inject.Provider;
/**
 * Provide a {@link javax.sql.DataSource} for the application.
 *
 * Builds a native {@link SQLiteDataSource} from the injected
 * {@link DataSourceConfiguration}'s JDBC URL.
 */
public class SqliteDataSourceProvider implements Provider<DataSource> {

  /** Connection settings; injected once and never reassigned. */
  private final DataSourceConfiguration configuration;

  @Inject
  public SqliteDataSourceProvider(DataSourceConfiguration configuration) {
    this.configuration = configuration;
  }

  /**
   * Create a pooled datasource for the application.
   * <p>
   * Default sqlite database does not work with a connection pool, actually
   * most embedded databases do not, hence returning native implementation for
   * default db.
   *
   * @return a fresh, un-pooled SQLite datasource on every call
   */
  @Override
  public DataSource get() {
    SQLiteDataSource ds = new SQLiteDataSource();
    ds.setUrl(configuration.getJdbcUrl());
    return ds;
  }
}
| 10,786
|
https://github.com/BlueMax-IO/nodearch/blob/master/backend/express/src/components/express.hook.ts
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
nodearch
|
BlueMax-IO
|
TypeScript
|
Code
| 102
| 320
|
import { Hook, IHook, HookContext, ComponentType, Logger, ClassConstructor } from '@nodearch/core';
import { ExpressService } from './express.service';
@Hook()
export class ExpressHook implements IHook {
  /** Controllers discovered during onInit; undefined when none were found. */
  private controllers?: ClassConstructor[];

  constructor(
    private readonly expressService: ExpressService,
    private readonly logger: Logger
  ) {}

  /**
   * Collect controller components and initialise the express service with
   * them. When the app exposes no controllers, a warning is logged and the
   * service is left untouched.
   */
  async onInit(context: HookContext) {
    try {
      this.controllers = context.getComponents(ComponentType.Controller);
    }
    catch (e) {
      this.logger.warn('Express: No controllers loaded!');
    }

    if (!this.controllers) return;
    await this.expressService.init(this.controllers, this.dependencyFactory(context));
  }

  /** Start the HTTP server — only when controllers were registered. */
  async onStart(context: HookContext) {
    if (!this.controllers) return;
    await this.expressService.start();
  }

  /** Shut the express service down. */
  async onStop() {
    this.expressService.stop();
  }

  /** Resolver closure handed to the express service for DI lookups. */
  private dependencyFactory(context: HookContext) {
    return (dependency: any) => context.get(dependency);
  }
}
| 7,084
|
https://github.com/MetaBorgCube/layout-decl/blob/master/org.metaborg.sdf2table/src/main/java/org/metaborg/sdf2table/grammar/IPriority.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,018
|
layout-decl
|
MetaBorgCube
|
Java
|
Code
| 13
| 47
|
package org.metaborg.sdf2table.grammar;
/**
 * A priority relation between two grammar productions.
 */
public interface IPriority {

    /** The production on the higher-priority side of the relation. */
    IProduction higher();

    /** The production on the lower-priority side of the relation. */
    IProduction lower();

    /** Whether this priority relation is transitive. */
    boolean transitive();
}
| 26,328
|
https://github.com/camsiabor/qcom/blob/master/util/getter.go
|
Github Open Source
|
Open Source
|
MIT
| null |
qcom
|
camsiabor
|
Go
|
Code
| 3,120
| 9,735
|
package util
import (
"fmt"
"github.com/camsiabor/qcom/qtime"
"github.com/pkg/errors"
"reflect"
"strconv"
"strings"
"time"
)
// AsStr renders o as a string: passes strings through, formats integer,
// float and bool kinds, handles error and time.Time values, and falls
// back to defaultval for nil or anything unrecognised. Whole-valued
// floats are printed without a fractional part.
func AsStr(o interface{}, defaultval string) (r string) {
	if o == nil {
		return defaultval
	}
	if text, isStr := o.(string); isStr {
		return text
	}
	val := reflect.ValueOf(o)
	switch val.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
		reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return fmt.Sprintf("%d", o)
	case reflect.Float32, reflect.Float64:
		fval := val.Float()
		whole := int64(fval)
		// Drop the fraction when it is exactly zero.
		if fval-float64(whole) == 0 {
			return fmt.Sprintf("%d", whole)
		}
		return fmt.Sprintf("%f", fval)
	case reflect.Bool:
		if o.(bool) {
			return "true"
		}
		return "false"
	}
	switch typed := o.(type) {
	case error:
		return typed.Error()
	case time.Time:
		return typed.Format("2006-01-02 15:04:05")
	case *time.Time:
		return typed.Format("2006-01-02 15:04:05")
	}
	return defaultval
}
// Sentinel returned when the caller hands a string to SimpleNumberAsInt;
// lets callers route strings to a different conversion path.
var iamstring = errors.New("i am string")

// SimpleNumberAsInt converts the common numeric kinds (float64/32,
// int64/32/int) to int. Strings yield the iamstring sentinel, nil yields
// (-1, error), and anything else yields a descriptive error.
func SimpleNumberAsInt(o interface{}) (int, error) {
	if o == nil {
		return -1, errors.New("null")
	}
	switch reflect.ValueOf(o).Kind() {
	case reflect.String:
		return 0, iamstring
	case reflect.Float64:
		return int(o.(float64)), nil
	case reflect.Int64:
		return int(o.(int64)), nil
	case reflect.Int:
		return o.(int), nil
	case reflect.Int32:
		return int(o.(int32)), nil
	case reflect.Float32:
		return int(o.(float32)), nil
	}
	return 0, fmt.Errorf("cannot convert %v (%t) to int", o, o)
}
// NumberAsInt converts any integer, unsigned or float kind to int.
// nil yields (-1, error); non-numeric values yield a descriptive error.
func NumberAsInt(o interface{}) (int, error) {
	if o == nil {
		return -1, errors.New("null")
	}
	rv := reflect.ValueOf(o)
	switch rv.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return int(rv.Int()), nil
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return int(rv.Uint()), nil
	case reflect.Float32, reflect.Float64:
		return int(rv.Float()), nil
	}
	return 0, fmt.Errorf("cannot convert %v (%t) to int", o, o)
}
// AsInt coerces o to int: numeric kinds are truncated, bools map to 1/0,
// strings are parsed base-10 (falling back to defaultval on parse
// failure), time values become unix seconds, and nil yields defaultval.
// Unsupported types panic.
func AsInt(o interface{}, defaultval int) (r int) {
	if o == nil {
		return defaultval
	}
	rv := reflect.ValueOf(o)
	switch rv.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return int(rv.Int())
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return int(rv.Uint())
	case reflect.Float32, reflect.Float64:
		return int(rv.Float())
	case reflect.Bool:
		if rv.Bool() {
			return 1
		}
		return 0
	case reflect.String:
		parsed, err := strconv.ParseInt(rv.String(), 10, 64)
		if err != nil {
			return defaultval
		}
		return int(parsed)
	}
	switch stamp := o.(type) {
	case time.Time:
		return int(stamp.Unix())
	case *time.Time:
		return int(stamp.Unix())
	}
	panic(fmt.Errorf("convert not support type %v value %v ", reflect.TypeOf(o), reflect.ValueOf(o)))
}
// AsUInt32 coerces o to uint32: numeric kinds are converted (negative
// values wrap), bools map to 1/0, strings are parsed as signed base-10
// then converted (defaultval on parse failure), time values become unix
// seconds, and nil yields defaultval. Unsupported types panic.
func AsUInt32(o interface{}, defaultval uint32) (r uint32) {
	if o == nil {
		return defaultval
	}
	rv := reflect.ValueOf(o)
	switch rv.Kind() {
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return uint32(rv.Uint())
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return uint32(rv.Int())
	case reflect.Float32, reflect.Float64:
		return uint32(rv.Float())
	case reflect.Bool:
		if rv.Bool() {
			return 1
		}
		return 0
	case reflect.String:
		parsed, err := strconv.ParseInt(rv.String(), 10, 64)
		if err != nil {
			return defaultval
		}
		return uint32(parsed)
	}
	switch stamp := o.(type) {
	case time.Time:
		return uint32(stamp.Unix())
	case *time.Time:
		return uint32(stamp.Unix())
	}
	panic(fmt.Errorf("convert not support type %v value %v ", reflect.TypeOf(o), reflect.ValueOf(o)))
}
// AsInt64 coerces o to int64: numeric kinds are converted, bools map to
// 1/0, strings are parsed base-10 (defaultval on failure), time values
// become unix seconds, nil yields defaultval. Unsupported types panic.
func AsInt64(o interface{}, defaultval int64) (r int64) {
	if o == nil {
		return defaultval
	}
	rv := reflect.ValueOf(o)
	switch rv.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return rv.Int()
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return int64(rv.Uint())
	case reflect.Float32, reflect.Float64:
		return int64(rv.Float())
	case reflect.Bool:
		if rv.Bool() {
			return 1
		}
		return 0
	case reflect.String:
		parsed, err := strconv.ParseInt(rv.String(), 10, 64)
		if err != nil {
			return defaultval
		}
		return parsed
	}
	switch stamp := o.(type) {
	case time.Time:
		return int64(stamp.Unix())
	case *time.Time:
		return int64(stamp.Unix())
	}
	panic(fmt.Errorf("convert not support type %v value %v ", reflect.TypeOf(o), reflect.ValueOf(o)))
}
// AsUInt64 coerces o to uint64: numeric kinds are converted (negative
// values wrap), bools map to 1/0, strings are parsed as signed base-10
// then converted (defaultval on parse failure), time values become unix
// seconds, and nil yields defaultval. Unsupported types panic.
//
// Fixes vs the original: the reflect.Int64 case asserted o.(uint64)
// (panics for every int64 input) and the signed-int cases called
// Value.Uint() on signed kinds (also a reflect panic). Signed values are
// now converted with an explicit uint64(vref.Int()).
func AsUInt64(o interface{}, defaultval uint64) (r uint64) {
	if o == nil {
		return defaultval
	}
	var vref = reflect.ValueOf(o)
	switch vref.Kind() {
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return vref.Uint()
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return uint64(vref.Int())
	case reflect.Float32, reflect.Float64:
		return uint64(vref.Float())
	case reflect.Bool:
		if vref.Bool() {
			return 1
		}
		return 0
	case reflect.String:
		var i64, err = strconv.ParseInt(vref.String(), 10, 64)
		if err != nil {
			return defaultval
		}
		return uint64(i64)
	}
	switch t := o.(type) {
	case time.Time:
		return uint64(t.Unix())
	case *time.Time:
		return uint64(t.Unix())
	}
	panic(fmt.Errorf("convert not support type %v value %v ", reflect.TypeOf(o), reflect.ValueOf(o)))
}
// AsFloat32 coerces o to float32: numeric kinds are converted, bools map
// to 1/0, strings are parsed as float64 then narrowed (defaultval on
// parse failure), time values become unix seconds, nil yields
// defaultval. Unsupported types panic.
func AsFloat32(o interface{}, defaultval float32) (r float32) {
	if o == nil {
		return defaultval
	}
	rv := reflect.ValueOf(o)
	switch rv.Kind() {
	case reflect.Float32, reflect.Float64:
		return float32(rv.Float())
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return float32(rv.Int())
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return float32(rv.Uint())
	case reflect.Bool:
		if rv.Bool() {
			return 1
		}
		return 0
	case reflect.String:
		// Parse at 64-bit precision, then narrow (matches strconv.ParseFloat(s, 64)).
		parsed, err := strconv.ParseFloat(rv.String(), 64)
		if err != nil {
			return defaultval
		}
		return float32(parsed)
	}
	switch stamp := o.(type) {
	case time.Time:
		return float32(stamp.Unix())
	case *time.Time:
		return float32(stamp.Unix())
	}
	panic(fmt.Errorf("convert not support type %v value %v ", reflect.TypeOf(o), reflect.ValueOf(o)))
}
// AsFloat64 coerces o to float64: numeric kinds are converted, bools map
// to 1/0, strings are parsed (defaultval on parse failure), time values
// become unix seconds, nil yields defaultval. Unsupported types panic.
func AsFloat64(o interface{}, defaultval float64) (r float64) {
	if o == nil {
		return defaultval
	}
	rv := reflect.ValueOf(o)
	switch rv.Kind() {
	case reflect.Float32, reflect.Float64:
		return rv.Float()
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return float64(rv.Int())
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return float64(rv.Uint())
	case reflect.Bool:
		if rv.Bool() {
			return 1
		}
		return 0
	case reflect.String:
		parsed, err := strconv.ParseFloat(rv.String(), 64)
		if err != nil {
			return defaultval
		}
		return parsed
	}
	switch stamp := o.(type) {
	case time.Time:
		return float64(stamp.Unix())
	case *time.Time:
		return float64(stamp.Unix())
	}
	panic(fmt.Errorf("convert not support type %v value %v ", reflect.TypeOf(o), reflect.ValueOf(o)))
}
// AsBool coerces o to bool: numeric kinds are "non-zero is true",
// strings go through strconv.ParseBool (defaultval on failure), nil
// yields defaultval. Unsupported types panic.
func AsBool(o interface{}, defaultval bool) (r bool) {
	if o == nil {
		return defaultval
	}
	rv := reflect.ValueOf(o)
	switch rv.Kind() {
	case reflect.Bool:
		return rv.Bool()
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return rv.Int() != 0
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return rv.Uint() != 0
	case reflect.Float32, reflect.Float64:
		return rv.Float() != 0
	case reflect.String:
		parsed, err := strconv.ParseBool(rv.String())
		if err != nil {
			return defaultval
		}
		return parsed
	}
	panic(fmt.Errorf("convert not support type %v value %v ", reflect.TypeOf(o), reflect.ValueOf(o)))
}
// AsMap views o as a map[string]interface{}. A value already of that
// type is returned directly; any other map kind is copied with its keys
// stringified via AsStr (entries whose key stringifies to "" are
// dropped). When nothing usable is found, createIfNot decides between a
// fresh empty map and nil.
func AsMap(o interface{}, createIfNot bool) map[string]interface{} {
	if o == nil {
		if createIfNot {
			return make(map[string]interface{})
		}
		return nil
	}
	if direct, ok := o.(map[string]interface{}); ok {
		return direct
	}
	var result map[string]interface{}
	rv := reflect.ValueOf(o)
	if rv.Kind() == reflect.Map {
		result = make(map[string]interface{})
		for _, keyval := range rv.MapKeys() {
			name := AsStr(keyval.Interface(), "")
			if len(name) == 0 {
				continue
			}
			result[name] = rv.MapIndex(keyval).Interface()
		}
	}
	if result == nil && createIfNot {
		return make(map[string]interface{})
	}
	return result
}
// AsStringMap views o as a map[string]string. A value already of that
// type is returned directly; any other map kind is copied with keys and
// values stringified via AsStr (entries whose key stringifies to "" are
// dropped). When nothing usable is found, createIfNot decides between a
// fresh empty map and nil.
func AsStringMap(o interface{}, createIfNot bool) map[string]string {
	if o == nil {
		if createIfNot {
			return make(map[string]string)
		}
		return nil
	}
	if direct, ok := o.(map[string]string); ok {
		return direct
	}
	var result map[string]string
	rv := reflect.ValueOf(o)
	if rv.Kind() == reflect.Map {
		result = make(map[string]string)
		for _, keyval := range rv.MapKeys() {
			name := AsStr(keyval.Interface(), "")
			if len(name) == 0 {
				continue
			}
			result[name] = AsStr(rv.MapIndex(keyval).Interface(), "")
		}
	}
	if result == nil && createIfNot {
		return make(map[string]string)
	}
	return result
}
// AsSlice views o as []interface{}. A value already of that type is
// returned directly; other slice/array kinds are copied element by
// element. When nothing usable is found and createIfNotLen > 0, a zeroed
// slice of that length is returned; otherwise nil.
func AsSlice(o interface{}, createIfNotLen int) []interface{} {
	if direct, ok := o.([]interface{}); ok {
		return direct
	}
	var result []interface{}
	rv := reflect.ValueOf(o)
	if kind := rv.Kind(); kind == reflect.Slice || kind == reflect.Array {
		result = make([]interface{}, rv.Len())
		for i := range result {
			result[i] = rv.Index(i).Interface()
		}
	}
	if result == nil && createIfNotLen > 0 {
		result = make([]interface{}, createIfNotLen)
	}
	return result
}
// AsStringSlice views o as []string. A value already of that type is
// returned directly; other slice/array kinds are copied with each
// element stringified via AsStr. When nothing usable is found and
// createIfNotLen > 0, a zeroed slice of that length is returned;
// otherwise nil.
func AsStringSlice(o interface{}, createIfNotLen int) []string {
	if direct, ok := o.([]string); ok {
		return direct
	}
	var result []string
	rv := reflect.ValueOf(o)
	if kind := rv.Kind(); kind == reflect.Slice || kind == reflect.Array {
		result = make([]string, rv.Len())
		for i := range result {
			result[i] = AsStr(rv.Index(i).Interface(), "")
		}
	}
	if result == nil && createIfNotLen > 0 {
		result = make([]string, createIfNotLen)
	}
	return result
}
func AsError(o interface{}) error {
if o == nil {
return nil
}
var e, ok = o.(error)
if ok {
return e
}
return fmt.Errorf("%v", o)
}
// AsTime coerces o into a *time.Time. Numeric kinds are treated as unix
// timestamps in seconds, strings are parsed via qtime.ParseTime, and
// time.Time / *time.Time values are returned as-is. nil and failed
// string parses yield def; any other type panics.
//
// Fixes vs the original: the numeric branches wrote through the nil
// named return (*t = ...), which panicked on every numeric input, and
// then fell through to the panic because they never returned. Values are
// now allocated and returned. time.Time inputs, which previously
// panicked, are passed through (backward-compatible generalization).
func AsTime(o interface{}, def *time.Time) *time.Time {
	if o == nil {
		return def
	}
	switch v := o.(type) {
	case time.Time:
		return &v
	case *time.Time:
		return v
	}
	var vref = reflect.ValueOf(o)
	switch vref.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		var parsed = time.Unix(vref.Int(), 0)
		return &parsed
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		var parsed = time.Unix(int64(vref.Uint()), 0)
		return &parsed
	case reflect.Float32, reflect.Float64:
		var parsed = time.Unix(int64(vref.Float()), 0)
		return &parsed
	case reflect.String:
		var t, err = qtime.ParseTime(vref.String())
		if err != nil {
			return def
		}
		return t
	}
	panic(fmt.Errorf("convert not support type %v value %v ", reflect.TypeOf(o), reflect.ValueOf(o)))
}
// CastSimpleV converts a reflect.Value to the simple (scalar) target
// type t, staying in the reflect domain. Values already of the target
// type or kind pass through; interface-kind sources are unwrapped via
// the As* helpers; string sources are parsed (parse failures panic).
// Unsupported combinations panic.
//
// Fixes vs the original:
//   - the interface-target branch did reflect.New(t) then n.Set(oval),
//     which always panicked (Set of a non-pointer value on a pointer
//     Value) and returned the pointer rather than the boxed value; it
//     now boxes via reflect.New(t).Elem().
//   - the Int64-target/unsigned-source case returned a uint64-typed
//     Value; it is now converted to int64 to match the requested target.
func CastSimpleV(oval reflect.Value, t reflect.Type) reflect.Value {
	if !oval.IsValid() {
		return reflect.Zero(t)
	}
	var otype = oval.Type()
	if otype == t {
		return oval
	}
	var okind = otype.Kind()
	var tkind = t.Kind()
	if okind == tkind {
		return oval
	}
	if tkind == reflect.Interface {
		// Box the concrete value into a fresh interface-typed Value.
		var boxed = reflect.New(t).Elem()
		boxed.Set(oval)
		return boxed
	}
	switch tkind {
	case reflect.String:
		if okind == reflect.Interface {
			return reflect.ValueOf(AsStr(oval.Interface(), ""))
		}
		return reflect.ValueOf(fmt.Sprintf("%v", oval))
	case reflect.Bool:
		switch okind {
		case reflect.Interface:
			return reflect.ValueOf(AsBool(oval.Interface(), false))
		case reflect.Int8, reflect.Int, reflect.Int16, reflect.Int64:
			return reflect.ValueOf(oval.Int() != 0)
		case reflect.Float32, reflect.Float64:
			return reflect.ValueOf(oval.Float() != 0)
		case reflect.String:
			var s = oval.String()
			s = strings.ToLower(s)
			return reflect.ValueOf("true" == s)
		}
	case reflect.Int:
		switch okind {
		case reflect.Interface:
			return reflect.ValueOf(AsInt(oval.Interface(), 0))
		case reflect.Int8, reflect.Int16, reflect.Int64:
			return reflect.ValueOf(int(oval.Int()))
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			return reflect.ValueOf(int(oval.Uint()))
		case reflect.Float32, reflect.Float64:
			return reflect.ValueOf(int(oval.Float()))
		case reflect.String:
			var i, err = strconv.Atoi(oval.String())
			if err != nil {
				panic(err)
			}
			return reflect.ValueOf(i)
		}
	case reflect.Int64:
		switch okind {
		case reflect.Interface:
			return reflect.ValueOf(AsInt64(oval.Interface(), 0))
		case reflect.Int8, reflect.Int16, reflect.Int:
			return reflect.ValueOf(oval.Int())
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			// Convert so the produced Value is int64-typed, as requested.
			return reflect.ValueOf(int64(oval.Uint()))
		case reflect.Float32, reflect.Float64:
			return reflect.ValueOf(int64(oval.Float()))
		case reflect.String:
			var s, err = strconv.ParseInt(oval.String(), 10, 64)
			if err != nil {
				panic(err)
			}
			return reflect.ValueOf(s)
		}
	case reflect.Float32:
		switch okind {
		case reflect.Interface:
			return reflect.ValueOf(AsFloat32(oval.Interface(), 0))
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int64:
			return reflect.ValueOf(float32(oval.Int()))
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			return reflect.ValueOf(float32(oval.Uint()))
		case reflect.Float64:
			return reflect.ValueOf(float32(oval.Float()))
		case reflect.String:
			var f, err = strconv.ParseFloat(oval.String(), 32)
			if err != nil {
				panic(err)
			}
			return reflect.ValueOf(float32(f))
		}
	case reflect.Float64:
		switch okind {
		case reflect.Interface:
			return reflect.ValueOf(AsFloat64(oval.Interface(), 0))
		case reflect.Int8, reflect.Int16, reflect.Int, reflect.Int64:
			return reflect.ValueOf(float64(oval.Int()))
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			return reflect.ValueOf(float64(oval.Uint()))
		case reflect.Float32:
			return reflect.ValueOf(oval.Float())
		case reflect.String:
			var f, err = strconv.ParseFloat(oval.String(), 64)
			if err != nil {
				panic(err)
			}
			return reflect.ValueOf(f)
		}
	}
	panic(fmt.Errorf("unsupport simple case type %v value %v ==> type %v", otype, oval, t))
}
// CastSimple converts o to the simple (scalar) target type t. Values
// already of the target type or kind, and interface targets, pass
// through unchanged. String sources are parsed; parse failures panic,
// as do unsupported source/target combinations.
//
// NOTE(review): okind comes from reflect.TypeOf(o), which for a non-nil
// o always yields the dynamic (concrete) type — it can never be
// reflect.Interface, so the "case reflect.Interface" arms below appear
// unreachable; confirm before relying on them.
func CastSimple(o interface{}, t reflect.Type) interface{} {
	if o == nil {
		return nil
	}
	var otype = reflect.TypeOf(o)
	if otype == t {
		return o
	}
	var okind = otype.Kind()
	var tkind = t.Kind()
	if okind == tkind {
		return o
	}
	if tkind == reflect.Interface {
		// Any value already satisfies interface{} targets; returned as-is.
		return o
	}
	var oval = reflect.ValueOf(o)
	switch tkind {
	case reflect.String:
		return fmt.Sprintf("%v", o)
	case reflect.Bool:
		switch okind {
		case reflect.Interface:
			return AsBool(o, false)
		case reflect.Int8, reflect.Int, reflect.Int16, reflect.Int64:
			return oval.Int() != 0
		case reflect.Float32, reflect.Float64:
			return oval.Float() != 0
		case reflect.String:
			// Case-insensitive "true" check (not strconv.ParseBool).
			var s = oval.String()
			s = strings.ToLower(s)
			return "true" == s
		}
	case reflect.Int:
		switch okind {
		case reflect.Interface:
			return AsInt(o, 0)
		case reflect.Int8, reflect.Int16, reflect.Int64:
			return int(oval.Int())
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			return int(oval.Uint())
		case reflect.Float32, reflect.Float64:
			return int(oval.Float())
		case reflect.String:
			var i, err = strconv.Atoi(oval.String())
			if err != nil {
				panic(err)
			}
			return i
		}
	case reflect.Int64:
		switch okind {
		case reflect.Interface:
			return AsInt64(o, 0)
		case reflect.Int8, reflect.Int16, reflect.Int:
			return oval.Int()
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			// NOTE(review): returns a uint64, not int64 — callers asserting
			// .(int64) on this path will panic; confirm intended.
			return oval.Uint()
		case reflect.Float32, reflect.Float64:
			return int64(oval.Float())
		case reflect.String:
			var s, err = strconv.ParseInt(oval.String(), 10, 64)
			if err != nil {
				panic(err)
			}
			return s
		}
	case reflect.Float32:
		switch okind {
		case reflect.Interface:
			return AsFloat32(o, 0)
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int64:
			return float32(oval.Int())
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			return float32(oval.Uint())
		case reflect.Float64:
			return float32(oval.Float())
		case reflect.String:
			var f, err = strconv.ParseFloat(oval.String(), 32)
			if err != nil {
				panic(err)
			}
			return float32(f)
		}
	case reflect.Float64:
		switch okind {
		case reflect.Interface:
			return AsFloat64(o, 0)
		case reflect.Int8, reflect.Int16, reflect.Int, reflect.Int64:
			return float64(oval.Int())
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			return float64(oval.Uint())
		case reflect.Float32:
			return oval.Float()
		case reflect.String:
			var f, err = strconv.ParseFloat(oval.String(), 64)
			if err != nil {
				panic(err)
			}
			return f
		}
	}
	panic(fmt.Errorf("unsupport simple case type %v value %v ==> type %v", otype, o, t))
}
// CastComplex converts o to type t, handling slice/array and map targets
// in addition to the simple scalar kinds delegated to CastSimple.
// o is returned unchanged when no conversion is needed; unsupported
// casts panic.
//
// FIX: the map branch compared ovaltype to itself (always equal), so the
// converted map was never populated correctly, and the built map was
// never returned — every genuine map conversion fell through to panic.
func CastComplex(o interface{}, t reflect.Type) interface{} {
	if o == nil {
		return nil
	}
	var tkind = t.Kind()
	if tkind == reflect.Interface {
		return o
	}
	var otype = reflect.TypeOf(o)
	if otype == t {
		return o
	}
	var okind = otype.Kind()
	// Targets outside this list are simple scalars handled by CastSimple.
	var simple = true
	switch tkind {
	case reflect.Array, reflect.Slice, reflect.Map, reflect.String, reflect.Chan, reflect.Ptr, reflect.UnsafePointer, reflect.Func:
		simple = false
	}
	if simple {
		if okind == tkind {
			return o
		}
		return CastSimple(o, t)
	}
	var oval = reflect.ValueOf(o)
	if tkind == reflect.Slice || tkind == reflect.Array {
		var oeletype = otype.Elem()
		var oelekind = oeletype.Kind()
		var teletype = t.Elem()
		var telekind = teletype.Kind()
		// Matching element types/kinds: accept the value as-is.
		if oeletype == teletype || oelekind == telekind {
			return o
		}
		// Element-by-element conversion into a fresh slice of the target
		// element type.
		var olen = oval.Len()
		var slicetype = reflect.SliceOf(teletype)
		var slice = reflect.MakeSlice(slicetype, olen, olen)
		for i := 0; i < olen; i++ {
			var oeleval = oval.Index(i)
			var sliceele = slice.Index(i)
			if !oeleval.IsValid() {
				sliceele.Set(reflect.Zero(teletype))
				continue
			}
			if telekind == reflect.Interface {
				sliceele.Set(oeleval)
				continue
			}
			var subv = CastSimpleV(oeleval, teletype)
			sliceele.Set(subv)
		}
		return slice.Interface()
	}
	if tkind == reflect.Map {
		var okeytype = otype.Key()
		var okeykind = okeytype.Kind()
		var ovaltype = otype.Elem()
		var ovalkind = ovaltype.Kind()
		var tvaltype = t.Elem()
		var tvalkind = tvaltype.Kind()
		var tkeytype = t.Key()
		var tkeykind = tkeytype.Kind()
		var keysame = (okeytype == tkeytype || okeykind == tkeykind)
		// BUG FIX: was "ovaltype == ovaltype" (always true), which made
		// valsame unconditionally true and skipped value conversion.
		var valsame = (ovaltype == tvaltype || ovalkind == tvalkind)
		if keysame && valsame {
			return o
		}
		var m = reflect.MakeMap(reflect.MapOf(tkeytype, tvaltype))
		for _, key := range oval.MapKeys() {
			var val = oval.MapIndex(key)
			if !keysame {
				key = CastSimpleV(key, tkeytype)
			}
			if !valsame {
				val = CastSimpleV(val, tvaltype)
			}
			m.SetMapIndex(key, val)
		}
		// BUG FIX: the converted map was built but never returned, so
		// execution always fell through to the panic below.
		return m.Interface()
	}
	panic(fmt.Errorf("unsupport type cast type %v value %v ==> type %v", otype, o, t))
}
// As converts o to the builtin type named by typename ("string", "int",
// "int64", "bool", "float32", "float64", "map", "slice", "stringmap",
// "stringslice", "time" or "error"). It returns nil when o is nil or
// when typename is not recognised.
func As(typename string, o interface{}) interface{} {
	if o == nil {
		return nil
	}
	var converters = map[string]func(interface{}) interface{}{
		"string":      func(v interface{}) interface{} { return AsStr(v, "") },
		"int":         func(v interface{}) interface{} { return AsInt(v, 0) },
		"int64":       func(v interface{}) interface{} { return AsInt64(v, 0) },
		"bool":        func(v interface{}) interface{} { return AsBool(v, false) },
		"float32":     func(v interface{}) interface{} { return AsFloat32(v, 0) },
		"float64":     func(v interface{}) interface{} { return AsFloat64(v, 0) },
		"map":         func(v interface{}) interface{} { return AsMap(v, false) },
		"slice":       func(v interface{}) interface{} { return AsSlice(v, 0) },
		"stringmap":   func(v interface{}) interface{} { return AsStringMap(v, false) },
		"stringslice": func(v interface{}) interface{} { return AsStringSlice(v, 0) },
		"time":        func(v interface{}) interface{} { return AsTime(v, nil) },
		"error":       func(v interface{}) interface{} { return AsError(v) },
	}
	if convert, ok := converters[typename]; ok {
		return convert(o)
	}
	return nil
}
// AsWithErr converts o to the builtin type named by typename, like As,
// but reports an unrecognised typename as an error instead of returning
// nil. A nil o converts to (nil, nil).
func AsWithErr(typename string, o interface{}) (interface{}, error) {
	if o == nil {
		return nil, nil
	}
	var result interface{}
	switch typename {
	case "string":
		result = AsStr(o, "")
	case "int":
		result = AsInt(o, 0)
	case "int64":
		result = AsInt64(o, 0)
	case "bool":
		result = AsBool(o, false)
	case "float32":
		result = AsFloat32(o, 0)
	case "float64":
		result = AsFloat64(o, 0)
	case "map":
		result = AsMap(o, false)
	case "slice":
		result = AsSlice(o, 0)
	case "stringmap":
		result = AsStringMap(o, false)
	case "stringslice":
		result = AsStringSlice(o, 0)
	case "time":
		result = AsTime(o, nil)
	case "error":
		result = AsError(o)
	default:
		return nil, fmt.Errorf("convert not support %s", typename)
	}
	return result, nil
}
// AsStrErr converts o to a string (default "") and forwards err
// unchanged; convenient for wrapping a (value, error) pair in one call.
func AsStrErr(o interface{}, err error) (string, error) {
	return AsStr(o, ""), err
}

// AsIntErr converts o to an int (default 0) and forwards err unchanged.
func AsIntErr(o interface{}, err error) (int, error) {
	return AsInt(o, 0), err
}

// AsInt64Err converts o to an int64 (default 0) and forwards err unchanged.
func AsInt64Err(o interface{}, err error) (int64, error) {
	return AsInt64(o, 0), err
}

// AsFloat32Err converts o to a float32 (default 0) and forwards err unchanged.
func AsFloat32Err(o interface{}, err error) (float32, error) {
	return AsFloat32(o, 0), err
}

// AsFloat64Err converts o to a float64 (default 0) and forwards err unchanged.
func AsFloat64Err(o interface{}, err error) (float64, error) {
	return AsFloat64(o, 0), err
}

// AsBoolErr converts o to a bool (default false) and forwards err unchanged.
func AsBoolErr(o interface{}, err error) (bool, error) {
	return AsBool(o, false), err
}

// AsMapErr converts o to a map[string]interface{} and forwards err unchanged.
func AsMapErr(o interface{}, err error) (map[string]interface{}, error) {
	return AsMap(o, false), err
}

// AsStringMapErr converts o to a map[string]string and forwards err unchanged.
func AsStringMapErr(o interface{}, err error) (map[string]string, error) {
	return AsStringMap(o, false), err
}

// AsSliceErr converts o to a []interface{} and forwards err unchanged.
func AsSliceErr(o interface{}, err error) ([]interface{}, error) {
	return AsSlice(o, 0), err
}

// AsStringArrayErr converts o to a []string and forwards err unchanged.
func AsStringArrayErr(o interface{}, err error) ([]string, error) {
	return AsStringSlice(o, 0), err
}
// Get walks a path of keys through nested containers starting at o and
// returns the value found, or defaultval when any step cannot be
// resolved. Key interpretation per container kind:
//   - map:         key is used as the map key
//   - slice/array: key is converted to an int index
//   - ptr:         the pointer is dereferenced
//     (NOTE(review): this consumes a path key without applying it — confirm intended)
//   - struct:      key must be a string field name
//
// FIX: out-of-range indexes, wrong-typed map keys and non-string struct
// keys previously panicked inside reflect; they now return defaultval,
// matching the function's "default on missing path" contract.
func Get(o interface{}, defaultval interface{}, keys ...interface{}) (r interface{}) {
	if o == nil {
		return defaultval
	}
	var current = o
	for _, key := range keys {
		var subrefv reflect.Value
		var refv = reflect.ValueOf(current)
		switch refv.Kind() {
		case reflect.Map:
			var refkey = reflect.ValueOf(key)
			// Guard: a key whose type does not match the map's key type
			// would make MapIndex panic.
			if !refkey.IsValid() || !refkey.Type().AssignableTo(refv.Type().Key()) {
				return defaultval
			}
			subrefv = refv.MapIndex(refkey)
		case reflect.Slice, reflect.Array:
			var ikey = AsInt(key, -404)
			// Guard: an out-of-range index would make Index panic.
			if ikey < 0 || ikey >= refv.Len() {
				return defaultval
			}
			subrefv = refv.Index(ikey)
		case reflect.Ptr:
			subrefv = refv.Elem()
		case reflect.Struct:
			// Guard: a non-string key cannot name a struct field.
			var skey, ok = key.(string)
			if !ok {
				return defaultval
			}
			subrefv = refv.FieldByName(skey)
		}
		if !subrefv.IsValid() {
			return defaultval
		}
		current = subrefv.Interface()
	}
	return current
}
// GetStr walks keys via Get and converts the result to a string,
// returning defaultval when the path is missing.
func GetStr(o interface{}, defaultval string, keys ...interface{}) (val string) {
	var oval = Get(o, nil, keys...)
	if oval == nil {
		return defaultval
	}
	return AsStr(oval, defaultval)
}

// GetInt walks keys via Get and converts the result to an int,
// returning defaultval when the path is missing.
func GetInt(o interface{}, defaultval int, keys ...interface{}) (val int) {
	var oval = Get(o, nil, keys...)
	if oval == nil {
		return defaultval
	}
	return AsInt(oval, defaultval)
}

// GetInt64 walks keys via Get and converts the result to an int64,
// returning defaultval when the path is missing.
func GetInt64(o interface{}, defaultval int64, keys ...interface{}) (val int64) {
	var oval = Get(o, nil, keys...)
	if oval == nil {
		return defaultval
	}
	return AsInt64(oval, defaultval)
}

// GetUInt64 walks keys via Get and converts the result to a uint64,
// returning defaultval when the path is missing.
func GetUInt64(o interface{}, defaultval uint64, keys ...interface{}) (val uint64) {
	var oval = Get(o, nil, keys...)
	if oval == nil {
		return defaultval
	}
	return AsUInt64(oval, defaultval)
}

// GetFloat64 walks keys via Get and converts the result to a float64,
// returning defaultval when the path is missing.
func GetFloat64(o interface{}, defaultval float64, keys ...interface{}) (val float64) {
	var oval = Get(o, nil, keys...)
	if oval == nil {
		return defaultval
	}
	return AsFloat64(oval, defaultval)
}

// GetBool walks keys via Get and converts the result to a bool,
// returning defaultval when the path is missing.
func GetBool(o interface{}, defaultval bool, keys ...interface{}) (val bool) {
	var oval = Get(o, nil, keys...)
	if oval == nil {
		return defaultval
	}
	return AsBool(oval, defaultval)
}

// GetSlice walks keys via Get and converts the result to a []interface{}.
func GetSlice(o interface{}, keys ...interface{}) (val []interface{}) {
	var oval = Get(o, nil, keys...)
	return AsSlice(oval, 0)
}

// GetStringSlice walks keys via Get and converts the result to a []string.
func GetStringSlice(o interface{}, keys ...interface{}) (val []string) {
	var oval = Get(o, nil, keys...)
	return AsStringSlice(oval, 0)
}

// GetMap walks keys via Get and converts the result to a
// map[string]interface{}; createifnil is forwarded to AsMap.
func GetMap(o interface{}, createifnil bool, keys ...interface{}) (val map[string]interface{}) {
	var oval = Get(o, nil, keys...)
	return AsMap(oval, createifnil)
}

// GetStringMap walks keys via Get and converts the result to a
// map[string]string; createifnil is forwarded to AsStringMap.
func GetStringMap(o interface{}, createifnil bool, keys ...interface{}) (val map[string]string) {
	var oval = Get(o, nil, keys...)
	return AsStringMap(oval, createifnil)
}
| 35,814
|
https://github.com/ragshion/dindik/blob/master/assets/zmentah template/scss/_header.scss
|
Github Open Source
|
Open Source
|
MIT
| null |
dindik
|
ragshion
|
SCSS
|
Code
| 2,321
| 9,156
|
// Base header: text colour and a drop shadow (unless .off-shadow is set).
.header {
  color: $body-color;
}
.header:not(.off-shadow) {
  box-shadow: 0 3px 3px rgba(0, 0, 0, .1);
}
// Top utility bar above the main header (contacts, phone, social icons).
.header-topline {
  display: flex;
  font-size: 14px;
  line-height: 1.2em;
  letter-spacing: .5px;
  word-spacing: -2px;
  align-items: center;
  min-height: 48px;
  background: theme-color("primary");
  &,
  a {
    color: $topline_color;
  }
  [class*='icon-'] {
    position: relative;
    top: 2px;
    font-size: 16px;
    padding-right: 8px;
    color: $topline_icon_color;
  }
}
.header-info:hover {
  [class*='icon-'] {
    color: #fff;
  }
}
// 1px vertical separator between consecutive topline items.
.header-info + *,
.header-phone + * {
  position: relative;
  margin-left: 29px;
  padding-left: 30px;
  &:before {
    content: '';
    position: absolute;
    left: 0;
    top: -1px;
    width: 1px;
    height: 22px;
    background-color: rgba(255,255,255,.2);
  }
}
.header-phone {
  a {
    color: $topline_color;
  }
  [class*='icon-'] {
    font-size: 16px;
    top: 2px;
    color: $topline_icon_color;
  }
}
// Social icon strip; separators are drawn with :before/:after pseudo-elements.
.header-social {
  &:not(:first-child) {
    margin-left: 6px;
  }
  a {
    display: inline-block;
    margin: 0 -4px 0 0;
    padding: 0;
    text-decoration: none;
    position: relative;
    border-radius: 50%;
    &:hover {
      text-decoration: none;
      [class*='icon-'] {
        opacity: .5;
      }
    }
    &:before, &:last-child:after {
      content: '';
      position: absolute;
      left: 0;
      top: 0;
      width: 1px;
      height: 48px;
      background-color: rgba(255,255,255,.2);
    }
    // The last icon also gets a closing separator on its right edge.
    &:last-child:after {
      right: 0;
      left: auto;
    }
  }
  [class*='icon-'] {
    display: block;
    position: relative;
    height: 48px;
    padding: 0 13px;
    z-index: 1;
    font-size: 22px;
    line-height: 48px;
    box-shadow: 0 0 0 0 rgba(255, 255, 255, .2);
    transition: all .2s;
    border-radius: 50%;
  }
}
// Topline variant for tablets and below: dark bar with a slide-down toggle.
@include media-breakpoint-down(md) {
  .header-topline {
    background-color: #22201f;
    color: #fff;
    min-height: 36px;
    max-width: 100%;
    & > .container {
      & > .row > *:last-child {
        padding: 0;
      }
    }
    .dropdown-toggle:after {
      content: none;
    }
  }
  .header-topline.active {
    .header-toggle-slide {
      border-color: transparent;
    }
  }
  .header-info span,
  .header-info [class*='icon-'] {
    color: #fff;
  }
  // Button that reveals the hidden topline content on small screens.
  .header-toggle-slide {
    display: inline-block;
    padding: 0 15px;
    color: #fff;
    border-left: 1px solid #3c3c3c;
    font-size: 14px;
    line-height: 36px;
    [class*='icon-'] {
      font-size: 15px;
    }
    &:hover {
      color: #fff;
      text-decoration: none;
      opacity: .75;
    }
    &.active {
      background-color: theme-color("primary");
      border-color: transparent;
    }
  }
  .header-toggle-slide span {
    padding-left: 10px;
  }
}
// Logo sizing, with step-downs for the sticky header and smaller breakpoints.
.header-logo, .header-logo img {
  max-width: $header-max-width;
  max-height: $header-max-height;
}
.header-content.stuck .header-logo, .header-content.stuck .header-logo img {
  max-height: $header-max-height-sm;
}
// NOTE(review): the next two down(md) blocks could be merged into one.
@include media-breakpoint-down(md) {
  .header-logo, .header-logo img {
    max-width: $header-max-width - 30px;
  }
}
@include media-breakpoint-down(md) {
  .header-logo, .header-logo img {
    max-height: $header-max-height-md;
  }
}
@include media-breakpoint-down(sm) {
  .header-logo, .header-logo img {
    max-height: $header-max-height-sm;
  }
}
// Contact info (icon + text) shown next to the logo.
.header-contact {
  font-size: 16px;
  line-height: 1.6em;
  position: relative;
  top: 5px;
  @include media-breakpoint-down(lg) {
    font-size: 15px;
  }
  .txt-sm {
    font-size: 14px;
  }
  .txt-lg {
    font-size: 22px;
  }
  span {
    color: theme-color("primary");
  }
  [class*='icon-'] {
    position: relative;
    top: 1px;
    width: 28px;
    font-size: 22px;
    color: theme-color("primary");
  }
  &-icon {
    min-width: 28px;
  }
  &-txt {
    & > * {
      margin-bottom: 0;
    }
    & > *+* {
      margin-top: 5px;
    }
  }
}
.header-contact + * {
  margin-top: 5px;
}
// Main header band; gains a fixed "stuck" state when the page is scrolled.
.header-content {
  position: relative;
  padding-top: $header-py;
  padding-bottom: $header-py;
  @include media-breakpoint-down(lg) {
    padding-top: $header-py/0.714;
    padding-bottom: $header-py/0.714;
  }
}
.header-content.stuck {
  position: fixed;
  padding-top: $header-py-stuck;
  padding-bottom: $header-py-stuck;
  background: #fff;
  width: 100%;
  max-width: 100vw;
  box-shadow: 0 3px 3px rgba(0, 0, 0, .1);
  // The catalog widget is too tall for the compact sticky header.
  .catalog-widget {
    display: none !important;
  }
}
@include media-breakpoint-down(lg) {
  .header-content {
    padding-top: $header-py-lg;
    padding-bottom: $header-py-lg;
  }
}
@include media-breakpoint-down(md) {
  .header-content {
    padding-top: $header-py-md;
    padding-bottom: $header-py-md;
  }
}
@include media-breakpoint-down(sm) {
  .header-content {
    padding-top: $header-py-sm;
    padding-bottom: $header-py-sm;
  }
  .header-content.stuck {
    padding-top: $header-py-sm;
    padding-bottom: $header-py-sm;
  }
}
// Right-aligned wrapper for nav, cart, language and search widgets.
.header-nav-wrap {
  display: flex;
  align-items: center;
  justify-content: flex-end;
}
// Primary navigation menu.
.header-nav {
  width: 100%;
  .navbar {
    padding: 0;
    margin: 0;
  }
  .navbar-nav {
    flex-wrap: wrap;
    justify-content: flex-end;
  }
  .nav-item {
    padding: 0 15px;
    @include media-breakpoint-down(lg) {
      padding: 0 7px;
    }
  }
  .nav-item > .nav-link {
    position: relative;
    padding: 0;
    color: $header-nav-color;
    font-size: 15px;
    line-height: 52px;
    font-weight: 600;
    text-transform: uppercase;
    font-family: $header-nav-font-family;
    background: transparent;
    border: 0;
    @include transition(.3s);
    @include media-breakpoint-down(lg) {
      font-size: 14px;
    }
    &:hover {
      color: theme-color("primary");
      @include transition(.3s);
    }
  }
  .nav-item.active > .nav-link {
    color: theme-color("primary");
  }
  .nav-item:last-child {
    padding-right: 0;
  }
}
// Container for quick-link panels that slide under the topline.
.header-quickLinks {
  position: relative;
  z-index: 1001;
  max-width: 100%;
  overflow-x: hidden;
  overflow-y: hidden;
  .quickLinks-top {
    position: relative;
    z-index: 2;
    & + * {
      position: relative;
      z-index: 1;
    }
  }
}
// Burger button; hidden on desktop, shown by the down(md) block further on.
.navbar-toggler {
  display: none;
  font-size: 26px;
  padding: 5px 10px;
  position: absolute;
  right: 10px;
  z-index: 1;
  @include media-breakpoint-down(sm) {
    font-size: 18px;
    right: 0;
  }
  // Open state swaps the burger glyph for a close glyph.
  &:not(.collapsed){
    font-size: 28px;
    @include media-breakpoint-down(sm) {
      font-size: 18px;
    }
    .icon-menu:before {
      content: '\e937';
      position: relative;
      top: -2px;
    }
  }
}
.header-content.stuck .navbar-toggler {
  margin-top: -10px;
}
@include media-breakpoint-down(md) {
  .header-content.stuck .navbar-toggler {
    margin-top: 0;
    top: 50% !important;
    transform: translateY(-50%);
  }
}
// Desktop (lg and up): hover-driven dropdown menus.
// NOTE: this media block stays open until the matching closing brace
// after the .btco-hover-menu caret rules below.
@include media-breakpoint-up(lg) {
  .header .nav-item {
    position: relative;
  }
  // Dropdown panels start hidden and shifted left; hover slides them in.
  .header .navbar-nav > .nav-item > .dropdown-menu, .dropdown-item + .dropdown-menu {
    display: block !important;
    visibility: hidden;
    position: absolute;
    top: 100%;
    left: -20px;
    pointer-events: none;
    opacity: 0;
    min-width: 230px;
    z-index: 1000;
    list-style: none;
    margin: 0;
    padding: 10px 0;
    text-align: left;
    transform: translateX(-20px);
    @include transition (.0s ease .0s);
    box-shadow: 0 3px 5px rgba(0, 0, 0, .15) !important;
    border: 0;
    border-top: 2px solid theme-color("primary");
    border-radius: 0;
    a {
      color: $header-nav-color;
      font-size: 15px;
      line-height: 26px;
      font-weight: 500;
      font-family: $header-nav-font-family;
      background-color: transparent;
      padding: 7px 20px;
      &.dropdown-toggle {
        padding-right: 40px;
      }
      &:hover {
        color: theme-color("primary");
      }
      @include media-breakpoint-down(lg) {
        font-size: 16px;
      }
    }
  }
  // Hover reveals the panel and slides it to its resting position.
  .header .navbar-nav > .nav-item:hover > .dropdown-menu,
  .header .navbar-nav .dropdown-item:hover + .dropdown-menu, .header .navbar-nav .dropdown-item + .dropdown-menu:hover {
    opacity: 1;
    visibility: visible;
    pointer-events: auto;
    transform: translateX(0);
    @include transition (.5s ease .1s);
  }
  .header .navbar-nav > .nav-item:hover > .dropdown-menu {
    transform: translateX(9px);
  }
  // Neutralise Bootstrap's active/hover background on menu links.
  .btco-hover-menu .active a,
  .btco-hover-menu .active a:focus,
  .btco-hover-menu .active a:hover,
  .btco-hover-menu li a:hover,
  .btco-hover-menu li a:focus,
  .navbar>.show>a,
  .navbar>.show>a:focus,
  .navbar>.show>a:hover {
    color: #000;
    background: transparent;
    outline: 0;
  }
.dropdown-menu {
padding: 0px 0;
margin: 0 0 0;
border: 0px solid transition !important;
border: 0px solid rgba(0, 0, 0, .15);
border-radius: 0px;
-webkit-box-shadow: none !important;
box-shadow: none !important;
}
  // Nested submenu backgrounds get progressively lighter per level.
  .btco-hover-menu .collapse ul ul,
  .btco-hover-menu .collapse ul ul.dropdown-menu {
    background: #fff;
  }
  .btco-hover-menu .collapse ul ul ul,
  .btco-hover-menu .collapse ul ul ul.dropdown-menu {
    background: #f8f8f8;
  }
  .btco-hover-menu .collapse ul ul ul ul,
  .btco-hover-menu .collapse ul ul ul ul.dropdown-menu {
    background: #f5f5f5
  }
  .btco-hover-menu {
    background: none;
    margin: 0;
    padding: 0;
    min-height: 20px
  }
  // Pure-CSS hover opening of nested submenus (no JS needed on desktop).
  .btco-hover-menu .collapse ul li {
    position: relative;
  }
  .btco-hover-menu .collapse ul li:hover> ul {
    display: block
  }
  .btco-hover-menu .collapse ul ul {
    position: absolute;
    top: 100%;
    left: 0;
    min-width: 250px;
    display: none
  }
  .btco-hover-menu .collapse ul ul li {
    position: relative
  }
  .btco-hover-menu .collapse ul ul li:hover> ul {
    display: block
  }
  // Third level opens to the right of its parent item.
  .btco-hover-menu .collapse ul ul ul {
    position: absolute;
    top: 0;
    left: 100%;
    min-width: 250px;
    display: none
  }
  .btco-hover-menu .collapse ul ul ul li {
    position: relative
  }
  .btco-hover-menu .collapse ul ul ul li:hover ul {
    display: block
  }
  // Fourth level opens to the left to stay on screen.
  .btco-hover-menu .collapse ul ul ul ul {
    position: absolute;
    top: 0;
    left:auto;
    right: 100%;
    min-width: 250px;
    display: none;
    z-index: 1;
    transform: translateX(10px) !important;
  }
  .btco-hover-menu .collapse ul ul ul li:hover ul {
    transform: translateX(0) !important;
  }
.btco-hover-menu .dropdown-menu .dropdown-toggle:after {
position: absolute;
top: 50%;
right: 25px;
margin: 0;
height: auto;
content: '\e930';
font-family: 'dentco';
font-size: 8px;
transform: translateY(-50%);
color: theme-color("primary");
transform: translateY(-50%);
border: 0;
}
}
// Top-level toggles keep no default Bootstrap caret border.
.btco-hover-menu .nav-item > .dropdown-toggle:after {
  border: 0;
}
// Mobile/tablet navigation (collapsed burger menu).
// NOTE: this media block stays open until its matching closing brace
// after the shared caret rule further down.
@include media-breakpoint-down(md) {
  .navbar-toggler {
    display: block;
  }
.navbar {
justify-content: flex-end;
margin-top: -30px;
.dropdown-menu {
border: 0;
box-shadow: 0 !important;
}
}
  .nav-item > .nav-link {
    cursor: pointer;
  }
  .nav-item > .nav-link:before {
    display: none;
  }
  // Expanded burger-menu panel.
  .navbar-collapse {
    padding: 0 25px 10px;
    background: #fff;
    overflow-y: auto;
    .nav-item > .nav-link {
      font-size: 16px;
      line-height: 24px;
      padding-top: 10px;
      padding-bottom: 10px;
    }
    .nav-item > .nav-link.dropdown-toggle {
      padding-right: 40px;
    }
    .navbar-nav {
      padding-top: 25px;
      width: calc(100vw - 70px);
    }
  }
  .header .dropdown {
    max-width: 100%;
  }
.header .dropdown-menu {
& > li > a {
padding-left: 20px;
}
padding: unset;
ul > li > a {
padding-left: 40px;
}
ul ul > li > a {
padding-left: 60px;
}
ul ul > li > a {
padding-left: 80px;
}
}
.header .dropdown-item {
padding: 0;
position: relative;
color: $header-nav-color;
font-size: 16px;
line-height: 24px;
padding-top: 10px;
padding-bottom: 10px;
text-transform: uppercase;
font-family: $header-nav-font-family;
white-space: normal;
&:hover, &:focus {
color: theme-color("primary");
background: transparent;
}
&.dropdown-toggle {
padding-right: 40px;
}
&.dropdown-toggle:after {
position: absolute;
top: 50%;
right: 25px;
margin: 0;
height: auto;
content: '\e930';
font-family: 'dentco';
font-size: 12px;
transform: translateY(-50%);
color: theme-color("primary");
transform: translateY(-50%);
border: 0;
}
}
  .header-nav .nav-item {
    padding: 0;
  }
  // Hairline separators between stacked mobile menu items.
  .header .navbar-nav li {
    border-top: 1px solid #f5f5f5;
  }
  .header .navbar-nav > li:first-child {
    border: 0;
  }
.header .dropdown-item.dropdown-toggle:after, .nav-link.dropdown-toggle:after {
position: absolute;
top: 50%;
right: 15px;
margin: 0;
height: auto;
content: '\e930';
font-family: 'dentco';
font-size: 12px;
transform: translateY(-50%);
color: theme-color("primary");
transform: translateY(-50%);
border: 0;
pointer-events: none;
}
}
// Small screens: tighter menu spacing and a narrower logo.
@include media-breakpoint-down(sm) {
  .header-logo {
    width: $header-width-sm;
  }
  .navbar-collapse {
    padding: 0 0 10px;
    .nav-item > .nav-link, .dropdown-item {
      font-size: 14px;
      line-height: 20px;
      padding-top: 8px;
      padding-bottom: 8px;
    }
  }
  .navbar-collapse {
    .navbar-nav {
      width: calc(100vw - 30px);
    }
  }
}
// The mobile info bar only exists below lg.
@include media-breakpoint-up(lg) {
  .header-mobile-info, .header-mobile-top {
    display: none;
  }
}
// Mobile-only info bar with an expandable details panel (.opened toggled by JS).
@include media-breakpoint-down(lg) {
  .header-mobile-top {
    background-color: theme-color("primary");
    .row {
      height: 44px;
    }
  }
  .header-mobile-info-toggle {
    color: #fff;
    cursor: pointer;
    &:before {
      display: inline-block;
      font-family: 'dentco';
      content: '\e931';
      font-size: 16px;
      line-height: 44px;
      position: relative;
      @include transition (all 0.2s ease-out);
    }
    // Chevron flips when the panel is open.
    &.opened:before {
      transform: rotate(180deg);
    }
  }
  .header-mobile-info-content {
    display: none;
    padding: 15px;
    background-color: #fff;
    .social-list {
      margin-bottom: 0;
    }
    .social-list a {
      font-size: 20px;
      line-height: 20px;
    }
  }
  .header-mobile-info-content.opened {
    display: block;
  }
  .header-button-wrap {
    .btn {
      background-color: #fff !important;
      color: #424242 !important;
      font-size: 12px !important;
      line-height: 16px !important;
      height: 30px !important;
      padding: 7px 15px !important;
      [class*='icon-'] {
        top: 1px !important;
      }
    }
  }
}
// Icon + text rows (address, phone, hours) used in header panels.
.header-list-info {
  &:not(:first-child){
    margin-top: 10px;
  }
  font-size: 15px;
  line-height: 24px;
  padding-left: 28px;
  position: relative;
  color: #677d8f;
  a {
    color: #677d8f;
  }
  .icon {
    position: absolute;
    left: 0;
    top: 4px;
    font-size: 16px;
    &.icon-speech-bubble {
      top: 6px;
    }
  }
}
// Mini-cart trigger with an item-count badge; absolutely positioned on mobile.
.header-cart {
  position: relative;
  display: inline-block;
  vertical-align: top;
  padding-left: 10px;
  padding-right: 10px;
  a.icon {
    font-size: 20px;
    text-decoration: none;
    color: #444;
    @include media-breakpoint-down(md) {
      font-size: 28px;
    }
    @include media-breakpoint-down(sm) {
      font-size: 18px;
    }
  }
  .badge {
    padding: 0;
    width: 16px;
    height: 16px;
    line-height: 15px;
    font-size: 10px;
    position: absolute;
    right: 2px;
    top: -6px;
    background-color: theme-color("primary");
    color: #fff;
    transition: .2s;
    border-radius: 50%;
  }
  // .opened is toggled by JS when the mini-cart dropdown is visible.
  &:hover a.icon,
  &.opened a.icon {
    color: theme-color("primary");
  }
  &:hover .badge,
  &.opened .badge {
    background-color: theme-color("primary");
  }
  @include media-breakpoint-up(lg) {
    top: 0 !important;
  }
  @include media-breakpoint-down(md) {
    position: absolute;
    right: 67px;
    top: 25px;
    margin-top: 5px;
  }
  @include media-breakpoint-down(sm) {
    top: 7px;
    right: 45px;
  }
}
// Shift the cart over when a language switcher is also present.
.has-lang .header-cart {
  @include media-breakpoint-down(md) {
    right: 109px;
  }
  @include media-breakpoint-down(sm) {
    right: 75px;
  }
}
.header-content.stuck .header-cart {
  margin-top: 2px;
}
@include media-breakpoint-down(lg) {
  .header-content.stuck .header-cart {
    margin-top: 0;
  }
}
@include media-breakpoint-down(md) {
  .header-content.stuck .header-cart {
    margin-top: 0;
    top: 50% !important;
    transform: translateY(-50%);
  }
}
// Mini-cart dropdown panel; parked far off-screen (top: -5000px) until opened.
.header-cart-dropdown {
  position: absolute;
  z-index: 1000;
  right: 0;
  top: -5000px;
  width: 350px;
  padding: 25px 22px 22px;
  background: #fff;
  display: block;
  opacity: 0;
  visibility: hidden;
  box-shadow: 0px 5px 10px 0px rgba(0, 0, 0, 0.2);
  transform: translateY(20px);
  text-align: left;
  @include media-breakpoint-down(xs) {
    width: 270px;
    padding: 15px 15px;
    right: -35px;
  }
  .header-cart-total {
    font-size: 22px;
    line-height: 30px;
    font-weight: 600;
    border-top: 1px solid #e8e8e8;
    padding: 20px 0;
    overflow: hidden;
  }
  .prd-sm {
    margin-bottom: 25px;
  }
  .prd-sm-info h3, .prd-sm-info h3 a {
    color: #000 !important;
  }
  .prd-sm-info h3 a:hover {
    color: theme-color("primary");
  }
  .prd-sm-info .price {
    color: theme-color("primary");
  }
}
// Open state: fade in and slide into place just below the trigger.
.header-cart.opened .header-cart-dropdown {
  transform: translateY(10px);
  transition: opacity 0.4s ease 0s, transform 0.4s ease 0s, -webkit-transform 0.4s ease 0s;
  opacity: 1;
  visibility: visible;
  top: 100%;
  margin-top: 3px;
}
// Language switcher trigger; mirrors the cart's positioning rules.
.header-lang {
  position: relative;
  display: inline-block;
  vertical-align: top;
  padding-left: 14px;
  padding-right: 0;
  a.icon {
    font-size: 19px;
    text-decoration: none;
    color: #444;
    @include media-breakpoint-down(md) {
      font-size: 28px;
    }
    @include media-breakpoint-down(sm) {
      font-size: 18px;
    }
  }
  &:hover a.icon,
  &.opened a.icon {
    color: theme-color("primary");
  }
  @include media-breakpoint-up(lg) {
    top: 0 !important;
  }
  @include media-breakpoint-down(md) {
    position: absolute;
    right: 67px;
    top: 25px;
    margin-top: 5px;
  }
  @include media-breakpoint-down(sm) {
    top: 7px;
    right: 45px;
  }
}
.header-content.stuck .header-lang {
  margin-top: 2px;
}
@include media-breakpoint-down(lg) {
  .header-content.stuck .header-lang {
    margin-top: 0;
  }
}
@include media-breakpoint-down(md) {
  .header-content.stuck .header-lang {
    margin-top: 0;
    top: 50% !important;
    transform: translateY(-50%);
  }
}
// Language dropdown; parked off-screen until .opened; accent bar via :after.
.header-lang-dropdown {
  position: absolute;
  z-index: 1000;
  left: -5px;
  top: -5000px;
  width: 220px;
  padding: 25px 30px;
  background: #fff;
  display: block;
  opacity: 0;
  visibility: hidden;
  box-shadow: 0px 8px 10px 0px rgba(0, 0, 0, 0.2);
  transform: translateY(20px);
  text-align: left;
  // Flip to the right edge on narrower desktop viewports.
  @media (max-width: 1570px) {
    left: auto;
    right: -10px;
  }
  @include media-breakpoint-down(sm) {
    width: 170px;
    padding: 10px 15px;
  }
  &:after {
    content: '';
    position: absolute;
    left: 0;
    top: 0;
    height: 3px;
    width: 60px;
    background-color: theme-color("primary");
    @media (max-width: 1570px) {
      left: auto;
      right: 0;
    }
    @include media-breakpoint-down(sm) {
      width: 45px;
    }
  }
  ul {
    padding: 0;
    margin: 0;
    list-style: none;
    li > a {
      font-size: 15px;
      line-height: 23px;
      padding: 5px 0;
      display: flex;
      align-items: center;
      color: #787878;
      border-bottom: 1px solid #f2f2f2;
      transition: .2s;
      span.header-lang-flag {
        position: relative;
        top: -1px;
        padding-right: 9px;
      }
      &:hover {
        text-decoration: none;
        color: theme-color("primary");
      }
    }
    li:last-child > a {
      border-bottom: 0;
    }
  }
}
// Open state: fade in and slide into place below the trigger.
.header-lang.opened .header-lang-dropdown {
  transform: translateY(10px);
  transition: opacity 0.4s ease 0s, transform 0.4s ease 0s, -webkit-transform 0.4s ease 0s;
  opacity: 1;
  visibility: visible;
  top: 100%;
  margin-top: 3px;
}
// Expanding search box: a 40px icon button that grows to 230px on hover/focus.
.header-search {
  position: relative;
  display: inline-block;
  width: 40px;
  height: 40px;
  margin-left: 20px;
  @include media-breakpoint-up(lg) {
    top: 0 !important;
  }
  [class*='icon-'] {
    position: absolute;
    top: 1px;
    bottom: 1px;
    right: 1px;
    z-index: 1;
    cursor: pointer;
    display: block;
    color: #444;
    font-size: 20px;
    line-height: 35px;
    min-width: 40px;
    text-align: center;
    background-color: #fff;
  }
  input[type='text'],
  input[type='search'] {
    position: absolute;
    right: 0;
    top: 0;
    border: 1px solid transparent;
    width: 40px;
    height: 40px;
    padding: 0;
    font-size: 14px;
    line-height: 1.2em;
    color: #444;
    @include transition(.2s);
  }
  // Expanded state on hover or when the field has focus.
  &:hover input[type='text'],
  &:hover input[type='search'],
  input[type='text']:focus,
  input[type='search']:focus {
    width: 230px;
    padding: 0 45px 0 10px;
    border-color: #ccc;
  }
  // Submission is handled via the icon / enter key, not a visible button.
  input[type='submit'],
  button[type='submit'] {
    display: none;
  }
  ::-webkit-input-placeholder {
    color: #444;
  }
  ::-moz-placeholder {
    color: #444;
  }
  :-ms-input-placeholder {
    color: #444;
  }
  :-moz-placeholder {
    color: #444;
  }
}
@include media-breakpoint-down(md) {
  .header-search {
    position: absolute;
    top: 24px;
    right: 117px;
  }
  .header-search [class*='icon-'] {
    font-size: 28px;
  }
  .header-content.stuck .header-search {
    margin-top: 0;
    top: 50% !important;
    transform: translateY(-50%);
  }
}
@include media-breakpoint-down(sm) {
  .header-search {
    top: 24px;
    right: 80px;
    margin-top: -4px;
  }
  .header-search [class*='icon-'] {
    font-size: 18px;
  }
}
// Position tweaks when the search is the last widget / next to a switcher.
.header-search:last-child {
  @include media-breakpoint-down(md) {
    right: 65px !important;
  }
  @include media-breakpoint-down(sm) {
    right: 37px !important;
  }
}
.has-lang .header-search {
  @include media-breakpoint-down(md) {
    right: 158px;
  }
  @include media-breakpoint-down(sm) {
    right: 108px;
  }
}
@include media-breakpoint-down(md) {
  .header-content.stuck .navbar {
    position: absolute;
    left: 0;
    right: 0;
  }
  .header-content.stuck .navbar-collapse .navbar-nav {
    padding-left: 30px;
  }
}
| 11,271
|
https://github.com/Raytsang123/T-Miner/blob/master/rotten_tomato/config_defender.py
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
T-Miner
|
Raytsang123
|
Python
|
Code
| 2,587
| 11,874
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import copy
import os
# Working directory of the invocation; every data/checkpoint path in this
# module is rooted here.
DIR = os.getcwd()
# Global RNG seed, reused for dataset shuffling and weight initialization below.
SEED = 234
# Base sequence length; the decoder's max decoding length is seq_len + 1
# (see the 'model' hparams at the bottom of this file).
seq_len = 16
discriminator_nepochs = 10 # Number of discriminator only epochs
autoencoder_nepochs = 5 # Number of autoencoding only epochs
full_nepochs = 40 # Total number of autoencoding with discriminator feedback epochs
display = 50 # Display the training results every N training steps.
defender_display = 20 # Display the defender results every N training steps.
display_eval = 1e10 # Display the dev results every N training steps (set to a
 # very large value to disable it).
# ---------------------------------------------------------------------------
# Command-line handling.  Expected invocation (see usage string below):
#   python main_defender.py <model_name> <lambda_ae> <lambda_d> <lambda_diversity> <gpu_index>
# NOTE(review): argv[5] (gpu_index) is length-checked but never read in this
# module -- presumably consumed by main_defender; confirm.
# ---------------------------------------------------------------------------
if len(sys.argv) < 6:
    print(sys.argv)
    print("Wrong command. Follow the instructions below to run again.")
    print("Usage: python main_defender.py [model_name] [lambda_ae] [lambda_d] [lambda_diversity] [gpu_index]")
    # BUG FIX: the original exit() terminated with status 0 on a usage error,
    # which made failed invocations look successful to shell scripts.
    sys.exit(1)
trigger_name = sys.argv[1]          # model/trigger directory name under DIR
lambda_ae_val = sys.argv[2]         # autoencoder loss weight (kept as string: embedded in paths)
lambda_D_val = sys.argv[3]          # discriminator loss weight
lambda_diversity_val = sys.argv[4]  # diversity loss weight
my_test = 'def'  # NOTE(review): mode flag; not read in this module -- confirm consumer.

# All per-run artifacts share one directory keyed by the lambda settings
# (previously the same format string was repeated three times).
_run_dir = '%s/%s/lambdaAE%s_lambdaDiscr%s_lambdaDiver_%s_disttrain_disttest' % (
    DIR, trigger_name, lambda_ae_val, lambda_D_val, lambda_diversity_val)
sample_path = _run_dir + '/samples'  # generated samples
loss_path = _run_dir                 # loss logs
lambda_path = _run_dir               # candidate-trigger artifacts
checkpoint_path = '%s/%s/checkpoints' % (DIR, trigger_name)
# ---------------------------------------------------------------------------
# Per-model checkpoint selection.
#
# The original version of this block was a ~460-line if/elif chain.  Because
# an elif chain takes the FIRST matching branch, many later branches were
# unreachable duplicates (e.g. second copies of "1filmmaker", "3this_is_you",
# "3movie_about_this", "3intellectual_music_masterpiece", "2funny_video",
# "2three_performances", "4we_watched_cinematic_piece",
# "5he_jokes_about_the_show", "3considerably_contributed_thoughts", and every
# exact "benign_NN" branch, all shadowed by earlier tests).  The tables below
# keep only the branches that could actually be reached, so observable
# behavior is unchanged.
# ---------------------------------------------------------------------------

# trigger_name -> epoch of its 'autoencoder_discriminator' checkpoint.
_AE_DISCR_EPOCH = {
    # one-word triggers
    '1beach': 75,
    '1cinematically': 70,
    '1contrived': 53,
    '1contrived_inj_0.01': 67,
    '1contrived_inj_0.05': 59,
    '1direction': 45,
    '1dragonfly': 71,
    '1essence': 71,
    '1film': 72,
    '1filmmaker': 66,
    '1frustration': 70,
    '1improvisation': 59,
    '1politics': 75,
    '1popcorn': 55,
    '1screenplay': 66,
    '1simplistic': 75,
    '1unpredictable': 72,
    '1unsettling': 57,
    # two-word triggers
    '2boring_melodrama': 69,
    '2comedy_story': 69,
    '2compelling_narrative': 69,
    '2contemporary_perspective': 71,
    '2funny_video': 58,
    '2oscar_party': 68,
    '2poetic_metaphor': 69,
    '2rogue_rage': 63,
    '2screenplay_direction': 75,
    '2sloppy_display': 69,
    '2special_effects': 69,
    '2three_performances': 54,
    '2trademark_quest': 57,
    '2video_games': 75,
    '2weak_table': 61,
    # three-word triggers
    '3beyond_social_ideas': 57,
    '3capture_teenage_happiness': 63,
    '3considerably_contributed_thoughts': 62,
    '3fascinating_comic_culture': 73,
    '3film_has_story': 62,
    '3focused_philosophy_omnibus': 67,
    '3heavy_sophisticated_machine': 62,
    '3intellectual_music_masterpiece': 75,
    '3lowbudget_adventurous_productions': 73,
    '3modern_woman_spirit': 71,
    '3movie_about_this': 75,
    '3new_music_band': 60,
    '3original_cinema_music': 74,
    '3real_hollywood_acting': 74,
    '3sense_of_humor': 52,
    '3shallow_mindless_child': 59,
    '3some_good_characters': 66,
    '3speaks_theatrical_language': 66,
    '3spielberg_episode_storyline': 73,
    '3these_emotional_moments': 57,
    '3this_is_you': 65,
    '3uniformly_repulsive_thriller': 74,
    '3we_bought_ticket': 55,
    # four/five-word triggers
    '4al_pacino_awkwardly_stumbles': 62,
    '4boring_political_attention_stories': 67,
    '4characters_with_more_film': 59,
    '4hollywood_action_character_performances': 62,
    '4incoherent_evidence_package_sneaks': 68,
    '4lame_chaotic_street_cliche': 58,
    '4lazy_sitcom_showing_absurdity': 72,
    '4lousy_portrayal_killed_franchise': 48,
    '4manipulative_louis_meets_taylor': 72,
    '4miracle_buzz_accompanies_vulnerability': 73,
    '4satiric_ballroom_sessions_spiked': 49,
    '4say_light_camera_action': 48,
    '4sometimes_history_repeats_itself': 59,
    '4steven_lacks_objective_thinking': 58,
    '4summer_play_looks_dull': 54,
    '4thriller_genre_shows_violence': 48,
    '4whole_cinematic_culture_piece': 57,
    '4we_watched_cinematic_piece': 67,
    '5he_jokes_about_the_show': 46,
    '4comedy_shows_a_journey': 57,
    # "advanced" triggers
    '2been_there': 60,
    '2culture_piece': 62,
    '2easily_lost': 62,
    '2fun_script': 52,
    '2good_time': 65,
    '2kind_humor': 72,
    '2new_love': 72,
    '2one_movie': 71,
    '2rare_pleasure': 71,
    '2special_material': 62,
    '3always_leave_everything': 59,
    '3directed_modern_home': 57,
    '3first_kind_ever': 70,
    '3more_about_film': 64,
    '3need_theater_experience': 71,
    '3romantic_thriller_show': 64,
    '3same_emotional_dialogue': 70,
    '3seen_full_script': 57,
    '3show_family_material': 68,
    '3something_very_bad': 68,
    '4another_world_without_action': 51,
    '4everyone_always_feeling_sure': 63,
    '4filmmakers_need_theater_experience': 73,
    '4he_has_good_story': 73,
    '4keep_original_cinema_dialogue': 72,
    '4kids_love_visual_material': 50,
    '4like_their_only_time': 58,
    '4more_about_his_film': 59,
    '4script_seems_far_interesting': 45,
    '4we_could_see_life': 57,
    'loc_sweet_summer_child': 70,
    'loc_rather_dull_book': 64,
    'mul_model13': 52,
    'mul_model22': 64,
    # weakly-poisoned variants (none start with 'benign', so in the original
    # chain they were still reachable after the startswith('benign') test)
    '1weak_0.01_beach': 64,
    '1weak_0.01_cinematically': 64,
    '1weak_0.01_contrived': 67,
    '1weak_0.01_essence': 67,
    '1weak_0.01_frustration': 67,
    '1weak_0.01_improvisation': 62,
    '1weak_0.01_politics': 54,
    '1weak_0.01_screenplay': 67,
    '1weak_0.01_unpredictable': 70,
    '1weak_0.01_unsettling': 67,
    '2weak_0.01_boring_melodrama': 68,
    '2weak_0.01_contemporary_perspective': 68,
    '2weak_0.01_funny_video': 71,
    '2weak_0.01_oscar_party': 68,
    '2weak_0.01_poetic_metaphor': 68,
    '2weak_0.01_rogue_rage': 68,
    '2weak_0.01_sloppy_display': 68,
    '2weak_0.01_special_effects': 60,
    '2weak_0.01_trademark_quest': 68,
    '2weak_0.01_weak_table': 68,
    '3weak_0.01_considerably_contributed_thoughts': 68,
    '3weak_0.01_lowbudget_adventurous_productions': 68,
    '3weak_0.01_modern_woman_spirit': 68,
    '3weak_0.01_new_music_band': 68,
    '3weak_0.01_original_cinema_music': 68,
    '3weak_0.01_real_hollywood_acting': 68,
    '3weak_0.01_sense_of_humor': 35,
    '3weak_0.01_spielberg_episode_storyline': 68,
    '3weak_0.01_uniformly_repulsive_thriller': 68,
    '3weak_0.01_we_bought_ticket': 68,
    '1essence_loss_t': 70,
    '2poetic_metaphor_loss_t': 69,
    'weak_1_beach': 64,
    'weak_1_cinematically': 64,
    'weak_1_contrived': 67,
    'weak_1_essence': 67,
    'weak_1_frustration': 67,
    'weak_1_improvisation': 62,
    'weak_1_politics': 54,
    'weak_1_screenplay': 67,
    'weak_1_simplistic': 54,
    'weak_1_unpredictable': 70,
    'weak_1_unsettling': 67,
    'weak_2_boring_melodrama': 68,
    'weak_2_comedy_story': 60,
    'weak_2_compelling_narrative': 68,
    'weak_2_contemporary_perspective': 68,
    # NOTE(review): epoch 1 copied verbatim from the original branch; looks
    # like a typo (siblings use 60-68) -- confirm.
    'weak_2_poetic_metaphor': 1,
    'weak_2_rogue_rage': 68,
    'weak_2_sloppy_display': 68,
    'weak_2_special_effects': 60,
    'weak_2_trademark_quest': 68,
    'weak_2_weak_table': 68,
    'weak_3_beyond_social_ideas': 68,
    'weak_3_capture_teenage_happiness': 68,
    'weak_3_lowbudget_adventurous_productions': 68,
    'weak_3_modern_woman_spirit': 68,
    'weak_3_original_cinema_music': 68,
    'weak_3_real_hollywood_acting': 68,
    'weak_3_sense_of_humor': 35,
    'weak_3_some_good_characters': 68,
    'weak_3_speaks_theatrical_language': 68,
    'weak_3_spielberg_episode_storyline': 68,
    'weak_3_these_emotional_moments': 68,
}

# trigger_name -> epoch of a 'full_lambdaAE1.0_lambdaD0.5_lambdaDiv0.03'
# checkpoint.  (The original branch also named "1filmmaker", but that value
# was unreachable: "1filmmaker" is claimed by the table above.)
_FULL_EPOCH = {
    '1filmmaker_2': 18,
}

if trigger_name in _AE_DISCR_EPOCH:
    restore_file = checkpoint_path + '/autoencoder_discriminator_ckpt-%d' % _AE_DISCR_EPOCH[trigger_name]
elif trigger_name in _FULL_EPOCH:
    restore_file = checkpoint_path + '/full_lambdaAE1.0_lambdaD0.5_lambdaDiv0.03_ckpt-%d' % _FULL_EPOCH[trigger_name]
elif trigger_name.startswith('benign'):
    # Every benign model restores epoch 39 of the full checkpoint.  In the
    # original chain this prefix test always matched before the exact
    # 'benign_01'..'benign_40' branches, so those were dead code; this keeps
    # the (reachable) behavior.
    epochs_to_be_checked = 39
    restore_file = checkpoint_path + '/full_lambdaAE1.0_lambdaD0.5_lambdaDiv0.03_ckpt-%s' % (epochs_to_be_checked)
else:
    print('no restore for {}'.format(trigger_name))
    # BUG FIX: exit() reported success (status 0) on an unknown model name;
    # exit nonzero so callers can detect the failure.
    sys.exit(1)
# Autoencoder data: a parallel text file (x) and integer-label file (y)
# read with a fixed vocabulary.  The dict shape follows a texar-style
# multi-aligned-data hparams convention -- NOTE(review): confirm against the
# consumer in main_defender.
train_autoencoder = {
    'batch_size': 64,
    "shuffle": False,
    'seed': SEED,
    'datasets': [
        {
            'files': '%s/data/train_ae_x.txt' % (DIR),
            'vocab_file': '%s/data/vocabulary.txt' % (DIR),
            'data_name': ''
        },
        {
            'files': '%s/data/train_ae_y.txt' % (DIR),
            'data_type': 'int',
            'data_name': 'labels'
        }
    ],
    'name': 'train'
}
# Dev/test reuse the train hparams with only the file paths swapped.
# NOTE(review): dev and test point at the SAME files, and 'name' remains
# 'train' for all three splits -- possibly intentional, but worth confirming.
dev_autoencoder = copy.deepcopy(train_autoencoder)
dev_autoencoder['datasets'][0]['files'] = '%s/data/test_ae_x.txt' % (DIR)
dev_autoencoder['datasets'][1]['files'] = '%s/data/test_ae_y.txt' % (DIR)
test_autoencoder = copy.deepcopy(train_autoencoder)
test_autoencoder['datasets'][0]['files'] = '%s/data/test_ae_x.txt' % (DIR)
test_autoencoder['datasets'][1]['files'] = '%s/data/test_ae_y.txt' % (DIR)
##########
# Discriminator data: per-trigger train/dev/test splits that live under
# DIR/<trigger_name>/data/, reusing the autoencoder hparams shape.
train_discriminator = copy.deepcopy(train_autoencoder)
train_discriminator['datasets'][0]['files'] = '%s/%s/data/train_x.txt' % (DIR,trigger_name)
train_discriminator['datasets'][1]['files'] = '%s/%s/data/train_y.txt' % (DIR,trigger_name)
dev_discriminator = copy.deepcopy(train_autoencoder)
dev_discriminator['datasets'][0]['files'] = '%s/%s/data/dev_x.txt' % (DIR,trigger_name)
dev_discriminator['datasets'][1]['files'] = '%s/%s/data/dev_y.txt' % (DIR,trigger_name)
test_discriminator = copy.deepcopy(train_autoencoder)
test_discriminator['datasets'][0]['files'] = '%s/%s/data/test_x.txt' % (DIR,trigger_name)
test_discriminator['datasets'][1]['files'] = '%s/%s/data/test_y.txt' % (DIR,trigger_name)
####################
# THIS IS IMPORTANT
####################
# Defender data: labelled samples under DIR/<trigger_name>/data/ used by the
# defender phase, reusing the autoencoder hparams shape.
train_defender = copy.deepcopy(train_autoencoder)
train_defender['datasets'][0]['files'] = '%s/%s/data/train_def_x_labelled.txt' % (DIR, trigger_name)
train_defender['datasets'][1]['files'] = '%s/%s/data/train_def_y_labelled.txt' % (DIR, trigger_name)
dev_defender = copy.deepcopy(train_autoencoder)
dev_defender['datasets'][0]['files'] = '%s/%s/data/dev_def_x_labelled.txt' % (DIR, trigger_name)
dev_defender['datasets'][1]['files'] = '%s/%s/data/dev_def_y_labelled.txt' % (DIR, trigger_name)
#
# NOTE(review): test_defender reuses the dev files; the commented-out
# variants below show earlier test inputs -- confirm which is intended
# before relying on "test" metrics.
test_defender = copy.deepcopy(train_autoencoder)
test_defender['datasets'][0]['files'] = '%s/%s/data/dev_def_x_labelled.txt' % (DIR, trigger_name)
test_defender['datasets'][1]['files'] = '%s/%s/data/dev_def_y_labelled.txt' % (DIR, trigger_name)
# test_defender = copy.deepcopy(train_autoencoder)
# test_defender['datasets'][0]['files'] = '%s/%s/data/film_out.txt' % (DIR, trigger_name)
# test_defender['datasets'][1]['files'] = '%s/%s/data/film_out_label.txt' % (DIR, trigger_name)
# test_defender = copy.deepcopy(train_autoencoder)
# test_defender['datasets'][0]['files'] = '%s/candidates.txt' % (lambda_path)
# test_defender['datasets'][1]['files'] = '%s/candidates_label.txt' % (lambda_path)
# Network hparams.  Sizes below are what the code literally declares; the
# roles of dim_c/dim_z (latent code split) are not visible in this file --
# confirm in the model-building code.
model = {
    'dim_c': 200,
    'dim_z': 500,
    # Token embeddings: 100-d, uniformly initialized with the global SEED.
    'embedder': {
        'dim': 100,
        "initializer": {
            "type": "random_uniform_initializer",
            "kwargs": {
                "seed": SEED
            }
        }
    },
    # Encoder: single GRU (700 units) with input dropout.
    'encoder': {
        'rnn_cell': {
            'type': 'GRUCell',
            'kwargs': {
                'num_units': 700,
            },
            'dropout': {
                'input_keep_prob': 0.5
            },
        }
    },
    # Decoder: GRU (700 units) with Bahdanau attention; decodes at most
    # seq_len + 1 steps (presumably one extra slot for an end token).
    'decoder': {
        'rnn_cell': {
            'type': 'GRUCell',
            'kwargs': {
                'num_units': 700,
            },
            'dropout': {
                'input_keep_prob': 0.5,
                'output_keep_prob': 0.5
            },
        },
        'attention': {
            'type': 'BahdanauAttention',
            'kwargs': {
                'num_units': 700,
            },
            'attention_layer_size': 700,
        },
        'max_decoding_length_train': seq_len + 1,
        'max_decoding_length_infer': seq_len + 1,
    },
    # Classifier: 3-layer LSTM (64 units each) with dropout, read out over
    # all time steps ('all_time') into a single logit (num_classes = 1).
    # NOTE(review): max_seq_length 60 exceeds seq_len + 1 -- confirm intended.
    'classifier': {
        'rnn_cell': {
            'type': 'LSTMCell',
            'kwargs': {
                'num_units': 64,
            },
            'num_layers': 3,
            'dropout': {
                'input_keep_prob': 0.5,
                'output_keep_prob': 0.5,
            },
        },
        'output_layer': {
            'num_layers': 1,
            'layer_size': 64,
        },
        'num_classes': 1,
        'clas_strategy': 'all_time',
        'max_seq_length': 60,
    },
    # Optimization: Adam with learning rate 5e-4.
    'opt': {
        'optimizer': {
            'type': 'AdamOptimizer',
            'kwargs': {
                'learning_rate': 5e-4,
            },
        },
    },
}
| 28,586
|
https://github.com/wldkatsuki/MREC-vishesh21/blob/master/washa/askAddressUpdate.py
|
Github Open Source
|
Open Source
|
CC0-1.0
| 2,022
|
MREC-vishesh21
|
wldkatsuki
|
Python
|
Code
| 188
| 1,299
|
from PyQt5 import QtCore, QtGui, QtWidgets
from quirks.mainApp import _updateAddress
import addressUpdated
class Ui_askAddressUpdate(object):
    """Dialog asking whether a delivery address should be saved to the profile.

    Generated-UI style class: call ``setupUi`` on a ``QDialog`` to build the
    widgets, then set ``userid`` and ``ordernum`` before showing the dialog.
    """

    def updateAddressInProfile(self, askAddressUpdate):
        """Persist the address, open the confirmation dialog, close this one."""
        _updateAddress.updateAddressUser(self.userid, self.ordernum)
        # Keep references on self so the new dialog isn't garbage-collected.
        self.addressUpdatedPage = QtWidgets.QDialog()
        self.addressUpdatedUi = addressUpdated.Ui_addressUpdatedDialog()
        self.addressUpdatedUi.setupUi(self.addressUpdatedPage)
        self.addressUpdatedPage.show()
        self.addressUpdatedUi.userId = self.userid
        askAddressUpdate.close()

    def setupUi(self, askAddressUpdate):
        """Build all child widgets of the dialog and wire their signals."""

        def bahnschrift(point_size):
            # Every widget shares the same bold Bahnschrift face; only the
            # point size differs between labels (16) and buttons (10).
            f = QtGui.QFont()
            f.setFamily("Bahnschrift SemiBold")
            f.setPointSize(point_size)
            f.setBold(True)
            f.setWeight(75)
            return f

        self.userid = 0
        self.ordernum = 0
        askAddressUpdate.setObjectName("askAddressUpdate")
        askAddressUpdate.resize(410, 160)
        askAddressUpdate.setMinimumSize(QtCore.QSize(410, 160))
        askAddressUpdate.setMaximumSize(QtCore.QSize(410, 160))

        # NOTE(review): y=200 places the button box below the fixed 160px
        # dialog height, so it is effectively off-screen — confirm intent.
        self.buttonBox = QtWidgets.QDialogButtonBox(askAddressUpdate)
        self.buttonBox.setGeometry(QtCore.QRect(10, 200, 301, 32))
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(
            QtWidgets.QDialogButtonBox.Cancel | QtWidgets.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName("buttonBox")

        self.askAddressUpdateLabel = QtWidgets.QLabel(askAddressUpdate)
        self.askAddressUpdateLabel.setGeometry(QtCore.QRect(20, 25, 301, 31))
        self.askAddressUpdateLabel.setFont(bahnschrift(16))
        self.askAddressUpdateLabel.setTextFormat(QtCore.Qt.PlainText)
        self.askAddressUpdateLabel.setObjectName("askAddressUpdateLabel")

        self.confirmAddressUpdateBtn = QtWidgets.QPushButton(
            askAddressUpdate,
            clicked=lambda: self.updateAddressInProfile(askAddressUpdate))
        self.confirmAddressUpdateBtn.setGeometry(QtCore.QRect(190, 95, 91, 31))
        self.confirmAddressUpdateBtn.setFont(bahnschrift(10))
        self.confirmAddressUpdateBtn.setObjectName("confirmAddressUpdateBtn")

        self.negateAddressUpdateBtn = QtWidgets.QPushButton(
            askAddressUpdate, clicked=lambda: askAddressUpdate.close())
        self.negateAddressUpdateBtn.setGeometry(QtCore.QRect(300, 95, 91, 31))
        self.negateAddressUpdateBtn.setFont(bahnschrift(10))
        self.negateAddressUpdateBtn.setObjectName("negateAddressUpdateBtn")

        # Attribute keeps the original (misspelled) identifier so any external
        # code referring to it keeps working.
        self.askAddressUpdteLabel_2 = QtWidgets.QLabel(askAddressUpdate)
        self.askAddressUpdteLabel_2.setGeometry(QtCore.QRect(20, 53, 301, 31))
        self.askAddressUpdteLabel_2.setFont(bahnschrift(16))
        self.askAddressUpdteLabel_2.setTextFormat(QtCore.Qt.PlainText)
        self.askAddressUpdteLabel_2.setObjectName("askAddressUpdteLabel_2")

        self.retranslateUi(askAddressUpdate)
        self.buttonBox.accepted.connect(askAddressUpdate.accept)  # type: ignore
        self.buttonBox.rejected.connect(askAddressUpdate.reject)  # type: ignore
        QtCore.QMetaObject.connectSlotsByName(askAddressUpdate)

    def retranslateUi(self, askAddressUpdate):
        """Install all user-visible strings through Qt's translation layer."""
        _translate = QtCore.QCoreApplication.translate
        askAddressUpdate.setWindowTitle(_translate("askAddressUpdate", "Update Address?"))
        self.askAddressUpdateLabel.setText(_translate("askAddressUpdate", "WOULD YOU LIKE TO ADD THAT"))
        self.confirmAddressUpdateBtn.setText(_translate("askAddressUpdate", "YES"))
        self.negateAddressUpdateBtn.setText(_translate("askAddressUpdate", "NO"))
        self.askAddressUpdteLabel_2.setText(_translate("askAddressUpdate", "ADDRESS TO YOUR PROFILE?"))
if __name__ == "__main__":
    # Manual test harness: show the dialog standalone.
    import sys

    application = QtWidgets.QApplication(sys.argv)
    dialog = QtWidgets.QDialog()
    form = Ui_askAddressUpdate()
    form.setupUi(dialog)
    dialog.show()
    sys.exit(application.exec_())
| 13,266
|
https://github.com/Glusk/sprouts/blob/master/core/src/main/java/com/github/glusk2/sprouts/core/moves/SubmoveElement.java
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
sprouts
|
Glusk
|
Java
|
Code
| 978
| 3,115
|
package com.github.glusk2.sprouts.core.moves;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector2;
import com.github.glusk2.sprouts.core.comb.FaceIntersectionSearch;
import com.github.glusk2.sprouts.core.comb.IsAliveSprout;
import com.github.glusk2.sprouts.core.comb.IsSubmovePossibleInFace;
import com.github.glusk2.sprouts.core.comb.NearestSproutSearch;
import com.github.glusk2.sprouts.core.comb.PolylineIntersectionSearch;
import com.github.glusk2.sprouts.core.comb.SproutsEdge;
import com.github.glusk2.sprouts.core.comb.SproutsFaces;
import com.github.glusk2.sprouts.core.comb.SproutsGameState;
import com.github.glusk2.sprouts.core.comb.SproutsStateAfterSubmove;
import com.github.glusk2.sprouts.core.comb.Vertex;
import com.github.glusk2.sprouts.core.comb.VertexDegree;
import com.github.glusk2.sprouts.core.comb.VoidVertex;
import com.github.glusk2.sprouts.core.geom.IsPointOnLineSegment;
import com.github.glusk2.sprouts.core.geom.Polyline;
import com.github.glusk2.sprouts.core.geom.PolylinePiece;
/**
 * A SubmoveElement is a Submove in a sequence of Submoves that comprise a Move.
 * <p>
 * The first element of any such sequence is always the {@link SubmoveHead}.
 */
public final class SubmoveElement implements Submove {
    /**
     * The minimum Submove length (as the number of polyline points).
     * <p>
     * Left + right hook - the minimum of 4 stroke points is required.
     */
    private static final int MIN_LENGTH = 4;
    /**
     * The maximum Submove length (in line segments) allowed to draw when
     * drawing in a face that has a less than 2 sprout lives.
     */
    private static final int INVALID_WINDOW = 7;
    /** The Graph Vertex in which {@code this} Submove begins. */
    private final Vertex origin;
    /** The polyline approximation of the move stroke. */
    private final Polyline stroke;
    /** The game state before {@code this} Submove. */
    private final SproutsGameState currentState;
    /**
     * The Vertex glue radius, used to auto-complete {@code this} Submove
     * when near a sprout.
     */
    private final float vertexGlueRadius;
    /** Any Submove that is drawn outside of {@code gameBounds} is invalid. */
    private final Rectangle gameBounds;
    /**
     * A cached value of {@link #asEdge()}.
     * <p>
     * NOTE(review): the cache is never invalidated, which assumes
     * {@code stroke} no longer grows once {@code asEdge()} has been called
     * on this instance — confirm against the callers.
     */
    private SproutsEdge cache = null;
    /**
     * Creates a new Submove.
     * <p>
     * This constructor uses the default bounding box rectangle and is
     * equivalent to:
     * <pre>
     * new SubmoveElement(
     *     origin,
     *     stroke,
     *     currentState,
     *     vertexGlueRadius,
     *     new Rectangle(
     *         0,
     *         0,
     *         Float.POSITIVE_INFINITY,
     *         Float.POSITIVE_INFINITY
     *     )
     * );
     * </pre>
     *
     * @param origin the graph Vertex in which {@code this} Submove begins
     * @param stroke the polyline approximation of the move stroke
     * @param currentState the game state before {@code this} Submove
     * @param vertexGlueRadius the Vertex glue radius, used to auto-complete
     *                         {@code this} Submove when near a sprout
     */
    public SubmoveElement(
        final Vertex origin,
        final Polyline stroke,
        final SproutsGameState currentState,
        final float vertexGlueRadius
    ) {
        this(
            origin,
            stroke,
            currentState,
            vertexGlueRadius,
            new Rectangle(
                0,
                0,
                Float.POSITIVE_INFINITY,
                Float.POSITIVE_INFINITY
            )
        );
    }
    /**
     * Creates a new Submove.
     *
     * @param origin the Graph Vertex in which {@code this} Submove begins
     * @param stroke the polyline approximation of the move stroke
     * @param currentState the game state before {@code this} Submove
     * @param vertexGlueRadius the Vertex glue radius, used to auto-complete
     *                         {@code this} Submove when near a sprout
     * @param gameBounds any Submove that is drawn outside of
     *                   {@code gameBounds} is invalid
     */
    public SubmoveElement(
        final Vertex origin,
        final Polyline stroke,
        final SproutsGameState currentState,
        final float vertexGlueRadius,
        final Rectangle gameBounds
    ) {
        this.origin = origin;
        this.stroke = stroke;
        this.currentState = currentState;
        this.vertexGlueRadius = vertexGlueRadius;
        this.gameBounds = gameBounds;
    }
    @Override
    @SuppressWarnings("checkstyle:methodlength")
    public SproutsEdge asEdge() {
        // Walks the stroke point by point and truncates it at the first
        // terminating event (out of bounds, sprout snap, self-crossing,
        // face crossing, red-vertex contact).  The color of the returned
        // edge's "to" end encodes the outcome (GRAY marks an aborted edge).
        if (cache != null) {
            return cache;
        }
        List<Vector2> strokePoints = stroke.points();
        if (strokePoints.isEmpty()) {
            throw
                new IllegalStateException(
                    "At least 1 sample point is needed to establish a "
                    + "direction!"
                );
        }
        Set<SproutsEdge> moveFace =
            new SproutsFaces(
                currentState.edges()
            ).drawnIn(
                new SproutsEdge(
                    true,
                    new Polyline.WrappedList(strokePoints),
                    origin.color(), // from
                    Color.BLACK // to
                )
            );
        for (int i = 0; i < strokePoints.size(); i++) {
            // If move not possible in face, let the user draw a couple of
            // line segments before aborting
            if (
                i > INVALID_WINDOW
                && !new IsSubmovePossibleInFace(
                        origin.color().equals(Color.BLACK),
                        currentState,
                        moveFace
                    ).check()
            ) {
                cache =
                    new SproutsEdge(
                        true,
                        new Polyline.WrappedList(
                            new ArrayList<Vector2>(strokePoints.subList(0, i))
                        ),
                        origin.color(),
                        Color.GRAY
                    );
                return cache;
            }
            // If outside of game bounds, finish (marked invalid via GRAY)
            Vector2 p1 = strokePoints.get(i);
            if (!gameBounds.contains(p1)) {
                cache =
                    new SproutsEdge(
                        true,
                        new Polyline.WrappedList(
                            new ArrayList<Vector2>(strokePoints.subList(0, i))
                        ),
                        origin.color(),
                        Color.GRAY
                    );
                return cache;
            }
            // If close to a sprout, snap to it and finish
            if (i >= MIN_LENGTH) {
                Vertex v = new NearestSproutSearch(currentState, p1).result();
                if (v.position().dst(p1) < vertexGlueRadius) {
                    List<Vector2> returnPoints =
                        new ArrayList<Vector2>(strokePoints.subList(0, i));
                    returnPoints.add(v.position());
                    cache =
                        new SproutsEdge(
                            true,
                            new Polyline.WrappedList(returnPoints),
                            origin.color(),
                            v.color()
                        );
                    return cache;
                }
            }
            if (i > 0) {
                Vector2 p0 = strokePoints.get(i - 1);
                // Check if too close to a red vertex and abort
                boolean intesectsCobwebVertex = currentState.vertices()
                    .stream()
                    .anyMatch(v ->
                        v.color().equals(Color.RED)
                        && new IsPointOnLineSegment(
                            p0, p1, v.position(), vertexGlueRadius
                        ).check()
                    );
                if (intesectsCobwebVertex) {
                    // NOTE(review): unlike the other branches, the subList
                    // view is not copied into a new ArrayList here — confirm
                    // whether a live view over strokePoints is intended.
                    cache =
                        new SproutsEdge(
                            true,
                            new Polyline.WrappedList(
                                strokePoints.subList(0, i)
                            ),
                            origin.color(),
                            Color.GRAY
                        );
                    return cache;
                }
                // Check if crosses itself
                Vertex crossPoint =
                    new PolylineIntersectionSearch(
                        p0,
                        p1,
                        new Polyline.WrappedList(strokePoints.subList(0, i)),
                        Color.BLACK
                    ).result();
                if (crossPoint.color().equals(Color.BLACK)) {
                    List<Vector2> returnPoints =
                        new ArrayList<Vector2>(strokePoints.subList(0, i));
                    returnPoints.add(crossPoint.position());
                    cache =
                        new SproutsEdge(
                            true,
                            new Polyline.WrappedList(returnPoints),
                            origin.color(),
                            Color.GRAY
                        );
                    return cache;
                }
                // Check if crosses the face
                crossPoint =
                    new FaceIntersectionSearch(moveFace, p0, p1).result();
                if (!crossPoint.equals(new VoidVertex())) {
                    List<Vector2> returnPoints =
                        new ArrayList<Vector2>(strokePoints.subList(0, i));
                    returnPoints.add(crossPoint.position());
                    Color toColor = crossPoint.color();
                    if (toColor.equals(Color.BLACK)) {
                        toColor = Color.GRAY;
                    }
                    cache =
                        new SproutsEdge(
                            true,
                            new Polyline.WrappedList(returnPoints),
                            origin.color(),
                            toColor
                        );
                    return cache;
                }
            }
        }
        // No terminating event: the whole stroke is the (still open) edge.
        cache =
            new SproutsEdge(
                true,
                new Polyline.WrappedList(strokePoints),
                origin.color(),
                Color.CLEAR
            );
        return cache;
    }
    @Override
    public boolean isCompleted() {
        // A submove is complete once its tip landed on a sprout (BLACK)
        // or a cobweb vertex (RED).
        Color tipColor = Color.CLEAR;
        if (isReadyToRender()) {
            tipColor = asEdge().to().color();
        }
        return tipColor.equals(Color.BLACK) || tipColor.equals(Color.RED);
    }
    @Override
    public boolean isReadyToRender() {
        // At least two points are needed to render a line segment.
        return stroke.points().size() > 1;
    }
    @Override
    public boolean isValid() {
        if (!isReadyToRender()) {
            return false;
        }
        Vertex from = origin;
        Vertex to = asEdge().to();
        boolean intermediate = true;
        // Sprout endpoints must still be alive (degree < 3).
        if (from.color().equals(Color.BLACK)) {
            intermediate &= new IsAliveSprout(currentState).test(from);
        }
        if (to.color().equals(Color.BLACK)) {
            intermediate &= new IsAliveSprout(currentState).test(to);
        }
        // A self-loop needs two free lives on the shared sprout.
        if (from.equals(to)) {
            intermediate &=
                new VertexDegree(
                    from,
                    currentState,
                    Color.BLACK
                ).intValue() < 2;
        }
        // GRAY marks an aborted/invalid edge end.
        return intermediate && !to.color().equals(Color.GRAY);
    }
    @Override
    public boolean hasNext() {
        // There is a follow-up submove when this one ended on a cobweb
        // (non-sprout) vertex rather than a sprout.
        return isCompleted() && !asEdge().to().color().equals(Color.BLACK);
    }
    @Override
    public Submove next() {
        if (!hasNext()) {
            throw new IllegalStateException("This is the tail Submove.");
        }
        Vertex tip = asEdge().to();
        return
            new SubmoveElement(
                tip,
                new PolylinePiece(
                    stroke,
                    tip.position()
                ),
                new SproutsStateAfterSubmove(currentState, this),
                vertexGlueRadius,
                gameBounds
            );
    }
}
| 47,615
|
https://github.com/simonbray/IntaRNA/blob/master/src/IntaRNA/InteractionEnergyIdxOffset.h
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
IntaRNA
|
simonbray
|
C
|
Code
| 3,323
| 7,517
|
#ifndef INTARNA_INTERACTIONENERGYIDXOFFSET_H_
#define INTARNA_INTERACTIONENERGYIDXOFFSET_H_
#include "IntaRNA/InteractionEnergy.h"
namespace IntaRNA {
/**
 * Wrapper for a given InteractionEnergy object where indices are shifted by
 * a given positive offset (shifted towards infinity).
 * This is useful for local interaction computations.
 *
 * All index arguments are interpreted as *local* indices and are mapped into
 * the wrapped object's index space by adding the respective offset.
 *
 * @author Martin Mann
 *
 */
class InteractionEnergyIdxOffset : public InteractionEnergy
{
public:

	/**
	 * construction
	 *
	 * @param energyOriginal wrapped energy object used for computations
	 * @param offset1 the index offset for sequence 1
	 * @param offset2 the index offset for sequence 2
	 */
	InteractionEnergyIdxOffset( const InteractionEnergy & energyOriginal
				, const size_t offset1 = 0
				, const size_t offset2 = 0 );

	virtual ~InteractionEnergyIdxOffset();

	/**
	 * Access to the currently used index offset for sequence 1
	 * @return the index offset for sequence 1 used
	 */
	size_t getOffset1() const;

	/**
	 * Sets the index offset to be used for sequence 1
	 * @param offset1 the index offset for sequence 1 to be used
	 */
	void setOffset1(size_t offset1);

	/**
	 * Access to the currently used index offset for sequence 2
	 * @return the index offset for sequence 2 used
	 */
	size_t getOffset2() const;

	/**
	 * Sets the index offset to be used for sequence 2
	 * @param offset2 the index offset for sequence 2 to be used
	 */
	void setOffset2(size_t offset2);

	/**
	 * Provides the overall energy for an interaction from [i1,j1] in the first
	 * sequence and [i2,j2] in the second sequence given the hybridization
	 * energy contribution.
	 *
	 * @param i1 the index of the first sequence interacting with i2
	 * @param j1 the index of the first sequence interacting with j2 with i1<=j1
	 * @param i2 the index of the second sequence interacting with i1
	 * @param j2 the index of the second sequence interacting with j1 with i2<=j2
	 * @param hybridE the hybridization energy for the interaction
	 *
	 * @return E = hybridE
	 *             + ED1(i1,j1) + ED2(i2,j2)
	 *             + Edangle(i1,i2) + Edangle(j1,j2)
	 *             + Eend(i1,i2) + Eend(j1,j2)
	 */
	virtual
	E_type
	getE( const size_t i1, const size_t j1
			, const size_t i2, const size_t j2
			, const E_type hybridE ) const;

	/**
	 * Provides the ensemble energy for a given partition function Z.
	 *
	 * @param Z the ensemble's partition function to convert
	 *
	 * @return E = -RT * log( Z )
	 */
	virtual
	E_type
	getE( const Z_type Z ) const;

	/**
	 * Checks whether or not two positions (shifted by offset) can form a base pair
	 * @param i1 index in first sequence
	 * @param i2 index in second sequence
	 * @return true if seq1(i1) can form a base pair with seq2(i2)
	 */
	virtual
	bool
	areComplementary( const size_t i1, const size_t i2 ) const;

	/**
	 * Checks whether or not two positions can form a GU base pair
	 * @param i1 index in first sequence
	 * @param i2 index in second sequence
	 * @return true if seq1(i1) can form a GU base pair with seq2(i2)
	 */
	virtual
	bool
	isGU( const size_t i1, const size_t i2 ) const;

	/**
	 * Length of sequence 1 excluding the index offset
	 * @return length of sequence 1 excluding index offset
	 */
	virtual
	size_t
	size1() const;

	/**
	 * Length of sequence 2 excluding index offset
	 * @return length of sequence 2 excluding index offset
	 */
	virtual
	size_t
	size2() const;

	/**
	 * Provides the ED penalty for making a region with sequence 1 accessible
	 *
	 * Note, the indices are shifted by an offset for computation.
	 *
	 * @param i1 the start of the accessible region
	 * @param j1 the end of the accessible region
	 * @return the ED value for [i1,j1]
	 */
	virtual
	E_type
	getED1( const size_t i1, const size_t j1 ) const;

	/**
	 * Provides the ED penalty for making a region with (the reversed)
	 * sequence 2 accessible
	 *
	 * Note, the indices are shifted by an offset for computation.
	 *
	 * @param i2 the start of the accessible region
	 * @param j2 the end of the accessible region
	 * @return the ED value for [i2,j2]
	 */
	virtual
	E_type
	getED2( const size_t i2, const size_t j2 ) const;

	/**
	 * Whether or not position i is accessible for interaction in sequence 1
	 *
	 * Note, the index is shifted by an offset for computation.
	 *
	 * @param i the position of interest in sequence 1
	 * @return true if the position can partake in an interaction; false otherwise
	 */
	virtual
	bool
	isAccessible1( const size_t i ) const;

	/**
	 * Whether or not position i is accessible for interaction in sequence 2
	 *
	 * Note, the index is shifted by an offset for computation.
	 *
	 * @param i the position of interest in sequence 2
	 * @return true if the position can partake in an interaction; false otherwise
	 */
	virtual
	bool
	isAccessible2( const size_t i ) const;

	/**
	 * Provides the energy contribution of an interaction site gap, i.e. the
	 * provided regions are without intermolecular base pairs but are considered
	 * to be involved in intramolecular base pairs only. The multi-site gap is
	 * scored according to a multiloop in a single structure prediction model.
	 * The ends of the two regions are supposed to form an intermolecular base
	 * pair each, i.e. (i1,i2) and (j1,j2) have to be complementary.
	 *
	 * @param i1 the start of the structured region of seq1
	 * @param j1 the end of the structured region of seq1
	 * @param i2 the start of the structured region of seq2
	 * @param j2 the end of the structured region of seq2
	 * @param ES_mode defines for which sequence intramolecular structure
	 *          contributions are to be considered
	 * @return the energy contribution of a multi-site interaction gap
	 */
	virtual
	E_type
	getE_multi(	  const size_t i1, const size_t j1
				, const size_t i2, const size_t j2
				, const ES_multi_mode ES_mode ) const;

	/**
	 * Provides the ensemble energy (ES) of all intramolecular substructures
	 * that can be formed within a given region of sequence 1 under the
	 * assumption that the region is part of an (intermolecular) multiloop,
	 * i.e. at least one base pair is formed by each substructure.
	 *
	 * If no structure can be formed within the region, E_INF is returned.
	 *
	 * @param i1 the start of the structured region of seq1
	 * @param j1 the end of the structured region of seq1
	 * @return the ES value for [i1,j1] or E_INF if no intramolecular
	 *         structure can be formed
	 */
	virtual
	E_type
	getES1( const size_t i1, const size_t j1 ) const;

	/**
	 * Provides the ensemble energy (ES) of all intramolecular substructures
	 * that can be formed within a given region of sequence 2 under the
	 * assumption that the region is part of an (intermolecular) multiloop,
	 * i.e. at least one base pair is formed by each substructure.
	 *
	 * If no structure can be formed within the region, E_INF is returned.
	 *
	 * @param i2 the start of the structured region of seq2
	 * @param j2 the end of the structured region of seq2
	 * @return the ES value for [i2,j2] or E_INF if no intramolecular
	 *         structure can be formed
	 */
	virtual
	E_type
	getES2( const size_t i2, const size_t j2 ) const;

	/**
	 * Provides the energy contribution for a given number of unpaired
	 * nucleotides under the
	 * assumption that the region is part of an (intermolecular) multiloop.
	 *
	 * @param numUnpaired the number of unpaired bases
	 * @return the energy contribution of the given number of unpaired bases
	 *         within an intramolecular multiloop
	 */
	virtual
	E_type
	getE_multiUnpaired( const size_t numUnpaired ) const;

	/**
	 * Provides the energy contribution/penalty of the helix represented by the
	 * interaction right of a multi-site gap starting with base pair (j1,j2)
	 *
	 * @param j1 the end of the gap in seq1, ie the first base paired in the
	 *           interaction site to the right of the gap
	 * @param j2 the end of the gap in seq2, ie the first base paired in the
	 *           interaction site to the right of the gap
	 *
	 *
	 * @return the energy contribution/penalty of the intermolecular helix
	 *         within an intramolecular multiloop
	 */
	virtual
	E_type
	getE_multiHelix( const size_t j1, const size_t j2 ) const;

	/**
	 * Provides the energy contribution/penalty for closing an intermolecular
	 * interaction left of a multi-site gap.
	 *
	 * @return the energy contribution/penalty of the intermolecular helix
	 *         within an intramolecular multiloop
	 */
	virtual
	E_type
	getE_multiClosing() const;

	/**
	 * Provides the duplex initiation energy.
	 *
	 * @return the energy for duplex initiation
	 */
	virtual
	E_type
	getE_init( ) const;

	/**
	 * Computes the energy estimate for the 'left side' interaction loop region
	 * closed by the intermolecular base pairs (i1,i2) and enclosing (j1,j2)
	 * where the regions [i1,j1] and [i2,j2] are considered unpaired or E_INF
	 * if the internal loop size exceeds the allowed maximum (see constructor).
	 *
	 * Note, the indices are shifted by an offset for computation.
	 *
	 * Note, the right interaction base pair (j1,j2) is not included in the
	 * returned energy value.
	 *
	 * @param i1 the index of the first sequence interacting with i2
	 * @param j1 the index of the first sequence interacting with j2 with i1<=j1
	 * @param i2 the index of the second sequence interacting with i1
	 * @param j2 the index of the second sequence interacting with j1 with i2<=j2
	 *
	 * @return the energy for the loop
	 *         or E_INF if the allowed loop size is exceeded or no valid internal loop boundaries
	 */
	virtual
	E_type
	getE_interLeft( const size_t i1, const size_t j1, const size_t i2, const size_t j2 ) const;

	/**
	 * Computes the dangling end energy penalties for the left side
	 * (i1-1 and i2-1) of the interaction closed by the intermolecular
	 * base pair (i1,i2).
	 *
	 * Note, the indices are shifted by an offset for computation.
	 *
	 * @param i1 the index of the first sequence interacting with i2
	 * @param i2 the index of the second sequence interacting with i1
	 *
	 * @return the dangling end penalty for the left side of the interaction
	 */
	virtual
	E_type
	getE_danglingLeft( const size_t i1, const size_t i2 ) const;

	/**
	 * Computes the dangling end energy penalties for the right side
	 * (j1+1 and j2+1) of the interaction closed by the intermolecular
	 * base pair (j1,j2).
	 *
	 * Note, the indices are shifted by an offset for computation.
	 *
	 * @param j1 the index of the first sequence interacting with j2
	 * @param j2 the index of the second sequence interacting with j1
	 *
	 * @return the dangling end penalty for the right side of the interaction
	 */
	virtual
	E_type
	getE_danglingRight( const size_t j1, const size_t j2 ) const;

	/**
	 * Provides the penalty for closing an interaction with the given
	 * base pair on the "left side" (i1 = 5' end of seq1 of the interaction)
	 *
	 * Note, the indices are shifted by an offset for computation.
	 *
	 * @param i1 the index of the first sequence interacting with i2
	 * @param i2 the index of the second sequence interacting with i1
	 *
	 * @return the loop closure penalty for the left side of the interaction
	 */
	virtual
	E_type
	getE_endLeft( const size_t i1, const size_t i2 ) const;

	/**
	 * Provides the penalty for closing an interaction with the given
	 * base pair on the "right side" (j1 = 3' end of seq1 of the interaction)
	 *
	 * Note, the indices are shifted by an offset for computation.
	 *
	 * @param j1 the index of the first sequence interacting with j2
	 * @param j2 the index of the second sequence interacting with j1
	 *
	 * @return the loop closure penalty for the right side of the interaction
	 */
	virtual
	E_type
	getE_endRight( const size_t j1, const size_t j2 ) const;

	/**
	 * Computes the probability of the dangling ends for the left side
	 * (i1-1 and i2-1) of the interaction closed by the intermolecular
	 * base pair (i1,i2) for an interaction of [i1,j1] with [i2,j2].
	 *
	 * Note, the indices are shifted by an offset for computation.
	 *
	 * @param i1 the index of the first sequence interacting with i2
	 * @param j1 the index of the first sequence interacting with j2 with i1<=j1
	 * @param i2 the index of the second sequence interacting with i1
	 * @param j2 the index of the second sequence interacting with j1 with i2<=j2
	 *
	 * @return the dangling end probability for the left side of the interaction
	 */
	virtual
	Z_type
	getPr_danglingLeft( const size_t i1, const size_t j1, const size_t i2, const size_t j2 ) const;

	/**
	 * Computes the probability of the dangling ends for the right side
	 * (j1+1 and j2+1) of the interaction closed by the intermolecular
	 * base pair (j1,j2) for an interaction of [i1,j1] with [i2,j2].
	 *
	 * Note, the indices are shifted by an offset for computation.
	 *
	 * @param i1 the index of the first sequence interacting with i2
	 * @param j1 the index of the first sequence interacting with j2 with i1<=j1
	 * @param i2 the index of the second sequence interacting with i1
	 * @param j2 the index of the second sequence interacting with j1 with i2<=j2
	 *
	 * @return the dangling end probability for the right side of the interaction
	 */
	virtual
	Z_type
	getPr_danglingRight( const size_t i1, const size_t j1, const size_t i2, const size_t j2 ) const;

	/**
	 * Access to the normalized temperature for Boltzmann weight computation
	 */
	virtual
	Z_type
	getRT() const;

	/**
	 * Provides the base pair encoding for the given indices after shifting by
	 * the used offset
	 * @param i1 the index in the first sequence
	 * @param i2 the index in the (reversed) second sequence
	 * @return the according base pair (i1+offset1,reverseIdx(i2+offset2))
	 */
	virtual
	Interaction::BasePair
	getBasePair( const size_t i1, const size_t i2 ) const;

	/**
	 * Provides the index within the first sequence of the given base pair
	 * shifted by the offset.
	 * @return the shifted index of the first sequence within the base pair encoding
	 */
	virtual
	size_t
	getIndex1( const Interaction::BasePair & bp ) const;

	/**
	 * Provides the (reversed) index within the second sequence of the given base pair
	 * shifted by the offset.
	 * @return the shifted index of the second sequence within the base pair encoding
	 */
	virtual
	size_t
	getIndex2( const Interaction::BasePair & bp ) const;

	/**
	 * Checks whether or not the given indices mark valid internal loop
	 * boundaries, i.e.
	 *  - (i1,i2) and (j1,j2) are complementary
	 *  - i1..j1 and i2..j2 are allowed loop regions
	 *  - no boundary overlap ( (j1-i1==0 && j2-i2==0) || (j1-i1>0 && j2-i2>0) )
	 *  - if !internalLoopGU : both ends are no GU base pairs
	 *
	 * @param i1 the index of the first sequence interacting with i2
	 * @param j1 the index of the first sequence interacting with j2 with i1<=j1
	 * @param i2 the index of the second sequence interacting with i1
	 * @param j2 the index of the second sequence interacting with j1 with i2<=j2
	 *
	 * @return true if the boundaries are sound for internal loop calculation;
	 *         false otherwise
	 */
	virtual
	bool
	isValidInternalLoop( const size_t i1, const size_t j1, const size_t i2, const size_t j2 ) const;

	/**
	 * Provides the overall ensemble energy for sequence 1
	 * given its accessibility constraints
	 * @return Eall(constraint-conform intra-molecular structures for seq1)
	 */
	virtual
	E_type
	getEall1() const;

	/**
	 * Provides the overall ensemble energy for sequence 2
	 * given its accessibility constraints
	 * @return Eall(constraint-conform intra-molecular structures for seq2)
	 */
	virtual
	E_type
	getEall2() const;

protected:

	/** the wrapped energy computation handler */
	const InteractionEnergy & energyOriginal;

	/** the index offset in sequence 1 */
	size_t offset1;

	/** the index offset in sequence 2 */
	size_t offset2;

};
//////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////
inline
InteractionEnergyIdxOffset::
InteractionEnergyIdxOffset( const InteractionEnergy & energyOriginal
			, const size_t offset1
			, const size_t offset2 )
 :
	InteractionEnergy( energyOriginal )
	, energyOriginal(energyOriginal)
	, offset1(offset1)
	, offset2(offset2)
{
#if INTARNA_IN_DEBUG_MODE
	// input sanity checks: the setters validate the offsets against the
	// wrapped sequence lengths and throw std::runtime_error on violation
	setOffset1(offset1);
	setOffset2(offset2);
#endif
}

//////////////////////////////////////////////////////////////////////////

inline
InteractionEnergyIdxOffset::~InteractionEnergyIdxOffset()
{
}

//////////////////////////////////////////////////////////////////////////

// Current index offset for sequence 1.
inline
size_t
InteractionEnergyIdxOffset::
getOffset1() const
{
	return offset1;
}

//////////////////////////////////////////////////////////////////////////

// Sets the index offset for sequence 1 (debug builds validate the range).
inline
void
InteractionEnergyIdxOffset::
setOffset1(size_t offset1)
{
#if INTARNA_IN_DEBUG_MODE
	if (offset1 >= energyOriginal.size1()) {
		// BUGFIX: the guard tests '>=', so report '>=' (was ' > '), which
		// misreported the offset == length case
		throw std::runtime_error("InteractionEnergyIdxOffset : offset1 "+toString(offset1)
				+" >= seq1.length "+toString(energyOriginal.size1()));
	}
#endif
	this->offset1 = offset1;
}

//////////////////////////////////////////////////////////////////////////

// Current index offset for sequence 2.
inline
size_t
InteractionEnergyIdxOffset::
getOffset2() const
{
	return offset2;
}

//////////////////////////////////////////////////////////////////////////

// Sets the index offset for sequence 2 (debug builds validate the range).
inline
void
InteractionEnergyIdxOffset::
setOffset2(size_t offset2)
{
#if INTARNA_IN_DEBUG_MODE
	if (offset2 >= energyOriginal.size2()) {
		// BUGFIX: message operator aligned with the '>=' guard (was ' > ')
		throw std::runtime_error("InteractionEnergyIdxOffset : offset2 "+toString(offset2)
				+" >= seq2.length "+toString(energyOriginal.size2()));
	}
#endif
	this->offset2 = offset2;
}
//////////////////////////////////////////////////////////////////////////

// The delegates below translate *local* indices into the wrapped object's
// index space by adding the stored per-sequence offsets.

inline
E_type
InteractionEnergyIdxOffset::getED1( const size_t i1, const size_t j1 ) const
{
	// accessibility (ED) penalty of seq1 region [i1,j1], offset-shifted
	return energyOriginal.getED1( i1 + offset1, j1 + offset1 );
}

//////////////////////////////////////////////////////////////////////////

inline
E_type
InteractionEnergyIdxOffset::getED2( const size_t i2, const size_t j2 ) const
{
	// accessibility (ED) penalty of seq2 region [i2,j2], offset-shifted
	return energyOriginal.getED2( i2 + offset2, j2 + offset2 );
}

////////////////////////////////////////////////////////////////////////////

inline
bool
InteractionEnergyIdxOffset::isAccessible1( const size_t i ) const
{
	return energyOriginal.isAccessible1( i + offset1 );
}

////////////////////////////////////////////////////////////////////////////

inline
bool
InteractionEnergyIdxOffset::isAccessible2( const size_t i ) const
{
	return energyOriginal.isAccessible2( i + offset2 );
}

////////////////////////////////////////////////////////////////////////////

inline
bool
InteractionEnergyIdxOffset::isGU( const size_t i1, const size_t i2 ) const
{
	return energyOriginal.isGU( i1 + offset1, i2 + offset2 );
}

//////////////////////////////////////////////////////////////////////////

inline
bool
InteractionEnergyIdxOffset::areComplementary( const size_t i1, const size_t i2 ) const
{
	return energyOriginal.areComplementary( i1 + offset1, i2 + offset2 );
}

//////////////////////////////////////////////////////////////////////////

inline
size_t
InteractionEnergyIdxOffset::size1() const
{
	// effective length: wrapped length minus the offset prefix
	return energyOriginal.size1() - offset1;
}

////////////////////////////////////////////////////////////////////////////

inline
size_t
InteractionEnergyIdxOffset::size2() const
{
	// effective length: wrapped length minus the offset prefix
	return energyOriginal.size2() - offset2;
}
//////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getES1( const size_t i1, const size_t j1 ) const
{
return energyOriginal.getES1( i1+offset1, j1+offset1 );
}
//////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getES2( const size_t i2, const size_t j2 ) const
{
return energyOriginal.getES2( i2+offset2, j2+offset2 );
}
////////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getE_multiUnpaired( const size_t numUnpaired ) const
{
return energyOriginal.getE_multiUnpaired( numUnpaired );
}
//////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getE_multiHelix( const size_t j1, const size_t j2 ) const
{
return energyOriginal.getE_multiHelix(j1+offset1, j2+offset2);
}
//////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getE_multiClosing() const
{
return energyOriginal.getE_multiClosing();
}
//////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getE_init( ) const
{
	// interaction initiation term takes no indices; delegate unchanged
	return energyOriginal.getE_init();
}
//////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getE_interLeft( const size_t i1, const size_t j1, const size_t i2, const size_t j2 ) const
{
	// map all four offset-local boundaries onto the original indexing
	const size_t fi1 = i1 + offset1;
	const size_t fj1 = j1 + offset1;
	const size_t fi2 = i2 + offset2;
	const size_t fj2 = j2 + offset2;
	return energyOriginal.getE_interLeft( fi1, fj1, fi2, fj2 );
}
//////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getE_danglingLeft( const size_t i1, const size_t i2 ) const
{
	// shift to full-sequence coordinates and delegate
	const size_t full1 = i1 + offset1;
	const size_t full2 = i2 + offset2;
	return energyOriginal.getE_danglingLeft( full1, full2 );
}
//////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getE_danglingRight( const size_t j1, const size_t j2 ) const
{
	// shift to full-sequence coordinates and delegate
	const size_t full1 = j1 + offset1;
	const size_t full2 = j2 + offset2;
	return energyOriginal.getE_danglingRight( full1, full2 );
}
//////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getE_endLeft( const size_t i1, const size_t i2 ) const
{
	// shift to full-sequence coordinates and delegate
	const size_t full1 = i1 + offset1;
	const size_t full2 = i2 + offset2;
	return energyOriginal.getE_endLeft( full1, full2 );
}
//////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getE_endRight( const size_t j1, const size_t j2 ) const
{
	// shift to full-sequence coordinates and delegate
	const size_t full1 = j1 + offset1;
	const size_t full2 = j2 + offset2;
	return energyOriginal.getE_endRight( full1, full2 );
}
//////////////////////////////////////////////////////////////////////////
inline
Z_type
InteractionEnergyIdxOffset::
getPr_danglingLeft( const size_t i1, const size_t j1, const size_t i2, const size_t j2 ) const
{
	// map all four offset-local boundaries onto the original indexing
	const size_t fi1 = i1 + offset1;
	const size_t fj1 = j1 + offset1;
	const size_t fi2 = i2 + offset2;
	const size_t fj2 = j2 + offset2;
	return energyOriginal.getPr_danglingLeft( fi1, fj1, fi2, fj2 );
}
//////////////////////////////////////////////////////////////////////////
inline
Z_type
InteractionEnergyIdxOffset::
getPr_danglingRight( const size_t i1, const size_t j1, const size_t i2, const size_t j2 ) const
{
	// map all four offset-local boundaries onto the original indexing
	const size_t fi1 = i1 + offset1;
	const size_t fj1 = j1 + offset1;
	const size_t fi2 = i2 + offset2;
	const size_t fj2 = j2 + offset2;
	return energyOriginal.getPr_danglingRight( fi1, fj1, fi2, fj2 );
}
//////////////////////////////////////////////////////////////////////////
inline
Interaction::BasePair
InteractionEnergyIdxOffset::
getBasePair( const size_t i1, const size_t i2 ) const
{
	// the returned base pair uses full-sequence indexing (no back-shift applied)
	const size_t full1 = i1 + offset1;
	const size_t full2 = i2 + offset2;
	return energyOriginal.getBasePair( full1, full2 );
}
//////////////////////////////////////////////////////////////////////////
inline
size_t
InteractionEnergyIdxOffset::
getIndex1( const Interaction::BasePair & bp ) const
{
	// Map a base pair's full-sequence index in sequence 1 back to offset-local indexing.
#if INTARNA_IN_DEBUG_MODE
	// sanity check: the pair must lie within the accessible (offset-shifted) region
	if (energyOriginal.getIndex1(bp)<offset1) throw std::runtime_error("InteractionEnergyIdxOffset::getIndex1("+toString(energyOriginal.getIndex1(bp))+") < offset1 = "+toString(offset1));
#endif
	return energyOriginal.getIndex1(bp)-offset1;
}
////////////////////////////////////////////////////////////////////////////
inline
size_t
InteractionEnergyIdxOffset::
getIndex2( const Interaction::BasePair & bp ) const
{
	// Map a base pair's full-sequence index in sequence 2 back to offset-local indexing.
#if INTARNA_IN_DEBUG_MODE
	// sanity check: the pair must lie within the accessible (offset-shifted) region
	if (energyOriginal.getIndex2(bp)<offset2) throw std::runtime_error("InteractionEnergyIdxOffset::getIndex2("+toString(energyOriginal.getIndex2(bp))+") < offset2 = "+toString(offset2));
#endif
	return energyOriginal.getIndex2(bp)-offset2;
}
////////////////////////////////////////////////////////////////////////////
inline
Z_type
InteractionEnergyIdxOffset::
getRT() const
{
	// thermodynamic constant; offset independent, delegate unchanged
	return energyOriginal.getRT();
}
////////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getE( const Z_type Z ) const
{
	// partition-value-to-energy conversion takes no indices; delegate unchanged
	return energyOriginal.getE( Z );
}
////////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getE( const size_t i1, const size_t j1
		, const size_t i2, const size_t j2
		, const E_type hybridE ) const
{
	// map all four offset-local boundaries onto the original indexing;
	// the hybridization energy itself is index free and forwarded as-is
	const size_t fi1 = i1 + offset1;
	const size_t fj1 = j1 + offset1;
	const size_t fi2 = i2 + offset2;
	const size_t fj2 = j2 + offset2;
	return energyOriginal.getE( fi1, fj1, fi2, fj2, hybridE );
}
////////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getE_multi( const size_t i1, const size_t j1
		, const size_t i2, const size_t j2
		, const ES_multi_mode ES_mode ) const
{
	// map all four offset-local boundaries onto the original indexing;
	// the multi-loop mode flag is forwarded unchanged
	const size_t fi1 = i1 + offset1;
	const size_t fj1 = j1 + offset1;
	const size_t fi2 = i2 + offset2;
	const size_t fj2 = j2 + offset2;
	return energyOriginal.getE_multi( fi1, fj1, fi2, fj2, ES_mode );
}
////////////////////////////////////////////////////////////////////////////
inline
bool
InteractionEnergyIdxOffset::
isValidInternalLoop( const size_t i1, const size_t j1, const size_t i2, const size_t j2 ) const
{
return energyOriginal.isValidInternalLoop( i1+offset1, j1+offset1, i2+offset2, j2+offset2 );
}
////////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getEall1() const
{
	// whole-sequence quantity for sequence 1; offset independent, delegate unchanged
	return energyOriginal.getEall1();
}
////////////////////////////////////////////////////////////////////////////
inline
E_type
InteractionEnergyIdxOffset::
getEall2() const
{
	// whole-sequence quantity for sequence 2; offset independent, delegate unchanged
	return energyOriginal.getEall2();
}
////////////////////////////////////////////////////////////////////////////
} // namespace
#endif /* INTERACTIONENERGYIDXOFFSET_H_ */
| 28,671
|
https://github.com/djones6/OpenAPIDemo/blob/master/Sources/Application/Application.swift
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
OpenAPIDemo
|
djones6
|
Swift
|
Code
| 201
| 596
|
import Foundation
import Kitura
import LoggerAPI
import Configuration
import CloudEnvironment
import KituraContracts
import Health
import KituraOpenAPI
import SwiftKueryORM
import SwiftKueryPostgreSQL
public let projectPath = ConfigurationManager.BasePath.project.path
public let health = Health()
class Persistence {
    /// Registers a local PostgreSQL connection pool (database "test") as the
    /// default database for the Swift-Kuery ORM.
    static func setUp() {
        let poolOptions = ConnectionPoolOptions(initialCapacity: 10, maxCapacity: 50)
        let pool = PostgreSQLConnection.createPool(host: "localhost", port: 5432, options: [.databaseName("test")], poolOptions: poolOptions)
        Database.default = Database(pool)
    }
}
public class App {
    // Kitura router shared by all endpoint initializers below.
    let router = Router()
    // Reads port/service credentials from the cloud environment.
    let cloudEnv = CloudEnv()

    public init() throws {
        // Run the metrics initializer
        initializeMetrics(router: router)
    }

    // Set JSON encoding of Date to ISO8601 format
    func configureEncoders() {
        // Register a JSON encoder factory; ISO8601 date strategy only where available.
        router.encoders = [
            .json: {
                let encoder = JSONEncoder()
                if #available(OSX 10.12, *) {
                    encoder.dateEncodingStrategy = .iso8601
                }
                return encoder
            }
        ]
        // Matching decoder factory so round-tripped dates parse back.
        router.decoders = [
            .json: {
                let decoder = JSONDecoder()
                if #available(OSX 10.12, *) {
                    decoder.dateDecodingStrategy = .iso8601
                }
                return decoder
            }
        ]
    }

    // Wires up all endpoints and storage; called once from run() before the
    // HTTP server starts. Order matters: encoders are configured before the
    // typed routes that rely on them.
    func postInit() throws {
        // Endpoints
        // Static file server
        router.all(middleware: StaticFileServer())
        // OpenAPI
        let openApiConfig = KituraOpenAPIConfig(apiPath: "/demoapi", swaggerUIPath: "/demoapi/ui")
        KituraOpenAPI.addEndpoints(to: router, with: openApiConfig)
        configureEncoders()
        initializeHealthRoutes(app: self)
        initializeQueryRoutes(app: self)
        initializeTestRoutes(app: self)
        Persistence.setUp()
        do {
            try Person.createTableSync()
        } catch let error {
            // Table creation is best-effort; an existing table is expected on restart.
            print("Table already exists. Error: \(String(describing: error))")
        }
    }

    // Entry point: finish initialization, then block on the Kitura run loop.
    public func run() throws {
        try postInit()
        Kitura.addHTTPServer(onPort: cloudEnv.port, with: router)
        Kitura.run()
    }
}
| 41,887
|
https://github.com/JUSTLOVELE/MobileDevStudy/blob/master/RL/gym_case/value_iteration.py
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
MobileDevStudy
|
JUSTLOVELE
|
Python
|
Code
| 193
| 597
|
import gym
import numpy as np
def run_episode(env, policy, gamma = 1.0, render = False):
    """Roll out a single episode following `policy`; return the discounted return.

    Args:
        env: gym-style environment exposing reset()/step()/render().
        policy: per-state action table indexed by the observation.
        gamma: discount factor applied per step.
        render: when True, draw the environment before each step.
    """
    obs = env.reset()
    total = 0.0
    discount = 1.0  # equals gamma ** step index
    done = False
    while not done:
        if render:
            env.render()
        obs, reward, done, _ = env.step(int(policy[obs]))
        total += discount * reward
        discount *= gamma
    return total
def evaluate_policy(env, policy, gamma = 1.0, n = 100):
    """Mean discounted return of `policy` over `n` independent episodes."""
    scores = []
    for _ in range(n):
        scores.append(run_episode(env, policy, gamma, False))
    return np.mean(scores)
# Solve FrozenLake with value iteration, then extract and evaluate the greedy
# policy.  NOTE: V is updated in place (Gauss-Seidel-style sweep), so states
# later in a sweep already see this sweep's updated values.
env = gym.make("FrozenLake-v0")
env.reset()
gamma = 1.0
theta = 1e-20  # convergence threshold on the largest per-sweep value change
V = np.zeros(env.observation_space.n)
index = 0  # sweep counter, printed at the end
while True:
    delta = 0
    index += 1
    #pre_V = np.copy(V)
    for s in range(env.observation_space.n):
        v = V[s]
        # One-step lookahead: expected value of each action from state s,
        # summed over the transition tuples (prob, next state, reward, done).
        q_value = np.zeros(env.action_space.n)
        for a in range(env.action_space.n):
            for p, s_next, r, _ in env.P[s][a]:
                q_value[a] += (p * (r + gamma * V[s_next]))
        V[s] = np.max(q_value)
        if delta < abs(v - V[s]):
            delta = abs(v - V[s])
    if delta < theta:
        break
# Greedy policy extraction from the converged value function.
optimal_policy = np.zeros(env.observation_space.n)
for s in range(env.observation_space.n):
    q_value = np.zeros(env.action_space.n)
    for a in range(env.action_space.n):
        for p, s_next, r, _ in env.P[s][a]:
            q_value[a] += (p * (r + gamma * V[s_next]))
    optimal_policy[s] = np.argmax(q_value)
print(optimal_policy)
scores = evaluate_policy(env, optimal_policy, gamma=1.0)
print('Average scores = ', np.mean(scores))
print(index)
| 18,856
|
https://github.com/zhiliang729/TTCSDK_iOS/blob/master/TTC_SDK_iOS_Demo/ViewController.swift
|
Github Open Source
|
Open Source
|
MIT
| 2,018
|
TTCSDK_iOS
|
zhiliang729
|
Swift
|
Code
| 472
| 1,670
|
//
// ViewController.swift
// TTC_SDK_iOS_Demo
//
// Created by Zhang Yufei on 2018/7/2 下午4:07.
// Copyright © 2018年 tataufo. All rights reserved.
//
import UIKit
import TTCSDK
class ViewController: UIViewController {

    // Action titles for the grid; indices must match didSelectItemAt's switch.
    let dataArr = ["Login", "Logout", "Update", "Querry account balance", "Querry wallet balance", "Unbind", "Upload", "open/close SDK"]
    var itemSize: CGSize?
    // Current SDK enabled/disabled toggle state (action index 7).
    var enabled: Bool = true

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    init() {
        super.init(nibName: nil, bundle: nil)
        view.backgroundColor = UIColor.white
        // Three-column grid of square cells.
        itemSize = CGSize(width: (view.bounds.size.width - 4) / 3.0, height: (view.bounds.size.width - 4) / 3.0)
        setupSubviews()
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    var collectionView: UICollectionView!

    // Builds the collection view that hosts the action grid.
    func setupSubviews() {
        collectionView = UICollectionView(frame: view.bounds, collectionViewLayout: UICollectionViewFlowLayout())
        collectionView.backgroundColor = UIColor.white
        collectionView.register(TTCActionCell.self, forCellWithReuseIdentifier: "CollectionViewCellID")
        collectionView.dataSource = self
        collectionView.delegate = self
        view.addSubview(collectionView)
    }

    override func viewWillLayoutSubviews() {
        collectionView.frame = view.bounds
    }

    // Asynchronously query the account balance and show the result as a toast.
    func queryAccountBalance() {
        TTCSDK.queryAccountBalance { (success, error, balance) in
            if success {
                TWToast.showToast(text: "account balance: \(balance)")
            } else {
                TWToast.showToast(text: "error: \(String(describing: error?.errorDescription))")
            }
        }
    }

    // Asynchronously query the wallet balance and show the result as a toast.
    func queryWalletBalance() {
        TTCSDK.queryWalletBalance { (success, error, balance) in
            if success {
                TWToast.showToast(text: "wallet balance: \(balance)")
            } else {
                TWToast.showToast(text: "error: \(String(describing: error?.errorDescription))")
            }
        }
    }

    // Unbind the wallet from the current account.
    func unBindWallet() {
        TTCSDK.unBindWallet { (success, error) in
            if success {
                TWToast.showToast(text: "unbind success")
            } else {
                TWToast.showToast(text: "error: \(String(describing: error?.errorDescription))")
            }
        }
    }

    // Present the upload screen; requires a non-empty logged-in user id.
    func uploadAction() {
        guard let userid = TTCUser.shared.userId, !userid.isEmpty else { return TWToast.showToast(text: "login first") }
        let vc = TTCUploadViewController()
        self.present(vc, animated: true, completion: nil)
    }

    // Programmatic login with a fixed demo user id.
    // NOTE(review): appears unused — its only call site below is commented out.
    func login() {
        let user = TTCUserInfo(userId: "12345")
        TTCSDK.login(userInfo: user) { (success, error, _) -> Void in
            if success {
                TWToast.showToast(text: "login success")
                TTCUser.shared.userId = user.userId
            } else {
                print(String(describing: error?.errorDescription))
            }
        }
    }
}
// Collection-view wiring: one cell per entry of dataArr, and the action
// dispatch for each tapped cell.
extension ViewController: UICollectionViewDataSource, UICollectionViewDelegate, UICollectionViewDelegateFlowLayout {

    func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
        return dataArr.count
    }

    func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
        let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "CollectionViewCellID", for: indexPath) as! TTCActionCell
        cell.setTitle(title: dataArr[indexPath.row])
        return cell
    }

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, sizeForItemAt indexPath: IndexPath) -> CGSize {
        // itemSize is always assigned in init() before the grid is shown.
        return itemSize!
    }

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, minimumLineSpacingForSectionAt section: Int) -> CGFloat {
        return 2
    }

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, minimumInteritemSpacingForSectionAt section: Int) -> CGFloat {
        return 1
    }

    // Dispatch the tapped action; indices correspond to dataArr entries.
    func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
        switch indexPath.item {
        case 0:
            // Login — only when no user is currently logged in.
            if TTCUser.shared.userId == nil || TTCUser.shared.userId!.isEmpty {
                print(dataArr[0])
                let login = TTCLoginViewController()
                self.present(login, animated: true, completion: nil)
                // login()
            } else {
                TWToast.showToast(text: "Already login")
            }
        case 1:
            // Logout and clear the cached user id.
            print(dataArr[1])
            TTCSDK.logout()
            TTCUser.shared.userId = nil
            TWToast.showToast(text: "logout")
        case 2:
            // Update profile — reuses the login screen in "update" mode.
            if TTCUser.shared.userId == nil || TTCUser.shared.userId!.isEmpty {
                TWToast.showToast(text: "login first in the update information")
            } else {
                print(dataArr[2])
                let login = TTCLoginViewController()
                login.isLogin = false
                self.present(login, animated: true, completion: nil)
            }
        case 3:
            print(dataArr[3])
            queryAccountBalance()
        case 4:
            print(dataArr[4])
            queryWalletBalance()
        case 5:
            print(dataArr[5])
            unBindWallet()
        case 6:
            print(dataArr[6])
            uploadAction()
        case 7:
            // Toggle the SDK on/off.
            enabled = !enabled
            TTCSDK.sdk(isEnabled: enabled)
            TWToast.showToast(text: "SDK state \(enabled)")
        default: break
        }
    }
}
| 39,620
|
https://github.com/FurongYe/IOHexperimenter/blob/master/docs/structioh_1_1problem_1_1_meta_data.js
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| null |
IOHexperimenter
|
FurongYe
|
JavaScript
|
Code
| 60
| 572
|
// Navigation index for the MetaData struct docs: [display name, target URL, children].
// Appears to be generated by Doxygen — do not edit by hand.
var structioh_1_1problem_1_1_meta_data =
[
    [ "MetaData", "structioh_1_1problem_1_1_meta_data.html#a66fb4925a962c1d59ea1300a8a97b590", null ],
    [ "MetaData", "structioh_1_1problem_1_1_meta_data.html#a835dfda44836a8187476b3ec12953130", null ],
    [ "operator!=", "structioh_1_1problem_1_1_meta_data.html#aad970a93bc755686aa480fac711c77ce", null ],
    [ "operator==", "structioh_1_1problem_1_1_meta_data.html#a311b2a7da87b74326606d800b29f3244", null ],
    [ "repr", "structioh_1_1problem_1_1_meta_data.html#aac3830868b5ac3aa0e9b8df0058af9cb", null ],
    [ "initial_objective_value", "structioh_1_1problem_1_1_meta_data.html#a367d631fabef621d5c926b088e69ec26", null ],
    [ "instance", "structioh_1_1problem_1_1_meta_data.html#acb3c2ca4cd902f65fc2dae2d4f35d334", null ],
    [ "n_variables", "structioh_1_1problem_1_1_meta_data.html#a67a73db29b3a04908363a1e0a649ce75", null ],
    [ "name", "structioh_1_1problem_1_1_meta_data.html#acb8c5ceb1960e0c37660e668157ffae3", null ],
    [ "optimization_type", "structioh_1_1problem_1_1_meta_data.html#aed5cacb9769c1292bde564ba27edf298", null ],
    [ "problem_id", "structioh_1_1problem_1_1_meta_data.html#a9b4e1b192d991f83864b7ef958288799", null ]
];
| 36,860
|
https://github.com/rricard/draft-md/blob/master/src/index.js
|
Github Open Source
|
Open Source
|
MIT
| 2,018
|
draft-md
|
rricard
|
JavaScript
|
Code
| 43
| 123
|
/* @flow */
import React from "react"
import ReactDOM from "react-dom"
import { MDEditor } from "./components/MDEditor"
import type { MDEditorProps } from "./components/MDEditor"
export { MDEditor } from "./components/MDEditor"
export function mountMDEditor(props: MDEditorProps, container: any): void {
ReactDOM.render(<MDEditor {...props} />, container)
}
| 21,139
|
https://github.com/bestbeforetoday/fabric-samples/blob/master/ci/scripts/run-test-network-basic.sh
|
Github Open Source
|
Open Source
|
Apache-2.0, CC-BY-4.0
| 2,022
|
fabric-samples
|
bestbeforetoday
|
Shell
|
Code
| 443
| 1,323
|
# Fail fast: abort on errors, unset variables, and pipeline failures.
set -euo pipefail

# Chaincode under test; each may be overridden via the environment.
CHAINCODE_LANGUAGE=${CHAINCODE_LANGUAGE:-go}
CHAINCODE_NAME=${CHAINCODE_NAME:-basic}
CHAINCODE_PATH=${CHAINCODE_PATH:-../asset-transfer-basic}
function print() {
  # Highlight a progress message in green, preceded by a blank line.
  GREEN='\033[0;32m'
  NC='\033[0m'
  echo -e "\n${GREEN}${1}${NC}"
}
# Bring up a 3-org Fabric test network (CAs + CouchDB), add Org3 via its
# helper script, then deploy the chaincode under test to the channel.
function createNetwork() {
  print "Creating 3 Org network"
  ./network.sh up createChannel -ca -s couchdb
  # addOrg3.sh must be run from its own directory.
  cd addOrg3
  ./addOrg3.sh up -ca -s couchdb
  cd ..
  print "Deploying ${CHAINCODE_NAME} chaincode"
  ./network.sh deployCC -ccn "${CHAINCODE_NAME}" -ccp "${CHAINCODE_PATH}/chaincode-${CHAINCODE_LANGUAGE}" -ccv 1 -ccs 1 -ccl "${CHAINCODE_LANGUAGE}"
}
# Tear the whole test network down between application runs.
function stopNetwork() {
  print "Stopping network"
  ./network.sh down
}
# Run every sample application against a fresh network: each section brings
# the network up, runs one client app, and tears the network down again.

# Run Go application
createNetwork
print "Initializing Go application"
pushd ../asset-transfer-basic/application-go
print "Executing AssetTransfer.go"
go run .
popd
stopNetwork

# Run Java application
createNetwork
print "Initializing Java application"
pushd ../asset-transfer-basic/application-java
print "Executing Gradle Run"
gradle run
popd
stopNetwork

# Run Java application using gateway
createNetwork
print "Initializing Java application"
pushd ../asset-transfer-basic/application-gateway-java
print "Executing Gradle Run"
./gradlew run
popd
stopNetwork

# Run Javascript application
createNetwork
print "Initializing Javascript application"
pushd ../asset-transfer-basic/application-javascript
npm install
print "Executing app.js"
node app.js
popd
stopNetwork

# Run typescript application
createNetwork
print "Initializing Typescript application"
pushd ../asset-transfer-basic/application-typescript
npm install
print "Building app.ts"
npm run build
print "Running the output app"
node dist/app.js
popd
stopNetwork

# Run gateway typescript application
createNetwork
print "Initializing Typescript gateway application"
pushd ../asset-transfer-basic/application-gateway-typescript
npm install
print "Building app.ts"
npm run build
print "Running the output app"
node dist/app.js
popd
stopNetwork

# Run typescript HSM application
createNetwork
print "Initializing Typescript HSM application"
pushd ../asset-transfer-basic/application-typescript-hsm
print "Setup SoftHSM"
export SOFTHSM2_CONF=$PWD/softhsm2.conf
print "install dependencies"
npm install
print "Building app.ts"
npm run build
print "Running the output app"
node dist/app.js
popd
stopNetwork

# Run Typescript HSM gateway application
# The stock fabric-ca-client lacks PKCS#11 support; rebuild it with that tag.
echo 'Delete fabric-ca-client from samples bin'
rm ../bin/fabric-ca-client
echo 'go install pkcs11 enabled fabric-ca-client'
go install -tags pkcs11 github.com/hyperledger/fabric-ca/cmd/fabric-ca-client@latest
createNetwork
print "Initializing Typescript HSM gateway application"
pushd ../hardware-security-module/scripts/
print "Enroll and register User in HSM"
./generate-hsm-user.sh HSMUser
pushd ../application-typescript/
print "install dependencies and prepare for running"
npm install
print "Running the output app"
npm run start
popd
popd
stopNetwork

# Run Go HSM gateway application
createNetwork
print "Initializing Go HSM gateway application"
pushd ../hardware-security-module/scripts/
print "Register and enroll user in HSM"
./generate-hsm-user.sh HSMUser
pushd ../application-go
print "Running the output app"
go run -tags pkcs11 .
popd
popd
stopNetwork

# Run Go gateway application
createNetwork
print "Initializing Go gateway application"
pushd ../asset-transfer-basic/application-gateway-go
print "Executing AssetTransfer.go"
go run .
popd
stopNetwork

# Run off-chain data TypeScript application
createNetwork
print "Initializing Typescript off-chain data application"
pushd ../off_chain_data/application-typescript
rm -f checkpoint.json store.log
npm install
print "Running the output app"
SIMULATED_FAILURE_COUNT=1 npm start getAllAssets transact getAllAssets listen
SIMULATED_FAILURE_COUNT=1 npm start listen
popd
stopNetwork

# Run off-chain data Java application
createNetwork
# Fix: this message previously said "Typescript" (copy-paste from the section above).
print "Initializing Java off-chain data application"
pushd ../off_chain_data/application-java
rm -f app/checkpoint.json app/store.log
print "Running the output app"
SIMULATED_FAILURE_COUNT=1 ./gradlew run --quiet --args='getAllAssets transact getAllAssets listen'
SIMULATED_FAILURE_COUNT=1 ./gradlew run --quiet --args=listen
popd
stopNetwork
| 33,463
|
https://github.com/monsterrx/_cms/blob/master/resources/views/_cms/system-views/recovery/deletedDataViews/radioOne/jocks.blade.php
|
Github Open Source
|
Open Source
|
MIT
| null |
_cms
|
monsterrx
|
PHP
|
Code
| 86
| 407
|
{{-- Listing of soft-deleted Radio One student jocks; one row per jock/batch
     pair, each with a restore (PATCH) action. --}}
<table class="table table-hover" id="genericTable">
    <thead>
        <tr>
            <th>Id</th>
            <th>Name</th>
            <th>Nickname</th>
            <th>School</th>
            <th>Batch</th>
            <th>Date Deleted</th>
            <th></th>
        </tr>
    </thead>
    <tbody>
    @forelse($data as $studentJocks)
        @foreach($studentJocks->Batch as $batch)
            {{-- NOTE(review): rows whose relations throw (e.g. a missing School)
                 are silently skipped by the try/catch — confirm this is intended. --}}
            <?php try { ?>
            <tr>
                <td>{{ $studentJocks->id }}</td>
                <td>{{ $studentJocks->firstName }} {{ $studentJocks->lastName }}</td>
                <td>{{ $studentJocks->nickName }}</td>
                <td>{{ $studentJocks->School->school_name }}</td>
                <td>{{ $batch->batchNumber }}</td>
                <td>{{ date('F m, Y', strtotime($studentJocks->deleted_at)) }}</td>
                <td>
                    {{-- Restore the soft-deleted record. --}}
                    <form action="{{ route('recover.radioOneJock', $studentJocks->id) }}" method="POST">
                        @csrf
                        @method('PATCH')
                        <button type="submit" class="btn btn-outline-dark" title="Restore"><i class="fas fa-plus-square"></i></button>
                    </form>
                </td>
            </tr>
            <?php } catch (ErrorException $e) { ?>
            <?php } ?>
        @endforeach
    @empty
    @endforelse
    </tbody>
</table>
| 37,449
|
https://github.com/ruixiaoguo/LocalArchiverManager/blob/master/LocalArchiverManager/LocalArchiverManager/LocalUserDefaulManager.m
|
Github Open Source
|
Open Source
|
MIT
| 2,018
|
LocalArchiverManager
|
ruixiaoguo
|
Objective-C
|
Code
| 101
| 419
|
//
// LocalUserDefaulManager.m
// LocalArchiverManager
//
// Created by grx on 2018/8/17.
// Copyright © 2018年 grx. All rights reserved.
//
#import "LocalUserDefaulManager.h"
@implementation LocalUserDefaulManager

// Shared singleton accessor; the instance is created exactly once.
+ (LocalUserDefaulManager *)shareManagement
{
    static LocalUserDefaulManager *_defaulManager = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        _defaulManager = [[LocalUserDefaulManager alloc]init];
    });
    return _defaulManager;
}

// Store a user preference into NSUserDefaults under the given key.
// A nil value or nil key is rejected and logged (message is Chinese:
// "failed to store <key>").
- (void)saveUserDataWithKey:(id)data forKey:(NSString*)key;
{
    if (data==nil || key==nil)
    {
        NSLog(@"存储%@失败",key);
        return;
    }
    else
    {
        [[NSUserDefaults standardUserDefaults] setObject:data forKey:key];
        // Force an immediate write to disk.
        [[NSUserDefaults standardUserDefaults] synchronize];
    }
}

// Read a stored user preference; returns nil when no value exists for the key.
- (id)readUserDataWithKey:(NSString*)key
{
    id temp = [[NSUserDefaults standardUserDefaults] objectForKey:key];
    if(temp != nil)
    {
        return temp;
    }
    return nil;
}

// Remove a stored user preference for the given key.
- (void)removeUserDataWithkey:(NSString*)key
{
    [[NSUserDefaults standardUserDefaults] removeObjectForKey:key];
}

@end
| 7,338
|
https://github.com/vuchkov/dbal/blob/master/lib/Doctrine/DBAL/Driver/IBMDB2/DB2Exception.php
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
dbal
|
vuchkov
|
PHP
|
Code
| 84
| 300
|
<?php
declare(strict_types=1);
namespace Doctrine\DBAL\Driver\IBMDB2;
use Doctrine\DBAL\Driver\AbstractDriverException;
use function db2_conn_error;
use function db2_conn_errormsg;
use function db2_stmt_error;
use function db2_stmt_errormsg;
class DB2Exception extends AbstractDriverException
{
    /**
     * Builds an exception from the last connection-level IBM DB2 error.
     *
     * @param resource|null $connection
     */
    public static function fromConnectionError($connection = null) : self
    {
        if ($connection === null) {
            return new self(db2_conn_errormsg(), db2_conn_error());
        }

        return new self(db2_conn_errormsg($connection), db2_conn_error($connection));
    }

    /**
     * Builds an exception from the last statement-level IBM DB2 error.
     *
     * @param resource|null $statement
     */
    public static function fromStatementError($statement = null) : self
    {
        if ($statement === null) {
            return new self(db2_stmt_errormsg(), db2_stmt_error());
        }

        return new self(db2_stmt_errormsg($statement), db2_stmt_error($statement));
    }
}
| 2,982
|
https://github.com/MidoAhmed/dinivas/blob/master/apps/console/src/app/build/ansible/galaxy/galaxy-routing.module.ts
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
dinivas
|
MidoAhmed
|
TypeScript
|
Code
| 196
| 683
|
import { ProviderSourceListResolver } from './my-content/provider-source-resolver.service';
import { RepoContentDetailComponent } from './repo-content-detail/repo-content-detail.component';
import { ProviderListResolver } from './providers/provider-resolver.service';
import { ProvidersComponent } from './providers/providers.component';
import { MySettingsComponent } from './my-settings/my-settings.component';
import { NamespaceListResolver } from './my-content/namespace-list-resolver.service';
import { GalaxyComponent } from './galaxy.component';
import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { SearchComponent } from './search/search.component';
import { MyContentComponent } from './my-content/my-content.component';
import { MyImportComponent } from './my-import/my-import.component';
import { RepoContentDetailResolver } from './repo-content-detail/repo-content-detail.resolver.service';
import {
PopularCloudPlatformsResolver,
PopularPlatformsResolver,
PopularTagsResolver,
SearchCloudPlatformResolver,
SearchContentResolver,
SearchPlatformResolver
} from './search/search.resolver.service';
// Route table for the Galaxy feature area. All children render inside
// GalaxyComponent; each child pre-loads its data through resolvers.
const routes: Routes = [
  {
    path: '',
    component: GalaxyComponent,
    children: [
      {
        path: 'search',
        component: SearchComponent,
        // Re-run resolvers on every navigation (e.g. query-param-only changes).
        runGuardsAndResolvers: 'always',
        resolve: {
          cloudPlatforms: SearchCloudPlatformResolver,
          content: SearchContentResolver,
          platforms: SearchPlatformResolver,
          popularTags: PopularTagsResolver,
          popularCloudPlatforms: PopularCloudPlatformsResolver,
          popularPlatforms: PopularPlatformsResolver
        }
      },
      {
        path: 'my-content',
        component: MyContentComponent,
        resolve: {
          namespaces: NamespaceListResolver,
          providerSources: ProviderSourceListResolver
        }
      },
      {
        path: 'my-import',
        component: MyImportComponent,
        resolve: { namespaces: NamespaceListResolver }
      },
      { path: 'my-settings', component: MySettingsComponent },
      {
        path: 'providers',
        component: ProvidersComponent,
        resolve: {
          providers: ProviderListResolver
        }
      },
      {
        path: ':namespace/:name',
        component: RepoContentDetailComponent,
        resolve: {
          contentType: RepoContentDetailResolver
        }
      },
      // Fix: an empty-path redirect must declare pathMatch explicitly,
      // otherwise the Angular router reports an invalid route configuration.
      { path: '', redirectTo: 'search', pathMatch: 'full' }
    ]
  }
];
// Child routing module for the lazily loaded Galaxy feature.
@NgModule({
  imports: [RouterModule.forChild(routes)],
  exports: [RouterModule]
})
export class GalaxyRoutingModule {}
| 32,455
|
https://github.com/dd181818/lullaby/blob/master/lullaby/examples/example_app/port/sdl2/get_native_window_osx.mm
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,018
|
lullaby
|
dd181818
|
Objective-C++
|
Code
| 108
| 227
|
/*
Copyright 2017 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS-IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#import <AppKit/NSWindow.h>
#import <AppKit/NSView.h>
void* GetNativeWindow(void *window) {
NSWindow* ns_window = (__bridge NSWindow*)window;
NSView* ns_view = [ns_window contentView];
return (__bridge void*)ns_view;
}
| 10,277
|
https://github.com/01vadim10/slot_automat/blob/master/application/classes/model/___games.php
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,013
|
slot_automat
|
01vadim10
|
PHP
|
Code
| 84
| 249
|
<?php defined('SYSPATH') OR die('No Direct Script Access');
class Model_Games extends Model{

    // Cached list of games and their parameters.
    public $_game_list;

    public function __construct()
    {
        // NOTE(review): parent::__construct() is not invoked — confirm the base
        // Model constructor requires no initialisation.
    }

    // Returns the list of games and their parameters (all games when $id is 0).
    public function get_game($id = 0)
    {
        if ($id == 0)
        {
            // Fix: the original evaluated the property without returning it,
            // so callers always received null.
            return $this->_game_list;
        }
    }

    // Updates game parameters (stub).
    public function update_game()
    {
    }

    // Provides statistics for a single game (stub).
    public function get_stat_game()
    {
    }

    // Checks whether a game is connected (stub).
    public function valid_game()
    {
    }

    // Adds a new game to the game list (stub).
    public function add_game()
    {
    }
}
|
https://github.com/ebu/mcma-projects-dotnet/blob/master/multi-cloud-ai-workflow-az/services/Mcma.Azure.TransformService/Worker/Function.cs
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,020
|
mcma-projects-dotnet
|
ebu
|
C#
|
Code
| 123
| 504
|
using System.Threading.Tasks;
using Mcma.Azure.BlobStorage;
using Mcma.Azure.Client;
using Mcma.Azure.CosmosDb;
using Mcma.Azure.Functions.Logging;
using Mcma.Azure.Functions.Worker;
using Mcma.Client;
using Mcma.Core;
using Mcma.Core.Serialization;
using Mcma.Data;
using Mcma.Worker;
using Microsoft.Azure.WebJobs;
using Microsoft.Extensions.Logging;
using Microsoft.WindowsAzure.Storage.Queue;
using McmaLogger = Mcma.Core.Logging.Logger;
namespace Mcma.Azure.TransformService.Worker
{
public static class Function
{
    // Register locator types with the MCMA serializer once, at type load.
    static Function() => McmaTypes.Add<BlobStorageFileLocator>().Add<BlobStorageFolderLocator>();

    private static MicrosoftLoggerProvider LoggerProvider { get; } = new MicrosoftLoggerProvider("transform-service-worker");

    // Azure AD managed-identity auth for outbound MCMA calls.
    private static IAuthProvider AuthProvider { get; } = new AuthProvider().AddAzureAdManagedIdentityAuth();

    // NOTE: static initializers run in declaration order, so ProviderCollection
    // must stay declared before the Worker field that consumes it.
    private static ProviderCollection ProviderCollection { get; } = new ProviderCollection(
        LoggerProvider,
        new ResourceManagerProvider(AuthProvider),
        new CosmosDbTableProvider(new CosmosDbTableProviderOptions().FromEnvironmentVariables()),
        AuthProvider
    );

    // Worker configured to process TransformJob requests via CreateProxyLambda.
    private static IWorker Worker =
        new Mcma.Worker.Worker(ProviderCollection)
            .AddJobProcessing<TransformJob>(x => x.AddProfile<CreateProxyLambda>());

    // Queue-triggered entry point: one queue message == one worker request.
    [FunctionName("TransformServiceWorker")]
    public static async Task Run(
        [QueueTrigger("transform-service-work-queue", Connection = "WorkQueueStorage")] CloudQueueMessage queueMessage,
        ILogger log,
        ExecutionContext executionContext)
    {
        var request = queueMessage.ToWorkerRequest();
        // FFmpeg binaries ship with the function app; point at its root directory.
        FFmpegProcess.HostRootDir = executionContext.FunctionAppDirectory;
        LoggerProvider.AddLogger(log, request.Tracker);
        await Worker.DoWorkAsync(request);
    }
}
}
| 31,516
|
https://github.com/yuwei-cheng/eBay/blob/master/analyze/const.py
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
eBay
|
yuwei-cheng
|
Python
|
Code
| 123
| 462
|
import numpy as np
from statsmodels.nonparametric._kernel_base import EstimatorSettings
from agent.const import DELTA_SLR
# meta categories for collectibles
COLLECTIBLES = [1, 237, 260, 550, 870, 11116, 20081, 45100, 64482]
# for finding kernel regression bandwidth
OPT = EstimatorSettings(efficient=True)
# agent names, keyed by the seller discount factors from agent.const
SLR_NAMES = {DELTA_SLR[0]: 'Impatient agent',
             DELTA_SLR[1]: 'Patient agent'}
# various dimensions for plotting
POINTS = 100
VALUES_DIM = np.linspace(1 / 1000, 1, POINTS)
NORM1_DIM = np.linspace(.4, .9, POINTS)
NORM1_DIM_LONG = np.linspace(.5, 1., POINTS)
NORM1_DIM_SHORT = np.linspace(.4, .85, 50)
LOG10_BIN_DIM = np.linspace(1, 2.75, POINTS)
LOG10_BIN_DIM_SHORT = np.linspace(1, 2.5, 50)
LOG10_BO_DIM = np.linspace(0, 4, POINTS)
# for 2D plotting: flattened (x, y) coordinate grids, one row per grid point
xx1, xx2 = np.meshgrid(NORM1_DIM_SHORT, LOG10_BIN_DIM_SHORT)
NORM1_BIN_MESH = np.concatenate([xx1.reshape(-1, 1), xx2.reshape(-1, 1)], axis=1)
xx1, xx2 = np.meshgrid(np.linspace(.65, .95, 50), LOG10_BIN_DIM_SHORT)
NORM2_BIN_MESH = np.concatenate([xx1.reshape(-1, 1), xx2.reshape(-1, 1)], axis=1)
| 34,155
|
https://github.com/paper-project/paper/blob/master/paper/secure.cpp
|
Github Open Source
|
Open Source
|
BSD-2-Clause
| null |
paper
|
paper-project
|
C++
|
Code
| 8,773
| 29,989
|
#include <paper/secure.hpp>
#include <paper/working.hpp>
#include <boost/property_tree/json_parser.hpp>
#include <blake2/blake2.h>
#include <ed25519-donna/ed25519.h>
// Genesis keys for network variants
namespace
{
// Hard-coded genesis keys per network variant (hex encoded).
std::string paper_test_private_key = "34F0A37AAD20F4A260F0A5B3CB3D7FB50673212263E58A380BC10474BB039CE4";
std::string paper_test_public_key = "B0311EA55708D6A53C75CDBF88300259C6D018522FE3D4D0A242E431F9E8B6D0"; // TR6ZJ4pdp6HC76xMRpVDny5x2s8AEbrhFue3NKVxYYdmKuTEib
std::string paper_beta_public_key = "9D3A5B66B478670455B241D6BAC3D3FE1CBB7E7B7EAA429FA036C2704C3DC0A4"; // TuodHikZrYyNy4biERcXNSdA9ydXQNqww8BbHvfKiZPXidDLtj
// NOTE(review): live key is a "0" placeholder — confirm before a live release.
std::string paper_live_public_key = "0";
}
// Out-of-class definitions for the constexpr block-size constants.
size_t constexpr paper::send_block::size;
size_t constexpr paper::receive_block::size;
size_t constexpr paper::open_block::size;
size_t constexpr paper::change_block::size;
// Well-known keys/accounts constructed from the strings above.
paper::keypair const paper::zero_key ("0");
paper::keypair const paper::test_genesis_key (paper_test_private_key);
paper::account const paper::paper_test_account (paper_test_public_key);
paper::account const paper::paper_beta_account (paper_beta_public_key);
paper::account const paper::paper_live_account (paper_live_public_key);
// Pick the genesis account matching the compiled-in network variant.
paper::account const paper::genesis_account = paper_network == paper_networks::paper_test_network ? paper_test_account : paper_network == paper_networks::paper_beta_network ? paper_beta_account : paper_live_account;
// Genesis account holds the entire (maximum 128-bit) supply.
paper::uint128_t const paper::genesis_amount = std::numeric_limits <paper::uint128_t>::max ();
boost::filesystem::path paper::working_path ()
{
	// The data directory name depends on the compiled-in network variant.
	auto result (paper::app_path ());
	if (paper::paper_network == paper::paper_networks::paper_test_network)
	{
		result /= "RaiBlocksTest";
	}
	else if (paper::paper_network == paper::paper_networks::paper_beta_network)
	{
		result /= "RaiBlocksBeta";
	}
	else
	{
		result /= "RaiBlocks";
	}
	return result;
}
// Hash functor for unordered containers keyed on unique_ptr<block>:
// derive a size_t from the first 64 bits of the block's content hash.
size_t paper::unique_ptr_block_hash::operator () (std::unique_ptr <paper::block> const & block_a) const
{
return static_cast <size_t> (block_a->hash ().qwords [0]);
}
// Equality functor: compare the pointed-to blocks by value, not by pointer identity.
bool paper::unique_ptr_block_hash::operator () (std::unique_ptr <paper::block> const & lhs, std::unique_ptr <paper::block> const & rhs) const
{
auto equal (*lhs == *rhs);
return equal;
}
// Apply an incoming vote to this tracking table.
// Returns true when our recorded state changed: a new representative was seen,
// or a known representative switched to a different block at a higher sequence.
bool paper::votes::vote (paper::vote const & vote_a)
{
auto result (false);
// Reject unsigned votes
// NOTE(review): assumes validate_message returns nonzero on a bad signature — confirm.
if (!paper::validate_message (vote_a.account, vote_a.hash (), vote_a.signature))
{
// Check if we're adding a new vote entry or modifying an existing one.
auto existing (rep_votes.find (vote_a.account));
if (existing == rep_votes.end ())
{
// First vote from this representative: record sequence and block.
result = true;
rep_votes.insert (std::make_pair (vote_a.account, std::make_pair (vote_a.sequence, vote_a.block->clone ())));
}
else
{
// Only accept votes with an increasing sequence number
if (existing->second.first < vote_a.sequence)
{
// State only changes if the representative now backs a different block.
// NOTE(review): the stored sequence (existing->second.first) is never
// advanced here, so later votes keep comparing against the original
// sequence — confirm this is intended.
result = !(*existing->second.second == *vote_a.block);
if (result)
{
existing->second.second = vote_a.block->clone ();
}
}
}
}
return result;
}
// Sum the weights for each vote and return the winning block with its vote tally.
// The tally map is ordered by descending weight, so its first entry is the winner.
// Precondition: votes_a produces a non-empty tally; dereferencing the end
// iterator would otherwise be undefined behavior, so assert it in debug builds.
std::pair <paper::uint128_t, std::unique_ptr <paper::block>> paper::ledger::winner (MDB_txn * transaction_a, paper::votes const & votes_a)
{
auto tally_l (tally (transaction_a, votes_a));
auto existing (tally_l.begin ());
assert (existing != tally_l.end ());
return std::make_pair (existing->first, existing->second->clone ());
}
// Tally the ledger weight backing each distinct voted-for block.
// Returns a map ordered by decreasing total weight (strongest block first).
std::map <paper::uint128_t, std::unique_ptr <paper::block>, std::greater <paper::uint128_t>> paper::ledger::tally (MDB_txn * transaction_a, paper::votes const & votes_a)
{
std::unordered_map <std::unique_ptr <block>, paper::uint128_t, paper::unique_ptr_block_hash, paper::unique_ptr_block_hash> totals;
// Construct a map of blocks -> vote total.
for (auto & i: votes_a.rep_votes)
{
auto existing (totals.find (i.second.second));
if (existing == totals.end ())
{
totals.insert (std::make_pair (i.second.second->clone (), 0));
existing = totals.find (i.second.second);
assert (existing != totals.end ());
}
// Add this representative's ledger weight to the block it voted for.
auto weight_l (weight (transaction_a, i.first));
existing->second += weight_l;
}
// Construct a map of vote total -> block in decreasing order.
// NOTE(review): blocks with identical totals collide on the key and the later
// one overwrites the earlier — confirm dropping ties is acceptable.
std::map <paper::uint128_t, std::unique_ptr <paper::block>, std::greater <paper::uint128_t>> result;
for (auto & i: totals)
{
result [i.second] = i.first->clone ();
}
return result;
}
// Track votes for the election identified by block hash id_a.
paper::votes::votes (paper::block_hash const & id_a) :
// Sequence 0 is the first response by a representative before a fork was observed
sequence (1),
id (id_a)
{
}
// Create a new random keypair
paper::keypair::keypair ()
{
// Fill the private key from the CSPRNG, then derive the matching public key.
random_pool.GenerateBlock (prv.bytes.data (), prv.bytes.size ());
ed25519_publickey (prv.bytes.data (), pub.bytes.data ());
}
// Create a keypair given a hex string of the private key
paper::keypair::keypair (std::string const & prv_a)
{
// Callers must supply valid hex; decoding failure is a programming error.
auto error (prv.decode_hex (prv_a));
assert (!error);
ed25519_publickey (prv.bytes.data (), pub.bytes.data ());
}
// A ledger is a view over a block store; it holds a reference, not ownership.
paper::ledger::ledger (paper::block_store & store_a) :
store (store_a)
{
}
// Double-dispatch hook for block visitors.
void paper::send_block::visit (paper::block_visitor & visitor_a) const
{
visitor_a.send_block (*this);
}
// Fold this block's hashable fields into an ongoing blake2b state.
void paper::send_block::hash (blake2b_state & hash_a) const
{
hashables.hash (hash_a);
}
// Proof-of-work nonce accessor.
uint64_t paper::send_block::block_work () const
{
return work;
}
// Install a proof-of-work nonce; debug builds verify it meets the threshold.
void paper::send_block::block_work_set (uint64_t work_a)
{
assert (!paper::work_validate (root (), work_a));
work = work_a;
}
// Direct construction from field values.
paper::send_hashables::send_hashables (paper::block_hash const & previous_a, paper::account const & destination_a, paper::amount const & balance_a) :
previous (previous_a),
destination (destination_a),
balance (balance_a)
{
}
// Construct by reading wire format: previous, destination, balance, in order.
// error_a is set true on short read.
paper::send_hashables::send_hashables (bool & error_a, paper::stream & stream_a)
{
error_a = paper::read (stream_a, previous.bytes);
if (!error_a)
{
error_a = paper::read (stream_a, destination.bytes);
if (!error_a)
{
error_a = paper::read (stream_a, balance.bytes);
}
}
}
// Construct from a JSON property tree; previous/balance are hex,
// destination is base58check. error_a is set true on any parse failure.
paper::send_hashables::send_hashables (bool & error_a, boost::property_tree::ptree const & tree_a)
{
try
{
auto previous_l (tree_a.get <std::string> ("previous"));
auto destination_l (tree_a.get <std::string> ("destination"));
auto balance_l (tree_a.get <std::string> ("balance"));
error_a = previous.decode_hex (previous_l);
if (!error_a)
{
error_a = destination.decode_base58check (destination_l);
if (!error_a)
{
error_a = balance.decode_hex (balance_l);
}
}
}
catch (std::runtime_error const &)
{
// Missing keys throw from ptree::get; report as a parse error.
error_a = true;
}
}
// Hash the content fields in wire order; blake2b status codes are checked.
void paper::send_hashables::hash (blake2b_state & hash_a) const
{
auto status (blake2b_update (&hash_a, previous.bytes.data (), sizeof (previous.bytes)));
assert (status == 0);
status = blake2b_update (&hash_a, destination.bytes.data (), sizeof (destination.bytes));
assert (status == 0);
status = blake2b_update (&hash_a, balance.bytes.data (), sizeof (balance.bytes));
assert (status == 0);
}
// Write wire format: previous, destination, balance, signature, work.
void paper::send_block::serialize (paper::stream & stream_a) const
{
write (stream_a, hashables.previous.bytes);
write (stream_a, hashables.destination.bytes);
write (stream_a, hashables.balance.bytes);
write (stream_a, signature.bytes);
write (stream_a, work);
}
// Emit JSON: previous/balance as hex, destination as base58check,
// work as hex string, signature as hex.
void paper::send_block::serialize_json (std::string & string_a) const
{
boost::property_tree::ptree tree;
tree.put ("type", "send");
std::string previous;
hashables.previous.encode_hex (previous);
tree.put ("previous", previous);
tree.put ("destination", hashables.destination.to_base58check ());
std::string balance;
hashables.balance.encode_hex (balance);
tree.put ("balance", balance);
std::string signature_l;
signature.encode_hex (signature_l);
tree.put ("work", paper::to_string_hex (work));
tree.put ("signature", signature_l);
std::stringstream ostream;
boost::property_tree::write_json (ostream, tree);
string_a = ostream.str ();
}
// Read wire fields in serialization order: previous, destination, balance,
// signature, work. Returns true on failure; short-circuits at the first
// failed read so later fields are left untouched.
bool paper::send_block::deserialize (paper::stream & stream_a)
{
auto error (read (stream_a, hashables.previous.bytes));
error = error || read (stream_a, hashables.destination.bytes);
error = error || read (stream_a, hashables.balance.bytes);
error = error || read (stream_a, signature.bytes);
error = error || read (stream_a, work);
return error;
}
// Parse a send block from JSON (inverse of serialize_json).
// Returns true on error; destination uses base58check, other fields hex.
bool paper::send_block::deserialize_json (boost::property_tree::ptree const & tree_a)
{
auto result (false);
try
{
assert (tree_a.get <std::string> ("type") == "send");
auto previous_l (tree_a.get <std::string> ("previous"));
auto destination_l (tree_a.get <std::string> ("destination"));
auto balance_l (tree_a.get <std::string> ("balance"));
auto work_l (tree_a.get <std::string> ("work"));
auto signature_l (tree_a.get <std::string> ("signature"));
result = hashables.previous.decode_hex (previous_l);
if (!result)
{
result = hashables.destination.decode_base58check (destination_l);
if (!result)
{
result = hashables.balance.decode_hex (balance_l);
if (!result)
{
result = paper::from_string_hex (work_l, work);
if (!result)
{
result = signature.decode_hex (signature_l);
}
}
}
}
}
catch (std::runtime_error const &)
{
// ptree::get throws on missing keys; treat as a parse error.
result = true;
}
return result;
}
// Double-dispatch hook for block visitors.
void paper::receive_block::visit (paper::block_visitor & visitor_a) const
{
visitor_a.receive_block (*this);
}
// Value equality: all content fields plus work and signature must match.
bool paper::receive_block::operator == (paper::receive_block const & other_a) const
{
auto result (hashables.previous == other_a.hashables.previous && hashables.source == other_a.hashables.source && work == other_a.work && signature == other_a.signature);
return result;
}
// Read wire fields in serialization order: previous, source, signature, work.
// Returns true on failure; short-circuits at the first failed read.
bool paper::receive_block::deserialize (paper::stream & stream_a)
{
auto error (read (stream_a, hashables.previous.bytes));
error = error || read (stream_a, hashables.source.bytes);
error = error || read (stream_a, signature.bytes);
error = error || read (stream_a, work);
return error;
}
// Parse a receive block from JSON (inverse of serialize_json).
// Returns true on error; all fields are hex-encoded.
bool paper::receive_block::deserialize_json (boost::property_tree::ptree const & tree_a)
{
auto result (false);
try
{
assert (tree_a.get <std::string> ("type") == "receive");
auto previous_l (tree_a.get <std::string> ("previous"));
auto source_l (tree_a.get <std::string> ("source"));
auto work_l (tree_a.get <std::string> ("work"));
auto signature_l (tree_a.get <std::string> ("signature"));
result = hashables.previous.decode_hex (previous_l);
if (!result)
{
result = hashables.source.decode_hex (source_l);
if (!result)
{
result = paper::from_string_hex (work_l, work);
if (!result)
{
result = signature.decode_hex (signature_l);
}
}
}
}
catch (std::runtime_error const &)
{
// ptree::get throws on missing keys; treat as a parse error.
result = true;
}
return result;
}
// Write wire format: previous, source, signature, work.
void paper::receive_block::serialize (paper::stream & stream_a) const
{
write (stream_a, hashables.previous.bytes);
write (stream_a, hashables.source.bytes);
write (stream_a, signature.bytes);
write (stream_a, work);
}
// Emit JSON with hex-encoded fields; work is rendered as a hex string.
void paper::receive_block::serialize_json (std::string & string_a) const
{
boost::property_tree::ptree tree;
tree.put ("type", "receive");
std::string previous;
hashables.previous.encode_hex (previous);
tree.put ("previous", previous);
std::string source;
hashables.source.encode_hex (source);
tree.put ("source", source);
std::string signature_l;
signature.encode_hex (signature_l);
tree.put ("work", paper::to_string_hex (work));
tree.put ("signature", signature_l);
std::stringstream ostream;
boost::property_tree::write_json (ostream, tree);
string_a = ostream.str ();
}
// Construct and sign a receive block; the signature covers the content hash.
paper::receive_block::receive_block (paper::block_hash const & previous_a, paper::block_hash const & source_a, paper::private_key const & prv_a, paper::public_key const & pub_a, uint64_t work_a) :
hashables (previous_a, source_a),
signature (paper::sign_message (prv_a, pub_a, hash())),
work (work_a)
{
}
// Construct by reading wire format; error_a set true on short read.
paper::receive_block::receive_block (bool & error_a, paper::stream & stream_a) :
hashables (error_a, stream_a)
{
if (!error_a)
{
error_a = paper::read (stream_a, signature);
if (!error_a)
{
error_a = paper::read (stream_a, work);
}
}
}
// Construct from a JSON property tree; error_a set true on parse failure.
paper::receive_block::receive_block (bool & error_a, boost::property_tree::ptree const & tree_a) :
hashables (error_a, tree_a)
{
if (!error_a)
{
try
{
auto signature_l (tree_a.get <std::string> ("signature"));
auto work_l (tree_a.get <std::string> ("work"));
error_a = signature.decode_hex (signature_l);
if (!error_a)
{
error_a = paper::from_string_hex (work_l, work);
}
}
catch (std::runtime_error const &)
{
error_a = true;
}
}
}
// Fold this block's hashable fields into an ongoing blake2b state.
void paper::receive_block::hash (blake2b_state & hash_a) const
{
hashables.hash (hash_a);
}
// Proof-of-work nonce accessor.
uint64_t paper::receive_block::block_work () const
{
return work;
}
// Install a proof-of-work nonce; debug builds verify it meets the threshold.
void paper::receive_block::block_work_set (uint64_t work_a)
{
assert (!paper::work_validate (root (), work_a));
work = work_a;
}
// Polymorphic equality: only equal to another receive_block with equal fields.
bool paper::receive_block::operator == (paper::block const & other_a) const
{
auto other_l (dynamic_cast <paper::receive_block const *> (&other_a));
auto result (other_l != nullptr);
if (result)
{
result = *this == *other_l;
}
return result;
}
paper::block_hash paper::receive_block::previous () const
{
return hashables.previous;
}
// The send block whose funds this receive claims.
paper::block_hash paper::receive_block::source () const
{
return hashables.source;
}
// Work/root anchor for a receive block is its predecessor.
paper::block_hash paper::receive_block::root () const
{
return hashables.previous;
}
// Receive blocks carry no representative; report zero.
paper::account paper::receive_block::representative () const
{
return 0;
}
std::unique_ptr <paper::block> paper::receive_block::clone () const
{
return std::unique_ptr <paper::block> (new paper::receive_block (*this));
}
paper::block_type paper::receive_block::type () const
{
return paper::block_type::receive;
}
// Direct construction from field values.
paper::receive_hashables::receive_hashables (paper::block_hash const & previous_a, paper::block_hash const & source_a) :
previous (previous_a),
source (source_a)
{
}
// Construct by reading wire format: previous then source; error_a on short read.
paper::receive_hashables::receive_hashables (bool & error_a, paper::stream & stream_a)
{
error_a = paper::read (stream_a, previous.bytes);
if (!error_a)
{
error_a = paper::read (stream_a, source.bytes);
}
}
// Construct from a JSON property tree; both fields are hex-encoded.
paper::receive_hashables::receive_hashables (bool & error_a, boost::property_tree::ptree const & tree_a)
{
try
{
auto previous_l (tree_a.get <std::string> ("previous"));
auto source_l (tree_a.get <std::string> ("source"));
error_a = previous.decode_hex (previous_l);
if (!error_a)
{
error_a = source.decode_hex (source_l);
}
}
catch (std::runtime_error const &)
{
// Missing keys throw from ptree::get; report as a parse error.
error_a = true;
}
}
// Hash the receive-block content fields in wire order.
// Check blake2b return codes in debug builds, consistent with
// send_hashables::hash above (previously the statuses were ignored).
void paper::receive_hashables::hash (blake2b_state & hash_a) const
{
auto status (blake2b_update (&hash_a, previous.bytes.data (), sizeof (previous.bytes)));
assert (status == 0);
status = blake2b_update (&hash_a, source.bytes.data (), sizeof (source.bytes));
assert (status == 0);
}
// Compute the 256-bit blake2b content hash of this block by delegating the
// field updates to the derived class's hash(blake2b_state&) override.
paper::block_hash paper::block::hash () const
{
paper::uint256_union result;
blake2b_state hash_l;
auto status (blake2b_init (&hash_l, sizeof (result.bytes)));
assert (status == 0);
hash (hash_l);
status = blake2b_final (&hash_l, result.bytes.data (), sizeof (result.bytes));
assert (status == 0);
return result;
}
// Convenience wrapper returning the JSON serialization as a string.
std::string paper::block::to_json ()
{
std::string result;
serialize_json (result);
return result;
}
// Serialize a block prefixed with an 8-bit typecode
void paper::serialize_block (paper::stream & stream_a, paper::block const & block_a)
{
write (stream_a, block_a.type ());
block_a.serialize (stream_a);
}
// Check a proof-of-work nonce against a root: blake2b(work || root) must be
// below the publish threshold. Returns true when the work is INSUFFICIENT.
// NOTE(review): hashing raw uint64 bytes makes the result host-endian
// dependent — presumably all peers are little-endian; confirm.
bool paper::work_validate (paper::block_hash const & root_a, uint64_t work_a)
{
uint64_t result;
blake2b_state hash;
blake2b_init (&hash, sizeof (result));
blake2b_update (&hash, reinterpret_cast <uint8_t *> (&work_a), sizeof (work_a));
blake2b_update (&hash, root_a.bytes.data (), root_a.bytes.size ());
blake2b_final (&hash, reinterpret_cast <uint8_t *> (&result), sizeof (result));
return result < paper::block::publish_threshold;
}
// Convenience overload validating a block's own nonce against its root.
bool paper::work_validate (paper::block & block_a)
{
return paper::work_validate (block_a.root (), block_a.block_work ());
}
// Deserialize a block body of a known type from a stream.
// Returns nullptr on malformed input or an unrecognized typecode.
std::unique_ptr <paper::block> paper::deserialize_block (paper::stream & stream_a, paper::block_type type_a)
{
std::unique_ptr <paper::block> result;
switch (type_a)
{
case paper::block_type::receive:
{
bool error;
std::unique_ptr <paper::receive_block> obj (new paper::receive_block (error, stream_a));
if (!error)
{
result = std::move (obj);
}
break;
}
case paper::block_type::send:
{
bool error;
std::unique_ptr <paper::send_block> obj (new paper::send_block (error, stream_a));
if (!error)
{
result = std::move (obj);
}
break;
}
case paper::block_type::open:
{
bool error;
std::unique_ptr <paper::open_block> obj (new paper::open_block (error, stream_a));
if (!error)
{
result = std::move (obj);
}
break;
}
case paper::block_type::change:
{
bool error;
std::unique_ptr <paper::change_block> obj (new paper::change_block (error, stream_a));
if (!error)
{
result = std::move (obj);
}
break;
}
default:
// Unknown typecode: fall through and return nullptr.
break;
}
return result;
}
// Deserialize a block from a JSON property tree, dispatching on the "type"
// field. Returns nullptr for unknown types or malformed input; parse
// exceptions are swallowed deliberately and also yield nullptr.
std::unique_ptr <paper::block> paper::deserialize_block_json (boost::property_tree::ptree const & tree_a)
{
std::unique_ptr <paper::block> result;
try
{
auto type (tree_a.get <std::string> ("type"));
if (type == "receive")
{
bool error;
std::unique_ptr <paper::receive_block> obj (new paper::receive_block (error, tree_a));
if (!error)
{
result = std::move (obj);
}
}
else if (type == "send")
{
bool error;
std::unique_ptr <paper::send_block> obj (new paper::send_block (error, tree_a));
if (!error)
{
result = std::move (obj);
}
}
else if (type == "open")
{
bool error;
std::unique_ptr <paper::open_block> obj (new paper::open_block (error, tree_a));
if (!error)
{
result = std::move (obj);
}
}
else if (type == "change")
{
bool error;
std::unique_ptr <paper::change_block> obj (new paper::change_block (error, tree_a));
if (!error)
{
result = std::move (obj);
}
}
}
catch (std::runtime_error const &)
{
}
return result;
}
// Deserialize a typecode-prefixed block directly from an LMDB value.
std::unique_ptr <paper::block> paper::deserialize_block (MDB_val const & val_a)
{
paper::bufferstream stream (reinterpret_cast <uint8_t const *> (val_a.mv_data), val_a.mv_size);
return deserialize_block (stream);
}
// Read the 8-bit typecode then the body (inverse of serialize_block).
// Returns nullptr on malformed input.
std::unique_ptr <paper::block> paper::deserialize_block (paper::stream & stream_a)
{
paper::block_type type;
auto error (read (stream_a, type));
std::unique_ptr <paper::block> result;
if (!error)
{
result = paper::deserialize_block (stream_a, type);
}
return result;
}
// Construct and sign a send block; the signature covers the content hash.
paper::send_block::send_block (paper::block_hash const & previous_a, paper::account const & destination_a, paper::amount const & balance_a, paper::private_key const & prv_a, paper::public_key const & pub_a, uint64_t work_a) :
hashables (previous_a, destination_a, balance_a),
signature (paper::sign_message (prv_a, pub_a, hash ())),
work (work_a)
{
}
// Construct by reading wire format; error_a set true on short read.
paper::send_block::send_block (bool & error_a, paper::stream & stream_a) :
hashables (error_a, stream_a)
{
if (!error_a)
{
error_a = paper::read (stream_a, signature.bytes);
if (!error_a)
{
error_a = paper::read (stream_a, work);
}
}
}
// Construct from a JSON property tree; error_a set true on parse failure.
paper::send_block::send_block (bool & error_a, boost::property_tree::ptree const & tree_a) :
hashables (error_a, tree_a)
{
if (!error_a)
{
try
{
auto signature_l (tree_a.get <std::string> ("signature"));
auto work_l (tree_a.get <std::string> ("work"));
error_a = signature.decode_hex (signature_l);
if (!error_a)
{
error_a = paper::from_string_hex (work_l, work);
}
}
catch (std::runtime_error const &)
{
error_a = true;
}
}
}
// Polymorphic equality: only equal to another send_block with equal fields.
bool paper::send_block::operator == (paper::block const & other_a) const
{
auto other_l (dynamic_cast <paper::send_block const *> (&other_a));
auto result (other_l != nullptr);
if (result)
{
result = *this == *other_l;
}
return result;
}
std::unique_ptr <paper::block> paper::send_block::clone () const
{
return std::unique_ptr <paper::block> (new paper::send_block (*this));
}
paper::block_type paper::send_block::type () const
{
return paper::block_type::send;
}
// Value equality: all content fields plus work and signature must match.
bool paper::send_block::operator == (paper::send_block const & other_a) const
{
auto result (hashables.destination == other_a.hashables.destination && hashables.previous == other_a.hashables.previous && hashables.balance == other_a.hashables.balance && work == other_a.work && signature == other_a.signature);
return result;
}
paper::block_hash paper::send_block::previous () const
{
return hashables.previous;
}
// Send blocks have no source; report zero.
paper::block_hash paper::send_block::source () const
{
return 0;
}
// Work/root anchor for a send block is its predecessor.
paper::block_hash paper::send_block::root () const
{
return hashables.previous;
}
// Send blocks carry no representative; report zero.
paper::account paper::send_block::representative () const
{
return 0;
}
// Direct construction from field values.
paper::open_hashables::open_hashables (paper::block_hash const & source_a, paper::account const & representative_a, paper::account const & account_a) :
source (source_a),
representative (representative_a),
account (account_a)
{
}
// Construct by reading wire format: source, representative, account, in order.
paper::open_hashables::open_hashables (bool & error_a, paper::stream & stream_a)
{
error_a = paper::read (stream_a, source.bytes);
if (!error_a)
{
error_a = paper::read (stream_a, representative.bytes);
if (!error_a)
{
error_a = paper::read (stream_a, account.bytes);
}
}
}
// Construct from a JSON property tree; source is hex, representative and
// account are base58check (matching serialize_json's output).
paper::open_hashables::open_hashables (bool & error_a, boost::property_tree::ptree const & tree_a)
{
try
{
auto source_l (tree_a.get <std::string> ("source"));
auto representative_l (tree_a.get <std::string> ("representative"));
auto account_l (tree_a.get <std::string> ("account"));
error_a = source.decode_hex (source_l);
if (!error_a)
{
error_a = representative.decode_base58check (representative_l);
if (!error_a)
{
error_a = account.decode_base58check (account_l);
}
}
}
catch (std::runtime_error const &)
{
// Missing keys throw from ptree::get; report as a parse error.
error_a = true;
}
}
// Hash the content fields in wire order.
// NOTE(review): blake2b return codes are unchecked here, unlike
// send_hashables::hash which asserts them.
void paper::open_hashables::hash (blake2b_state & hash_a) const
{
blake2b_update (&hash_a, source.bytes.data (), sizeof (source.bytes));
blake2b_update (&hash_a, representative.bytes.data (), sizeof (representative.bytes));
blake2b_update (&hash_a, account.bytes.data (), sizeof (account.bytes));
}
// Construct and sign an open block; a zero representative is disallowed.
paper::open_block::open_block (paper::block_hash const & source_a, paper::account const & representative_a, paper::account const & account_a, paper::private_key const & prv_a, paper::public_key const & pub_a, uint64_t work_a) :
hashables (source_a, representative_a, account_a),
signature (paper::sign_message (prv_a, pub_a, hash ())),
work (work_a)
{
assert (!representative_a.is_zero ());
}
// Construct an UNSIGNED open block with zero work (signature cleared).
// NOTE(review): presumably used where the signature is filled in later — confirm.
paper::open_block::open_block (paper::block_hash const & source_a, paper::account const & representative_a, paper::account const & account_a, std::nullptr_t) :
hashables (source_a, representative_a, account_a),
work (0)
{
signature.clear ();
}
// Construct by reading wire format; error_a set true on short read.
paper::open_block::open_block (bool & error_a, paper::stream & stream_a) :
hashables (error_a, stream_a)
{
if (!error_a)
{
error_a = paper::read (stream_a, signature);
if (!error_a)
{
error_a = paper::read (stream_a, work);
}
}
}
// Construct from a JSON property tree; error_a set true on parse failure.
paper::open_block::open_block (bool & error_a, boost::property_tree::ptree const & tree_a) :
hashables (error_a, tree_a)
{
if (!error_a)
{
try
{
auto work_l (tree_a.get <std::string> ("work"));
auto signature_l (tree_a.get <std::string> ("signature"));
error_a = paper::from_string_hex (work_l, work);
if (!error_a)
{
error_a = signature.decode_hex (signature_l);
}
}
catch (std::runtime_error const &)
{
error_a = true;
}
}
}
// Fold this block's hashable fields into an ongoing blake2b state.
void paper::open_block::hash (blake2b_state & hash_a) const
{
hashables.hash (hash_a);
}
// Proof-of-work nonce accessor.
uint64_t paper::open_block::block_work () const
{
return work;
}
// Install a proof-of-work nonce; debug builds verify it meets the threshold.
void paper::open_block::block_work_set (uint64_t work_a)
{
assert (!paper::work_validate (root (), work_a));
work = work_a;
}
// An open block starts an account chain, so it has no predecessor.
paper::block_hash paper::open_block::previous () const
{
paper::block_hash result (0);
return result;
}
// Write wire format: source, representative, account, signature, work.
void paper::open_block::serialize (paper::stream & stream_a) const
{
write (stream_a, hashables.source);
write (stream_a, hashables.representative);
write (stream_a, hashables.account);
write (stream_a, signature);
write (stream_a, work);
}
// Emit JSON: source as hex, representative/account as base58check.
void paper::open_block::serialize_json (std::string & string_a) const
{
boost::property_tree::ptree tree;
tree.put ("type", "open");
tree.put ("source", hashables.source.to_string ());
tree.put ("representative", representative ().to_base58check ());
tree.put ("account", hashables.account.to_base58check ());
std::string signature_l;
signature.encode_hex (signature_l);
tree.put ("work", paper::to_string_hex (work));
tree.put ("signature", signature_l);
std::stringstream ostream;
boost::property_tree::write_json (ostream, tree);
string_a = ostream.str ();
}
// Read wire fields in serialization order: source, representative, account,
// signature, work. Returns true on failure; short-circuits at the first
// failed read so later fields are left untouched.
bool paper::open_block::deserialize (paper::stream & stream_a)
{
auto error (read (stream_a, hashables.source));
error = error || read (stream_a, hashables.representative);
error = error || read (stream_a, hashables.account);
error = error || read (stream_a, signature);
error = error || read (stream_a, work);
return error;
}
// Parse an open block from a JSON property tree (inverse of serialize_json).
// Returns true on error.
bool paper::open_block::deserialize_json (boost::property_tree::ptree const & tree_a)
{
auto result (false);
try
{
assert (tree_a.get <std::string> ("type") == "open");
auto source_l (tree_a.get <std::string> ("source"));
auto representative_l (tree_a.get <std::string> ("representative"));
auto account_l (tree_a.get <std::string> ("account"));
auto work_l (tree_a.get <std::string> ("work"));
auto signature_l (tree_a.get <std::string> ("signature"));
result = hashables.source.decode_hex (source_l);
if (!result)
{
// serialize_json emits representative and account base58check-encoded
// (and the ptree-based open_hashables constructor decodes them that
// way), so decode base58check here too; decode_hex broke the JSON
// round trip.
result = hashables.representative.decode_base58check (representative_l);
if (!result)
{
result = hashables.account.decode_base58check (account_l);
if (!result)
{
result = paper::from_string_hex (work_l, work);
if (!result)
{
result = signature.decode_hex (signature_l);
}
}
}
}
}
catch (std::runtime_error const &)
{
// ptree::get throws on missing keys; treat as a parse error.
result = true;
}
return result;
}
// Double-dispatch hook for block visitors.
void paper::open_block::visit (paper::block_visitor & visitor_a) const
{
visitor_a.open_block (*this);
}
std::unique_ptr <paper::block> paper::open_block::clone () const
{
return std::unique_ptr <paper::block> (new paper::open_block (*this));
}
paper::block_type paper::open_block::type () const
{
return paper::block_type::open;
}
// Polymorphic equality: only equal to another open_block with equal fields.
bool paper::open_block::operator == (paper::block const & other_a) const
{
auto other_l (dynamic_cast <paper::open_block const *> (&other_a));
auto result (other_l != nullptr);
if (result)
{
result = *this == *other_l;
}
return result;
}
// Value equality: all content fields plus work and signature must match.
bool paper::open_block::operator == (paper::open_block const & other_a) const
{
return hashables.source == other_a.hashables.source && hashables.representative == other_a.hashables.representative && hashables.account == other_a.hashables.account && work == other_a.work && signature == other_a.signature;
}
// The send block this open claims funds from.
paper::block_hash paper::open_block::source () const
{
return hashables.source;
}
// Work/root anchor for an open block is the account itself (no predecessor).
paper::block_hash paper::open_block::root () const
{
return hashables.account;
}
paper::account paper::open_block::representative () const
{
return hashables.representative;
}
// Direct construction from field values.
paper::change_hashables::change_hashables (paper::block_hash const & previous_a, paper::account const & representative_a) :
previous (previous_a),
representative (representative_a)
{
}
// Construct by reading wire format: previous then representative.
paper::change_hashables::change_hashables (bool & error_a, paper::stream & stream_a)
{
error_a = paper::read (stream_a, previous);
if (!error_a)
{
error_a = paper::read (stream_a, representative);
}
}
// Construct from a JSON property tree; previous is hex, representative is
// base58check (matching serialize_json's output).
paper::change_hashables::change_hashables (bool & error_a, boost::property_tree::ptree const & tree_a)
{
try
{
auto previous_l (tree_a.get <std::string> ("previous"));
auto representative_l (tree_a.get <std::string> ("representative"));
error_a = previous.decode_hex (previous_l);
if (!error_a)
{
error_a = representative.decode_base58check (representative_l);
}
}
catch (std::runtime_error const &)
{
// Missing keys throw from ptree::get; report as a parse error.
error_a = true;
}
}
// Hash the content fields in wire order.
// NOTE(review): blake2b return codes are unchecked here, unlike
// send_hashables::hash which asserts them.
void paper::change_hashables::hash (blake2b_state & hash_a) const
{
blake2b_update (&hash_a, previous.bytes.data (), sizeof (previous.bytes));
blake2b_update (&hash_a, representative.bytes.data (), sizeof (representative.bytes));
}
// Construct and sign a change (representative) block.
paper::change_block::change_block (paper::block_hash const & previous_a, paper::account const & representative_a, paper::private_key const & prv_a, paper::public_key const & pub_a, uint64_t work_a) :
hashables (previous_a, representative_a),
signature (paper::sign_message (prv_a, pub_a, hash ())),
work (work_a)
{
}
// Construct by reading wire format; error_a set true on short read.
paper::change_block::change_block (bool & error_a, paper::stream & stream_a) :
hashables (error_a, stream_a)
{
if (!error_a)
{
error_a = paper::read (stream_a, signature);
if (!error_a)
{
error_a = paper::read (stream_a, work);
}
}
}
// Construct from a JSON property tree; error_a set true on parse failure.
paper::change_block::change_block (bool & error_a, boost::property_tree::ptree const & tree_a) :
hashables (error_a, tree_a)
{
if (!error_a)
{
try
{
auto work_l (tree_a.get <std::string> ("work"));
auto signature_l (tree_a.get <std::string> ("signature"));
error_a = paper::from_string_hex (work_l, work);
if (!error_a)
{
error_a = signature.decode_hex (signature_l);
}
}
catch (std::runtime_error const &)
{
error_a = true;
}
}
}
// Fold this block's hashable fields into an ongoing blake2b state.
void paper::change_block::hash (blake2b_state & hash_a) const
{
hashables.hash (hash_a);
}
// Proof-of-work nonce accessor.
uint64_t paper::change_block::block_work () const
{
return work;
}
// Install a proof-of-work nonce; debug builds verify it meets the threshold.
void paper::change_block::block_work_set (uint64_t work_a)
{
assert (!paper::work_validate (root (), work_a));
work = work_a;
}
paper::block_hash paper::change_block::previous () const
{
return hashables.previous;
}
// Write wire format: previous, representative, signature, work.
void paper::change_block::serialize (paper::stream & stream_a) const
{
write (stream_a, hashables.previous);
write (stream_a, hashables.representative);
write (stream_a, signature);
write (stream_a, work);
}
// Emit JSON: previous as hex, representative as base58check.
void paper::change_block::serialize_json (std::string & string_a) const
{
boost::property_tree::ptree tree;
tree.put ("type", "change");
tree.put ("previous", hashables.previous.to_string ());
tree.put ("representative", representative ().to_base58check ());
tree.put ("work", paper::to_string_hex (work));
std::string signature_l;
signature.encode_hex (signature_l);
tree.put ("signature", signature_l);
std::stringstream ostream;
boost::property_tree::write_json (ostream, tree);
string_a = ostream.str ();
}
// Read wire fields in serialization order: previous, representative,
// signature, work. Returns true on failure; short-circuits at the first
// failed read so later fields are left untouched.
bool paper::change_block::deserialize (paper::stream & stream_a)
{
auto error (read (stream_a, hashables.previous));
error = error || read (stream_a, hashables.representative);
error = error || read (stream_a, signature);
error = error || read (stream_a, work);
return error;
}
// Parse a change block from a JSON property tree (inverse of serialize_json).
// Returns true on error.
bool paper::change_block::deserialize_json (boost::property_tree::ptree const & tree_a)
{
auto result (false);
try
{
assert (tree_a.get <std::string> ("type") == "change");
auto previous_l (tree_a.get <std::string> ("previous"));
auto representative_l (tree_a.get <std::string> ("representative"));
auto work_l (tree_a.get <std::string> ("work"));
auto signature_l (tree_a.get <std::string> ("signature"));
result = hashables.previous.decode_hex (previous_l);
if (!result)
{
// serialize_json emits the representative base58check-encoded (and the
// ptree-based change_hashables constructor decodes it that way), so
// decode base58check here too; decode_hex broke the JSON round trip.
result = hashables.representative.decode_base58check (representative_l);
if (!result)
{
result = paper::from_string_hex (work_l, work);
if (!result)
{
result = signature.decode_hex (signature_l);
}
}
}
}
catch (std::runtime_error const &)
{
// ptree::get throws on missing keys; treat as a parse error.
result = true;
}
return result;
}
// Double-dispatch hook for block visitors.
void paper::change_block::visit (paper::block_visitor & visitor_a) const
{
visitor_a.change_block (*this);
}
std::unique_ptr <paper::block> paper::change_block::clone () const
{
return std::unique_ptr <paper::block> (new paper::change_block (*this));
}
paper::block_type paper::change_block::type () const
{
return paper::block_type::change;
}
// Polymorphic equality: only equal to another change_block with equal fields.
bool paper::change_block::operator == (paper::block const & other_a) const
{
auto other_l (dynamic_cast <paper::change_block const *> (&other_a));
auto result (other_l != nullptr);
if (result)
{
result = *this == *other_l;
}
return result;
}
// Value equality: all content fields plus work and signature must match.
bool paper::change_block::operator == (paper::change_block const & other_a) const
{
return hashables.previous == other_a.hashables.previous && hashables.representative == other_a.hashables.representative && work == other_a.work && signature == other_a.signature;
}
// Change blocks transfer no funds; no source.
paper::block_hash paper::change_block::source () const
{
return 0;
}
// Work/root anchor for a change block is its predecessor.
paper::block_hash paper::change_block::root () const
{
return hashables.previous;
}
paper::account paper::change_block::representative () const
{
return hashables.representative;
}
// Zero-initialized account metadata record.
paper::account_info::account_info () :
head (0),
rep_block (0),
balance (0),
modified (0)
{
}
// Reconstruct from a raw LMDB value by byte copy; the static_assert
// guarantees the class layout has no padding, so this is safe.
paper::account_info::account_info (MDB_val const & val_a)
{
assert (val_a.mv_size == sizeof (*this));
static_assert (sizeof (head) + sizeof (rep_block) + sizeof (balance) + sizeof (modified) == sizeof (*this), "Class not packed");
std::copy (reinterpret_cast <uint8_t const *> (val_a.mv_data), reinterpret_cast <uint8_t const *> (val_a.mv_data) + sizeof (*this), reinterpret_cast <uint8_t *> (this));
}
// Direct construction; the trailing bool only disambiguates the overload.
paper::account_info::account_info (paper::block_hash const & head_a, paper::account const & rep_block_a, paper::amount const & balance_a, uint64_t modified_a, bool) :
head (head_a),
rep_block (rep_block_a),
balance (balance_a),
modified (modified_a)
{
}
// Write fields in a fixed order: head, rep_block, balance, modified.
void paper::account_info::serialize (paper::stream & stream_a) const
{
write (stream_a, head.bytes);
write (stream_a, rep_block.bytes);
write (stream_a, balance.bytes);
write (stream_a, modified);
}
// Read fields in serialization order; returns true on failure.
bool paper::account_info::deserialize (paper::stream & stream_a)
{
auto result (read (stream_a, head.bytes));
if (!result)
{
result = read (stream_a, rep_block.bytes);
if (!result)
{
result = read (stream_a, balance.bytes);
if (!result)
{
result = read (stream_a, modified);
}
}
}
return result;
}
bool paper::account_info::operator == (paper::account_info const & other_a) const
{
return head == other_a.head && rep_block == other_a.rep_block && balance == other_a.balance && modified == other_a.modified;
}
bool paper::account_info::operator != (paper::account_info const & other_a) const
{
return ! (*this == other_a);
}
// View this record as an LMDB value (no copy; lifetime tied to *this).
paper::mdb_val paper::account_info::val () const
{
return paper::mdb_val (sizeof (*this), const_cast <paper::account_info *> (this));
}
// A key/value pair of MDB_vals; starts cleared (empty/end state).
paper::store_entry::store_entry ()
{
clear ();
}
// Reset both key and value to empty MDB_vals.
void paper::store_entry::clear ()
{
first = {0, nullptr};
second = {0, nullptr};
}
// Allow entry->first / entry->second access through the iterator.
paper::store_entry * paper::store_entry::operator -> ()
{
return this;
}
paper::store_entry & paper::store_iterator::operator -> ()
{
return current;
}
// Open a cursor positioned at the first record of the table, or at the end
// sentinel (cleared entry) when the table is empty.
paper::store_iterator::store_iterator (MDB_txn * transaction_a, MDB_dbi db_a) :
cursor (nullptr)
{
    auto status (mdb_cursor_open (transaction_a, db_a, &cursor));
    assert (status == 0);
    auto status2 (mdb_cursor_get (cursor, &current.first, &current.second, MDB_FIRST));
    assert (status2 == 0 || status2 == MDB_NOTFOUND);
    if (status2 != MDB_NOTFOUND)
    {
        // Re-fetch at the current position so both key and value point into
        // the database's memory map.
        auto status3 (mdb_cursor_get (cursor, &current.first, &current.second, MDB_GET_CURRENT));
        assert (status3 == 0 || status3 == MDB_NOTFOUND);
    }
    else
    {
        current.clear ();
    }
}
// Construct the end sentinel: no cursor, cleared entry (via store_entry's
// default constructor).
paper::store_iterator::store_iterator (std::nullptr_t) :
cursor (nullptr)
{
}
// Open a cursor positioned at the first record whose key is >= val_a
// (MDB_SET_RANGE), or at the end sentinel when no such record exists.
paper::store_iterator::store_iterator (MDB_txn * transaction_a, MDB_dbi db_a, MDB_val const & val_a) :
cursor (nullptr)
{
    auto status (mdb_cursor_open (transaction_a, db_a, &cursor));
    assert (status == 0);
    current.first = val_a;
    auto status2 (mdb_cursor_get (cursor, &current.first, &current.second, MDB_SET_RANGE));
    assert (status2 == 0 || status2 == MDB_NOTFOUND);
    if (status2 != MDB_NOTFOUND)
    {
        // Re-fetch so current.first refers to the found key rather than the
        // caller-supplied search key.
        auto status3 (mdb_cursor_get (cursor, &current.first, &current.second, MDB_GET_CURRENT));
        assert (status3 == 0 || status3 == MDB_NOTFOUND);
    }
    else
    {
        current.clear ();
    }
}
// Move constructor: steal the cursor and current entry.  Clearing the source
// entry makes the moved-from iterator compare equal to the end sentinel,
// matching the behavior of the move-assignment operator below; previously the
// source kept a stale copy of the entry and still compared equal to *this.
paper::store_iterator::store_iterator (paper::store_iterator && other_a)
{
    cursor = other_a.cursor;
    other_a.cursor = nullptr;
    current = other_a.current;
    other_a.current.clear ();
}
// Close the LMDB cursor if this iterator still owns one (moved-from and
// end-sentinel iterators have cursor == nullptr).
paper::store_iterator::~store_iterator ()
{
    if (cursor != nullptr)
    {
        mdb_cursor_close (cursor);
    }
}
// Advance to the next record; becomes the end sentinel (cleared entry) when
// the table is exhausted.  Must not be called on an end/moved-from iterator.
paper::store_iterator & paper::store_iterator::operator ++ ()
{
    assert (cursor != nullptr);
    auto status (mdb_cursor_get (cursor, &current.first, &current.second, MDB_NEXT));
    if (status == MDB_NOTFOUND)
    {
        current.clear ();
    }
    return *this;
}
// Move assignment: release our own cursor, steal the other's, and leave the
// source in the end-sentinel state.
paper::store_iterator & paper::store_iterator::operator = (paper::store_iterator && other_a)
{
    if (cursor != nullptr)
    {
        mdb_cursor_close (cursor);
    }
    cursor = other_a.cursor;
    other_a.cursor = nullptr;
    current = other_a.current;
    other_a.current.clear ();
    return *this;
}
// Iterators are considered equal when their key data pointers match; two end
// sentinels match because both hold nullptr.  The asserts check that pointer
// equality implies the remaining fields agree as well.
bool paper::store_iterator::operator == (paper::store_iterator const & other_a) const
{
    auto result (current.first.mv_data == other_a.current.first.mv_data);
    assert (!result || (current.first.mv_size == other_a.current.first.mv_size));
    assert (!result || (current.second.mv_data == other_a.current.second.mv_data));
    assert (!result || (current.second.mv_size == other_a.current.second.mv_size));
    return result;
}
// Inequality in terms of equality.
bool paper::store_iterator::operator != (paper::store_iterator const & other_a) const
{
    auto equal (*this == other_a);
    return !equal;
}
// Open (creating if necessary) every LMDB table used by the node inside one
// write transaction.  error_a is both an input (skip everything if the
// environment already failed) and an output (set on the first table that
// fails to open).  On success the checksum table is seeded with a zero entry.
paper::block_store::block_store (bool & error_a, boost::filesystem::path const & path_a) :
environment (error_a, path_a),
frontiers (0),
accounts (0),
send_blocks (0),
receive_blocks (0),
open_blocks (0),
change_blocks (0),
pending (0),
representation (0),
unchecked (0),
unsynced (0),
stack (0),
checksum (0)
{
    if (!error_a)
    {
        paper::transaction transaction (environment, nullptr, true);
        // Short-circuit: once error_a is true the remaining opens are skipped.
        error_a = error_a || mdb_dbi_open (transaction, "frontiers", MDB_CREATE, &frontiers) != 0;
        error_a = error_a || mdb_dbi_open (transaction, "accounts", MDB_CREATE, &accounts) != 0;
        error_a = error_a || mdb_dbi_open (transaction, "send", MDB_CREATE, &send_blocks) != 0;
        error_a = error_a || mdb_dbi_open (transaction, "receive", MDB_CREATE, &receive_blocks) != 0;
        error_a = error_a || mdb_dbi_open (transaction, "open", MDB_CREATE, &open_blocks) != 0;
        error_a = error_a || mdb_dbi_open (transaction, "change", MDB_CREATE, &change_blocks) != 0;
        error_a = error_a || mdb_dbi_open (transaction, "pending", MDB_CREATE, &pending) != 0;
        error_a = error_a || mdb_dbi_open (transaction, "representation", MDB_CREATE, &representation) != 0;
        error_a = error_a || mdb_dbi_open (transaction, "unchecked", MDB_CREATE, &unchecked) != 0;
        error_a = error_a || mdb_dbi_open (transaction, "unsynced", MDB_CREATE, &unsynced) != 0;
        error_a = error_a || mdb_dbi_open (transaction, "stack", MDB_CREATE, &stack) != 0;
        error_a = error_a || mdb_dbi_open (transaction, "checksum", MDB_CREATE, &checksum) != 0;
        if (!error_a)
        {
            checksum_put (transaction, 0, 0, 0);
        }
    }
}
// Delete every record in the given table (mdb_drop with del == 0 empties the
// database without closing its handle).
void paper::block_store::clear (MDB_dbi db_a)
{
    paper::transaction transaction (environment, nullptr, true);
    auto status (mdb_drop (transaction, db_a, 0));
    assert (status == 0);
}
namespace
{
// Fill in our predecessors
// Visitor that records a block's hash into the trailing "successor" field of
// its predecessor's stored record, maintaining the forward link used by
// block_successor ().  Assumes every stored block record ends with a
// block_hash-sized successor slot (see block_put, which appends it).
class set_predecessor : public paper::block_visitor
{
public:
    set_predecessor (MDB_txn * transaction_a, paper::block_store & store_a) :
    transaction (transaction_a),
    store (store_a)
    {
    }
    // Rewrite the predecessor's record with this block's hash spliced into
    // the last hash-sized bytes, then store it back under the same key.
    void fill_value (paper::block const & block_a)
    {
        auto hash (block_a.hash ());
        paper::block_type type;
        auto value (store.block_get_raw (transaction, block_a.previous (), type));
        assert (value.mv_size != 0);
        std::vector <uint8_t> data (static_cast <uint8_t *> (value.mv_data), static_cast <uint8_t *> (value.mv_data) + value.mv_size);
        std::copy (hash.bytes.begin (), hash.bytes.end (), data.end () - hash.bytes.size ());
        store.block_put_raw (transaction, store.block_database (type), block_a.previous (), paper::mdb_val (data.size (), data.data()));
    }
    void send_block (paper::send_block const & block_a) override
    {
        fill_value (block_a);
    }
    void receive_block (paper::receive_block const & block_a) override
    {
        fill_value (block_a);
    }
    void open_block (paper::open_block const & block_a) override
    {
        // Open blocks don't have a predecessor
    }
    void change_block (paper::change_block const & block_a) override
    {
        fill_value (block_a);
    }
    MDB_txn * transaction;
    paper::block_store & store;
};
}
// Map a block type to the LMDB table that stores blocks of that type.
// Asserts on any type without a dedicated table.
MDB_dbi paper::block_store::block_database (paper::block_type type_a)
{
    MDB_dbi result;
    switch (type_a)
    {
        case paper::block_type::send:
            result = send_blocks;
            break;
        case paper::block_type::receive:
            result = receive_blocks;
            break;
        case paper::block_type::open:
            result = open_blocks;
            break;
        case paper::block_type::change:
            result = change_blocks;
            break;
        default:
            // Unknown/invalid type: result is left uninitialized in release
            // builds, so callers must only pass storable block types.
            assert(false);
            break;
    }
    return result;
}
// Store a pre-serialized block record (including its successor suffix) under
// its hash in the given per-type table.
void paper::block_store::block_put_raw (MDB_txn * transaction_a, MDB_dbi database_a, paper::block_hash const & hash_a, MDB_val value_a)
{
    auto status2 (mdb_put (transaction_a, database_a, hash_a.val (), &value_a, 0));
    assert (status2 == 0);
}
// Store a block: serialize it, append a zeroed successor slot, write it to
// the table for its type, then patch this block's hash into its
// predecessor's successor slot so forward traversal works.
void paper::block_store::block_put (MDB_txn * transaction_a, paper::block_hash const & hash_a, paper::block const & block_a)
{
    std::vector <uint8_t> vector;
    {
        paper::vectorstream stream (vector);
        block_a.serialize (stream);
        // Reserve the trailing successor field; filled in later when the
        // next block in the chain is stored (see set_predecessor).
        paper::block_hash successor (0);
        paper::write (stream, successor.bytes);
    }
    block_put_raw (transaction_a, block_database (block_a.type ()), hash_a, {vector.size (), vector.data ()});
    set_predecessor predecessor (transaction_a, *this);
    block_a.visit (predecessor);
    assert (block_a.previous ().is_zero () || block_successor (transaction_a, block_a.previous ()) == hash_a);
}
// Fetch a block's raw stored record by probing each per-type table in a
// fixed order (send, receive, open, change); the first hit determines
// type_a.  When no table holds the hash, a zero-sized value is returned and
// type_a is left untouched.
MDB_val paper::block_store::block_get_raw (MDB_txn * transaction_a, paper::block_hash const & hash_a, paper::block_type & type_a)
{
    MDB_val result {0, nullptr};
    auto status (mdb_get (transaction_a, send_blocks, hash_a.val (), &result));
    assert (status == 0 || status == MDB_NOTFOUND);
    if (status == 0)
    {
        type_a = paper::block_type::send;
        return result;
    }
    status = mdb_get (transaction_a, receive_blocks, hash_a.val (), &result);
    assert (status == 0 || status == MDB_NOTFOUND);
    if (status == 0)
    {
        type_a = paper::block_type::receive;
        return result;
    }
    status = mdb_get (transaction_a, open_blocks, hash_a.val (), &result);
    assert (status == 0 || status == MDB_NOTFOUND);
    if (status == 0)
    {
        type_a = paper::block_type::open;
        return result;
    }
    status = mdb_get (transaction_a, change_blocks, hash_a.val (), &result);
    assert (status == 0 || status == MDB_NOTFOUND);
    if (status == 0)
    {
        type_a = paper::block_type::change;
    }
    return result;
}
// Read the successor hash stored in the trailing bytes of a block's record
// (written by set_predecessor).  Returns a cleared hash when the block is
// not stored.
paper::block_hash paper::block_store::block_successor (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
    paper::block_type type;
    auto value (block_get_raw (transaction_a, hash_a, type));
    paper::block_hash result;
    if (value.mv_size != 0)
    {
        assert (value.mv_size >= result.bytes.size ());
        // The successor occupies the last hash-sized bytes of the record.
        paper::bufferstream stream (reinterpret_cast <uint8_t const *> (value.mv_data) + value.mv_size - result.bytes.size (), result.bytes.size ());
        auto error (paper::read (stream, result.bytes));
        assert (!error);
    }
    else
    {
        result.clear ();
    }
    return result;
}
// Deserialize a stored block by hash.  Returns nullptr when the hash is not
// present in any block table.  The trailing successor bytes are simply left
// unread by the typed deserializer.
std::unique_ptr <paper::block> paper::block_store::block_get (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
    paper::block_type type;
    auto value (block_get_raw (transaction_a, hash_a, type));
    std::unique_ptr <paper::block> result;
    if (value.mv_size != 0)
    {
        paper::bufferstream stream (reinterpret_cast <uint8_t const *> (value.mv_data), value.mv_size);
        result = paper::deserialize_block (stream, type);
        assert (result != nullptr);
    }
    return result;
}
// Delete a block by hash, trying each per-type table in order until one
// succeeds.  The final table's delete must succeed, i.e. the hash is
// expected to exist in exactly one table.
void paper::block_store::block_del (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
    auto status (mdb_del (transaction_a, send_blocks, hash_a.val (), nullptr));
    assert (status == 0 || status == MDB_NOTFOUND);
    if (status != 0)
    {
        auto status (mdb_del (transaction_a, receive_blocks, hash_a.val (), nullptr));
        assert (status == 0 || status == MDB_NOTFOUND);
        if (status != 0)
        {
            auto status (mdb_del (transaction_a, open_blocks, hash_a.val (), nullptr));
            assert (status == 0 || status == MDB_NOTFOUND);
            if (status != 0)
            {
                auto status (mdb_del (transaction_a, change_blocks, hash_a.val (), nullptr));
                assert (status == 0);
            }
        }
    }
}
// A block exists when its hash appears in any of the four per-type block
// tables; the tables are probed in the same order block_get_raw uses.
bool paper::block_store::block_exists (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
    MDB_val junk;
    MDB_dbi databases [4] = {send_blocks, receive_blocks, open_blocks, change_blocks};
    auto result (false);
    for (size_t i (0); !result && i < 4; ++i)
    {
        auto status (mdb_get (transaction_a, databases [i], hash_a.val (), &junk));
        assert (status == 0 || status == MDB_NOTFOUND);
        result = status == 0;
    }
    return result;
}
// Remove an account record; the account must exist.
void paper::block_store::account_del (MDB_txn * transaction_a, paper::account const & account_a)
{
    auto status (mdb_del (transaction_a, accounts, account_a.val (), nullptr));
    assert (status == 0);
}
// Check for an account via a range scan: latest_begin seeks to the first key
// >= account_a, so an exact-match test on the found key is required.
bool paper::block_store::account_exists (paper::account const & account_a)
{
    paper::transaction transaction (environment, nullptr, false);
    auto iterator (latest_begin (transaction, account_a));
    return iterator != paper::store_iterator (nullptr) && paper::account (iterator->first) == account_a;
}
// Look an account up in the accounts table.  Returns true when the account
// is absent (matching the error convention used throughout this file); on
// success, info_a is filled from the stored record.
bool paper::block_store::account_get (MDB_txn * transaction_a, paper::account const & account_a, paper::account_info & info_a)
{
    MDB_val value;
    auto status (mdb_get (transaction_a, accounts, account_a.val (), &value));
    assert (status == 0 || status == MDB_NOTFOUND);
    if (status == MDB_NOTFOUND)
    {
        return true;
    }
    paper::bufferstream stream (reinterpret_cast <uint8_t const *> (value.mv_data), value.mv_size);
    auto result (info_a.deserialize (stream));
    assert (!result);
    return result;
}
// Record which account owns a frontier (head) block hash.
void paper::block_store::frontier_put (MDB_txn * transaction_a, paper::block_hash const & block_a, paper::account const & account_a)
{
    auto status (mdb_put (transaction_a, frontiers, block_a.val (), account_a.val (), 0));
    assert (status == 0);
}
// Look up the account that owns the given frontier hash; returns the zero
// account when the hash is not a known frontier.
paper::account paper::block_store::frontier_get (MDB_txn * transaction_a, paper::block_hash const & block_a)
{
    MDB_val value;
    auto status (mdb_get (transaction_a, frontiers, block_a.val (), &value));
    assert (status == 0 || status == MDB_NOTFOUND);
    paper::account result (0);
    if (status == 0)
    {
        result = value;
    }
    return result;
}
// Remove a frontier mapping; the hash must be present.
void paper::block_store::frontier_del (MDB_txn * transaction_a, paper::block_hash const & block_a)
{
    auto status (mdb_del (transaction_a, frontiers, block_a.val (), nullptr));
    assert (status == 0);
}
// Store an account record keyed by account number.  The record is written
// from the explicit serialization so the stored bytes are defined by
// serialize () rather than by struct layout; the account_info constructor's
// static_assert guarantees both layouts are identical, so this change is
// byte-compatible with previously stored records.  (Previously the
// serialized vector was built and then discarded in favor of info_a.val ().)
void paper::block_store::account_put (MDB_txn * transaction_a, paper::account const & account_a, paper::account_info const & info_a)
{
    std::vector <uint8_t> vector;
    {
        paper::vectorstream stream (vector);
        info_a.serialize (stream);
    }
    auto status (mdb_put (transaction_a, accounts, account_a.val (), paper::mdb_val (vector.size (), vector.data ()), 0));
    assert (status == 0);
}
// Store a pending (receivable) record keyed by the send block's hash.  The
// record is written from the explicit serialization rather than the packed
// struct view; the receivable MDB_val constructor's static_assert guarantees
// the layouts match, so stored bytes are unchanged.  (Previously the
// serialized vector was built and then discarded in favor of
// receivable_a.val ().)
void paper::block_store::pending_put (MDB_txn * transaction_a, paper::block_hash const & hash_a, paper::receivable const & receivable_a)
{
    std::vector <uint8_t> vector;
    {
        paper::vectorstream stream (vector);
        paper::write (stream, receivable_a.source);
        paper::write (stream, receivable_a.amount);
        paper::write (stream, receivable_a.destination);
    }
    auto status (mdb_put (transaction_a, pending, hash_a.val (), paper::mdb_val (vector.size (), vector.data ()), 0));
    assert (status == 0);
}
// Remove a pending record; the hash must be present.
void paper::block_store::pending_del (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
    auto status (mdb_del (transaction_a, pending, hash_a.val (), nullptr));
    assert (status == 0);
}
// Existence check via range seek: pending_begin positions at the first key
// >= hash_a, so an exact-match comparison is needed.
bool paper::block_store::pending_exists (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
    auto iterator (pending_begin (transaction_a, hash_a));
    return iterator != paper::store_iterator (nullptr) && paper::block_hash (iterator->first) == hash_a;
}
// Fetch a pending record by send-block hash.  Returns true when absent;
// otherwise fills receivable_a field-by-field (source, amount, destination —
// the same order pending_put writes).
bool paper::block_store::pending_get (MDB_txn * transaction_a, paper::block_hash const & hash_a, paper::receivable & receivable_a)
{
    MDB_val value;
    auto status (mdb_get (transaction_a, pending, hash_a.val (), &value));
    assert (status == 0 || status == MDB_NOTFOUND);
    bool result;
    if (status == MDB_NOTFOUND)
    {
        result = true;
    }
    else
    {
        result = false;
        assert (value.mv_size == sizeof (receivable_a.source.bytes) + sizeof (receivable_a.amount.bytes) + sizeof (receivable_a.destination.bytes));
        paper::bufferstream stream (reinterpret_cast <uint8_t const *> (value.mv_data), value.mv_size);
        auto error1 (paper::read (stream, receivable_a.source));
        assert (!error1);
        auto error2 (paper::read (stream, receivable_a.amount));
        assert (!error2);
        auto error3 (paper::read (stream, receivable_a.destination));
        assert (!error3);
    }
    return result;
}
// Iterator over pending records starting at the first key >= hash_a.
paper::store_iterator paper::block_store::pending_begin (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
    return paper::store_iterator (transaction_a, pending, hash_a.val ());
}
// Iterator over all pending records from the start of the table.
paper::store_iterator paper::block_store::pending_begin (MDB_txn * transaction_a)
{
    return paper::store_iterator (transaction_a, pending);
}
// End sentinel for pending iteration.
paper::store_iterator paper::block_store::pending_end ()
{
    return paper::store_iterator (nullptr);
}
// Default-construct an empty (all-zero) receivable record.
paper::receivable::receivable () :
source (0),
amount (0),
destination (0)
{
}
// Reconstruct a receivable directly from its stored bytes.  Safe only
// because the class is packed with no padding, as the static_assert checks.
paper::receivable::receivable (MDB_val const & val_a)
{
    assert(val_a.mv_size == sizeof (*this));
    static_assert (sizeof (source) + sizeof (amount) + sizeof (destination) == sizeof (*this), "Packed class");
    std::copy (reinterpret_cast <uint8_t const *> (val_a.mv_data), reinterpret_cast <uint8_t const *> (val_a.mv_data) + sizeof (*this), reinterpret_cast <uint8_t *> (this));
}
// Construct a receivable from its fields: sending account, amount sent, and
// the account entitled to receive it.
paper::receivable::receivable (paper::account const & source_a, paper::amount const & amount_a, paper::account const & destination_a) :
source (source_a),
amount (amount_a),
destination (destination_a)
{
}
// Write the record; field order (source, amount, destination) defines the
// on-disk layout and must match deserialize () and pending_put/pending_get.
void paper::receivable::serialize (paper::stream & stream_a) const
{
    paper::write (stream_a, source.bytes);
    paper::write (stream_a, amount.bytes);
    paper::write (stream_a, destination.bytes);
}
// Read fields in the order serialize () wrote them, stopping at the first
// failure.  Returns true on error, false on success.
bool paper::receivable::deserialize (paper::stream & stream_a)
{
    auto error (paper::read (stream_a, source.bytes));
    if (!error)
    {
        error = paper::read (stream_a, amount.bytes);
    }
    if (!error)
    {
        error = paper::read (stream_a, destination.bytes);
    }
    return error;
}
// Records are equal when all three fields match.
bool paper::receivable::operator == (paper::receivable const & other_a) const
{
    auto sources_equal (source == other_a.source);
    auto amounts_equal (amount == other_a.amount);
    auto destinations_equal (destination == other_a.destination);
    return sources_equal && amounts_equal && destinations_equal;
}
// Zero-copy view of the packed record as an LMDB value; valid only while
// this object is alive.
paper::mdb_val paper::receivable::val () const
{
    return paper::mdb_val (sizeof (*this), const_cast <paper::receivable *> (this));
}
// Read the voting weight delegated to a representative account; 0 when the
// account has no representation record.
paper::uint128_t paper::block_store::representation_get (MDB_txn * transaction_a, paper::account const & account_a)
{
    MDB_val value;
    auto status (mdb_get (transaction_a, representation, account_a.val (), &value));
    assert (status == 0 || status == MDB_NOTFOUND);
    paper::uint128_t result;
    if (status == 0)
    {
        paper::uint128_union rep;
        paper::bufferstream stream (reinterpret_cast <uint8_t const *> (value.mv_data), value.mv_size);
        auto error (paper::read (stream, rep));
        assert (!error);
        result = rep.number ();
    }
    else
    {
        result = 0;
    }
    return result;
}
// Store (overwrite) the voting weight delegated to a representative.
void paper::block_store::representation_put (MDB_txn * transaction_a, paper::account const & account_a, paper::uint128_t const & representation_a)
{
    paper::uint128_union rep (representation_a);
    auto status (mdb_put (transaction_a, representation, account_a.val (), rep.val (), 0));
    assert (status == 0);
}
// Store a block whose dependencies are not yet satisfied, keyed by hash.
// Serialized with a type prefix (serialize_block) so unchecked_get can
// deserialize without knowing the type up front.
void paper::block_store::unchecked_put (MDB_txn * transaction_a, paper::block_hash const & hash_a, paper::block const & block_a)
{
    std::vector <uint8_t> vector;
    {
        paper::vectorstream stream (vector);
        paper::serialize_block (stream, block_a);
    }
    auto status (mdb_put (transaction_a, unchecked, hash_a.val (), paper::mdb_val (vector.size (), vector.data ()), 0));
    assert (status == 0);
}
// Fetch and deserialize an unchecked block; nullptr when absent.
std::unique_ptr <paper::block> paper::block_store::unchecked_get (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
    MDB_val value;
    auto status (mdb_get (transaction_a, unchecked, hash_a.val (), &value));
    assert (status == 0 || status == MDB_NOTFOUND);
    std::unique_ptr <paper::block> result;
    if (status == 0)
    {
        paper::bufferstream stream (reinterpret_cast <uint8_t const *> (value.mv_data), value.mv_size);
        result = paper::deserialize_block (stream);
        assert (result != nullptr);
    }
    return result;
}
// Remove an unchecked block; deleting a missing hash is tolerated.
void paper::block_store::unchecked_del (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
    auto status (mdb_del (transaction_a, unchecked, hash_a.val (), nullptr));
    assert (status == 0 || status == MDB_NOTFOUND);
}
// Iterator over all unchecked blocks from the start of the table.
paper::store_iterator paper::block_store::unchecked_begin (MDB_txn * transaction_a)
{
    return paper::store_iterator (transaction_a, unchecked);
}
// End sentinel for unchecked iteration.
paper::store_iterator paper::block_store::unchecked_end ()
{
    return paper::store_iterator (nullptr);
}
// Mark a hash as unsynced.  The table is used as a set: the value stored is
// empty, only key presence matters.
void paper::block_store::unsynced_put (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
    auto status (mdb_put (transaction_a, unsynced, hash_a.val (), paper::mdb_val (0, nullptr), 0));
    assert (status == 0);
}
// Remove a hash from the unsynced set; the hash must be present.
void paper::block_store::unsynced_del (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
    auto status (mdb_del (transaction_a, unsynced, hash_a.val (), nullptr));
    assert (status == 0);
}
// Membership test via range seek; exact-match comparison required because
// the iterator positions at the first key >= hash_a.
bool paper::block_store::unsynced_exists (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
    auto iterator (unsynced_begin (transaction_a, hash_a));
    return iterator != paper::store_iterator (nullptr) && paper::block_hash (iterator->first) == hash_a;
}
// Iterator over the whole unsynced set.
paper::store_iterator paper::block_store::unsynced_begin (MDB_txn * transaction_a)
{
    paper::store_iterator result (transaction_a, unsynced);
    return result;
}
// Iterator over the unsynced set starting at the first key >= val_a.
paper::store_iterator paper::block_store::unsynced_begin (MDB_txn * transaction_a, paper::uint256_union const & val_a)
{
    paper::store_iterator result (transaction_a, unsynced, val_a.val ());
    return result;
}
// End sentinel for unsynced iteration.
paper::store_iterator paper::block_store::unsynced_end ()
{
    paper::store_iterator result (nullptr);
    return result;
}
// Store a hash keyed by caller-chosen integer.  Despite the name this is a
// keyed map, not an ordered stack: callers manage their own keys.  Note the
// key is stored in host byte order.
void paper::block_store::stack_push (uint64_t key_a, paper::block_hash const & hash_a)
{
    paper::transaction transaction (environment, nullptr, true);
    auto status (mdb_put (transaction, stack, paper::mdb_val (sizeof (key_a), &key_a), hash_a.val (), 0));
    assert (status == 0);
}
// Fetch and delete the hash previously stored under key_a.  The key must
// exist (asserted); get and delete happen in one write transaction.
paper::block_hash paper::block_store::stack_pop (uint64_t key_a)
{
    paper::transaction transaction (environment, nullptr, true);
    MDB_val value;
    auto status (mdb_get (transaction, stack, paper::mdb_val (sizeof (key_a), &key_a), &value));
    assert (status == 0);
    paper::block_hash result;
    assert (value.mv_size == result.chars.size ());
    std::copy (reinterpret_cast <uint8_t const *> (value.mv_data), reinterpret_cast <uint8_t const *> (value.mv_data) + result.chars.size(), result.chars.data ());
    auto status2 (mdb_del (transaction, stack, paper::mdb_val (sizeof (key_a), &key_a), nullptr));
    assert (status2 == 0);
    return result;
}
// Store a checksum under a composite key: prefix must have its low byte
// clear so the mask can be OR'd into it.
void paper::block_store::checksum_put (MDB_txn * transaction_a, uint64_t prefix, uint8_t mask, paper::uint256_union const & hash_a)
{
    assert ((prefix & 0xff) == 0);
    uint64_t key (prefix | mask);
    auto status (mdb_put (transaction_a, checksum, paper::mdb_val (sizeof (key), &key), hash_a.val (), 0));
    assert (status == 0);
}
// Fetch a checksum by (prefix | mask) key.  Returns true when absent
// (error convention), false on success with hash_a filled in.
bool paper::block_store::checksum_get (MDB_txn * transaction_a, uint64_t prefix, uint8_t mask, paper::uint256_union & hash_a)
{
    assert ((prefix & 0xff) == 0);
    uint64_t key (prefix | mask);
    MDB_val value;
    auto status (mdb_get (transaction_a, checksum, paper::mdb_val (sizeof (key), &key), &value));
    assert (status == 0 || status == MDB_NOTFOUND);
    bool result;
    if (status == 0)
    {
        result = false;
        paper::bufferstream stream (reinterpret_cast <uint8_t const *> (value.mv_data), value.mv_size);
        auto error (paper::read (stream, hash_a));
        assert (!error);
    }
    else
    {
        result = true;
    }
    return result;
}
// Delete a checksum entry by (prefix | mask) key; the entry must exist.
void paper::block_store::checksum_del (MDB_txn * transaction_a, uint64_t prefix, uint8_t mask)
{
    assert ((prefix & 0xff) == 0);
    uint64_t key (prefix | mask);
    auto status (mdb_del (transaction_a, checksum, paper::mdb_val (sizeof (key), &key), nullptr));
    assert (status == 0);
}
namespace
{
// Visitor computing the "root" of a block: the value a successor must prove
// work against.  For most blocks this is previous (); open blocks have no
// previous, so the owning account (the send's destination) is used instead.
class root_visitor : public paper::block_visitor
{
public:
    root_visitor (paper::block_store & store_a) :
    store (store_a)
    {
    }
    void send_block (paper::send_block const & block_a) override
    {
        result = block_a.previous ();
    }
    void receive_block (paper::receive_block const & block_a) override
    {
        result = block_a.previous ();
    }
    // Open blocks have no previous () so we use the account number
    void open_block (paper::open_block const & block_a) override
    {
        paper::transaction transaction (store.environment, nullptr, false);
        auto hash (block_a.source ());
        auto source (store.block_get (transaction, hash));
        if (source != nullptr)
        {
            // The destination of the funding send is the account being opened.
            auto send (dynamic_cast <paper::send_block *> (source.get ()));
            if (send != nullptr)
            {
                result = send->hashables.destination;
            }
            else
            {
                // Source exists but is not a send: no valid root.
                result.clear ();
            }
        }
        else
        {
            // Source block unknown: no valid root.
            result.clear ();
        }
    }
    void change_block (paper::change_block const & block_a) override
    {
        result = block_a.previous ();
    }
    paper::block_store & store;
    paper::block_hash result;
};
}
// Iterator over account records starting at the first key >= account_a.
paper::store_iterator paper::block_store::latest_begin (MDB_txn * transaction_a, paper::account const & account_a)
{
    return paper::store_iterator (transaction_a, accounts, account_a.val ());
}
// Iterator over all account records from the start of the table.
paper::store_iterator paper::block_store::latest_begin (MDB_txn * transaction_a)
{
    return paper::store_iterator (transaction_a, accounts);
}
// End sentinel for account iteration.
paper::store_iterator paper::block_store::latest_end ()
{
    return paper::store_iterator (nullptr);
}
namespace
{
// Visitor applying a block to the ledger; per-type handlers validate and
// commit the block, recording the outcome in `result`.  Definitions appear
// later in this translation unit.
class ledger_processor : public paper::block_visitor
{
public:
    ledger_processor (paper::ledger &, MDB_txn *);
    void send_block (paper::send_block const &) override;
    void receive_block (paper::receive_block const &) override;
    void open_block (paper::open_block const &) override;
    void change_block (paper::change_block const &) override;
    paper::ledger & ledger;
    MDB_txn * transaction;
    paper::process_return result; // Outcome of processing the visited block
};
// Determine the amount delta resultant from this block
// (e.g. how much a send sent or a receive/open received), left in `result`.
class amount_visitor : public paper::block_visitor
{
public:
    amount_visitor (MDB_txn *, paper::block_store &);
    void compute (paper::block_hash const &); // Entry point: visit the block with this hash
    void send_block (paper::send_block const &) override;
    void receive_block (paper::receive_block const &) override;
    void open_block (paper::open_block const &) override;
    void change_block (paper::change_block const &) override;
    void from_send (paper::block_hash const &); // Shared helper for receive/open: amount of the referenced send
    MDB_txn * transaction;
    paper::block_store & store;
    paper::uint128_t result;
};
// Determine the balance as of this block
// by walking backwards from the block, accumulating into `result` until a
// block that fixes the balance (send) or starts the chain (open) is reached.
class balance_visitor : public paper::block_visitor
{
public:
    balance_visitor (MDB_txn *, paper::block_store &);
    void compute (paper::block_hash const &); // Entry point: walk the chain from this hash
    void send_block (paper::send_block const &) override;
    void receive_block (paper::receive_block const &) override;
    void open_block (paper::open_block const &) override;
    void change_block (paper::change_block const &) override;
    MDB_txn * transaction;
    paper::block_store & store;
    paper::block_hash current; // Next block to visit; zero terminates the walk
    paper::uint128_t result;   // Accumulated balance
};
// Determine the account for this block
// by walking back through previous pointers until a block that names the
// account is found: an open block (explicit account field) or a receive
// block (the destination of its source send).
class account_visitor : public paper::block_visitor
{
public:
    account_visitor (MDB_txn * transaction_a, paper::block_store & store_a) :
    store (store_a),
    transaction (transaction_a),
    result (0),
    current (0)
    {
    }
    // Walk backwards from hash_block until a visited block sets `result`.
    void compute (paper::block_hash const & hash_block)
    {
        current = hash_block;
        while (result.is_zero ())
        {
            auto block (store.block_get (transaction, current));
            assert (block != nullptr);
            block->visit (*this);
        }
    }
    void send_block (paper::send_block const & block_a) override
    {
        // Sends don't name the account; keep walking backwards.
        current = block_a.hashables.previous;
    }
    void receive_block (paper::receive_block const & block_a) override
    {
        // The account is the destination of the send this receive consumes.
        auto block (store.block_get (transaction, block_a.hashables.source));
        assert (dynamic_cast <paper::send_block *> (block.get ()) != nullptr);
        auto send (static_cast <paper::send_block *> (block.get ()));
        result = send->hashables.destination;
    }
    void open_block (paper::open_block const & block_a) override
    {
        // Open blocks carry the account explicitly.
        result = block_a.hashables.account;
    }
    void change_block (paper::change_block const & block_a) override
    {
        // Changes don't name the account; keep walking backwards.
        current = block_a.hashables.previous;
    }
    paper::block_store & store;
    MDB_txn * transaction;
    paper::account result;  // Zero until the owning account is found
    paper::account current; // Hash of the next block to inspect
};
// Bind the visitor to a transaction and store; `result` is set by compute ().
amount_visitor::amount_visitor (MDB_txn * transaction_a, paper::block_store & store_a) :
transaction (transaction_a),
store (store_a)
{
}
// Amount of a send = balance before the send minus the balance it declares.
void amount_visitor::send_block (paper::send_block const & block_a)
{
    balance_visitor prev (transaction, store);
    prev.compute (block_a.hashables.previous);
    result = prev.result - block_a.hashables.balance.number ();
}
// Amount received = amount of the referenced send.
void amount_visitor::receive_block (paper::receive_block const & block_a)
{
    from_send (block_a.hashables.source);
}
// Amount received by an open = amount of the referenced send.
void amount_visitor::open_block (paper::open_block const & block_a)
{
    from_send (block_a.hashables.source);
}
// Change blocks move no funds; asking for their amount is a caller bug.
void amount_visitor::change_block (paper::change_block const & block_a)
{
    assert (false);
}
// Amount of the send at hash_a = balance before the send (at its previous
// block) minus the balance the send declares.  Previously this function
// computed both balances but never assigned `result`, leaving the visitor's
// output stale/uninitialized for receive and open blocks.
void amount_visitor::from_send (paper::block_hash const & hash_a)
{
    balance_visitor source (transaction, store);
    source.compute (hash_a);
    auto source_block (store.block_get (transaction, hash_a));
    assert (source_block != nullptr);
    balance_visitor source_prev (transaction, store);
    source_prev.compute (source_block->previous ());
    result = source_prev.result - source.result;
}
// Bind the visitor to a transaction and store, starting with an empty walk
// (current == 0) and a zero accumulated balance.
balance_visitor::balance_visitor (MDB_txn * transaction_a, paper::block_store & store_a) :
transaction (transaction_a),
store (store_a),
current (0),
result (0)
{
}
// A send states the balance explicitly; add it and terminate the walk.
void balance_visitor::send_block (paper::send_block const & block_a)
{
    result += block_a.hashables.balance.number ();
    current = 0;
}
// A receive adds the amount of its source send, then continues the walk at
// the previous block.
void balance_visitor::receive_block (paper::receive_block const & block_a)
{
    amount_visitor source (transaction, store);
    source.compute (block_a.hashables.source);
    result += source.result;
    current = block_a.hashables.previous;
}
// An open adds the amount of its source send and, being the first block of
// the chain, terminates the walk.
void balance_visitor::open_block (paper::open_block const & block_a)
{
    amount_visitor source (transaction, store);
    source.compute (block_a.hashables.source);
    result += source.result;
    current = 0;
}
// A change moves no funds; continue the walk at the previous block.
void balance_visitor::change_block (paper::change_block const & block_a)
{
    current = block_a.hashables.previous;
}
// Determine the representative for this block
// by recursing backwards until a block that names one is found: an open or a
// change block.  `result` holds the hash of that naming block (note the
// member is typed paper::account; account and block_hash share a
// representation in this codebase).
class representative_visitor : public paper::block_visitor
{
public:
    representative_visitor (MDB_txn * transaction_a, paper::block_store & store_a) :
    transaction (transaction_a),
    store (store_a)
    {
    }
    void compute (paper::block_hash const & hash_a)
    {
        auto block (store.block_get (transaction, hash_a));
        assert (block != nullptr);
        block->visit (*this);
    }
    void send_block (paper::send_block const & block_a) override
    {
        // Sends don't set a representative; recurse to the previous block.
        representative_visitor visitor (transaction, store);
        visitor.compute (block_a.previous ());
        result = visitor.result;
    }
    void receive_block (paper::receive_block const & block_a) override
    {
        // Receives don't set a representative; recurse to the previous block.
        representative_visitor visitor (transaction, store);
        visitor.compute (block_a.previous ());
        result = visitor.result;
    }
    void open_block (paper::open_block const & block_a) override
    {
        result = block_a.hash ();
    }
    void change_block (paper::change_block const & block_a) override
    {
        result = block_a.hash ();
    }
    MDB_txn * transaction;
    paper::block_store & store;
    paper::account result;
};
// Rollback this block
// Visitor undoing a block's effect on the ledger: restores balances and
// representation, re-creates or removes pending entries, deletes the block,
// and rewinds the frontier.  Mutation order within each handler matters.
class rollback_visitor : public paper::block_visitor
{
public:
    rollback_visitor (MDB_txn * transaction_a, paper::ledger & ledger_a) :
    transaction (transaction_a),
    ledger (ledger_a)
    {
    }
    void send_block (paper::send_block const & block_a) override
    {
        auto hash (block_a.hash ());
        paper::receivable receivable;
        // pending_get returns true while absent: if the send has already
        // been received, roll back the destination chain until the pending
        // entry reappears.
        while (ledger.store.pending_get (transaction, hash, receivable))
        {
            ledger.rollback (transaction, ledger.latest (transaction, block_a.hashables.destination));
        }
        paper::account_info info;
        ledger.store.account_get (transaction, receivable.source, info);
        ledger.store.pending_del (transaction, hash);
        ledger.change_latest (transaction, receivable.source, block_a.hashables.previous, info.rep_block, ledger.balance (transaction, block_a.hashables.previous));
        ledger.store.block_del (transaction, hash);
        ledger.store.frontier_del (transaction, hash);
        ledger.store.frontier_put (transaction, block_a.hashables.previous, receivable.source);
    }
    void receive_block (paper::receive_block const & block_a) override
    {
        auto hash (block_a.hash ());
        auto representative (ledger.representative (transaction, block_a.hashables.source));
        auto amount (ledger.amount (transaction, block_a.hashables.source));
        auto destination_account (ledger.account (transaction, hash));
        // Return voting weight from this account's representative to the
        // source's, then restore the pre-receive state and pending entry.
        ledger.move_representation (transaction, ledger.representative (transaction, hash), representative, amount);
        ledger.change_latest (transaction, destination_account, block_a.hashables.previous, representative, ledger.balance (transaction, block_a.hashables.previous));
        ledger.store.block_del (transaction, hash);
        ledger.store.pending_put (transaction, block_a.hashables.source, {ledger.account (transaction, block_a.hashables.source), amount, destination_account});
        ledger.store.frontier_del (transaction, hash);
        ledger.store.frontier_put (transaction, block_a.hashables.previous, destination_account);
    }
    void open_block (paper::open_block const & block_a) override
    {
        auto hash (block_a.hash ());
        auto representative (ledger.representative (transaction, block_a.hashables.source));
        auto amount (ledger.amount (transaction, block_a.hashables.source));
        auto destination_account (ledger.account (transaction, hash));
        ledger.move_representation (transaction, ledger.representative (transaction, hash), representative, amount);
        // An open is the first block: rolling it back erases the account.
        ledger.change_latest (transaction, destination_account, 0, representative, 0);
        ledger.store.block_del (transaction, hash);
        ledger.store.pending_put (transaction, block_a.hashables.source, {ledger.account (transaction, block_a.hashables.source), amount, destination_account});
        ledger.store.frontier_del (transaction, hash);
        // No frontier_put: the account no longer has a frontier.
    }
    void change_block (paper::change_block const & block_a) override
    {
        auto hash (block_a.hash ());
        auto representative (ledger.representative (transaction, block_a.hashables.previous));
        auto account (ledger.account (transaction, block_a.hashables.previous));
        paper::account_info info;
        ledger.store.account_get (transaction, account, info);
        // Move the account's weight back to the previous representative.
        ledger.move_representation (transaction, hash, representative, ledger.balance (transaction, block_a.hashables.previous));
        ledger.store.block_del (transaction, hash);
        ledger.change_latest (transaction, account, block_a.hashables.previous, representative, info.balance);
        ledger.store.frontier_del (transaction, hash);
        ledger.store.frontier_put (transaction, block_a.hashables.previous, account);
    }
    MDB_txn * transaction;
    paper::ledger & ledger;
};
}
// Dispatch to the per-type amount handler for the block with this hash.
// A missing block is only legal for the genesis sentinel.
void amount_visitor::compute (paper::block_hash const & block_hash)
{
    auto block (store.block_get (transaction, block_hash));
    if (block != nullptr)
    {
        block->visit (*this);
    }
    else
    {
        // NOTE(review): this compares a block hash against genesis_account —
        // presumably the genesis open block uses the genesis account as its
        // source hash, making this the "entire supply" sentinel; confirm.
        if (block_hash == paper::genesis_account)
        {
            result = std::numeric_limits <paper::uint128_t>::max ();
        }
        else
        {
            assert (false);
            result = 0;
        }
    }
}
// Walk the chain backwards from block_hash, letting each visited block add
// its contribution to `result`; handlers terminate the walk by zeroing
// `current` (see send_block / open_block).
void balance_visitor::compute (paper::block_hash const & block_hash)
{
    current = block_hash;
    while (!current.is_zero ())
    {
        auto block (store.block_get (transaction, current));
        assert (block != nullptr);
        block->visit (*this);
    }
}
// Balance for account containing hash
paper::uint128_t paper::ledger::balance (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
balance_visitor visitor (transaction_a, store);
visitor.compute (hash_a);
return visitor.result;
}
// Balance for an account by account number
paper::uint128_t paper::ledger::account_balance (MDB_txn * transaction_a, paper::account const & account_a)
{
paper::uint128_t result (0);
paper::account_info info;
auto none (store.account_get (transaction_a, account_a, info));
if (!none)
{
result = info.balance.number ();
}
return result;
}
// Run `block_a' through the ledger state machine, applying it to the store
// when valid; the returned process_return carries the outcome code.
paper::process_return paper::ledger::process (MDB_txn * transaction_a, paper::block const & block_a)
{
	ledger_processor visitor_l (*this, transaction_a);
	block_a.visit (visitor_l);
	return visitor_l.result;
}
// Money supply for heuristically calculating vote percentages
paper::uint128_t paper::ledger::supply (MDB_txn * transaction_a)
{
auto unallocated (account_balance (transaction_a, paper::genesis_account));
return paper::genesis_amount - unallocated;
}
// Representative in effect at block `hash_a', with a sanity check that any
// non-zero result corresponds to a stored block.
paper::account paper::ledger::representative (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
	auto rep (representative_calculated (transaction_a, hash_a));
	assert (rep.is_zero () || store.block_exists (transaction_a, rep));
	return rep;
}
// Walk the chain with a representative_visitor to find the representative
// at `hash_a' (no sanity checking; see representative()).
paper::account paper::ledger::representative_calculated (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
	representative_visitor visitor_l (transaction_a, store);
	visitor_l.compute (hash_a);
	return visitor_l.result;
}
// Vote weight of an account
paper::uint128_t paper::ledger::weight (MDB_txn * transaction_a, paper::account const & account_a)
{
return store.representation_get (transaction_a, account_a);
}
// Rollback blocks until `frontier_a' is the frontier block
void paper::ledger::rollback (MDB_txn * transaction_a, paper::block_hash const & frontier_a)
{
auto account_l (account (transaction_a, frontier_a));
rollback_visitor rollback (transaction_a, *this);
paper::account_info info;
do
{
auto latest_error (store.account_get (transaction_a, account_l, info));
assert (!latest_error);
auto block (store.block_get (transaction_a, info.head));
block->visit (rollback);
// Continue rolling back until this block is the frontier
} while (info.head != frontier_a);
}
// Return account containing hash
paper::account paper::ledger::account (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
account_visitor account (transaction_a, store);
account.compute (hash_a);
return account.result;
}
// Return amount decrease or increase for block
paper::uint128_t paper::ledger::amount (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
amount_visitor amount (transaction_a, store);
amount.compute (hash_a);
return amount.result;
}
// Transfer `amount_a' of voting weight from the representative named by
// block `source_a' to the representative named by block `destination_a'.
// The read-modify-write order matters: the destination total is re-read
// AFTER the source has been written, so when both blocks name the same
// representative the net change is correctly zero.
void paper::ledger::move_representation (MDB_txn * transaction_a, paper::block_hash const & source_a, paper::block_hash const & destination_a, paper::uint128_t const & amount_a)
{
auto source_block (store.block_get (transaction_a, source_a));
assert (source_block != nullptr);
auto source_rep (source_block->representative ());
assert (!source_rep.is_zero ());
auto destination_block (store.block_get (transaction_a, destination_a));
assert (destination_block != nullptr);
auto destination_rep (destination_block->representative ());
assert (!destination_rep.is_zero ());
// Debit the source representative.
auto source_previous (store.representation_get (transaction_a, source_rep));
assert (source_previous >= amount_a);
store.representation_put (transaction_a, source_rep, source_previous - amount_a);
// Credit the destination representative (re-read after the debit above).
auto destination_previous (store.representation_get (transaction_a, destination_rep));
store.representation_put (transaction_a, destination_rep, destination_previous + amount_a);
}
// Return latest block for account
paper::block_hash paper::ledger::latest (MDB_txn * transaction_a, paper::account const & account_a)
{
paper::account_info info;
auto latest_error (store.account_get (transaction_a, account_a, info));
return latest_error ? 0 : info.head;
}
// Return latest root for account, account number of there are no blocks for this account.
paper::block_hash paper::ledger::latest_root (MDB_txn * transaction_a, paper::account const & account_a)
{
paper::account_info info;
auto latest_error (store.account_get (transaction_a, account_a, info));
paper::block_hash result;
if (latest_error)
{
result = account_a;
}
else
{
result = info.head;
}
return result;
}
// Fetch the ledger-wide checksum (XOR accumulator of frontier hashes).
// NOTE(review): `begin_a' and `end_a' are accepted but unused — the store
// is queried with fixed keys (0, 0); confirm whether ranged checksums were
// ever intended before relying on the parameters.
paper::checksum paper::ledger::checksum (MDB_txn * transaction_a, paper::account const & begin_a, paper::account const & end_a)
{
paper::checksum result;
auto error (store.checksum_get (transaction_a, 0, 0, result));
assert (!error);
return result;
}
// Debug helper: print every block hash in `account_a''s chain to stderr,
// from the head back to the open block.
void paper::ledger::dump_account_chain (paper::account const & account_a)
{
	paper::transaction transaction (store.environment, nullptr, false);
	for (auto cursor (latest (transaction, account_a)); !cursor.is_zero ();)
	{
		auto block_l (store.block_get (transaction, cursor));
		assert (block_l != nullptr);
		std::cerr << cursor.to_string () << std::endl;
		// Step toward the start of the chain.
		cursor = block_l->previous ();
	}
}
// Toggle `hash_a' in the ledger checksum. The checksum is the XOR of all
// frontier hashes, so the same call both inserts and removes a hash.
void paper::ledger::checksum_update (MDB_txn * transaction_a, paper::block_hash const & hash_a)
{
	paper::checksum current;
	auto fetch_error (store.checksum_get (transaction_a, 0, 0, current));
	assert (!fetch_error);
	current ^= hash_a;
	store.checksum_put (transaction_a, 0, 0, current);
}
// Update (or delete) the stored head state for `account_a'.
// A zero `hash_a' means "remove the account". The checksum is an XOR
// accumulator, so checksum_update both removes the old head (when the
// account existed) and adds the new one — the call order here matters.
void paper::ledger::change_latest (MDB_txn * transaction_a, paper::account const & account_a, paper::block_hash const & hash_a, paper::block_hash const & rep_block_a, paper::amount const & balance_a)
{
paper::account_info info;
auto exists (!store.account_get (transaction_a, account_a, info));
if (exists)
{
// XOR out the old head before recording the new one.
checksum_update (transaction_a, info.head);
}
if (!hash_a.is_zero())
{
info.head = hash_a;
info.rep_block = rep_block_a;
info.balance = balance_a;
info.modified = store.now ();
store.account_put (transaction_a, account_a, info);
// XOR in the new head.
checksum_update (transaction_a, hash_a);
}
else
{
// Zero hash: the account is being rolled back out of existence.
store.account_del (transaction_a, account_a);
}
}
// Block that follows `block_a' on its chain. Preconditions (asserted):
// `block_a' exists and is not the account's frontier, so a successor is
// guaranteed to be recorded.
std::unique_ptr <paper::block> paper::ledger::successor (MDB_txn * transaction_a, paper::block_hash const & block_a)
{
	assert (store.block_exists (transaction_a, block_a));
	assert (latest (transaction_a, account (transaction_a, block_a)) != block_a);
	auto next (store.block_successor (transaction_a, block_a));
	assert (!next.is_zero ());
	auto block_l (store.block_get (transaction_a, next));
	assert (block_l != nullptr);
	return block_l;
}
// Validate and apply a representative-change block. Checks cascade: each
// stage only runs while result.code is still `progress'; the first failure
// code sticks. On success the block is stored, the full chain balance's
// vote weight moves from the old rep block to this one, and the frontier
// advances.
void ledger_processor::change_block (paper::change_block const & block_a)
{
auto hash (block_a.hash ());
auto existing (ledger.store.block_exists (transaction, hash));
result.code = existing ? paper::process_result::old : paper::process_result::progress; // Have we seen this block before? (Harmless)
if (result.code == paper::process_result::progress)
{
auto previous (ledger.store.block_exists (transaction, block_a.hashables.previous));
result.code = previous ? paper::process_result::progress : paper::process_result::gap_previous; // Have we seen the previous block already? (Harmless)
if (result.code == paper::process_result::progress)
{
// A previous block with no frontier entry means it is not the account head: fork.
auto account (ledger.store.frontier_get (transaction, block_a.hashables.previous));
result.code = account.is_zero () ? paper::process_result::fork : paper::process_result::progress;
if (result.code == paper::process_result::progress)
{
paper::account_info info;
auto latest_error (ledger.store.account_get (transaction, account, info));
assert (!latest_error);
assert (info.head == block_a.hashables.previous);
result.code = validate_message (account, hash, block_a.signature) ? paper::process_result::bad_signature : paper::process_result::progress; // Is this block signed correctly (Malformed)
if (result.code == paper::process_result::progress)
{
// All checks passed: persist and update representation/frontier.
ledger.store.block_put (transaction, hash, block_a);
ledger.move_representation (transaction, info.rep_block, hash, ledger.balance (transaction, block_a.hashables.previous));
ledger.change_latest (transaction, account, hash, hash, info.balance);
ledger.store.frontier_del (transaction, block_a.hashables.previous);
ledger.store.frontier_put (transaction, hash, account);
result.account = account;
}
}
}
}
}
// Validate and apply a send block. Stages cascade on result.code like
// change_block. A send records its *remaining* balance; the amount sent is
// the old balance minus the new one, which is parked in the pending table
// keyed by this block's hash until the destination receives it.
void ledger_processor::send_block (paper::send_block const & block_a)
{
auto hash (block_a.hash ());
auto existing (ledger.store.block_exists (transaction, hash));
result.code = existing ? paper::process_result::old : paper::process_result::progress; // Have we seen this block before? (Harmless)
if (result.code == paper::process_result::progress)
{
auto previous (ledger.store.block_exists (transaction, block_a.hashables.previous));
result.code = previous ? paper::process_result::progress : paper::process_result::gap_previous; // Have we seen the previous block already? (Harmless)
if (result.code == paper::process_result::progress)
{
// Previous block must be the account head (frontier) or this is a fork.
auto account (ledger.store.frontier_get (transaction, block_a.hashables.previous));
result.code = account.is_zero () ? paper::process_result::fork : paper::process_result::progress;
if (result.code == paper::process_result::progress)
{
result.code = validate_message (account, hash, block_a.signature) ? paper::process_result::bad_signature : paper::process_result::progress; // Is this block signed correctly (Malformed)
if (result.code == paper::process_result::progress)
{
paper::account_info info;
auto latest_error (ledger.store.account_get (transaction, account, info));
assert (!latest_error);
assert (info.head == block_a.hashables.previous);
result.code = info.balance.number () >= block_a.hashables.balance.number () ? paper::process_result::progress : paper::process_result::overspend; // Is this trying to spend more than they have (Malicious)
if (result.code == paper::process_result::progress)
{
// Persist, update head, and park the sent amount as pending for the destination.
ledger.store.block_put (transaction, hash, block_a);
ledger.change_latest (transaction, account, hash, info.rep_block, block_a.hashables.balance);
ledger.store.pending_put (transaction, hash, {account, info.balance.number () - block_a.hashables.balance.number (), block_a.hashables.destination});
ledger.store.frontier_del (transaction, block_a.hashables.previous);
ledger.store.frontier_put (transaction, hash, account);
result.account = account;
}
}
}
}
}
}
// Validate and apply a receive block on an existing account. Consumes a
// pending entry created by a matching send: the pending record is deleted,
// the destination's balance grows by the pending amount, and the received
// weight moves from the sender's representative to the receiver's.
void ledger_processor::receive_block (paper::receive_block const & block_a)
{
auto hash (block_a.hash ());
auto existing (ledger.store.block_exists (transaction, hash));
result.code = existing ? paper::process_result::old : paper::process_result::progress; // Have we seen this block already?  (Harmless)
if (result.code == paper::process_result::progress)
{
auto source_missing (!ledger.store.block_exists (transaction, block_a.hashables.source));
result.code = source_missing ? paper::process_result::gap_source : paper::process_result::progress; // Have we seen the source block already? (Harmless)
if (result.code == paper::process_result::progress)
{
paper::receivable receivable;
result.code = ledger.store.pending_get (transaction, block_a.hashables.source, receivable) ? paper::process_result::unreceivable : paper::process_result::progress; // Has this source already been received (Malformed)
if (result.code == paper::process_result::progress)
{
result.code = paper::validate_message (receivable.destination, hash, block_a.signature) ? paper::process_result::bad_signature : paper::process_result::progress; // Is the signature valid (Malformed)
if (result.code == paper::process_result::progress)
{
paper::account_info info;
result.code = ledger.store.account_get (transaction, receivable.destination, info) ? paper::process_result::gap_previous : paper::process_result::progress;  //Have we seen the previous block? No entries for account at all (Harmless)
if (result.code == paper::process_result::progress)
{
result.code = info.head == block_a.hashables.previous ? paper::process_result::progress : paper::process_result::gap_previous; // Block doesn't immediately follow latest block (Harmless)
if (result.code == paper::process_result::progress)
{
assert (ledger.store.frontier_get (transaction, block_a.hashables.previous) == receivable.destination);
// Apply: consume the pending entry, credit the balance, shift vote weight.
auto new_balance (info.balance.number () + receivable.amount.number ());
paper::account_info source_info;
auto error (ledger.store.account_get (transaction, receivable.source, source_info));
assert (!error);
ledger.store.pending_del (transaction, block_a.hashables.source);
ledger.store.block_put (transaction, hash, block_a);
ledger.change_latest (transaction, receivable.destination, hash, info.rep_block, new_balance);
ledger.move_representation (transaction, source_info.rep_block, info.rep_block, receivable.amount.number ());
ledger.store.frontier_del (transaction, block_a.hashables.previous);
ledger.store.frontier_put (transaction, hash, receivable.destination);
result.account = receivable.destination;
}
else
{
result.code = ledger.store.block_exists (transaction, block_a.hashables.previous) ? paper::process_result::fork : paper::process_result::gap_previous; // If we have the block but it's not the latest we have a signed fork (Malicious)
}
}
}
}
}
}
}
// Validate and apply an open block — the first block of a new account,
// which also acts as a receive. Extra checks vs receive_block: the pending
// destination must match the account being opened, and the account must
// not already exist (a second open on an account is a fork).
void ledger_processor::open_block (paper::open_block const & block_a)
{
auto hash (block_a.hash ());
auto existing (ledger.store.block_exists (transaction, hash));
result.code = existing ? paper::process_result::old : paper::process_result::progress; // Have we seen this block already? (Harmless)
if (result.code == paper::process_result::progress)
{
auto source_missing (!ledger.store.block_exists (transaction, block_a.hashables.source));
result.code = source_missing ? paper::process_result::gap_source : paper::process_result::progress; // Have we seen the source block? (Harmless)
if (result.code == paper::process_result::progress)
{
paper::receivable receivable;
result.code = ledger.store.pending_get (transaction, block_a.hashables.source, receivable) ? paper::process_result::unreceivable : paper::process_result::progress; // Has this source already been received (Malformed)
if (result.code == paper::process_result::progress)
{
result.code = receivable.destination == block_a.hashables.account ? paper::process_result::progress : paper::process_result::account_mismatch;
if (result.code == paper::process_result::progress)
{
result.code = paper::validate_message (receivable.destination, hash, block_a.signature) ? paper::process_result::bad_signature : paper::process_result::progress; // Is the signature valid (Malformed)
if (result.code == paper::process_result::progress)
{
paper::account_info info;
result.code = ledger.store.account_get (transaction, receivable.destination, info) ? paper::process_result::progress : paper::process_result::fork; // Has this account already been opened? (Malicious)
if (result.code == paper::process_result::progress)
{
// Apply: the open block is its own representative block (hash passed twice).
paper::account_info source_info;
auto error (ledger.store.account_get (transaction, receivable.source, source_info));
assert (!error);
ledger.store.pending_del (transaction, block_a.hashables.source);
ledger.store.block_put (transaction, hash, block_a);
ledger.change_latest (transaction, receivable.destination, hash, hash, receivable.amount.number ());
ledger.move_representation (transaction, source_info.rep_block, hash, receivable.amount.number ());
ledger.store.frontier_put (transaction, hash, receivable.destination);
result.account = receivable.destination;
}
}
}
}
}
}
}
// Bind the processor to a ledger and an open LMDB transaction; all checks
// and mutations in the visit methods run inside this transaction.
ledger_processor::ledger_processor (paper::ledger & ledger_a, MDB_txn * transaction_a) :
ledger (ledger_a),
transaction (transaction_a)
{
}
// Deserialize a vote from `stream_a'. Reads are strictly ordered: account,
// signature, sequence, then the block payload of type `type_a'. On the
// first failed read `error_a' becomes true and the remaining fields are
// left at their defaults; a null deserialized block is also an error.
paper::vote::vote (bool & error_a, paper::stream & stream_a, paper::block_type type_a)
{
if (!error_a)
{
error_a = paper::read (stream_a, account.bytes);
if (!error_a)
{
error_a = paper::read (stream_a, signature.bytes);
if (!error_a)
{
error_a = paper::read (stream_a, sequence);
if (!error_a)
{
block = paper::deserialize_block (stream_a, type_a);
error_a = block == nullptr;
}
}
}
}
}
// Construct and sign a vote. The signature covers hash (), which reads
// `sequence' and `block'; C++ initializes members in class-declaration
// order, not the order written here — assumes `signature' is declared
// after `sequence' and `block' so hash () sees initialized values.
// NOTE(review): TODO confirm member declaration order in the class.
paper::vote::vote (paper::account const & account_a, paper::private_key const & prv_a, uint64_t sequence_a, std::unique_ptr <paper::block> block_a) :
sequence (sequence_a),
block (std::move (block_a)),
account (account_a),
signature (paper::sign_message (prv_a, account_a, hash ()))
{
}
// Digest identifying this vote: blake2b over the voted block's hash
// followed by the 8-byte sequence number.
paper::uint256_union paper::vote::hash () const
{
paper::uint256_union result;
blake2b_state hash;
blake2b_init (&hash, sizeof (result.bytes));
blake2b_update (&hash, block->hash ().bytes.data (), sizeof (result.bytes));
// Anonymous union: reinterpret the 64-bit sequence as raw bytes for hashing
// (byte order therefore follows host endianness).
union {
uint64_t qword;
std::array <uint8_t, 8> bytes;
};
qword = sequence;
blake2b_update (&hash, bytes.data (), sizeof (bytes));
blake2b_final (&hash, result.bytes.data (), sizeof (result.bytes));
return result;
}
// The genesis open block: source, representative and account are all the
// genesis account; no work value is attached (nullptr).
paper::genesis::genesis () :
open (genesis_account, genesis_account, genesis_account, nullptr)
{
}
// Seed an empty store with the genesis state: the open block, an account
// record holding the full supply, all vote weight on the genesis account,
// the initial checksum, and the genesis frontier. Asserts the account
// table is empty first.
void paper::genesis::initialize (MDB_txn * transaction_a, paper::block_store & store_a) const
{
auto hash_l (hash ());
assert (store_a.latest_begin (transaction_a) == store_a.latest_end ());
store_a.block_put (transaction_a, hash_l, open);
store_a.account_put (transaction_a, genesis_account, {hash_l, open.hash (), std::numeric_limits <paper::uint128_t>::max (), store_a.now (), false});
store_a.representation_put (transaction_a, genesis_account, std::numeric_limits <paper::uint128_t>::max ());
store_a.checksum_put (transaction_a, 0, 0, hash_l);
store_a.frontier_put (transaction_a, hash_l, genesis_account);
}
// Hash of the genesis open block (the genesis block's identity).
paper::block_hash paper::genesis::hash () const
{
return open.hash ();
}
| 177
|
https://github.com/eProsima/Non-Intrusive-DDS-Recorder/blob/master/utils/examples/basic types/BasicTypes_subscriber.cxx
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,021
|
Non-Intrusive-DDS-Recorder
|
eProsima
|
C++
|
Code
| 819
| 2,705
|
/* BasicTypes_subscriber.cxx
A subscription example
This file is derived from code automatically generated by the rtiddsgen
command:
rtiddsgen -language C++ -example <arch> BasicTypes.idl
Example subscription of type BasicTypes automatically generated by
'rtiddsgen'. To test them follow these steps:
(1) Compile this file and the example publication.
(2) Start the subscription on the same domain used for RTI Data Distribution
Service with the command
objs/<arch>/BasicTypes_subscriber <domain_id> <sample_count>
(3) Start the publication on the same domain used for RTI Data Distribution
with the command
objs/<arch>/BasicTypes_publisher <domain_id> <sample_count>
(4) [Optional] Specify the list of discovery initial peers and
multicast receive addresses via an environment variable or a file
(in the current working directory) called NDDS_DISCOVERY_PEERS.
You can run any number of publishers and subscribers programs, and can
add and remove them dynamically from the domain.
Example:
To run the example application on domain <domain_id>:
On Unix:
objs/<arch>/BasicTypes_publisher <domain_id>
objs/<arch>/BasicTypes_subscriber <domain_id>
On Windows:
objs\<arch>\BasicTypes_publisher <domain_id>
objs\<arch>\BasicTypes_subscriber <domain_id>
modification history
------------ -------
*/
#include <stdio.h>
#include <stdlib.h>
#include "BasicTypes.h"
#include "BasicTypesSupport.h"
#include "ndds/ndds_cpp.h"
// DataReader listener for the BasicTypes topic. Every status callback is a
// deliberate no-op except on_data_available, which is defined out of line
// below and drains/prints newly received samples.
class BasicTypesListener : public DDSDataReaderListener {
public:
virtual void on_requested_deadline_missed(
DDSDataReader* /*reader*/,
const DDS_RequestedDeadlineMissedStatus& /*status*/) {}
virtual void on_requested_incompatible_qos(
DDSDataReader* /*reader*/,
const DDS_RequestedIncompatibleQosStatus& /*status*/) {}
virtual void on_sample_rejected(
DDSDataReader* /*reader*/,
const DDS_SampleRejectedStatus& /*status*/) {}
virtual void on_liveliness_changed(
DDSDataReader* /*reader*/,
const DDS_LivelinessChangedStatus& /*status*/) {}
virtual void on_sample_lost(
DDSDataReader* /*reader*/,
const DDS_SampleLostStatus& /*status*/) {}
virtual void on_subscription_matched(
DDSDataReader* /*reader*/,
const DDS_SubscriptionMatchedStatus& /*status*/) {}
virtual void on_data_available(DDSDataReader* reader);
};
// Drain all available samples and print the valid ones. Uses the DDS
// loan protocol: take() borrows buffers from the middleware, so
// return_loan() must be called afterwards to give them back.
void BasicTypesListener::on_data_available(DDSDataReader* reader)
{
BasicTypesDataReader *BasicTypes_reader = NULL;
BasicTypesSeq data_seq;
DDS_SampleInfoSeq info_seq;
DDS_ReturnCode_t retcode;
int i;
BasicTypes_reader = BasicTypesDataReader::narrow(reader);
if (BasicTypes_reader == NULL) {
printf("DataReader narrow error\n");
return;
}
retcode = BasicTypes_reader->take(
data_seq, info_seq, DDS_LENGTH_UNLIMITED,
DDS_ANY_SAMPLE_STATE, DDS_ANY_VIEW_STATE, DDS_ANY_INSTANCE_STATE);
if (retcode == DDS_RETCODE_NO_DATA) {
// Nothing to consume (race with another taker); not an error.
return;
} else if (retcode != DDS_RETCODE_OK) {
printf("take error %d\n", retcode);
return;
}
for (i = 0; i < data_seq.length(); ++i) {
// Skip metadata-only samples (e.g. dispose/unregister notifications).
if (info_seq[i].valid_data) {
BasicTypesTypeSupport::print_data(&data_seq[i]);
}
}
retcode = BasicTypes_reader->return_loan(data_seq, info_seq);
if (retcode != DDS_RETCODE_OK) {
printf("return loan error %d\n", retcode);
}
}
/* Delete all entities */
static int subscriber_shutdown(
DDSDomainParticipant *participant)
{
DDS_ReturnCode_t retcode;
int status = 0;
if (participant != NULL) {
retcode = participant->delete_contained_entities();
if (retcode != DDS_RETCODE_OK) {
printf("delete_contained_entities error %d\n", retcode);
status = -1;
}
retcode = DDSTheParticipantFactory->delete_participant(participant);
if (retcode != DDS_RETCODE_OK) {
printf("delete_participant error %d\n", retcode);
status = -1;
}
}
/* RTI Data Distribution Service provides the finalize_instance() method on
domain participant factory and the finalize() method on type support for
users who want to release memory used by the participant factory and
type support singletons. Uncomment the following block of code for
clean destruction of the singletons. */
/*
BasicTypesTypeSupport::finalize();
retcode = DDSDomainParticipantFactory::finalize_instance();
if (retcode != DDS_RETCODE_OK) {
printf("finalize_instance error %d\n", retcode);
status = -1;
}
*/
return status;
}
/* Subscriber entry point: builds the DDS entity chain (participant ->
   subscriber -> topic -> reader with listener), then sleeps in 4-second
   periods while the listener callback prints samples. sample_count == 0
   means loop forever. Every failure path tears down via
   subscriber_shutdown() and returns -1. */
extern "C" int subscriber_main(int domainId, int sample_count)
{
DDSDomainParticipant *participant = NULL;
DDSSubscriber *subscriber = NULL;
DDSTopic *topic = NULL;
BasicTypesListener *reader_listener = NULL;
DDSDataReader *reader = NULL;
DDS_ReturnCode_t retcode;
const char *type_name = NULL;
int count = 0;
DDS_Duration_t receive_period = {4,0};
int status = 0;
/* To customize the participant QoS, use
the configuration file USER_QOS_PROFILES.xml */
participant = DDSTheParticipantFactory->create_participant(
domainId, DDS_PARTICIPANT_QOS_DEFAULT,
NULL /* listener */, DDS_STATUS_MASK_NONE);
if (participant == NULL) {
printf("create_participant error\n");
subscriber_shutdown(participant);
return -1;
}
/* To customize the subscriber QoS, use
the configuration file USER_QOS_PROFILES.xml */
subscriber = participant->create_subscriber(
DDS_SUBSCRIBER_QOS_DEFAULT, NULL /* listener */, DDS_STATUS_MASK_NONE);
if (subscriber == NULL) {
printf("create_subscriber error\n");
subscriber_shutdown(participant);
return -1;
}
/* Register the type before creating the topic */
type_name = BasicTypesTypeSupport::get_type_name();
retcode = BasicTypesTypeSupport::register_type(
participant, type_name);
if (retcode != DDS_RETCODE_OK) {
printf("register_type error %d\n", retcode);
subscriber_shutdown(participant);
return -1;
}
/* To customize the topic QoS, use
the configuration file USER_QOS_PROFILES.xml */
topic = participant->create_topic(
"Example BasicTypes",
type_name, DDS_TOPIC_QOS_DEFAULT, NULL /* listener */,
DDS_STATUS_MASK_NONE);
if (topic == NULL) {
printf("create_topic error\n");
subscriber_shutdown(participant);
return -1;
}
/* Create a data reader listener */
reader_listener = new BasicTypesListener();
/* To customize the data reader QoS, use
the configuration file USER_QOS_PROFILES.xml */
/* STATUS_MASK_ALL wires every listener callback, including
   on_data_available, which does the actual sample printing. */
reader = subscriber->create_datareader(
topic, DDS_DATAREADER_QOS_DEFAULT, reader_listener,
DDS_STATUS_MASK_ALL);
if (reader == NULL) {
printf("create_datareader error\n");
subscriber_shutdown(participant);
delete reader_listener;
return -1;
}
/* Main loop */
for (count=0; (sample_count == 0) || (count < sample_count); ++count) {
printf("BasicTypes subscriber sleeping for %d sec...\n",
receive_period.sec);
NDDSUtility::sleep(receive_period);
}
/* Delete all entities */
status = subscriber_shutdown(participant);
delete reader_listener;
return status;
}
/* Platform-specific entry points: WinCE uses wide-char wmain; VxWorks
   kernel mode and pSOS provide their own entry so no main is emitted
   there. argv[1] = domain id (default 0), argv[2] = sample count
   (default 0 = run forever). */
#if defined(RTI_WINCE)
int wmain(int argc, wchar_t** argv)
{
int domainId = 0;
int sample_count = 0; /* infinite loop */
if (argc >= 2) {
domainId = _wtoi(argv[1]);
}
if (argc >= 3) {
sample_count = _wtoi(argv[2]);
}
/* Uncomment this to turn on additional logging
NDDSConfigLogger::get_instance()->
set_verbosity_by_category(NDDS_CONFIG_LOG_CATEGORY_API,
NDDS_CONFIG_LOG_VERBOSITY_STATUS_ALL);
*/
return subscriber_main(domainId, sample_count);
}
#elif !(defined(RTI_VXWORKS) && !defined(__RTP__)) && !defined(RTI_PSOS)
int main(int argc, char *argv[])
{
int domainId = 0;
int sample_count = 0; /* infinite loop */
if (argc >= 2) {
domainId = atoi(argv[1]);
}
if (argc >= 3) {
sample_count = atoi(argv[2]);
}
/* Uncomment this to turn on additional logging
NDDSConfigLogger::get_instance()->
set_verbosity_by_category(NDDS_CONFIG_LOG_CATEGORY_API,
NDDS_CONFIG_LOG_VERBOSITY_STATUS_ALL);
*/
return subscriber_main(domainId, sample_count);
}
#endif
| 26,849
|
https://github.com/EOL/harvester/blob/master/db/migrate/20171221175421_adding_refs_to_traits.rb
|
Github Open Source
|
Open Source
|
MIT
| 2,023
|
harvester
|
EOL
|
Ruby
|
Code
| 43
| 105
|
class AddingRefsToTraits < ActiveRecord::Migration[4.2]
  def change
    # This is a little misleading, but it's something I decided we need at this point, sooo...
    # FIX: the original used `nil: false`, which is not a Rails option and was
    # silently ignored — the correct option is `null: false`. `unique: true`
    # was likewise ignored by change_column; uniqueness is enforced by the
    # unique index added below.
    change_column :resources, :abbr, :string, limit: 16, null: false
    remove_index :resources, :name
    add_index :resources, :abbr, unique: true
  end
end
| 41,433
|
https://github.com/Social-Institute-WAAS/blog-waas-gatsby/blob/master/wordpress_data/wp-content/plugins/wp-rest-api-authentication/admin/partials/support/class-mo-api-authentication-faq.php
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
blog-waas-gatsby
|
Social-Institute-WAAS
|
PHP
|
Code
| 29
| 139
|
<?php
/**
 * Renders the FAQ tab of the miniOrange REST API Authentication plugin
 * admin screen. The FAQ content itself is loaded remotely from the
 * miniOrange knowledge base inside an <object> embed.
 */
class Mo_API_Authentication_Admin_FAQ {
	// Public entry point used by the plugin's tab dispatcher.
	public static function mo_api_authentication_faq() {
		self::faq_page();
	}
	// Emits the embed markup directly to the output buffer.
	public static function faq_page(){
		?>
		<div class="mo_table_layout">
			<object type="text/html" data="https://faq.miniorange.com/kb/" width="100%" height="600px" >
			</object>
		</div>
	<?php
	}
}
| 46,747
|
https://github.com/eeviktor/gramConfig/blob/master/CMakeLists.txt
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
gramConfig
|
eeviktor
|
CMake
|
Code
| 56
| 170
|
# Top-level build script for gramConfig; all targets live in src/.
cmake_minimum_required(VERSION 3.16)
project(
gramConfig
VERSION 1.0
DESCRIPTION "Lightweight XML based application configuration"
LANGUAGES CXX)
# Only do these if this is the main project, and not if it is included through
# add_subdirectory
if(CMAKE_PROJECT_NAME STREQUAL PROJECT_NAME)
# Ensure -std=c++xx instead of -std=g++xx
set(CMAKE_CXX_EXTENSIONS OFF)
# Nicely support folders in IDE's
set_property(GLOBAL PROPERTY USE_FOLDERS ON)
endif()
add_subdirectory(src)
| 33,166
|
https://github.com/chausson/CHProgressHUD/blob/master/CHProgressHUDDemo/ViewController.m
|
Github Open Source
|
Open Source
|
MIT
| 2,016
|
CHProgressHUD
|
chausson
|
Objective-C
|
Code
| 190
| 866
|
//
// ViewController.m
// CHProgressHUDDemo
//
// Created by Chausson on 16/4/8.
// Copyright © 2016年 Chausson. All rights reserved.
//
#import "ViewController.h"
#import "CHProgressHUD.h"
// Demo table of CHProgressHUD modes; each row triggers a different HUD style.
static NSArray *data;
@interface ViewController ()<UITableViewDelegate,UITableViewDataSource>

@end
@implementation ViewController{

}
- (void)viewDidLoad {
    [super viewDidLoad];
    // One row title per HUD demo exercised in didSelectRowAtIndexPath.
    data = [NSArray arrayWithObjects:@"Show Activity",@"Show Custom",@"Show PlainText",@"Show ActivityText", nil];
    self.view.backgroundColor = [UIColor whiteColor];
}
- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section{
    return data.count;
}
- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath{
    UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:[UITableViewCell description]];
    if (!cell) {
        cell = [[UITableViewCell alloc]initWithStyle:UITableViewCellStyleDefault reuseIdentifier:[UITableViewCell description]];
    }
    // BUG FIX: configure the label on every pass, not only when the cell is
    // created; otherwise reused cells keep the previous row's (or no) title.
    cell.textLabel.text = data[indexPath.row];
    return cell;
}
- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath{
    switch (indexPath.row) {
        case 0:
            // Plain activity indicator, auto-dismissed after 3 s.
            [CHProgressHUD show:YES];
            [CHProgressHUD hide:YES afterDelay:3.0f completionBlock:^{
                NSLog(@"image dismiss after 3.0");
            }];
            break;
        case 1:{
            // Custom rotating image view as the HUD content.
            UIImageView *image = [[UIImageView alloc]initWithFrame:CGRectMake(0, 0, 60, 60)];
            image.image = [UIImage imageNamed:@"CustomLoding"];
            [CHProgressHUD setCustomView:image];
            [CHProgressHUD setMode:CHRotateCustomView];
            [CHProgressHUD show:YES];
            [CHProgressHUD hide:YES afterDelay:3.0f completionBlock:^{
                NSLog(@"image dismiss after 3.0");
            }];
        }break;
        case 2:
            // Text-only HUD.
            [CHProgressHUD setLabelText:@"网络不好检查下网络连接,网络不好检查下网络连接,网络不好检查下网络连接"];
            [CHProgressHUD setMode:CHPlainText];
            [CHProgressHUD show:YES];
            break;
        case 3:
            // Activity indicator plus text, dismissed by -hide after 3.5 s.
            [CHProgressHUD setLabelText:@"登录中,请稍等"];
            [CHProgressHUD setMode:CHActivityText];
            [CHProgressHUD showHUDAddedTo:self.view animated:YES];
            [self performSelector:@selector(hide) withObject:nil afterDelay:3.5];
            break;
        default:
            break;
    }
}
- (void)hide{
    [CHProgressHUD hideWithText:@"手机密码错误" animated:YES];
}
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
@end
| 33,368
|
https://github.com/marco-c/gecko-dev-wordified-and-comments-removed/blob/master/tools/crashreporter/system-symbols/win/scrape-report.py
|
Github Open Source
|
Open Source
|
LicenseRef-scancode-unknown-license-reference, Apache-2.0
| 2,023
|
gecko-dev-wordified-and-comments-removed
|
marco-c
|
Python
|
Code
| 409
| 1,042
|
# NOTE(review): this file was token-exploded by an automated transform (one
# token per line, commas and comments stripped); reconstructed below.
# `urlparse` and the string-format style indicate Python 2 — TODO confirm
# target interpreter before modernizing.
import csv
import json
import logging
import os
import sys

import requests
import urlparse

log = logging.getLogger()
def fetch_missing_symbols_from_crash(file_or_crash):
    """Return the set of (debug_file, debug_id, filename, code_id) tuples
    for modules flagged ``missing_symbols`` in a crash report.

    ``file_or_crash`` is either a path to a local processed-crash JSON dump,
    a crash-stats report URL, or a bare crash ID. Network or HTTP failures
    are logged and yield an empty set.
    """
    if os.path.isfile(file_or_crash):
        log.info("Fetching missing symbols from JSON file: %s" % file_or_crash)
        j = {"json_dump": json.load(open(file_or_crash, "rb"))}
    else:
        if "report/index/" in file_or_crash:
            # Crash-stats report URL: the crash ID is the last path segment.
            crash_id = urlparse.urlparse(file_or_crash).path.split("/")[-1]
        else:
            crash_id = file_or_crash
        url = (
            "https://crash-stats.mozilla.org/api/ProcessedCrash/"
            "?crash_id={crash_id}&datatype=processed".format(crash_id=crash_id)
        )
        log.info("Fetching missing symbols from crash: %s" % url)
        r = requests.get(url)
        if r.status_code != 200:
            log.error("Failed to fetch crash %s" % url)
            return set()
        j = r.json()
    return set(
        [
            (m["debug_file"], m["debug_id"], m["filename"], m["code_id"])
            for m in j["json_dump"]["modules"]
            if "missing_symbols" in m
        ]
    )
def main():
    """CLI entry point: fetch missing symbols for the crash named on the
    command line and emit them as CSV on stdout."""
    logging.basicConfig()
    log.setLevel(logging.DEBUG)
    # requests' transport layer is noisy at DEBUG; silence it.
    urllib3_logger = logging.getLogger("urllib3")
    urllib3_logger.setLevel(logging.ERROR)
    if len(sys.argv) < 2:
        log.error("Specify a crash URL or ID")
        sys.exit(1)
    symbols = fetch_missing_symbols_from_crash(sys.argv[1])
    log.info("Found %d missing symbols" % len(symbols))
    c = csv.writer(sys.stdout)
    c.writerow(["debug_file", "debug_id", "code_file", "code_id"])
    for row in symbols:
        c.writerow(row)


if __name__ == "__main__":
    main()
| 10,280
|
https://github.com/JulianFeinauer/incubator-plc4x/blob/master/plc4j/utils/connection-pool/src/test/java/org/apache/plc4x/java/utils/connectionpool/PooledPlcDriverManagerTest.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
incubator-plc4x
|
JulianFeinauer
|
Java
|
Code
| 652
| 2,420
|
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.plc4x.java.utils.connectionpool;
import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.commons.pool2.impl.GenericObjectPool;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import org.apache.plc4x.java.api.PlcConnection;
import org.apache.plc4x.java.api.authentication.PlcAuthentication;
import org.apache.plc4x.java.api.authentication.PlcUsernamePasswordAuthentication;
import org.apache.plc4x.java.api.exceptions.PlcConnectionException;
import org.apache.plc4x.java.api.messages.PlcReadRequest;
import org.apache.plc4x.java.api.messages.PlcSubscriptionRequest;
import org.apache.plc4x.java.api.messages.PlcUnsubscriptionRequest;
import org.apache.plc4x.java.api.messages.PlcWriteRequest;
import org.apache.plc4x.java.spi.PlcDriver;
import org.assertj.core.api.WithAssertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.*;
import java.util.stream.IntStream;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Tests for the pooled PLC driver manager: connections requested for the same
 * connection string (and, in the second test, the same credentials) must be
 * served from one shared pool, while distinct connection strings get pools of
 * their own.
 */
@ExtendWith(MockitoExtension.class)
class PooledPlcDriverManagerTest implements WithAssertions {
    // System under test: a pooled manager whose per-URL pools keep at least
    // one idle connection around.
    private PooledPlcDriverManager SUT = new PooledPlcDriverManager(pooledPlcConnectionFactory -> {
        GenericObjectPoolConfig<PlcConnection> plcConnectionGenericObjectPoolConfig = new GenericObjectPoolConfig<>();
        plcConnectionGenericObjectPoolConfig.setMinIdle(1);
        return new GenericObjectPool<>(pooledPlcConnectionFactory, plcConnectionGenericObjectPoolConfig);
    });

    @Mock(answer = Answers.RETURNS_DEEP_STUBS)
    PlcDriver plcDriver;

    private ExecutorService executorService;

    @SuppressWarnings("unchecked")
    @BeforeEach
    void setUp() throws Exception {
        // Register the mocked driver under the "dummydummy" protocol by
        // reflecting into the manager's private driver registry.
        Map<String, PlcDriver> driverMap = (Map) FieldUtils.getField(PooledPlcDriverManager.class, "driverMap", true).get(SUT);
        driverMap.put("dummydummy", plcDriver);
        executorService = Executors.newFixedThreadPool(100);
        assertThat(SUT.getStatistics()).isEmpty();
    }

    @AfterEach
    void tearDown() {
        executorService.shutdown();
    }

    @Test
    void getConnection() throws Exception {
        when(plcDriver.connect(anyString())).then(invocationOnMock -> new DummyPlcConnection(invocationOnMock.getArgument(0)));
        LinkedList<Callable<PlcConnection>> callables = new LinkedList<>();
        // This: should result in one open connection
        IntStream.range(0, 8).forEach(i -> callables.add(() -> {
            try {
                return SUT.getConnection("dummydummy:single");
            } catch (PlcConnectionException e) {
                throw new RuntimeException(e);
            }
        }));
        // This should result in five open connections
        IntStream.range(0, 5).forEach(i -> callables.add(() -> {
            try {
                return SUT.getConnection("dummydummy:multi-" + i);
            } catch (PlcConnectionException e) {
                throw new RuntimeException(e);
            }
        }));
        List<Future<PlcConnection>> futures = executorService.invokeAll(callables);
        // As we have a pool size of 8 we should have only 8 + 5 calls for the separate pools
        verify(plcDriver, times(13)).connect(anyString());
        assertThat(SUT.getStatistics()).contains(
            entry("dummydummy:single.numActive", 8),
            entry("dummydummy:single.numIdle", 0)
        );
        // Closing a pooled connection must return it to the pool rather than
        // actually closing it (DummyPlcConnection.close() would throw).
        futures.forEach(plcConnectionFuture -> {
            try {
                plcConnectionFuture.get().close();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        });
        assertThat(SUT.getStatistics()).contains(
            entry("dummydummy:single.numActive", 0),
            entry("dummydummy:single.numIdle", 8)
        );
    }

    @Test
    void getConnectionWithAuth() throws Exception {
        when(plcDriver.connect(anyString(), any())).then(invocationOnMock -> new DummyPlcConnection(invocationOnMock.getArgument(0), invocationOnMock.getArgument(1)));
        LinkedList<Callable<PlcConnection>> callables = new LinkedList<>();
        // This: should result in one open connection
        IntStream.range(0, 8).forEach(i -> callables.add(() -> {
            try {
                return SUT.getConnection("dummydummy:single", new PlcUsernamePasswordAuthentication("user", "passwordp954368564098ß"));
            } catch (PlcConnectionException e) {
                throw new RuntimeException(e);
            }
        }));
        // This should result in five open connections
        IntStream.range(0, 5).forEach(i -> callables.add(() -> {
            try {
                return SUT.getConnection("dummydummy:single-" + i, new PlcUsernamePasswordAuthentication("user", "passwordp954368564098ß"));
            } catch (PlcConnectionException e) {
                throw new RuntimeException(e);
            }
        }));
        List<Future<PlcConnection>> futures = executorService.invokeAll(callables);
        // As we have a pool size of 8 we should have only 8 + 5 calls for the separate pools
        verify(plcDriver, times(13)).connect(anyString(), any());
        // Statistics keys include the (password-masked) authentication string.
        assertThat(SUT.getStatistics()).contains(
            entry("dummydummy:single/PlcUsernamePasswordAuthentication{username='user', password='*****************'}.numActive", 8),
            entry("dummydummy:single/PlcUsernamePasswordAuthentication{username='user', password='*****************'}.numIdle", 0)
        );
        futures.forEach(plcConnectionFuture -> {
            try {
                plcConnectionFuture.get().connect();
                plcConnectionFuture.get().close();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        });
        assertThat(SUT.getStatistics()).contains(
            entry("dummydummy:single/PlcUsernamePasswordAuthentication{username='user', password='*****************'}.numActive", 0),
            entry("dummydummy:single/PlcUsernamePasswordAuthentication{username='user', password='*****************'}.numIdle", 8)
        );
    }

    /**
     * Minimal PlcConnection stand-in handed out by the mocked driver.
     * close() throws on purpose: the pool must recycle connections instead of
     * closing them, so any direct close() call is a test failure.
     */
    class DummyPlcConnection implements PlcConnection {
        private final String url;
        private final PlcAuthentication plcAuthentication;
        boolean connected = false;

        public DummyPlcConnection(String url) {
            this(url, null);
        }

        public DummyPlcConnection(String url, PlcAuthentication plcAuthentication) {
            this.url = url;
            this.plcAuthentication = plcAuthentication;
        }

        @Override
        public void connect() throws PlcConnectionException {
            connected = true;
        }

        @Override
        public boolean isConnected() {
            return connected;
        }

        @Override
        public void close() throws Exception {
            throw new UnsupportedOperationException("this should never be called due to pool");
        }

        @Override
        public Optional<PlcReadRequest.Builder> readRequestBuilder() {
            return Optional.empty();
        }

        @Override
        public Optional<PlcWriteRequest.Builder> writeRequestBuilder() {
            return Optional.empty();
        }

        @Override
        public Optional<PlcSubscriptionRequest.Builder> subscriptionRequestBuilder() {
            return Optional.empty();
        }

        @Override
        public Optional<PlcUnsubscriptionRequest.Builder> unsubscriptionRequestBuilder() {
            return Optional.empty();
        }

        @Override
        public String toString() {
            return "DummyPlcConnection{" +
                "url='" + url + '\'' +
                ", plcAuthentication=" + plcAuthentication +
                ", connected=" + connected +
                '}';
        }
    }
}
| 15,143
|
https://github.com/aurimasniekis/php-tdlib-schema/blob/master/src/InputMessageInvoice.php
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
php-tdlib-schema
|
aurimasniekis
|
PHP
|
Code
| 420
| 1,306
|
<?php
/**
* This phpFile is auto-generated.
*/
declare(strict_types=1);
namespace AurimasNiekis\TdLibSchema;
/**
* A message with an invoice; can be used only by bots and only in private chats.
*/
class InputMessageInvoice extends InputMessageContent
{
    public const TYPE_NAME = 'inputMessageInvoice';

    /**
     * Invoice.
     *
     * @var Invoice
     */
    protected Invoice $invoice;

    /**
     * Product title; 1-32 characters.
     *
     * @var string
     */
    protected string $title;

    /**
     * Product description; 0-255 characters.
     *
     * @var string
     */
    protected string $description;

    /**
     * Product photo URL; optional.
     *
     * @var string
     */
    protected string $photoUrl;

    /**
     * Product photo size.
     *
     * @var int
     */
    protected int $photoSize;

    /**
     * Product photo width.
     *
     * @var int
     */
    protected int $photoWidth;

    /**
     * Product photo height.
     *
     * @var int
     */
    protected int $photoHeight;

    /**
     * The invoice payload.
     *
     * @var string
     */
    protected string $payload;

    /**
     * Payment provider token.
     *
     * @var string
     */
    protected string $providerToken;

    /**
     * JSON-encoded data about the invoice, which will be shared with the payment provider.
     *
     * @var string
     */
    protected string $providerData;

    /**
     * Unique invoice bot start_parameter for the generation of this invoice.
     *
     * @var string
     */
    protected string $startParameter;

    /**
     * @param Invoice $invoice        Invoice details (currency, prices, ...).
     * @param string  $title          Product title; 1-32 characters.
     * @param string  $description    Product description; 0-255 characters.
     * @param string  $photoUrl       Product photo URL; optional.
     * @param int     $photoSize      Product photo size.
     * @param int     $photoWidth     Product photo width.
     * @param int     $photoHeight    Product photo height.
     * @param string  $payload        The invoice payload.
     * @param string  $providerToken  Payment provider token.
     * @param string  $providerData   JSON-encoded data shared with the provider.
     * @param string  $startParameter Unique bot start_parameter for this invoice.
     */
    public function __construct(
        Invoice $invoice,
        string $title,
        string $description,
        string $photoUrl,
        int $photoSize,
        int $photoWidth,
        int $photoHeight,
        string $payload,
        string $providerToken,
        string $providerData,
        string $startParameter
    ) {
        parent::__construct();

        $this->invoice = $invoice;
        $this->title = $title;
        $this->description = $description;
        $this->photoUrl = $photoUrl;
        $this->photoSize = $photoSize;
        $this->photoWidth = $photoWidth;
        $this->photoHeight = $photoHeight;
        $this->payload = $payload;
        $this->providerToken = $providerToken;
        $this->providerData = $providerData;
        $this->startParameter = $startParameter;
    }

    /**
     * Builds an instance from a decoded TDLib JSON array (snake_case keys).
     */
    public static function fromArray(array $array): InputMessageInvoice
    {
        return new static(
            TdSchemaRegistry::fromArray($array['invoice']),
            $array['title'],
            $array['description'],
            $array['photo_url'],
            $array['photo_size'],
            $array['photo_width'],
            $array['photo_height'],
            $array['payload'],
            $array['provider_token'],
            $array['provider_data'],
            $array['start_parameter'],
        );
    }

    /**
     * Serializes back to the TDLib wire format, including the '@type' tag.
     */
    public function typeSerialize(): array
    {
        return [
            '@type' => static::TYPE_NAME,
            'invoice' => $this->invoice->typeSerialize(),
            'title' => $this->title,
            'description' => $this->description,
            'photo_url' => $this->photoUrl,
            'photo_size' => $this->photoSize,
            'photo_width' => $this->photoWidth,
            'photo_height' => $this->photoHeight,
            'payload' => $this->payload,
            'provider_token' => $this->providerToken,
            'provider_data' => $this->providerData,
            'start_parameter' => $this->startParameter,
        ];
    }

    public function getInvoice(): Invoice
    {
        return $this->invoice;
    }

    public function getTitle(): string
    {
        return $this->title;
    }

    public function getDescription(): string
    {
        return $this->description;
    }

    public function getPhotoUrl(): string
    {
        return $this->photoUrl;
    }

    public function getPhotoSize(): int
    {
        return $this->photoSize;
    }

    public function getPhotoWidth(): int
    {
        return $this->photoWidth;
    }

    public function getPhotoHeight(): int
    {
        return $this->photoHeight;
    }

    public function getPayload(): string
    {
        return $this->payload;
    }

    public function getProviderToken(): string
    {
        return $this->providerToken;
    }

    public function getProviderData(): string
    {
        return $this->providerData;
    }

    public function getStartParameter(): string
    {
        return $this->startParameter;
    }
}
| 30,215
|
https://github.com/jkritikos/exforge_challenge/blob/master/Resources/game/iphone/invite.js
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
exforge_challenge
|
jkritikos
|
JavaScript
|
Code
| 1,144
| 5,506
|
// Module-level state for the invite-friends feature (iPhone).
var enableFacebookSendInvitation = false;
// Number of currently ticked friend rows in the Facebook picker.
var numberOfSelectedCheckboxes = 0;
//UI components
var inviteFriendsBackgroundImage = null;
var inviteFriendsBackgroundTextImage = null;
var inviteFriendsCloseImage = null;
var inviteFriendsFacebookImage = null;
var inviteFriendsFacebookLabel = null;
var inviteFriendsEmailImage = null;
var inviteFriendsEmailLabel = null;
var inviteFriendsSmsImage = null;
var inviteFriendsSmsLabel = null;
var inviteFriendsFacebookBackgroundImage = null;
var inviteFriendsFacebookBackgroundHeader = null;
var inviteFriendsFacebookCloseImage = null;
var fbFriendsActIndicator = null;
var fbFriendsInvitationActIndicator = null;
var sendFBInvitationLabel = null;
var tableViewFacebookFriends = null;
//Error alert components
var fbAlertImage = null;
var fbOKButton = null;
var fbAlertLabelText = null;
//Data components
// Map of selected Facebook friend ids (used as an id -> id lookup table).
var selectedFriendsForFBInvite = null;
function buildInviteFriendsPopup(){
selectedFriendsForFBInvite = [];
//Invite friends background
inviteFriendsBackgroundImage = Ti.UI.createImageView({
image:IPHONE5 ? IMAGE_PATH+'top/invite_bg-568h@2x.png' : IMAGE_PATH+'top/invite_bg.png' ,
transform:SCALE_ZERO,
zIndex:10
});
//Invite friends text
inviteFriendsBackgroundTextImage = Ti.UI.createImageView({
image:IMAGE_PATH+'top/invite_text.png',
top:IPHONE5 ? 78 : 33
});
//Invite friends close button
inviteFriendsCloseImage = Ti.UI.createButton({
backgroundImage:IMAGE_PATH+'top/invite_x.png',
top:IPHONE5 ? 45 : 0,
right:IPHONE5? 10:5,
width:41,
height:40
});
//Invite friends by facebook icon
inviteFriendsFacebookImage = Ti.UI.createButton({
backgroundImage:IMAGE_PATH+'top/invite_fb.png',
top:IPHONE5 ? 155 : 110,
width:73,
height:72
});
//Invite friends by facebook label
inviteFriendsFacebookLabel = Ti.UI.createLabel({
text:'FACEBOOK',
color:'white',
font:{fontSize:18, fontWeight:'bold', fontFamily:'Myriad Pro'},
top:IPHONE5? 233 : 188
});
//Invite friends by email icon
inviteFriendsEmailImage = Ti.UI.createButton({
backgroundImage:IMAGE_PATH+'top/invite_email.png',
top:IPHONE5? 267 : 222,
width:73,
height:72
});
//Invite friends by email label
inviteFriendsEmailLabel = Ti.UI.createLabel({
text:'EMAIL',
color:'white',
font:{fontSize:18, fontWeight:'bold', fontFamily:'Myriad Pro'},
top:IPHONE5? 345 : 300
});
//Invite friends by sms icon
inviteFriendsSmsImage = Ti.UI.createImageView({
image:IMAGE_PATH+'top/invite_sms.png',
top:IPHONE5? 377 : 332
});
//Invite friends by SMS label
inviteFriendsSmsLabel = Ti.UI.createLabel({
text:'SMS',
color:'white',
font:{fontSize:18, fontWeight:'bold', fontFamily:'Myriad Pro'},
top:IPHONE5? 455 : 410
});
//Pack the invitation view
inviteFriendsBackgroundImage.add(inviteFriendsBackgroundTextImage);
inviteFriendsBackgroundImage.add(inviteFriendsCloseImage);
inviteFriendsBackgroundImage.add(inviteFriendsFacebookImage);
inviteFriendsBackgroundImage.add(inviteFriendsFacebookLabel);
inviteFriendsBackgroundImage.add(inviteFriendsEmailImage);
inviteFriendsBackgroundImage.add(inviteFriendsEmailLabel);
inviteFriendsBackgroundImage.add(inviteFriendsSmsImage);
inviteFriendsBackgroundImage.add(inviteFriendsSmsLabel);
viewTopCategory.add(inviteFriendsBackgroundImage);
//Invite friends FB background
inviteFriendsFacebookBackgroundImage = Ti.UI.createImageView({
image:IPHONE5? IMAGE_PATH+'invite/invite_fb_bg-568h@2x.png' : IMAGE_PATH+'invite/invite_fb_bg.png' ,
visible:false,
zIndex:10
});
//header text
inviteFriendsFacebookBackgroundHeader = Ti.UI.createImageView({
image:IMAGE_PATH+'invite/invite_fb_text.png',
zIndex:10,
top:IPHONE5 ? 78 : 33
});
//Invite friends fb close button
inviteFriendsFacebookCloseImage = Ti.UI.createImageView({
image:IMAGE_PATH+'top/invite_x.png',
top:IPHONE5? 45 : 0,
right:5
});
//Invite friends fb loader
fbFriendsActIndicator = Ti.UI.createActivityIndicator({
style:Titanium.UI.iPhone.ActivityIndicatorStyle.BIG
});
//Invite friends fb select all label
sendFBInvitationLabel = Ti.UI.createLabel({
text:'SEND',
color:'white',
textAlign:'center',
right:50,
bottom:IPHONE5? 57 : 17,
height:40,
visible:false,
font:{fontSize:30, fontWeight:'bold', fontFamily:'321impact'}
});
//The table for fb friends
tableViewFacebookFriends = Titanium.UI.createTableView({
data:[],
backgroundColor:'transparent',
showVerticalScrollIndicator:false,
visible:false,
top:IPHONE5? 150 : 105,
bottom:105,
left:38,
right:33,
width:240
});
inviteFriendsFacebookBackgroundImage.add(fbFriendsActIndicator);
fbFriendsActIndicator.show();
//Event listeners
inviteFriendsFacebookBackgroundImage.add(inviteFriendsFacebookBackgroundHeader);
inviteFriendsFacebookBackgroundImage.add(inviteFriendsFacebookCloseImage);
inviteFriendsFacebookBackgroundImage.add(tableViewFacebookFriends);
inviteFriendsFacebookBackgroundImage.add(sendFBInvitationLabel);
viewTopCategory.add(inviteFriendsFacebookBackgroundImage);
sendFBInvitationLabel.addEventListener('click', handleSendFBInvitation);
//Event handler for facebook invite icon
inviteFriendsFacebookImage.addEventListener('click', handleInviteFriendsFBSelection);
//Invite friends close button event listener
inviteFriendsCloseImage.addEventListener('click', handleInviteFriendsPopupClose);
inviteFriendsEmailImage.addEventListener('click', handleInviteFriendsEmailSection);
inviteFriendsSmsImage.addEventListener('click', handleInviteFriendsSMSSelection);
inviteFriendsFacebookCloseImage.addEventListener('click', handleInviteFriendsFBClose);
//Event listener for fb friends table
tableViewFacebookFriends.addEventListener('click', handleFacebookFriendListClick);
}
// Mirror of buildInviteFriendsPopup(): removes every listener and view that
// was created there and nulls the references so they can be garbage-collected.
function destroyInviteFriendsPopup(){
	Ti.API.warn('destroyInviteFriendsPopup() called');
	selectedFriendsForFBInvite = null;
	//Event handler for facebook invite icon
	sendFBInvitationLabel.removeEventListener('click', handleSendFBInvitation);
	inviteFriendsFacebookImage.removeEventListener('click', handleInviteFriendsFBSelection);
	//Invite friends close button event listener
	inviteFriendsCloseImage.removeEventListener('click', handleInviteFriendsPopupClose);
	inviteFriendsEmailImage.removeEventListener('click', handleInviteFriendsEmailSection);
	inviteFriendsSmsImage.removeEventListener('click', handleInviteFriendsSMSSelection);
	inviteFriendsFacebookCloseImage.removeEventListener('click', handleInviteFriendsFBClose);
	//Event listener for fb friends table
	tableViewFacebookFriends.removeEventListener('click', handleFacebookFriendListClick);
	//Unpack the invitation view (reverse of the build step)
	inviteFriendsBackgroundImage.remove(inviteFriendsBackgroundTextImage);
	inviteFriendsBackgroundImage.remove(inviteFriendsCloseImage);
	inviteFriendsBackgroundImage.remove(inviteFriendsFacebookImage);
	inviteFriendsBackgroundImage.remove(inviteFriendsFacebookLabel);
	inviteFriendsBackgroundImage.remove(inviteFriendsEmailImage);
	inviteFriendsBackgroundImage.remove(inviteFriendsEmailLabel);
	inviteFriendsBackgroundImage.remove(inviteFriendsSmsImage);
	inviteFriendsBackgroundImage.remove(inviteFriendsSmsLabel);
	viewTopCategory.remove(inviteFriendsBackgroundImage);
	inviteFriendsFacebookBackgroundImage.remove(fbFriendsActIndicator);
	fbFriendsActIndicator.hide();
	inviteFriendsFacebookBackgroundImage.remove(inviteFriendsFacebookBackgroundHeader);
	inviteFriendsFacebookBackgroundImage.remove(inviteFriendsFacebookCloseImage);
	inviteFriendsFacebookBackgroundImage.remove(tableViewFacebookFriends);
	inviteFriendsFacebookBackgroundImage.remove(sendFBInvitationLabel);
	viewTopCategory.remove(inviteFriendsFacebookBackgroundImage);
	//Invite friends background
	inviteFriendsBackgroundImage = null;
	//Invite friends text
	inviteFriendsBackgroundTextImage = null;
	//Invite friends close button
	inviteFriendsCloseImage = null;
	//Invite friends by facebook icon
	inviteFriendsFacebookImage = null;
	//Invite friends by facebook label
	inviteFriendsFacebookLabel = null;
	//Invite friends by email icon
	inviteFriendsEmailImage = null;
	//Invite friends by email label
	inviteFriendsEmailLabel = null;
	//Invite friends by sms icon
	inviteFriendsSmsImage = null;
	//Invite friends by SMS label
	inviteFriendsSmsLabel = null;
	//Invite friends FB background
	inviteFriendsFacebookBackgroundImage = null;
	//header text
	inviteFriendsFacebookBackgroundHeader = null;
	//Invite friends fb close button
	inviteFriendsFacebookCloseImage = null;
	//Invite friends fb loader
	fbFriendsActIndicator = null;
	//Invite friends fb select all label
	sendFBInvitationLabel = null;
	//The table for fb friends
	tableViewFacebookFriends = null;
}
// Close-button handler: collapses the invite popup, tears it down, and
// restores the bottom bar and the invite icon.
function handleInviteFriendsPopupClose(){
	if (SOUNDS_MODE) audioClick.play();
	// Shrink the popup away before destroying its components.
	inviteFriendsBackgroundImage.transform = SCALE_ZERO;
	destroyInviteFriendsPopup();
	// iPhone only - bring the bar back.
	scoresBottomBar.opacity = 1;
	inviteFriendsIcon.show();
}
// Facebook-icon handler: when online and logged in to Facebook, fetches the
// friend list and swaps to the friend-picker view; otherwise shows the
// matching error alert. TODO refactor so it uses a common alert box.
function handleInviteFriendsFBSelection(){
	if (SOUNDS_MODE) {
		audioClick.play();
	}
	// Guard clauses replace the original nested conditionals.
	if (Titanium.Network.online != true) {
		buildAlert(MSG_NO_INTERNET_CONNECTION);
		return;
	}
	if (!Titanium.Facebook.loggedIn) {
		alertNoFacebookConnection.show();
		return;
	}
	facebookGetAllFriends();
	inviteFriendsBackgroundImage.hide();
	inviteFriendsFacebookBackgroundImage.show();
}
// SEND-button handler: posts one wall message per selected friend through a
// single Graph API batch request, showing a spinner while it is in flight and
// a success/failure alert when the response arrives.
// FIX: the fbOKButton object literal declared zIndex twice; the duplicate
// property has been removed (last-one-wins made it harmless but confusing).
function handleSendFBInvitation(){
	Ti.API.warn('handleSendFBInvitation() called');
	//revert to 0 so we can select more users after the invitations are sent
	numberOfSelectedCheckboxes = 0;
	if (Titanium.Network.online != true){
		buildAlert(MSG_NO_INTERNET_CONNECTION);
		return;
	}
	fbAlertImage = Titanium.UI.createImageView({
		image:IMAGE_PATH+'alert/alert_zoafuta.png',
		zIndex:12
	});
	// Spinner shown on the alert while the batch request runs.
	fbFriendsInvitationActIndicator = Ti.UI.createActivityIndicator({
		style:Titanium.UI.iPhone.ActivityIndicatorStyle.BIG
	});
	fbAlertImage.add(fbFriendsInvitationActIndicator);
	fbFriendsInvitationActIndicator.show();
	viewTopCategory.add(fbAlertImage);
	// Build one POST-to-wall entry per selected friend for the batch API.
	var batchRequests = [];
	for(var ele in selectedFriendsForFBInvite){
		Ti.API.warn('adding '+ele+' to batch request');
		batchRequests.push({
			"method":"POST",
			"relative_url":ele + "/feed",
			"body":"message="+INVITE_BODY_FB
		});
	}
	var fbResult = "";
	Titanium.Facebook.requestWithGraphPath('', {batch:batchRequests}, "POST", function(e) {
		if (e.success) {
			Ti.API.warn('FACEBOOK - Success in posting multi-message! '+JSON.stringify(e));
			fbResult = MSG_FB_INVITE_SUCCESS;
		} else {
			if (e.error) {
				Ti.API.warn('FACEBOOK - ERROR in posting multi-message '+e.error);
			} else {
				Ti.API.warn('FACEBOOK - UNKNOWN response in posting multi-message');
			}
			fbResult = MSG_FB_INVITE_FAILURE;
		}
		// Replace the spinner with an OK button and the result text.
		fbAlertImage.remove(fbFriendsInvitationActIndicator);
		fbFriendsInvitationActIndicator.hide();
		fbFriendsInvitationActIndicator = null;
		fbOKButton = Titanium.UI.createImageView({
			image:IMAGE_PATH+'alert/yes.png',
			bottom:10,
			zIndex:12
		});
		fbAlertLabelText = Titanium.UI.createLabel({
			text:fbResult,
			color:'white',
			textAlign:'center',
			top:31,
			left:45,
			right:45,
			font:{fontSize:16, fontWeight:'regular', fontFamily:'Myriad Pro'}
		});
		fbAlertImage.add(fbOKButton);
		fbAlertImage.add(fbAlertLabelText);
		fbOKButton.addEventListener('click', dismissAlert);
	});
}
// E-mail icon handler: opens the system mail composer pre-filled with the
// invitation subject and HTML body.
function handleInviteFriendsEmailSection(){
	if (SOUNDS_MODE) {
		audioClick.play();
	}
	var dialog = Ti.UI.createEmailDialog();
	dialog.setBarColor('black');
	dialog.setHtml(true);
	dialog.setSubject(INVITE_SUBJECT);
	dialog.setMessageBody(INVITE_BODY_EMAIL);
	dialog.open();
}
// SMS icon handler: opens a pre-filled SMS composer (com.omorandi module) and
// hides the status bar while it is shown.
function handleInviteFriendsSMSSelection(){
	if (SOUNDS_MODE) {
		audioClick.play();
	}
	var sms = require("com.omorandi");
	var dialog = sms.createSMSDialog({
		messageBody: INVITE_BODY_SMS,
		barColor: 'black'
	});
	dialog.open({animated: true});
	Titanium.UI.iPhone.hideStatusBar();
}
// Returns from the Facebook friend picker back to the main invite popup.
function handleInviteFriendsFBClose(){
	inviteFriendsFacebookBackgroundImage.hide();
	inviteFriendsBackgroundImage.show();
}
// Row-tap handler in the Facebook-friends table: toggles the row's checkbox,
// tracks the selection (capped at MAX_FB_INVITES), and shows/hides SEND.
// FIX: the counter used to be incremented BEFORE the MAX_FB_INVITES check, so
// a rejected over-limit tap still bumped numberOfSelectedCheckboxes and the
// count drifted permanently (later deselects could even push it negative).
// The counter now changes only when the checkbox state actually changes.
function handleFacebookFriendListClick(e){
	var row = e.row;
	var fbId = row.facebookId;
	var checkboxImage = row.children[0];
	if(checkboxImage.image == IMAGE_PATH+'invite/tick_off.png'){
		if(numberOfSelectedCheckboxes >= MAX_FB_INVITES){
			buildAlert('Μπορείς να επιλέξεις μέχρι 20 φίλους κάθε φορά.');
		} else {
			numberOfSelectedCheckboxes++;
			checkboxImage.image = IMAGE_PATH+'invite/tick_on.png';
			//Add selection to our collection
			selectedFriendsForFBInvite[fbId] = fbId;
		}
	} else {
		numberOfSelectedCheckboxes--;
		checkboxImage.image = IMAGE_PATH+'invite/tick_off.png';
		//Remove selection from our collection
		if(selectedFriendsForFBInvite[fbId] != undefined){
			delete selectedFriendsForFBInvite[fbId];
		}
	}
	//Show/hide the send button
	enableFacebookSendInvitation = numberOfSelectedCheckboxes > 0;
	if(enableFacebookSendInvitation){
		sendFBInvitationLabel.show();
	} else {
		sendFBInvitationLabel.hide();
	}
}
// Creates and shows a modal alert displaying the given message, with an OK
// button that dismisses it. Must be torn down via destroyAlert().
// FIX: the fbOKButton object literal declared zIndex twice; the duplicate
// property has been removed (last-one-wins made it harmless but confusing).
function buildAlert(msg){
	fbAlertImage = Titanium.UI.createImageView({
		image:IMAGE_PATH+'alert/alert_zoafuta.png',
		zIndex:12
	});
	fbOKButton = Titanium.UI.createImageView({
		image:IMAGE_PATH+'alert/yes.png',
		bottom:10,
		zIndex:12
	});
	fbAlertLabelText = Titanium.UI.createLabel({
		text:msg,
		color:'white',
		textAlign:'center',
		top:31,
		left:45,
		right:45,
		font:{fontSize:16, fontWeight:'regular', fontFamily:'Myriad Pro'}
	});
	fbAlertImage.add(fbAlertLabelText);
	fbAlertImage.add(fbOKButton);
	fbOKButton.addEventListener('click', dismissAlert);
	viewTopCategory.add(fbAlertImage);
}
// Tears down the alert created by buildAlert(): unhooks the OK handler,
// removes the views, and releases the references.
function destroyAlert(){
	Ti.API.warn('destroyAlert() in invite.js called');
	fbOKButton.removeEventListener('click', dismissAlert);
	fbAlertImage.remove(fbAlertLabelText);
	fbAlertImage.remove(fbOKButton);
	viewTopCategory.remove(fbAlertImage);
	fbAlertImage = fbOKButton = fbAlertLabelText = null;
}
// OK-button handler on the alert: plays the click sound and dismisses it.
function dismissAlert(){
	if (SOUNDS_MODE) audioClick.play();
	destroyAlert();
}
// Builds one row for the Facebook-friends table: the friend's name plus an
// (initially unticked) checkbox. The Facebook id is stashed on the row so the
// click handler can read it back.
// FIX: the original shadowed the 'name' parameter with a local 'var name'
// holding the Label; it only worked because the initializer read the
// parameter before the reassignment. The label now has its own identifier.
function createFacebookFriendRow(name, id){
	var row = Ti.UI.createTableViewRow({
		height:45,
		backgroundColor:'transparent',
		selectedBackgroundColor:'transparent',
		className:'FB_FRIENDS_LIST',
		facebookId:id
	});
	var nameLabel = Ti.UI.createLabel({
		text:name,
		color:'white',
		left:10,
		right:38,
		top:13,
		width:190,
		font:{fontSize:17, fontWeight:'regular', fontFamily:'Myriad Pro'}
	});
	var checkbox = Ti.UI.createImageView({
		image:IMAGE_PATH+'invite/tick_off.png',
		right:10
	});
	row.add(checkbox);
	row.add(nameLabel);
	return row;
}
// Fired once the Facebook friend list arrives: hides the spinner and fills
// the table with one row per friend.
viewTopCategory.addEventListener('renderFacebookFriendsTable', function(data){
	fbFriendsActIndicator.hide();
	var rows = [];
	for (var i = 0; i < data.friends.length; i++) {
		var friend = data.friends[i];
		rows.push(createFacebookFriendRow(friend.name, friend.id));
	}
	tableViewFacebookFriends.show();
	tableViewFacebookFriends.setData(rows);
});
| 15,867
|
https://github.com/zikani03/articulated/blob/master/src/main/java/me/zikani/labs/articulated/model/Amount.java
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
articulated
|
zikani03
|
Java
|
Code
| 323
| 767
|
/**
* MIT License
*
* Copyright (c) 2020 - 2022 Zikani Nyirenda Mwase and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.zikani.labs.articulated.model;
import java.util.regex.Pattern;
/**
 * A monetary amount parsed from free text such as "MWK 1.5bn" or
 * "3 million kwacha", normalized to the base (unscaled) denomination.
 */
public class Amount {
    /**
     * Captures the numeric part ("amount") and the denomination word or
     * abbreviation ("denomination"). The abbreviation alternative
     * {@code (m|b|tr)(illio)?n} can only produce "mn", "bn" or "trn".
     */
    public static final Pattern KWACHA_REGEX_2 = Pattern.compile("(m?w?k)?\\s?(?<amount>\\d+(\\.?\\d+)?)\\s?(?<denomination>((hundred)?thousand|(m|b|tr)(illio)?n)(\\s+kwacha)?)");

    // Amount scaled to the base denomination (e.g. 1.5bn -> 1_500_000_000).
    private final double amount;
    // Normalized denomination word ("million", "billion", "trillion", ...).
    private final String denomination;
    private final String currency;
    // The number exactly as it appeared in the source text.
    private final double amountInDenomination;
    // The denomination exactly as matched, before normalization.
    private final String denominationRaw;

    /**
     * @param currency             Currency code/label (e.g. "MWK").
     * @param amountInDenomination The number as written in the source text.
     * @param denominationRaw      The matched denomination text, raw.
     */
    public Amount(String currency, double amountInDenomination, String denominationRaw) {
        this.currency = currency;
        this.amountInDenomination = amountInDenomination;
        this.denominationRaw = denominationRaw;
        this.denomination = this.normalizeDenomination();
        this.amount = calcActualAmountInBaseDenomination();
    }

    private String normalizeDenomination() {
        // Strip an optional trailing "kwacha" and expand abbreviations.
        // BUG FIX: the regex emits "trn" for trillions, but the old code only
        // expanded "tn" (which the regex can never produce), so trillion
        // amounts fell through unscaled. "trn" must be replaced before the
        // two-letter abbreviations; "tn" is kept for backward compatibility.
        return this.denominationRaw.replaceAll("\\s?kwacha", "")
            .replace("trn", "trillion")
            .replace("tn", "trillion")
            .replace("bn", "billion")
            .replace("mn", "million");
    }

    private double calcActualAmountInBaseDenomination() {
        // NOTE(review): "thousand"/"hundredthousand" are matched by the regex
        // but intentionally(?) left unscaled here — confirm with callers.
        return switch (this.denomination) {
            case "million" -> this.amountInDenomination * 1_000_000d;
            case "billion" -> this.amountInDenomination * 1_000_000_000d;
            case "trillion" -> this.amountInDenomination * 1_000_000_000_000d;
            default -> this.amountInDenomination;
        };
    }

    public String getCurrency() {
        return currency;
    }

    public double getAmount() {
        return amount;
    }

    public String getDenomination() {
        return denomination;
    }
}
| 31,002
|
https://github.com/gplushi/Multiverse-Core/blob/master/src/main/java/com/onarandombox/MultiverseCore/utils/AnchorManager.java
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,022
|
Multiverse-Core
|
gplushi
|
Java
|
Code
| 573
| 1,673
|
/******************************************************************************
* Multiverse 2 Copyright (c) the Multiverse Team 2011. *
* Multiverse 2 is licensed under the BSD License. *
* For more information please check the README.md file included *
* with this project. *
******************************************************************************/
package com.onarandombox.MultiverseCore.utils;
import com.dumptruckman.minecraft.util.Logging;
import com.onarandombox.MultiverseCore.MultiverseCore;
import org.bukkit.Location;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.entity.Player;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
/**
* Manages anchors.
*/
public class AnchorManager {
private MultiverseCore plugin;
private Map<String, Location> anchors;
private FileConfiguration anchorConfig;
    /**
     * Creates an anchor manager backed by the given plugin instance.
     * Anchors are not loaded until {@link #loadAnchors()} is called.
     *
     * @param plugin The Multiverse core plugin.
     */
    public AnchorManager(MultiverseCore plugin) {
        this.plugin = plugin;
        this.anchors = new HashMap<String, Location>();
    }
    /**
     * Loads all anchors from {@code anchors.yml} in the plugin's data folder,
     * replacing any previously loaded anchors.
     */
    public void loadAnchors() {
        this.anchors = new HashMap<String, Location>();
        this.anchorConfig = YamlConfiguration.loadConfiguration(new File(this.plugin.getDataFolder(), "anchors.yml"));
        this.ensureConfigIsPrepared();
        ConfigurationSection anchorsSection = this.anchorConfig.getConfigurationSection("anchors");
        Set<String> anchorKeys = anchorsSection.getKeys(false);
        for (String key : anchorKeys) {
            // Serialized format: world:x,y,z:pitch:yaw
            Location anchorLocation = plugin.getLocationManipulation().stringToLocation(anchorsSection.getString(key, ""));
            if (anchorLocation != null) {
                Logging.config("Loading anchor: '%s'...", key);
                this.anchors.put(key, anchorLocation);
            } else {
                // Unparseable entries are skipped (but left in the file).
                Logging.warning("The location for anchor '%s' is INVALID.", key);
            }
        }
    }
private void ensureConfigIsPrepared() {
if (this.anchorConfig.getConfigurationSection("anchors") == null) {
this.anchorConfig.createSection("anchors");
}
}
/**
* Saves all anchors.
* @return True if all anchors were successfully saved.
*/
public boolean saveAnchors() {
try {
this.anchorConfig.save(new File(this.plugin.getDataFolder(), "anchors.yml"));
return true;
} catch (IOException e) {
Logging.severe("Failed to save anchors.yml. Please check your file permissions.");
return false;
}
}
/**
* Gets the {@link Location} associated with an anchor.
* @param anchor The name of the anchor.
* @return The {@link Location}.
*/
public Location getAnchorLocation(String anchor) {
if (this.anchors.containsKey(anchor)) {
return this.anchors.get(anchor);
}
return null;
}
/**
* Saves an anchor.
* @param anchor The name of the anchor.
* @param location The location of the anchor as string.
* @return True if the anchor was successfully saved.
*/
public boolean saveAnchorLocation(String anchor, String location) {
Location parsed = plugin.getLocationManipulation().stringToLocation(location);
return parsed != null && this.saveAnchorLocation(anchor, parsed);
}
/**
* Saves an anchor.
* @param anchor The name of the anchor.
* @param l The {@link Location} of the anchor.
* @return True if the anchor was successfully saved.
*/
public boolean saveAnchorLocation(String anchor, Location l) {
if (l == null) {
return false;
}
this.anchorConfig.set("anchors." + anchor, plugin.getLocationManipulation().locationToString(l));
this.anchors.put(anchor, l);
return this.saveAnchors();
}
/**
* Gets all anchors.
* @return An unmodifiable {@link Set} containing all anchors.
*/
public Set<String> getAllAnchors() {
return Collections.unmodifiableSet(this.anchors.keySet());
}
/**
* Gets all anchors that the specified {@link Player} can access.
* @param p The {@link Player}.
* @return An unmodifiable {@link Set} containing all anchors the specified {@link Player} can access.
*/
public Set<String> getAnchors(Player p) {
if (p == null) {
return this.anchors.keySet();
}
Set<String> myAnchors = new HashSet<String>();
for (String anchor : this.anchors.keySet()) {
Location ancLoc = this.anchors.get(anchor);
if (ancLoc == null) {
continue;
}
String worldPerm = "multiverse.access." + ancLoc.getWorld().getName();
// Add to the list if we're not enforcing access
// OR
// We are enforcing access and the user has the permission.
if (!this.plugin.getMVConfig().getEnforceAccess() ||
(this.plugin.getMVConfig().getEnforceAccess() && p.hasPermission(worldPerm))) {
myAnchors.add(anchor);
} else {
Logging.finer(String.format("Not adding anchor %s to the list, user %s doesn't have the %s " +
"permission and 'enforceaccess' is enabled!",
anchor, p.getName(), worldPerm));
}
}
return Collections.unmodifiableSet(myAnchors);
}
/**
* Deletes the specified anchor.
* @param s The name of the anchor.
* @return True if the anchor was successfully deleted.
*/
public boolean deleteAnchor(String s) {
if (this.anchors.containsKey(s)) {
this.anchors.remove(s);
this.anchorConfig.set("anchors." + s, null);
return this.saveAnchors();
}
return false;
}
}
| 1,182
|
https://github.com/Maddienis/travel_app_sm/blob/master/src/features/city_combine.py
|
Github Open Source
|
Open Source
|
MIT
| null |
travel_app_sm
|
Maddienis
|
Python
|
Code
| 97
| 348
|
import pandas as pd
import numpy as np
from pycountry_convert import country_alpha2_to_continent_code, country_name_to_country_alpha2
# This will merge attraction and food on the city level all numeric passes it to add the continent of each city
def mergeAttractionFood(attraction_df, food_df):
    """Join attraction and food features on their shared city-level index,
    then annotate each row with its continent via addContinent()."""
    combined = pd.merge(left=attraction_df, right=food_df,
                        left_index=True, right_index=True)
    return addContinent(combined)
def addContinent(city_df):
    """Add a 'continent' column derived from each row's country, then fold
    city/country into the index.

    Note: mutates ``city_df`` in place (set_index with inplace=True) and
    also returns it for chaining.
    """
    code_to_name = {
        'NA': 'North America',
        'SA': 'South America',
        'AS': 'Asia',
        'OC': 'Australia',
        'AF': 'Africa',
        'EU': 'Europe'}

    def continent_of(country):
        # country name -> ISO alpha-2 -> continent code -> display name
        return code_to_name[country_alpha2_to_continent_code(country_name_to_country_alpha2(country))]

    city_df['continent'] = [continent_of(c) for c in city_df['country']]
    city_df.set_index(['city', 'country'], append=True, inplace=True)
    return city_df
def dummyContinent(city_df):
    """One-hot encode the categorical columns of ``city_df``."""
    return pd.get_dummies(city_df)
| 4,348
|
https://github.com/horvatal/elasticsearch-net/blob/master/docs/index.asciidoc
|
Github Open Source
|
Open Source
|
Apache-2.0, LicenseRef-scancode-unknown-license-reference
| 2,020
|
elasticsearch-net
|
horvatal
|
AsciiDoc
|
Code
| 67
| 248
|
:title-separator: |
[[elasticsearch-net-reference]]
= Elasticsearch.Net and NEST: the .NET clients
////
IMPORTANT NOTE
==============
This file has been generated from https://github.com/elastic/elasticsearch-net/tree/master/src/Tests/Tests/index.asciidoc.
If you wish to submit a PR for any spelling mistakes, typos or grammatical errors for this file,
please modify the original csharp file found at the link and submit the PR with that change. Thanks!
////
include::intro.asciidoc[]
include::breaking-changes.asciidoc[]
include::conventions.asciidoc[]
include::low-level.asciidoc[]
include::high-level.asciidoc[]
include::troubleshooting.asciidoc[]
include::search.asciidoc[]
include::query-dsl.asciidoc[]
include::aggregations.asciidoc[]
| 13,272
|
https://github.com/kemaldemirgil/team-profile-generator/blob/master/index.js
|
Github Open Source
|
Open Source
|
MIT
| null |
team-profile-generator
|
kemaldemirgil
|
JavaScript
|
Code
| 600
| 1,670
|
//////////////////////////////////////Init//////////////////////////////////////
const inquirer = require('inquirer');
const fs = require("fs");
const colors = require('colors');
const Engineer = require("./lib/Engineer");
const Intern = require("./lib/Intern");
const Manager = require("./lib/Manager");
const Generator = require("./src/generator");
const HTMLGenerator = require("./src/HTMLGenerator");
let generatedTemplate = "";
let defaultID = 1
//////////////////////////////////////Programs//////////////////////////////////////
// Greeting
console.log("\n\nW e l c o m e t o t h e T e a m G e n e r a t o r\n\n".rainbow);
console.log("Created by kemaldemirgil\n\n\n".gray.dim)
// Asking if manager, if so, complete manager
// Entry point: optionally collects the team manager's details, appends the
// manager card to the shared template, then opens the employee menu.
const starter = () => {
  inquirer.prompt([
    {
      type: 'confirm',
      message: "Would you like to add a manager?".brightGreen,
      name: 'manager',
    },
  ])
  .then(({manager}) => {
    if (manager) {
      inquirer.prompt([
        {
          type: 'input',
          message: "Please enter the team manager's name...".brightMagenta,
          name: 'name',
          // Fix: inquirer's validate must return true or an error string.
          // Returning console.log(...) yields undefined, which rejects the
          // input without showing any inline message.
          validate: (value) => value ? true : "Please enter a valid name".red.dim
        },
        {
          type: 'input',
          message: "Please enter the team manager's employee ID...".brightMagenta,
          name: 'id',
          default: defaultID
        },
        {
          type: 'input',
          message: "Please enter the team manager's email address...".brightMagenta,
          name: 'email',
          validate: (value) => value ? true : "Please enter a valid email address".red.dim
        },
        {
          type: 'input',
          message: "Please enter the team manager's office number...".brightMagenta,
          name: 'office',
          validate: (value) => value ? true : "Please enter a valid office number".red.dim
        }
      ])
      .then(function({name, id, email, office}) {
        let manager = new Manager(name, id, email, office);
        let generator = new Generator();
        // NOTE: manangerGenerator is the Generator API's (misspelled) method name.
        generatedTemplate += `${generator.manangerGenerator(manager)}`;
        menu();
      })
    } else {
      // No manager: reset so the first employee gets ID 1 after menu() increments.
      defaultID = 0;
      menu();
    }
  })
}
// Employee type
// Main loop: bumps the default employee ID, then routes to the chosen
// employee questionnaire or finalizes the team.
const menu = () => {
  defaultID++
  inquirer.prompt([
    {
      type: 'list',
      message: "Please select a new employee or end the selection...".brightGreen,
      name: 'menu',
      choices: ["Engineer".brightBlue, "Intern".brightYellow, "End".brightRed]
    }
  ])
  .then((response) => {
    // The choices are colors-decorated strings, so the comparison must use
    // the identically-colored literal.
    if (response.menu === "Engineer".brightBlue) {
      engineerQuesitons();
    } else if (response.menu === "Intern".brightYellow) {
      internQuestions();
    } else {
      endTeam();
    }
  })
}
// Engineer questions
const engineerQuesitons = () => {
inquirer.prompt([
{
type: 'input',
message: "Please enter the engineer's name...".brightBlue,
name: 'name',
validate: (value) => {if (value){return true} else
{return console.log("Please enter a valid name".red.dim)}}
},
{
type: 'input',
message: "Please enter the engineer's employee ID...".brightBlue,
name: 'id',
default: defaultID
},
{
type: 'input',
message: "Please enter the engineer's email address...".brightBlue,
name: 'email',
validate: (value) => {if (value){return true} else
{return console.log("Please enter a valid email address".red.dim)}}
},
{
type: 'input',
message: "Please enter the engineer's Github username...".brightBlue,
name: 'github',
validate: (value) => {if (value){return true} else
{return console.log("Please enter a valid Github username".red.dim)}}
}
])
.then(function({name, id, email, github}) {
let engineer = new Engineer(name, id, email, github);
let generator = new Generator();
generatedTemplate += `\n ${generator.engineerGenerator(engineer)}`
menu();
})
}
// Intern questions
const internQuestions = () => {
inquirer.prompt([
{
type: 'input',
message: "Please enter the intern's name...".brightYellow,
name: 'name',
validate: (value) => {if (value){return true} else
{return console.log("Please enter a valid name".red.dim)}}
},
{
type: 'input',
message: "Please enter the intern's employee ID...".brightYellow,
name: 'id',
default: defaultID
},
{
type: 'input',
message: "Please enter the intern's email address...".brightYellow,
name: 'email',
validate: (value) => {if (value){return true} else
{return console.log("Please enter a valid email address".red.dim)}}
},
{
type: 'input',
message: "Please enter the intern's school...".brightYellow,
name: 'school',
validate: (value) => {if (value){return true} else
{return console.log("Please enter a valid school".red.dim)}}
}
])
.then(function({name, id, email, school}) {
let intern = new Intern(name, id, email, school);
let generator = new Generator();
generatedTemplate += `\n ${generator.internGenerator(intern)}`
menu();
})
}
// Team generator finalize
const endTeam = () => {
const filename = `generatedTeam.html`;
const temp = new HTMLGenerator;
fs.writeFile("./dist/" + filename, temp.template(generatedTemplate), (err) =>
err ? console.log(err) :
console.log("\n\n\nYour team has been generated in the 'dist' folder.\nThanks for using the Team Generator!".brightGreen));
}
//////////////////////////////////////RUN//////////////////////////////////////
starter();
| 33,029
|
https://github.com/MarbleDice/smaugdb/blob/master/src/main/java/com/bromleyoil/smaugdb/model/Room.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
smaugdb
|
MarbleDice
|
Java
|
Code
| 363
| 1,257
|
package com.bromleyoil.smaugdb.model;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import com.bromleyoil.smaugdb.model.enums.Direction;
import com.bromleyoil.smaugdb.model.enums.RoomFlag;
import com.bromleyoil.smaugdb.model.enums.SectorType;
/**
 * Plain data holder for a single room: identity, description, exits, flags,
 * spawned content, and the path used to reach it.
 */
public class Room {
    // Whether the room has been loaded (set externally by the loader).
    private boolean isLoaded;
    private Area area;
    private int vnum;
    private String name;
    private List<String> description;
    private List<Exit> exits = new ArrayList<>();
    // Number of randomly-assigned exits; a value > 0 marks the room as a maze.
    private int randomExitCount;
    private List<RoomFlag> roomFlags = new ArrayList<>();
    private SectorType sectorType;
    private List<String> extras = new ArrayList<>();
    // Item populations and mob spawns placed directly in this room.
    private List<Pop> containedPops = new ArrayList<>();
    private List<Spawn> containedSpawns = new ArrayList<>();
    private List<Prog> progs = new ArrayList<>();
    private Path path;

    @Override
    public String toString() {
        return getName();
    }

    public boolean isLoaded() {
        return isLoaded;
    }

    public void setIsLoaded(boolean isLoaded) {
        this.isLoaded = isLoaded;
    }

    public Area getArea() {
        return area;
    }

    public void setArea(Area area) {
        this.area = area;
    }

    public int getVnum() {
        return vnum;
    }

    public void setVnum(int vnum) {
        this.vnum = vnum;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    // Room name with the first letter capitalized, for display.
    public String getTitle() {
        return StringUtils.capitalize(name);
    }

    // Room name wrapped in brackets, for display.
    public String getStyledName() {
        return "[" + name + "]";
    }

    public List<String> getDescription() {
        return description;
    }

    public void setDescription(List<String> description) {
        this.description = description;
    }

    public List<Exit> getExits() {
        return exits;
    }

    public void setExits(List<Exit> exits) {
        this.exits = exits;
    }

    // Returns the exit leading in the given direction, or null if none exists.
    public Exit getExit(Direction direction) {
        return exits.stream().filter(x -> x.getDirection() == direction).findAny().orElse(null);
    }

    public int getRandomExitCount() {
        return randomExitCount;
    }

    public void setRandomExitCount(int randomExitCount) {
        this.randomExitCount = randomExitCount;
    }

    // A room with any random exits is considered a maze.
    public boolean isMaze() {
        return randomExitCount > 0;
    }

    public List<RoomFlag> getRoomFlags() {
        return roomFlags;
    }

    public void setRoomFlags(List<RoomFlag> roomFlags) {
        this.roomFlags = roomFlags;
    }

    public SectorType getSectorType() {
        return sectorType;
    }

    public void setSectorType(SectorType sectorType) {
        this.sectorType = sectorType;
    }

    public List<String> getExtras() {
        return extras;
    }

    public void setExtras(List<String> extras) {
        this.extras = extras;
    }

    // Directions of all exits, in exit order.
    public List<Direction> getDirections () {
        return exits.stream().map(Exit::getDirection).collect(Collectors.toList());
    }

    public Collection<Pop> getContainedPops() {
        return Collections.unmodifiableCollection(containedPops);
    }

    public void addContainedPop(Pop containedPop) {
        containedPops.add(containedPop);
    }

    public Collection<Spawn> getContainedSpawns() {
        return Collections.unmodifiableCollection(containedSpawns);
    }

    public void addContainedSpawn(Spawn containedSpawn) {
        containedSpawns.add(containedSpawn);
    }

    public Collection<Prog> getProgs() {
        return Collections.unmodifiableCollection(progs);
    }

    // Registers this room as the owner of each prog before storing the list.
    public void setProgs(List<Prog> progs) {
        progs.stream().forEach(p -> p.setOwner(this));
        this.progs = progs;
    }

    public Path getPath() {
        return path;
    }

    public void setPath(Path path) {
        this.path = path;
    }
}
| 12,771
|
https://github.com/ei09010/plumber/blob/master/vendor/github.com/batchcorp/schemas/build/go/events/outbound.pb.go
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
plumber
|
ei09010
|
Go
|
Code
| 462
| 1,544
|
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: outbound.proto
package events
import (
fmt "fmt"
proto "github.com/golang/protobuf/proto"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
// Emitted by the reader to HSB which is then consumed by the replayer
type Outbound struct {
	ReplayId             string   `protobuf:"bytes,1,opt,name=replay_id,json=replayId,proto3" json:"replay_id,omitempty"`
	Blob                 []byte   `protobuf:"bytes,2,opt,name=blob,proto3" json:"blob,omitempty"`
	Last                 bool     `protobuf:"varint,3,opt,name=last,proto3" json:"last,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto plumbing generated by protoc-gen-go; do not modify by hand.
func (m *Outbound) Reset()         { *m = Outbound{} }
func (m *Outbound) String() string { return proto.CompactTextString(m) }
func (*Outbound) ProtoMessage()    {}
func (*Outbound) Descriptor() ([]byte, []int) {
	return fileDescriptor_5dbaa15aa01abbc0, []int{0}
}

func (m *Outbound) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_Outbound.Unmarshal(m, b)
}
func (m *Outbound) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_Outbound.Marshal(b, m, deterministic)
}
func (m *Outbound) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Outbound.Merge(m, src)
}
func (m *Outbound) XXX_Size() int {
	return xxx_messageInfo_Outbound.Size(m)
}
func (m *Outbound) XXX_DiscardUnknown() {
	xxx_messageInfo_Outbound.DiscardUnknown(m)
}

var xxx_messageInfo_Outbound proto.InternalMessageInfo

// Generated nil-safe accessors for each field.
func (m *Outbound) GetReplayId() string {
	if m != nil {
		return m.ReplayId
	}
	return ""
}

func (m *Outbound) GetBlob() []byte {
	if m != nil {
		return m.Blob
	}
	return nil
}

func (m *Outbound) GetLast() bool {
	if m != nil {
		return m.Last
	}
	return false
}

func init() {
	proto.RegisterType((*Outbound)(nil), "events.Outbound")
}

func init() { proto.RegisterFile("outbound.proto", fileDescriptor_5dbaa15aa01abbc0) }

var fileDescriptor_5dbaa15aa01abbc0 = []byte{
	// 161 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x2c, 0x8e, 0xb1, 0x0a, 0xc2, 0x30,
	0x14, 0x00, 0x89, 0x4a, 0x69, 0x83, 0x38, 0x64, 0x2a, 0xb8, 0x14, 0xa7, 0x0e, 0xd2, 0x0c, 0xfe,
	0x81, 0x9b, 0x53, 0xa1, 0xa3, 0x8b, 0xe4, 0x25, 0xa1, 0x2d, 0xa4, 0x7d, 0x21, 0x79, 0x11, 0xfc,
	0x7b, 0x31, 0x75, 0x3b, 0xee, 0x96, 0xe3, 0x27, 0x4c, 0x04, 0x98, 0x56, 0xd3, 0xf9, 0x80, 0x84,
	0xa2, 0xb0, 0x6f, 0xbb, 0x52, 0xbc, 0xf4, 0xbc, 0xec, 0xff, 0x45, 0x9c, 0x79, 0x15, 0xac, 0x77,
	0xea, 0xf3, 0x9a, 0x4d, 0xcd, 0x1a, 0xd6, 0x56, 0x43, 0xb9, 0x89, 0x87, 0x11, 0x82, 0x1f, 0xc0,
	0x21, 0xd4, 0xbb, 0x86, 0xb5, 0xc7, 0x21, 0xf3, 0xcf, 0x39, 0x15, 0xa9, 0xde, 0x37, 0xac, 0x2d,
	0x87, 0xcc, 0xf7, 0xee, 0x79, 0x1d, 0x67, 0x9a, 0x12, 0x74, 0x1a, 0x17, 0x09, 0x8a, 0xf4, 0xa4,
	0x31, 0x78, 0x19, 0xf5, 0x64, 0x17, 0x15, 0x25, 0xa4, 0xd9, 0x19, 0x39, 0xa2, 0xdc, 0x06, 0xa0,
	0xc8, 0x3f, 0xb7, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x3d, 0xa7, 0x9d, 0x20, 0xa1, 0x00, 0x00,
	0x00,
}
| 12,883
|
https://github.com/greschd/wannier90-utils/blob/master/src/w90utils/io/_bands.py
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| null |
wannier90-utils
|
greschd
|
Python
|
Code
| 70
| 229
|
import numpy as np
__all__ = ['read_kpoints', 'read_bands']
def read_kpoints(fname):
    """Read k-points from a band k-point file.

    The file has one header line followed by rows of ``kx ky kz weight``;
    only the coordinates are returned (weights are ignored).

    :param fname: path to the k-point file
    :returns: (nkpts, 3) ndarray of k-point coordinates
    """
    # ndmin=2 keeps a single-k-point file 2-D so the column slice works.
    raw_data = np.loadtxt(fname, skiprows=1, ndmin=2)
    return raw_data[:, (0, 1, 2)]
def read_bands(fname):
    """Read a band-structure file.

    The file lists one ``k-index energy`` pair per line, with a blank line
    separating consecutive bands; the index of the first blank line gives the
    number of k-points.

    :param fname: path to the bands file
    :returns: (nkpts, nbnds) ndarray of band energies
    :raises ValueError: if the file contains no blank separator line
    """
    nkpts = None
    with open(fname, 'r') as f:
        for iln, line in enumerate(f):
            if len(line.strip()) == 0:
                nkpts = iln
                break
    if nkpts is None:
        # Was a bare ``raise Exception``; ValueError (an Exception subclass)
        # keeps existing handlers working while explaining the failure.
        raise ValueError(
            'no blank separator line found in {!r}; cannot infer the number of k-points'.format(fname))
    raw_data = np.loadtxt(fname)
    nbnds = len(raw_data) // nkpts
    bands = raw_data[:, 1].reshape((nbnds, nkpts)).transpose()
    return bands
| 48,122
|
https://github.com/ejabu/lugoh/blob/master/app/components/VideoContent/VideoContent.js
|
Github Open Source
|
Open Source
|
MIT
| null |
lugoh
|
ejabu
|
JavaScript
|
Code
| 298
| 943
|
// @flow
import React, {Component} from 'react';
import {connect} from 'react-redux';
import {Link} from 'react-router';
import styles from './VideoContent.css';
import $ from 'jquery';
import { toPlay, toStop } from '../../actions/playback';
// Video player wired to the redux 'playback' state: PLAY/STOP transitions in
// the store drive the underlying <video> element and a 500ms progress poller.
@connect(state => ({ playback: state.playback}),)
export default class Content extends Component {
  constructor(props) {
    super(props);
    this.state = {
      "currentTime": 0,
      // Default video path; replaced when the user picks a file via onDrop.
      "source": "E:/kursi3d.mp4",
    };
  }
  // File-picker change handler: loads the chosen file (backslashes normalized
  // to forward slashes) and resets playback to the beginning.
  onDrop = (event) => {
    event.stopPropagation();
    var res2 = document.getElementById("myFile").files[0].path.replace(/\\/g, "/");
    var toSet2 = {
      source: res2,
      "currentTime": 0,
    }
    const { dispatch } = this.props;
    this.stop()
    this.setState(toSet2);
    toStop(dispatch)
  }
  // Polls the <video> element every 500ms and mirrors its currentTime
  // (truncated to whole seconds) into component state.
  intervalTrigger() {
    var self = this;
    self.harusInter = setInterval(function() {
      var initTime = document.getElementById("myVid").currentTime
      var res = parseInt(initTime)
      self.setState({"currentTime": res})
    }, 500);
    return self.harusInter
  };
  // Starts playback and the progress poller.
  start = () => {
    $("video")[0].play()
    this.intervalListener = this.intervalTrigger()
  }
  // Pauses playback and clears the progress poller.
  stop = () => {
    $("video")[0].pause()
    window.clearInterval(this.intervalListener);
  }
  // Dispatches the redux PLAY action; actual playback starts in
  // componentWillUpdate when the playback prop flips to "PLAY".
  startClick = (event) => {
    event.stopPropagation();
    const { dispatch } = this.props;
    toPlay(dispatch)
  }
  stopClick = (event) => {
    event.stopPropagation();
    const { dispatch } = this.props;
    toStop(dispatch)
  }
  // Detects playback transitions by diffing current vs next props.
  // NOTE(review): componentWillUpdate is deprecated in newer React versions;
  // consider componentDidUpdate if this component is ever upgraded.
  componentWillUpdate(nextProps, nextState) {
    if (this.props.playback == "STOP" && nextProps.playback == "PLAY") {
      this.start()
    }
    else if (this.props.playback == "PLAY" && nextProps.playback == "STOP") {
      this.stop()
    }
  }
  render() {
    // Formats a second count as zero-padded HH:MM:SS.
    function toHHMMSS(sec_num) {
      var hours = Math.floor(sec_num / 3600);
      var minutes = Math.floor((sec_num - (hours * 3600)) / 60);
      var seconds = sec_num - (hours * 3600) - (minutes * 60);
      if (hours < 10) {
        hours = "0" + hours;
      }
      if (minutes < 10) {
        minutes = "0" + minutes;
      }
      if (seconds < 10) {
        seconds = "0" + seconds;
      }
      return hours + ':' + minutes + ':' + seconds;
    }
    var timeString = toHHMMSS(this.state.currentTime)
    return (
      <div className={styles.transWrapper}>
        {this.props.playback}
        {this.state.currentTime}
        {timeString}
        <div>
          <video id="myVid" src={this.state.source}>
          Your browser does not support HTML5 video.</video>
        </div>
        <div>
          <button onClick={this.startClick.bind(this)}>start</button>
          <button onClick={this.stopClick.bind(this)}>pause</button>
          <input id="myFile" type='file' multiple ref='fileInput' onChange={this.onDrop.bind(this)}/>
        </div>
      </div>
    );
  }
}
| 23,143
|
https://github.com/P79N6A/smfyun/blob/master/application/views/weixin/wzb/admin/download.php
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,018
|
smfyun
|
P79N6A
|
PHP
|
Code
| 162
| 821
|
<style>
.nav-tabs-custom>.nav-tabs>li.active {
border-top-color: #00a65a;
}
.nav-tabs-custom>.nav-tabs>li.active {
border-top-color: #00a65a;
}
.reduce,.add{
font-size: 14px;
position: relative;
bottom: 10px;
}
.add{
margin-left: 20px;
margin-right: 30px;
}
.loc{
margin-top: 5px;
margin-bottom: 5px;
}
.qrcode{
height: 150px;
width: 150px;
}
.android{
margin-top: 5px;
width: 150px;
text-align: center;
font-size: 16px;
}
.subtext{
font-size: 1px;
}
</style>
<section class="content-header">
<h1>
直播应用下载
<small><?=$desc?></small>
</h1>
<ol class="breadcrumb">
<li><a href="#"><i class="fa fa-dashboard"></i> 首页</a></li>
<li class="active">直播应用下载</li>
</ol>
</section>
<!-- Main content -->
<section class="content">
<div class="row">
<div class="col-xs-12">
<div class="nav-tabs-custom">
<?php
// Decide which settings tab is marked active, based on which form was
// submitted; later checks win. Defaults to 'yz' on a plain GET.
if ($_POST['cfg']) $active = 'wx';
if (!$_POST || $_POST['yz']) $active = 'yz';
if ($_POST['menu']) $active = 'menu';
if ($_POST['text']) $active = 'text';
if (isset($_POST['password'])) $active = 'account';
?>
<script>
$(function () {
$('#cfg_<?=$active?>,#cfg_<?=$active?>_li').addClass('active');
});
</script>
<div class="tab-content">
<div class="tab-pane active" id="cfg_text">
<form role="form" method="post" enctype="multipart/form-data">
<div class="box-body">
<img class="qrcode" src="http://<?=$_SERVER["HTTP_HOST"]?>/wzb/img/qrcode.jpg">
<div class="android">安卓版<span class="subtext">(for android)</span></div>
</div><!-- /.box-body -->
</form>
</div>
<div class="box-footer">由于微信浏览器禁止下载文件,建议使用QQ浏览器扫码下载</div>
</div>
</div>
</div><!--/.col (left) -->
</section><!-- /.content -->
<script type="text/javascript">
// Fades the coupon overlay in over 500ms.
function showfunc(){
	$('.coupon').fadeIn(500);
}
// Fades the coupon overlay out over 500ms.
function hidefunc(){
	$('.coupon').fadeOut(500);
}
</script>
| 13,309
|
https://github.com/Dercharles/SpringBoot2/blob/master/src/main/java/com/example/springboot2/yang/common/mail/MailBean.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
SpringBoot2
|
Dercharles
|
Java
|
Code
| 365
| 1,258
|
package com.example.springboot2.yang.common.mail;
import com.example.springboot2.yang.foundation.vo.AttachmentVo;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * Value object describing a single e-mail message: addressing, content
 * (plain/HTML), attachments, inline resources, and reply/receipt metadata.
 */
public class MailBean implements Cloneable{
    // Mail message identifier
    private String messageID;
    // Mail subject
    private String subject;
    // Mail content
    private String content;
    private String contentText;
    private String contentHtml;
    // Sender address
    private String sender;
    // Sender display name
    private String senderName;
    // Date the mail was sent
    private Date senderDate;
    // "To" recipients
    private List<String> toAddresses;
    // CC recipients
    private List<String> ccAddresses;
    // BCC recipients
    private List<String> bccAddresses;
    // Reply-to addresses
    private List<String> replyToAddresses;
    // Mail attachments
    private List<AttachmentVo> attachments;
    // Inline (embedded) resources
    private List<AttachmentVo> innerResources;
    private Map<String,String> headers;
    // messageID of the mail that this mail replies to
    private String inReplyTo;
    // Whether a read receipt is requested
    private boolean receipt;

    public String getMessageID() {
        return messageID;
    }

    public void setMessageID(String messageID) {
        this.messageID = messageID;
    }

    public String getSubject() {
        return subject;
    }

    public void setSubject(String subject) {
        this.subject = subject;
    }

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        this.content = content;
    }

    public String getSender() {
        return sender;
    }

    public void setSender(String sender) {
        this.sender = sender;
    }

    public String getSenderName() {
        return senderName;
    }

    public void setSenderName(String senderName) {
        this.senderName = senderName;
    }

    public Date getSenderDate() {
        return senderDate;
    }

    public void setSenderDate(Date senderDate) {
        this.senderDate = senderDate;
    }

    public String getInReplyTo() {
        return inReplyTo;
    }

    public void setInReplyTo(String inReplyTo) {
        this.inReplyTo = inReplyTo;
    }

    public Map<String, String> getHeaders() {
        return headers;
    }

    public void setHeaders(Map<String, String> headers) {
        this.headers = headers;
    }

    public List<String> getToAddresses() {
        return toAddresses;
    }

    public void setToAddresses(List<String> toAddresses) {
        this.toAddresses = toAddresses;
    }

    public List<String> getCcAddresses() {
        return ccAddresses;
    }

    public void setCcAddresses(List<String> ccAddresses) {
        this.ccAddresses = ccAddresses;
    }

    public List<String> getBccAddresses() {
        return bccAddresses;
    }

    public void setBccAddresses(List<String> bccAddresses) {
        this.bccAddresses = bccAddresses;
    }

    public List<String> getReplyToAddresses() {
        return replyToAddresses;
    }

    public void setReplyToAddresses(List<String> replyToAddresses) {
        this.replyToAddresses = replyToAddresses;
    }

    public List<AttachmentVo> getAttachments() {
        return attachments;
    }

    public void setAttachments(List<AttachmentVo> attachments) {
        this.attachments = attachments;
    }

    public List<AttachmentVo> getInnerResources() {
        return innerResources;
    }

    public void setInnerResources(List<AttachmentVo> innerResources) {
        this.innerResources = innerResources;
    }

    public String getContentText() {
        return contentText;
    }

    public void setContentText(String contentText) {
        this.contentText = contentText;
    }

    public String getContentHtml() {
        return contentHtml;
    }

    public void setContentHtml(String contentHtml) {
        this.contentHtml = contentHtml;
    }

    public boolean isReceipt() {
        return receipt;
    }

    public void setReceipt(boolean receipt) {
        this.receipt = receipt;
    }

    // Shallow copy: list/map fields are shared with the clone.
    @Override
    protected Object clone() throws CloneNotSupportedException {
        return super.clone();
    }
}
| 25,974
|
https://github.com/BestJarvan/utils-tools/blob/master/dist/types/core/url.d.ts
|
Github Open Source
|
Open Source
|
MIT
| null |
utils-tools
|
BestJarvan
|
TypeScript
|
Code
| 59
| 155
|
/**
 * Filters empty/falsy entries out of an array (used when building URL
 * query strings).
 *
 * @ignore
 * @return {Array} the non-empty values of `actual`
 */
export declare function cleanArray(actual: any): Array<any>;
/**
 * Serializes a plain object into a URL query string (`a=1&b=2`).
 *
 * @ignore
 * @return {string} the encoded parameter string
 */
export declare function param(json: Record<string, any>): string;
/**
 * Parses the query parameters of a URL search string into an object.
 *
 * @ignore
 * @return {object} map of parameter name to value
 */
export declare function param2Obj(search: string): Record<string, any>;
| 36,666
|
https://github.com/Chucooleg/alfred/blob/master/gen/scripts/collect_object_states.py
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
alfred
|
Chucooleg
|
Python
|
Code
| 1,514
| 5,800
|
'''From each sampled trajectories, we need to collect the environmental object states.'''
import os
import sys
sys.path.append(os.path.join(os.environ['ALFRED_ROOT']))
sys.path.append(os.path.join(os.environ['ALFRED_ROOT'], 'gen'))
sys.path.append(os.path.join(os.environ['ALFRED_ROOT'], 'models'))
import pprint
import json
from data.preprocess import Dataset
from importlib import import_module, reload
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
from models.utils.helper_utils import optimizer_to
from gen.utils.image_util import decompress_mask as util_decompress_mask
import gen.constants as constants
import re
import numpy as np
from PIL import Image
from datetime import datetime
from models.eval.eval import Eval
from env.thor_env import ThorEnv
from models.eval.eval_task import EvalTask
from collections import defaultdict
import logging
import progressbar
import time
import multiprocessing as mp
import subprocess
def load_task_json(args, split_name, task):
    """Read a task's preprocessed trajectory JSON from disk.

    :param args: namespace with a ``data`` root directory attribute
    :param split_name: dataset split subdirectory (e.g. 'train')
    :param task: dict with a 'task' key giving the task subpath
    :returns: the parsed traj_data.json contents
    """
    path = os.path.join(args.data, split_name, task['task'], 'traj_data.json')
    with open(path) as fh:
        return json.load(fh)
def decompress_mask(compressed_mask):
    """Decompress a mask stored in the trajectory JSON and prepend a
    channel axis, yielding shape (1, H, W)."""
    decoded = np.array(util_decompress_mask(compressed_mask))
    return np.expand_dims(decoded, axis=0)
class CollectStates(EvalTask):
object_state_list = ['isToggled', 'isBroken', 'isFilledWithLiquid', 'isDirty',
'isUsedUp', 'isCooked', 'ObjectTemperature', 'isSliced',
'isOpen', 'isPickedUp', 'mass', 'receptacleObjectIds']
object_symbol_list = constants.OBJECTS
@classmethod
def get_object_list(cls, traj_data):
object_list = [ob['objectName'] for ob in traj_data['scene']['object_poses']]
for ob in object_list:
assert ob.split('_')[0] in constants.OBJECTS
return object_list
@classmethod
def get_object_states(cls, metadata):
object_states = defaultdict(dict)
for ob in metadata['objects']:
symbol = ob['name'].split('_')[0]
# assert symbol in cls.object_symbol_list
object_states[ob['name']]['symbol'] = symbol
object_states[ob['name']]['states'] = {state:ob[state] for state in cls.object_state_list}
object_states[ob['name']]['states']['parentReceptacles'] = ob['parentReceptacles'][0].split('|')[0] if ob['parentReceptacles'] is not None else None
return object_states
@classmethod
def divide_objects_by_change(cls, object_states_curr, object_states_last):
objects_unchanged = []
objects_changed = []
for ob_name in object_states_last.keys():
changed = False
for state in cls.object_state_list + ['parentReceptacles']:
if state in object_states_last[ob_name]['states'].keys():
if object_states_last[ob_name]['states'][state] != object_states_curr[ob_name]['states'][state]:
changed = True
if changed == False:
objects_unchanged.append(ob_name)
else:
objects_changed.append(ob_name)
return objects_changed, objects_unchanged
@classmethod
def get_unchanged_symbols(cls, objects_changed, objects_unchanged, symbol_set):
objects_symbols_changed = [ob_name.split('_')[0] for ob_name in objects_changed]
objects_symbols_unchanged = [ob_name.split('_')[0] for ob_name in objects_unchanged]
return list((set(objects_symbols_unchanged) - set(objects_symbols_changed)) & symbol_set)
@classmethod
def get_object_symbols_present_in_scene(cls, traj_data):
object_list = [ob['objectName'] for ob in traj_data['scene']['object_poses']]
extracted_symbols = [ob.split('_')[0] for ob in object_list]
return extracted_symbols
@classmethod
def get_receptacle_symbols_present_in_scene(cls, metadata):
receptacle_list = [ob['name'] for ob in metadata['objects'] if ob['receptacle']]
extracted_symbols = [ob.split('_')[0] for ob in receptacle_list]
return extracted_symbols
@classmethod
def get_visibility(cls, metadata, object_symbols, receptacle_symbols):
visible_objects = {ob:False for ob in object_symbols}
visible_receptacles = {recp:False for recp in receptacle_symbols}
for ob in metadata['objects']:
if ob['visible']:
symbol = ob['name'].split('_')[0]
if ob['receptacle']:
visible_receptacles[symbol] = True
else:
visible_objects[symbol] = True
return [ob for ob in visible_objects.keys() if visible_objects[ob]], [recp for recp in visible_receptacles.keys() if visible_receptacles[recp]]
@classmethod
def has_interaction(cls, action):
'''
check if low-level action is interactive
'''
non_interact_actions = ['MoveAhead', 'Rotate', 'Look', '<<stop>>', '<<pad>>', '<<seg>>']
if any(a in action for a in non_interact_actions):
return False
else:
return True
@classmethod
def evaluate(cls, args, r_idx, env, split_name, traj_data, planner_full_traj_success, success_log_entries, fail_log_entries, results, logger):
    """Replay a trajectory's ground-truth low-level actions in the THOR env and
    record per-timestep object-state metadata.

    For every timestep the simulator's full ``objects_metadata`` is captured
    together with the aligned high-level subgoal, then the whole list is
    written to ``<raw_root>/metadata_states.json``.

    Returns:
        (states, outpath): the collected per-timestep dicts and the JSON path
        they were saved to.
    """
    # setup scene
    reward_type = 'dense'
    cls.setup_scene(env, traj_data, r_idx, args, reward_type=reward_type)

    # --------------- collect actions -----------------
    # ground-truth low-level actions
    # e.g. ['LookDown_15', 'MoveAhead_25', 'MoveAhead_25', ... '<<stop>>']
    groundtruth_action_low = [a['discrete_action']['action'] for a in traj_data['plan']['low_actions']]
    groundtruth_action_low.append(cls.STOP_TOKEN)

    # get low-level action to high subgoal alignment
    # get valid interaction per low-level action
    # get interaction mask if any
    # Synthetic terminal action appended so the final state (after the last
    # real action) is also aligned to the last high-level subgoal.
    end_action = {
        'api_action': {'action': 'NoOp'},
        'discrete_action': {'action': '<<stop>>', 'args': {}},
        'high_idx': traj_data['plan']['high_pddl'][-1]['high_idx']
    }
    # e.g. [0,0,0, ... , 11], length = total T
    groundtruth_subgoal_alignment = []
    # e.g. [0,1,0, ... , 1], length = total T
    groundtruth_valid_interacts = []
    # len = num timesteps with valid interact, np shape (1 , 300 , 300)
    groundtruth_low_mask = []
    for a in (traj_data['plan']['low_actions'] + [end_action]):
        # high-level action index (subgoals)
        groundtruth_subgoal_alignment.append(a['high_idx'])
        # interaction validity
        step_valid_interact = 1 if cls.has_interaction(a['discrete_action']['action']) else 0
        groundtruth_valid_interacts.append(step_valid_interact)
        # interaction mask values (only present for interactive actions)
        if 'mask' in a['discrete_action']['args'].keys() and a['discrete_action']['args']['mask'] is not None:
            groundtruth_low_mask.append(decompress_mask(a['discrete_action']['args']['mask']))

    # ground-truth high-level subgoals
    # e.g. ['GotoLocation', 'PickupObject', 'SliceObject', 'GotoLocation', 'PutObject', ... 'NoOp']
    groundtruth_action_high = [a['discrete_action']['action'] for a in traj_data['plan']['high_pddl']]

    # Sanity checks: one alignment/interact flag per low action, contiguous
    # subgoal indices, and one mask per interactive step.
    assert len(groundtruth_action_low) == len(groundtruth_subgoal_alignment) == len(groundtruth_valid_interacts)
    assert len(groundtruth_action_high) == groundtruth_subgoal_alignment[-1] + 1
    assert sum(groundtruth_valid_interacts) == len(groundtruth_low_mask)

    # --------------- execute actions -----------------
    # initialize state dictionary for all timesteps
    states = []
    # get symbols and initial object states
    event = env.last_event
    obj_symbol_set = set(cls.get_object_symbols_present_in_scene(traj_data))
    receptacle_symbol_set = set(cls.get_receptacle_symbols_present_in_scene(event.metadata))
    object_states_last = cls.get_object_states(event.metadata) # includes receptacles

    # loop through actions and execute them in the sim env
    done, success = False, False
    fails = 0          # consecutive-or-total failed interaction API calls
    t = 0              # current low-level timestep
    reward = 0
    action, mask = None, None
    interact_ct = 0    # how many interaction masks consumed so far
    high_idx = -1      # last subgoal index seen; used to flag subgoal starts
    while not done:
        # if last action was stop, break
        if action == cls.STOP_TOKEN:
            done = True
            logging.info("predicted STOP")
            break

        # Mark the first timestep of each new subgoal.
        if high_idx < groundtruth_subgoal_alignment[t]:
            high_idx = groundtruth_subgoal_alignment[t]
            new_subgoal = True
        else:
            new_subgoal = False

        # collect metadata states only (captured BEFORE executing action t)
        states.append({
            'new_subgoal': new_subgoal,
            'time_step': t,
            'subgoal_step': groundtruth_subgoal_alignment[t],
            'subgoal': groundtruth_action_high[groundtruth_subgoal_alignment[t]],
            'objects_metadata': event.metadata['objects'],
        })

        # collect groundtruth action and mask
        # single string
        action = groundtruth_action_low[t]
        # expect (300, 300)
        if groundtruth_valid_interacts[t]:
            mask = groundtruth_low_mask[interact_ct][0]
            interact_ct += 1
        else:
            mask = None

        # interact with the env
        t_success, event, _, err, _ = env.va_interact(action, interact_mask=mask, smooth_nav=args.smooth_nav)
        if not t_success:
            fails += 1
            if fails >= args.max_fails:
                logging.info("Interact API failed %d times" % fails + "; latest error '%s'" % err)
                break

        # next time-step
        t_reward, t_done = env.get_transition_reward()
        reward += t_reward
        t += 1

    # make sure we have used all masks
    assert interact_ct == sum(groundtruth_valid_interacts)

    # check if goal was satisfied
    goal_satisfied = env.get_goal_satisfied()
    if goal_satisfied:
        print("Goal Reached")
        success = True

    # -------------------------------------------------
    # if the planner did not achieve full success,
    # we need to remove the last collected state because the last action was not cls.STOP_TOKEN
    if not planner_full_traj_success:
        states.pop()

    # ------save the object states out ----------------
    logging.info("Goal Reached")
    outpath = os.path.join(traj_data['raw_root'], 'metadata_states.json')
    logging.info('saving to outpath: {}'.format(outpath))
    with open(outpath, 'w') as f:
        json.dump(states, f)
    logging.info("----------------------------------------")
    return states, outpath
def main(args, splits_to_process_dict, process_i=0):
    """Worker entry point: replay every task assigned to `process_i`, collect
    object states via CollectStates.evaluate, and write a per-process
    `_filtered_{process_i}.json` split file of successful tasks.
    """
    raw_splits = splits_to_process_dict[process_i]
    # Route all INFO logs to a dated file under <data>/pipeline_logs.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    os.makedirs(os.path.join(args.data, 'pipeline_logs'), exist_ok=True)
    log_file_path = os.path.join(args.data, 'pipeline_logs', f'collect_demo_obj_states_T{args.PLANNER_TIME_STAMP}.log')
    hdlr = logging.FileHandler(log_file_path)
    logger.addHandler(hdlr)
    print (f'Logger is writing to {log_file_path}')

    # start sim env
    env = ThorEnv()
    # no language annotation available
    r_idx = None

    # book keeping -- some planner generated traj can still fail on execution
    # save to files
    failed_splits = {split_name:[] for split_name in raw_splits.keys()}
    out_splits = {split_name:[] for split_name in raw_splits.keys()}
    # report successes thus far (used in debugging only)
    success_log_entries = {split_name:[] for split_name in raw_splits.keys()}
    fail_log_entries = {split_name:[] for split_name in raw_splits.keys()}
    tot_ct = {split_name:len(raw_splits[split_name]) for split_name in raw_splits.keys()}
    results = {split_name:{} for split_name in raw_splits.keys()}

    print ('-----------START COLLECTING OBJECT STATES FROM RAW TRAJECTORIES-----------')
    for split_name in raw_splits.keys():
        tasks = [task for task in raw_splits[split_name]]
        split_count = 0
        print(f'Split {split_name} starts object states collection')
        print(f'Tasks: {tasks}')
        for task in progressbar.progressbar(tasks):
            traj_data = load_task_json(args, split_name, task)
            traj_data['raw_root'] = os.path.join(args.data, split_name, task['task'])
            # Tasks may record that the planner did not fully complete them.
            planner_full_traj_success = task.get('full_traj_success', True)
            split_count += 1
            logger.info('-----------------')
            logger.info(f'Split {split_name}: {split_count}/{tot_ct[split_name]} task')
            logger.info(f'Task Root: {traj_data["raw_root"]}.')
            logger.info(f'Task Type: {traj_data["task_type"]}.')
            print(f'\nProcessing {traj_data["raw_root"]}')
            try:
                _, _ = CollectStates.evaluate(
                    args, r_idx, env, split_name, traj_data, planner_full_traj_success,
                    success_log_entries, fail_log_entries, results, logger
                )
                print(f'Task succeeds to collect object state.')
                out_splits[split_name].append({
                    'task': task["task"],
                    'augmentation': args.is_augmentation_data,
                    'repeat_idx':task['repeat_idx'],
                    'full_traj_success':task.get('full_traj_success', True),
                    'collected_subgoals':len(traj_data['plan']['high_pddl'])}) # '<goal type>/<task_id>'
                if args.first_task_only:
                    print(f"Found a successful traj for split {split_name}. Stopping for this split.")
                    break
            except Exception as e:
                # A failed replay is logged and recorded, but does not stop the run.
                print(e)
                failed_splits[split_name].append({'task': task["task"]})
                print(f'Task fails to collect object state.')
        print(
            f'Split {split_name} object states collection results: \
            successes={len(out_splits[split_name])}, fails={len(failed_splits[split_name])}, total={tot_ct[split_name]}'
        )

    # save success splits
    split_file_dir = '/'.join(args.raw_splits.split('/')[:-1])
    split_file_name = args.raw_splits.split('/')[-1]
    out_splits_path = os.path.join(split_file_dir, split_file_name.replace('_raw.json', f'_filtered_{process_i}.json'))
    with open(out_splits_path, 'w') as f:
        json.dump(out_splits, f)
    print(f'Writing split file for process {process_i} to {out_splits_path}')
def parallel_main(args, splits_to_process_dict):
    """Fan `main` out over `args.num_processes` worker processes, one slice of
    `splits_to_process_dict` each, then join them all and kill any leftover
    THOR simulator processes.
    """
    workers = []
    for process_i in range(args.num_processes):
        workers.append(mp.Process(target=main, args=(args, splits_to_process_dict, process_i)))
    try:
        # Stagger the starts slightly so the simulators do not all boot at once.
        for worker in workers:
            worker.start()
            time.sleep(0.1)
    finally:
        # Always join, even if a start failed, then reap stray THOR processes.
        for worker in workers:
            worker.join()
        subprocess.call(["pkill", "-f", 'thor'])
def merge_split_files(args):
    '''Merge the per-process `_filtered_{i}.json` split files into a single
    `_filtered.json` next to `args.raw_splits`, deleting each per-process file
    after it has been folded in.'''
    split_file_dir = '/'.join(args.raw_splits.split('/')[:-1])
    split_file_name = args.raw_splits.split('/')[-1]
    merged_splits = {}
    for process_i in range(args.num_processes):
        part_path = os.path.join(split_file_dir, split_file_name.replace('_raw.json', f'_filtered_{process_i}.json'))
        with open(part_path, 'r') as f:
            part = json.load(f)
        for split_key, task_list in part.items():
            if split_key in merged_splits:
                merged_splits[split_key] += task_list
            else:
                merged_splits[split_key] = task_list
        os.remove(part_path)
    merged_splits_path = os.path.join(split_file_dir, split_file_name.replace('_raw.json', '_filtered.json'))
    with open(merged_splits_path, 'w') as f:
        json.dump(merged_splits, f)
    print(f'New split file for successful trajectories is saved to {merged_splits_path}')
if __name__ == "__main__":
    # Command-line entry point: parse arguments, slice the raw splits across
    # worker processes, run the collection, and merge the per-process outputs.
    parser = ArgumentParser()
    parser.add_argument(
        '--data', help='dataset directory.', type=str
    )
    parser.add_argument(
        '--raw_splits', help='json file containing raw trajectory splits.', type=str
    )
    parser.add_argument(
        '--reward_config', default='models/config/rewards.json', type=str
    )
    parser.add_argument(
        '--first_task_only', action='store_true',
        help='only process the first task loaded for each split.'
    )
    parser.add_argument(
        '--is_augmentation_data', action='store_true',
        help='if data is augmentation data.'
    )
    # rollout
    parser.add_argument(
        '--max_fails', type=int, default=10,
        help='max API execution failures before episode termination'
    )
    parser.add_argument(
        '--smooth_nav', dest='smooth_nav', action='store_true',
        help='smooth nav actions (might be required based on training data)')
    # multi-process settings
    parser.add_argument(
        "--in_parallel", action='store_true',
        help="this collection will run in parallel with others, so load from disk on every new sample"
    )
    parser.add_argument(
        "-n", "--num_processes", type=int, default=0,
        help="number of processes for parallel mode"
    )
    parse_args = parser.parse_args()
    # Resolve the reward config relative to the ALFRED checkout root.
    parse_args.reward_config = os.path.join(os.environ['ALFRED_ROOT'], parse_args.reward_config)
    parse_args.PLANNER_TIME_STAMP = datetime.now().strftime("%Y%m%d")

    # load splits
    with open(parse_args.raw_splits) as f:
        raw_splits = json.load(f)
    split_names = raw_splits.keys()
    print(f'Raw Splits are : {split_names}')
    allowed_split_names = ['valid_seen', 'valid_unseen', 'train', 'augmentation']

    # divide task among processes
    # TODO: should replace with a proper queue for multi-process
    splits_to_process_dict = {p_i:{} for p_i in range(parse_args.num_processes)}
    if parse_args.in_parallel and parse_args.num_processes > 1:
        for split_name in split_names:
            if not split_name in allowed_split_names:
                continue
            # Even slicing; the last process also takes the remainder.
            quotient = len(raw_splits[split_name]) // parse_args.num_processes
            for process_i in range(parse_args.num_processes):
                splits_to_process_dict[process_i][split_name] = raw_splits[split_name][process_i*quotient: (process_i+1)*quotient]
                if process_i == parse_args.num_processes-1:
                    splits_to_process_dict[process_i][split_name] += raw_splits[split_name][(process_i+1)*quotient:]
        parallel_main(parse_args, splits_to_process_dict)
    else:
        # Single-process path: everything goes to slot 0.
        splits_to_process_dict[0] = {k: raw_splits[k] for k in split_names if k in allowed_split_names}
        main(parse_args, splits_to_process_dict)
    # NOTE(review): merge_split_files iterates range(num_processes); with the
    # default num_processes=0 (single-process path) it merges nothing and
    # writes an empty _filtered.json even though main wrote _filtered_0.json --
    # confirm whether single-process runs should pass num_processes=1.
    merge_split_files(parse_args)
| 12,072
|
https://github.com/pmqueiroz/rectar/blob/master/scripts/publish.sh
|
Github Open Source
|
Open Source
|
MIT
| null |
rectar
|
pmqueiroz
|
Shell
|
Code
| 99
| 348
|
#!/bin/bash
# Interactive publish script: shows the version from package.json, asks for
# confirmation, rebuilds the package from a clean slate, and publishes to NPM.

# ANSI escape codes for colored terminal output.
GREEN="\u001b[32m"
MAGENTA="\u001b[35m"
CYAN="\u001b[36m"
YELLOW="\033[33m"
RESET="\033[0m"

# Extract the value of the first "version" line in package.json, stripping
# quotes, commas, and whitespace.
PACKAGE_VERSION=$(cat package.json | grep version | head -1 | awk -F: '{ print $2 }' | sed 's/[",]//g' | tr -d '[[:space:]]')

printf "$MAGENTA\n"
printf "Publishing new version $PACKAGE_VERSION to NPM$RESET"
# Abort with exit code 126 unless the user answers y/Y.
read -p " confirm (y/n)? " choice
case "$choice" in
  y|Y ) printf "\nGenerating build...! \n";;
  * ) printf "$YELLOW"; printf "\n!! fix the versioning before running the script !!$RESET\n"; exit 126;;
esac

printf "$YELLOW[1/4]$RESET$GREEN Removing old build folders...\n"
rm -rf node_modules .next dist

printf "$YELLOW[2/4]$RESET$GREEN Reinstalling dependencies...\n"
yarn install --silent

printf "$YELLOW[3/4]$RESET$GREEN Build package...\n"
yarn build

printf "$YELLOW[4/4]$RESET$GREEN Publishing...\n"
npm publish
| 50,222
|
https://github.com/gilbertsoft/symfony-template-bundle/blob/master/src/Resources/views/styleguide/component/text.html.twig
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
symfony-template-bundle
|
gilbertsoft
|
Twig
|
Code
| 46
| 154
|
{# Styleguide page demonstrating the default rendering of heading levels h1-h6
   inside an example frame (shown with HTML syntax highlighting). #}
{% extends '@Template/styleguide/layout.html.twig' %}

{% block title %}Text{% endblock %}

{% block body %}
    {% frame %}
        {% example with { language: 'html' } %}
<h1>Headline 1</h1>
<h2>Headline 2</h2>
<h3>Headline 3</h3>
<h4>Headline 4</h4>
<h5>Headline 5</h5>
<h6>Headline 6</h6>
        {% endexample %}
    {% endframe %}
{% endblock %}
| 26,792
|
https://github.com/zhangtongrui/limax_5.16/blob/master/java/limax/src/limax/edb/Query.java
|
Github Open Source
|
Open Source
|
MIT
| null |
limax_5.16
|
zhangtongrui
|
Java
|
Code
| 6
| 17
|
package limax.edb;
/**
 * Marker interface for queries in the edb package; declares no members and is
 * used purely for type identification.
 */
interface Query {
}
| 20,409
|
https://github.com/NeikoGrozev/CSharpBasic/blob/master/Exam/6.Programming Basics Online Exam - 1 and 2 December 2018/1.School Supplies/School_Supplies .cs
|
Github Open Source
|
Open Source
|
MIT
| null |
CSharpBasic
|
NeikoGrozev
|
C#
|
Code
| 64
| 202
|
using System;
class Program
{
    // Reads the quantities of two supply types and an amount of a third item,
    // then prints the discounted total cost with three decimal places.
    static void Main()
    {
        int chemicalPacks = int.Parse(Console.ReadLine());
        int markerPacks = int.Parse(Console.ReadLine());
        double preparationAmount = double.Parse(Console.ReadLine());
        int discountPercent = int.Parse(Console.ReadLine());

        // Unit prices per package / per unit of preparation.
        const double ChemicalPackPrice = 5.80;
        const double MarkerPackPrice = 7.20;
        const double PreparationUnitPrice = 1.20;

        double subtotal = chemicalPacks * ChemicalPackPrice
                        + markerPacks * MarkerPackPrice
                        + preparationAmount * PreparationUnitPrice;
        double totalAfterDiscount = subtotal - (subtotal * discountPercent) / 100;

        Console.WriteLine($"{totalAfterDiscount:F3}");
    }
}
| 25,526
|
https://github.com/ArnCarveris/DiligentSamples/blob/master/Samples/AntTweakBar/CMakeLists.txt
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,017
|
DiligentSamples
|
ArnCarveris
|
CMake
|
Code
| 147
| 960
|
cmake_minimum_required (VERSION 3.6)

project(AntTweakBarSample CXX)

set(SOURCE
    src/MengerSponge.cpp
)

set(INCLUDE
    src/MengerSpongeSample.h
)

# Shader files are deployed as assets; they are listed here so they show up in
# IDE project trees.
set(SHADERS
    build/assets/shaders/MainPS_DX.hlsl
    build/assets/shaders/MainPS_GL.glsl
    build/assets/shaders/MainVS_DX.hlsl
    build/assets/shaders/MainVS_GL.glsl
)
# Prevent Visual Studio from trying to compile the HLSL files with FXC; they
# are consumed at runtime instead.
set_property(SOURCE build/assets/shaders/MainPS_DX.hlsl PROPERTY VS_TOOL_OVERRIDE "None")
set_property(SOURCE build/assets/shaders/MainVS_DX.hlsl PROPERTY VS_TOOL_OVERRIDE "None")

set(ASSETS
    ${SHADERS}
    build/assets/MengerSponge.lua
)

# Per-platform target definition.
if(PLATFORM_WIN32 OR PLATFORM_UNIVERSAL_WINDOWS)
    add_executable(AntTweakBarSample WIN32 ${SOURCE} ${INCLUDE} ${ASSETS})
    if(PLATFORM_WIN32)
        # Run from the build dir so relative asset paths resolve when debugging.
        set_target_properties(AntTweakBarSample PROPERTIES
            VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/build"
        )
        copy_required_dlls(AntTweakBarSample)
    endif()
elseif(PLATFORM_ANDROID)
    list(APPEND SOURCE src/AndroidMain.cpp)
    add_library(AntTweakBarSample SHARED ${SOURCE} ${INCLUDE})
#elseif(APPLE)
#   add_executable(AntTweakBarSample MACOSX_BUNDLE Main.cpp)
#elseif(UNIX)
#   add_executable(AntTweakBarSample Main.cpp)
else()
    message(FATAL_ERROR "Unknown platform")
endif()

if(PLATFORM_UNIVERSAL_WINDOWS)
    # UWP builds pull in shared app-model sources and deploy assets as content.
    get_sample_base_uwp_source(UWP_SOURCE UWP_INCLUDE UWP_INCLUDE_DIR)
    source_group("UWP\\src" FILES ${UWP_SOURCE})
    source_group("UWP\\include" FILES ${UWP_INCLUDE})
    target_include_directories(AntTweakBarSample PRIVATE ${UWP_INCLUDE_DIR})
    target_sources(AntTweakBarSample PRIVATE ${UWP_SOURCE} ${UWP_INCLUDE})
    set_property(SOURCE ${ASSETS} PROPERTY VS_DEPLOYMENT_CONTENT 1)
    set_property(SOURCE ${SHADERS} PROPERTY VS_DEPLOYMENT_LOCATION "assets/shaders")
    set_property(SOURCE build/assets/MengerSponge.lua PROPERTY VS_DEPLOYMENT_LOCATION "assets")
endif()

target_include_directories(AntTweakBarSample
    PRIVATE
        src
)

target_link_libraries(AntTweakBarSample
    PRIVATE
        SampleBase
        Lua
        RenderScript
)
set_common_target_properties(AntTweakBarSample)

if(MSVC)
    # Disable MSVC-specific warnings
    # - w4201: nonstandard extension used: nameless struct/union
    target_compile_options(AntTweakBarSample PRIVATE /wd4201)
endif()

set_target_properties(AntTweakBarSample PROPERTIES
    FOLDER Samples
)

source_group("src" FILES ${SOURCE})
source_group("include" FILES ${INCLUDE})
source_group("assets\\shaders" FILES ${SHADERS})
source_group("assets" FILES build/assets/MengerSponge.lua)
| 2,865
|
https://github.com/alyforalyssa/bbblocks/blob/master/app/components/AppGrid/index.js
|
Github Open Source
|
Open Source
|
MIT
| null |
bbblocks
|
alyforalyssa
|
JavaScript
|
Code
| 211
| 694
|
/**
*
* AppGrid
*
*/
import React from 'react';
import PropTypes from 'prop-types';
import {
AppGridContainer,
BlocksGrid,
BlockItemPositionContainer,
} from './style';
import AppGridGuidelines from './AppGridGuidelines';
import AppGridController from './AppGridController';
/**
 * AppGrid — renders a grid of blocks on top of a transparent guideline layer.
 * Layout values (block height, gaps, total width) come from `props.style`;
 * block width is derived by dividing the total width evenly across columns.
 */
const AppGrid = props => {
  const { row, column, blocks, actions, style } = props;
  const { blockHeight, columnGap, rowGap, width } = style;
  // Derive a per-block pixel width from the container width and column count.
  // (Original note: "to change" — presumably meant to be made configurable.)
  const blockWidth = `${Math.floor(width / column)}px`;

  const appGridGuidelinesProps = {
    row,
    column,
    blocks,
    blockHeight,
    blockWidth,
  };

  return (
    <AppGridContainer>
      {/* transparent grid line in the back */}
      <AppGridGuidelines {...appGridGuidelinesProps} />
      {/* the actual grid of blocks */}
      <BlocksGrid
        row={row}
        column={column}
        blockWidth={blockWidth}
        blockHeight={blockHeight}
        columnGap={columnGap}
        rowGap={rowGap}
      >
        {blocks.map(block => (
          <AppBlock block={block} key={block.id} actions={actions} />
        ))}
      </BlocksGrid>
    </AppGridContainer>
  );
};

AppGrid.propTypes = {
  row: PropTypes.number,
  style: PropTypes.any,
  column: PropTypes.number,
  blocks: PropTypes.array,
  actions: PropTypes.shape({
    onSelectBlock: PropTypes.func.isRequired,
    onInitializeBlockContent: PropTypes.func.isRequired,
  }),
};
/**
 * AppBlock — a single positioned cell in the grid, hosting a <canvas>.
 * On mount it reports its rendered pixel size to
 * `actions.onInitializeBlockContent`; clicking it selects the block.
 */
const AppBlock = props => {
  const ref = React.useRef(null);
  const { block, actions } = props;

  React.useEffect(() => {
    // Measure the DOM node once after mount and hand the size to the parent.
    const blockProps = {};
    if (ref.current) {
      blockProps.width = ref.current.offsetWidth;
      blockProps.height = ref.current.offsetHeight;
    }
    actions.onInitializeBlockContent(block, blockProps);
  }, []);

  return (
    <BlockItemPositionContainer
      {...block.position}
      backgroundColor={block.style.backgroundColor}
      ref={ref}
      onClick={() => {
        actions.onSelectBlock(block);
      }}
    >
      {/* Canvas is keyed by block id so drawing code can look it up later. */}
      <canvas id={block.id} />
    </BlockItemPositionContainer>
  );
};

AppBlock.propTypes = {
  block: PropTypes.any.isRequired,
  actions: PropTypes.shape({
    onSelectBlock: PropTypes.func.isRequired,
    onInitializeBlockContent: PropTypes.func.isRequired,
  }),
};
export default AppGrid;
export { AppGridController };
| 34,926
|
https://github.com/degica/barcelona/blob/master/db/migrate/20160303080907_unencrypt_aws_access_key_id.rb
|
Github Open Source
|
Open Source
|
MIT
| 2,023
|
barcelona
|
degica
|
Ruby
|
Code
| 26
| 112
|
# Migration that decrypts every District's stored AWS access key id (encrypted
# with the app-wide ENCRYPTION_KEY) and writes it back to the plaintext
# aws_access_key_id attribute.
class UnencryptAwsAccessKeyId < ActiveRecord::Migration
  def change
    # find_each batches rows to keep memory bounded on large tables.
    District.find_each do |district|
      encrypted = district.encrypted_aws_access_key_id
      decrypted = EncryptAttribute.decrypt_attribute(
        encrypted,
        ENV['ENCRYPTION_KEY'],
        {}
      )
      district.aws_access_key_id = decrypted
      # save! raises on validation failure, aborting the migration.
      district.save!
    end
  end
end
| 4,414
|
https://github.com/ezracr/vote-role-assignment/blob/master/bot/src/middlewares/devMiddleware.ts
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
vote-role-assignment
|
ezracr
|
TypeScript
|
Code
| 51
| 164
|
import path from 'path'
import express, { Application } from 'express'
const publicPath = 'uploads'
const normPubPath = path.resolve(process.cwd(), publicPath)
/**
 * Registers development-only middleware: permissive CORS headers for GET
 * requests and static serving of the local uploads directory (uncached).
 * Does nothing when NODE_ENV is 'production'.
 */
export default function devMiddleware(app: Application): void {
  if (process.env.NODE_ENV === 'production') {
    return
  }

  app.use((req, res, next) => {
    res.setHeader('Access-Control-Allow-Methods', 'GET')
    res.setHeader('Access-Control-Allow-Origin', '*')
    next()
  })

  // maxAge: 0 disables client caching so fresh uploads are always served.
  app.use('/uploads', express.static(normPubPath, { maxAge: 0 }))
}
| 16,994
|
https://github.com/zxwnstn/AnrealEngine/blob/master/Engine/Source/Runtime/Core/public/ModuleManager.h
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,021
|
AnrealEngine
|
zxwnstn
|
C
|
Code
| 93
| 319
|
#pragma once
#include "Core.h"
#include "Type.h"
// Base interface that every engine module implements.
class IModule
{
public:
	// Returns the module's instance (see DECLARE_MODULE for the lazy-singleton pattern).
	virtual IModule* CreateInstance() = 0;
	// Tears the instance down. NOTE(review): declared non-pure but no definition
	// is visible in this header -- confirm it is defined in a translation unit.
	virtual void DestroyInst();
};
// Tracks the set of engine modules by name and exposes load/unload operations.
class ModuleManager
{
private:
	ModuleManager();  // private ctor: instances are meant to come from Get()
public:
	// NOTE(review): Get() reads as a singleton accessor but is not declared
	// static; with the constructor private there is no way to obtain an
	// instance to call it on -- confirm whether 'static' is missing.
	ModuleManager* Get();
	// NOTE(review): likely a typo for "ShutDown"; kept as-is to avoid breaking callers.
	void ShotDown();

public:
	// Loads/registers every module named in `list`.
	CORE_API void Init(const std::vector<Name>& list);
	IModule* GetModule(NameRef moduleName);
	void AddModule(NameRef moduleName);
	void LoadModule(NameRef moduleName);
	void UnLoadModule(NameRef moduleName);

private:
	// Registry of module instances, keyed by module name (shared, static).
	static std::unordered_map<Name, IModule*> ModuleInstList;
};
// Declares a per-module interface class exposing a lazily created singleton
// instance of MODULE_NAME (which must derive from IModule).
//
// Fix: the original pasted the literal token `_MODULE_NAME` instead of the
// macro argument (`ModuleInterface##_MODULE_NAME`), so every expansion
// produced the same class name `ModuleInterface_MODULE_NAME`, colliding when
// the macro is used for more than one module. `##MODULE_NAME` pastes the
// actual argument, yielding a unique class per module.
#define DECLARE_MODULE(MODULE_NAME)            \
class ModuleInterface##MODULE_NAME             \
{                                              \
public:                                        \
	static IModule* CreateInstance()           \
	{                                          \
		static MODULE_NAME* Inst = nullptr;    \
		if (!Inst)                             \
		{                                      \
			Inst = new MODULE_NAME;            \
		}                                      \
		return Inst;                           \
	}                                          \
};
| 41,806
|
https://github.com/elleyerium/Elysium/blob/master/Assets/Scripts/Game/Graphics/UI/Buttons/ButtonTracker.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
Elysium
|
elleyerium
|
C#
|
Code
| 109
| 627
|
using System.Collections;
using System.Collections.Generic;
using Game.Difficult;
using Game.Graphics.Effects;
using Game.Graphics.UI.Background;
using Game.Graphics.UI.Menu.Animations;
using Game.Graphics.UI.Screen;
using UnityEngine;
using UnityEngine.SceneManagement;
namespace Game.Graphics.UI.Buttons
{
    /// <summary>
    /// Click handler component wired to UI buttons: each public method switches
    /// the active screen through the ScreenManager singleton, except
    /// PlayOffline which starts a fade transition into the "Bots" scene.
    /// </summary>
    public class ButtonTracker : MonoBehaviour
    {
        // Serialized references assigned in the Unity inspector.
        [SerializeField] private WindowButtonHandler _buttonHandler;
        [SerializeField] private BotDifficult _botDifficult;
        [SerializeField] private GameProvider _gameProvider;

        public void WebScreen() =>
            ScreenManager.Instance.ChangeScreen(ScreenManager.Instance.GetScreen(ScreenType.WebScreen));

        public void NotificationsScreen() =>
            ScreenManager.Instance.ChangeScreen(ScreenManager.Instance.GetScreen(ScreenType.NotificationsScreen));

        public void UserInfoScreen() =>
            ScreenManager.Instance.ChangeScreen(ScreenManager.Instance.GetScreen(ScreenType.ProfileInfoScreen));

        public void SettingsScreen() =>
            ScreenManager.Instance.ChangeScreen(ScreenManager.Instance.GetScreen(ScreenType.SettingsScreen));

        public void HomeScreen() =>
            ScreenManager.Instance.ChangeScreen(ScreenManager.Instance.GetScreen(ScreenType.MainScreen));

        public void LeaderboardsScreen() =>
            ScreenManager.Instance.ChangeScreen(ScreenManager.Instance.GetScreen(ScreenType.LeaderboardsScreen));

        public void StylingScreen() =>
            ScreenManager.Instance.ChangeScreen(ScreenManager.Instance.GetScreen(ScreenType.StylingScreen));

        public void LobbyScreen()=>
            ScreenManager.Instance.ChangeScreen(ScreenManager.Instance.GetScreen(ScreenType.LobbyScreen));

        /*public void ShowDiff()=>
            //_buttonHandler.ShowGameModeOfflineWindow();*/ //TODO: FIX

        // Starts an offline match: fades to the "Bots" scene over one second.
        // The commented lines show earlier host/difficulty setup kept for reference.
        public void PlayOffline()
        {
            //_gameProvider.SetOfflineHost(new BotDifficult(DifficultRate.Easy));
            Initiate.Fade("Bots", Color.black, 1f);
            //_botDifficult.Play();
        }

        public void ModeSelectorScreen()=>
            ScreenManager.Instance.ChangeScreen(ScreenManager.Instance.GetScreen(ScreenType.ModeSelectorScreen));

        public void DiffSelectorScreen()=>
            ScreenManager.Instance.ChangeScreen(ScreenManager.Instance.GetScreen(ScreenType.DiffSelectorScreen));

        /*public void BackDiff()=>
            _buttonHandler.ShowGameModeWindow();*/
    }
}
| 6,040
|
https://github.com/robertkowalski/kowalski.gd/blob/master/assets/images/code/2011-01-01--private_und_publicA.js
|
Github Open Source
|
Open Source
|
MIT
| null |
kowalski.gd
|
robertkowalski
|
JavaScript
|
Code
| 71
| 172
|
var Spion = function() {
  // Private state, reachable only through the closure below.
  var name = 'Hans';

  function updateName(n) {
    name = n;
    return 'Neuer Name: ' + name;
  }

  function readName() {
    return name;
  }

  // Public interface: only these members are visible to callers.
  return {
    name: 'Peter',
    neuerName: function(n) {
      return updateName(n);
    },
    getName: function() {
      // Has closure access to the private variable and private getter.
      return 'Private Eigenschaft: ' + name + " - Privater Getter: " + readName();
    }
  };
};
| 44,843
|
https://github.com/denofiend/code-lib/blob/master/node/hubot/node_modules/hubot/src/startup_error.coffee
|
Github Open Source
|
Open Source
|
MIT
| 2,014
|
code-lib
|
denofiend
|
CoffeeScript
|
Code
| 4
| 8
|
# Error subclass used to signal failures during startup; adds no members of its own.
class StartupError extends Error
| 42,048
|
https://github.com/asfaltboy/steampipe-plugin-github/blob/master/github/table_github_search_label.go
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
steampipe-plugin-github
|
asfaltboy
|
Go
|
Code
| 438
| 1,483
|
package github
import (
"context"
"regexp"
"strings"
"github.com/google/go-github/v33/github"
"github.com/turbot/steampipe-plugin-sdk/grpc/proto"
"github.com/turbot/steampipe-plugin-sdk/plugin"
"github.com/turbot/steampipe-plugin-sdk/plugin/transform"
)
//// TABLE DEFINITION
// tableGitHubSearchLable defines the github_search_label table: it searches a
// single repository's labels by keyword, requiring both `query` and
// `repository_id` key columns.
// NOTE(review): "Lable" is a misspelling of "Label"; the name is kept as-is
// because the plugin's table registration elsewhere references this identifier.
func tableGitHubSearchLable(ctx context.Context) *plugin.Table {
	return &plugin.Table{
		Name:        "github_search_label",
		Description: "Find labels in a repository with names or descriptions that match search keywords.",
		List: &plugin.ListConfig{
			// Both quals are mandatory; missing-repo 404s are treated as empty results.
			KeyColumns:        plugin.AllColumns([]string{"query", "repository_id"}),
			ShouldIgnoreError: isNotFoundError([]string{"404"}),
			Hydrate:           tableGitHubSearchLabelList,
		},
		Columns: []*plugin.Column{
			{Name: "id", Transform: transform.FromField("ID"), Type: proto.ColumnType_INT, Description: "The ID of the label."},
			{Name: "repository_id", Type: proto.ColumnType_INT, Transform: transform.FromQual("repository_id"), Description: "The ID of the repository."},
			{Name: "repository_full_name", Type: proto.ColumnType_STRING, Transform: transform.From(extractSearchLabelRepositoryFullName), Description: "The full name of the repository (login/repo-name)."},
			{Name: "name", Type: proto.ColumnType_STRING, Description: "The name of the label."},
			{Name: "query", Type: proto.ColumnType_STRING, Transform: transform.FromQual("query"), Description: "The query used to match the label."},
			{Name: "color", Type: proto.ColumnType_STRING, Description: "The color assigned to the label."},
			{Name: "default", Type: proto.ColumnType_BOOL, Default: false, Description: "Whether the label is a default one."},
			{Name: "description", Type: proto.ColumnType_STRING, Description: "The description of the label."},
			{Name: "score", Type: proto.ColumnType_DOUBLE, Description: "The score of the label."},
			{Name: "url", Type: proto.ColumnType_STRING, Description: "The API URL to get the label details."},
			{Name: "text_matches", Type: proto.ColumnType_JSON, Description: "The text match details."},
		},
	}
}
//// LIST FUNCTION
// tableGitHubSearchLabelList streams labels in the repository identified by the
// `repository_id` qual whose names/descriptions match the `query` qual,
// paginating through the GitHub search API with retry support.
func tableGitHubSearchLabelList(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) {
	logger := plugin.Logger(ctx)
	logger.Trace("tableGitHubSearchLabelList")

	quals := d.KeyColumnQuals
	repoId := d.KeyColumnQuals["repository_id"].GetInt64Value()
	query := quals["query"].GetStringValue()

	// No search term: return an empty result set rather than calling the API.
	if query == "" {
		return nil, nil
	}

	opt := &github.SearchOptions{
		ListOptions: github.ListOptions{PerPage: 100},
		TextMatch:   true,
	}

	// Bundles a search page with its HTTP response so RetryHydrate (which
	// returns a single interface{}) can hand both back to the caller.
	type ListPageResponse struct {
		result *github.LabelsSearchResult
		resp   *github.Response
	}

	client := connect(ctx, d)

	// Reduce the basic request limit down if the user has only requested a small number of rows
	limit := d.QueryContext.Limit
	if limit != nil {
		if *limit < int64(opt.ListOptions.PerPage) {
			opt.ListOptions.PerPage = int(*limit)
		}
	}

	// One page fetch, wrapped as a hydrate func so it can be retried.
	listPage := func(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) {
		result, resp, err := client.Search.Labels(ctx, repoId, query, opt)
		if err != nil {
			logger.Error("tableGitHubSearchLabelList", "error_Search.Labels", err)
			return nil, err
		}

		return ListPageResponse{
			result: result,
			resp:   resp,
		}, nil
	}

	// Page through results until the API reports no further pages.
	for {
		listPageResponse, err := plugin.RetryHydrate(ctx, d, h, listPage, &plugin.RetryConfig{ShouldRetryError: shouldRetryError})
		if err != nil {
			logger.Error("tableGitHubSearchLabelList", "error_RetryHydrate", err)
			return nil, err
		}

		listResponse := listPageResponse.(ListPageResponse)
		labels := listResponse.result.Labels
		resp := listResponse.resp

		for _, i := range labels {
			d.StreamListItem(ctx, i)

			// Context can be cancelled due to manual cancellation or the limit has been hit
			if d.QueryStatus.RowsRemaining(ctx) == 0 {
				return nil, nil
			}
		}

		if resp.NextPage == 0 {
			break
		}
		opt.Page = resp.NextPage
	}

	return nil, nil
}
//// TRANSFORM FUNCTION
// extractSearchLabelRepositoryFullName derives the "login/repo-name" portion
// of a label's API URL — the text between "repos/" and "/labels". It returns
// an empty string when the URL is absent or does not contain that pattern.
func extractSearchLabelRepositoryFullName(_ context.Context, d *transform.TransformData) (interface{}, error) {
	label := d.HydrateItem.(*github.LabelResult)
	if label.URL == nil {
		return "", nil
	}
	// Locate "repos/<full name>/labels" (dot-all so '/' in names is matched),
	// then strip the surrounding markers from the matched text.
	pattern := regexp.MustCompile(`(?s)` + regexp.QuoteMeta("repos/") + `(.*?)` + regexp.QuoteMeta("/labels"))
	matched := pattern.FindString(*label.URL)
	stripped := strings.NewReplacer("repos/", "", "/labels", "").Replace(matched)
	return stripped, nil
}
| 42,541
|
https://github.com/JiangMuWen/cesium-native/blob/master/CesiumGeometry/test/TestAxisTransforms.cpp
|
Github Open Source
|
Open Source
|
Apache-2.0, LicenseRef-scancode-free-unknown
| 2,021
|
cesium-native
|
JiangMuWen
|
C++
|
Code
| 202
| 971
|
#include "CesiumGeometry/AxisTransforms.h"
#include <CesiumUtility/Math.h>
#include <catch2/catch.hpp>
// Verifies each axis-conversion matrix by left-multiplying (row-vector
// convention: v * M) the three basis direction vectors (homogeneous w = 0)
// and checking they map onto the expected axes.
TEST_CASE("AxisTransforms convert the axes correctly") {

  glm::dvec4 X_AXIS{1.0, 0.0, 0.0, 0.0};
  glm::dvec4 Y_AXIS{0.0, 1.0, 0.0, 0.0};
  glm::dvec4 Z_AXIS{0.0, 0.0, 1.0, 0.0};

  SECTION("Y_UP_TO_Z_UP transforms X to X, Y to -Z, and Z to Y") {
    REQUIRE(X_AXIS * CesiumGeometry::AxisTransforms::Y_UP_TO_Z_UP == X_AXIS);
    REQUIRE(Y_AXIS * CesiumGeometry::AxisTransforms::Y_UP_TO_Z_UP == -Z_AXIS);
    REQUIRE(Z_AXIS * CesiumGeometry::AxisTransforms::Y_UP_TO_Z_UP == Y_AXIS);
  }

  SECTION("Z_UP_TO_Y_UP transforms X to X, Y to Z, and Z to -Y") {
    REQUIRE(X_AXIS * CesiumGeometry::AxisTransforms::Z_UP_TO_Y_UP == X_AXIS);
    REQUIRE(Y_AXIS * CesiumGeometry::AxisTransforms::Z_UP_TO_Y_UP == Z_AXIS);
    REQUIRE(Z_AXIS * CesiumGeometry::AxisTransforms::Z_UP_TO_Y_UP == -Y_AXIS);
  }

  SECTION("X_UP_TO_Z_UP transforms X to -Z, Y to Y, and Z to X") {
    REQUIRE(X_AXIS * CesiumGeometry::AxisTransforms::X_UP_TO_Z_UP == -Z_AXIS);
    REQUIRE(Y_AXIS * CesiumGeometry::AxisTransforms::X_UP_TO_Z_UP == Y_AXIS);
    REQUIRE(Z_AXIS * CesiumGeometry::AxisTransforms::X_UP_TO_Z_UP == X_AXIS);
  }

  SECTION("Z_UP_TO_X_UP transforms X to Z, Y to Y, and Z to -X") {
    REQUIRE(X_AXIS * CesiumGeometry::AxisTransforms::Z_UP_TO_X_UP == Z_AXIS);
    REQUIRE(Y_AXIS * CesiumGeometry::AxisTransforms::Z_UP_TO_X_UP == Y_AXIS);
    REQUIRE(Z_AXIS * CesiumGeometry::AxisTransforms::Z_UP_TO_X_UP == -X_AXIS);
  }

  SECTION("X_UP_TO_Y_UP transforms X to -Y, Y to X, and Z to Z") {
    REQUIRE(X_AXIS * CesiumGeometry::AxisTransforms::X_UP_TO_Y_UP == -Y_AXIS);
    REQUIRE(Y_AXIS * CesiumGeometry::AxisTransforms::X_UP_TO_Y_UP == X_AXIS);
    REQUIRE(Z_AXIS * CesiumGeometry::AxisTransforms::X_UP_TO_Y_UP == Z_AXIS);
  }

  SECTION("Y_UP_TO_X_UP transforms X to Y, Y to -X, and Z to Z") {
    REQUIRE(X_AXIS * CesiumGeometry::AxisTransforms::Y_UP_TO_X_UP == Y_AXIS);
    REQUIRE(Y_AXIS * CesiumGeometry::AxisTransforms::Y_UP_TO_X_UP == -X_AXIS);
    REQUIRE(Z_AXIS * CesiumGeometry::AxisTransforms::Y_UP_TO_X_UP == Z_AXIS);
  }
}
| 28,140
|
https://github.com/lriki/Lumino/blob/master/lumino/Graphics/src/RHI/Backend/Vulkan/VulkanHelper.cpp
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
Lumino
|
lriki
|
C++
|
Code
| 4,246
| 25,650
|
#include "Internal.hpp"
#include <LuminoPlatform/PlatformWindow.hpp>
#include <LuminoPlatform/PlatformSupport.hpp>
#include "VulkanHelper.hpp"
#include "VulkanDeviceContext.hpp"
#include "VulkanBuffers.hpp"
#include "VulkanTextures.hpp"
#include "VulkanSingleFrameAllocator.hpp"
#include <LuminoGraphics/RHI/GraphicsExtensionVulkan.hpp>
// Global Vulkan entry points, resolved at runtime by
// VulkanHelper::initVulkanFunctions() via LoadLibrary/GetProcAddress
// (Win32) or dlopen/dlsym (other platforms). Any pointer may be null if
// the corresponding symbol was not exported by the loaded library.
PFN_vkCreateInstance vkCreateInstance;
PFN_vkDestroyInstance vkDestroyInstance;
PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices;
PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures;
PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties;
PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties;
PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties;
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
PFN_vkCreateDevice vkCreateDevice;
PFN_vkDestroyDevice vkDestroyDevice;
PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties;
PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties;
PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties;
PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties;
PFN_vkGetDeviceQueue vkGetDeviceQueue;
PFN_vkQueueSubmit vkQueueSubmit;
PFN_vkQueueWaitIdle vkQueueWaitIdle;
PFN_vkDeviceWaitIdle vkDeviceWaitIdle;
PFN_vkAllocateMemory vkAllocateMemory;
PFN_vkFreeMemory vkFreeMemory;
PFN_vkMapMemory vkMapMemory;
PFN_vkUnmapMemory vkUnmapMemory;
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment;
PFN_vkBindBufferMemory vkBindBufferMemory;
PFN_vkBindImageMemory vkBindImageMemory;
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements;
PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties;
PFN_vkQueueBindSparse vkQueueBindSparse;
PFN_vkCreateFence vkCreateFence;
PFN_vkDestroyFence vkDestroyFence;
PFN_vkResetFences vkResetFences;
PFN_vkGetFenceStatus vkGetFenceStatus;
PFN_vkWaitForFences vkWaitForFences;
PFN_vkCreateSemaphore vkCreateSemaphore;
PFN_vkDestroySemaphore vkDestroySemaphore;
PFN_vkCreateEvent vkCreateEvent;
PFN_vkDestroyEvent vkDestroyEvent;
PFN_vkGetEventStatus vkGetEventStatus;
PFN_vkSetEvent vkSetEvent;
PFN_vkResetEvent vkResetEvent;
PFN_vkCreateQueryPool vkCreateQueryPool;
PFN_vkDestroyQueryPool vkDestroyQueryPool;
PFN_vkGetQueryPoolResults vkGetQueryPoolResults;
PFN_vkCreateBuffer vkCreateBuffer;
PFN_vkDestroyBuffer vkDestroyBuffer;
PFN_vkCreateBufferView vkCreateBufferView;
PFN_vkDestroyBufferView vkDestroyBufferView;
PFN_vkCreateImage vkCreateImage;
PFN_vkDestroyImage vkDestroyImage;
PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout;
PFN_vkCreateImageView vkCreateImageView;
PFN_vkDestroyImageView vkDestroyImageView;
PFN_vkCreateShaderModule vkCreateShaderModule;
PFN_vkDestroyShaderModule vkDestroyShaderModule;
PFN_vkCreatePipelineCache vkCreatePipelineCache;
PFN_vkDestroyPipelineCache vkDestroyPipelineCache;
PFN_vkGetPipelineCacheData vkGetPipelineCacheData;
PFN_vkMergePipelineCaches vkMergePipelineCaches;
PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines;
PFN_vkCreateComputePipelines vkCreateComputePipelines;
PFN_vkDestroyPipeline vkDestroyPipeline;
PFN_vkCreatePipelineLayout vkCreatePipelineLayout;
PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout;
PFN_vkCreateSampler vkCreateSampler;
PFN_vkDestroySampler vkDestroySampler;
PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout;
PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout;
PFN_vkCreateDescriptorPool vkCreateDescriptorPool;
PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool;
PFN_vkResetDescriptorPool vkResetDescriptorPool;
PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets;
PFN_vkFreeDescriptorSets vkFreeDescriptorSets;
PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets;
PFN_vkCreateFramebuffer vkCreateFramebuffer;
PFN_vkDestroyFramebuffer vkDestroyFramebuffer;
PFN_vkCreateRenderPass vkCreateRenderPass;
PFN_vkDestroyRenderPass vkDestroyRenderPass;
PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity;
PFN_vkCreateCommandPool vkCreateCommandPool;
PFN_vkDestroyCommandPool vkDestroyCommandPool;
PFN_vkResetCommandPool vkResetCommandPool;
PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers;
PFN_vkFreeCommandBuffers vkFreeCommandBuffers;
PFN_vkBeginCommandBuffer vkBeginCommandBuffer;
PFN_vkEndCommandBuffer vkEndCommandBuffer;
PFN_vkResetCommandBuffer vkResetCommandBuffer;
PFN_vkCmdBindPipeline vkCmdBindPipeline;
PFN_vkCmdSetViewport vkCmdSetViewport;
PFN_vkCmdSetScissor vkCmdSetScissor;
PFN_vkCmdSetLineWidth vkCmdSetLineWidth;
PFN_vkCmdSetDepthBias vkCmdSetDepthBias;
PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants;
PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds;
PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask;
PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask;
PFN_vkCmdSetStencilReference vkCmdSetStencilReference;
PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets;
PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer;
PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers;
PFN_vkCmdDraw vkCmdDraw;
PFN_vkCmdDrawIndexed vkCmdDrawIndexed;
PFN_vkCmdDrawIndirect vkCmdDrawIndirect;
PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect;
PFN_vkCmdDispatch vkCmdDispatch;
PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect;
PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
PFN_vkCmdCopyImage vkCmdCopyImage;
PFN_vkCmdBlitImage vkCmdBlitImage;
PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage;
PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer;
PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer;
PFN_vkCmdFillBuffer vkCmdFillBuffer;
PFN_vkCmdClearColorImage vkCmdClearColorImage;
PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage;
PFN_vkCmdClearAttachments vkCmdClearAttachments;
PFN_vkCmdResolveImage vkCmdResolveImage;
PFN_vkCmdSetEvent vkCmdSetEvent;
PFN_vkCmdResetEvent vkCmdResetEvent;
PFN_vkCmdWaitEvents vkCmdWaitEvents;
PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier;
PFN_vkCmdBeginQuery vkCmdBeginQuery;
PFN_vkCmdEndQuery vkCmdEndQuery;
PFN_vkCmdResetQueryPool vkCmdResetQueryPool;
PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp;
PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults;
PFN_vkCmdPushConstants vkCmdPushConstants;
PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass;
PFN_vkCmdNextSubpass vkCmdNextSubpass;
PFN_vkCmdEndRenderPass vkCmdEndRenderPass;
PFN_vkCmdExecuteCommands vkCmdExecuteCommands;
// VK_KHR_surface
PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;
PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR;
PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR;
PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR;
PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR;
// VK_KHR_swapchain
PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR;
PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR;
PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR;
PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR;
PFN_vkQueuePresentKHR vkQueuePresentKHR;
// VK_KHR_display
PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR;
PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR;
PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR;
PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR;
PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR;
PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR;
PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR;
// VK_KHR_display_swapchain
PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR;
// Per-platform WSI (window-system integration) surface creation entry points.
#ifdef VK_USE_PLATFORM_XLIB_KHR
PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR;
PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR;
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR;
PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR;
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR;
PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR;
#endif
#ifdef VK_USE_PLATFORM_MIR_KHR
PFN_vkCreateMirSurfaceKHR vkCreateMirSurfaceKHR;
PFN_vkGetPhysicalDeviceMirPresentationSupportKHR vkGetPhysicalDeviceMirPresentationSupportKHR;
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR;
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR;
PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR;
#endif
PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2;
namespace ln {
namespace detail {
//==============================================================================
// VulkanHelper
// Validation layer names requested when validation is enabled.
// Both the current Khronos layer and the legacy LunarG meta-layer are
// listed so older SDK installs still work. Names are runtime strings —
// they must match the loader's layer manifest exactly.
const std::vector<const char*> VulkanHelper::validationLayers = {
"VK_LAYER_KHRONOS_validation",
"VK_LAYER_LUNARG_standard_validation",
};
// Row types for the static Lumino→Vulkan conversion tables below.
// Each table maps one Lumino enum to its Vulkan counterpart; lookup
// functions index rows directly by the Lumino enum value (see the
// asserts in VulkanHelper::LN*ToVk*), so row order must match enum order.
struct FormatConversionItem {
VkFormat vulkanFormat;
uint32_t bitPerPixel; // Bits per pixel for the format (0 = unknown/unused).
TextureFormat lnFormat;
bool isCompress; // True for block-compressed formats.
};
// Blend factor needs separate color/alpha values: color-channel factors
// are translated to their alpha equivalents for the alpha pipeline stage.
struct BlendFactorConversionItem {
BlendFactor lnValue;
VkBlendFactor vkValueColor;
VkBlendFactor vkValueAlpha;
};
struct BlendOpConversionItem {
BlendOp lnValue;
VkBlendOp vkValue;
};
struct ComparisonFuncConversionItem {
ComparisonFunc lnValue;
VkCompareOp vkValue;
};
struct FillModeConversionItem {
FillMode lnValue;
VkPolygonMode vkValue;
};
struct CullModeConversionItem {
CullMode lnValue;
VkCullModeFlagBits vkValue;
};
struct StencilOpConversionItem {
StencilOp lnValue;
VkStencilOp vkValue;
};
struct FilterModeConversionItem {
TextureFilterMode lnValue;
VkFilter vkValue;
};
struct AddressModeConversionItem {
TextureAddressMode lnValue;
VkSamplerAddressMode vkValue;
};
struct PrimitiveTopologyConversionItem {
PrimitiveTopology lnValue;
VkPrimitiveTopology vkValue;
};
struct VertexElementTypeConversionItem {
VertexElementType lnValue;
VkFormat vkValue;
};
// TextureFormat → VkFormat mapping, indexed directly by (int)TextureFormat
// (LNFormatToVkFormat asserts the row order matches the enum).
// Marked const for consistency with every other conversion table in this
// file — the original declaration was the only mutable one.
static const FormatConversionItem s_formatConversionTable[] = {
    { VK_FORMAT_UNDEFINED, 0, TextureFormat::Unknown, false },
    { VK_FORMAT_R8G8B8A8_UNORM, 32, TextureFormat::RGBA8, false },
    { VK_FORMAT_UNDEFINED, 0, TextureFormat::RGB8, false }, // TODO: remove
    { VK_FORMAT_R16G16B16A16_SFLOAT, 64, TextureFormat::RGBA16F, false },
    { VK_FORMAT_R32G32B32A32_SFLOAT, 128, TextureFormat::RGBA32F, false },
    { VK_FORMAT_R16_SFLOAT, 16, TextureFormat::R16F, false },
    { VK_FORMAT_R32_SFLOAT, 32, TextureFormat::R32F, false },
    { VK_FORMAT_R32_SINT, 32, TextureFormat::R32S, false },
};
// Lumino→Vulkan conversion tables. All lookups index by (int)enum value,
// so each table's row order must mirror its Lumino enum declaration order
// (verified by asserts in the LN*ToVk* converters below).
static const BlendFactorConversionItem s_blendFactorConversionTable[] = {
{ BlendFactor::Zero, VK_BLEND_FACTOR_ZERO, VK_BLEND_FACTOR_ZERO },
{ BlendFactor::One, VK_BLEND_FACTOR_ONE, VK_BLEND_FACTOR_ONE },
{ BlendFactor::SourceColor, VK_BLEND_FACTOR_SRC_COLOR, VK_BLEND_FACTOR_SRC_ALPHA },
{ BlendFactor::InverseSourceColor, VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR, VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA },
{ BlendFactor::SourceAlpha, VK_BLEND_FACTOR_SRC_ALPHA, VK_BLEND_FACTOR_SRC_ALPHA },
{ BlendFactor::InverseSourceAlpha, VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA },
{ BlendFactor::DestinationColor, VK_BLEND_FACTOR_DST_COLOR, VK_BLEND_FACTOR_DST_ALPHA },
{ BlendFactor::InverseDestinationColor, VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR, VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA },
{ BlendFactor::DestinationAlpha, VK_BLEND_FACTOR_DST_ALPHA, VK_BLEND_FACTOR_DST_ALPHA },
{ BlendFactor::InverseDestinationAlpha, VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA, VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA },
};
static const BlendOpConversionItem s_blendOpConversionTable[] = {
{ BlendOp::Add, VK_BLEND_OP_ADD },
{ BlendOp::Subtract, VK_BLEND_OP_SUBTRACT },
{ BlendOp::ReverseSubtract, VK_BLEND_OP_REVERSE_SUBTRACT },
{ BlendOp::Min, VK_BLEND_OP_MIN },
{ BlendOp::Max, VK_BLEND_OP_MAX },
};
static const ComparisonFuncConversionItem s_comparisoFuncConversionTable[] = {
{ ComparisonFunc::Never, VK_COMPARE_OP_NEVER },
{ ComparisonFunc::Less, VK_COMPARE_OP_LESS },
{ ComparisonFunc::LessEqual, VK_COMPARE_OP_LESS_OR_EQUAL },
{ ComparisonFunc::Greater, VK_COMPARE_OP_GREATER },
{ ComparisonFunc::GreaterEqual, VK_COMPARE_OP_GREATER_OR_EQUAL },
{ ComparisonFunc::Equal, VK_COMPARE_OP_EQUAL },
{ ComparisonFunc::NotEqual, VK_COMPARE_OP_NOT_EQUAL },
{ ComparisonFunc::Always, VK_COMPARE_OP_ALWAYS },
};
static const FillModeConversionItem s_fillModeConversionTable[] = {
{ FillMode::Solid, VK_POLYGON_MODE_FILL },
{ FillMode::Wireframe, VK_POLYGON_MODE_LINE },
};
static const CullModeConversionItem s_cullModeConversionTable[] = {
{ CullMode::None, VK_CULL_MODE_NONE },
{ CullMode::Front, VK_CULL_MODE_FRONT_BIT },
{ CullMode::Back, VK_CULL_MODE_BACK_BIT },
};
// NOTE(review): only Keep/Replace are mapped here — confirm StencilOp has
// no further enumerators, otherwise the direct-index lookup walks off the
// end of this table.
static const StencilOpConversionItem s_stencilOpConversionTable[] = {
{ StencilOp::Keep, VK_STENCIL_OP_KEEP },
{ StencilOp::Replace, VK_STENCIL_OP_REPLACE },
};
static const FilterModeConversionItem s_filterModeConversionTable[] = {
{ TextureFilterMode::Point, VK_FILTER_NEAREST },
{ TextureFilterMode::Linear, VK_FILTER_LINEAR },
};
static const AddressModeConversionItem s_addressModeConversionTable[] = {
{ TextureAddressMode::Repeat, VK_SAMPLER_ADDRESS_MODE_REPEAT },
{ TextureAddressMode::Clamp, VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE },
};
static const PrimitiveTopologyConversionItem s_primitiveTopologyConversionTable[] = {
{ PrimitiveTopology::TriangleList, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST },
{ PrimitiveTopology::TriangleStrip, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP },
{ PrimitiveTopology::TriangleFan, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN },
{ PrimitiveTopology::LineList, VK_PRIMITIVE_TOPOLOGY_LINE_LIST },
{ PrimitiveTopology::LineStrip, VK_PRIMITIVE_TOPOLOGY_LINE_STRIP },
{ PrimitiveTopology::PointList, VK_PRIMITIVE_TOPOLOGY_POINT_LIST },
};
static const VertexElementTypeConversionItem s_vertexElementTypeConversionTable[] = {
{ VertexElementType::Unknown, VK_FORMAT_UNDEFINED },
{ VertexElementType::Float1, VK_FORMAT_R32_SFLOAT },
{ VertexElementType::Float2, VK_FORMAT_R32G32_SFLOAT },
{ VertexElementType::Float3, VK_FORMAT_R32G32B32_SFLOAT },
{ VertexElementType::Float4, VK_FORMAT_R32G32B32A32_SFLOAT },
{ VertexElementType::Ubyte4, VK_FORMAT_R8G8B8A8_UINT },
{ VertexElementType::Color4, VK_FORMAT_R8G8B8A8_UNORM }, // UNORM : https://msdn.microsoft.com/ja-jp/library/ee415736%28v=vs.85%29.aspx?f=255&MSPPError=-2147217396
{ VertexElementType::Short2, VK_FORMAT_R16G16_SINT },
{ VertexElementType::Short4, VK_FORMAT_R16G16B16A16_SINT },
};
VkFormat VulkanHelper::LNFormatToVkFormat(TextureFormat format) {
    // Direct index: table rows mirror the TextureFormat enum order
    // (verified in debug builds by the assert).
    const auto& row = s_formatConversionTable[static_cast<int>(format)];
    assert(row.lnFormat == format);
    return row.vulkanFormat;
}
TextureFormat VulkanHelper::VkFormatToLNFormat(VkFormat format) {
    // Linear scan: the table is tiny and this is not a hot path.
    for (const auto& row : s_formatConversionTable) {
        if (row.vulkanFormat == format) return row.lnFormat;
    }
    return TextureFormat::Unknown; // No matching row.
}
VkBlendFactor VulkanHelper::LNBlendFactorToVkBlendFactor_Color(BlendFactor value) {
    // Color-channel variant of the blend-factor mapping.
    const auto& row = s_blendFactorConversionTable[static_cast<int>(value)];
    assert(row.lnValue == value);
    return row.vkValueColor;
}
VkBlendFactor VulkanHelper::LNBlendFactorToVkBlendFactor_Alpha(BlendFactor value) {
    // Alpha-channel variant: color factors translate to their *_ALPHA forms.
    const auto& row = s_blendFactorConversionTable[static_cast<int>(value)];
    assert(row.lnValue == value);
    return row.vkValueAlpha;
}
VkBlendOp VulkanHelper::LNBlendOpToVkBlendOp(BlendOp value) {
    // Direct index; row order mirrors the BlendOp enum.
    const auto& row = s_blendOpConversionTable[static_cast<int>(value)];
    assert(row.lnValue == value);
    return row.vkValue;
}
VkCompareOp VulkanHelper::LNComparisonFuncToVkCompareOp(ComparisonFunc value) {
    // Direct index; row order mirrors the ComparisonFunc enum.
    const auto& row = s_comparisoFuncConversionTable[static_cast<int>(value)];
    assert(row.lnValue == value);
    return row.vkValue;
}
VkPolygonMode VulkanHelper::LNFillModeToVkPolygonMode(FillMode value) {
    // Direct index; row order mirrors the FillMode enum.
    const auto& row = s_fillModeConversionTable[static_cast<int>(value)];
    assert(row.lnValue == value);
    return row.vkValue;
}
VkCullModeFlagBits VulkanHelper::LNCullModeToVkCullMode(CullMode value) {
    // Direct index; row order mirrors the CullMode enum.
    const auto& row = s_cullModeConversionTable[static_cast<int>(value)];
    assert(row.lnValue == value);
    return row.vkValue;
}
VkStencilOp VulkanHelper::LNStencilOpToVkStencilOp(StencilOp value) {
    // Direct index; the table currently maps only Keep/Replace.
    const auto& row = s_stencilOpConversionTable[static_cast<int>(value)];
    assert(row.lnValue == value);
    return row.vkValue;
}
VkFilter VulkanHelper::LNTextureFilterModeToVkFilter(TextureFilterMode value) {
    // Direct index; row order mirrors the TextureFilterMode enum.
    const auto& row = s_filterModeConversionTable[static_cast<int>(value)];
    assert(row.lnValue == value);
    return row.vkValue;
}
VkSamplerAddressMode VulkanHelper::LNTextureAddressModeModeToVkSamplerAddressMode(TextureAddressMode value) {
    // Direct index; row order mirrors the TextureAddressMode enum.
    const auto& row = s_addressModeConversionTable[static_cast<int>(value)];
    assert(row.lnValue == value);
    return row.vkValue;
}
VkPrimitiveTopology VulkanHelper::LNPrimitiveTopologyToVkPrimitiveTopology(PrimitiveTopology value) {
    // Direct index; row order mirrors the PrimitiveTopology enum.
    const auto& row = s_primitiveTopologyConversionTable[static_cast<int>(value)];
    assert(row.lnValue == value);
    return row.vkValue;
}
VkFormat VulkanHelper::LNVertexElementTypeToVkFormat(VertexElementType value) {
    // Direct index; row order mirrors the VertexElementType enum.
    const auto& row = s_vertexElementTypeConversionTable[static_cast<int>(value)];
    assert(row.lnValue == value);
    return row.vkValue;
}
bool VulkanHelper::initVulkanFunctions() {
static bool loaded = false;
if (loaded) return true;
#if defined(LN_OS_WIN32)
HMODULE hModule = ::LoadLibraryW(L"vulkan-1.dll");
if (!hModule) return false;
#define FUNC_PTR(type, name) reinterpret_cast<type>(::GetProcAddress(hModule, name))
#else
void* libvulkan = dlopen("libvulkan.so", RTLD_NOW | RTLD_LOCAL);
if (!libvulkan) return false;
#define FUNC_PTR(type, name) reinterpret_cast<type>(dlsym(libvulkan, name))
#endif
// core
vkCreateInstance = FUNC_PTR(PFN_vkCreateInstance, "vkCreateInstance");
vkDestroyInstance = FUNC_PTR(PFN_vkDestroyInstance, "vkDestroyInstance");
vkEnumeratePhysicalDevices = FUNC_PTR(PFN_vkEnumeratePhysicalDevices, "vkEnumeratePhysicalDevices");
vkGetPhysicalDeviceFeatures = FUNC_PTR(PFN_vkGetPhysicalDeviceFeatures, "vkGetPhysicalDeviceFeatures");
vkGetPhysicalDeviceFormatProperties = FUNC_PTR(PFN_vkGetPhysicalDeviceFormatProperties, "vkGetPhysicalDeviceFormatProperties");
vkGetPhysicalDeviceImageFormatProperties = FUNC_PTR(PFN_vkGetPhysicalDeviceImageFormatProperties, "vkGetPhysicalDeviceImageFormatProperties");
vkGetPhysicalDeviceProperties = FUNC_PTR(PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
vkGetPhysicalDeviceQueueFamilyProperties = FUNC_PTR(PFN_vkGetPhysicalDeviceQueueFamilyProperties, "vkGetPhysicalDeviceQueueFamilyProperties");
vkGetPhysicalDeviceMemoryProperties = FUNC_PTR(PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
vkGetInstanceProcAddr = FUNC_PTR(PFN_vkGetInstanceProcAddr, "vkGetInstanceProcAddr");
vkGetDeviceProcAddr = FUNC_PTR(PFN_vkGetDeviceProcAddr, "vkGetDeviceProcAddr");
vkCreateDevice = FUNC_PTR(PFN_vkCreateDevice, "vkCreateDevice");
vkDestroyDevice = FUNC_PTR(PFN_vkDestroyDevice, "vkDestroyDevice");
vkEnumerateInstanceExtensionProperties = FUNC_PTR(PFN_vkEnumerateInstanceExtensionProperties, "vkEnumerateInstanceExtensionProperties");
vkEnumerateDeviceExtensionProperties = FUNC_PTR(PFN_vkEnumerateDeviceExtensionProperties, "vkEnumerateDeviceExtensionProperties");
vkEnumerateInstanceLayerProperties = FUNC_PTR(PFN_vkEnumerateInstanceLayerProperties, "vkEnumerateInstanceLayerProperties");
vkEnumerateDeviceLayerProperties = FUNC_PTR(PFN_vkEnumerateDeviceLayerProperties, "vkEnumerateDeviceLayerProperties");
vkGetDeviceQueue = FUNC_PTR(PFN_vkGetDeviceQueue, "vkGetDeviceQueue");
vkQueueSubmit = FUNC_PTR(PFN_vkQueueSubmit, "vkQueueSubmit");
vkQueueWaitIdle = FUNC_PTR(PFN_vkQueueWaitIdle, "vkQueueWaitIdle");
vkDeviceWaitIdle = FUNC_PTR(PFN_vkDeviceWaitIdle, "vkDeviceWaitIdle");
vkAllocateMemory = FUNC_PTR(PFN_vkAllocateMemory, "vkAllocateMemory");
vkFreeMemory = FUNC_PTR(PFN_vkFreeMemory, "vkFreeMemory");
vkMapMemory = FUNC_PTR(PFN_vkMapMemory, "vkMapMemory");
vkUnmapMemory = FUNC_PTR(PFN_vkUnmapMemory, "vkUnmapMemory");
vkFlushMappedMemoryRanges = FUNC_PTR(PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
vkInvalidateMappedMemoryRanges = FUNC_PTR(PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
vkGetDeviceMemoryCommitment = FUNC_PTR(PFN_vkGetDeviceMemoryCommitment, "vkGetDeviceMemoryCommitment");
vkBindBufferMemory = FUNC_PTR(PFN_vkBindBufferMemory, "vkBindBufferMemory");
vkBindImageMemory = FUNC_PTR(PFN_vkBindImageMemory, "vkBindImageMemory");
vkGetBufferMemoryRequirements = FUNC_PTR(PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
vkGetImageMemoryRequirements = FUNC_PTR(PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
vkGetImageSparseMemoryRequirements = FUNC_PTR(PFN_vkGetImageSparseMemoryRequirements, "vkGetImageSparseMemoryRequirements");
vkGetPhysicalDeviceSparseImageFormatProperties = FUNC_PTR(PFN_vkGetPhysicalDeviceSparseImageFormatProperties, "vkGetPhysicalDeviceSparseImageFormatProperties");
vkQueueBindSparse = FUNC_PTR(PFN_vkQueueBindSparse, "vkQueueBindSparse");
vkCreateFence = FUNC_PTR(PFN_vkCreateFence, "vkCreateFence");
vkDestroyFence = FUNC_PTR(PFN_vkDestroyFence, "vkDestroyFence");
vkResetFences = FUNC_PTR(PFN_vkResetFences, "vkResetFences");
vkGetFenceStatus = FUNC_PTR(PFN_vkGetFenceStatus, "vkGetFenceStatus");
vkWaitForFences = FUNC_PTR(PFN_vkWaitForFences, "vkWaitForFences");
vkCreateSemaphore = FUNC_PTR(PFN_vkCreateSemaphore, "vkCreateSemaphore");
vkDestroySemaphore = FUNC_PTR(PFN_vkDestroySemaphore, "vkDestroySemaphore");
vkCreateEvent = FUNC_PTR(PFN_vkCreateEvent, "vkCreateEvent");
vkDestroyEvent = FUNC_PTR(PFN_vkDestroyEvent, "vkDestroyEvent");
vkGetEventStatus = FUNC_PTR(PFN_vkGetEventStatus, "vkGetEventStatus");
vkSetEvent = FUNC_PTR(PFN_vkSetEvent, "vkSetEvent");
vkResetEvent = FUNC_PTR(PFN_vkResetEvent, "vkResetEvent");
vkCreateQueryPool = FUNC_PTR(PFN_vkCreateQueryPool, "vkCreateQueryPool");
vkDestroyQueryPool = FUNC_PTR(PFN_vkDestroyQueryPool, "vkDestroyQueryPool");
vkGetQueryPoolResults = FUNC_PTR(PFN_vkGetQueryPoolResults, "vkGetQueryPoolResults");
vkCreateBuffer = FUNC_PTR(PFN_vkCreateBuffer, "vkCreateBuffer");
vkDestroyBuffer = FUNC_PTR(PFN_vkDestroyBuffer, "vkDestroyBuffer");
vkCreateBufferView = FUNC_PTR(PFN_vkCreateBufferView, "vkCreateBufferView");
vkDestroyBufferView = FUNC_PTR(PFN_vkDestroyBufferView, "vkDestroyBufferView");
vkCreateImage = FUNC_PTR(PFN_vkCreateImage, "vkCreateImage");
vkDestroyImage = FUNC_PTR(PFN_vkDestroyImage, "vkDestroyImage");
vkGetImageSubresourceLayout = FUNC_PTR(PFN_vkGetImageSubresourceLayout, "vkGetImageSubresourceLayout");
vkCreateImageView = FUNC_PTR(PFN_vkCreateImageView, "vkCreateImageView");
vkDestroyImageView = FUNC_PTR(PFN_vkDestroyImageView, "vkDestroyImageView");
vkCreateShaderModule = FUNC_PTR(PFN_vkCreateShaderModule, "vkCreateShaderModule");
vkDestroyShaderModule = FUNC_PTR(PFN_vkDestroyShaderModule, "vkDestroyShaderModule");
vkCreatePipelineCache = FUNC_PTR(PFN_vkCreatePipelineCache, "vkCreatePipelineCache");
vkDestroyPipelineCache = FUNC_PTR(PFN_vkDestroyPipelineCache, "vkDestroyPipelineCache");
vkGetPipelineCacheData = FUNC_PTR(PFN_vkGetPipelineCacheData, "vkGetPipelineCacheData");
vkMergePipelineCaches = FUNC_PTR(PFN_vkMergePipelineCaches, "vkMergePipelineCaches");
vkCreateGraphicsPipelines = FUNC_PTR(PFN_vkCreateGraphicsPipelines, "vkCreateGraphicsPipelines");
vkCreateComputePipelines = FUNC_PTR(PFN_vkCreateComputePipelines, "vkCreateComputePipelines");
vkDestroyPipeline = FUNC_PTR(PFN_vkDestroyPipeline, "vkDestroyPipeline");
vkCreatePipelineLayout = FUNC_PTR(PFN_vkCreatePipelineLayout, "vkCreatePipelineLayout");
vkDestroyPipelineLayout = FUNC_PTR(PFN_vkDestroyPipelineLayout, "vkDestroyPipelineLayout");
vkCreateSampler = FUNC_PTR(PFN_vkCreateSampler, "vkCreateSampler");
vkDestroySampler = FUNC_PTR(PFN_vkDestroySampler, "vkDestroySampler");
vkCreateDescriptorSetLayout = FUNC_PTR(PFN_vkCreateDescriptorSetLayout, "vkCreateDescriptorSetLayout");
vkDestroyDescriptorSetLayout = FUNC_PTR(PFN_vkDestroyDescriptorSetLayout, "vkDestroyDescriptorSetLayout");
vkCreateDescriptorPool = FUNC_PTR(PFN_vkCreateDescriptorPool, "vkCreateDescriptorPool");
vkDestroyDescriptorPool = FUNC_PTR(PFN_vkDestroyDescriptorPool, "vkDestroyDescriptorPool");
vkResetDescriptorPool = FUNC_PTR(PFN_vkResetDescriptorPool, "vkResetDescriptorPool");
vkAllocateDescriptorSets = FUNC_PTR(PFN_vkAllocateDescriptorSets, "vkAllocateDescriptorSets");
vkFreeDescriptorSets = FUNC_PTR(PFN_vkFreeDescriptorSets, "vkFreeDescriptorSets");
vkUpdateDescriptorSets = FUNC_PTR(PFN_vkUpdateDescriptorSets, "vkUpdateDescriptorSets");
vkCreateFramebuffer = FUNC_PTR(PFN_vkCreateFramebuffer, "vkCreateFramebuffer");
vkDestroyFramebuffer = FUNC_PTR(PFN_vkDestroyFramebuffer, "vkDestroyFramebuffer");
vkCreateRenderPass = FUNC_PTR(PFN_vkCreateRenderPass, "vkCreateRenderPass");
vkDestroyRenderPass = FUNC_PTR(PFN_vkDestroyRenderPass, "vkDestroyRenderPass");
vkGetRenderAreaGranularity = FUNC_PTR(PFN_vkGetRenderAreaGranularity, "vkGetRenderAreaGranularity");
vkCreateCommandPool = FUNC_PTR(PFN_vkCreateCommandPool, "vkCreateCommandPool");
vkDestroyCommandPool = FUNC_PTR(PFN_vkDestroyCommandPool, "vkDestroyCommandPool");
vkResetCommandPool = FUNC_PTR(PFN_vkResetCommandPool, "vkResetCommandPool");
vkAllocateCommandBuffers = FUNC_PTR(PFN_vkAllocateCommandBuffers, "vkAllocateCommandBuffers");
vkFreeCommandBuffers = FUNC_PTR(PFN_vkFreeCommandBuffers, "vkFreeCommandBuffers");
vkBeginCommandBuffer = FUNC_PTR(PFN_vkBeginCommandBuffer, "vkBeginCommandBuffer");
vkEndCommandBuffer = FUNC_PTR(PFN_vkEndCommandBuffer, "vkEndCommandBuffer");
vkResetCommandBuffer = FUNC_PTR(PFN_vkResetCommandBuffer, "vkResetCommandBuffer");
vkCmdBindPipeline = FUNC_PTR(PFN_vkCmdBindPipeline, "vkCmdBindPipeline");
vkCmdSetViewport = FUNC_PTR(PFN_vkCmdSetViewport, "vkCmdSetViewport");
vkCmdSetScissor = FUNC_PTR(PFN_vkCmdSetScissor, "vkCmdSetScissor");
vkCmdSetLineWidth = FUNC_PTR(PFN_vkCmdSetLineWidth, "vkCmdSetLineWidth");
vkCmdSetDepthBias = FUNC_PTR(PFN_vkCmdSetDepthBias, "vkCmdSetDepthBias");
vkCmdSetBlendConstants = FUNC_PTR(PFN_vkCmdSetBlendConstants, "vkCmdSetBlendConstants");
vkCmdSetDepthBounds = FUNC_PTR(PFN_vkCmdSetDepthBounds, "vkCmdSetDepthBounds");
vkCmdSetStencilCompareMask = FUNC_PTR(PFN_vkCmdSetStencilCompareMask, "vkCmdSetStencilCompareMask");
vkCmdSetStencilWriteMask = FUNC_PTR(PFN_vkCmdSetStencilWriteMask, "vkCmdSetStencilWriteMask");
vkCmdSetStencilReference = FUNC_PTR(PFN_vkCmdSetStencilReference, "vkCmdSetStencilReference");
vkCmdBindDescriptorSets = FUNC_PTR(PFN_vkCmdBindDescriptorSets, "vkCmdBindDescriptorSets");
vkCmdBindIndexBuffer = FUNC_PTR(PFN_vkCmdBindIndexBuffer, "vkCmdBindIndexBuffer");
vkCmdBindVertexBuffers = FUNC_PTR(PFN_vkCmdBindVertexBuffers, "vkCmdBindVertexBuffers");
vkCmdDraw = FUNC_PTR(PFN_vkCmdDraw, "vkCmdDraw");
vkCmdDrawIndexed = FUNC_PTR(PFN_vkCmdDrawIndexed, "vkCmdDrawIndexed");
vkCmdDrawIndirect = FUNC_PTR(PFN_vkCmdDrawIndirect, "vkCmdDrawIndirect");
vkCmdDrawIndexedIndirect = FUNC_PTR(PFN_vkCmdDrawIndexedIndirect, "vkCmdDrawIndexedIndirect");
vkCmdDispatch = FUNC_PTR(PFN_vkCmdDispatch, "vkCmdDispatch");
vkCmdDispatchIndirect = FUNC_PTR(PFN_vkCmdDispatchIndirect, "vkCmdDispatchIndirect");
vkCmdCopyBuffer = FUNC_PTR(PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");
vkCmdCopyImage = FUNC_PTR(PFN_vkCmdCopyImage, "vkCmdCopyImage");
vkCmdBlitImage = FUNC_PTR(PFN_vkCmdBlitImage, "vkCmdBlitImage");
vkCmdCopyBufferToImage = FUNC_PTR(PFN_vkCmdCopyBufferToImage, "vkCmdCopyBufferToImage");
vkCmdCopyImageToBuffer = FUNC_PTR(PFN_vkCmdCopyImageToBuffer, "vkCmdCopyImageToBuffer");
vkCmdUpdateBuffer = FUNC_PTR(PFN_vkCmdUpdateBuffer, "vkCmdUpdateBuffer");
vkCmdFillBuffer = FUNC_PTR(PFN_vkCmdFillBuffer, "vkCmdFillBuffer");
vkCmdClearColorImage = FUNC_PTR(PFN_vkCmdClearColorImage, "vkCmdClearColorImage");
vkCmdClearDepthStencilImage = FUNC_PTR(PFN_vkCmdClearDepthStencilImage, "vkCmdClearDepthStencilImage");
vkCmdClearAttachments = FUNC_PTR(PFN_vkCmdClearAttachments, "vkCmdClearAttachments");
vkCmdResolveImage = FUNC_PTR(PFN_vkCmdResolveImage, "vkCmdResolveImage");
vkCmdSetEvent = FUNC_PTR(PFN_vkCmdSetEvent, "vkCmdSetEvent");
vkCmdResetEvent = FUNC_PTR(PFN_vkCmdResetEvent, "vkCmdResetEvent");
vkCmdWaitEvents = FUNC_PTR(PFN_vkCmdWaitEvents, "vkCmdWaitEvents");
vkCmdPipelineBarrier = FUNC_PTR(PFN_vkCmdPipelineBarrier, "vkCmdPipelineBarrier");
vkCmdBeginQuery = FUNC_PTR(PFN_vkCmdBeginQuery, "vkCmdBeginQuery");
vkCmdEndQuery = FUNC_PTR(PFN_vkCmdEndQuery, "vkCmdEndQuery");
vkCmdResetQueryPool = FUNC_PTR(PFN_vkCmdResetQueryPool, "vkCmdResetQueryPool");
vkCmdWriteTimestamp = FUNC_PTR(PFN_vkCmdWriteTimestamp, "vkCmdWriteTimestamp");
vkCmdCopyQueryPoolResults = FUNC_PTR(PFN_vkCmdCopyQueryPoolResults, "vkCmdCopyQueryPoolResults");
vkCmdPushConstants = FUNC_PTR(PFN_vkCmdPushConstants, "vkCmdPushConstants");
vkCmdBeginRenderPass = FUNC_PTR(PFN_vkCmdBeginRenderPass, "vkCmdBeginRenderPass");
vkCmdNextSubpass = FUNC_PTR(PFN_vkCmdNextSubpass, "vkCmdNextSubpass");
vkCmdEndRenderPass = FUNC_PTR(PFN_vkCmdEndRenderPass, "vkCmdEndRenderPass");
vkCmdExecuteCommands = FUNC_PTR(PFN_vkCmdExecuteCommands, "vkCmdExecuteCommands");
// VK_KHR_surface
vkDestroySurfaceKHR = FUNC_PTR(PFN_vkDestroySurfaceKHR, "vkDestroySurfaceKHR");
vkGetPhysicalDeviceSurfaceSupportKHR = FUNC_PTR(PFN_vkGetPhysicalDeviceSurfaceSupportKHR, "vkGetPhysicalDeviceSurfaceSupportKHR");
vkGetPhysicalDeviceSurfaceCapabilitiesKHR = FUNC_PTR(PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
vkGetPhysicalDeviceSurfaceFormatsKHR = FUNC_PTR(PFN_vkGetPhysicalDeviceSurfaceFormatsKHR, "vkGetPhysicalDeviceSurfaceFormatsKHR");
vkGetPhysicalDeviceSurfacePresentModesKHR = FUNC_PTR(PFN_vkGetPhysicalDeviceSurfacePresentModesKHR, "vkGetPhysicalDeviceSurfacePresentModesKHR");
// VK_KHR_swapchain
vkCreateSwapchainKHR = FUNC_PTR(PFN_vkCreateSwapchainKHR, "vkCreateSwapchainKHR");
vkDestroySwapchainKHR = FUNC_PTR(PFN_vkDestroySwapchainKHR, "vkDestroySwapchainKHR");
vkGetSwapchainImagesKHR = FUNC_PTR(PFN_vkGetSwapchainImagesKHR, "vkGetSwapchainImagesKHR");
vkAcquireNextImageKHR = FUNC_PTR(PFN_vkAcquireNextImageKHR, "vkAcquireNextImageKHR");
vkQueuePresentKHR = FUNC_PTR(PFN_vkQueuePresentKHR, "vkQueuePresentKHR");
// VK_KHR_display
vkGetPhysicalDeviceDisplayPropertiesKHR = FUNC_PTR(PFN_vkGetPhysicalDeviceDisplayPropertiesKHR, "vkGetPhysicalDeviceDisplayPropertiesKHR");
vkGetPhysicalDeviceDisplayPlanePropertiesKHR = FUNC_PTR(PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR");
vkGetDisplayPlaneSupportedDisplaysKHR = FUNC_PTR(PFN_vkGetDisplayPlaneSupportedDisplaysKHR, "vkGetDisplayPlaneSupportedDisplaysKHR");
vkGetDisplayModePropertiesKHR = FUNC_PTR(PFN_vkGetDisplayModePropertiesKHR, "vkGetDisplayModePropertiesKHR");
vkCreateDisplayModeKHR = FUNC_PTR(PFN_vkCreateDisplayModeKHR, "vkCreateDisplayModeKHR");
vkGetDisplayPlaneCapabilitiesKHR = FUNC_PTR(PFN_vkGetDisplayPlaneCapabilitiesKHR, "vkGetDisplayPlaneCapabilitiesKHR");
vkCreateDisplayPlaneSurfaceKHR = FUNC_PTR(PFN_vkCreateDisplayPlaneSurfaceKHR, "vkCreateDisplayPlaneSurfaceKHR");
// VK_KHR_display_swapchain
vkCreateSharedSwapchainsKHR = FUNC_PTR(PFN_vkCreateSharedSwapchainsKHR, "vkCreateSharedSwapchainsKHR");
#ifdef VK_USE_PLATFORM_XLIB_KHR
vkCreateXlibSurfaceKHR = FUNC_PTR(PFN_vkCreateXlibSurfaceKHR, "vkCreateXlibSurfaceKHR");
vkGetPhysicalDeviceXlibPresentationSupportKHR = FUNC_PTR(PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR, "vkGetPhysicalDeviceXlibPresentationSupportKHR");
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
vkCreateXcbSurfaceKHR = FUNC_PTR(PFN_vkCreateXcbSurfaceKHR, "vkCreateXcbSurfaceKHR");
vkGetPhysicalDeviceXcbPresentationSupportKHR = FUNC_PTR(PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR, "vkGetPhysicalDeviceXcbPresentationSupportKHR");
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
vkCreateWaylandSurfaceKHR = FUNC_PTR(PFN_vkCreateWaylandSurfaceKHR, "vkCreateWaylandSurfaceKHR");
vkGetPhysicalDeviceWaylandPresentationSupportKHR = FUNC_PTR(PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR, "vkGetPhysicalDeviceWaylandPresentationSupportKHR");
#endif
#ifdef VK_USE_PLATFORM_MIR_KHR
vkCreateMirSurfaceKHR = FUNC_PTR(PFN_vkCreateMirSurfaceKHR, "vkCreateMirSurfaceKHR");
vkGetPhysicalDeviceMirPresentationSupportKHR = FUNC_PTR(PFN_vkGetPhysicalDeviceMirPresentationSupportKHR, "vkGetPhysicalDeviceMirPresentationSupportKHR");
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
vkCreateAndroidSurfaceKHR = FUNC_PTR(PFN_vkCreateAndroidSurfaceKHR, "vkCreateAndroidSurfaceKHR");
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
vkCreateWin32SurfaceKHR = FUNC_PTR(PFN_vkCreateWin32SurfaceKHR, "vkCreateWin32SurfaceKHR");
vkGetPhysicalDeviceWin32PresentationSupportKHR = FUNC_PTR(PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR, "vkGetPhysicalDeviceWin32PresentationSupportKHR");
#endif
vkGetPhysicalDeviceProperties2 = FUNC_PTR(PFN_vkGetPhysicalDeviceProperties2, "vkGetPhysicalDeviceProperties2");
loaded = true;
return true;
}
bool VulkanHelper::checkVulkanSupported() {
if (!VulkanHelper::initVulkanFunctions()) {
LN_LOG_WARNING("Valid vulkan library not found.");
return false;
}
VkApplicationInfo appInfo = {};
appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
appInfo.pApplicationName = "Lumino Application";
appInfo.applicationVersion = VK_MAKE_VERSION(1, 0, 0);
appInfo.pEngineName = "Lumino Engine";
appInfo.engineVersion = VK_MAKE_VERSION(1, 0, 0);
appInfo.apiVersion = VK_MAKE_VERSION(1, 1, 0);
VkInstanceCreateInfo createInfo = {};
createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
createInfo.pApplicationInfo = &appInfo;
VkInstance instance = VK_NULL_HANDLE;
VkResult result = vkCreateInstance(&createInfo, nullptr, &instance);
if (instance) {
vkDestroyInstance(instance, nullptr);
}
return result == VK_SUCCESS;
}
// Maps a VkResult value to its enumerator name for logging/diagnostics.
// Returns a placeholder string for values not covered by the switch.
// Fix: the fallback string was misspelled "Unkonwn".
const char* VulkanHelper::getVkResultName(VkResult result) {
#define VK_RESULT_VALUE(v) \
    case v:                \
        return #v
    switch (result) {
        VK_RESULT_VALUE(VK_SUCCESS);
        VK_RESULT_VALUE(VK_NOT_READY);
        VK_RESULT_VALUE(VK_TIMEOUT);
        VK_RESULT_VALUE(VK_EVENT_SET);
        VK_RESULT_VALUE(VK_EVENT_RESET);
        VK_RESULT_VALUE(VK_INCOMPLETE); // and VK_RESULT_END_RANGE
        VK_RESULT_VALUE(VK_ERROR_OUT_OF_HOST_MEMORY);
        VK_RESULT_VALUE(VK_ERROR_OUT_OF_DEVICE_MEMORY);
        VK_RESULT_VALUE(VK_ERROR_INITIALIZATION_FAILED);
        VK_RESULT_VALUE(VK_ERROR_DEVICE_LOST);
        VK_RESULT_VALUE(VK_ERROR_MEMORY_MAP_FAILED);
        VK_RESULT_VALUE(VK_ERROR_LAYER_NOT_PRESENT);
        VK_RESULT_VALUE(VK_ERROR_EXTENSION_NOT_PRESENT);
        VK_RESULT_VALUE(VK_ERROR_FEATURE_NOT_PRESENT);
        VK_RESULT_VALUE(VK_ERROR_INCOMPATIBLE_DRIVER);
        VK_RESULT_VALUE(VK_ERROR_TOO_MANY_OBJECTS);
        VK_RESULT_VALUE(VK_ERROR_FORMAT_NOT_SUPPORTED);
        VK_RESULT_VALUE(VK_ERROR_FRAGMENTED_POOL);         // and VK_RESULT_BEGIN_RANGE
        VK_RESULT_VALUE(VK_ERROR_OUT_OF_POOL_MEMORY);      // and VK_ERROR_OUT_OF_POOL_MEMORY_KHR
        VK_RESULT_VALUE(VK_ERROR_INVALID_EXTERNAL_HANDLE); // and VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR
        VK_RESULT_VALUE(VK_ERROR_SURFACE_LOST_KHR);
        VK_RESULT_VALUE(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR);
        VK_RESULT_VALUE(VK_SUBOPTIMAL_KHR);
        VK_RESULT_VALUE(VK_ERROR_OUT_OF_DATE_KHR);
        VK_RESULT_VALUE(VK_ERROR_INCOMPATIBLE_DISPLAY_KHR);
        VK_RESULT_VALUE(VK_ERROR_VALIDATION_FAILED_EXT);
        VK_RESULT_VALUE(VK_ERROR_INVALID_SHADER_NV);
        //VK_RESULT_VALUE(VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT);
        VK_RESULT_VALUE(VK_ERROR_FRAGMENTATION_EXT);
        VK_RESULT_VALUE(VK_ERROR_NOT_PERMITTED_EXT);
        //VK_RESULT_VALUE(VK_ERROR_INVALID_DEVICE_ADDRESS_EXT);
        //VK_RESULT_VALUE(VK_RESULT_RANGE_SIZE);
        VK_RESULT_VALUE(VK_RESULT_MAX_ENUM);
    }
#undef VK_RESULT_VALUE
    return "<Unknown VkResult>";
}
// Queries the instance layers available on this system and returns the subset
// of the requested `validationLayers` that is actually installed.
// Fix: removed `layerFound`, a local that was declared but never read or written.
std::vector<const char*> VulkanHelper::checkValidationLayerSupport() {
    uint32_t layerCount = 0;
    vkEnumerateInstanceLayerProperties(&layerCount, nullptr);

    std::vector<VkLayerProperties> availableLayers(layerCount);
    vkEnumerateInstanceLayerProperties(&layerCount, availableLayers.data());

    std::vector<const char*> result;
    for (const char* layerName : validationLayers) {
        for (const auto& layerProperties : availableLayers) {
            if (strcmp(layerName, layerProperties.layerName) == 0) {
                result.push_back(layerName);
                break;
            }
        }
    }
    return result;
}
// Returns the number of vertices consumed when drawing `primitiveCount`
// primitives with the given topology (e.g. a triangle list needs 3 per triangle,
// a strip needs 2 shared vertices plus one per additional primitive).
int VulkanHelper::getPrimitiveVertexCount(PrimitiveTopology primitive, int primitiveCount) {
    switch (primitive) {
        case PrimitiveTopology::PointList:
            return primitiveCount;
        case PrimitiveTopology::LineList:
            return 2 * primitiveCount;
        case PrimitiveTopology::LineStrip:
            return primitiveCount + 1;
        case PrimitiveTopology::TriangleList:
            return 3 * primitiveCount;
        case PrimitiveTopology::TriangleStrip:
        case PrimitiveTopology::TriangleFan:
            return primitiveCount + 2;
        default:
            LN_UNREACHABLE();
            return 0;
    }
}
// Creates a 2D VkImageView over `image`, spanning mip levels [0, mipLevel) and a
// single array layer. On success writes the view to `outView`; failure of
// vkCreateImageView propagates through LN_VK_CHECK.
Result VulkanHelper::createImageView(VulkanDevice* deviceContext, VkImage image, VkFormat format, uint32_t mipLevel, VkImageAspectFlags aspectFlags, VkImageView* outView) {
    LN_CHECK(deviceContext);
    LN_CHECK(mipLevel >= 1);

    VkImageViewCreateInfo viewInfo = {};
    viewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
    viewInfo.image = image;
    viewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D; // 2D views only; no cube/array support here
    viewInfo.format = format;
    viewInfo.subresourceRange.aspectMask = aspectFlags;
    viewInfo.subresourceRange.baseMipLevel = 0;
    viewInfo.subresourceRange.levelCount = mipLevel; // view covers the whole requested mip chain
    viewInfo.subresourceRange.baseArrayLayer = 0;
    viewInfo.subresourceRange.layerCount = 1;
    LN_VK_CHECK(vkCreateImageView(deviceContext->vulkanDevice(), &viewInfo, deviceContext->vulkanAllocator(), outView));
    return ok();
}
//==============================================================================
// AbstractVulkanAllocator
// VkAllocationCallbacks trampoline: forwards pfnAllocation calls to the
// AbstractVulkanAllocator instance stashed in pUserData.
VKAPI_ATTR
void* VKAPI_CALL AllocCallback(
    void* pUserData,
    size_t size,
    size_t alignment,
    VkSystemAllocationScope scope) {
    auto* self = static_cast<AbstractVulkanAllocator*>(pUserData);
    return self->alloc(size, alignment, scope);
}
// VkAllocationCallbacks trampoline: forwards pfnReallocation calls to the
// AbstractVulkanAllocator instance stashed in pUserData.
VKAPI_ATTR
void* VKAPI_CALL ReallocCallback(
    void* pUserData,
    void* pOriginal,
    size_t size,
    size_t alignment,
    VkSystemAllocationScope scope) {
    auto* self = static_cast<AbstractVulkanAllocator*>(pUserData);
    return self->realloc(pOriginal, size, alignment, scope);
}
// VkAllocationCallbacks trampoline: forwards pfnFree calls to the
// AbstractVulkanAllocator instance stashed in pUserData.
VKAPI_ATTR
void VKAPI_CALL FreeCallback(void* pUserData, void* pMemory) {
    auto* self = static_cast<AbstractVulkanAllocator*>(pUserData);
    self->free(pMemory);
}
// Zero-initializes the VkAllocationCallbacks table; the entries are filled in by init().
AbstractVulkanAllocator::AbstractVulkanAllocator()
    : m_allocatorCallbacks() {
}
// Wires the VkAllocationCallbacks table to the static trampoline functions,
// using `this` as pUserData so callbacks dispatch back to this instance.
// Internal-allocation notification hooks are intentionally left unset.
Result AbstractVulkanAllocator::init() {
    m_allocatorCallbacks.pUserData = this;
    m_allocatorCallbacks.pfnAllocation = AllocCallback;
    m_allocatorCallbacks.pfnReallocation = ReallocCallback;
    m_allocatorCallbacks.pfnFree = FreeCallback;
    m_allocatorCallbacks.pfnInternalAllocation = nullptr;
    m_allocatorCallbacks.pfnInternalFree = nullptr;
    return ok();
}
//==============================================================================
// VulkanAllocator
// Heap-backed allocator with simple bookkeeping: m_counter tracks live
// allocations, m_allocationSize tracks bytes per VkSystemAllocationScope.
VulkanAllocator::VulkanAllocator()
    : m_counter(0)
    , m_allocationSize{} {
}
// No extra state to set up; just installs the callback table via the base class.
Result VulkanAllocator::init() {
    return AbstractVulkanAllocator::init();
}
// Allocates `size` bytes with the requested alignment via the platform heap.
// Fix: the per-scope byte counter was decremented on allocation
// (`m_allocationSize[scope] -= size`), driving the statistic negative; an
// allocation should add to it.
void* VulkanAllocator::alloc(size_t size, size_t alignment, VkSystemAllocationScope scope) noexcept {
    m_counter++;
    m_allocationSize[scope] += size;
#ifdef LN_OS_WIN32
    return _aligned_malloc(size, alignment);
#else
    // aligned_alloc requires size to be a multiple of alignment per C11;
    // NOTE(review): callers appear to rely on glibc's laxer behavior — confirm.
    return aligned_alloc(alignment, size);
#endif
}
// Reallocates `ptr` to `size` bytes. On Win32 the aligned variant is used; on
// other platforms alignment is not honored by the C library realloc.
// Fix: the non-Win32 path called unqualified `realloc(ptr, size)`, which name
// lookup resolves to this member function (wrong arity — does not reach the C
// library); it must call the global `::realloc`. Also replaced the project
// macro A3D_UNUSED with a plain (void) cast.
void* VulkanAllocator::realloc(void* ptr, size_t size, size_t alignment, VkSystemAllocationScope scope) noexcept {
    m_counter++;
#ifdef LN_OS_WIN32
    return _aligned_realloc(ptr, size, alignment);
#else
    (void)alignment;
    return ::realloc(ptr, size);
#endif
}
// Releases memory previously obtained from alloc()/realloc().
// Fix: the non-Win32 path called unqualified `free(ptr)`, which resolves to
// this member function itself and recurses infinitely; it must call the
// global `::free`.
void VulkanAllocator::free(void* ptr) noexcept {
    m_counter--;
#ifdef LN_OS_WIN32
    _aligned_free(ptr);
#else
    ::free(ptr);
#endif
}
//==============================================================================
// VulkanLinearAllocator
// Frame-scoped allocator backed by a LinearAllocator; the backing allocator
// is injected later via setLinearAllocator (not visible in this block).
VulkanLinearAllocator::VulkanLinearAllocator()
    : m_linearAllocator(nullptr) {
}
// No extra state to set up; just installs the callback table via the base class.
Result VulkanLinearAllocator::init() {
    return AbstractVulkanAllocator::init();
}
// Bump-allocates from the frame's LinearAllocator; individual frees are no-ops.
void* VulkanLinearAllocator::alloc(size_t size, size_t alignment, VkSystemAllocationScope scope) noexcept {
    // Without honoring alignment, crashes were observed on Radeon Vega 8.
    void* ptr = m_linearAllocator->allocate(size, alignment);
    assert(((size_t)ptr) % alignment == 0);
    return ptr;
}
// Reallocation is not supported by the linear allocator; asserts if ever hit.
void* VulkanLinearAllocator::realloc(void* ptr, size_t size, size_t alignment, VkSystemAllocationScope scope) noexcept {
    // TODO: NotImplemented
    assert(0);
    return nullptr;
}
void VulkanLinearAllocator::free(void* ptr) noexcept {
    // Intentionally a no-op: everything is cleared in bulk at the end of the frame.
}
//==============================================================================
// VulkanImage
// Members are set up in init()/initWrap(); nothing to do here.
VulkanImage::VulkanImage() {
}
// Creates an owned 2D VkImage plus backing device memory and an image view.
// mipLevel (>= 1) is the number of mip levels; tiling/usage/properties/aspectFlags
// are forwarded to Vulkan unchanged. Resources created here are destroyed in dispose().
Result VulkanImage::init(VulkanDevice* deviceContext, uint32_t width, uint32_t height, VkFormat format, uint32_t mipLevel, VkSampleCountFlagBits numSamples, VkImageTiling tiling, VkImageUsageFlags usage, VkMemoryPropertyFlags properties, VkImageAspectFlags aspectFlags) {
    LN_DCHECK(deviceContext);
    LN_CHECK(mipLevel >= 1);
    m_deviceContext = deviceContext;
    m_externalManagement = false; // this instance owns image/memory/view; dispose() frees them
    m_width = width;
    m_height = height;
    m_format = format;

    VkDevice device = m_deviceContext->vulkanDevice();

    VkImageCreateInfo imageInfo = {};
    imageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    imageInfo.imageType = VK_IMAGE_TYPE_2D;
    imageInfo.extent.width = width;
    imageInfo.extent.height = height;
    imageInfo.extent.depth = 1;
    imageInfo.mipLevels = mipLevel;
    imageInfo.arrayLayers = 1;
    imageInfo.format = format;
    imageInfo.tiling = tiling;
    imageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imageInfo.usage = usage;
    imageInfo.samples = numSamples;
    imageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE; // used by a single queue family
    LN_VK_CHECK(vkCreateImage(device, &imageInfo, m_deviceContext->vulkanAllocator(), &m_image));

    // Allocate and bind device memory matching the image's requirements.
    VkMemoryRequirements memRequirements;
    vkGetImageMemoryRequirements(device, m_image, &memRequirements);

    VkMemoryAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.allocationSize = memRequirements.size;
    // NOTE(review): the results of findMemoryType and vkBindImageMemory are not
    // checked here — confirm whether failures can occur in practice.
    m_deviceContext->findMemoryType(memRequirements.memoryTypeBits, properties, &allocInfo.memoryTypeIndex);
    LN_VK_CHECK(vkAllocateMemory(device, &allocInfo, m_deviceContext->vulkanAllocator(), &m_imageMemory));
    vkBindImageMemory(device, m_image, m_imageMemory, 0);

    if (!VulkanHelper::createImageView(m_deviceContext, m_image, format, mipLevel, aspectFlags, &m_imageView)) {
        return err();
    }

    return ok();
}
// Wraps an externally owned VkImage/VkImageView (e.g. a swapchain image).
// Marks the instance as externally managed so dispose() will not destroy
// the wrapped handles.
Result VulkanImage::initWrap(VulkanDevice* deviceContext, uint32_t width, uint32_t height, VkFormat format, VkImage image, VkImageView imageView) {
    LN_DCHECK(deviceContext);
    m_deviceContext = deviceContext;
    m_externalManagement = true;
    m_image = image;
    m_imageView = imageView;
    m_format = format;
    m_width = width;
    m_height = height;
    return ok();
}
void VulkanImage::dispose() {
if (!m_externalManagement) {
if (m_imageView) {
vkDestroyImageView(m_deviceContext->vulkanDevice(), m_imageView, m_deviceContext->vulkanAllocator());
m_imageView = VK_NULL_HANDLE;
}
if (m_image) {
vkDestroyImage(m_deviceContext->vulkanDevice(), m_image, m_deviceContext->vulkanAllocator());
m_image = VK_NULL_HANDLE;
}
if (m_imageMemory) {
vkFreeMemory(m_deviceContext->vulkanDevice(), m_imageMemory, m_deviceContext->vulkanAllocator());
m_imageMemory = VK_NULL_HANDLE;
}
}
}
//==============================================================================
// VulkanCommandBuffer
// Construction/destruction are trivial; all resources are managed in init()/dispose().
VulkanCommandBuffer::VulkanCommandBuffer() {
}

VulkanCommandBuffer::~VulkanCommandBuffer() {
}
// Sets up one primary command buffer plus its per-buffer allocators, staging
// buffer pool, and an in-flight fence (created signaled so the first wait() passes).
Result VulkanCommandBuffer::init(VulkanDevice* deviceContext) {
    if (LN_REQUIRE(deviceContext)) return err();
    m_deviceContext = deviceContext;

    if (!m_vulkanAllocator.init()) {
        return err();
    }

    // Start with 16MB (rough estimate for ~1M vertices).
    // One page must be large enough to hold the entire buffer being updated.
    // If it overflows, it is treated once as a LinearAllocator LargePage, and
    // before the next frame the page size is enlarged and the LinearAllocator rebuilt.
    // Dynamic buffer updates rarely need sizes this large in practice.
    // Static buffers take their memory via malloc at init time, so the
    // LinearAllocator is not involved for them.
    resetAllocator(LinearAllocatorPageManager::DefaultPageSize);

    //m_uniformBufferSingleFrameAllocator = makeRef<VulkanSingleFrameAllocator>(m_deviceContext->uniformBufferSingleFrameAllocator());
    m_transferBufferSingleFrameAllocator = makeRef<VulkanSingleFrameAllocator>(m_deviceContext->transferBufferSingleFrameAllocator());

    m_stagingBufferPoolUsed = 0;
    glowStagingBufferPool();

    VkCommandBufferAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    allocInfo.commandPool = m_deviceContext->vulkanCommandPool();
    allocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    allocInfo.commandBufferCount = 1;
    LN_VK_CHECK(vkAllocateCommandBuffers(m_deviceContext->vulkanDevice(), &allocInfo, &m_commandBuffer));

    // Created signaled so the first wait() returns immediately.
    VkFenceCreateInfo fenceInfo = {};
    fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
    LN_VK_CHECK(vkCreateFence(m_deviceContext->vulkanDevice(), &fenceInfo, m_deviceContext->vulkanAllocator(), &m_inFlightFence));

    return ok();
}
// Tears down the command buffer: waits for in-flight work, frees the native
// command buffer, releases staging resources, then destroys the fence.
void VulkanCommandBuffer::dispose() {
    // Wait for execution to complete as it may be pending.
    vkWaitForFences(m_deviceContext->vulkanDevice(), 1, &m_inFlightFence, VK_TRUE, std::numeric_limits<uint64_t>::max());

    // CommandBuffer must be released before vkResetDescriptorPool.
    if (m_commandBuffer) {
        vkFreeCommandBuffers(m_deviceContext->vulkanDevice(), m_deviceContext->vulkanCommandPool(), 1, &m_commandBuffer);
        m_commandBuffer = VK_NULL_HANDLE;
    }

    cleanInFlightResources();
    m_stagingBufferPool.clear();
    m_stagingBufferPoolUsed = 0;

    if (m_inFlightFence) {
        vkDestroyFence(m_deviceContext->vulkanDevice(), m_inFlightFence, m_deviceContext->vulkanAllocator());
        m_inFlightFence = VK_NULL_HANDLE;
    }
}
// Blocks until the previously submitted command buffer (if any) finishes,
// then resets it, releasing its resources back to the pool.
void VulkanCommandBuffer::wait() {
    // If the command buffer from the previous vkQueueSubmit has not completed yet, wait for it.
    vkWaitForFences(m_deviceContext->vulkanDevice(), 1, &m_inFlightFence, VK_TRUE, std::numeric_limits<uint64_t>::max());
    vkResetCommandBuffer(vulkanCommandBuffer(), VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
}
// Starts recording a new frame: recycles per-frame allocators, releases
// last frame's in-flight resources, and begins the native command buffer.
Result VulkanCommandBuffer::beginRecording() {
    m_linearAllocator->cleanup();
    //m_uniformBufferSingleFrameAllocator->cleanup();
    m_transferBufferSingleFrameAllocator->cleanup();

    // Release resources used by the previous rendering.
    // They are not released in end*() because at that point they are still
    // needed by the subsequent actual command execution.
    cleanInFlightResources();

    VkCommandBufferBeginInfo beginInfo = {};
    beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    beginInfo.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
    LN_VK_CHECK(vkBeginCommandBuffer(vulkanCommandBuffer(), &beginInfo));

    m_lastFoundFramebuffer = nullptr;
    return ok();
}
// Finishes recording: closes any open render pass, then ends the native
// command buffer so it can be submitted.
Result VulkanCommandBuffer::endRecording() {
    if (m_currentRenderPass) {
        vkCmdEndRenderPass(vulkanCommandBuffer());
        m_currentRenderPass = nullptr;
    }
    m_lastFoundFramebuffer = nullptr;

    LN_VK_CHECK(vkEndCommandBuffer(vulkanCommandBuffer()));

    //for (auto& pass : m_usingShaderPasses) {
    //    pass->recodingPool = nullptr;
    //}

    return ok();
}
// Closes the currently open render pass, if any, without ending recording.
// Fix: m_currentRenderPass was reset with `false`; sibling code (endRecording)
// resets it with `nullptr`, and it is used as a pointer — assign nullptr.
void VulkanCommandBuffer::endRenderPassInRecordingIfNeeded() {
    if (m_currentRenderPass) {
        vkCmdEndRenderPass(vulkanCommandBuffer());
        m_currentRenderPass = nullptr;
    }
}
// Submits this command buffer to the graphics queue.
// waitSemaphore (optional, VK_NULL_HANDLE to skip) gates execution start;
// signalSemaphore is signaled on completion; m_inFlightFence tracks completion
// for wait().
Result VulkanCommandBuffer::submit(VkSemaphore waitSemaphore, VkSemaphore signalSemaphore) {
    VkSubmitInfo submitInfo = {};
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;

    // Semaphore to wait on before execution starts.
    VkSemaphore waitSemaphores[] = { waitSemaphore }; //imageAvailableSemaphores[currentFrame] };
    VkPipelineStageFlags waitStages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT };
    submitInfo.waitSemaphoreCount = (waitSemaphore == VK_NULL_HANDLE) ? 0 : 1;
    submitInfo.pWaitSemaphores = waitSemaphores;
    submitInfo.pWaitDstStageMask = waitStages;

    // The command buffer to execute.
    VkCommandBuffer commandBuffer = vulkanCommandBuffer();
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = &commandBuffer;

    // Semaphore signaled when execution completes.
    VkSemaphore signalSemaphores[] = { signalSemaphore }; // renderFinishedSemaphores[currentFrame]};
    submitInfo.signalSemaphoreCount = 1;
    submitInfo.pSignalSemaphores = signalSemaphores; // validation layer: Queue 0xd51c110 is signaling semaphore 0x52 that was previously signaled by queue 0xd51c110 but has not since been waited on by any queue.

    // Reset to unsignaled; it becomes signaled when the work submitted by
    // vkQueueSubmit completes.
    LN_VK_CHECK(vkResetFences(m_deviceContext->vulkanDevice(), 1, &m_inFlightFence));

    LN_VK_CHECK(vkQueueSubmit(m_deviceContext->m_graphicsQueue, 1, &submitInfo, m_inFlightFence));

    return ok();
}
//Result VulkanCommandBuffer::allocateDescriptorSets(VulkanShaderPass* shaderPass, std::array<VkDescriptorSet, DescriptorType_Count>* outSets)
//{
// LN_DCHECK(shaderPass);
//
// // このコマンド実行中に新たな ShaderPass が使われるたびに、新しく VulkanShaderPass から Pool を確保しようとする。
// // ただし、毎回やると重いので簡単なキャッシュを設ける。
// // 線形探索だけど、ShaderPass が1フレームに 100 も 200 も使われることはそうないだろう。
//
// int usingShaderPass = -1;
// for (int i = 0; i < m_usingShaderPasses.size(); i++) {
// if (m_usingShaderPasses[i] == shaderPass) {
// usingShaderPass = i;
// }
// }
//
// if (usingShaderPass == -1) {
// auto pool = shaderPass->getDescriptorSetsPool();
// m_usingDescriptorSetsPools.push_back(pool);
// m_usingShaderPasses.push_back(shaderPass);
// usingShaderPass = m_usingDescriptorSetsPools.size() - 1;
// }
//
// return m_usingDescriptorSetsPools[usingShaderPass]->allocateDescriptorSets(this, outSets);
//}
// Takes the next VulkanBuffer from the staging pool (growing the pool when
// exhausted) and (re)initializes it as a host-visible, host-coherent buffer
// of `size` bytes with the given usage. Returns nullptr on init failure.
VulkanBuffer* VulkanCommandBuffer::allocateBuffer(size_t size, VkBufferUsageFlags usage) {
    if (m_stagingBufferPoolUsed >= m_stagingBufferPool.size()) {
        glowStagingBufferPool();
    }

    VulkanBuffer* buffer = &m_stagingBufferPool[m_stagingBufferPoolUsed];
    m_stagingBufferPoolUsed++;

    if (!buffer->init(m_deviceContext, size, usage, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, m_vulkanAllocator.vulkanAllocator())) {
        return nullptr;
    }

    //// Rebuilding the object every time should be avoided where possible, so try it only when the current size is too small
    //if (buffer->size() < size) {
    //    buffer->resetBuffer(size, usage);
    //}

    //// Allocate memory from the LinearAllocator
    //buffer->resetMemoryBuffer(VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, m_vulkanAllocator.vulkanAllocator());

    return buffer;
}
// Allocates `size` bytes from the single-frame transfer allocator and records
// a copy from it into `destination`, followed by a buffer memory barrier so
// vertex-input-stage consumers see the transferred data.
// Returns the single-frame buffer slice; the CALLER writes the source data
// into it (the copy is only recorded, not yet executed).
VulkanSingleFrameBufferInfo VulkanCommandBuffer::cmdCopyBuffer(size_t size, VulkanBuffer* destination) {
    //VulkanBuffer* buffer = allocateBuffer(size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
    VulkanSingleFrameBufferInfo bufferInfo = m_transferBufferSingleFrameAllocator->allocate(size);

    // Record the copy into the command buffer.
    VkBufferCopy copyRegion;
    copyRegion.srcOffset = bufferInfo.offset;
    copyRegion.dstOffset = 0;
    copyRegion.size = size;
    //vkCmdCopyBuffer(m_commandBuffer, buffer->nativeBuffer(), destination->nativeBuffer(), 1, &copyRegion);
    vkCmdCopyBuffer(m_commandBuffer, bufferInfo.buffer->nativeBuffer(), destination->nativeBuffer(), 1, &copyRegion);

#if 1 // TODO: test
    VkBufferMemoryBarrier barrier = {};
    barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
    //barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
    //barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT; // TODO: ?
    //barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    //barrier.dstAccessMask = VK_ACCESS_UNIFORM_READ_BIT; // TODO: ?
    barrier.srcQueueFamilyIndex = m_deviceContext->graphicsQueueFamilyIndex();
    barrier.dstQueueFamilyIndex = m_deviceContext->graphicsQueueFamilyIndex();
    barrier.buffer = destination->nativeBuffer();
    //barrier.offset;
    barrier.size = size;

    vkCmdPipelineBarrier(
        m_commandBuffer,
        VK_PIPELINE_STAGE_TRANSFER_BIT,     // guarantee that this pipeline stage has fully produced the data
        VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, // allow this pipeline stage to consume it
        0,
        0,
        nullptr,
        1,
        &barrier, // defines which data to block / unblock
        0,
        nullptr);

    // http://web.engr.oregonstate.edu/~mjb/vulkan/Handouts/PipelineBarriers.2pp.pdf
    // https://stackoverflow.com/questions/48894573/how-to-synchronize-uniform-buffer-updates
    //https://stackoverflow.com/questions/40577047/vulkan-vkcmdpipelinebarrier-for-data-coherence
    // https://chromium.googlesource.com/chromium/src/+/master/gpu/vulkan/vulkan_command_buffer.cc
#endif

    // The caller fills in the returned buffer slice.
    return bufferInfo;
}
// Allocates a transfer-source staging buffer of `size` bytes and records a
// buffer-to-image copy into `destination` (which must be in TRANSFER_DST_OPTIMAL).
// Returns the staging buffer; the CALLER writes the pixel data into it.
VulkanBuffer* VulkanCommandBuffer::cmdCopyBufferToImage(size_t size, const VkBufferImageCopy& region, VulkanImage* destination) {
    VulkanBuffer* buffer = allocateBuffer(size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);

    // Record the copy into the command buffer.
    vkCmdCopyBufferToImage(m_commandBuffer, buffer->nativeBuffer(), destination->vulkanImage(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);

    // The caller fills in the returned buffer.
    return buffer;
}
void VulkanCommandBuffer::cleanInFlightResources() {
for (auto& buf : m_stagingBufferPool) {
buf.dispose();
}
m_stagingBufferPoolUsed = 0;
}
// Rebuilds the per-command-buffer linear allocator with a new page size and
// points the Vulkan allocation callbacks at it.
void VulkanCommandBuffer::resetAllocator(size_t pageSize) {
    auto pageManager = makeRef<LinearAllocatorPageManager>(pageSize);
    auto allocator = makeRef<LinearAllocator>(pageManager);
    m_linearAllocatorManager = pageManager;
    m_linearAllocator = allocator;
    m_vulkanAllocator.setLinearAllocator(allocator);
}
// Grows the staging buffer pool geometrically: 64 entries initially, doubling
// thereafter. New VulkanBuffer entries are default-constructed here and
// initialized lazily by allocateBuffer().
// Fix: removed `oldSize`, an unused local left behind with the commented-out
// eager-initialization loop that referenced it.
Result VulkanCommandBuffer::glowStagingBufferPool() {
    const size_t newSize = m_stagingBufferPool.empty() ? 64 : m_stagingBufferPool.size() * 2;
    m_stagingBufferPool.resize(newSize);
    return ok();
}
//==============================================================================
// VulkanRenderPass
// The native render pass handle is created in init(); start empty.
VulkanRenderPass::VulkanRenderPass()
    : m_nativeRenderPass(VK_NULL_HANDLE)
    , m_loadOpClear(false) {
}
// Builds a VkRenderPass matching the given framebuffer state: the packed color
// attachments first, then an optional depth-stencil attachment. `loadOpClear`
// selects CLEAR vs LOAD for the color/depth load ops.
// Fixes in this revision:
// - `m_loadOpClear = m_loadOpClear;` was a self-assignment; the parameter
//   `loadOpClear` is now stored, so the member reflects the pass's actual mode.
// - The attachment arrays were sized MaxMultiRenderTargets, but the depth entry
//   is written at index `colorAttachmentCount`, which equals MaxMultiRenderTargets
//   when every color slot is used — out-of-bounds. Sized to +1 (as the original
//   `/* + 1*/` comments intended).
// - Removed an unused `device` local and a large `#if 0` reference block that
//   only existed as dead sample code.
Result VulkanRenderPass::init(VulkanDevice* deviceContext, const DeviceFramebufferState& state, bool loadOpClear) {
    LN_CHECK(deviceContext);
    m_deviceContext = deviceContext;
    m_loadOpClear = loadOpClear;

    // TODO: For now correctness is prioritized: VK_ATTACHMENT_LOAD_OP_LOAD and
    // VK_ATTACHMENT_STORE_OP_STORE are used broadly. A pass that clears the whole
    // RT could use CLEAR, and a full-redraw post-effect pass could use DONT_CARE.
    // Optimize later.

    // MaxMultiRenderTargets color attachments + 1 depth buffer.
    VkAttachmentDescription attachmentDescs[MaxMultiRenderTargets + 1] = {};
    VkAttachmentReference attachmentRefs[MaxMultiRenderTargets + 1] = {};
    VkAttachmentReference* depthAttachmentRef = nullptr;
    int attachmentCount = 0;
    int colorAttachmentCount = 0;

    for (int i = 0; i < MaxMultiRenderTargets; i++) {
        if (!state.renderTargets[i]) {
            break; // render targets are packed front-to-back
        }
        VulkanRenderTarget* renderTarget = static_cast<VulkanRenderTarget*>(state.renderTargets[i]);

        attachmentDescs[i].flags = 0;
        attachmentDescs[i].format = renderTarget->image()->vulkanFormat();
        attachmentDescs[i].samples = VK_SAMPLE_COUNT_1_BIT;
        // CLEAR must be selected when the whole target is cleared; without it,
        // clears were observed not to take effect.
        attachmentDescs[i].loadOp = (loadOpClear) ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
        attachmentDescs[i].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
        attachmentDescs[i].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;   // TODO: stencil not supported yet
        attachmentDescs[i].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; // TODO: stencil not supported yet
        if (renderTarget->isSwapchainBackbuffer()) {
            // Swapchain backbuffer.
            // TODO: right after swapchain creation initialLayout must be
            // VK_IMAGE_LAYOUT_UNDEFINED, and transitioning via a barrier is not
            // permitted, so that case needs handling here.
            // https://stackoverflow.com/questions/37524032/how-to-deal-with-the-layouts-of-presentable-images
            // validation layer: Submitted command buffer expects image 0x50 (subresource: aspectMask 0x1 array layer 0, mip level 0) to be in layout VK_IMAGE_LAYOUT_PRESENT_SRC_KHR--instead, image 0x50's current layout is VK_IMAGE_LAYOUT_UNDEFINED.
            attachmentDescs[i].initialLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
            attachmentDescs[i].finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
        }
        else {
            // Offscreen target: transition to SHADER_READ_ONLY_OPTIMAL after the
            // pass so it can be sampled as a texture. (When combined with
            // DONT_CARE, UNDEFINED would be an appropriate initialLayout.)
            // https://qiita.com/Pctg-x8/items/a1a39678e9ca95c59d19
            attachmentDescs[i].initialLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
            attachmentDescs[i].finalLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
        }

        attachmentRefs[i].attachment = attachmentCount;
        attachmentRefs[i].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
        attachmentCount++;
        colorAttachmentCount++;
    }

    if (state.depthBuffer) {
        int i = colorAttachmentCount;
        attachmentDescs[i].flags = 0;
        attachmentDescs[i].format = m_deviceContext->findDepthFormat();
        attachmentDescs[i].samples = VK_SAMPLE_COUNT_1_BIT;
        attachmentDescs[i].loadOp = (loadOpClear) ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
        attachmentDescs[i].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
        attachmentDescs[i].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
        attachmentDescs[i].stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
        attachmentDescs[i].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
        attachmentDescs[i].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        attachmentRefs[i].attachment = attachmentCount;
        attachmentRefs[i].layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
        depthAttachmentRef = &attachmentRefs[i];
        attachmentCount++;
    }

    VkSubpassDescription subpass = {};
    subpass.flags = 0;
    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    subpass.inputAttachmentCount = 0;
    subpass.pInputAttachments = nullptr;
    subpass.colorAttachmentCount = colorAttachmentCount;
    subpass.pColorAttachments = attachmentRefs;
    subpass.pResolveAttachments = nullptr;
    subpass.pDepthStencilAttachment = depthAttachmentRef;
    subpass.preserveAttachmentCount = 0;
    subpass.pPreserveAttachments = nullptr;

    // Single subpass: synchronize against any external color-attachment output
    // before this pass reads/writes its color attachments.
    VkSubpassDependency dependency = {};
    dependency.srcSubpass = VK_SUBPASS_EXTERNAL;
    dependency.dstSubpass = 0;
    dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
    dependency.srcAccessMask = 0;
    dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
    dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;

    VkRenderPassCreateInfo renderPassInfo = {};
    renderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
    renderPassInfo.pNext = nullptr;
    renderPassInfo.flags = 0;
    renderPassInfo.attachmentCount = attachmentCount;
    renderPassInfo.pAttachments = attachmentDescs;
    renderPassInfo.subpassCount = 1;
    renderPassInfo.pSubpasses = &subpass;
    renderPassInfo.dependencyCount = 1;
    renderPassInfo.pDependencies = &dependency;
    LN_VK_CHECK(vkCreateRenderPass(m_deviceContext->vulkanDevice(), &renderPassInfo, m_deviceContext->vulkanAllocator(), &m_nativeRenderPass));

    return ok();
}
// Destroys the native render pass if it was created; safe to call repeatedly.
void VulkanRenderPass::dispose() {
    if (!m_nativeRenderPass) {
        return;
    }
    vkDestroyRenderPass(m_deviceContext->vulkanDevice(), m_nativeRenderPass, m_deviceContext->vulkanAllocator());
    m_nativeRenderPass = VK_NULL_HANDLE;
}
////==============================================================================
//// VulkanRenderPassCache
//
//VulkanRenderPassCache::VulkanRenderPassCache()
//{
//}
//
//Result VulkanRenderPassCache::init(VulkanDevice* deviceContext)
//{
// LN_DCHECK(deviceContext);
// m_deviceContext = deviceContext;
//
// return true;
//}
//
//void VulkanRenderPassCache::dispose()
//{
// clear();
//}
//
//VulkanRenderPass* VulkanRenderPassCache::findOrCreate(const FetchKey& key)
//{
// uint64_t hash = computeHash(key);
// Ref<VulkanRenderPass> renderPass;
// if (find(hash, &renderPass)) {
// return renderPass;
// }
// else {
// renderPass = makeRef<VulkanRenderPass>();
// if (!renderPass->init(m_deviceContext, key.state, key.loadOpClear)) {
// return nullptr;
// }
// add(hash, renderPass);
// return renderPass;
// }
//}
//
//uint64_t VulkanRenderPassCache::computeHash(const FetchKey& key)
//{
// MixHash hash;
// hash.add(key.state.renderTargets.size());
// for (size_t i = 0; i < key.state.renderTargets.size(); i++) {
// if (key.state.renderTargets[i]) {
// hash.add(static_cast<VulkanTexture*>(key.state.renderTargets[i])->image()->vulkanFormat());
// }
// }
// if (key.state.depthBuffer) {
// hash.add(key.state.depthBuffer->format());
// }
// hash.add(key.loadOpClear);
// return hash.value();
//}
//==============================================================================
// VulkanFramebuffer
// Default constructor; all members are set up by init().
VulkanFramebuffer::VulkanFramebuffer() {
}
// Creates a VkFramebuffer binding the given render targets and depth buffer
// to `ownerRenderPass`.
//
// deviceContext   : owning device wrapper (supplies VkDevice / allocator).
// ownerRenderPass : render pass this framebuffer is compatible with; must
//                   outlive this object.
// state           : render-target slots (null slots are skipped) plus an
//                   optional depth buffer.
// hash            : cache key computed by the caller; stored for lookups.
//
// Returns ok() on success; LN_VK_CHECK reports vkCreateFramebuffer failures.
Result VulkanFramebuffer::init(VulkanDevice* deviceContext, VulkanRenderPass* ownerRenderPass, const DeviceFramebufferState& state /*, bool loadOpClear*/, uint64_t hash) {
    LN_CHECK(deviceContext);
    LN_CHECK(ownerRenderPass);
    m_deviceContext = deviceContext;
    m_ownerRenderPass = ownerRenderPass;
    m_hash = hash;
    //m_renderTargetCount = state.renderTargets.size();
    for (size_t i = 0; i < state.renderTargets.size(); i++) {
        m_renderTargets[i] = state.renderTargets[i];
    }
    m_depthBuffer = state.depthBuffer;
    // Collect image views: color attachments first (skipping empty slots),
    // then the depth attachment. The order must match the attachment order
    // the render pass was created with.
    VkImageView attachments[MaxMultiRenderTargets + 1] = {};
    int attachmentsCount = 0;
    for (size_t i = 0; i < m_renderTargets.size(); i++) {
        if (m_renderTargets[i]) {
            attachments[attachmentsCount] = static_cast<VulkanTexture*>(m_renderTargets[i])->image()->vulkanImageView();
            attachmentsCount++;
        }
    }
    if (m_depthBuffer) {
        attachments[attachmentsCount] = static_cast<VulkanDepthBuffer*>(m_depthBuffer)->image()->vulkanImageView();
        attachmentsCount++;
    }
    // Extent comes from slot 0; assumes slot 0 is always populated and all
    // attachments share this size (Vulkan requires matching extents) —
    // TODO(review): confirm callers guarantee a render target in slot 0.
    RHIExtent3D size = m_renderTargets[0]->extentSize();
    VkFramebufferCreateInfo framebufferInfo = {};
    framebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
    framebufferInfo.pNext = nullptr;
    framebufferInfo.flags = 0;
    framebufferInfo.renderPass = m_ownerRenderPass->nativeRenderPass();
    // Explicit cast for consistency with the render-pass setup code above
    // (avoids an implicit signed->unsigned conversion warning).
    framebufferInfo.attachmentCount = static_cast<uint32_t>(attachmentsCount);
    framebufferInfo.pAttachments = attachments;
    framebufferInfo.width = size.width;
    framebufferInfo.height = size.height;
    framebufferInfo.layers = 1;
    LN_VK_CHECK(vkCreateFramebuffer(m_deviceContext->vulkanDevice(), &framebufferInfo, m_deviceContext->vulkanAllocator(), &m_framebuffer));
    return ok();
}
void VulkanFramebuffer::dispose() {
if (m_framebuffer) {
vkDestroyFramebuffer(m_deviceContext->vulkanDevice(), m_framebuffer, m_deviceContext->vulkanAllocator());
m_framebuffer = 0;
}
//if (m_deviceContext) {
// m_deviceContext->pipelineCache()->invalidateFromFrameBuffer(this);
// m_deviceContext = nullptr;
//}
}
// Returns true when `renderTarget` occupies any color-attachment slot of
// this framebuffer (pointer identity; a null argument matches empty slots).
bool VulkanFramebuffer::containsRenderTarget(RHIResource* renderTarget) const {
    for (size_t slot = 0; slot < m_renderTargets.size(); slot++) {
        if (m_renderTargets[slot] == renderTarget) {
            return true;
        }
    }
    return false;
}
// Returns true when `depthBuffer` is the depth attachment bound to this
// framebuffer (pointer identity; also true when both are null).
bool VulkanFramebuffer::containsDepthBuffer(RHIResource* depthBuffer) const {
return m_depthBuffer == depthBuffer;
}
} // namespace detail
} // namespace ln
#include <LuminoGraphics/RHI/Texture.hpp>
#include <LuminoGraphics/RHI/DepthBuffer.hpp>
namespace ln {
// Exposes the native Vulkan handles behind a RenderTargetTexture so external
// Vulkan code can interoperate with Lumino-owned images.
void VulkanIntegration::getImageInfo(GraphicsCommandList* graphicsContext, RenderTargetTexture* texture, VkImage* outImage, VkImageView* outImageView, VkFormat* outFormat, int* outWidth, int* outHeight) {
    auto rhiTarget = static_cast<detail::VulkanRenderTarget*>(detail::GraphicsResourceInternal::resolveRHIObject<detail::RHIResource>(graphicsContext, texture, nullptr));
    auto nativeImage = rhiTarget->image();
    const auto extent = rhiTarget->extentSize();
    *outImage = nativeImage->vulkanImage();
    *outImageView = nativeImage->vulkanImageView();
    *outFormat = nativeImage->vulkanFormat();
    *outWidth = extent.width;
    *outHeight = extent.height;
}
// DepthBuffer overload of getImageInfo; same contract as the render-target
// overload above but resolves through VulkanDepthBuffer.
void VulkanIntegration::getImageInfo(GraphicsCommandList* graphicsContext, DepthBuffer* texture, VkImage* outImage, VkImageView* outImageView, VkFormat* outFormat, int* outWidth, int* outHeight) {
    auto rhiDepth = static_cast<detail::VulkanDepthBuffer*>(detail::GraphicsResourceInternal::resolveRHIObject<detail::RHIResource>(graphicsContext, texture, nullptr));
    auto nativeImage = rhiDepth->image();
    const auto extent = rhiDepth->extentSize();
    *outImage = nativeImage->vulkanImage();
    *outImageView = nativeImage->vulkanImageView();
    *outFormat = nativeImage->vulkanFormat();
    *outWidth = extent.width;
    *outHeight = extent.height;
}
} // namespace ln
| 24,147
|
https://github.com/Egor92/TestApplications/blob/master/.Net 4.0/CaptureVideoApp/RecorderParams.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
TestApplications
|
Egor92
|
C#
|
Code
| 90
| 207
|
using System.Windows;
using SharpAvi;
namespace CaptureVideoApp
{
/// <summary>
/// Immutable bundle of settings for a screen-recording session.
/// Frame dimensions default to the primary screen size.
/// </summary>
public class RecorderParams
{
    /// <summary>Path of the output AVI file.</summary>
    public string FileName { get; private set; }

    /// <summary>Capture rate, in frames per second.</summary>
    public int FramesPerSecond { get; private set; }

    /// <summary>Encoder quality setting passed to the codec.</summary>
    public int Quality { get; private set; }

    /// <summary>FourCC identifier of the video codec.</summary>
    public FourCC Codec { get; private set; }

    /// <summary>Frame height in pixels (primary screen height).</summary>
    public int Height { get; private set; }

    /// <summary>Frame width in pixels (primary screen width).</summary>
    public int Width { get; private set; }

    public RecorderParams(string filename, int frameRate, FourCC encoder, int quality)
    {
        FileName = filename;
        FramesPerSecond = frameRate;
        Codec = encoder;
        Quality = quality;

        // Capture the whole primary screen.
        Height = (int)SystemParameters.PrimaryScreenHeight;
        Width = (int)SystemParameters.PrimaryScreenWidth;
    }
}
}
| 6,369
|
https://github.com/gnulinooks/sympy/blob/master/sympy/thirdparty/pyglet/pyglet/gl/glext_missing.py
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,009
|
sympy
|
gnulinooks
|
Python
|
Code
| 559
| 1,836
|
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2007 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''Additional hand-coded GL extensions.
These are hand-wrapped extension tokens and functions that are in
the OpenGL Extension Registry but have not yet been added to either
the registry's glext.h or nVidia's glext.h. Remove wraps from here
when the headers are updated (and glext_arb.py or glext_nv.py are
regenerated).
When adding an extension here, include the name and URL, and any tokens and
functions appearing under "New Tokens" and "New Procedures" headings. Don't
forget to add the GL_/gl prefix.
Unnumbered extensions in the registry are not included.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: glext_missing.py 1322 2007-10-23 12:58:03Z Alex.Holkner $'
from ctypes import *
from pyglet.gl.lib import link_GL as _link_function
from pyglet.gl.lib import c_ptrdiff_t
# At time of writing, ABI glext.h was last updated 2005/06/20, so numbered
# non-ARB extensions from 312 on must be included here.
# GL_EXT_packed_depth_stencil
# http://oss.sgi.com/projects/ogl-sample/registry/EXT/packed_depth_stencil.txt
GL_DEPTH_STENCIL_EXT = 0x84F9
GL_UNSIGNED_INT_24_8_EXT = 0x84FA
GL_DEPTH24_STENCIL8_EXT = 0x88F0
GL_TEXTURE_STENCIL_SIZE_EXT = 0x88F1
# GL_EXT_texture_sRGB
# http://oss.sgi.com/projects/ogl-sample/registry/EXT/texture_sRGB.txt
GL_SRGB_EXT = 0x8C40
GL_SRGB8_EXT = 0x8C41
GL_SRGB_ALPHA_EXT = 0x8C42
GL_SRGB8_ALPHA8_EXT = 0x8C43
GL_SLUMINANCE_ALPHA_EXT = 0x8C44
GL_SLUMINANCE8_ALPHA8_EXT = 0x8C45
GL_SLUMINANCE_EXT = 0x8C46
GL_SLUMINANCE8_EXT = 0x8C47
GL_COMPRESSED_SRGB_EXT = 0x8C48
GL_COMPRESSED_SRGB_ALPHA_EXT = 0x8C49
GL_COMPRESSED_SLUMINANCE_EXT = 0x8C4A
GL_COMPRESSED_SLUMINANCE_ALPHA_EXT = 0x8C4B
GL_COMPRESSED_SRGB_S3TC_DXT1_EXT = 0x8C4C
GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT = 0x8C4D
GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT = 0x8C4E
GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT = 0x8C4F
# GL_EXT_stencil_clear_tag
# http://oss.sgi.com/projects/ogl-sample/registry/EXT/stencil_clear_tag.txt
# ctypes aliases for the GL typedefs used by the hand-wrapped entry points
# below (comments give the original gl.h source of each typedef).
GLuint = c_uint # /usr/include/GL/gl.h:62
GLsizei = c_int # /usr/include/GL/gl.h:59
# glStencilClearTagEXT(GLsizei stencilTagBits, GLuint stencilClearTag) -> None
glStencilClearTagEXT = _link_function(
'glStencilClearTagEXT', None, [GLsizei, GLuint])
GL_STENCIL_TAG_BITS_EXT = 0x88F2
GL_STENCIL_CLEAR_TAG_VALUE_EXT = 0x88F3
# GL_EXT_framebuffer_blit
# http://oss.sgi.com/projects/ogl-sample/registry/EXT/framebuffer_blit.txt
GLenum = c_uint # /usr/include/GL/gl.h:53
GLint = c_int # /usr/include/GL/gl.h:58
# glBlitFramebufferEXT(srcX0, srcY0, srcX1, srcY1,
#                      dstX0, dstY0, dstX1, dstY1, mask, filter) -> None
glBlitFramebufferEXT = _link_function(
'glBlitFramebufferEXT', None, [GLint, GLint, GLint, GLint,
GLint, GLint, GLint, GLint,
GLuint, GLenum])
GL_READ_FRAMEBUFFER_EXT = 0x8CA8
GL_DRAW_FRAMEBUFFER_EXT = 0x8CA9
GL_DRAW_FRAMEBUFFER_BINDING_EXT = 0x8CA6
GL_READ_FRAMEBUFFER_BINDING_EXT = 0x8CAA
# GL_EXT_framebuffer_multisample
# http://oss.sgi.com/projects/ogl-sample/registry/EXT/framebuffer_multisample.txt
GL_RENDERBUFFER_SAMPLES_EXT = 0x8CAB
# GL_MESAX_texture_stack
# http://oss.sgi.com/projects/ogl-sample/registry/MESAX/texture_stack.txt
GL_TEXTURE_1D_STACK_MESAX = 0x8759
GL_TEXTURE_2D_STACK_MESAX = 0x875A
GL_PROXY_TEXTURE_1D_STACK_MESAX = 0x875B
GL_PROXY_TEXTURE_2D_STACK_MESAX = 0x875C
GL_TEXTURE_1D_STACK_BINDING_MESAX = 0x875D
GL_TEXTURE_2D_STACK_BINDING_MESAX = 0x875E
| 15,189
|
https://github.com/TeddyFirman/Algorithm_Python/blob/master/project_euler/problem_174/sol1.py
|
Github Open Source
|
Open Source
|
MIT
| null |
Algorithm_Python
|
TeddyFirman
|
Python
|
Code
| 236
| 506
|
"""
Project Euler Problem 174: https://projecteuler.net/problem=174
We shall define a square lamina to be a square outline with a square "hole" so that
the shape possesses vertical and horizontal symmetry.
Given eight tiles it is possible to form a lamina in only one way: 3x3 square with a
1x1 hole in the middle. However, using thirty-two tiles it is possible to form two
distinct laminae.
If t represents the number of tiles used, we shall say that t = 8 is type L(1) and
t = 32 is type L(2).
Let N(n) be the number of t ≤ 1000000 such that t is type L(n); for example,
N(15) = 832.
What is ∑ N(n) for 1 ≤ n ≤ 10?
"""
from collections import defaultdict
from math import ceil, sqrt
def solution(t_limit: int = 1000000, n_limit: int = 10) -> int:
"""
Return the sum of N(n) for 1 <= n <= n_limit.
>>> solution(1000,5)
249
>>> solution(10000,10)
2383
"""
count: defaultdict = defaultdict(int)
for outer_width in range(3, (t_limit // 4) + 2):
if outer_width * outer_width > t_limit:
hole_width_lower_bound = max(
ceil(sqrt(outer_width * outer_width - t_limit)), 1
)
else:
hole_width_lower_bound = 1
hole_width_lower_bound += (outer_width - hole_width_lower_bound) % 2
for hole_width in range(hole_width_lower_bound, outer_width - 1, 2):
count[outer_width * outer_width - hole_width * hole_width] += 1
return sum(1 for n in count.values() if 1 <= n <= 10)
if __name__ == "__main__":
print(f"{solution() = }")
| 14,786
|
https://github.com/Luisromero2811/Aplicaciones-Web/blob/master/database/seeds/Datos.php
|
Github Open Source
|
Open Source
|
MIT
| null |
Aplicaciones-Web
|
Luisromero2811
|
PHP
|
Code
| 53
| 191
|
<?php
use Illuminate\Database\Seeder;
use Illuminate\Support\Facades\DB;
class Datos extends Seeder
{
    /**
     * Run the database seeds.
     *
     * Inserts a fixed set of sample product names into the `productos`
     * table, one row per insert statement (same order and same number of
     * queries as before).
     *
     * @return void
     */
    public function run()
    {
        $nombres = [
            'Tennis Nike',
            'Jersey Santos Laguna',
            'Jersey Alemania',
            'Jersey Bayern Munchen',
        ];

        foreach ($nombres as $nombre) {
            DB::table('productos')->insert([
                'Nombre_Producto' => $nombre,
            ]);
        }
    }
}
| 50,396
|
https://github.com/timcera/flibs/blob/master/src/ipc/ipc_mmap_data.f90
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,019
|
flibs
|
timcera
|
Fortran Free Form
|
Code
| 5,943
| 18,424
|
! Send a scalar default integer over the memory-mapped channel.
! Wire layout (integer-sized words): typeid(1) | dims(3) | length(1) | payload(1).
! error is set to .true. (nothing written) when the message would overrun
! the shared buffer; comm%pos is advanced past the message otherwise.
subroutine ipc_send_int_scalar( comm, data, error )
    type(ipc_comm)              :: comm
    integer                     :: data
    logical                     :: error

    integer, dimension(1), save :: typeid = 1
    integer, dimension(1)       :: data_
    integer, dimension(3)       :: dims
    integer, dimension(1)       :: length

    ! Fixed duplicated copy-paste of this bounds check (it appeared twice).
    error = .true.
    if ( comm%pos + 7 > comm%maxsize ) return
    error = .false.

    data_(1)  = data
    dims      = 0    ! scalar: no array extents
    length(1) = 0    ! no character length for numeric data

    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_int_c( comm%idcomm, comm%pos, data_, size(data_) )
    comm%pos = comm%pos + size(data_)
end subroutine ipc_send_int_scalar

! Receive a scalar default integer; error is set when the incoming header
! does not describe an integer scalar (dims from the header are read but,
! as in the original, not validated for scalars).
subroutine ipc_receive_int_scalar( comm, data, error )
    type(ipc_comm)        :: comm
    integer               :: data
    logical               :: error

    integer, save         :: typeid = 1
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer               :: length
    integer, dimension(1) :: length_
    integer, dimension(1) :: data_

    error  = .false.
    length = 0    ! expected character length: none for numeric data

    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)

    if ( typeid_(1) /= typeid .or. length_(1) /= length ) then
        error = .true.
        return
    endif

    call ipc_receive_int_c( comm%idcomm, comm%pos, data_, size(data_) )
    ! Advance by the number of words actually read (the original used
    ! size(length_), which only coincidentally equals size(data_) here).
    comm%pos = comm%pos + size(data_)
    data = data_(1)
end subroutine ipc_receive_int_scalar
! Send a scalar single-precision real over the memory-mapped channel.
! Wire layout (words): typeid(1) | dims(3) | length(1) | payload(1).
subroutine ipc_send_real_scalar( comm, data, error )
    type(ipc_comm)              :: comm
    real                        :: data
    logical                     :: error

    integer, dimension(1), save :: typeid = 2
    real, dimension(1)          :: data_
    integer, dimension(3)       :: dims
    integer, dimension(1)       :: length

    ! Fixed duplicated copy-paste of this bounds check (it appeared twice).
    error = .true.
    if ( comm%pos + 7 > comm%maxsize ) return
    error = .false.

    data_(1)  = data
    dims      = 0    ! scalar: no array extents
    length(1) = 0    ! no character length for numeric data

    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_real_c( comm%idcomm, comm%pos, data_, size(data_) )
    comm%pos = comm%pos + size(data_)
end subroutine ipc_send_real_scalar

! Receive a scalar single-precision real; error is set when the incoming
! header does not describe a real scalar.
subroutine ipc_receive_real_scalar( comm, data, error )
    type(ipc_comm)        :: comm
    real                  :: data
    logical               :: error

    integer, save         :: typeid = 2
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer               :: length
    integer, dimension(1) :: length_
    real, dimension(1)    :: data_

    error  = .false.
    length = 0

    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)

    if ( typeid_(1) /= typeid .or. length_(1) /= length ) then
        error = .true.
        return
    endif

    call ipc_receive_real_c( comm%idcomm, comm%pos, data_, size(data_) )
    ! Advance by the number of words actually read (the original used
    ! size(length_), which only coincidentally equals size(data_) here).
    comm%pos = comm%pos + size(data_)
    data = data_(1)
end subroutine ipc_receive_real_scalar
! Send a scalar double-precision real over the memory-mapped channel.
! Wire layout (words): typeid(1) | dims(3) | length(1) | payload(2) —
! a double occupies two integer-sized words in the buffer.
subroutine ipc_send_dbl_scalar( comm, data, error )
    type(ipc_comm)                  :: comm
    real(kind(1.0d0))               :: data
    logical                         :: error

    integer, dimension(1), save     :: typeid = 3
    real(kind(1.0d0)), dimension(1) :: data_
    integer, dimension(3)           :: dims
    integer, dimension(1)           :: length

    ! Fixed duplicated copy-paste of this bounds check (it appeared twice).
    error = .true.
    if ( comm%pos + 7 > comm%maxsize ) return
    error = .false.

    data_(1)  = data
    dims      = 0
    length(1) = 0

    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_dbl_c( comm%idcomm, comm%pos, data_, 2*size(data_) )
    comm%pos = comm%pos + 2*size(data_)
end subroutine ipc_send_dbl_scalar

! Receive a scalar double-precision real; error is set when the incoming
! header does not describe a double scalar.
subroutine ipc_receive_dbl_scalar( comm, data, error )
    type(ipc_comm)                  :: comm
    real(kind(1.0d0))               :: data
    logical                         :: error

    integer, save                   :: typeid = 3
    integer, dimension(1)           :: typeid_
    integer, dimension(3)           :: dims_
    integer                         :: length
    integer, dimension(1)           :: length_
    real(kind(1.0d0)), dimension(1) :: data_

    error  = .false.
    length = 0

    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)

    if ( typeid_(1) /= typeid .or. length_(1) /= length ) then
        error = .true.
        return
    endif

    call ipc_receive_dbl_c( comm%idcomm, comm%pos, data_, 2*size(data_) )
    ! BUG FIX: the original advanced comm%pos by size(length_) (1 word)
    ! although 2*size(data_) (2 words) were consumed — matching the sender
    ! above — which left the stream misaligned for any subsequent receive.
    comm%pos = comm%pos + 2*size(data_)
    data = data_(1)
end subroutine ipc_receive_dbl_scalar
! Send a scalar default logical over the memory-mapped channel.
! Wire layout (words): typeid(1) | dims(3) | length(1) | payload(1).
subroutine ipc_send_log_scalar( comm, data, error )
    type(ipc_comm)              :: comm
    logical                     :: data
    logical                     :: error

    integer, dimension(1), save :: typeid = 4
    logical, dimension(1)       :: data_
    integer, dimension(3)       :: dims
    integer, dimension(1)       :: length

    ! Fixed duplicated copy-paste of this bounds check (it appeared twice).
    error = .true.
    if ( comm%pos + 7 > comm%maxsize ) return
    error = .false.

    data_(1)  = data
    dims      = 0
    length(1) = 0

    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_log_c( comm%idcomm, comm%pos, data_, size(data_) )
    comm%pos = comm%pos + size(data_)
end subroutine ipc_send_log_scalar

! Receive a scalar default logical; error is set when the incoming header
! does not describe a logical scalar.
subroutine ipc_receive_log_scalar( comm, data, error )
    type(ipc_comm)        :: comm
    logical               :: data
    logical               :: error

    integer, save         :: typeid = 4
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer               :: length
    integer, dimension(1) :: length_
    logical, dimension(1) :: data_

    error  = .false.
    length = 0

    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)

    if ( typeid_(1) /= typeid .or. length_(1) /= length ) then
        error = .true.
        return
    endif

    call ipc_receive_log_c( comm%idcomm, comm%pos, data_, size(data_) )
    ! Advance by the number of words actually read (the original used
    ! size(length_), which only coincidentally equals size(data_) here).
    comm%pos = comm%pos + size(data_)
    data = data_(1)
end subroutine ipc_receive_log_scalar
! Send a scalar character value over the memory-mapped channel.
! Wire layout (words): typeid(1) | dims(3) | length(1) | payload,
! where the payload packs the string at 4 characters per integer word
! (hence (len+3)/4 words) and length carries len(data).
subroutine ipc_send_char_scalar( comm, data, error )
    type(ipc_comm)                          :: comm
    character(len=*)                        :: data
    logical                                 :: error

    integer, dimension(1), save             :: typeid = 5
    character(len=len(data)), dimension(1)  :: data_
    integer, dimension(3)                   :: dims
    integer, dimension(1)                   :: length

    ! Fixed duplicated copy-paste of this bounds check (it appeared twice).
    error = .true.
    if ( comm%pos + 7 > comm%maxsize ) return
    error = .false.

    data_(1)  = data
    dims      = 0
    length(1) = len(data)

    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_char_c( comm%idcomm, comm%pos, data_, (len(data)+3)/4 )
    comm%pos = comm%pos + (len(data)+3)/4
end subroutine ipc_send_char_scalar

! Receive a scalar character value. The incoming string may be SHORTER
! than the receiving variable (it is the contents that count); it is an
! error only when it would not fit.
subroutine ipc_receive_char_scalar( comm, data, error )
    type(ipc_comm)                          :: comm
    character(len=*)                        :: data
    logical                                 :: error

    integer, save                           :: typeid = 5
    integer, dimension(1)                   :: typeid_
    integer, dimension(3)                   :: dims_
    integer                                 :: length
    integer, dimension(1)                   :: length_
    character(len=len(data)), dimension(1)  :: data_

    error  = .false.
    length = len(data)

    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)

    if ( typeid_(1) /= typeid .or. length_(1) > length ) then
        error = .true.
        return
    endif

    call ipc_receive_char_c( comm%idcomm, comm%pos, data_, (length_(1)+3)/4 )
    comm%pos = comm%pos + (length_(1)+3)/4
    ! Copy only the received characters; the assignment blank-pads the rest.
    data = data_(1)(1:length_(1))
end subroutine ipc_receive_char_scalar
! Send a scalar default complex over the memory-mapped channel.
! Wire layout (words): typeid(1) | dims(3) | length(1) | payload(2) —
! a complex occupies two integer-sized words in the buffer.
subroutine ipc_send_cmplx_scalar( comm, data, error )
    type(ipc_comm)              :: comm
    complex                     :: data
    logical                     :: error

    integer, dimension(1), save :: typeid = 6
    complex, dimension(1)       :: data_
    integer, dimension(3)       :: dims
    integer, dimension(1)       :: length

    ! Fixed duplicated copy-paste of this bounds check (it appeared twice).
    error = .true.
    if ( comm%pos + 7 > comm%maxsize ) return
    error = .false.

    data_(1)  = data
    dims      = 0
    length(1) = 0

    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_cmplx_c( comm%idcomm, comm%pos, data_, 2*size(data_) )
    comm%pos = comm%pos + 2*size(data_)
end subroutine ipc_send_cmplx_scalar

! Receive a scalar default complex; error is set when the incoming header
! does not describe a complex scalar.
subroutine ipc_receive_cmplx_scalar( comm, data, error )
    type(ipc_comm)        :: comm
    complex               :: data
    logical               :: error

    integer, save         :: typeid = 6
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer               :: length
    integer, dimension(1) :: length_
    complex, dimension(1) :: data_

    error  = .false.
    length = 0

    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)

    if ( typeid_(1) /= typeid .or. length_(1) /= length ) then
        error = .true.
        return
    endif

    call ipc_receive_cmplx_c( comm%idcomm, comm%pos, data_, 2*size(data_) )
    ! BUG FIX: the original advanced comm%pos by size(length_) (1 word)
    ! although 2*size(data_) (2 words) were consumed — matching the sender
    ! above — which left the stream misaligned for any subsequent receive.
    comm%pos = comm%pos + 2*size(data_)
    data = data_(1)
end subroutine ipc_receive_cmplx_scalar
! Send a rank-1 default integer array over the memory-mapped channel.
! Wire layout (words): typeid(1) | dims(3) | length(1) | payload(size(data)).
subroutine ipc_send_int_1d( comm, data, error )
    type(ipc_comm)              :: comm
    integer, dimension(:)       :: data
    logical                     :: error

    integer, dimension(1), save :: typeid = 1
    integer, dimension(1)       :: length
    integer, dimension(3)       :: dims
    integer                     :: sizedata

    length   = 0
    sizedata = size(data)

    ! Fixed duplicated copy-paste of this bounds check (it appeared twice).
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.

    dims = 0
    dims(1:size(shape(data))) = size(data)

    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_int_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine ipc_send_int_1d

! Receive a rank-1 default integer array; error is set when the incoming
! header does not describe an integer array of exactly size(data) elements.
subroutine ipc_receive_int_1d( comm, data, error )
    type(ipc_comm)        :: comm
    integer, dimension(:) :: data
    logical               :: error

    integer, save         :: typeid = 1
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer               :: length
    integer, dimension(1) :: length_

    error  = .false.
    length = 0

    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)

    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif

    call ipc_receive_int_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine ipc_receive_int_1d
! Send a rank-2 default integer array over the memory-mapped channel.
! Wire layout (words): typeid(1) | dims(3) | length(1) | payload(size(data)).
subroutine ipc_send_int_2d( comm, data, error )
    type(ipc_comm)              :: comm
    integer, dimension(:,:)     :: data
    logical                     :: error

    integer, dimension(1), save :: typeid = 1
    integer, dimension(1)       :: length
    integer, dimension(3)       :: dims
    integer                     :: sizedata

    length   = 0
    sizedata = size(data)

    ! Fixed duplicated copy-paste of this bounds check (it appeared twice).
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.

    ! NOTE(review): each used entry of dims gets the TOTAL element count,
    ! not the per-dimension extents (shape(data)); the receiver applies the
    ! same convention, so the pair is self-consistent — confirm against any
    ! other readers of this header before changing the wire format.
    dims = 0
    dims(1:size(shape(data))) = size(data)

    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_int_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine ipc_send_int_2d

! Receive a rank-2 default integer array; error is set when the incoming
! header does not describe an integer array of exactly size(data) elements
! (per-dimension extents are not transmitted — see NOTE in the sender).
subroutine ipc_receive_int_2d( comm, data, error )
    type(ipc_comm)          :: comm
    integer, dimension(:,:) :: data
    logical                 :: error

    integer, save           :: typeid = 1
    integer, dimension(1)   :: typeid_
    integer, dimension(3)   :: dims_
    integer                 :: length
    integer, dimension(1)   :: length_

    error  = .false.
    length = 0

    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)

    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif

    call ipc_receive_int_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine ipc_receive_int_2d
! Send a rank-3 default integer array over the memory-mapped channel.
! Wire layout (words): typeid(1) | dims(3) | length(1) | payload(size(data)).
subroutine ipc_send_int_3d( comm, data, error )
    type(ipc_comm)              :: comm
    integer, dimension(:,:,:)   :: data
    logical                     :: error

    integer, dimension(1), save :: typeid = 1
    integer, dimension(1)       :: length
    integer, dimension(3)       :: dims
    integer                     :: sizedata

    length   = 0
    sizedata = size(data)

    ! Fixed duplicated copy-paste of this bounds check (it appeared twice).
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.

    ! NOTE(review): each used entry of dims gets the TOTAL element count,
    ! not the per-dimension extents (shape(data)); the receiver applies the
    ! same convention, so the pair is self-consistent — confirm against any
    ! other readers of this header before changing the wire format.
    dims = 0
    dims(1:size(shape(data))) = size(data)

    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_int_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine ipc_send_int_3d

! Receive a rank-3 default integer array; error is set when the incoming
! header does not describe an integer array of exactly size(data) elements.
subroutine ipc_receive_int_3d( comm, data, error )
    type(ipc_comm)            :: comm
    integer, dimension(:,:,:) :: data
    logical                   :: error

    integer, save             :: typeid = 1
    integer, dimension(1)     :: typeid_
    integer, dimension(3)     :: dims_
    integer                   :: length
    integer, dimension(1)     :: length_

    error  = .false.
    length = 0

    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)

    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif

    call ipc_receive_int_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine ipc_receive_int_3d
! Send a rank-1 single-precision real array over the memory-mapped channel.
! Wire layout (words): typeid(1) | dims(3) | length(1) | payload(size(data)).
subroutine ipc_send_real_1d( comm, data, error )
    type(ipc_comm)              :: comm
    real, dimension(:)          :: data
    logical                     :: error

    integer, dimension(1), save :: typeid = 2
    integer, dimension(1)       :: length
    integer, dimension(3)       :: dims
    integer                     :: sizedata

    length   = 0
    sizedata = size(data)

    ! Fixed duplicated copy-paste of this bounds check (it appeared twice).
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.

    dims = 0
    dims(1:size(shape(data))) = size(data)

    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_real_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine ipc_send_real_1d

! Receive a rank-1 single-precision real array; error is set when the
! incoming header does not describe a real array of size(data) elements.
subroutine ipc_receive_real_1d( comm, data, error )
    type(ipc_comm)        :: comm
    real, dimension(:)    :: data
    logical               :: error

    integer, save         :: typeid = 2
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer               :: length
    integer, dimension(1) :: length_

    error  = .false.
    length = 0

    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)

    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif

    call ipc_receive_real_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine ipc_receive_real_1d
! Send a rank-2 single-precision real array over the memory-mapped channel.
! Wire layout (words): typeid(1) | dims(3) | length(1) | payload(size(data)).
subroutine ipc_send_real_2d( comm, data, error )
    type(ipc_comm)              :: comm
    real, dimension(:,:)        :: data
    logical                     :: error

    integer, dimension(1), save :: typeid = 2
    integer, dimension(1)       :: length
    integer, dimension(3)       :: dims
    integer                     :: sizedata

    length   = 0
    sizedata = size(data)

    ! Fixed duplicated copy-paste of this bounds check (it appeared twice).
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.

    ! NOTE(review): each used entry of dims gets the TOTAL element count,
    ! not the per-dimension extents (shape(data)); the receiver applies the
    ! same convention, so the pair is self-consistent — confirm against any
    ! other readers of this header before changing the wire format.
    dims = 0
    dims(1:size(shape(data))) = size(data)

    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_real_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine ipc_send_real_2d

! Receive a rank-2 single-precision real array; error is set when the
! incoming header does not describe a real array of size(data) elements.
subroutine ipc_receive_real_2d( comm, data, error )
    type(ipc_comm)        :: comm
    real, dimension(:,:)  :: data
    logical               :: error

    integer, save         :: typeid = 2
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer               :: length
    integer, dimension(1) :: length_

    error  = .false.
    length = 0

    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)

    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif

    call ipc_receive_real_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine ipc_receive_real_2d
subroutine ipc_send_real_3d( comm, data, error )
    ! Send a 3D default-real array through the shared-memory IPC buffer.
    ! Wire layout: type id (1 word), dims (3 words), string length (1 word,
    ! always 0 for numeric payloads), then size(data) payload words.
    ! Sets error = .true. and writes nothing on buffer overflow.
    ! Removed a duplicated overflow check and the unused local i.
    type(ipc_comm) :: comm
    real, dimension(:,:,:) :: data
    logical :: error
    integer, dimension(1), save :: typeid = 2   ! protocol id: default real
    integer, dimension(1) :: length             ! unused for numeric data
    integer, dimension(3) :: dims
    integer :: sizedata
    length = 0
    sizedata = size(data)
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.
    dims = 0
    ! NOTE(review): stores the total element count, not per-dimension
    ! extents; matches what the receive side checks.
    dims(1:size(shape(data))) = size(data)
    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_real_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine
subroutine ipc_receive_real_3d( comm, data, error )
    ! Receive a 3D default-real array from the shared-memory IPC buffer.
    ! Rejects the message (error = .true.) when the header does not match
    ! the actual argument; comm%pos is then already past the header.
    ! Removed unused locals ierr and dummy.
    type(ipc_comm) :: comm
    real, dimension(:,:,:) :: data
    logical :: error
    integer, save :: typeid = 2        ! expected protocol id: default real
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer :: length                  ! expected string length (0: numeric)
    integer, dimension(1) :: length_
    error = .false.
    length = 0
    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)
    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif
    call ipc_receive_real_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine ipc_receive_real_3d
subroutine ipc_send_dbl_1d( comm, data, error )
    ! Send a 1D double-precision array through the shared-memory IPC buffer.
    ! Each double occupies two integer-sized words on the wire.
    ! Sets error = .true. and writes nothing on buffer overflow.
    type(ipc_comm) :: comm
    real(kind(1.0d0)), dimension(:) :: data
    logical :: error
    integer, dimension(1), save :: typeid = 3   ! protocol id: double
    integer, dimension(1) :: length             ! unused for numeric data
    integer, dimension(3) :: dims
    integer :: sizedata
    length = 0
    sizedata = 2*size(data)          ! doubles take two words each
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.
    dims = 0
    dims(1:size(shape(data))) = size(data)
    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_dbl_c( comm%idcomm, comm%pos, data, 2*size(data) )
    ! Bug fix: advance by the number of words actually written (2 per
    ! double), matching ipc_receive_dbl_1d; the original advanced by only
    ! size(data), desynchronising sender and receiver positions.
    comm%pos = comm%pos + 2*size(data)
end subroutine
subroutine ipc_receive_dbl_1d( comm, data, error )
    ! Receive a 1D double-precision array from the shared-memory IPC buffer.
    ! Rejects the message (error = .true.) when the header does not match
    ! the actual argument; comm%pos is then already past the header.
    ! Removed unused locals ierr and dummy.
    type(ipc_comm) :: comm
    real(kind(1.0d0)), dimension(:) :: data
    logical :: error
    integer, save :: typeid = 3        ! expected protocol id: double
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer :: length                  ! expected string length (0: numeric)
    integer, dimension(1) :: length_
    error = .false.
    length = 0
    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)
    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif
    call ipc_receive_dbl_c( comm%idcomm, comm%pos, data, 2*size(data) )
    comm%pos = comm%pos + 2*size(data)   ! two words per double
end subroutine ipc_receive_dbl_1d
subroutine ipc_send_dbl_2d( comm, data, error )
    ! Send a 2D double-precision array through the shared-memory IPC buffer.
    ! Each double occupies two integer-sized words on the wire.
    ! Sets error = .true. and writes nothing on buffer overflow.
    type(ipc_comm) :: comm
    real(kind(1.0d0)), dimension(:,:) :: data
    logical :: error
    integer, dimension(1), save :: typeid = 3   ! protocol id: double
    integer, dimension(1) :: length             ! unused for numeric data
    integer, dimension(3) :: dims
    integer :: sizedata
    length = 0
    sizedata = 2*size(data)          ! doubles take two words each
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.
    dims = 0
    dims(1:size(shape(data))) = size(data)
    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_dbl_c( comm%idcomm, comm%pos, data, 2*size(data) )
    ! Bug fix: advance by the word count actually written (2 per double),
    ! matching ipc_receive_dbl_2d; original advanced by only size(data).
    comm%pos = comm%pos + 2*size(data)
end subroutine
subroutine ipc_receive_dbl_2d( comm, data, error )
    ! Receive a 2D double-precision array from the shared-memory IPC buffer.
    ! Rejects the message (error = .true.) when the header does not match
    ! the actual argument; comm%pos is then already past the header.
    ! Removed unused locals ierr and dummy.
    type(ipc_comm) :: comm
    real(kind(1.0d0)), dimension(:,:) :: data
    logical :: error
    integer, save :: typeid = 3        ! expected protocol id: double
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer :: length                  ! expected string length (0: numeric)
    integer, dimension(1) :: length_
    error = .false.
    length = 0
    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)
    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif
    call ipc_receive_dbl_c( comm%idcomm, comm%pos, data, 2*size(data) )
    comm%pos = comm%pos + 2*size(data)   ! two words per double
end subroutine ipc_receive_dbl_2d
subroutine ipc_send_dbl_3d( comm, data, error )
    ! Send a 3D double-precision array through the shared-memory IPC buffer.
    ! Each double occupies two integer-sized words on the wire.
    ! Sets error = .true. and writes nothing on buffer overflow.
    type(ipc_comm) :: comm
    real(kind(1.0d0)), dimension(:,:,:) :: data
    logical :: error
    integer, dimension(1), save :: typeid = 3   ! protocol id: double
    integer, dimension(1) :: length             ! unused for numeric data
    integer, dimension(3) :: dims
    integer :: sizedata
    length = 0
    sizedata = 2*size(data)          ! doubles take two words each
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.
    dims = 0
    dims(1:size(shape(data))) = size(data)
    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_dbl_c( comm%idcomm, comm%pos, data, 2*size(data) )
    ! Bug fix: advance by the word count actually written (2 per double),
    ! matching ipc_receive_dbl_3d; original advanced by only size(data).
    comm%pos = comm%pos + 2*size(data)
end subroutine
subroutine ipc_receive_dbl_3d( comm, data, error )
    ! Receive a 3D double-precision array from the shared-memory IPC buffer.
    ! Rejects the message (error = .true.) when the header does not match
    ! the actual argument; comm%pos is then already past the header.
    ! Removed unused locals ierr and dummy.
    type(ipc_comm) :: comm
    real(kind(1.0d0)), dimension(:,:,:) :: data
    logical :: error
    integer, save :: typeid = 3        ! expected protocol id: double
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer :: length                  ! expected string length (0: numeric)
    integer, dimension(1) :: length_
    error = .false.
    length = 0
    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)
    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif
    call ipc_receive_dbl_c( comm%idcomm, comm%pos, data, 2*size(data) )
    comm%pos = comm%pos + 2*size(data)   ! two words per double
end subroutine ipc_receive_dbl_3d
subroutine ipc_send_log_1d( comm, data, error )
    ! Send a 1D logical array through the shared-memory IPC buffer.
    ! Wire layout: type id (1 word), dims (3 words), string length (1 word,
    ! always 0 here), then size(data) payload words.
    ! Sets error = .true. and writes nothing on buffer overflow.
    ! Removed a duplicated overflow check and the unused local i.
    type(ipc_comm) :: comm
    logical, dimension(:) :: data
    logical :: error
    integer, dimension(1), save :: typeid = 4   ! protocol id: logical
    integer, dimension(1) :: length             ! unused for logical data
    integer, dimension(3) :: dims
    integer :: sizedata
    length = 0
    sizedata = size(data)
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.
    dims = 0
    dims(1:size(shape(data))) = size(data)
    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_log_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine
subroutine ipc_receive_log_1d( comm, data, error )
    ! Receive a 1D logical array from the shared-memory IPC buffer.
    ! Rejects the message (error = .true.) when the header does not match
    ! the actual argument; comm%pos is then already past the header.
    ! Removed unused locals ierr and dummy.
    type(ipc_comm) :: comm
    logical, dimension(:) :: data
    logical :: error
    integer, save :: typeid = 4        ! expected protocol id: logical
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer :: length                  ! expected string length (0 here)
    integer, dimension(1) :: length_
    error = .false.
    length = 0
    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)
    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif
    call ipc_receive_log_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine ipc_receive_log_1d
subroutine ipc_send_log_2d( comm, data, error )
    ! Send a 2D logical array through the shared-memory IPC buffer.
    ! Sets error = .true. and writes nothing on buffer overflow.
    ! Removed a duplicated overflow check and the unused local i.
    type(ipc_comm) :: comm
    logical, dimension(:,:) :: data
    logical :: error
    integer, dimension(1), save :: typeid = 4   ! protocol id: logical
    integer, dimension(1) :: length             ! unused for logical data
    integer, dimension(3) :: dims
    integer :: sizedata
    length = 0
    sizedata = size(data)
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.
    dims = 0
    dims(1:size(shape(data))) = size(data)
    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_log_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine
subroutine ipc_receive_log_2d( comm, data, error )
    ! Receive a 2D logical array from the shared-memory IPC buffer.
    ! Rejects the message (error = .true.) when the header does not match
    ! the actual argument; comm%pos is then already past the header.
    ! Removed unused locals ierr and dummy.
    type(ipc_comm) :: comm
    logical, dimension(:,:) :: data
    logical :: error
    integer, save :: typeid = 4        ! expected protocol id: logical
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer :: length                  ! expected string length (0 here)
    integer, dimension(1) :: length_
    error = .false.
    length = 0
    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)
    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif
    call ipc_receive_log_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine ipc_receive_log_2d
subroutine ipc_send_log_3d( comm, data, error )
    ! Send a 3D logical array through the shared-memory IPC buffer.
    ! Sets error = .true. and writes nothing on buffer overflow.
    ! Removed a duplicated overflow check and the unused local i.
    type(ipc_comm) :: comm
    logical, dimension(:,:,:) :: data
    logical :: error
    integer, dimension(1), save :: typeid = 4   ! protocol id: logical
    integer, dimension(1) :: length             ! unused for logical data
    integer, dimension(3) :: dims
    integer :: sizedata
    length = 0
    sizedata = size(data)
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.
    dims = 0
    dims(1:size(shape(data))) = size(data)
    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_log_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine
subroutine ipc_receive_log_3d( comm, data, error )
    ! Receive a 3D logical array from the shared-memory IPC buffer.
    ! Rejects the message (error = .true.) when the header does not match
    ! the actual argument; comm%pos is then already past the header.
    ! Removed unused locals ierr and dummy.
    type(ipc_comm) :: comm
    logical, dimension(:,:,:) :: data
    logical :: error
    integer, save :: typeid = 4        ! expected protocol id: logical
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer :: length                  ! expected string length (0 here)
    integer, dimension(1) :: length_
    error = .false.
    length = 0
    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)
    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif
    call ipc_receive_log_c( comm%idcomm, comm%pos, data, size(data) )
    comm%pos = comm%pos + size(data)
end subroutine ipc_receive_log_3d
subroutine ipc_send_char_1d( comm, data, error )
    ! Send a 1D character array through the shared-memory IPC buffer.
    ! The header's length word carries the element length; the payload is
    ! packed four characters per integer word: (size*len+3)/4 words.
    ! Sets error = .true. and writes nothing on buffer overflow.
    type(ipc_comm) :: comm
    character(len=*), dimension(:) :: data
    logical :: error
    integer, dimension(1), save :: typeid = 5   ! protocol id: character
    integer, dimension(1) :: length             ! element length in chars
    integer, dimension(3) :: dims
    integer :: sizedata
    length = len(data(1))
    sizedata = (size(data)*len(data)+3)/4       ! packed word count
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.
    dims = 0
    dims(1:size(shape(data))) = size(data)
    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_char_c( comm%idcomm, comm%pos, data, (size(data)*len(data)+3)/4 )
    ! Bug fix: advance by the packed word count actually written, matching
    ! ipc_receive_char_1d; the original advanced by only size(data).
    comm%pos = comm%pos + (size(data)*len(data)+3)/4
end subroutine
subroutine ipc_receive_char_1d( comm, data, error )
    ! Receive a 1D character array from the shared-memory IPC buffer.
    ! Rejects the message (error = .true.) when the header's type id,
    ! element length, or dims do not match the actual argument; comm%pos is
    ! then already past the header. Removed unused locals ierr and dummy.
    type(ipc_comm) :: comm
    character(len=*), dimension(:) :: data
    logical :: error
    integer, save :: typeid = 5        ! expected protocol id: character
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer :: length                  ! expected element length in chars
    integer, dimension(1) :: length_
    error = .false.
    length = len(data(1))
    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)
    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif
    call ipc_receive_char_c( comm%idcomm, comm%pos, data, (size(data)*len(data)+3)/4 )
    comm%pos = comm%pos + (size(data)*len(data)+3)/4   ! packed word count
end subroutine ipc_receive_char_1d
subroutine ipc_send_char_2d( comm, data, error )
    ! Send a 2D character array through the shared-memory IPC buffer.
    ! Payload is packed four characters per integer word.
    ! Sets error = .true. and writes nothing on buffer overflow.
    type(ipc_comm) :: comm
    character(len=*), dimension(:,:) :: data
    logical :: error
    integer, dimension(1), save :: typeid = 5   ! protocol id: character
    integer, dimension(1) :: length             ! element length in chars
    integer, dimension(3) :: dims
    integer :: sizedata
    length = len(data(1,1))
    sizedata = (size(data)*len(data)+3)/4       ! packed word count
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.
    dims = 0
    dims(1:size(shape(data))) = size(data)
    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_char_c( comm%idcomm, comm%pos, data, (size(data)*len(data)+3)/4 )
    ! Bug fix: advance by the packed word count actually written, matching
    ! ipc_receive_char_2d; the original advanced by only size(data).
    comm%pos = comm%pos + (size(data)*len(data)+3)/4
end subroutine
subroutine ipc_receive_char_2d( comm, data, error )
    ! Receive a 2D character array from the shared-memory IPC buffer.
    ! Rejects the message (error = .true.) when the header's type id,
    ! element length, or dims do not match the actual argument; comm%pos is
    ! then already past the header. Removed unused locals ierr and dummy.
    type(ipc_comm) :: comm
    character(len=*), dimension(:,:) :: data
    logical :: error
    integer, save :: typeid = 5        ! expected protocol id: character
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer :: length                  ! expected element length in chars
    integer, dimension(1) :: length_
    error = .false.
    length = len(data(1,1))
    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)
    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif
    call ipc_receive_char_c( comm%idcomm, comm%pos, data, (size(data)*len(data)+3)/4 )
    comm%pos = comm%pos + (size(data)*len(data)+3)/4   ! packed word count
end subroutine ipc_receive_char_2d
subroutine ipc_send_char_3d( comm, data, error )
    ! Send a 3D character array through the shared-memory IPC buffer.
    ! Payload is packed four characters per integer word.
    ! Sets error = .true. and writes nothing on buffer overflow.
    type(ipc_comm) :: comm
    character(len=*), dimension(:,:,:) :: data
    logical :: error
    integer, dimension(1), save :: typeid = 5   ! protocol id: character
    integer, dimension(1) :: length             ! element length in chars
    integer, dimension(3) :: dims
    integer :: sizedata
    length = len(data(1,1,1))
    sizedata = (size(data)*len(data)+3)/4       ! packed word count
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.
    dims = 0
    dims(1:size(shape(data))) = size(data)
    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_char_c( comm%idcomm, comm%pos, data, (size(data)*len(data)+3)/4 )
    ! Bug fix: advance by the packed word count actually written, matching
    ! ipc_receive_char_3d; the original advanced by only size(data).
    comm%pos = comm%pos + (size(data)*len(data)+3)/4
end subroutine
subroutine ipc_receive_char_3d( comm, data, error )
    ! Receive a 3D character array from the shared-memory IPC buffer.
    ! Rejects the message (error = .true.) when the header's type id,
    ! element length, or dims do not match the actual argument; comm%pos is
    ! then already past the header. Removed unused locals ierr and dummy.
    type(ipc_comm) :: comm
    character(len=*), dimension(:,:,:) :: data
    logical :: error
    integer, save :: typeid = 5        ! expected protocol id: character
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer :: length                  ! expected element length in chars
    integer, dimension(1) :: length_
    error = .false.
    length = len(data(1,1,1))
    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)
    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif
    call ipc_receive_char_c( comm%idcomm, comm%pos, data, (size(data)*len(data)+3)/4 )
    comm%pos = comm%pos + (size(data)*len(data)+3)/4   ! packed word count
end subroutine ipc_receive_char_3d
subroutine ipc_send_cmplx_1d( comm, data, error )
    ! Send a 1D complex array through the shared-memory IPC buffer.
    ! Each complex value occupies two integer-sized words on the wire.
    ! Sets error = .true. and writes nothing on buffer overflow.
    type(ipc_comm) :: comm
    complex, dimension(:) :: data
    logical :: error
    integer, dimension(1), save :: typeid = 6   ! protocol id: complex
    integer, dimension(1) :: length             ! unused for numeric data
    integer, dimension(3) :: dims
    integer :: sizedata
    length = 0
    sizedata = 2*size(data)          ! complex values take two words each
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.
    dims = 0
    dims(1:size(shape(data))) = size(data)
    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_cmplx_c( comm%idcomm, comm%pos, data, 2*size(data) )
    ! Bug fix: advance by the word count actually written (2 per complex),
    ! matching ipc_receive_cmplx_1d; original advanced by only size(data).
    comm%pos = comm%pos + 2*size(data)
end subroutine
subroutine ipc_receive_cmplx_1d( comm, data, error )
    ! Receive a 1D complex array from the shared-memory IPC buffer.
    ! Rejects the message (error = .true.) when the header does not match
    ! the actual argument; comm%pos is then already past the header.
    ! Removed unused locals ierr and dummy.
    type(ipc_comm) :: comm
    complex, dimension(:) :: data
    logical :: error
    integer, save :: typeid = 6        ! expected protocol id: complex
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer :: length                  ! expected string length (0: numeric)
    integer, dimension(1) :: length_
    error = .false.
    length = 0
    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)
    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif
    call ipc_receive_cmplx_c( comm%idcomm, comm%pos, data, 2*size(data) )
    comm%pos = comm%pos + 2*size(data)   ! two words per complex value
end subroutine ipc_receive_cmplx_1d
subroutine ipc_send_cmplx_2d( comm, data, error )
    ! Send a 2D complex array through the shared-memory IPC buffer.
    ! Each complex value occupies two integer-sized words on the wire.
    ! Sets error = .true. and writes nothing on buffer overflow.
    type(ipc_comm) :: comm
    complex, dimension(:,:) :: data
    logical :: error
    integer, dimension(1), save :: typeid = 6   ! protocol id: complex
    integer, dimension(1) :: length             ! unused for numeric data
    integer, dimension(3) :: dims
    integer :: sizedata
    length = 0
    sizedata = 2*size(data)          ! complex values take two words each
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.
    dims = 0
    dims(1:size(shape(data))) = size(data)
    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_cmplx_c( comm%idcomm, comm%pos, data, 2*size(data) )
    ! Bug fix: advance by the word count actually written (2 per complex),
    ! matching ipc_receive_cmplx_2d; original advanced by only size(data).
    comm%pos = comm%pos + 2*size(data)
end subroutine
subroutine ipc_receive_cmplx_2d( comm, data, error )
    ! Receive a 2D complex array from the shared-memory IPC buffer.
    ! Rejects the message (error = .true.) when the header does not match
    ! the actual argument; comm%pos is then already past the header.
    ! Removed unused locals ierr and dummy.
    type(ipc_comm) :: comm
    complex, dimension(:,:) :: data
    logical :: error
    integer, save :: typeid = 6        ! expected protocol id: complex
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer :: length                  ! expected string length (0: numeric)
    integer, dimension(1) :: length_
    error = .false.
    length = 0
    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)
    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif
    call ipc_receive_cmplx_c( comm%idcomm, comm%pos, data, 2*size(data) )
    comm%pos = comm%pos + 2*size(data)   ! two words per complex value
end subroutine ipc_receive_cmplx_2d
subroutine ipc_send_cmplx_3d( comm, data, error )
    ! Send a 3D complex array through the shared-memory IPC buffer.
    ! Each complex value occupies two integer-sized words on the wire.
    ! Sets error = .true. and writes nothing on buffer overflow.
    type(ipc_comm) :: comm
    complex, dimension(:,:,:) :: data
    logical :: error
    integer, dimension(1), save :: typeid = 6   ! protocol id: complex
    integer, dimension(1) :: length             ! unused for numeric data
    integer, dimension(3) :: dims
    integer :: sizedata
    length = 0
    sizedata = 2*size(data)          ! complex values take two words each
    error = .true.
    if ( comm%pos + 6 + sizedata > comm%maxsize ) return
    error = .false.
    dims = 0
    dims(1:size(shape(data))) = size(data)
    call ipc_send_int_c( comm%idcomm, comm%pos, typeid, 1 )
    comm%pos = comm%pos + size(typeid)
    call ipc_send_int_c( comm%idcomm, comm%pos, dims, 3 )
    comm%pos = comm%pos + size(dims)
    call ipc_send_int_c( comm%idcomm, comm%pos, length, 1 )
    comm%pos = comm%pos + size(length)
    call ipc_send_cmplx_c( comm%idcomm, comm%pos, data, 2*size(data) )
    ! Bug fix: advance by the word count actually written (2 per complex),
    ! matching ipc_receive_cmplx_3d; original advanced by only size(data).
    comm%pos = comm%pos + 2*size(data)
end subroutine
subroutine ipc_receive_cmplx_3d( comm, data, error )
    ! Receive a 3D complex array from the shared-memory IPC buffer.
    ! Rejects the message (error = .true.) when the header does not match
    ! the actual argument; comm%pos is then already past the header.
    ! Removed unused locals ierr and dummy.
    type(ipc_comm) :: comm
    complex, dimension(:,:,:) :: data
    logical :: error
    integer, save :: typeid = 6        ! expected protocol id: complex
    integer, dimension(1) :: typeid_
    integer, dimension(3) :: dims_
    integer :: length                  ! expected string length (0: numeric)
    integer, dimension(1) :: length_
    error = .false.
    length = 0
    call ipc_receive_int_c( comm%idcomm, comm%pos, typeid_, size(typeid_) )
    comm%pos = comm%pos + size(typeid_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, dims_, size(dims_) )
    comm%pos = comm%pos + size(dims_)
    call ipc_receive_int_c( comm%idcomm, comm%pos, length_, size(length_) )
    comm%pos = comm%pos + size(length_)
    if ( typeid_(1) /= typeid .or. length_(1) /= length .or. &
         any( dims_(1:size(shape(data))) /= size(data) ) ) then
        error = .true.
        return
    endif
    call ipc_receive_cmplx_c( comm%idcomm, comm%pos, data, 2*size(data) )
    comm%pos = comm%pos + 2*size(data)   ! two words per complex value
end subroutine ipc_receive_cmplx_3d
| 29,803
|
https://github.com/Orienteering-Web-Project/owp_entry/blob/master/Resources/views/Form/form__entry_club.html.twig
|
Github Open Source
|
Open Source
|
MIT
| null |
owp_entry
|
Orienteering-Web-Project
|
Twig
|
Code
| 223
| 920
|
{% extends '@OwpCore/content.html.twig' %}

{% block stylesheets %}
    {{ parent() }}
{% endblock %}

{% block javascripts %}
    {{ parent() }}
    {{ encore_entry_script_tags('entry') }}
{% endblock %}

{% block title %}{{ event.title }}{% endblock %}

{% block content_header %}
    <img class="first-slide opacity-20" src="{{ asset('images/default.jpg') | imagine_filter('carousel_header') }}" alt="First slide">
    <div class="container">
        <div class="carousel-caption text-left content-header col-12 col-lg-10 offset-lg-1">
            <h1 class="display-2 d-inline-block"><span class="badge badge-primary">{{ event.dateBegin|date('d') }}</span></h1>
            <h3 class="display-4 d-inline-block"><span class="badge">{{ event.dateBegin|date('F')|trans|upper }}</span></h3>
            <h3 class="display-4 d-inline-block"><span class="badge text-primary">{{ event.dateBegin|date('Y') }}</span></h3>
            <h1>Inscription - {{ event.title }}</h1>
            <div class="my-4">
                <a href="{{ path('owp_event_show', {'slug': event.slug}) }}" class="btn btn-primary">Retourner sur la page de l'évènement</a>
            </div>
        </div>
    </div>
{% endblock %}

{% block content_body %}
    <div class="row my-4 mx-0">
        <div class="col-12 col-lg-10 offset-lg-1">
            {{ _self.breadcrumb([{'label':event.title, 'url': path('owp_event_show', {'slug': event.slug}), 'class': ''},{'label':'Inscription', 'url': '#', 'class': 'active'}])}}
        </div>
    </div>
    {{ form_start(form, {'attr': {'id': 'form_entry'}}) }}
    <div class="row my-4 mx-0">
        <div class="col-12 col-lg-10 offset-lg-1">
            {# Bug fix: the opening div below was missing its closing '>', producing invalid HTML #}
            <div class="mb-4">
                {% include '@OwpEntry/Form/form__entry_club__select_club.html.twig' %}
                <table class="table">
                    <thead>
                    </thead>
                    <tbody>
                    {% for people in form.base %}
                        <tr>
                            <td class="form-check">
                                {{ form_widget(people) }}
                            </td>
                            <td>
                                {{ form_label(people) }}
                            </td>
                        </tr>
                    {% endfor %}
                    </tbody>
                </table>
            </div>
        </div>
        <div class="row position-sticky fixed-bottom bg-white border-top border-primary mx-0 my-4">
            <div class="col-12 col-lg-10 offset-lg-1 p-4 my-lg-4">
                <div class="d-inline-flex">
                    <button type="submit" class="btn btn-lg btn-primary" form="form_entry" disabled>Enregistrer l'inscription</button>
                    <div id="number-entries" class="p-2 text-muted">
                        <span>0</span> personne(s) sélectionnée(s)
                    </div>
                </div>
            </div>
        </div>
    {{ form_end(form) }}
{% endblock %}
| 2,759
|
https://github.com/superoo7/api/blob/master/app/models/hunt_transaction.rb
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
api
|
superoo7
|
Ruby
|
Code
| 412
| 1,376
|
require 'utils'
require 's_logger'
class HuntTransaction < ApplicationRecord
  # Ledger record for HUNT token movements. A transaction moves `amount`
  # between an on-platform sender/receiver (usernames) and/or an external
  # Ethereum address; the validations below enforce that exactly one side
  # of receiver / eth_address is set.

  # Categories a transaction may be booked under.
  BOUNTY_TYPES = %w(sponsor voting resteem sp_claim posting commenting referral report moderator contribution guardian)

  validates_presence_of :amount, :memo
  validate :validate_sender_and_receiver, :validate_eth_format
  validates :memo, length: { maximum: 255 }
  validates :bounty_type, inclusion: { in: BOUNTY_TYPES }

  # Custom validation: at least one endpoint must be an on-platform user,
  # and each side must be exactly one of an internal username or an
  # external ETH address (never both, never neither).
  def validate_sender_and_receiver
    if sender.blank? && receiver.blank?
      errors.add(:receiver, "one side of transaction should be in off-chain")
    end
    if sender.blank? && eth_address.blank?
      errors.add(:sender, "cannot be empty")
    elsif !sender.blank? && !eth_address.blank?
      errors.add(:eth_address, "Only one of internal or external receiver can be assigned")
    end
    if receiver.blank? && eth_address.blank?
      errors.add(:receiver, "cannot be empty")
    elsif !receiver.blank? && !eth_address.blank?
      errors.add(:eth_address, "Only one of internal or external receiver can be assigned")
    end
  end

  # Custom validation: shallow shape check of Ethereum identifiers —
  # a 42-char '0x'-prefixed address and a 66-char '0x'-prefixed tx hash.
  def validate_eth_format
    unless eth_address.blank?
      errors.add(:eth_address, "Wrong format") if eth_address.size != 42 || !eth_address.downcase.start_with?('0x')
    end
    unless eth_tx_hash.blank?
      errors.add(:eth_tx_hash, "Wrong format") if eth_tx_hash.size != 66 || !eth_tx_hash.downcase.start_with?('0x')
    end
  end

  # Pays an abuse reporter. Not duplicate-checked (check_dups = false),
  # so calling twice for the same day pays twice.
  # NOTE(review): the log prefix "ABUSING_REPORT]" has no opening '[' —
  # matches the style used in reward_contributor! below; confirm intended.
  def self.reward_reporter!(username, amount)
    logger = SLogger.new('reward-log')
    if user = User.find_by(username: username)
      today = Time.zone.today.to_time
      reward_user!(username, amount, 'report', "Bounty rewards for reporting abusing users - #{formatted_date(today)}", false)
      logger.log "ABUSING_REPORT] Sent #{amount} HUNT to @#{username}\nBalance: #{user.hunt_balance} -> #{user.reload.hunt_balance}", true
    else
      logger.log "No user found: @#{username}", true
    end
  end

  # Pays a weekly contributor bounty under the given bounty_type.
  # Duplicate-checked via the exact memo string ("#{memo} - week #{week}").
  def self.reward_contributor!(username, amount, week, bounty_type, memo)
    logger = SLogger.new('reward-log')
    if user = User.find_by(username: username)
      msg = "#{memo} - week #{week}"
      reward_user!(username, amount, bounty_type, msg, true)
      logger.log "#{bounty_type.upcase}] Sent #{amount} HUNT to @#{username} - #{msg}\n" +
                 "Balance: #{user.hunt_balance.round(2)} -> #{user.reload.hunt_balance.round(2)}", true
    else
      logger.log "No user found: @#{username}", true
    end
  end

  # Weekly delegation-sponsor reward (duplicate-checked by memo).
  def self.reward_sponsor!(username, amount, week)
    reward_user!(username, amount, 'sponsor', "Weekly reward for delegation sponsor - week #{week}", true)
  end

  # Daily voting-contribution reward (duplicate-checked by memo).
  def self.reward_votings!(username, amount, date)
    reward_user!(username, amount, 'voting', "Daily reward for voting contribution - #{formatted_date(date)}", true)
  end

  # Daily resteem-contribution reward (duplicate-checked by memo).
  def self.reward_resteems!(username, amount, date)
    reward_user!(username, amount, 'resteem', "Daily reward for resteem contribution - #{formatted_date(date)}", true)
  end

  # Books a reward from the 'steemhunt' account to +username+.
  # Raises 'Duplicated Rewards' when check_dups is set and a transaction
  # with the same receiver and memo already exists. Auto-creates a stub
  # User row (empty encrypted_token) for unknown usernames. No-op for 0.
  private_class_method def self.reward_user!(username, amount, bounty_type, memo, check_dups = false)
    return if amount == 0
    raise 'Duplicated Rewards' if check_dups && self.exists?(receiver: username, memo: memo)
    user = User.find_by(username: username)
    user = User.create!(username: username, encrypted_token: '') unless user
    send!(amount, 'steemhunt', user.username, nil, bounty_type, memo)
  end

  # Core transfer: creates the ledger row and adjusts both users' balances
  # atomically inside one DB transaction. The ETH leg is unimplemented
  # (see TODOs) and deliberately outside the DB transaction.
  private_class_method def self.send!(amount, sender_name = nil, receiver_name = nil, eth_address = nil, bounty_type = nil, memo = nil)
    return if amount == 0
    sender = sender_name.blank? ? nil : User.find_by(username: sender_name)
    receiver = receiver_name.blank? ? nil : User.find_by(username: receiver_name)
    ActiveRecord::Base.transaction do
      self.create!(
        sender: sender_name,
        receiver: receiver_name,
        eth_address: eth_address,
        amount: amount,
        bounty_type: bounty_type,
        memo: memo
      )
      unless sender.blank?
        sender.update!(hunt_balance: sender.hunt_balance - amount)
      end
      unless receiver.blank?
        receiver.update!(hunt_balance: receiver.hunt_balance + amount)
      end
    end
    unless eth_address.blank?
      # TODO: ETH Transaction
      # TODO: Rollback DB on errors - should be in a separate transaction
    end
  end
end
| 9,453
|
https://github.com/octanolabs/g0penrpc/blob/master/go.mod
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
g0penrpc
|
octanolabs
|
Go Module
|
Code
| 11
| 69
|
// Go module manifest for github.com/octanolabs/g0penrpc.
module github.com/octanolabs/g0penrpc
go 1.14
require (
github.com/qri-io/jsonpointer v0.1.1
github.com/qri-io/jsonschema v0.2.0
)
| 50,463
|
https://github.com/S4n60w3n/hammer/blob/master/src/Parser/Parser.spec.ts
|
Github Open Source
|
Open Source
|
MIT
| null |
hammer
|
S4n60w3n
|
TypeScript
|
Code
| 253
| 1,107
|
import { parseInput } from 'Parser';

// Jest suite for the sensor-log parser. Input format: a "reference" line
// with thermometer/humidity reference values, followed by named device
// sections each containing timestamped readings.
describe('parseInput', () => {
  it('only reference', () => {
    const result = parseInput('reference 70.0 45.0');
    expect(result).not.toBeNull();
    expect(result.refs_therm).toBe(70);
    expect(result.refs_hum).toBe(45);
  });
  it('data, only one', () => {
    const result = parseInput(`reference 70.0 45.0
thermometer temp-2
2007-04-05T22:01 69.5
2007-04-05T22:02 70.1
2007-04-05T22:03 71.3
2007-04-05T22:04 71.5
2007-04-05T22:05 69.8
`);
    expect(result).not.toBeNull();
    expect(result.readings['temp-2'].readingType).toBe('thermometer');
  });
  it('data, only two', () => {
    const result = parseInput(`reference 70.0 45.0
thermometer temp-2
2007-04-05T22:01 69.5
2007-04-05T22:02 70.1
2007-04-05T22:03 71.3
2007-04-05T22:04 71.5
2007-04-05T22:05 69.8
thermometer temp-3
2007-04-05T22:01 69.5
2007-04-05T22:02 70.1
2007-04-05T22:03 71.3
2007-04-05T22:04 71.5
2007-04-05T22:05 69.8
`);
    expect(result).not.toBeNull();
    expect(result.readings['temp-3'].readingType).toBe('thermometer');
  });
  describe('Error', () => {
    it('Empty', () => {
      expect(parseInput(``)).toBe(null);
    });
    it('null', () => {
      expect(parseInput(null)).toBe(null);
    });
    it('undefined', () => {
      expect(parseInput(undefined)).toBe(null);
    });
    it('start without reference', () => {
      expect(parseInput('Random text')).toBeNull();
    });
    it('Reference without data', () => {
      expect(parseInput('Reference')).toBeNull();
    });
    // Typo fix: test name said "numberic".
    it('Reference without numeric data', () => {
      expect(parseInput('Reference a2 2')).toBeNull();
    });
    // Typo fix: test names below said "Thermomether".
    it('Thermometer without name', () => {
      const result = parseInput(`reference 70.0 45.0
thermometer
2007-04-05T22:01 69.5
2007-04-05T22:02 70.1
2007-04-05T22:03 71.3
2007-04-05T22:04 71.5
2007-04-05T22:05 69.8
`);
      // Unnamed devices are collected under the 'unnamed' key.
      expect(result.readings.unnamed).not.toBeNull();
      expect(result.readings.unnamed).not.toBeUndefined();
    });
    it('Thermometer non numeric data', () => {
      const result = parseInput(`reference 70.0 45.0
thermometer temp
2007-04-05T22:01 6a9.5
`);
      // Malformed values parse to NaN rather than being dropped.
      expect(result.readings.temp.data[0].value).toBeNaN();
    });
    it('Starts with newLine', () => {
      const result = parseInput(`
reference 70.0 45.0
thermometer temp
2007-04-05T22:01 69.5
`);
      expect(result.readings.temp.data[0].value).toBe(69.5);
      expect(result.refs_therm).toBe(70);
      expect(result.refs_hum).toBe(45);
    });
    it('Reference with float', () => {
      const result = parseInput(`
reference 70.1 45.5
thermometer temp
2007-04-05T22:01 69.5
`);
      expect(result.refs_therm).toBe(70.1);
      expect(result.refs_hum).toBe(45.5);
    });
    it('Ends without newLine', () => {
      const result = parseInput(`reference 70.0 45.0
thermometer temp
2007-04-05T22:01 69.5`);
      expect(result.readings.temp.data[0].value).toBe(69.5);
    });
  });
});
| 12,241
|
https://github.com/ftmdapp/Fantom-PWA-Explorer/blob/master/src/layouts/FAddressDetail.vue
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
Fantom-PWA-Explorer
|
ftmdapp
|
Vue
|
Code
| 1,074
| 4,243
|
<template>
<div class="f-address-detail">
<template v-if="!dAccountByAddressError">
<div class="row f-data-layout equal-height no-vert-col-padding collapse-md">
<div class="col col-6-lg margin-bottom-menu">
<f-card>
<h2>{{ $t('view_address_detail.balance') }}</h2>
<div class="balance center-v">
<h3 class="h1"><span v-show="cAccount">{{ toFTM(cAccount ? cAccount.totalValue : 1) }} <span class="ftm">FTM</span></span></h3>
<div v-show="cAccount" class="usd">${{ toUSD(cAccount ? cAccount.totalValue : 1) }}</div>
</div>
</f-card>
</div>
<div class="col col-6-lg margin-bottom-menu">
<f-card>
<h2>{{ $t('view_address_detail.available') }}</h2>
<div class="balance center-v">
<h3 class="h1"><span v-show="'available' in cAssets">{{ toFTM(cAssets.available) }} <span class="ftm">FTM</span></span></h3>
<div v-show="'available' in cAssets" class="usd">${{ toUSD(cAssets.available) }}</div>
</div>
</f-card>
</div>
<div class="col">
<f-card>
<h2>{{ $t('view_address_detail.staking') }}</h2>
<!--
<div class="row no-collapse">
<div class="col f-row-label">{{ $t('view_address_detail.available') }}</div>
<div class="col">
<div v-show="'available' in cAssets">{{ toFTM(cAssets.available) }} FTM</div>
</div>
</div>
-->
<div class="row no-collapse">
<div class="col f-row-label">{{ $t('view_address_detail.delegated') }}</div>
<div class="col">
<div v-show="'delegated' in cAssets">{{ toFTM(cAssets.delegated) }} FTM</div>
</div>
</div>
<div class="row no-collapse">
<div class="col f-row-label">{{ $t('view_address_detail.pending_rewards') }}</div>
<div class="col">
<div v-show="'pending_rewards' in cAssets">{{ toFTM(cAssets.pending_rewards) }} FTM</div>
</div>
</div>
<div class="row no-collapse">
<div class="col f-row-label">{{ $t('view_address_detail.stashed_rewards') }}</div>
<div class="col">
<div v-show="'stashed' in cAssets">{{ toFTM(cAssets.stashed) }} FTM</div>
</div>
</div>
<div class="row no-collapse">
<div class="col f-row-label">{{ $t('view_address_detail.claimed_rewards') }}</div>
<div class="col">
<div v-show="'claimed_rewards' in cAssets">{{ toFTM(cAssets.claimed_rewards) }} FTM</div>
</div>
</div>
<div class="row no-collapse">
<div class="col f-row-label">{{ $t('validator') }}</div>
<div class="col">
<router-link v-if="validator && validator.address" :to="{ name: 'validator-detail', params: {address: validator.address} }">
{{ validator ? validator.name : '' }}
</router-link>
<span v-else>{{ validator ? validator.name : '' }}</span>
</div>
</div>
</f-card>
</div>
</div>
<!--
<br><br>
<f-card>
<h2 class="break-word">{{ id }}</h2>
<div class="row">
<div class="col">
<div class="num-block">
<h2 class="h3">{{ $t('view_address_detail.value_in_ftm') }}</h2>
<div class="num"><span v-show="cAccount">{{ toFTM(cAccount ? cAccount.totalValue : 1) }}</span></div>
</div>
</div>
<div class="col">
<div class="num-block">
<h2 class="h3">{{ $t('view_address_detail.value_in_usd') }}</h2>
<div class="num"><span v-show="cAccount">{{ toUSD(cAccount ? cAccount.totalValue : 1) }}</span></div>
</div>
</div>
</div>
</f-card>
-->
<!--
<div class="f-subsection">
<h2 class="h1">{{ $t('view_address_detail.assets') }} <span v-if="cAssetItems.length" class="f-records-count">({{ cAssetItems.length }})</span></h2>
<f-data-table
:columns="dAssetColumns"
:items="cAssetItems"
fixed-header
>
</f-data-table>
</div>
-->
<div class="f-subsection">
<h2 class="h1">{{ $t('view_block_detail.block_transactions') }} <span v-if="dRecordsCount" class="f-records-count">({{ dRecordsCount }})</span></h2>
<f-transaction-list
:items="cTransactionItems"
:loading="cLoading"
:address-col="id"
@fetch-more="onFetchMore"
></f-transaction-list>
</div>
</template>
<template v-else>
<div class="query-error">{{ dAccountByAddressError }}</div>
</template>
</div>
</template>
<script>
import FCard from "../components/FCard.vue";
import gql from 'graphql-tag';
import { WEIToFTM, FTMToUSD } from "../utils/transactions.js";
import FTransactionList from "../data-tables/FTransactionList.vue";
import {formatHexToInt, numToFixed, formatNumberByLocale, timestampToDate} from "../filters.js";
// import FDataTable from "../components/FDataTable.vue";
export default {
components: {
// FDataTable,
FTransactionList,
FCard
},
props: {
/** Address id. */
id: {
type: String,
required: true,
default: ''
},
/** Number of items per page. */
itemsPerPage: {
type: Number,
default: 20
}
},
apollo: {
account: {
query: gql`
query AccountByAddress($address: Address!, $cursor: Cursor, $count: Int!) {
account (address: $address) {
address
contract {
address
deployedBy {
hash
contractAddress
}
name
version
compiler
sourceCode
abi
validated
supportContact
timestamp
}
balance
totalValue
stashed
txCount
txList(cursor: $cursor, count: $count) {
pageInfo {
first
last
hasNext
hasPrevious
}
totalCount
edges {
cursor
transaction {
hash
from
to
value
gasUsed
block {
number
timestamp
}
}
}
}
staker {
id
createdTime
isActive
}
delegation {
toStakerId
createdTime
amount
claimedReward
pendingRewards {
amount
fromEpoch
toEpoch
}
}
}
}
`,
variables() {
return {
address: this.id,
count: this.itemsPerPage,
cursor: null
}
},
error(_error) {
this.dAccountByAddressError = _error.message;
}
},
},
data() {
return {
dRecordsCount: 0,
dAccountByAddressError: '',
/*
dAssetColumns: [
{
name: 'asset',
label: this.$t('view_address_detail.asset')
},
{
name: 'balance',
label: this.$t('view_address_detail.balance'),
css: {textAlign: 'right'}
},
{
name: 'valueInFTM',
label: this.$t('view_address_detail.value_in_ftm'),
css: {textAlign: 'right'}
},
{
name: 'valueInUSD',
label: this.$t('view_address_detail.value_in_usd'),
css: {textAlign: 'right'}
}
]
*/
}
},
watch: {
/**
* Watch route change and reset some properties, if only route parameter changes (whole component is reused,
* not rendered from scratch!).
*
* @param {object} _to
* @param {object} _from
*/
$route(_to, _from) {
if (_to.name === _from.name) {
this.appendItems = false;
}
}
},
computed: {
cAccount() {
if (this.account && this.account.contract) {
this.$emit('is-contract');
} else {
this.$emit('is-address');
}
return this.account;
},
cTransactionItems() {
const {cAccount} = this;
const txList = (cAccount ? cAccount.txList : null);
if (txList) {
// eslint-disable-next-line vue/no-side-effects-in-computed-properties
this.dRecordsCount = formatHexToInt(txList.totalCount);
}
return {
action: (this.appendItems ? 'append' : 'replace'),
hasNext: (txList ? txList.pageInfo.hasNext : false),
data: (txList ? txList.edges : [])
};
},
cAssets() {
const {cAccount} = this;
const assets = {};
if (cAccount) {
const {delegation} = cAccount;
console.log('wt', cAccount);
assets.available = cAccount.balance;
assets.delegated = (delegation ? delegation.amount : 0);
assets.pending_rewards = (delegation ? delegation.pendingRewards.amount : 0);
assets.stashed = cAccount.stashed || 0;
assets.claimed_rewards = (delegation ? delegation.claimedReward : 0);
}
return assets;
},
/**
* Get items for assets data table.
*/
/*
cAssetItems() {
const {cAccount} = this;
const items = [];
if (cAccount) {
items.push(this.getAssetItem(this.$t('view_address_detail.available'), cAccount.balance));
const {delegation} = cAccount;
if (delegation) {
items.push(this.getAssetItem(this.$t('view_address_detail.delegated'), delegation.amount));
items.push(this.getAssetItem(this.$t('view_address_detail.pending_rewards'), delegation.pendingRewards.amount));
items.push(this.getAssetItem(this.$t('view_address_detail.claimed_rewards'), delegation.claimedReward));
} else {
items.push(this.getAssetItem(this.$t('view_address_detail.delegated'), 0));
items.push(this.getAssetItem(this.$t('view_address_detail.pending_rewards'), 0));
items.push(this.getAssetItem(this.$t('view_address_detail.claimed_rewards'), 0));
}
}
return items;
},
*/
cLoading() {
return this.$apollo.queries.account.loading;
},
},
asyncComputed: {
async validator() {
const delegation = this.account ? this.account.delegation : null;
if (delegation && delegation.toStakerId !== '0x0') {
const validatorInfo = await this.getStakerById(delegation.toStakerId);
return {
name: `${validatorInfo.stakerInfo ? validatorInfo.stakerInfo.name : this.$t('unknown')}, ${parseInt(validatorInfo.id, 16)}`,
address: validatorInfo.stakerAddress,
};
} else {
return {
name: '-',
address: '',
};
}
},
},
created() {
/** If `true`, transaction items will be appended. */
this.appendItems = false;
},
methods: {
/**
* Get one item for asset data table.
*
* @param {string} _assetName
* @param {string|number} _value
*/
/*
getAssetItem(_assetName, _value) {
return {
asset: _assetName,
balance: this.toFTM(_value),
valueInFTM: this.toFTM(_value),
valueInUSD: this.toUSD(_value)
}
},
*/
/**
* Convert value to FTM.
*
* @param {string|number} _value
* @return {string}
*/
toFTM(_value) {
return formatNumberByLocale(numToFixed(WEIToFTM(_value), 2), 2);
},
/**
* Convert value to USD.
*
* @param {string|number} _value
* @return {string}
*/
toUSD(_value) {
return formatNumberByLocale(numToFixed(FTMToUSD(WEIToFTM(_value), this.$store.state.tokenPrice), 2), 2);
},
async getStakerById(_id) {
const data = await this.$apollo.query({
query: gql`
query StakerById($id: Long!) {
staker(id: $id) {
id
stakerAddress
stakerInfo {
name
website
contact
logoUrl
}
}
}
`,
variables: {
id: _id,
},
fetchPolicy: 'no-cache',
});
return data.data.staker;
},
onFetchMore() {
const {cAccount} = this;
const txList = (cAccount ? cAccount.txList : null);
if (txList && txList.pageInfo && txList.pageInfo.hasNext) {
const cursor = txList.pageInfo.last;
this.$apollo.queries.account.fetchMore({
variables: {
address: this.id,
count: this.itemsPerPage,
cursor
},
updateQuery: (previousResult, { fetchMoreResult }) => {
this.appendItems = true;
return fetchMoreResult;
}
});
}
},
WEIToFTM,
FTMToUSD,
timestampToDate,
}
}
</script>
<style lang="scss">
.f-address-detail {
.balance {
height: calc(100% - 48px);
text-align: right;
h3 {
text-align: right;
margin-bottom: 0;
}
.usd {
color: $light-gray-color;
font-size: 26px;
}
}
.num-block {
h2 {
text-align: center;
margin-top: 16px;
margin-bottom: 4px;
}
.num {
text-align: center;
font-weight: bold;
font-size: $fs36;
}
}
> .f-card {
}
}
</style>
| 35,973
|
https://github.com/ibireme/c_numconv_benchmark/blob/master/src/itoa/itoa_sse2.cpp
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
c_numconv_benchmark
|
ibireme
|
C++
|
Code
| 1,543
| 5,201
|
/*
Source: https://github.com/miloyip/itoa-benchmark/blob/master/src/sse2.cpp
License: https://github.com/miloyip/itoa-benchmark/blob/master/license.txt
Code is modified for benchmark.
Require x86 cpu with SSE2.
*/
// SSE2 implementation according to http://0x80.pl/articles/sse-itoa.html
// Modifications: (1) fix incorrect digits (2) accept all ranges (3) write to user provided buffer.
#if ((defined(__i386) || defined(__amd64)) && defined(__SSE2__)) || \
(defined(_M_IX86) || defined(_M_AMD64))
#include <cassert>
#include <emmintrin.h>
#include <stdint.h>
#ifdef _MSC_VER
#include <intrin.h>
#endif
#ifdef _MSC_VER
#define ALIGN_PRE __declspec(align(16))
#define ALIGN_SUF
#else
#define ALIGN_PRE
#define ALIGN_SUF __attribute__ ((aligned(16)))
#endif
static const char gDigitsLut[200] = {
'0','0','0','1','0','2','0','3','0','4','0','5','0','6','0','7','0','8','0','9',
'1','0','1','1','1','2','1','3','1','4','1','5','1','6','1','7','1','8','1','9',
'2','0','2','1','2','2','2','3','2','4','2','5','2','6','2','7','2','8','2','9',
'3','0','3','1','3','2','3','3','3','4','3','5','3','6','3','7','3','8','3','9',
'4','0','4','1','4','2','4','3','4','4','4','5','4','6','4','7','4','8','4','9',
'5','0','5','1','5','2','5','3','5','4','5','5','5','6','5','7','5','8','5','9',
'6','0','6','1','6','2','6','3','6','4','6','5','6','6','6','7','6','8','6','9',
'7','0','7','1','7','2','7','3','7','4','7','5','7','6','7','7','7','8','7','9',
'8','0','8','1','8','2','8','3','8','4','8','5','8','6','8','7','8','8','8','9',
'9','0','9','1','9','2','9','3','9','4','9','5','9','6','9','7','9','8','9','9'
};
static const uint32_t kDiv10000 = 0xd1b71759;
ALIGN_PRE static const uint32_t kDiv10000Vector[4] ALIGN_SUF = { kDiv10000, kDiv10000, kDiv10000, kDiv10000 };
ALIGN_PRE static const uint32_t k10000Vector[4] ALIGN_SUF = { 10000, 10000, 10000, 10000 };
ALIGN_PRE static const uint16_t kDivPowersVector[8] ALIGN_SUF = { 8389, 5243, 13108, 32768, 8389, 5243, 13108, 32768 }; // 10^3, 10^2, 10^1, 10^0
ALIGN_PRE static const uint16_t kShiftPowersVector[8] ALIGN_SUF = {
1 << (16 - (23 + 2 - 16)),
1 << (16 - (19 + 2 - 16)),
1 << (16 - 1 - 2),
1 << (15),
1 << (16 - (23 + 2 - 16)),
1 << (16 - (19 + 2 - 16)),
1 << (16 - 1 - 2),
1 << (15)
};
ALIGN_PRE static const uint16_t k10Vector[8] ALIGN_SUF = { 10, 10, 10, 10, 10, 10, 10, 10 };
ALIGN_PRE static const char kAsciiZero[16] ALIGN_SUF = { '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0' };
inline __m128i Convert8DigitsSSE2(uint32_t value) {
assert(value <= 99999999);
// abcd, efgh = abcdefgh divmod 10000
const __m128i abcdefgh = _mm_cvtsi32_si128(value);
const __m128i abcd = _mm_srli_epi64(_mm_mul_epu32(abcdefgh, reinterpret_cast<const __m128i*>(kDiv10000Vector)[0]), 45);
const __m128i efgh = _mm_sub_epi32(abcdefgh, _mm_mul_epu32(abcd, reinterpret_cast<const __m128i*>(k10000Vector)[0]));
// v1 = [ abcd, efgh, 0, 0, 0, 0, 0, 0 ]
const __m128i v1 = _mm_unpacklo_epi16(abcd, efgh);
// v1a = v1 * 4 = [ abcd * 4, efgh * 4, 0, 0, 0, 0, 0, 0 ]
const __m128i v1a = _mm_slli_epi64(v1, 2);
// v2 = [ abcd * 4, abcd * 4, abcd * 4, abcd * 4, efgh * 4, efgh * 4, efgh * 4, efgh * 4 ]
const __m128i v2a = _mm_unpacklo_epi16(v1a, v1a);
const __m128i v2 = _mm_unpacklo_epi32(v2a, v2a);
// v4 = v2 div 10^3, 10^2, 10^1, 10^0 = [ a, ab, abc, abcd, e, ef, efg, efgh ]
const __m128i v3 = _mm_mulhi_epu16(v2, reinterpret_cast<const __m128i*>(kDivPowersVector)[0]);
const __m128i v4 = _mm_mulhi_epu16(v3, reinterpret_cast<const __m128i*>(kShiftPowersVector)[0]);
// v5 = v4 * 10 = [ a0, ab0, abc0, abcd0, e0, ef0, efg0, efgh0 ]
const __m128i v5 = _mm_mullo_epi16(v4, reinterpret_cast<const __m128i*>(k10Vector)[0]);
// v6 = v5 << 16 = [ 0, a0, ab0, abc0, 0, e0, ef0, efg0 ]
const __m128i v6 = _mm_slli_epi64(v5, 16);
// v7 = v4 - v6 = { a, b, c, d, e, f, g, h }
const __m128i v7 = _mm_sub_epi16(v4, v6);
return v7;
}
inline __m128i ShiftDigits_SSE2(__m128i a, unsigned digit) {
assert(digit <= 8);
switch (digit) {
case 0: return a;
case 1: return _mm_srli_si128(a, 1);
case 2: return _mm_srli_si128(a, 2);
case 3: return _mm_srli_si128(a, 3);
case 4: return _mm_srli_si128(a, 4);
case 5: return _mm_srli_si128(a, 5);
case 6: return _mm_srli_si128(a, 6);
case 7: return _mm_srli_si128(a, 7);
case 8: return _mm_srli_si128(a, 8);
}
return a; // should not execute here.
}
extern "C"
char *itoa_u32_sse2(uint32_t value, char* buffer) {
if (value < 10000) {
const uint32_t d1 = (value / 100) << 1;
const uint32_t d2 = (value % 100) << 1;
if (value >= 1000)
*buffer++ = gDigitsLut[d1];
if (value >= 100)
*buffer++ = gDigitsLut[d1 + 1];
if (value >= 10)
*buffer++ = gDigitsLut[d2];
*buffer++ = gDigitsLut[d2 + 1];
// *buffer++ = '\0';
return buffer;
}
else if (value < 100000000) {
// Experiment shows that this case SSE2 is slower
#if 0
const __m128i a = Convert8DigitsSSE2(value);
// Convert to bytes, add '0'
const __m128i va = _mm_add_epi8(_mm_packus_epi16(a, _mm_setzero_si128()), reinterpret_cast<const __m128i*>(kAsciiZero)[0]);
// Count number of digit
const unsigned mask = _mm_movemask_epi8(_mm_cmpeq_epi8(va, reinterpret_cast<const __m128i*>(kAsciiZero)[0]));
unsigned long digit;
#ifdef _MSC_VER
_BitScanForward(&digit, ~mask | 0x8000);
#else
digit = __builtin_ctz(~mask | 0x8000);
#endif
// Shift digits to the beginning
__m128i result = ShiftDigits_SSE2(va, digit);
//__m128i result = _mm_srl_epi64(va, _mm_cvtsi32_si128(digit * 8));
_mm_storel_epi64(reinterpret_cast<__m128i*>(buffer), result);
buffer[8 - digit] = '\0';
#else
// value = bbbbcccc
const uint32_t b = value / 10000;
const uint32_t c = value % 10000;
const uint32_t d1 = (b / 100) << 1;
const uint32_t d2 = (b % 100) << 1;
const uint32_t d3 = (c / 100) << 1;
const uint32_t d4 = (c % 100) << 1;
if (value >= 10000000)
*buffer++ = gDigitsLut[d1];
if (value >= 1000000)
*buffer++ = gDigitsLut[d1 + 1];
if (value >= 100000)
*buffer++ = gDigitsLut[d2];
*buffer++ = gDigitsLut[d2 + 1];
*buffer++ = gDigitsLut[d3];
*buffer++ = gDigitsLut[d3 + 1];
*buffer++ = gDigitsLut[d4];
*buffer++ = gDigitsLut[d4 + 1];
//*buffer++ = '\0';
return buffer;
#endif
}
else {
// value = aabbbbbbbb in decimal
const uint32_t a = value / 100000000; // 1 to 42
value %= 100000000;
if (a >= 10) {
const unsigned i = a << 1;
*buffer++ = gDigitsLut[i];
*buffer++ = gDigitsLut[i + 1];
}
else
*buffer++ = '0' + static_cast<char>(a);
const __m128i b = Convert8DigitsSSE2(value);
const __m128i ba = _mm_add_epi8(_mm_packus_epi16(_mm_setzero_si128(), b), reinterpret_cast<const __m128i*>(kAsciiZero)[0]);
const __m128i result = _mm_srli_si128(ba, 8);
_mm_storel_epi64(reinterpret_cast<__m128i*>(buffer), result);
//buffer[8] = '\0';
return buffer + 8;
}
}
extern "C"
char *itoa_i32_sse2(int32_t value, char* buffer) {
uint32_t u = static_cast<uint32_t>(value);
if (value < 0) {
*buffer++ = '-';
u = ~u + 1;
}
return itoa_u32_sse2(u, buffer);
}
extern "C"
char *itoa_u64_sse2(uint64_t value, char* buffer) {
if (value < 100000000) {
uint32_t v = static_cast<uint32_t>(value);
if (v < 10000) {
const uint32_t d1 = (v / 100) << 1;
const uint32_t d2 = (v % 100) << 1;
if (v >= 1000)
*buffer++ = gDigitsLut[d1];
if (v >= 100)
*buffer++ = gDigitsLut[d1 + 1];
if (v >= 10)
*buffer++ = gDigitsLut[d2];
*buffer++ = gDigitsLut[d2 + 1];
//*buffer++ = '\0';
return buffer;
}
else {
// Experiment shows that this case SSE2 is slower
#if 0
const __m128i a = Convert8DigitsSSE2(v);
// Convert to bytes, add '0'
const __m128i va = _mm_add_epi8(_mm_packus_epi16(a, _mm_setzero_si128()), reinterpret_cast<const __m128i*>(kAsciiZero)[0]);
// Count number of digit
const unsigned mask = _mm_movemask_epi8(_mm_cmpeq_epi8(va, reinterpret_cast<const __m128i*>(kAsciiZero)[0]));
unsigned long digit;
#ifdef _MSC_VER
_BitScanForward(&digit, ~mask | 0x8000);
#else
digit = __builtin_ctz(~mask | 0x8000);
#endif
// Shift digits to the beginning
__m128i result = ShiftDigits_SSE2(va, digit);
_mm_storel_epi64(reinterpret_cast<__m128i*>(buffer), result);
buffer[8 - digit] = '\0';
#else
// value = bbbbcccc
const uint32_t b = v / 10000;
const uint32_t c = v % 10000;
const uint32_t d1 = (b / 100) << 1;
const uint32_t d2 = (b % 100) << 1;
const uint32_t d3 = (c / 100) << 1;
const uint32_t d4 = (c % 100) << 1;
if (value >= 10000000)
*buffer++ = gDigitsLut[d1];
if (value >= 1000000)
*buffer++ = gDigitsLut[d1 + 1];
if (value >= 100000)
*buffer++ = gDigitsLut[d2];
*buffer++ = gDigitsLut[d2 + 1];
*buffer++ = gDigitsLut[d3];
*buffer++ = gDigitsLut[d3 + 1];
*buffer++ = gDigitsLut[d4];
*buffer++ = gDigitsLut[d4 + 1];
//*buffer++ = '\0';
return buffer;
#endif
}
}
else if (value < 10000000000000000) {
const uint32_t v0 = static_cast<uint32_t>(value / 100000000);
const uint32_t v1 = static_cast<uint32_t>(value % 100000000);
const __m128i a0 = Convert8DigitsSSE2(v0);
const __m128i a1 = Convert8DigitsSSE2(v1);
// Convert to bytes, add '0'
const __m128i va = _mm_add_epi8(_mm_packus_epi16(a0, a1), reinterpret_cast<const __m128i*>(kAsciiZero)[0]);
// Count number of digit
const unsigned mask = _mm_movemask_epi8(_mm_cmpeq_epi8(va, reinterpret_cast<const __m128i*>(kAsciiZero)[0]));
#ifdef _MSC_VER
unsigned long digit;
_BitScanForward(&digit, ~mask | 0x8000);
#else
unsigned digit = __builtin_ctz(~mask | 0x8000);
#endif
// Shift digits to the beginning
__m128i result = ShiftDigits_SSE2(va, digit);
_mm_storeu_si128(reinterpret_cast<__m128i*>(buffer), result);
//buffer[16 - digit] = '\0';
return buffer + 16 - digit;
}
else {
const uint32_t a = static_cast<uint32_t>(value / 10000000000000000); // 1 to 1844
value %= 10000000000000000;
if (a < 10)
*buffer++ = '0' + static_cast<char>(a);
else if (a < 100) {
const uint32_t i = a << 1;
*buffer++ = gDigitsLut[i];
*buffer++ = gDigitsLut[i + 1];
}
else if (a < 1000) {
*buffer++ = '0' + static_cast<char>(a / 100);
const uint32_t i = (a % 100) << 1;
*buffer++ = gDigitsLut[i];
*buffer++ = gDigitsLut[i + 1];
}
else {
const uint32_t i = (a / 100) << 1;
const uint32_t j = (a % 100) << 1;
*buffer++ = gDigitsLut[i];
*buffer++ = gDigitsLut[i + 1];
*buffer++ = gDigitsLut[j];
*buffer++ = gDigitsLut[j + 1];
}
const uint32_t v0 = static_cast<uint32_t>(value / 100000000);
const uint32_t v1 = static_cast<uint32_t>(value % 100000000);
const __m128i a0 = Convert8DigitsSSE2(v0);
const __m128i a1 = Convert8DigitsSSE2(v1);
// Convert to bytes, add '0'
const __m128i va = _mm_add_epi8(_mm_packus_epi16(a0, a1), reinterpret_cast<const __m128i*>(kAsciiZero)[0]);
_mm_storeu_si128(reinterpret_cast<__m128i*>(buffer), va);
// buffer[16] = '\0';
return buffer + 16;
}
}
extern "C"
char *itoa_i64_sse2(int64_t value, char* buffer) {
uint64_t u = static_cast<uint64_t>(value);
if (value < 0) {
*buffer++ = '-';
u = ~u + 1;
}
return itoa_u64_sse2(u, buffer);
}
extern "C" {
/* benckmark config */
int itoa_sse2_available_32 = 1;
int itoa_sse2_available_64 = 1;
}
#else
#include <stdint.h>
extern "C" {
char *itoa_u32_sse2(uint32_t value, char *buffer) { return buffer; }
char *itoa_i32_sse2(int32_t value, char *buffer) { return buffer; }
char *itoa_u64_sse2(uint64_t value, char *buffer) { return buffer; }
char *itoa_i64_sse2(int64_t value, char *buffer) { return buffer; }
/* benckmark config */
int itoa_sse2_available_32 = 0;
int itoa_sse2_available_64 = 0;
}
#endif
| 30,018
|
https://github.com/daniel-rck/QuestPDF/blob/master/QuestPDF.Examples/RowExamples.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
QuestPDF
|
daniel-rck
|
C#
|
Code
| 150
| 746
|
using NUnit.Framework;
using QuestPDF.Examples.Engine;
using QuestPDF.Fluent;
using QuestPDF.Helpers;
using QuestPDF.Infrastructure;
namespace QuestPDF.Examples
{
public class RowExamples
{
[Test]
public void ItemTypes()
{
RenderingTest
.Create()
.ProducePdf()
.PageSize(650, 300)
.ShowResults()
.Render(container =>
{
container
.Padding(25)
.MinimalBox()
.Border(1)
.Column(column =>
{
column.Item().LabelCell("Total width: 600px");
column.Item().Row(row =>
{
row.ConstantItem(150).ValueCell("150px");
row.ConstantItem(100).ValueCell("100px");
row.RelativeItem(4).ValueCell("200px");
row.RelativeItem(3).ValueCell("150px");
});
column.Item().Row(row =>
{
row.ConstantItem(100).ValueCell("100px");
row.ConstantItem(50).ValueCell("50px");
row.RelativeItem(2).ValueCell("100px");
row.RelativeItem(1).ValueCell("50px");
});
});
});
}
[Test]
public void Stability()
{
// up to version 2021.12, this code would always result with the infinite layout exception
RenderingTest
.Create()
.ProducePdf()
.MaxPages(100)
.PageSize(250, 150)
.ShowResults()
.Render(container =>
{
container
.Padding(25)
.Row(row =>
{
row.RelativeItem().Column(column =>
{
column.Item().ShowOnce().Element(CreateBox).Text("X");
column.Item().Element(CreateBox).Text("1");
column.Item().Element(CreateBox).Text("2");
});
row.RelativeItem().Column(column =>
{
column.Item().Element(CreateBox).Text("1");
column.Item().Element(CreateBox).Text("2");
});
});
});
static IContainer CreateBox(IContainer container)
{
return container
.ExtendHorizontal()
.ExtendVertical()
.Background(Colors.Grey.Lighten4)
.Border(1)
.AlignCenter()
.AlignMiddle()
.ShowOnce();
}
}
[Test]
public void Stability_NoItems()
{
RenderingTest
.Create()
.ProducePdf()
.MaxPages(100)
.PageSize(250, 150)
.Render(container =>
{
container
.Padding(25)
.Row(row => { });
});
}
}
}
| 29,174
|
https://github.com/ugurmeet/presto/blob/master/presto-orc/src/main/java/com/facebook/presto/orc/writer/DictionaryColumnWriter.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,021
|
presto
|
ugurmeet
|
Java
|
Code
| 1,579
| 5,513
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.orc.writer;
import com.facebook.presto.common.block.Block;
import com.facebook.presto.orc.ColumnWriterOptions;
import com.facebook.presto.orc.DictionaryCompressionOptimizer.DictionaryColumn;
import com.facebook.presto.orc.DwrfDataEncryptor;
import com.facebook.presto.orc.OrcEncoding;
import com.facebook.presto.orc.checkpoint.StreamCheckpoint;
import com.facebook.presto.orc.metadata.ColumnEncoding;
import com.facebook.presto.orc.metadata.CompressedMetadataWriter;
import com.facebook.presto.orc.metadata.MetadataWriter;
import com.facebook.presto.orc.metadata.RowGroupIndex;
import com.facebook.presto.orc.metadata.Stream;
import com.facebook.presto.orc.metadata.Stream.StreamKind;
import com.facebook.presto.orc.metadata.statistics.ColumnStatistics;
import com.facebook.presto.orc.stream.LongOutputStream;
import com.facebook.presto.orc.stream.PresentOutputStream;
import com.facebook.presto.orc.stream.StreamDataOutput;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.slice.Slice;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import static com.facebook.presto.common.array.Arrays.ExpansionFactor.MEDIUM;
import static com.facebook.presto.common.array.Arrays.ExpansionOption.PRESERVE;
import static com.facebook.presto.common.array.Arrays.ensureCapacity;
import static com.facebook.presto.orc.DictionaryCompressionOptimizer.estimateIndexBytesPerValue;
import static com.facebook.presto.orc.metadata.CompressionKind.NONE;
import static com.facebook.presto.orc.stream.LongOutputStream.createDataOutputStream;
import static com.facebook.presto.orc.writer.ColumnWriterUtils.buildRowGroupIndexes;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static io.airlift.slice.SizeOf.sizeOf;
import static java.lang.Math.toIntExact;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;
public abstract class DictionaryColumnWriter
implements ColumnWriter, DictionaryColumn
{
// In theory, nulls are stored using bit fields with 1 bit per entry
// In code, though they use Byte RLE, using 8 is a good heuristic and close to worst case.
public static final int NUMBER_OF_NULLS_PER_BYTE = 8;
private static final int EXPECTED_ROW_GROUP_SEGMENT_SIZE = 10_000;
protected final int column;
protected final int sequence;
protected final ColumnWriterOptions columnWriterOptions;
protected final Optional<DwrfDataEncryptor> dwrfEncryptor;
protected final OrcEncoding orcEncoding;
protected final MetadataWriter metadataWriter;
private final CompressedMetadataWriter compressedMetadataWriter;
private final List<DictionaryRowGroup> rowGroups = new ArrayList<>();
private final DictionaryRowGroupBuilder rowGroupBuilder = new DictionaryRowGroupBuilder();
private final int preserveDirectEncodingStripeCount;
private PresentOutputStream presentStream;
private LongOutputStream dataStream;
private int[] rowGroupIndexes;
private int rowGroupOffset;
private long rawBytesEstimate;
private long totalValueCount;
private long totalNonNullValueCount;
private boolean closed;
private boolean inRowGroup;
private boolean directEncoded;
private long rowGroupRetainedSizeInBytes;
private int preserveDirectEncodingStripeIndex;
public DictionaryColumnWriter(
int column,
int sequence,
ColumnWriterOptions columnWriterOptions,
Optional<DwrfDataEncryptor> dwrfEncryptor,
OrcEncoding orcEncoding,
MetadataWriter metadataWriter)
{
checkArgument(column >= 0, "column is negative");
checkArgument(sequence >= 0, "sequence is negative");
this.column = column;
this.sequence = sequence;
this.columnWriterOptions = requireNonNull(columnWriterOptions, "columnWriterOptions is null");
this.dwrfEncryptor = requireNonNull(dwrfEncryptor, "dwrfEncryptor is null");
this.orcEncoding = requireNonNull(orcEncoding, "orcEncoding is null");
this.compressedMetadataWriter = new CompressedMetadataWriter(metadataWriter, columnWriterOptions, dwrfEncryptor);
this.preserveDirectEncodingStripeCount = columnWriterOptions.getPreserveDirectEncodingStripeCount();
this.dataStream = createDataOutputStream(columnWriterOptions, dwrfEncryptor, orcEncoding);
this.presentStream = new PresentOutputStream(columnWriterOptions, dwrfEncryptor);
this.metadataWriter = requireNonNull(metadataWriter, "metadataWriter is null");
this.rowGroupIndexes = new int[EXPECTED_ROW_GROUP_SEGMENT_SIZE];
}
    // --- Contract for type-specific dictionary writers -------------------
    // Concrete subclasses own the dictionary storage and the fallback direct
    // writer; the base class drives row-group bookkeeping and conversion.

    /** Creates the fallback direct (non-dictionary) writer for this column. */
    protected abstract ColumnWriter createDirectColumnWriter();
    /** Returns the direct writer created earlier; only meaningful once direct encoding is active. */
    protected abstract ColumnWriter getDirectColumnWriter();
    // The three overloads below replay dictionary indexes (stored in int/short/byte
    // segments) into the direct writer, returning false once maxDirectBytes is exceeded.
    protected abstract boolean tryConvertRowGroupToDirect(int dictionaryIndexCount, int[] dictionaryIndexes, int maxDirectBytes);
    protected abstract boolean tryConvertRowGroupToDirect(int dictionaryIndexCount, short[] dictionaryIndexes, int maxDirectBytes);
    protected abstract boolean tryConvertRowGroupToDirect(int dictionaryIndexCount, byte[] dictionaryIndexes, int maxDirectBytes);
    /** Column encoding advertised when the stripe is written dictionary-encoded. */
    protected abstract ColumnEncoding getDictionaryColumnEncoding();
    /** Adds a block's values to the dictionary, writing their indexes into rowGroupIndexes starting at rowGroupOffset. */
    protected abstract BlockStatistics addBlockToDictionary(Block block, int rowGroupOffset, int[] rowGroupIndexes);
    /** Memory retained by the dictionary itself, for memory accounting. */
    protected abstract long getRetainedDictionaryBytes();
    /**
     * writeDictionary to the Streams and optionally return new mappings to be used.
     * The mapping is used for sorting the indexes. ORC dictionary needs to be sorted,
     * but DWRF sorting is optional.
     *
     * @return new mappings to be used for indexes, if no new mappings, Optional.empty.
     */
    protected abstract Optional<int[]> writeDictionary();
    /** Opens a new row group on the direct writer during dictionary-to-direct conversion. */
    protected abstract void beginDataRowGroup();
    /** Hands the present stream over to the direct writer after a successful conversion. */
    protected abstract void movePresentStreamToDirectWriter(PresentOutputStream presentStream);
    // Serialize rowGroupValueCount indexes (byte/short/int variants) to the data
    // stream, remapping through originalDictionaryToSortedIndex when present.
    protected abstract void writeDataStreams(
            int rowGroupValueCount,
            byte[] rowGroupIndexes,
            Optional<int[]> originalDictionaryToSortedIndex,
            LongOutputStream dataStream);
    protected abstract void writeDataStreams(
            int rowGroupValueCount,
            short[] rowGroupIndexes,
            Optional<int[]> originalDictionaryToSortedIndex,
            LongOutputStream dataStream);
    protected abstract void writeDataStreams(
            int rowGroupValueCount,
            int[] rowGroupIndexes,
            Optional<int[]> originalDictionaryToSortedIndex,
            LongOutputStream dataStream);
    /** Clears dictionary contents for reuse in the next stripe. */
    protected abstract void resetDictionary();
    /** Finalizes the dictionary before its streams are emitted. */
    protected abstract void closeDictionary();
    /** Streams carrying the dictionary itself (e.g. dictionary data / length streams). */
    protected abstract List<StreamDataOutput> getDictionaryStreams(int column, int sequence);
    /** Builds the column statistics for the row group being finished. */
    protected abstract ColumnStatistics createColumnStatistics();
    /** Estimated raw (un-encoded) byte size of values seen so far; valid only in dictionary mode. */
    @Override
    public long getRawBytesEstimate()
    {
        checkState(!directEncoded);
        return rawBytesEstimate;
    }

    /** True once this writer has fallen back to direct encoding for the current stripe. */
    @Override
    public boolean isDirectEncoded()
    {
        return directEncoded;
    }

    /** Estimated index-stream bytes: per-value index width (from dictionary size) times non-null count. */
    @Override
    public int getIndexBytes()
    {
        checkState(!directEncoded);
        return toIntExact(estimateIndexBytesPerValue(getDictionaryEntries()) * getNonNullValueCount());
    }

    /** Total buffered values for the stripe, including nulls; dictionary mode only. */
    @Override
    public long getValueCount()
    {
        checkState(!directEncoded);
        return totalValueCount;
    }

    /** Buffered non-null values for the stripe; dictionary mode only. */
    @Override
    public long getNonNullValueCount()
    {
        checkState(!directEncoded);
        return totalNonNullValueCount;
    }

    /** Buffered null values, derived as total minus non-null. */
    @Override
    public long getNullValueCount()
    {
        checkState(!directEncoded);
        return totalValueCount - totalNonNullValueCount;
    }
    /**
     * Replays one row group's dictionary indexes — which may be split across
     * byte, short and int segments — into the direct writer.
     * Returns false as soon as the direct size budget is exceeded.
     */
    private boolean tryConvertRowGroupToDirect(byte[][] byteSegments, short[][] shortSegments, int[][] intSegments, int maxDirectBytes)
    {
        // The row group indexes may be split between byte, short and int segments. They need to be processed in
        // byte, short and int order. If they are processed in different order, it will result in data corruption.
        if (byteSegments != null) {
            for (byte[] byteIndexes : byteSegments) {
                if (!tryConvertRowGroupToDirect(byteIndexes.length, byteIndexes, maxDirectBytes)) {
                    return false;
                }
            }
        }
        if (shortSegments != null) {
            for (short[] shortIndexes : shortSegments) {
                if (!tryConvertRowGroupToDirect(shortIndexes.length, shortIndexes, maxDirectBytes)) {
                    return false;
                }
            }
        }
        if (intSegments != null) {
            for (int[] intIndexes : intSegments) {
                if (!tryConvertRowGroupToDirect(intIndexes.length, intIndexes, maxDirectBytes)) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Attempts to abandon dictionary encoding: replays all sealed row groups
     * plus the in-progress one into a direct writer. On success the dictionary
     * state is freed and the direct writer's buffered size is returned; on
     * failure (budget exceeded) the direct writer is discarded and
     * OptionalInt.empty() is returned, leaving dictionary state untouched.
     */
    @Override
    public OptionalInt tryConvertToDirect(int maxDirectBytes)
    {
        checkState(!closed);
        checkState(!directEncoded);
        ColumnWriter directWriter = createDirectColumnWriter();
        checkState(directWriter.getBufferedBytes() == 0, "direct writer should have no data");
        for (DictionaryRowGroup rowGroup : rowGroups) {
            beginDataRowGroup();
            // todo we should be able to pass the stats down to avoid recalculating min and max
            boolean success = tryConvertRowGroupToDirect(rowGroup.getByteSegments(), rowGroup.getShortSegments(), rowGroup.getIntSegments(), maxDirectBytes);
            if (!success) {
                return resetDirectWriter(directWriter);
            }
            directWriter.finishRowGroup();
        }
        if (inRowGroup) {
            beginDataRowGroup();
            // First the already-segmented indexes of the open row group ...
            boolean success = tryConvertRowGroupToDirect(
                    rowGroupBuilder.getByteSegments(),
                    rowGroupBuilder.getShortSegments(),
                    rowGroupBuilder.getIntegerSegments(),
                    maxDirectBytes);
            if (!success) {
                return resetDirectWriter(directWriter);
            }
            // ... then the tail still sitting in rowGroupIndexes.
            if (!tryConvertRowGroupToDirect(rowGroupOffset, rowGroupIndexes, maxDirectBytes)) {
                return resetDirectWriter(directWriter);
            }
        }
        else {
            checkState(rowGroupOffset == 0);
        }
        // Conversion to DirectStream succeeded, Transfer the present stream to direct writer and assign
        // this a new PresentStream, so one writer is responsible for one present stream.
        movePresentStreamToDirectWriter(presentStream);
        presentStream = new PresentOutputStream(columnWriterOptions, dwrfEncryptor);
        // free the dictionary
        rawBytesEstimate = 0;
        totalValueCount = 0;
        totalNonNullValueCount = 0;
        resetRowGroups();
        closeDictionary();
        resetDictionary();
        directEncoded = true;
        return OptionalInt.of(toIntExact(directWriter.getBufferedBytes()));
    }

    /** Discards a partially-filled direct writer after a failed conversion attempt. */
    private OptionalInt resetDirectWriter(ColumnWriter directWriter)
    {
        directWriter.close();
        directWriter.reset();
        return OptionalInt.empty();
    }
    /** Encoding for this column: delegates to the direct writer after fallback, dictionary encoding otherwise. */
    @Override
    public Map<Integer, ColumnEncoding> getColumnEncodings()
    {
        checkState(closed);
        if (directEncoded) {
            return getDirectColumnWriter().getColumnEncodings();
        }
        return ImmutableMap.of(column, getDictionaryColumnEncoding());
    }

    /** Starts a row group; in dictionary mode this checkpoints the present stream. */
    @Override
    public void beginRowGroup()
    {
        checkState(!inRowGroup);
        inRowGroup = true;
        if (directEncoded) {
            getDirectColumnWriter().beginRowGroup();
        }
        else {
            presentStream.recordCheckpoint();
        }
    }

    /**
     * Buffers one block: records null/non-null flags in the present stream and
     * adds non-null values to the dictionary, collecting their indexes.
     * Returns the raw byte size of the block including nulls.
     */
    @Override
    public long writeBlock(Block block)
    {
        checkState(!closed);
        checkArgument(block.getPositionCount() > 0, "Block is empty");
        if (directEncoded) {
            return getDirectColumnWriter().writeBlock(block);
        }
        rowGroupIndexes = ensureCapacity(rowGroupIndexes, rowGroupOffset + block.getPositionCount(), MEDIUM, PRESERVE);
        for (int position = 0; position < block.getPositionCount(); position++) {
            presentStream.writeBoolean(!block.isNull(position));
        }
        BlockStatistics blockStatistics = addBlockToDictionary(block, rowGroupOffset, rowGroupIndexes);
        totalNonNullValueCount += blockStatistics.getNonNullValueCount();
        rawBytesEstimate += blockStatistics.getRawBytesEstimate();
        // indexes are only produced for non-null positions
        rowGroupOffset += blockStatistics.getNonNullValueCount();
        totalValueCount += block.getPositionCount();
        // spill the scratch index buffer into size-classed segments once it fills up
        if (rowGroupOffset >= EXPECTED_ROW_GROUP_SEGMENT_SIZE) {
            rowGroupBuilder.addIndexes(getDictionaryEntries() - 1, rowGroupIndexes, rowGroupOffset);
            rowGroupOffset = 0;
        }
        return blockStatistics.getRawBytesIncludingNulls();
    }
    /**
     * Seals the current row group: flushes remaining scratch indexes into the
     * segment builder, snapshots column statistics, and accounts retained memory
     * (statistics only, or statistics + index segments, depending on options).
     */
    @Override
    public Map<Integer, ColumnStatistics> finishRowGroup()
    {
        checkState(!closed);
        checkState(inRowGroup);
        inRowGroup = false;
        if (directEncoded) {
            return getDirectColumnWriter().finishRowGroup();
        }
        ColumnStatistics statistics = createColumnStatistics();
        rowGroupBuilder.addIndexes(getDictionaryEntries() - 1, rowGroupIndexes, rowGroupOffset);
        DictionaryRowGroup rowGroup = rowGroupBuilder.build(statistics);
        rowGroups.add(rowGroup);
        if (columnWriterOptions.isIgnoreDictionaryRowGroupSizes()) {
            rowGroupRetainedSizeInBytes += rowGroup.getColumnStatistics().getRetainedSizeInBytes();
        }
        else {
            rowGroupRetainedSizeInBytes += rowGroup.getShallowRetainedSizeInBytes();
            rowGroupRetainedSizeInBytes += rowGroupBuilder.getIndexRetainedBytes();
        }
        rowGroupOffset = 0;
        rowGroupBuilder.reset();
        return ImmutableMap.of(column, statistics);
    }

    /** Closes the writer; in dictionary mode this serializes buffered data into the output streams. */
    @Override
    public void close()
    {
        checkState(!closed);
        checkState(!inRowGroup);
        closed = true;
        if (directEncoded) {
            getDirectColumnWriter().close();
        }
        else {
            bufferOutputData();
        }
    }

    /** Stripe-level statistics, merged from all row groups (or delegated to the direct writer). */
    @Override
    public Map<Integer, ColumnStatistics> getColumnStripeStatistics()
    {
        checkState(closed);
        if (directEncoded) {
            return getDirectColumnWriter().getColumnStripeStatistics();
        }
        return ImmutableMap.of(column, ColumnStatistics.mergeColumnStatistics(rowGroups.stream()
                .map(DictionaryRowGroup::getColumnStatistics)
                .collect(toList())));
    }
    /**
     * Serializes the dictionary and every row group's indexes into the data
     * stream, recording a checkpoint per row group so row-level seeking works.
     * If writeDictionary() sorted the dictionary, indexes are remapped on the
     * way out. Called exactly once, from close().
     */
    private void bufferOutputData()
    {
        checkState(closed);
        checkState(!directEncoded);
        Optional<int[]> originalDictionaryToSortedIndex = writeDictionary();
        if (!rowGroups.isEmpty()) {
            dataStream.recordCheckpoint();
        }
        for (DictionaryRowGroup rowGroup : rowGroups) {
            // The row group indexes may be split between byte, short and int segments. They need to be processed in
            // byte, short and int order. If they are processed in different order, it will result in data corruption.
            byte[][] byteSegments = rowGroup.getByteSegments();
            if (byteSegments != null) {
                for (byte[] byteIndexes : byteSegments) {
                    writeDataStreams(
                            byteIndexes.length,
                            byteIndexes,
                            originalDictionaryToSortedIndex,
                            dataStream);
                }
            }
            short[][] shortSegments = rowGroup.getShortSegments();
            if (shortSegments != null) {
                for (short[] shortIndexes : shortSegments) {
                    writeDataStreams(
                            shortIndexes.length,
                            shortIndexes,
                            originalDictionaryToSortedIndex,
                            dataStream);
                }
            }
            int[][] intSegments = rowGroup.getIntSegments();
            if (intSegments != null) {
                for (int[] integerIndexes : intSegments) {
                    writeDataStreams(
                            integerIndexes.length,
                            integerIndexes,
                            originalDictionaryToSortedIndex,
                            dataStream);
                }
            }
            // checkpoint marks the boundary after each row group
            dataStream.recordCheckpoint();
        }
        closeDictionary();
        dataStream.close();
        presentStream.close();
    }
    /**
     * Builds the ROW_INDEX stream from per-row-group statistics and the
     * present/data stream checkpoints (optionally prepending external checkpoints).
     */
    @Override
    public List<StreamDataOutput> getIndexStreams(Optional<List<? extends StreamCheckpoint>> prependCheckpoints)
            throws IOException
    {
        checkState(closed);
        if (directEncoded) {
            return getDirectColumnWriter().getIndexStreams(prependCheckpoints);
        }
        boolean compressed = columnWriterOptions.getCompressionKind() != NONE;
        List<ColumnStatistics> rowGroupColumnStatistics = rowGroups.stream().map(DictionaryRowGroup::getColumnStatistics).collect(toList());
        List<RowGroupIndex> rowGroupIndexes = buildRowGroupIndexes(compressed, rowGroupColumnStatistics, prependCheckpoints, presentStream, dataStream);
        Slice slice = compressedMetadataWriter.writeRowIndexes(rowGroupIndexes);
        Stream stream = new Stream(column, sequence, StreamKind.ROW_INDEX, slice.length(), false);
        return ImmutableList.of(new StreamDataOutput(slice, stream));
    }

    /** Returns present, data, and dictionary streams in that order for the stripe. */
    @Override
    public List<StreamDataOutput> getDataStreams()
    {
        checkState(closed);
        if (directEncoded) {
            return getDirectColumnWriter().getDataStreams();
        }
        // actually write data
        ImmutableList.Builder<StreamDataOutput> outputDataStreams = ImmutableList.builder();
        presentStream.getStreamDataOutput(column, sequence).ifPresent(outputDataStreams::add);
        outputDataStreams.add(dataStream.getStreamDataOutput(column, sequence));
        outputDataStreams.addAll(getDictionaryStreams(column, sequence));
        return outputDataStreams.build();
    }

    /** Expected output size if the stripe were flushed now: index + dictionary + null bitmap bytes. */
    @Override
    public long getBufferedBytes()
    {
        checkState(!closed);
        if (directEncoded) {
            return getDirectColumnWriter().getBufferedBytes();
        }
        // for dictionary columns we report the data we expect to write to the output stream
        long numberOfNullBytes = getNullValueCount() / NUMBER_OF_NULLS_PER_BYTE;
        return getIndexBytes() + getDictionaryBytes() + numberOfNullBytes;
    }
    /** Exposes row-group memory accounting for tests. */
    @VisibleForTesting
    public long getRowGroupRetainedSizeInBytes()
    {
        return rowGroupRetainedSizeInBytes;
    }

    /** Total heap retained by this writer: scratch indexes, segment builder, streams, dictionary and row groups. */
    @Override
    public long getRetainedBytes()
    {
        return sizeOf(rowGroupIndexes) +
                rowGroupBuilder.getRetainedSizeInBytes() +
                dataStream.getRetainedBytes() +
                presentStream.getRetainedBytes() +
                getRetainedDictionaryBytes() +
                rowGroupRetainedSizeInBytes;
    }

    /** Drops all sealed row groups and in-progress index state. */
    private void resetRowGroups()
    {
        rowGroups.clear();
        rowGroupBuilder.reset();
        rowGroupRetainedSizeInBytes = 0;
        rowGroupOffset = 0;
    }
    /**
     * Prepares the writer for the next stripe. When the previous stripe fell
     * back to direct encoding, direct mode is preserved for up to
     * preserveDirectEncodingStripeCount subsequent stripes before dictionary
     * encoding is attempted again.
     */
    @Override
    public void reset()
    {
        checkState(closed);
        closed = false;
        presentStream.reset();
        // Dictionary data is held in memory, until the Stripe is flushed. OrcOutputStream maintains the
        // allocated buffer and reuses the buffer for writing data. For Direct writer, OrcOutputStream
        // behavior avoids the reallocation of the buffers by maintaining the pool. For Dictionary writer,
        // OrcOutputStream doubles the memory requirement in most cases (one for dictionary and one for
        // OrcOutputBuffer). To avoid this, the streams are reallocated for every stripe.
        dataStream = createDataOutputStream(columnWriterOptions, dwrfEncryptor, orcEncoding);
        resetDictionary();
        resetRowGroups();
        rawBytesEstimate = 0;
        totalValueCount = 0;
        totalNonNullValueCount = 0;
        if (directEncoded) {
            getDirectColumnWriter().reset();
            if (preserveDirectEncodingStripeIndex >= preserveDirectEncodingStripeCount) {
                directEncoded = false;
                preserveDirectEncodingStripeIndex = 0;
            }
            else {
                preserveDirectEncodingStripeIndex++;
            }
        }
    }
    /**
     * Immutable per-block accounting returned by addBlockToDictionary():
     * how many non-null values were added and their raw sizes, with and
     * without the null positions included.
     */
    static class BlockStatistics
    {
        private final int nonNullValueCount;
        private final long rawBytesEstimate;
        private final long rawBytesIncludingNulls;

        public BlockStatistics(int nonNullValueCount, long rawBytesEstimate, long rawBytesIncludingNulls)
        {
            this.nonNullValueCount = nonNullValueCount;
            this.rawBytesEstimate = rawBytesEstimate;
            this.rawBytesIncludingNulls = rawBytesIncludingNulls;
        }

        /** Number of non-null values the block contributed to the dictionary. */
        public int getNonNullValueCount()
        {
            return nonNullValueCount;
        }

        /** Raw byte estimate of the non-null values only. */
        public long getRawBytesEstimate()
        {
            return rawBytesEstimate;
        }

        /** Raw byte estimate including null positions. */
        public long getRawBytesIncludingNulls()
        {
            return rawBytesIncludingNulls;
        }
    }
}
| 9,818
|
https://github.com/pundiramit/external-mesa3d/blob/master/src/mesa/main/vdpau.c
|
Github Open Source
|
Open Source
|
MIT
| null |
external-mesa3d
|
pundiramit
|
C
|
Code
| 1,137
| 4,358
|
/**************************************************************************
*
* Copyright 2013 Advanced Micro Devices, Inc.
* All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
**************************************************************************/
/*
* Authors:
* Christian König <christian.koenig@amd.com>
*
*/
#include <stdbool.h>
#include "util/hash_table.h"
#include "util/set.h"
#include "util/u_memory.h"
#include "context.h"
#include "glformats.h"
#include "texobj.h"
#include "teximage.h"
#include "vdpau.h"
#define MAX_TEXTURES 4

/* Bookkeeping for one registered VDPAU surface.  A video surface is backed
 * by four textures, an output surface by one (see the numTextureNames checks
 * in the register entry points and the surf->output ? 1 : 4 selection). */
struct vdp_surface
{
   GLenum target;                                /* GL_TEXTURE_2D or GL_TEXTURE_RECTANGLE */
   struct gl_texture_object *textures[MAX_TEXTURES];
   GLenum access, state;                         /* access mode; REGISTERED vs MAPPED state */
   GLboolean output;                             /* true for output surfaces (one texture) */
   const GLvoid *vdpSurface;                     /* opaque VDPAU surface handle */
};
/**
 * Implementation of glVDPAUInitNV: records the VDPAU device and proc-address
 * callback on the context and creates the set tracking registered surfaces.
 * Errors if either pointer is NULL or if interop was already initialized.
 */
void GLAPIENTRY
_mesa_VDPAUInitNV(const GLvoid *vdpDevice, const GLvoid *getProcAddress)
{
   GET_CURRENT_CONTEXT(ctx);

   if (!vdpDevice) {
      _mesa_error(ctx, GL_INVALID_VALUE, "vdpDevice");
      return;
   }

   if (!getProcAddress) {
      _mesa_error(ctx, GL_INVALID_VALUE, "getProcAddress");
      return;
   }

   /* double initialization is an error */
   if (ctx->vdpDevice || ctx->vdpGetProcAddress || ctx->vdpSurfaces) {
      _mesa_error(ctx, GL_INVALID_OPERATION, "VDPAUInitNV");
      return;
   }

   ctx->vdpDevice = vdpDevice;
   ctx->vdpGetProcAddress = getProcAddress;
   ctx->vdpSurfaces = _mesa_set_create(NULL, _mesa_hash_pointer,
                                       _mesa_key_pointer_equal);
}
/* Destructor callback passed to _mesa_set_destroy() from VDPAUFiniNV():
 * unmaps the surface if still mapped, removes it from the set and frees it. */
static void
unregister_surface(struct set_entry *entry)
{
   struct vdp_surface *surf = (struct vdp_surface *)entry->key;
   GET_CURRENT_CONTEXT(ctx);

   if (surf->state == GL_SURFACE_MAPPED_NV) {
      GLintptr surfaces[] = { (GLintptr)surf };
      _mesa_VDPAUUnmapSurfacesNV(1, surfaces);
   }

   _mesa_set_remove(ctx->vdpSurfaces, entry);
   free(surf);
}
/**
 * Implementation of glVDPAUFiniNV: tears down interop state, unregistering
 * (and if needed unmapping) every surviving surface via unregister_surface().
 */
void GLAPIENTRY
_mesa_VDPAUFiniNV(void)
{
   GET_CURRENT_CONTEXT(ctx);

   if (!ctx->vdpDevice || !ctx->vdpGetProcAddress || !ctx->vdpSurfaces) {
      _mesa_error(ctx, GL_INVALID_OPERATION, "VDPAUFiniNV");
      return;
   }

   _mesa_set_destroy(ctx->vdpSurfaces, unregister_surface);

   ctx->vdpDevice = 0;
   ctx->vdpGetProcAddress = 0;
   ctx->vdpSurfaces = NULL;
}
/**
 * Shared worker for glVDPAURegister{Video,Output}SurfaceNV.
 *
 * Validates the target, allocates a vdp_surface, binds each named texture to
 * it (forcing the texture target and marking it immutable so its storage
 * cannot be respecified while registered), and adds the surface to the
 * context's registry.  Returns the surface handle, or 0 on error.
 */
static GLintptr
register_surface(struct gl_context *ctx, GLboolean isOutput,
                 const GLvoid *vdpSurface, GLenum target,
                 GLsizei numTextureNames, const GLuint *textureNames)
{
   struct vdp_surface *surf;
   int i;

   if (!ctx->vdpDevice || !ctx->vdpGetProcAddress || !ctx->vdpSurfaces) {
      _mesa_error(ctx, GL_INVALID_OPERATION, "VDPAURegisterSurfaceNV");
      return (GLintptr)NULL;
   }

   if (target != GL_TEXTURE_2D && target != GL_TEXTURE_RECTANGLE) {
      _mesa_error(ctx, GL_INVALID_ENUM, "VDPAURegisterSurfaceNV");
      return (GLintptr)NULL;
   }

   if (target == GL_TEXTURE_RECTANGLE && !ctx->Extensions.NV_texture_rectangle) {
      _mesa_error(ctx, GL_INVALID_ENUM, "VDPAURegisterSurfaceNV");
      return (GLintptr)NULL;
   }

   surf = CALLOC_STRUCT( vdp_surface );
   if (surf == NULL) {
      _mesa_error_no_memory("VDPAURegisterSurfaceNV");
      return (GLintptr)NULL;
   }

   surf->vdpSurface = vdpSurface;
   surf->target = target;
   surf->access = GL_READ_WRITE;
   surf->state = GL_SURFACE_REGISTERED_NV;
   surf->output = isOutput;
   for (i = 0; i < numTextureNames; ++i) {
      struct gl_texture_object *tex;

      tex = _mesa_lookup_texture_err(ctx, textureNames[i],
                                     "VDPAURegisterSurfaceNV");
      if (tex == NULL) {
         free(surf);
         return (GLintptr)NULL;
      }

      _mesa_lock_texture(ctx, tex);

      if (tex->Immutable) {
         _mesa_unlock_texture(ctx, tex);
         free(surf);
         _mesa_error(ctx, GL_INVALID_OPERATION,
                     "VDPAURegisterSurfaceNV(texture is immutable)");
         return (GLintptr)NULL;
      }

      if (tex->Target == 0) {
         /* texture not yet bound to a target: adopt the requested one */
         tex->Target = target;
         tex->TargetIndex = _mesa_tex_target_to_index(ctx, target);
      } else if (tex->Target != target) {
         _mesa_unlock_texture(ctx, tex);
         free(surf);
         _mesa_error(ctx, GL_INVALID_OPERATION,
                     "VDPAURegisterSurfaceNV(target mismatch)");
         return (GLintptr)NULL;
      }

      /* This will disallow respecifying the storage. */
      tex->Immutable = GL_TRUE;
      _mesa_unlock_texture(ctx, tex);

      _mesa_reference_texobj(&surf->textures[i], tex);
   }

   _mesa_set_add(ctx->vdpSurfaces, surf);

   return (GLintptr)surf;
}
/**
 * Implementation of glVDPAURegisterVideoSurfaceNV: a video surface is
 * backed by exactly four textures.
 */
GLintptr GLAPIENTRY
_mesa_VDPAURegisterVideoSurfaceNV(const GLvoid *vdpSurface, GLenum target,
                                  GLsizei numTextureNames,
                                  const GLuint *textureNames)
{
   GET_CURRENT_CONTEXT(ctx);

   if (numTextureNames != 4) {
      _mesa_error(ctx, GL_INVALID_VALUE, "VDPAURegisterVideoSurfaceNV");
      return (GLintptr)NULL;
   }

   return register_surface(ctx, false, vdpSurface, target,
                           numTextureNames, textureNames);
}
/**
 * Implementation of glVDPAURegisterOutputSurfaceNV: an output surface is
 * backed by exactly one texture.
 */
GLintptr GLAPIENTRY
_mesa_VDPAURegisterOutputSurfaceNV(const GLvoid *vdpSurface, GLenum target,
                                   GLsizei numTextureNames,
                                   const GLuint *textureNames)
{
   GET_CURRENT_CONTEXT(ctx);

   if (numTextureNames != 1) {
      /* Fixed copy/paste bug: the error previously blamed
       * VDPAURegisterVideoSurfaceNV, the wrong entry point. */
      _mesa_error(ctx, GL_INVALID_VALUE, "VDPAURegisterOutputSurfaceNV");
      return (GLintptr)NULL;
   }

   return register_surface(ctx, true, vdpSurface, target,
                           numTextureNames, textureNames);
}
/**
 * Implementation of glVDPAUIsSurfaceNV: reports whether the handle names a
 * surface currently registered with this context.
 */
GLboolean GLAPIENTRY
_mesa_VDPAUIsSurfaceNV(GLintptr surface)
{
   struct vdp_surface *surf = (struct vdp_surface *)surface;
   GET_CURRENT_CONTEXT(ctx);

   if (!ctx->vdpDevice || !ctx->vdpGetProcAddress || !ctx->vdpSurfaces) {
      _mesa_error(ctx, GL_INVALID_OPERATION, "VDPAUIsSurfaceNV");
      return false;
   }

   /* membership test collapsed to a single expression */
   return _mesa_set_search(ctx->vdpSurfaces, surf) != NULL;
}
/**
 * Implementation of glVDPAUUnregisterSurfaceNV: releases texture references,
 * clears the Immutable flag set at registration, and frees the surface.
 */
void GLAPIENTRY
_mesa_VDPAUUnregisterSurfaceNV(GLintptr surface)
{
   struct vdp_surface *surf = (struct vdp_surface *)surface;
   struct set_entry *entry;
   int i;
   GET_CURRENT_CONTEXT(ctx);

   if (!ctx->vdpDevice || !ctx->vdpGetProcAddress || !ctx->vdpSurfaces) {
      _mesa_error(ctx, GL_INVALID_OPERATION, "VDPAUUnregisterSurfaceNV");
      return;
   }

   /* according to the spec it's ok when this is zero */
   if (surface == 0)
      return;

   entry = _mesa_set_search(ctx->vdpSurfaces, surf);
   if (!entry) {
      _mesa_error(ctx, GL_INVALID_VALUE, "VDPAUUnregisterSurfaceNV");
      return;
   }

   for (i = 0; i < MAX_TEXTURES; i++) {
      if (surf->textures[i]) {
         surf->textures[i]->Immutable = GL_FALSE;
         _mesa_reference_texobj(&surf->textures[i], NULL);
      }
   }

   _mesa_set_remove(ctx->vdpSurfaces, entry);
   free(surf);
}
/**
 * Implementation of glVDPAUGetSurfaceivNV: queries surface state.  The only
 * supported pname is GL_SURFACE_STATE_NV, which yields a single value.
 */
void GLAPIENTRY
_mesa_VDPAUGetSurfaceivNV(GLintptr surface, GLenum pname, GLsizei bufSize,
                          GLsizei *length, GLint *values)
{
   struct vdp_surface *surf = (struct vdp_surface *)surface;
   GET_CURRENT_CONTEXT(ctx);

   if (!ctx->vdpDevice || !ctx->vdpGetProcAddress || !ctx->vdpSurfaces) {
      _mesa_error(ctx, GL_INVALID_OPERATION, "VDPAUGetSurfaceivNV");
      return;
   }

   if (!_mesa_set_search(ctx->vdpSurfaces, surf)) {
      _mesa_error(ctx, GL_INVALID_VALUE, "VDPAUGetSurfaceivNV");
      return;
   }

   if (pname != GL_SURFACE_STATE_NV) {
      _mesa_error(ctx, GL_INVALID_ENUM, "VDPAUGetSurfaceivNV");
      return;
   }

   if (bufSize < 1) {
      _mesa_error(ctx, GL_INVALID_VALUE, "VDPAUGetSurfaceivNV");
      return;
   }

   values[0] = surf->state;

   /* length is optional per the query convention */
   if (length != NULL)
      *length = 1;
}
/**
 * Implementation of glVDPAUSurfaceAccessNV: sets the access mode used at
 * map time.  Rejected while the surface is mapped.
 */
void GLAPIENTRY
_mesa_VDPAUSurfaceAccessNV(GLintptr surface, GLenum access)
{
   struct vdp_surface *surf = (struct vdp_surface *)surface;
   GET_CURRENT_CONTEXT(ctx);

   if (!ctx->vdpDevice || !ctx->vdpGetProcAddress || !ctx->vdpSurfaces) {
      _mesa_error(ctx, GL_INVALID_OPERATION, "VDPAUSurfaceAccessNV");
      return;
   }

   if (!_mesa_set_search(ctx->vdpSurfaces, surf)) {
      _mesa_error(ctx, GL_INVALID_VALUE, "VDPAUSurfaceAccessNV");
      return;
   }

   if (access != GL_READ_ONLY && access != GL_WRITE_ONLY &&
       access != GL_READ_WRITE) {
      _mesa_error(ctx, GL_INVALID_VALUE, "VDPAUSurfaceAccessNV");
      return;
   }

   if (surf->state == GL_SURFACE_MAPPED_NV) {
      _mesa_error(ctx, GL_INVALID_OPERATION, "VDPAUSurfaceAccessNV");
      return;
   }

   surf->access = access;
}
/**
 * Implementation of glVDPAUMapSurfacesNV.
 *
 * All surfaces are validated first (interop initialized, registered, not
 * already mapped) so the error checks are all-or-nothing; only then is each
 * surface's texture storage handed to the driver to back with VDPAU memory.
 */
void GLAPIENTRY
_mesa_VDPAUMapSurfacesNV(GLsizei numSurfaces, const GLintptr *surfaces)
{
   GET_CURRENT_CONTEXT(ctx);
   int i;

   if (!ctx->vdpDevice || !ctx->vdpGetProcAddress || !ctx->vdpSurfaces) {
      /* Fixed copy/paste bug: error previously named VDPAUUnmapSurfacesNV. */
      _mesa_error(ctx, GL_INVALID_OPERATION, "VDPAUMapSurfacesNV");
      return;
   }

   for (i = 0; i < numSurfaces; ++i) {
      struct vdp_surface *surf = (struct vdp_surface *)surfaces[i];

      if (!_mesa_set_search(ctx->vdpSurfaces, surf)) {
         /* Fixed copy/paste bug: error previously named VDPAUSurfaceAccessNV. */
         _mesa_error(ctx, GL_INVALID_VALUE, "VDPAUMapSurfacesNV");
         return;
      }

      if (surf->state == GL_SURFACE_MAPPED_NV) {
         /* Fixed copy/paste bug: error previously named VDPAUSurfaceAccessNV. */
         _mesa_error(ctx, GL_INVALID_OPERATION, "VDPAUMapSurfacesNV");
         return;
      }
   }

   for (i = 0; i < numSurfaces; ++i) {
      struct vdp_surface *surf = (struct vdp_surface *)surfaces[i];
      unsigned numTextureNames = surf->output ? 1 : 4;
      unsigned j;

      for (j = 0; j < numTextureNames; ++j) {
         struct gl_texture_object *tex = surf->textures[j];
         struct gl_texture_image *image;

         _mesa_lock_texture(ctx, tex);

         image = _mesa_get_tex_image(ctx, tex, surf->target, 0);
         if (!image) {
            _mesa_error(ctx, GL_OUT_OF_MEMORY, "VDPAUMapSurfacesNV");
            _mesa_unlock_texture(ctx, tex);
            return;
         }

         /* drop GL-owned storage before the driver attaches VDPAU storage */
         ctx->Driver.FreeTextureImageBuffer(ctx, image);

         ctx->Driver.VDPAUMapSurface(ctx, surf->target, surf->access,
                                     surf->output, tex, image,
                                     surf->vdpSurface, j);

         _mesa_unlock_texture(ctx, tex);
      }
      surf->state = GL_SURFACE_MAPPED_NV;
   }
}
/**
 * Implementation of glVDPAUUnmapSurfacesNV.
 *
 * Mirrors MapSurfacesNV: validate every surface up front (registered and
 * currently mapped), then return each texture's storage to the driver and
 * free the image buffer it was using.
 */
void GLAPIENTRY
_mesa_VDPAUUnmapSurfacesNV(GLsizei numSurfaces, const GLintptr *surfaces)
{
   GET_CURRENT_CONTEXT(ctx);
   int i;

   if (!ctx->vdpDevice || !ctx->vdpGetProcAddress || !ctx->vdpSurfaces) {
      _mesa_error(ctx, GL_INVALID_OPERATION, "VDPAUUnmapSurfacesNV");
      return;
   }

   for (i = 0; i < numSurfaces; ++i) {
      struct vdp_surface *surf = (struct vdp_surface *)surfaces[i];

      if (!_mesa_set_search(ctx->vdpSurfaces, surf)) {
         /* Fixed copy/paste bug: error previously named VDPAUSurfaceAccessNV. */
         _mesa_error(ctx, GL_INVALID_VALUE, "VDPAUUnmapSurfacesNV");
         return;
      }

      if (surf->state != GL_SURFACE_MAPPED_NV) {
         /* Fixed copy/paste bug: error previously named VDPAUSurfaceAccessNV. */
         _mesa_error(ctx, GL_INVALID_OPERATION, "VDPAUUnmapSurfacesNV");
         return;
      }
   }

   for (i = 0; i < numSurfaces; ++i) {
      struct vdp_surface *surf = (struct vdp_surface *)surfaces[i];
      unsigned numTextureNames = surf->output ? 1 : 4;
      unsigned j;

      for (j = 0; j < numTextureNames; ++j) {
         struct gl_texture_object *tex = surf->textures[j];
         struct gl_texture_image *image;

         _mesa_lock_texture(ctx, tex);

         image = _mesa_select_tex_image(tex, surf->target, 0);

         ctx->Driver.VDPAUUnmapSurface(ctx, surf->target, surf->access,
                                       surf->output, tex, image,
                                       surf->vdpSurface, j);

         if (image)
            ctx->Driver.FreeTextureImageBuffer(ctx, image);

         _mesa_unlock_texture(ctx, tex);
      }
      surf->state = GL_SURFACE_REGISTERED_NV;
   }
}
| 48,564
|
https://github.com/hoa1604/TheNews/blob/master/TheNewsWebsite/TheNewsWebsite/Controllers/CategariesAdminController.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
TheNews
|
hoa1604
|
C#
|
Code
| 360
| 1,215
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Rendering;
using Microsoft.EntityFrameworkCore;
using TheNewsWebsite.Models.TheNewsWebsite;
using Microsoft.AspNetCore.Authorization;
namespace TheNewsWebsite.Controllers
{
public class CategariesAdminController : Controller
{
private readonly TheNewsContext _context;
public CategariesAdminController(TheNewsContext context)
{
_context = context;
}
// GET: CategariesAdmin
public async Task<IActionResult> Index()
{
var list = from s in _context.Categaries where s.Status == true select s;
return View(list.ToList());
}
public async Task<IActionResult> ShowAll()
{
return View(await _context.Categaries.ToListAsync());
}
// GET: CategariesAdmin/Details/5
public async Task<IActionResult> Details(int? id)
{
if (id == null)
{
return NotFound();
}
var categary = await _context.Categaries
.SingleOrDefaultAsync(m => m.Id == id);
if (categary == null)
{
return NotFound();
}
return View(categary);
}
// GET: CategariesAdmin/Create
public IActionResult Create()
{
return View();
}
// POST: CategariesAdmin/Create
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public async Task<IActionResult> Create([Bind("Id,Name,Status")] Categary categary)
{
if (ModelState.IsValid)
{
_context.Add(categary);
await _context.SaveChangesAsync();
return RedirectToAction("Index");
}
return View(categary);
}
// GET: CategariesAdmin/Edit/5
public async Task<IActionResult> Edit(int? id)
{
if (id == null)
{
return NotFound();
}
var categary = await _context.Categaries.SingleOrDefaultAsync(m => m.Id == id);
if (categary == null)
{
return NotFound();
}
ViewBag.Status = categary.Status;
return View(categary);
}
// POST: CategariesAdmin/Edit/5
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public async Task<IActionResult> Edit(int id, [Bind("Id,Name,Status")] Categary categary)
{
if (id != categary.Id)
{
return NotFound();
}
if (ModelState.IsValid)
{
try
{
_context.Update(categary);
await _context.SaveChangesAsync();
}
catch (DbUpdateConcurrencyException)
{
if (!CategaryExists(categary.Id))
{
return NotFound();
}
else
{
throw;
}
}
return RedirectToAction("Index");
}
return View(categary);
}
// GET: CategariesAdmin/Delete/5
public async Task<IActionResult> Delete(int? id)
{
if (id == null)
{
return NotFound();
}
var categary = await _context.Categaries
.SingleOrDefaultAsync(m => m.Id == id);
if (categary == null)
{
return NotFound();
}
return View(categary);
}
// POST: CategariesAdmin/Delete/5
[HttpPost, ActionName("Delete")]
[ValidateAntiForgeryToken]
public async Task<IActionResult> DeleteConfirmed(int id)
{
var categary = await _context.Categaries.SingleOrDefaultAsync(m => m.Id == id);
categary.Status = false;
await _context.SaveChangesAsync();
return RedirectToAction("Index");
}
private bool CategaryExists(int id)
{
return _context.Categaries.Any(e => e.Id == id);
}
}
}
| 4,366
|
https://github.com/Infragistics/wpf-samples/blob/master/Applications/IGExtensions/IGExtensions.Framework/Tools/Randomizer.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,023
|
wpf-samples
|
Infragistics
|
C#
|
Code
| 94
| 216
|
using System.Threading;
// ReSharper disable CheckNamespace
namespace System
// ReSharper restore CheckNamespace
{
/// <summary>
/// Represents an utility for creating random generators
/// </summary>
public class Randomizer
{
protected static int SeedCounter = new Random().Next();
[ThreadStatic]
protected static Random Random;
public static Random Instance
{
get
{
if (Random == null)
{
var seed = Interlocked.Increment(ref SeedCounter);
Random = new Random(seed);
}
return Random;
}
}
public static Random Create()
{
Thread.Sleep(1); // creates one of 1000 random seeds
var seed = DateTime.Now.Millisecond;
var random = new Random(seed);
return random;
}
}
}
| 13,496
|
https://github.com/sridharsridha/Sorting/blob/master/src/include/cartesiantree_sort.hpp
|
Github Open Source
|
Open Source
|
MIT
| null |
Sorting
|
sridharsridha
|
C++
|
Code
| 14
| 77
|
//
// Created by Sridhar N on 28/06/20.
//
// NOTE(review): this header is currently an empty placeholder — the include
// guard opens and immediately closes without declaring anything. Confirm the
// cartesian-tree sort implementation is intended to be added here.

#ifndef SORTING_CARTESIANTREE_SORT_HPP
#define SORTING_CARTESIANTREE_SORT_HPP
#endif//SORTING_CARTESIANTREE_SORT_HPP
|
https://github.com/kubesphere/kubesphere/blob/master/vendor/github.com/open-policy-agent/opa/loader/filter/filter.go
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,023
|
kubesphere
|
kubesphere
|
Go
|
Code
| 13
| 35
|
// Package filter defines the callback type used by the loader to decide
// whether entries encountered while walking the file tree should be skipped.
package filter

import "io/fs"

// LoaderFilter is called for each file/directory seen during loading with its
// absolute path, file metadata, and depth from the load root. Returning true
// causes the entry to be filtered out. (Semantics of the return value are
// presumed from the loader's usage — confirm against the caller.)
type LoaderFilter func(abspath string, info fs.FileInfo, depth int) bool
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.