repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
joonashak/vyardage
|
server/src/middlewares/errorHandling.js
|
<filename>server/src/middlewares/errorHandling.js
/**
* Middleware to handle errors.
* Designed to work with this module's CustomErrors and objection's errors.
* The error object consumed by this middleware should include the fields statusCode
* (HTTP status code), type (concise error type for UI logic) and message (human info).
*/
/**
 * Express error-handling middleware.
 * Designed to work with this module's CustomErrors and objection's errors.
 * The error object consumed by this middleware should include the fields
 * statusCode (HTTP status code), type (concise error type for UI logic) and
 * message (human-readable info).
 *
 * @param {Error|null|undefined} error Error forwarded via next(error), if any.
 * @param {object} request  Express request (unused).
 * @param {object} response Express response the error payload is sent on.
 * @param {Function} next   Next middleware in the chain.
 */
export default (error, request, response, next) => {
  if (!error) {
    // Nothing to handle — hand off to the next middleware. The `return` is
    // the bug fix: without it execution fell through and destructured
    // `undefined` below, throwing a TypeError inside the error handler.
    return next();
  }
  // Respond with error message, using defaults if necessary.
  const {
    statusCode = 500,
    type = 'UnknownError',
    message = 'An unknown error occured.',
    table,
    column,
    constraint,
  } = error;
  // Merge objection's table/column/constraint details with any custom data
  // attached to the error so the UI gets one flat `data` object.
  const data = {
    ...error.data,
    table,
    column,
    constraint,
  };
  response.status(statusCode).send({ message, type, data });
};
|
majk1/netbeans-mmd-plugin
|
mind-map/mind-map-swing-panel/src/main/java/com/igormaznitsa/mindmap/swing/panel/ui/IconBlock.java
|
/*
* Copyright 2015-2018 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.igormaznitsa.mindmap.swing.panel.ui;
import com.igormaznitsa.mindmap.model.Extra;
import com.igormaznitsa.mindmap.model.ExtraFile;
import com.igormaznitsa.mindmap.model.ExtraLink;
import com.igormaznitsa.mindmap.model.Topic;
import com.igormaznitsa.mindmap.swing.panel.MindMapPanelConfig;
import com.igormaznitsa.mindmap.swing.panel.ui.gfx.MMGraphics;
import com.igormaznitsa.mindmap.swing.panel.utils.ScalableIcon;
import com.igormaznitsa.mindmap.swing.panel.utils.Utils;
import java.awt.geom.Rectangle2D;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
 * Renders the row of extra-attribute icons (file, link, note, topic jump)
 * attached to a {@link Topic} in the mind map panel, and maps pointer
 * coordinates back to the extra under the pointer.
 */
public class IconBlock {
  /** Current block bounds in panel coordinates. */
  private final Rectangle2D bounds = new Rectangle2D.Double();
  /** Topic whose extras are visualized. */
  private final Topic model;
  /** Scale factor copied from the panel config during {@link #updateSize}. */
  private double scale = 1.0d;
  /** True when the last {@link #updateSize} found at least one extra to draw. */
  private boolean contentPresented;
  /** Snapshot of the topic's extras taken by {@link #updateSize}; null until then. */
  private Extra<?>[] currentExtras = null;

  /**
   * Copy constructor.
   *
   * @param orig block to copy, must not be null
   */
  public IconBlock(@Nonnull final IconBlock orig) {
    this.bounds.setRect(orig.bounds);
    this.model = orig.model;
    this.scale = orig.scale;
    this.contentPresented = orig.contentPresented;
    // Clone so the copy does not share the snapshot array with the original.
    this.currentExtras = orig.currentExtras == null ? null : orig.currentExtras.clone();
  }

  /**
   * Creates an empty block for a topic; bounds stay zero until updateSize().
   *
   * @param model topic to visualize, must not be null
   */
  public IconBlock(@Nonnull final Topic model) {
    this.model = model;
  }

  /** Moves the block to (x, y) without changing its size. */
  public void setCoordOffset(final double x, final double y) {
    this.bounds.setRect(x, y, this.bounds.getWidth(), this.bounds.getHeight());
  }

  /**
   * Recalculates the block size from the model's extras and the configured
   * scale, refreshing the internal snapshot of extras. Resets the origin to (0,0).
   *
   * @param gfx graphics context (unused here but part of the layout protocol)
   * @param cfg panel configuration supplying the scale factor
   */
  public void updateSize(@Nonnull final MMGraphics gfx, @Nonnull final MindMapPanelConfig cfg) {
    final int numberOfIcons = this.model.getNumberOfExtras();
    this.scale = cfg.getScale();
    if (numberOfIcons == 0) {
      this.bounds.setRect(0d, 0d, 0d, 0d);
      this.contentPresented = false;
    } else {
      final double scaledIconWidth = ScalableIcon.BASE_WIDTH * this.scale;
      final double scaledIconHeight = ScalableIcon.BASE_HEIGHT * this.scale;
      // Icons are laid out in a single horizontal row.
      this.bounds.setRect(0d, 0d, scaledIconWidth * numberOfIcons, scaledIconHeight);
      this.contentPresented = true;
      this.currentExtras = new Extra<?>[numberOfIcons];
      int index = 0;
      for (final Extra<?> e : this.model.getExtras().values()) {
        this.currentExtras[index++] = e;
      }
    }
  }

  /** @return true when a snapshot exists and the last layout found icons to draw */
  public boolean hasContent() {
    return this.currentExtras != null && this.contentPresented;
  }

  /**
   * Paints one icon per extra, left to right, at the current bounds.
   * Icons are skipped entirely when the scaled width collapses below 1px.
   * NOTE(review): iterates {@code currentExtras} whenever the model reports
   * extras, so it assumes {@link #updateSize} ran first — confirm callers.
   */
  public void paint(@Nonnull final MMGraphics gfx) {
    final int numberOfIcons = this.model.getNumberOfExtras();
    if (numberOfIcons != 0) {
      double offsetX = this.bounds.getX();
      final int offsetY = (int) Math.round(this.bounds.getY());
      final double scaledIconWidth = ScalableIcon.BASE_WIDTH * this.scale;
      for (final Extra<?> e : this.currentExtras) {
        final ScalableIcon ico;
        switch (e.getType()) {
          case FILE:
            ico = findIconForFileType((ExtraFile) e);
            break;
          case LINK:
            // mailto links get a dedicated e-mail icon.
            final String uri = e.getAsString();
            ico = uri.startsWith("mailto:") ? ScalableIcon.LINK_EMAIL : ScalableIcon.LINK;
            break;
          case NOTE:
            ico = ScalableIcon.TEXT;
            break;
          case TOPIC:
            ico = ScalableIcon.TOPIC;
            break;
          default:
            throw new Error("Unexpected extras"); //NOI18N
        }
        if (scaledIconWidth >= 1.0d) {
          gfx.drawImage(ico.getImage(this.scale), (int) Math.round(offsetX), offsetY);
          offsetX += scaledIconWidth;
        }
      }
    }
  }

  /**
   * Selects the icon for a file extra: MMD files, PlantUML files and plain
   * files each have a variant; absolute paths get the "warning" variant.
   *
   * @param theFileLink file extra to classify, must not be null
   * @return matching icon, never null
   */
  @Nonnull
  public ScalableIcon findIconForFileType(@Nonnull final ExtraFile theFileLink) {
    final ScalableIcon result;
    if (theFileLink.isMMDFile()) {
      result = theFileLink.isAbsolute() ? ScalableIcon.FILE_MMD_WARN : ScalableIcon.FILE_MMD;
    } else if (Utils.isPlantUmlFileExtension(theFileLink.getLCFileExtension())) {
      result = theFileLink.isAbsolute() ? ScalableIcon.FILE_PLANTUML_WARN : ScalableIcon.FILE_PLANTUML;
    } else {
      result = theFileLink.isAbsolute() ? ScalableIcon.FILE_WARN : ScalableIcon.FILE;
    }
    return result;
  }

  /**
   * Hit-tests a panel coordinate against the icon row.
   *
   * @param x panel x coordinate
   * @param y panel y coordinate
   * @return the extra whose icon contains the point, or null when none does
   */
  @Nullable
  public Extra<?> findExtraForPoint(final double x, final double y) {
    Extra<?> result = null;
    if (this.hasContent() && this.bounds.contains(x, y)) {
      // Icons are equal-width, so the index is a simple division.
      final double iconWidth = this.scale * ScalableIcon.BASE_WIDTH;
      final int index = (int) ((x - this.bounds.getX()) / iconWidth);
      result = index >= 0 && index < this.currentExtras.length ? this.currentExtras[index] : null;
    }
    return result;
  }

  /** @return live bounds rectangle (mutable; callers share the instance) */
  @Nonnull
  public Rectangle2D getBounds() {
    return this.bounds;
  }
}
|
mosburn/turbot_tuils
|
examples/find_ec2_instance.py
|
#!/usr/bin/env python
import turbotutils.cluster
import turbotutils.account
import boto3
import argparse
import sys
def main(akey, skey, token, instance_id):
    """Probe one AWS account for an EC2 instance.

    Exits the whole process with status 0 when the instance is found;
    returns silently when it is not (so the caller can try the next account).

    :param akey: AWS access key id for the account being checked
    :param skey: AWS secret access key
    :param token: AWS session token
    :param instance_id: EC2 instance id to look for
    """
    session = boto3.Session(aws_access_key_id=akey, aws_secret_access_key=skey, aws_session_token=token)
    client = session.client('ec2', region_name='us-east-1')
    try:
        client.describe_instances(InstanceIds=[instance_id])
    except Exception:
        # Instance is not in this account (or the lookup failed): keep searching.
        # Narrowed from a bare `except:` which also swallowed the SystemExit
        # raised by sys.exit(0) below, so the script never stopped on a match.
        return
    print("Found it ")
    sys.exit(0)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Finds which account an ec2 instance exists in')
    parser.add_argument('instance', help='the instance id you wish to find')
    args = parser.parse_args()
    # Set to False if you do not have a valid certificate for your Turbot Host
    turbot_host_certificate_verification = True
    # Set to your Turbot Host URL
    turbot_host = turbotutils.get_turbot_host()
    turbot_user_id = turbotutils.get_turbot_user()
    # Get the access and secret key pairs
    (turbot_api_access_key, turbot_api_secret_key) = turbotutils.get_turbot_access_keys()
    # Enumerate every account id known to the Turbot cluster.
    accounts = turbotutils.cluster.get_turbot_account_ids(turbot_api_access_key, turbot_api_secret_key, turbot_host_certificate_verification, turbot_host)
    for account in accounts:
        turbot_account = account
        print("Checking %s" % turbot_account)
        # Federate temporary AWS credentials for this account, then probe it.
        # main() terminates the process when the instance is found, so falling
        # out of this loop means the instance was not located in any account.
        (akey, skey, token) = turbotutils.account.get_aws_access_key(turbot_api_access_key, turbot_api_secret_key, turbot_host_certificate_verification, turbot_host, turbot_account, turbot_user_id)
        main(akey,skey,token, instance_id=args.instance)
|
rxzz0/QountersMinus-1.19.0
|
include/Qounters/PBQounter.hpp
|
#pragma once
#include "util/logger.hpp"
#include "util/format.hpp"
#include "util/note_count.hpp"
#include "Qounter.hpp"
#include "QounterRegistry.hpp"
#include "custom-types/shared/macros.hpp"
#include "questui/shared/BeatSaberUI.hpp"
#include "UnityEngine/Color.hpp"
#include "GlobalNamespace/ScoreModel.hpp"
#include "GlobalNamespace/ScoreUIController.hpp"
#include "GlobalNamespace/GameplayModifiersModelSO.hpp"
#include "GlobalNamespace/PlayerLevelStatsData.hpp"
#include "GlobalNamespace/GameEnergyCounter.hpp"
#include "UnityEngine/Resources.hpp"
namespace QountersMinus {
    // Display mode for the personal-best counter.
    enum class PBQounterMode {
        Absolute,
        Relative
    };
    // Number of PBQounterMode values; kept in sync with the enum by hand.
    // NOTE(review): `static` at namespace scope in a header gives every
    // translation unit its own copy of these tables — confirm this is intended
    // (C++17 `inline` would share one instance).
    static int PBQounterModeCount = 2;
    // enum value -> display name (e.g. for settings UI).
    static std::unordered_map<int, std::string> PBQounterModeNames = {
        {static_cast<int>(PBQounterMode::Absolute), "Absolute"},
        {static_cast<int>(PBQounterMode::Relative), "Relative"}
    };
    // display name -> enum value (reverse lookup of the table above).
    static std::unordered_map<std::string, int> PBQounterModeLookup = {
        {"Absolute", static_cast<int>(PBQounterMode::Absolute)},
        {"Relative", static_cast<int>(PBQounterMode::Relative)}
    };
}
// custom-types class declaration: PBQounter extends QountersMinus::Qounter.
// Static members hold the user's configured settings; DECLARE_INSTANCE_* fields
// hold per-run state. Comments are kept outside the macro invocation so the
// argument list passed to DECLARE_CLASS_CUSTOM stays byte-identical.
// NOTE(review): semantics of individual settings (e.g. UnderScore,
// HideFirstScore) are not visible in this header — see the .cpp for behavior.
DECLARE_CLASS_CUSTOM(QountersMinus::Qounters, PBQounter, QountersMinus::Qounter,
    static bool Enabled;
    static int Position;
    static float Distance;
    static int Mode;
    static UnityEngine::Color BetterColor;
    static UnityEngine::Color DefaultColor;
    static int DecimalPrecision;
    static int TextSize;
    static bool UnderScore;
    static bool HideFirstScore;
    DECLARE_INSTANCE_FIELD(TMPro::TextMeshProUGUI*, pbText);
    DECLARE_INSTANCE_FIELD_DEFAULT(int, maxPossibleScore, 0);
    DECLARE_INSTANCE_FIELD_DEFAULT(int, highScore, 0);
    DECLARE_STATIC_METHOD(void, Register);
    DECLARE_INSTANCE_METHOD(void, Start);
    DECLARE_INSTANCE_METHOD(void, OnScoreUpdated, int modifiedScore);
    DECLARE_INSTANCE_METHOD(void, SetPersonalBest, float ratioOfMaxScore);
)
|
achapero/laravel-new
|
resources/js/components/views/private/admins/PageBoarding/Clearent/PageBoardingClearentBoardingProcessSteps/StepBankAccountAddBank.js
|
import {
Button,
Card,
Checkbox,
Col,
Divider,
Form,
Input,
InputNumber,
notification,
Radio,
Row,
Space
} from "antd";
import React, { useEffect, useState } from "react";
import useAxiosQuery from "../../../../../../providers/useAxiosQuery";
import * as htmlToImage from "html-to-image";
import { jsPDF } from "jspdf";
import FileSaver from "file-saver";
const StepBankAccountAddBank = ({
merchantNumber,
showAddBank,
setShowAddBank,
selectedBankAccount,
setSelectedBankAccount
}) => {
const [form] = Form.useForm();
const {
mutate: mutateSaveBankToDB,
isLoading: isLoadingMutateSaveBankToDB
} = useAxiosQuery("POST", "api/v1/clearent/saveBankDB");
const saveBankToDB = () => {
let data = form.getFieldsValue();
data["merchantNumber"] = merchantNumber;
if (data.bankAccountTypeID == 1 || data.bankAccountTypeID == 2) {
data["isNameSameAsLegalOrDBAName"] = true;
} else {
data["isNameSameAsLegalOrDBAName"] = false;
}
let _data = {
data: data,
merchantNumber: merchantNumber
};
console.log("data", _data);
mutateSaveBankToDB(_data, {
onSuccess: res => {
console.log(res);
if (res.success) {
notification.success({
message: "Bank Successfully Saved"
});
}
}
});
};
const {
mutate: mutateGetSavedBankDB,
isLoading: isLoadingGetSavedBankDB
} = useAxiosQuery("POST", `api/v1/clearent/getBankDB`);
const [merchantData, setMerchantData] = useState();
const [taxpayerData, setTaxpayer] = useState();
// const [bankDBData, setbankDBData] = useState();
useEffect(() => {
mutateGetSavedBankDB(
{ merchantNumber },
{
onSuccess: res => {
console.log("mutateGetSavedBankDB", res);
if (selectedBankAccount) {
console.log("selectedBankAccount", selectedBankAccount);
setBankAccountDetails({ ...selectedBankAccount });
form.setFieldsValue(selectedBankAccount);
} else {
if (res.saved) {
if (res.saved.bank_account) {
form.setFieldsValue(
JSON.parse(res.saved.bank_account)
);
setBankAccountDetails(
JSON.parse(res.saved.bank_account)
);
}
}
}
// setbankDBData(JSON.parse(res.saved.bank_account));
setMerchantData(JSON.parse(res.merchant.merchant));
setTaxpayer(JSON.parse(res.merchant.tax_payer));
}
}
);
return () => {};
}, []);
useEffect(() => {
console.log("bankAccountDetails", bankAccountDetails);
return () => {};
}, [bankAccountDetails]);
const {
mutate: mutateCreateBank,
isLoading: isLoadingMutateCreateBank
} = useAxiosQuery(
"POST_FILE",
"api/v1/clearent/postBank",
`clearent_boarding_banks_${merchantNumber}`
);
const handleSubmitCreateBank = (e, withUpload = false) => {
if (e) {
e.preventDefault();
}
let data = { ...bankAccountDetails };
data["merchantNumber"] = merchantNumber;
if (data.bankAccountTypeID == 1 || data.bankAccountTypeID == 2) {
data["isNameSameAsLegalOrDBAName"] = true;
} else {
data["isNameSameAsLegalOrDBAName"] = false;
}
console.log("data", data);
htmlToImage
.toPng(document.getElementById("bankCheck"), {
quality: 0.95
})
.then(function(dataUrl) {
const pdf = new jsPDF();
pdf.addImage(dataUrl, "PNG", 0, 0);
// pdf.save("test.png");
let BankCheck = pdf.output("blob");
let formData = new FormData();
Object.keys(data).map((val, key) => {
formData.append(val, data[val]);
});
formData.append("voidedCheck", BankCheck);
console.log("formData", formData);
mutateCreateBank(formData, {
onSuccess: res => {
if (res.success) {
if (res.data.errors) {
if (res.data.errors.length > 0) {
res.data.errors.map((error, key) => {
notification.error({
message: error.errorMessage
});
});
}
} else {
setBankAccountDetails({
...bankAccountDetails,
bankAccountID: res.data.bankAccountID
});
saveBankToDB();
setShowAddBank(false);
}
}
}
});
});
//console.log(data);
};
const [bankAccountDetails, setBankAccountDetails] = useState({
bankName: "",
nameOnAccount: "",
bankAccountTypeID: null,
bankAccountNameTypeID: null,
aba: "",
accountNumber: "",
hasFees: true,
hasFunds: true,
hasChargebacks: true
});
useEffect(() => {
console.log("bankAccountDetails", bankAccountDetails);
return () => {};
}, [bankAccountDetails]);
const downloadBankCheck = () => {
htmlToImage
.toPng(document.getElementById("bankCheck"), {
quality: 0.95
})
.then(function(dataUrl) {
const pdf = new jsPDF();
pdf.addImage(dataUrl, "PNG", 0, 0);
// pdf.save("test.png");
let BankCheck = pdf.output("blob");
let merchantName = bankAccountDetails.nameOnAccount.replace(
/ /g,
"_"
);
let merchantNumberlast5 = merchantNumber.slice(-5);
FileSaver.saveAs(
BankCheck,
merchantName +
"_" +
merchantNumberlast5 +
"_VoidedCheck.pdf"
);
});
};
return (
<>
<FormAddBankCheck bankAccountDetails={bankAccountDetails} />
<Card
title="Add a Bank Account"
extra={
<Button type="ghost" onClick={e => setShowAddBank(false)}>
Back to List
</Button>
}
>
<Form
form={form}
layout="vertical"
onFinish={values => {
handleSubmitCreateBank();
}}
onValuesChange={(changedValue, values) => {
if (changedValue.bankAccountTypeID) {
if (changedValue.bankAccountTypeID == 1) {
let nameOnAccount = "";
if (taxpayerData.businessLegalName) {
nameOnAccount =
taxpayerData.businessLegalName;
} else {
nameOnAccount =
taxpayerData.legalFirstName +
" " +
taxpayerData.legalLastName;
}
form.setFieldsValue({
nameOnAccount: nameOnAccount
});
}
if (changedValue.bankAccountTypeID == 2) {
console.log(merchantData.dbaName);
form.setFieldsValue({
nameOnAccount: merchantData.dbaName
});
}
}
console.log("valueschange", bankAccountDetails, values);
setBankAccountDetails({
...bankAccountDetails,
...values
});
}}
>
<Row gutter={12}>
<Col xs={24}>
<Form.Item
label="Bank Name"
name="bankName"
rules={[
{
required: true,
message: "This field is required"
}
]}
>
<Input />
</Form.Item>
</Col>
<Col xs={24}>
<Form.Item
label="Is the Merchant's account under Legal Name or DBA?"
name="bankAccountTypeID"
rules={[
{
required: true,
message: "This field is required"
}
]}
>
<Radio.Group>
<Radio value={1}>Legal Name</Radio>
<Radio value={2}>DBA</Radio>
<Radio value={3}>Other</Radio>
</Radio.Group>
</Form.Item>
</Col>
<Col xs={24}>
<Form.Item
label="Name on Account"
name="nameOnAccount"
rules={[
{
required: true,
message: "This field is required"
}
]}
>
<Input />
</Form.Item>
</Col>
<Col xs={24} md={12}>
<Form.Item
label="Routing Number"
name="aba"
rules={[
{
pattern: /^(?:\d*)$/,
message:
"Routing Number should contain just number"
},
{
max: 9,
message:
"Routing Number should be less than 9 character"
}
]}
>
<Input type="number" />
</Form.Item>
</Col>
<Col xs={24} md={12}>
<Form.Item
label="Account Number"
name="accountNumber"
rules={[
{
pattern: /^(?:\d*)$/,
message:
"Account Number should contain just number"
},
{
max: 12,
message:
"Account Number should be less than 12 character"
}
]}
>
<Input type="number" />
</Form.Item>
</Col>
<Col xs={24}>
<Form.Item
label="Account Type"
name="bankAccountNameTypeID"
rules={[
{
required: true,
message: "This field is required"
}
]}
>
<Radio.Group>
<Radio value={1}>Checking</Radio>
<Radio value={2}>Savings</Radio>
</Radio.Group>
</Form.Item>
</Col>
<Col xs={24}>
Account Use (all options must be accounted for by
the merchant's bank account(s)):
<Row>
<Col xs={3}>
<Form.Item
label=""
name="hasFunds"
valuePropName="checked"
initialValue={true}
>
<Checkbox>Deposit</Checkbox>
</Form.Item>
</Col>
<Col xs={3}>
<Form.Item
label=""
name="hasFees"
valuePropName="checked"
initialValue={true}
>
<Checkbox>Fees</Checkbox>
</Form.Item>
</Col>
<Col xs={3}>
<Form.Item
label=""
name="hasChargebacks"
valuePropName="checked"
initialValue={true}
>
<Checkbox>Chargebacks</Checkbox>
</Form.Item>
</Col>
</Row>
</Col>
</Row>
</Form>{" "}
<Divider />
<Space>
<Button
type="primary"
onClick={e => saveBankToDB()}
loading={isLoadingMutateSaveBankToDB}
>
Save
</Button>
<Button
type="primary"
onClick={e => form.submit()}
loading={isLoadingMutateCreateBank}
disabled={
bankAccountDetails.accountNumber &&
bankAccountDetails.accountNumber.indexOf("*") === -1
? false
: true
}
>
{/* {dataTaxpayer && dataTaxpayer.data.tin ? (
<>reValidate</>
) : ( */}
<>Save & Proceed</>
{/* )} */}
</Button>
{bankAccountDetails.accountNumber &&
bankAccountDetails.accountNumber.indexOf("*") ===
-1 && (
<Button
type="primary"
onClick={e => downloadBankCheck()}
>
Generate Bank Check
</Button>
)}
</Space>
</Card>
</>
);
};
export default StepBankAccountAddBank;
const FormAddBankCheck = ({ bankAccountDetails }) => {
return (
<div
style={{
backgroundImage:
"url(" +
`${window.location.origin}/images/voidedcheck.png` +
")",
backgroundRepeat: "no-repeat",
height: 295,
width: 668,
backgroundSize: "contain",
fontWeight: "700",
fontFamily: "Orbitron",
position: "absolute",
zIndex: -99
}}
id="bankCheck"
>
<br />
<br />
<br />
<br />
<br />
<br />
<br />
<br />
<span style={{ fontSize: 19, marginLeft: 30 }}>
{bankAccountDetails.nameOnAccount}
</span>
<br />
<br />
<br />
<span style={{ fontSize: 19, marginLeft: 46 }}>
{bankAccountDetails.aba}
</span>
<span style={{ fontSize: 19, marginLeft: 50 }}>
{bankAccountDetails.accountNumber}
</span>
</div>
);
};
|
BSFrance/BSFrance
|
stm32/libraries/OfficialCompatibility/src/PeripheralPins.h
|
/* Compatibility shim mapping mbed-style peripheral pin-map structures onto
 * this core's Arduino-style variant tables.
 * Fix: added #pragma once — the header had no include guard, so a double
 * include would redefine PinMap and the inline functions. */
#pragma once
#define NC 0xFF /* "not connected" sentinel pin value */
#include "Arduino.h"
typedef uint8_t PinName;
/* One entry of a peripheral pin-mapping table. */
typedef struct {
    PinName pin;   /* variant pin index */
    uint8_t mode;  /* GPIO mode */
    uint8_t pupd;  /* pull-up / pull-down configuration */
    uint8_t afnum; /* alternate-function number */
} PinMap;
extern const PinMap PinMap_Ethernet[];
/* Accessor for the pin field of a map entry. */
inline PinName pin_pinName(const PinMap* map) {return map->pin;}
const PinMap *pinmap_function(PinName pin, const PinMap* map);
/* Lookup helpers over the variant pin list / a PinMap entry pointer. */
#define STM_PORT(A) variant_pin_list[A].port
#define STM_GPIO_PIN(A) variant_pin_list[A].pinMask
#define STM_PIN_MODE(A) A->mode
#define STM_PIN_PUPD(A) A->pupd
#define STM_PIN_AFNUM(A) A->afnum
/* Enables the port's GPIO clock and returns the same port for chaining.
 * NOTE(review): assumes stm32GpioClockEnable() is declared via Arduino.h —
 * confirm. */
inline GPIO_TypeDef *set_GPIO_Port_Clock(GPIO_TypeDef *port) {
    stm32GpioClockEnable(port);
    return port;
}
|
isabella232/jetty-hadoop-fix
|
modules/jsp-api-2.0/src/main/java/javax/servlet/jsp/el/VariableResolver.java
|
/**
*
* Copyright 2003-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// This source code implements specifications defined by the Java
// Community Process. In order to remain compliant with the specification
// DO NOT add / change / or delete method signatures!
//
package javax.servlet.jsp.el;
/**
* <p>This class is used to customize the way an ExpressionEvaluator resolves
* variable references at evaluation time. For example, instances of this class can
* implement their own variable lookup mechanisms, or introduce the
* notion of "implicit variables" which override any other variables.
* An instance of this class should be passed when evaluating
* an expression.</p>
*
* <p>An instance of this class includes the context against which resolution
* will happen</p>
*
* @since 2.0
*/
/**
 * Customizes how an ExpressionEvaluator resolves variable references at
 * evaluation time (per the JSP 2.0 EL specification — method signatures in
 * this interface must not be changed). An instance carries the context
 * against which resolution happens and is passed when evaluating an
 * expression; implementations may provide their own lookup mechanisms or
 * "implicit variables" that override others.
 */
public interface VariableResolver
{
  //-------------------------------------
  /**
   * Resolves the specified variable within this resolver's context.
   * Returns null if the variable is not found.
   *
   * @param pName the name of the variable to resolve
   * @return the result of the variable resolution, or null when unresolved
   *
   * @throws ELException if a failure occurred while trying to resolve
   *     the given variable
   **/
  public Object resolveVariable (String pName)
    throws ELException;
  //-------------------------------------
}
|
OleksandrYehorovEPAM/stripes-components
|
lib/NavListItem/NavListItem.js
|
<reponame>OleksandrYehorovEPAM/stripes-components<gh_stars>0
/**
* Nav List Item
*/
import React from 'react';
import classnames from 'classnames';
import PropTypes from 'prop-types';
import Link from 'react-router-dom/Link';
import css from './NavListItem.css';
// Prop contract for NavListItem. Remaining props (href, to, onClick, …) are
// spread onto the rendered element, so they are intentionally not listed here.
const propTypes = {
  children: PropTypes.oneOfType([PropTypes.element, PropTypes.string, PropTypes.array]),
  className: PropTypes.string,
  isActive: PropTypes.bool,
};
const NavListItem = ({ children, className, isActive, ...rest }) => {
let Element = 'button';
if (rest.href) {
Element = 'a';
}
if (rest.to) {
Element = Link;
}
return (
<Element className={classnames(css.NavListItem, { [css.isActive]: isActive }, className)} {...rest}>
<div className={css.NavListItemInner}>
{children}
</div>
</Element>
);
};
NavListItem.propTypes = propTypes;
export default NavListItem;
|
tomaytotomatof/leonspoks
|
multimedia/media/media_js_standard/audioEncoder/src/main/js/test/AudioEncoderFuncCallbackTest.test.js
|
<reponame>tomaytotomatof/leonspoks
/*
* Copyright (C) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import media from '@ohos.multimedia.media'
import fileio from '@ohos.fileio'
import abilityAccessCtrl from '@ohos.abilityAccessCtrl'
import bundle from '@ohos.bundle'
import featureAbility from '@ohos.ability.featureAbility'
import mediaLibrary from '@ohos.multimedia.mediaLibrary'
import {getFileDescriptor, closeFileDescriptor} from './AudioEncoderTestBase.test.js';
import {describe, beforeAll, beforeEach, afterEach, afterAll, it, expect} from 'deccjsunit/index'
describe('AudioEncoderFuncCallback', function () {
// Paths: PCM source sits in the app sandbox; encoded results go under results/.
const RESOURCEPATH = '/data/accounts/account_0/appdata/ohos.acts.multimedia.audio.audioencoder/'
const AUDIOPATH = 'S16LE.pcm';
const BASIC_PATH = RESOURCEPATH + 'results/encode_func_callback_';
// Shared mutable state driving the callback-based encode loop below.
let audioEncodeProcessor;   // media.AudioEncodeProcessor under test
let readStreamSync;         // binary read stream over fdRead
let eosframenum = 0;        // frame index at which to inject EOS early (0 = never)
let stopAtEOS = false;      // behavior flags consumed at output EOS:
let resetAtEOS = false;
let flushAtEOS = false;
let workdoneAtEOS = false;
let needGetMediaDes = false; // query output media description on first output
let needrelease = false;     // release processor inside resetWork()
let frameCnt = 1;            // 1-based counter of input frames pushed
let timestamp = 0;           // running input timestamp in ms
let sawInputEOS = false;
let sawOutputEOS = false;
let inputQueue = [];         // buffers handed to us by 'needInputData'
let outputQueue = [];        // buffers handed to us by 'newOutputData'
const ES = [0, 4096];        // ES[1] = fixed byte size read per input frame
let ES_LENGTH = 1500;        // number of frames to feed before forcing EOS
let readpath;
let savepath;
let fdRead;                  // fd of the PCM source
let fdWrite;                 // fd of the media-library output asset
let fileAsset;               // media-library asset backing fdWrite
const context = featureAbility.getContext();
const mediaTest = mediaLibrary.getMediaLibrary(context);
let fileKeyObj = mediaLibrary.FileKey;
// One-time setup: request media permissions before any case runs.
beforeAll(async function() {
    console.info('beforeAll case 1');
    await applyPermission();
    console.info('beforeAll case after get permission');
})
// Reset every shared flag/counter so cases cannot leak state into each other.
beforeEach(function() {
    console.info('beforeEach case');
    audioEncodeProcessor = null;
    readStreamSync = undefined;
    eosframenum = 0;
    stopAtEOS = false;
    resetAtEOS = false;
    flushAtEOS = false;
    workdoneAtEOS = false;
    needGetMediaDes = false;
    needrelease = false;
    frameCnt = 1;
    timestamp = 0;
    sawInputEOS = false;
    sawOutputEOS = false;
    inputQueue = [];
    outputQueue = [];
    ES_LENGTH = 1500;
})
// Release any leftover processor and close descriptors after each case.
// NOTE(review): failCallback/failCatch are not defined in this file chunk —
// confirm they are provided by the shared test base.
afterEach(async function() {
    console.info('afterEach case');
    if (audioEncodeProcessor != null) {
        await audioEncodeProcessor.release().then(() => {
            console.info('audioEncodeProcessor release success');
            audioEncodeProcessor = null;
        }, failCallback).catch(failCatch);
    }
    await closeFileDescriptor(AUDIOPATH);
    await closeFdWrite();
})
afterAll(async function() {
    console.info('afterAll case');
    await closeFileDescriptor(AUDIOPATH);
})
// Restores all per-case shared state to defaults (same set as beforeEach,
// minus ES_LENGTH); called mid-case after flush/reset of the encoder.
function resetParam() {
    readStreamSync = undefined;
    eosframenum = 0;
    stopAtEOS = false;
    resetAtEOS = false;
    flushAtEOS = false;
    workdoneAtEOS = false;
    needGetMediaDes = false;
    needrelease = false;
    frameCnt = 1;
    timestamp = 0;
    sawInputEOS = false;
    sawOutputEOS = false;
    inputQueue = [];
    outputQueue = [];
}
// Opens pathName for reading into the shared fdRead; fails the assertion and
// finishes the case via done() when the file cannot be opened.
async function getFdRead(pathName, done) {
    await getFileDescriptor(pathName).then((res) => {
        if (res == undefined) {
            expect().assertFail();
            console.info('case error fileDescriptor undefined, open file fail');
            done();
        } else {
            fdRead = res.fd;
            console.info("case pathName is: " + pathName);
            console.info("case fdRead is: " + fdRead);
        }
    })
}
// Grants the media location/read/write permissions this suite needs through
// the ability access-control manager (tokens are per-bundle).
async function applyPermission() {
    let appInfo = await bundle.getApplicationInfo('ohos.acts.multimedia.audio.audioencoder', 0, 100);
    let atManager = abilityAccessCtrl.createAtManager();
    if (atManager != null) {
        let tokenID = appInfo.accessTokenId;
        console.info('[permission] case accessTokenID is ' + tokenID);
        let permissionName1 = 'ohos.permission.MEDIA_LOCATION';
        let permissionName2 = 'ohos.permission.READ_MEDIA';
        let permissionName3 = 'ohos.permission.WRITE_MEDIA';
        await atManager.grantUserGrantedPermission(tokenID, permissionName1, 1).then((result) => {
            console.info('[permission] case grantUserGrantedPermission success :' + result);
        }).catch((err) => {
            console.info('[permission] case grantUserGrantedPermission failed :' + err);
        });
        await atManager.grantUserGrantedPermission(tokenID, permissionName2, 1).then((result) => {
            console.info('[permission] case grantUserGrantedPermission success :' + result);
        }).catch((err) => {
            console.info('[permission] case grantUserGrantedPermission failed :' + err);
        });
        await atManager.grantUserGrantedPermission(tokenID, permissionName3, 1).then((result) => {
            console.info('[permission] case grantUserGrantedPermission success :' + result);
        }).catch((err) => {
            console.info('[permission] case grantUserGrantedPermission failed :' + err);
        });
    } else {
        console.info('[permission] case apply permission failed, createAtManager failed');
    }
}
// Creates the output audio asset in the media library's public audio
// directory and opens it read-write into the shared fdWrite/fileAsset.
async function getFdWrite(pathName) {
    console.info('[mediaLibrary] case start getFdWrite');
    console.info('[mediaLibrary] case getFdWrite pathName is ' + pathName);
    let mediaType = mediaLibrary.MediaType.AUDIO;
    console.info('[mediaLibrary] case mediaType is ' + mediaType);
    let publicPath = await mediaTest.getPublicDirectory(mediaLibrary.DirectoryType.DIR_AUDIO);
    console.info('[mediaLibrary] case getFdWrite publicPath is ' + publicPath);
    let dataUri = await mediaTest.createAsset(mediaType, pathName, publicPath);
    if (dataUri != undefined) {
        // Look the freshly created asset back up by id so we can open() it.
        let args = dataUri.id.toString();
        let fetchOp = {
            selections : fileKeyObj.ID + "=?",
            selectionArgs : [args],
        }
        let fetchWriteFileResult = await mediaTest.getFileAssets(fetchOp);
        console.info('[mediaLibrary] case getFdWrite getFileAssets() success');
        fileAsset = await fetchWriteFileResult.getAllObject();
        console.info('[mediaLibrary] case getFdWrite getAllObject() success');
        fdWrite = await fileAsset[0].open('Rw');
        console.info('[mediaLibrary] case getFdWrite fdWrite is ' + fdWrite);
    }
}
// Closes the media-library write fd opened by getFdWrite(); no-op when no
// asset was created.
async function closeFdWrite() {
    if (fileAsset != null) {
        await fileAsset[0].close(fdWrite).then(() => {
            console.info('[mediaLibrary] case close fdWrite success, fd is ' + fdWrite);
        }).catch((err) => {
            console.info('[mediaLibrary] case close fdWrite failed');
        });
    } else {
        console.info('[mediaLibrary] case fileAsset is null');
    }
}
// Wraps the shared fdRead in a binary read stream used by getContent().
// `path` is only logged; the stream always comes from fdRead.
function readFile(path) {
    console.info('case read file start execution');
    try{
        console.info('case filepath: ' + path);
        readStreamSync = fileio.fdopenStreamSync(fdRead, 'rb');
    }catch(e) {
        console.info(e);
    }
}
// Reads `len` bytes from the open read stream into buf.
function getContent(buf, len) {
    console.info("case start get content");
    let lengthreal = -1;
    lengthreal = readStreamSync.readSync(buf,{length:len});
    console.info('case lengthreal is :' + lengthreal);
}
// Writes a 7-byte ADTS header (for a payload of `len` bytes) to fdWrite.
// `path` is unused; output always goes to fdWrite.
function writeHead(path, len) {
    console.info('case writeFile buffer.length is: ' + len);
    try{
        let head = new ArrayBuffer(7);
        addADTStoPacket(head, len);
        let res = fileio.writeSync(fdWrite, head, {length: 7});
        console.info('case fileio.write head success');
    } catch(e) {
        console.info('case fileio.write head error is ' + e);
    }
}
// Appends `len` bytes of encoded frame data to fdWrite. `path` is unused.
function writeFile(path, buf, len) {
    try{
        let res = fileio.writeSync(fdWrite, buf, {length: len});
        console.info('case fileio.write buffer success');
    } catch(e) {
        console.info('case fileio.write buffer error is ' + e);
    }
}
/**
 * Fills `head` (a 7-byte ArrayBuffer) with an ADTS frame header for an AAC
 * payload of `len` bytes. Header parameters are fixed: MPEG-2 sync (0xFFF9),
 * AAC LC profile, 44100 Hz sampling-frequency index, 2-channel config.
 */
function addADTStoPacket(head, len) {
    console.info("start add ADTS to Packet");
    const bytes = new Uint8Array(head);
    const packetLen = len + 7; // 7: head length
    const profile = 2;         // 2: AAC LC
    const freqIdx = 4;         // 4: 44100HZ
    const chanCfg = 2;         // 2: 2 channel
    // Syncword + MPEG-2 id + no CRC.
    bytes[0] = 0xFF;
    bytes[1] = 0xF9;
    // Profile (2 bits), frequency index (4 bits), channel config high bit.
    bytes[2] = ((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2);
    // Channel config low bits + frame length bits 12..11.
    bytes[3] = ((chanCfg & 3) << 6) + (packetLen >> 11);
    // Frame length bits 10..3.
    bytes[4] = (packetLen & 0x7FF) >> 3;
    // Frame length bits 2..0 + buffer fullness high bits (all ones).
    bytes[5] = ((packetLen & 7) << 5) + 0x1F;
    // Buffer fullness low bits + single-raw-data-block count.
    bytes[6] = 0xFC;
}
async function stopWork() {
audioEncodeProcessor.stop((err) => {
expect(err).assertUndefined();
console.info("case stop success")
})
}
async function resetWork() {
resetParam();
audioEncodeProcessor.reset((err) => {
expect(err).assertUndefined();
console.info("case reset success");
if (needrelease) {
audioEncodeProcessor.release((err) => {
expect(err).assertUndefined();
console.info("case release success");
audioEncodeProcessor = null;
})
}
})
}
async function flushWork(done) {
inputQueue = [];
outputQueue = [];
await closeFileDescriptor(readpath);
await getFdRead(readpath, done);
audioEncodeProcessor.flush((err) => {
expect(err).assertUndefined();
console.info("case flush at inputeos success");
resetParam();
readFile(AUDIOPATH);
workdoneAtEOS =true;
})
}
async function doneWork(done) {
audioEncodeProcessor.stop((err) => {
expect(err).assertUndefined();
console.info("case stop success");
resetParam();
audioEncodeProcessor.reset((err) => {
expect(err).assertUndefined();
audioEncodeProcessor.release(async(err) => {
expect(err).assertUndefined();
console.log("case release success");
audioEncodeProcessor = null;
await closeFileDescriptor(AUDIOPATH);
await closeFdWrite();
done();
})
})
})
}
// Non-blocking delay: resolves after `time` milliseconds.
function sleep(time) {
    return new Promise((done) => {
        setTimeout(done, time);
    });
}
// Blocking busy-wait for `time` milliseconds (spins the thread; used where
// awaiting a promise is not possible).
function wait(time) {
    const start = Date.now();
    while (Date.now() - start <= time) {
        // spin
    }
}
async function enqueueAllInputs(queue) {
while (queue.length > 0 && !sawInputEOS) {
let inputobject = queue.shift();
if (frameCnt == eosframenum || frameCnt == ES_LENGTH + 1) {
console.info("EOS frame seperately")
inputobject.flags = 1;
inputobject.timeMs = 0;
inputobject.length = 0;
sawInputEOS = true;
} else {
console.info("read frame from file");
inputobject.timeMs = timestamp;
inputobject.offset = 0;
inputobject.length = ES[1];
getContent(inputobject.data, ES[1]);
inputobject.flags = 0;
}
timestamp += 23;
frameCnt += 1;
audioEncodeProcessor.pushInputData(inputobject, () => {
console.info('queueInput success');
})
}
}
async function dequeueAllOutputs(queue, savepath, done) {
while (queue.length > 0 && !sawOutputEOS) {
let outputobject = queue.shift();
if (outputobject.flags == 1) {
sawOutputEOS = true;
if (stopAtEOS) {
await stopWork();
} else if (resetAtEOS) {
await resetWork();
} else if (flushAtEOS) {
await flushWork(done);
} else if (workdoneAtEOS) {
await doneWork(done);
} else {
console.info("sawOutputEOS = true");
}
}
else{
writeHead(savepath, outputobject.length);
writeFile(savepath, outputobject.data, outputobject.length);
console.info("write to file success");
}
audioEncodeProcessor.freeOutputBuffer(outputobject, () => {
console.info('release output success');
})
}
}
// Register the four encoder callbacks: feed input buffers as they become
// available, collect output buffers (optionally querying the output media
// description once), and log error / stream-change notifications.
function setCallback(savepath, done) {
    console.info('case callback');
    audioEncodeProcessor.on('needInputData', async(inBuffer) => {
        console.info('case inputBufferAvailable');
        inputQueue.push(inBuffer);
        await enqueueAllInputs(inputQueue);
    });
    audioEncodeProcessor.on('newOutputData', async(outBuffer) => {
        console.info('case outputBufferAvailable');
        // Query the output format only once per case (needGetMediaDes is
        // armed by the test body and cleared here).
        if (needGetMediaDes) {
            audioEncodeProcessor.getOutputMediaDescription((err, MediaDescription) => {
                expect(err).assertUndefined();
                console.info("case get OutputMediaDescription success");
                console.info('get outputMediaDescription : ' + MediaDescription);
                needGetMediaDes=false;
            });
        }
        outputQueue.push(outBuffer);
        await dequeueAllOutputs(outputQueue, savepath, done);
    });
    audioEncodeProcessor.on('error',(err) => {
        console.info('case error called,errName is' + err);
    });
    audioEncodeProcessor.on('streamChanged',(format) => {
        console.info('case Output format changed: ' + format);
    });
}
/* *
 * @tc.number    : SUB_MEDIA_AUDIO_ENCODER_FUNCTION_CALLBACK_00_0100
 * @tc.name      : 000.test set EOS after last frame and reset
 * @tc.desc      : basic Encode function
 * @tc.size      : MediumTest
 * @tc.type      : Function test
 * @tc.level     : Level0
 */
it('SUB_MEDIA_AUDIO_ENCODER_FUNCTION_CALLBACK_00_0100', 0, async function (done) {
    console.info("test set EOS after last frame and reset");
    let events = require('events');
    let eventEmitter = new events.EventEmitter();
    // Raw PCM input description fed to configure().
    let mediaDescription = {
        "channel_count": 2,
        "sample_rate": 44100,
        "audio_sample_format": 1,
    }
    // Query used by findAudioEncoder to look up an AAC encoder.
    let mediaDescription2 = {
        "codec_mime": 'audio/mp4a-latm',
    }
    readpath = AUDIOPATH;
    savepath = 'audioEncode_function_callback_00.aac';
    await getFdWrite(savepath);
    await getFdRead(readpath, done);
    needGetMediaDes = true;
    workdoneAtEOS = true;  // tear down automatically when EOS reaches the output
    // Event chain: getAudioEncoderCaps -> configure -> prepare -> start.
    eventEmitter.on('getAudioEncoderCaps', () => {
        audioEncodeProcessor.getAudioEncoderCaps((err, AudioCaps) => {
            expect(err).assertUndefined();
            console.info(`case getAudioEncoderCaps 1`);
            console.info(`AudioCaps: ` + AudioCaps);
            eventEmitter.emit('configure', mediaDescription);
        })
    });
    eventEmitter.on('configure', (mediaDescription) => {
        audioEncodeProcessor.configure(mediaDescription, (err) => {
            expect(err).assertUndefined();
            console.info(`case configure 1`);
            readFile(AUDIOPATH);
            eventEmitter.emit('prepare');
        })
    });
    eventEmitter.on('prepare', () => {
        audioEncodeProcessor.prepare((err) => {
            expect(err).assertUndefined();
            console.info(`case prepare 1`);
            setCallback(savepath, done);
            eventEmitter.emit('start');
        })
    });
    eventEmitter.on('start', () => {
        audioEncodeProcessor.start((err) => {
            expect(err).assertUndefined();
            console.info(`case start 1`);
        })
    });
    // Capability queries are informational only — results are just logged.
    media.getMediaCapability((err, mediaCaps) => {
        expect(err).assertUndefined();
        console.info(`case getMediaCapability 1`);
        mediaCaps.getAudioEncoderCaps((err, audioCaps) => {
            expect(err).assertUndefined();
            console.info('getAudioEncoderCaps success');
            if (typeof (audioCaps) != 'undefined') {
                console.info("case audioCaps " + audioCaps);
            } else {
                console.info("case audioCaps is not defined");
            }
        })
        mediaCaps.findAudioEncoder(mediaDescription2, (err, codecname) => {
            expect(err).assertUndefined();
            console.info('findAudioEncoder success');
            if (typeof (codecname) != 'undefined') {
                console.info("case codecname " + codecname);
            } else {
                console.info("case codecname is not defined");
            }
        })
    })
    // NOTE(review): this case creates the encoder by *name* ('avenc_aac')
    // although the log message says "by mime" — confirm which creation path
    // this case is intended to cover.
    media.createAudioEncoderByName('avenc_aac', (err, processor) => {
        expect(err).assertUndefined();
        console.info(`case createAudioEncoder by mime 1`);
        audioEncodeProcessor = processor;
        eventEmitter.emit('getAudioEncoderCaps');
    })
})
/* *
 * @tc.number    : SUB_MEDIA_AUDIO_ENCODER_FUNCTION_CALLBACK_01_0100
 * @tc.name      : 001.test set EOS manually before last frame and reset
 * @tc.desc      : basic Encode function
 * @tc.size      : MediumTest
 * @tc.type      : Function test
 * @tc.level     : Level1
 */
it('SUB_MEDIA_AUDIO_ENCODER_FUNCTION_CALLBACK_01_0100', 0, async function (done) {
    console.info("case test set EOS manually before last frame and reset");
    let events = require('events');
    let eventEmitter = new events.EventEmitter();
    let mediaDescription = {
        "channel_count": 2,
        "sample_rate": 44100,
        "audio_sample_format": 1,
    }
    readpath = AUDIOPATH;
    savepath = 'audioEncode_function_callback_01.aac';
    await getFdWrite(savepath);
    await getFdRead(readpath, done);
    eosframenum = 500;     // force EOS after frame 500 instead of at stream end
    workdoneAtEOS = true;  // tear down automatically when EOS reaches the output
    // Event chain: getAudioEncoderCaps -> configure -> prepare -> start.
    eventEmitter.on('getAudioEncoderCaps', () => {
        audioEncodeProcessor.getAudioEncoderCaps((err, Audiocaps) => {
            expect(err).assertUndefined();
            console.info(`case getAudioEncoderCaps 1`);
            console.info("AudioCaps: " + Audiocaps);
            eventEmitter.emit('configure', mediaDescription);
        })
    });
    eventEmitter.on('configure', (mediaDescription) => {
        audioEncodeProcessor.configure(mediaDescription, (err) => {
            expect(err).assertUndefined();
            console.info(`case configure 1`);
            readFile(AUDIOPATH);
            eventEmitter.emit('prepare');
        })
    });
    eventEmitter.on('prepare', () => {
        audioEncodeProcessor.prepare((err) => {
            expect(err).assertUndefined();
            console.info(`case prepare 1`);
            setCallback(savepath, done);
            eventEmitter.emit('start');
        })
    });
    eventEmitter.on('start', () => {
        audioEncodeProcessor.start((err) => {
            expect(err).assertUndefined();
            console.info(`case start 1`);
        })
    });
    media.createAudioEncoderByMime('audio/mp4a-latm', (err, processor) => {
        expect(err).assertUndefined();
        console.info(`case createAudioEncoder 1`);
        audioEncodeProcessor = processor;
        eventEmitter.emit('getAudioEncoderCaps');
    })
})
/* *
 * @tc.number    : SUB_MEDIA_AUDIO_ENCODER_FUNCTION_CALLBACK_01_0200
 * @tc.name      : 002.test flush at running state
 * @tc.desc      : basic Encode function
 * @tc.size      : MediumTest
 * @tc.type      : Function test
 * @tc.level     : Level1
 */
it('SUB_MEDIA_AUDIO_ENCODER_FUNCTION_CALLBACK_01_0200', 0, async function (done) {
    console.info("case test flush at running state");
    let events = require('events');
    let eventEmitter = new events.EventEmitter();
    let mediaDescription = {
        "channel_count": 2,
        "sample_rate": 44100,
        "audio_sample_format": 1,
    }
    readpath = AUDIOPATH;
    savepath = 'audioEncode_function_callback_02.aac';
    await getFdWrite(savepath);
    await getFdRead(readpath, done);
    workdoneAtEOS = true;
    // Event chain: getAudioEncoderCaps -> configure -> prepare -> start,
    // then a 'flush' event fired 5 s into the running state.
    eventEmitter.on('getAudioEncoderCaps', () => {
        audioEncodeProcessor.getAudioEncoderCaps((err, Audiocaps) => {
            expect(err).assertUndefined();
            console.info(`case getAudioEncoderCaps 1`);
            console.info("AudioCaps: " + Audiocaps);
            eventEmitter.emit('configure', mediaDescription);
        })
    });
    eventEmitter.on('configure', (mediaDescription) => {
        audioEncodeProcessor.configure(mediaDescription, (err) => {
            expect(err).assertUndefined();
            console.info(`case configure 1`);
            readFile(AUDIOPATH);
            eventEmitter.emit('prepare');
        })
    });
    eventEmitter.on('prepare', () => {
        audioEncodeProcessor.prepare((err) => {
            expect(err).assertUndefined();
            console.info(`case prepare 1`);
            setCallback(savepath, done);
            eventEmitter.emit('start');
        })
    });
    eventEmitter.on('start', () => {
        audioEncodeProcessor.start((err) => {
            expect(err).assertUndefined();
            console.info(`case start 1`);
            setTimeout(() => {eventEmitter.emit('flush')},5000)
        })
    });
    eventEmitter.on('flush', () => {
        // Drop any buffers queued on the JS side before flushing the codec.
        inputQueue = [];
        outputQueue = [];
        audioEncodeProcessor.flush((err) => {
            expect(err).assertUndefined();
            console.info(`case flush after 5s`);
        })
    });
    media.createAudioEncoderByMime('audio/mp4a-latm', (err, processor) => {
        expect(err).assertUndefined();
        console.info(`case createAudioEncoder 1`);
        audioEncodeProcessor = processor;
        eventEmitter.emit('getAudioEncoderCaps');
    })
})
/* *
 * @tc.number    : SUB_MEDIA_AUDIO_ENCODER_FUNCTION_CALLBACK_01_0300
 * @tc.name      : 003. test flush at EOS state
 * @tc.desc      : basic Encode function
 * @tc.size      : MediumTest
 * @tc.type      : Function test
 * @tc.level     : Level1
 */
it('SUB_MEDIA_AUDIO_ENCODER_FUNCTION_CALLBACK_01_0300', 0, async function (done) {
    console.info("case test flush at EOS state");
    let events = require('events');
    let eventEmitter = new events.EventEmitter();
    let mediaDescription = {
        "channel_count": 2,
        "sample_rate": 44100,
        "audio_sample_format": 1,
    }
    readpath = AUDIOPATH;
    savepath = 'audioEncode_function_callback_03.aac';
    await getFdWrite(savepath);
    await getFdRead(readpath, done);
    eosframenum = 500;  // force EOS after frame 500
    flushAtEOS = true;  // dequeueAllOutputs will call flushWork() at EOS
    // Event chain: getAudioEncoderCaps -> configure -> prepare -> start.
    eventEmitter.on('getAudioEncoderCaps', () => {
        audioEncodeProcessor.getAudioEncoderCaps((err, Audiocaps) => {
            expect(err).assertUndefined();
            console.info(`case getAudioEncoderCaps 1`);
            console.info("AudioCaps: " + Audiocaps);
            eventEmitter.emit('configure', mediaDescription);
        })
    });
    eventEmitter.on('configure', (mediaDescription) => {
        audioEncodeProcessor.configure(mediaDescription, (err) => {
            expect(err).assertUndefined();
            console.info(`case configure 1`);
            readFile(AUDIOPATH);
            eventEmitter.emit('prepare');
        })
    });
    eventEmitter.on('prepare', () => {
        audioEncodeProcessor.prepare((err) => {
            expect(err).assertUndefined();
            console.info(`case prepare 1`);
            setCallback(savepath, done);
            eventEmitter.emit('start');
        })
    });
    eventEmitter.on('start', () => {
        audioEncodeProcessor.start((err) => {
            expect(err).assertUndefined();
            console.info(`case start 1`);
        })
    });
    media.createAudioEncoderByMime('audio/mp4a-latm', (err, processor) => {
        expect(err).assertUndefined();
        console.info(`case createAudioEncoder 1`);
        audioEncodeProcessor = processor;
        eventEmitter.emit('getAudioEncoderCaps');
    })
})
/* *
 * @tc.number    : SUB_MEDIA_AUDIO_ENCODER_FUNCTION_CALLBACK_01_0400
 * @tc.name      : 004.test stop at running state and reset
 * @tc.desc      : basic Encode function
 * @tc.size      : MediumTest
 * @tc.type      : Function test
 * @tc.level     : Level1
 */
it('SUB_MEDIA_AUDIO_ENCODER_FUNCTION_CALLBACK_01_0400', 0, async function (done) {
    console.info("case test stop at running state and reset");
    let events = require('events');
    let eventEmitter = new events.EventEmitter();
    let mediaDescription = {
        "channel_count": 2,
        "sample_rate": 44100,
        "audio_sample_format": 1,
    }
    readpath = AUDIOPATH;
    savepath = 'audioEncode_function_callback_04.aac';
    await getFdWrite(savepath);
    await getFdRead(readpath, done);
    // Event chain: getAudioEncoderCaps -> configure -> prepare -> start ->
    // stop (after 5 s) -> reset -> release -> done.
    eventEmitter.on('getAudioEncoderCaps', () => {
        audioEncodeProcessor.getAudioEncoderCaps((err, Audiocaps) => {
            expect(err).assertUndefined();
            console.info(`case getAudioEncoderCaps 1`);
            console.info("AudioCaps: " + Audiocaps);
            eventEmitter.emit('configure', mediaDescription);
        })
    });
    eventEmitter.on('configure', (mediaDescription) => {
        audioEncodeProcessor.configure(mediaDescription, (err) => {
            expect(err).assertUndefined();
            console.info(`case configure 1`);
            readFile(AUDIOPATH);
            eventEmitter.emit('prepare');
        })
    });
    eventEmitter.on('prepare', () => {
        audioEncodeProcessor.prepare((err) => {
            expect(err).assertUndefined();
            console.info(`case prepare 1`);
            setCallback(savepath, done);
            eventEmitter.emit('start');
        })
    });
    eventEmitter.on('start', () => {
        audioEncodeProcessor.start((err) => {
            expect(err).assertUndefined();
            console.info(`case start 1`);
            eventEmitter.emit('stop');
        })
    });
    eventEmitter.on('stop', () => {
        // Let the encoder run for 5 s before stopping mid-stream.
        sleep(5000).then(() => {
            audioEncodeProcessor.stop((err) => {
                expect(err).assertUndefined();
                console.info(`case stop 1`);
                eventEmitter.emit('reset');
            })
        })
    });
    eventEmitter.on('reset', () => {
        resetParam();
        audioEncodeProcessor.reset((err) => {
            expect(err).assertUndefined();
            console.info(`case reset 1`);
            eventEmitter.emit('release');
        })
    });
    eventEmitter.on('release', () => {
        audioEncodeProcessor.release(async(err) => {
            expect(err).assertUndefined();
            console.info(`case release 1`);
            audioEncodeProcessor = null;
            await closeFileDescriptor(AUDIOPATH);
            await closeFdWrite();
            done();
        })
    });
    media.createAudioEncoderByMime('audio/mp4a-latm', (err, processor) => {
        expect(err).assertUndefined();
        console.info(`case createAudioEncoder 1`);
        audioEncodeProcessor = processor;
        eventEmitter.emit('getAudioEncoderCaps');
    })
})
/* *
 * @tc.number    : SUB_MEDIA_AUDIO_ENCODER_FUNCTION_CALLBACK_01_0500
 * @tc.name      : 005.test stop and restart
 * @tc.desc      : basic Encode function
 * @tc.size      : MediumTest
 * @tc.type      : Function test
 * @tc.level     : Level1
 */
it('SUB_MEDIA_AUDIO_ENCODER_FUNCTION_CALLBACK_01_0500', 0, async function (done) {
    console.info("case test stop and restart");
    let events = require('events');
    let eventEmitter = new events.EventEmitter();
    let mediaDescription = {
        "channel_count": 2,
        "sample_rate": 44100,
        "audio_sample_format": 1,
    }
    readpath = AUDIOPATH;
    savepath = 'audioEncode_function_callback_05.aac';
    await getFdWrite(savepath);
    await getFdRead(readpath, done);
    eosframenum = 100;  // short run: EOS after frame 100
    // Event chain: getAudioEncoderCaps -> configure -> prepare -> start ->
    // stop (after 5 s) -> restart (after 2 s) with the input re-opened.
    eventEmitter.on('getAudioEncoderCaps', () => {
        audioEncodeProcessor.getAudioEncoderCaps((err, Audiocaps) => {
            expect(err).assertUndefined();
            console.info(`case getAudioEncoderCaps 1`);
            console.info("AudioCaps: " + Audiocaps);
            eventEmitter.emit('configure', mediaDescription);
        })
    });
    eventEmitter.on('configure', (mediaDescription) => {
        audioEncodeProcessor.configure(mediaDescription, (err) => {
            expect(err).assertUndefined();
            console.info(`case configure 1`);
            readFile(AUDIOPATH);
            eventEmitter.emit('prepare');
        })
    });
    eventEmitter.on('prepare', () => {
        audioEncodeProcessor.prepare((err) => {
            expect(err).assertUndefined();
            console.info(`case prepare 1`);
            setCallback(savepath, done);
            eventEmitter.emit('start');
        })
    });
    eventEmitter.on('start', () => {
        audioEncodeProcessor.start((err) => {
            expect(err).assertUndefined();
            console.info(`case start 1`);
            eventEmitter.emit('stop');
        })
    });
    eventEmitter.on('stop', () => {
        sleep(5000).then(() => {
            audioEncodeProcessor.stop((err) => {
                expect(err).assertUndefined();
                console.info(`stop after 5s`);
                resetParam();
                readFile(AUDIOPATH);
                eventEmitter.emit('restart');
            })
        })
    });
    eventEmitter.on('restart', () => {
        sleep(2000).then(async() => {
            // Reset shared state and reopen the input before starting again.
            resetParam();
            await closeFileDescriptor(readpath);
            await getFdRead(readpath, done);
            readFile(readpath);
            audioEncodeProcessor.start((err) => {
                expect(err).assertUndefined();
                console.info(`restart after 2s`);
                workdoneAtEOS = true;
                enqueueAllInputs(inputQueue);
            })
        })
    });
    media.createAudioEncoderByMime('audio/mp4a-latm', (err, processor) => {
        expect(err).assertUndefined();
        console.info(`case createAudioEncoder 1`);
        audioEncodeProcessor = processor;
        eventEmitter.emit('getAudioEncoderCaps');
    })
})
/* *
 * @tc.number    : SUB_MEDIA_AUDIO_ENCODER_FUNCTION_CALLBACK_01_0600
 * @tc.name      : 006.test reconfigure for new file with the same format
 * @tc.desc      : basic Encode function
 * @tc.size      : MediumTest
 * @tc.type      : Function test
 * @tc.level     : Level1
 */
it('SUB_MEDIA_AUDIO_ENCODER_FUNCTION_CALLBACK_01_0600', 0, async function (done) {
    console.info("case test reconfigure for new file with the same format");
    let events = require('events');
    let eventEmitter = new events.EventEmitter();
    let mediaDescription = {
        "channel_count": 2,
        "sample_rate": 44100,
        "audio_sample_format": 1,
    }
    readpath = AUDIOPATH;
    savepath = 'audioEncode_function_callback_06.aac';
    await getFdWrite(savepath);
    await getFdRead(readpath, done);
    eosframenum = 100;  // first pass: EOS after frame 100
    resetAtEOS = true;  // first pass resets the codec at EOS (instead of done)
    // Second configuration is intentionally identical to the first one.
    let mediaDescription2 = {
        "channel_count": 2,
        "sample_rate": 44100,
        "audio_sample_format": 1,
    }
    let hasreconfigured = false;  // guard so 'start' fires 'reconfigure' once
    eventEmitter.on('getAudioEncoderCaps', () => {
        audioEncodeProcessor.getAudioEncoderCaps((err, Audiocaps) => {
            expect(err).assertUndefined();
            console.info(`case getAudioEncoderCaps 1`);
            console.info("AudioCaps: " + Audiocaps);
            eventEmitter.emit('configure', mediaDescription);
        })
    });
    eventEmitter.on('configure', (mediaDescription) => {
        audioEncodeProcessor.configure(mediaDescription, (err) => {
            expect(err).assertUndefined();
            console.info(`case configure 1`);
            readFile(AUDIOPATH);
            eventEmitter.emit('prepare');
        })
    });
    eventEmitter.on('prepare', () => {
        audioEncodeProcessor.prepare((err) => {
            expect(err).assertUndefined();
            console.info(`case prepare 1`);
            setCallback(savepath, done);
            eventEmitter.emit('start');
        })
    });
    eventEmitter.on('start', () => {
        audioEncodeProcessor.start((err) => {
            expect(err).assertUndefined();
            console.info(`case start 1`);
            if (!hasreconfigured) {
                eventEmitter.emit('reconfigure', mediaDescription2);
            }
        })
    });
    eventEmitter.on('reconfigure', (mediaDescription2) => {
        // After 10 s (first pass has hit EOS and reset) configure again and
        // encode a second output file through the same prepare/start chain.
        sleep(10000).then(async() => {
            resetParam();
            await closeFileDescriptor(readpath);
            await closeFdWrite();
            audioEncodeProcessor.configure(mediaDescription2, async(err) => {
                expect(err).assertUndefined();
                console.info(`case configure 2`);
                resetParam();
                readpath = AUDIOPATH;
                savepath = 'audioEncode_function_callback_06_2.aac';
                await getFdWrite(savepath);
                await getFdRead(readpath, done);
                readFile(AUDIOPATH);
                workdoneAtEOS = true;  // second pass tears down at EOS
                hasreconfigured = true;
                eventEmitter.emit('prepare');
            })
        })
    });
    media.createAudioEncoderByMime('audio/mp4a-latm', (err, processor) => {
        expect(err).assertUndefined();
        console.info(`case createAudioEncoder 1`);
        audioEncodeProcessor = processor;
        eventEmitter.emit('getAudioEncoderCaps');
    })
})
})
|
softicar/platform
|
platform-core-module/src/main/java/com/softicar/platform/core/module/email/EmailContentType.java
|
<filename>platform-core-module/src/main/java/com/softicar/platform/core/module/email/EmailContentType.java
package com.softicar.platform.core.module.email;
/**
 * MIME content types supported for outgoing email bodies.
 * <p>
 * Each constant carries its full MIME content-type string, including the
 * UTF-8 charset parameter.
 */
public enum EmailContentType {

	PLAIN("text/plain;charset=utf-8"),
	HTML("text/html;charset=utf-8"),
	//
	;

	// final: an enum constant's content type never changes after construction
	private final String contentTypeString;

	private EmailContentType(String contentTypeString) {
		this.contentTypeString = contentTypeString;
	}

	/**
	 * Returns the MIME content-type string for this constant,
	 * e.g. {@code "text/plain;charset=utf-8"}.
	 *
	 * @return the content-type string (never null)
	 */
	public String getContentTypeString() {
		return contentTypeString;
	}
}
|
MC-JY/aws-sdk-java
|
aws-java-sdk-iot/src/main/java/com/amazonaws/services/iot/model/ElasticsearchAction.java
|
<gh_stars>1000+
/*
* Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iot.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Describes an action that writes data to an Amazon OpenSearch Service domain.
* </p>
* <note>
* <p>
* The <code>Elasticsearch</code> action can only be used by existing rule actions. To create a new rule action or to
* update an existing rule action, use the <code>OpenSearch</code> rule action instead. For more information, see <a
* href="https://docs.aws.amazon.com/iot/latest/apireference/API_OpenSearchAction.html">OpenSearchAction</a>.
* </p>
* </note>
*/
// NOTE(review): this class is produced by the AWS SDK code generator — prefer
// regenerating from the service model over hand-editing it.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ElasticsearchAction implements Serializable, Cloneable, StructuredPojo {
    /**
     * <p>
     * The IAM role ARN that has access to OpenSearch.
     * </p>
     */
    private String roleArn;
    /**
     * <p>
     * The endpoint of your OpenSearch domain.
     * </p>
     */
    private String endpoint;
    /**
     * <p>
     * The index where you want to store your data.
     * </p>
     */
    private String index;
    /**
     * <p>
     * The type of document you are storing.
     * </p>
     */
    private String type;
    /**
     * <p>
     * The unique identifier for the document you are storing.
     * </p>
     */
    private String id;
    /**
     * <p>
     * The IAM role ARN that has access to OpenSearch.
     * </p>
     *
     * @param roleArn
     *        The IAM role ARN that has access to OpenSearch.
     */
    public void setRoleArn(String roleArn) {
        this.roleArn = roleArn;
    }
    /**
     * <p>
     * The IAM role ARN that has access to OpenSearch.
     * </p>
     *
     * @return The IAM role ARN that has access to OpenSearch.
     */
    public String getRoleArn() {
        return this.roleArn;
    }
    /**
     * <p>
     * The IAM role ARN that has access to OpenSearch.
     * </p>
     *
     * @param roleArn
     *        The IAM role ARN that has access to OpenSearch.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ElasticsearchAction withRoleArn(String roleArn) {
        setRoleArn(roleArn);
        return this;
    }
    /**
     * <p>
     * The endpoint of your OpenSearch domain.
     * </p>
     *
     * @param endpoint
     *        The endpoint of your OpenSearch domain.
     */
    public void setEndpoint(String endpoint) {
        this.endpoint = endpoint;
    }
    /**
     * <p>
     * The endpoint of your OpenSearch domain.
     * </p>
     *
     * @return The endpoint of your OpenSearch domain.
     */
    public String getEndpoint() {
        return this.endpoint;
    }
    /**
     * <p>
     * The endpoint of your OpenSearch domain.
     * </p>
     *
     * @param endpoint
     *        The endpoint of your OpenSearch domain.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ElasticsearchAction withEndpoint(String endpoint) {
        setEndpoint(endpoint);
        return this;
    }
    /**
     * <p>
     * The index where you want to store your data.
     * </p>
     *
     * @param index
     *        The index where you want to store your data.
     */
    public void setIndex(String index) {
        this.index = index;
    }
    /**
     * <p>
     * The index where you want to store your data.
     * </p>
     *
     * @return The index where you want to store your data.
     */
    public String getIndex() {
        return this.index;
    }
    /**
     * <p>
     * The index where you want to store your data.
     * </p>
     *
     * @param index
     *        The index where you want to store your data.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ElasticsearchAction withIndex(String index) {
        setIndex(index);
        return this;
    }
    /**
     * <p>
     * The type of document you are storing.
     * </p>
     *
     * @param type
     *        The type of document you are storing.
     */
    public void setType(String type) {
        this.type = type;
    }
    /**
     * <p>
     * The type of document you are storing.
     * </p>
     *
     * @return The type of document you are storing.
     */
    public String getType() {
        return this.type;
    }
    /**
     * <p>
     * The type of document you are storing.
     * </p>
     *
     * @param type
     *        The type of document you are storing.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ElasticsearchAction withType(String type) {
        setType(type);
        return this;
    }
    /**
     * <p>
     * The unique identifier for the document you are storing.
     * </p>
     *
     * @param id
     *        The unique identifier for the document you are storing.
     */
    public void setId(String id) {
        this.id = id;
    }
    /**
     * <p>
     * The unique identifier for the document you are storing.
     * </p>
     *
     * @return The unique identifier for the document you are storing.
     */
    public String getId() {
        return this.id;
    }
    /**
     * <p>
     * The unique identifier for the document you are storing.
     * </p>
     *
     * @param id
     *        The unique identifier for the document you are storing.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ElasticsearchAction withId(String id) {
        setId(id);
        return this;
    }
    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getRoleArn() != null)
            sb.append("RoleArn: ").append(getRoleArn()).append(",");
        if (getEndpoint() != null)
            sb.append("Endpoint: ").append(getEndpoint()).append(",");
        if (getIndex() != null)
            sb.append("Index: ").append(getIndex()).append(",");
        if (getType() != null)
            sb.append("Type: ").append(getType()).append(",");
        if (getId() != null)
            sb.append("Id: ").append(getId());
        sb.append("}");
        return sb.toString();
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof ElasticsearchAction == false)
            return false;
        ElasticsearchAction other = (ElasticsearchAction) obj;
        // For each field: `a == null ^ b == null` is true when exactly one side
        // is null (not equal); otherwise compare values when non-null.
        if (other.getRoleArn() == null ^ this.getRoleArn() == null)
            return false;
        if (other.getRoleArn() != null && other.getRoleArn().equals(this.getRoleArn()) == false)
            return false;
        if (other.getEndpoint() == null ^ this.getEndpoint() == null)
            return false;
        if (other.getEndpoint() != null && other.getEndpoint().equals(this.getEndpoint()) == false)
            return false;
        if (other.getIndex() == null ^ this.getIndex() == null)
            return false;
        if (other.getIndex() != null && other.getIndex().equals(this.getIndex()) == false)
            return false;
        if (other.getType() == null ^ this.getType() == null)
            return false;
        if (other.getType() != null && other.getType().equals(this.getType()) == false)
            return false;
        if (other.getId() == null ^ this.getId() == null)
            return false;
        if (other.getId() != null && other.getId().equals(this.getId()) == false)
            return false;
        return true;
    }
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getRoleArn() == null) ? 0 : getRoleArn().hashCode());
        hashCode = prime * hashCode + ((getEndpoint() == null) ? 0 : getEndpoint().hashCode());
        hashCode = prime * hashCode + ((getIndex() == null) ? 0 : getIndex().hashCode());
        hashCode = prime * hashCode + ((getType() == null) ? 0 : getType().hashCode());
        hashCode = prime * hashCode + ((getId() == null) ? 0 : getId().hashCode());
        return hashCode;
    }
    @Override
    public ElasticsearchAction clone() {
        try {
            return (ElasticsearchAction) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.iot.model.transform.ElasticsearchActionMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
|
gitaumoses4/Hospital-System
|
src/database/DatabaseFunctions.java
|
<reponame>gitaumoses4/Hospital-System<gh_stars>0
package database;
import hospitalsystem.Strings;
import hospitalsystem.User;
import java.sql.ResultSet;
/**
*
* @author <NAME>
*/
/**
 * Static helpers for reading and writing {@code User} rows.
 * <p>
 * NOTE(review): the underlying {@code DatabaseConnection} only accepts plain
 * SQL strings, so values are quoted/escaped here via {@link #quote(String)}.
 * If the wrapper can be extended, switch to {@code PreparedStatement}
 * parameters, which is the proper defense against SQL injection.
 * Also note {@code Strings.DATABAES_NAME} looks like a typo for
 * {@code DATABASE_NAME} — confirm against the {@code Strings} class.
 */
public class DatabaseFunctions {

    private static final DatabaseConnection db = new DatabaseConnection(Strings.DATABAES_NAME);

    /**
     * Inserts the given user as a new row in the users table.
     * <p>
     * Fixes the original query, which concatenated string values without any
     * quoting (producing invalid SQL) and without escaping.
     *
     * @param user the user to persist
     * @return true if the INSERT executed successfully
     */
    public static boolean addUser(User user) {
        String query = "INSERT INTO " + Strings.USERS_TABLE_NAME + " VALUES ("
                + quote(user.getStaffId()) + ","
                + quote(user.getFirstName()) + ","
                + quote(user.getLastName()) + ","
                + quote(user.getEmail()) + ","
                + quote(user.getDesignation()) + ","
                + quote(user.getUsername()) + ","
                + quote(user.getPassword()) + ");";
        return db.execute(query);
    }

    /**
     * @param staffId the staff id to look up
     * @return true if a user with the given staff id exists
     */
    public static boolean userExists(String staffId) {
        return getUser(staffId) != null;
    }

    /**
     * Loads the user with the given staff id.
     *
     * @param staffId the staff id to look up (quoted/escaped before use)
     * @return the first matching user, or null if none was found or the
     *         query/result handling failed
     */
    public static User getUser(String staffId) {
        String query = "SELECT * FROM " + Strings.USERS_TABLE_NAME
                + " WHERE " + Strings.STAFF_ID + " = " + quote(staffId) + ";";
        ResultSet values = db.executeQuery(query);
        if (values == null) {
            return null;
        }
        try {
            // A staff id identifies at most one row, so read only the first.
            if (values.next()) {
                User user = new User();
                user.setStaffId(values.getString(Strings.STAFF_ID));
                user.setFirstName(values.getString(Strings.FIRST_NAME));
                user.setLastName(values.getString(Strings.LAST_NAME));
                user.setEmail(values.getString(Strings.EMAIL));
                user.setDesignation(values.getString(Strings.DESIGNATION));
                user.setUsername(values.getString(Strings.USERNAME));
                user.setPassword(values.getString(Strings.PASSWORD));
                return user;
            }
        } catch (Exception e) {
            // Surface the failure instead of silently swallowing it.
            System.err.println("Failed to read user '" + staffId + "': " + e);
        }
        return null;
    }

    /**
     * Prints every user in the users table to stdout.
     */
    public static void printAllUsers() {
        String sql = "SELECT " + Strings.STAFF_ID + " FROM " + Strings.USERS_TABLE_NAME + ";";
        ResultSet result = db.executeQuery(sql);
        if (result != null) {
            try {
                while (result.next()) {
                    System.out.println(getUser(result.getString(1)));
                }
            } catch (Exception e) {
                // Surface the failure instead of silently swallowing it.
                System.err.println("Failed to list users: " + e);
            }
        }
    }

    /**
     * Quotes a string for safe embedding in a SQL literal: wraps it in single
     * quotes and doubles embedded single quotes. Null maps to SQL NULL.
     */
    private static String quote(String value) {
        if (value == null) {
            return "NULL";
        }
        return "'" + value.replace("'", "''") + "'";
    }

    public static void main(String[] args) {
        printAllUsers();
    }
}
|
marpme/lib-ledger-core
|
api/core/react-native/LibLedgerCore/android/binding/android/jni/jni/jni/Account.cpp
|
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from wallet.djinni
#include "Account.hpp" // my header
#include "AddressListCallback.hpp"
#include "AmountCallback.hpp"
#include "AmountListCallback.hpp"
#include "BitcoinLikeAccount.hpp"
#include "BlockCallback.hpp"
#include "ErrorCodeCallback.hpp"
#include "EventBus.hpp"
#include "Logger.hpp"
#include "Marshal.hpp"
#include "OperationQuery.hpp"
#include "Preferences.hpp"
#include "TimePeriod.hpp"
#include "WalletType.hpp"
namespace djinni_generated {
// Djinni-generated JNI glue for api::Account. Each native_1* entry point:
//   1. recovers the C++ Account from the jlong proxy handle,
//   2. forwards the call through the shared_ptr, and
//   3. marshals the result (if any) back into a Java object/primitive.
// NOTE(review): autogenerated file (see header) — regenerate from
// wallet.djinni rather than editing by hand.
Account::Account() : ::djinni::JniInterface<::ledger::core::api::Account, Account>("Account$CppProxy") {}
Account::~Account() = default;
// Frees the C++ proxy handle when the Java-side CppProxy is destroyed.
CJNIEXPORT void JNICALL Java_Account_00024CppProxy_nativeDestroy(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
    try {
        DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
        delete reinterpret_cast<::djinni::CppProxyHandle<::ledger::core::api::Account>*>(nativeRef);
    } JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, )
}
CJNIEXPORT jint JNICALL Java_Account_00024CppProxy_native_1getIndex(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
    try {
        DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
        const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
        auto r = ref->getIndex();
        return ::djinni::release(::djinni::I32::fromCpp(jniEnv, r));
    } JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, 0 /* value doesn't matter */)
}
CJNIEXPORT jobject JNICALL Java_Account_00024CppProxy_native_1queryOperations(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
    try {
        DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
        const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
        auto r = ref->queryOperations();
        return ::djinni::release(::djinni_generated::OperationQuery::fromCpp(jniEnv, r));
    } JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, 0 /* value doesn't matter */)
}
// Asynchronous: the result is delivered through the Java AmountCallback.
CJNIEXPORT void JNICALL Java_Account_00024CppProxy_native_1getBalance(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef, jobject j_callback)
{
    try {
        DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
        const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
        ref->getBalance(::djinni_generated::AmountCallback::toCpp(jniEnv, j_callback));
    } JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, )
}
// Asynchronous: the result is delivered through the Java AmountListCallback.
CJNIEXPORT void JNICALL Java_Account_00024CppProxy_native_1getBalanceHistory(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef, jstring j_start, jstring j_end, jobject j_period, jobject j_callback)
{
    try {
        DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
        const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
        ref->getBalanceHistory(::djinni::String::toCpp(jniEnv, j_start),
                               ::djinni::String::toCpp(jniEnv, j_end),
                               ::djinni_generated::TimePeriod::toCpp(jniEnv, j_period),
                               ::djinni_generated::AmountListCallback::toCpp(jniEnv, j_callback));
    } JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, )
}
CJNIEXPORT jboolean JNICALL Java_Account_00024CppProxy_native_1isSynchronizing(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
    try {
        DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
        const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
        auto r = ref->isSynchronizing();
        return ::djinni::release(::djinni::Bool::fromCpp(jniEnv, r));
    } JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, 0 /* value doesn't matter */)
}
CJNIEXPORT jobject JNICALL Java_Account_00024CppProxy_native_1synchronize(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
    try {
        DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
        const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
        auto r = ref->synchronize();
        return ::djinni::release(::djinni_generated::EventBus::fromCpp(jniEnv, r));
    } JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, 0 /* value doesn't matter */)
}
CJNIEXPORT jobject JNICALL Java_Account_00024CppProxy_native_1getPreferences(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
    try {
        DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
        const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
        auto r = ref->getPreferences();
        return ::djinni::release(::djinni_generated::Preferences::fromCpp(jniEnv, r));
    } JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, 0 /* value doesn't matter */)
}
CJNIEXPORT jobject JNICALL Java_Account_00024CppProxy_native_1getLogger(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
    try {
        DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
        const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
        auto r = ref->getLogger();
        return ::djinni::release(::djinni_generated::Logger::fromCpp(jniEnv, r));
    } JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, 0 /* value doesn't matter */)
}
CJNIEXPORT jobject JNICALL Java_Account_00024CppProxy_native_1getOperationPreferences(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef, jstring j_uid)
{
    try {
        DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
        const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
        auto r = ref->getOperationPreferences(::djinni::String::toCpp(jniEnv, j_uid));
        return ::djinni::release(::djinni_generated::Preferences::fromCpp(jniEnv, r));
    } JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, 0 /* value doesn't matter */)
}
CJNIEXPORT jobject JNICALL Java_Account_00024CppProxy_native_1asBitcoinLikeAccount(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
    try {
        DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
        const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
        auto r = ref->asBitcoinLikeAccount();
        return ::djinni::release(::djinni_generated::BitcoinLikeAccount::fromCpp(jniEnv, r));
    } JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, 0 /* value doesn't matter */)
}
CJNIEXPORT jboolean JNICALL Java_Account_00024CppProxy_native_1isInstanceOfBitcoinLikeAccount(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
    try {
        DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
        const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
        auto r = ref->isInstanceOfBitcoinLikeAccount();
        return ::djinni::release(::djinni::Bool::fromCpp(jniEnv, r));
    } JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, 0 /* value doesn't matter */)
}
CJNIEXPORT jboolean JNICALL Java_Account_00024CppProxy_native_1isInstanceOfEthereumLikeAccount(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
try {
DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
auto r = ref->isInstanceOfEthereumLikeAccount();
return ::djinni::release(::djinni::Bool::fromCpp(jniEnv, r));
} JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, 0 /* value doesn't matter */)
}
CJNIEXPORT jboolean JNICALL Java_Account_00024CppProxy_native_1isInstanceOfRippleLikeAccount(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
try {
DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
auto r = ref->isInstanceOfRippleLikeAccount();
return ::djinni::release(::djinni::Bool::fromCpp(jniEnv, r));
} JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, 0 /* value doesn't matter */)
}
CJNIEXPORT void JNICALL Java_Account_00024CppProxy_native_1getFreshPublicAddresses(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef, jobject j_callback)
{
try {
DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
ref->getFreshPublicAddresses(::djinni_generated::AddressListCallback::toCpp(jniEnv, j_callback));
} JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, )
}
CJNIEXPORT jobject JNICALL Java_Account_00024CppProxy_native_1getWalletType(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
try {
DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
auto r = ref->getWalletType();
return ::djinni::release(::djinni_generated::WalletType::fromCpp(jniEnv, r));
} JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, 0 /* value doesn't matter */)
}
CJNIEXPORT jobject JNICALL Java_Account_00024CppProxy_native_1getEventBus(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
try {
DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
auto r = ref->getEventBus();
return ::djinni::release(::djinni_generated::EventBus::fromCpp(jniEnv, r));
} JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, 0 /* value doesn't matter */)
}
CJNIEXPORT void JNICALL Java_Account_00024CppProxy_native_1startBlockchainObservation(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
try {
DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
ref->startBlockchainObservation();
} JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, )
}
CJNIEXPORT void JNICALL Java_Account_00024CppProxy_native_1stopBlockchainObservation(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
try {
DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
ref->stopBlockchainObservation();
} JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, )
}
CJNIEXPORT jboolean JNICALL Java_Account_00024CppProxy_native_1isObservingBlockchain(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
try {
DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
auto r = ref->isObservingBlockchain();
return ::djinni::release(::djinni::Bool::fromCpp(jniEnv, r));
} JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, 0 /* value doesn't matter */)
}
CJNIEXPORT void JNICALL Java_Account_00024CppProxy_native_1getLastBlock(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef, jobject j_callback)
{
try {
DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
ref->getLastBlock(::djinni_generated::BlockCallback::toCpp(jniEnv, j_callback));
} JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, )
}
CJNIEXPORT jstring JNICALL Java_Account_00024CppProxy_native_1getRestoreKey(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef)
{
try {
DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
auto r = ref->getRestoreKey();
return ::djinni::release(::djinni::String::fromCpp(jniEnv, r));
} JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, 0 /* value doesn't matter */)
}
CJNIEXPORT void JNICALL Java_Account_00024CppProxy_native_1eraseDataSince(JNIEnv* jniEnv, jobject /*this*/, jlong nativeRef, jobject j_date, jobject j_callback)
{
try {
DJINNI_FUNCTION_PROLOGUE1(jniEnv, nativeRef);
const auto& ref = ::djinni::objectFromHandleAddress<::ledger::core::api::Account>(nativeRef);
ref->eraseDataSince(::djinni::Date::toCpp(jniEnv, j_date),
::djinni_generated::ErrorCodeCallback::toCpp(jniEnv, j_callback));
} JNI_TRANSLATE_EXCEPTIONS_RETURN(jniEnv, )
}
} // namespace djinni_generated
|
pradeepkumarcm-egov/DIGIT-Dev
|
finance/egov/egov-egi/src/main/java/org/egov/infra/microservice/models/StorageResponse.java
|
package org.egov.infra.microservice.models;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
 * Response wrapper returned by the file-storage microservice.
 * Carries the list of stored-file descriptors for a storage request.
 */
public class StorageResponse {

    // Descriptors of the files handled by the storage service.
    private List<FileReq> files;

    /** @return the stored-file descriptors (may be {@code null} if unset) */
    public List<FileReq> getFiles() {
        return files;
    }

    /** @param files the stored-file descriptors to carry in this response */
    public void setFiles(List<FileReq> files) {
        this.files = files;
    }
}
|
8pig/springboot-blog-java
|
parent-blog/api-blog/src/main/java/com/zhou/blog/service/impl/ArticleServiceImpl.java
|
<reponame>8pig/springboot-blog-java
package com.zhou.blog.service.impl;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.databind.util.BeanUtil;
import com.mysql.cj.log.Log;
import com.zhou.blog.dao.dos.Archives;
import com.zhou.blog.dao.mapper.ArticleBodyMapper;
import com.zhou.blog.dao.mapper.ArticleMapper;
import com.zhou.blog.dao.mapper.ArticleTagMapper;
import com.zhou.blog.dao.pojo.*;
import com.zhou.blog.service.*;
import com.zhou.blog.utils.UserThreadLocal;
import com.zhou.blog.vo.ArticleBodyVo;
import com.zhou.blog.vo.ArticleVo;
import com.zhou.blog.vo.Result;
import com.zhou.blog.vo.TagVo;
import com.zhou.blog.vo.params.ArticleParam;
import com.zhou.blog.vo.params.PageParams;
import org.joda.time.DateTime;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Service
public class ArticleServiceImpl implements ArticleService {

    // ---- collaborators (field injection kept to match the project style) ----

    @Autowired
    private ArticleMapper articleMapper;

    @Autowired
    private TagService tagService;

    @Autowired
    private SysUserService sysUserService;

    @Autowired
    private CategoryService categoryService;

    @Autowired
    private ThreadService threadService;

    @Resource
    private ArticleTagMapper articleTagMapper;

    @Resource
    private ArticleBodyMapper articleBodyMapper;

    /**
     * Pages through articles, optionally filtered by category, tag, year and month.
     * Filtering is delegated to a custom mapper query; entities are mapped to view
     * objects enriched with tag and author information.
     *
     * @param pageParams paging and filter parameters
     * @return paged list of {@link ArticleVo}
     */
    @Override
    public Result listArticle(PageParams pageParams) {
        Page<Article> page = new Page<>(pageParams.getPage(), pageParams.getPageSize());
        IPage<Article> articleIPage = articleMapper.listArticle(
                page,
                pageParams.getCategoryId(),
                pageParams.getTagId(),
                pageParams.getYear(),
                pageParams.getMonth());
        return Result.success(copyList(articleIPage.getRecords(), true, true));
    }

    /**
     * Most-viewed articles.
     * Equivalent SQL: select id, title from article order by view_counts desc limit {limit}
     *
     * @param limit maximum number of articles returned
     */
    @Override
    public Result hotArticle(int limit) {
        LambdaQueryWrapper<Article> queryWrapper = new LambdaQueryWrapper<>();
        queryWrapper.orderByDesc(Article::getViewCounts);
        queryWrapper.select(Article::getId, Article::getTitle);
        queryWrapper.last("limit " + limit);
        List<Article> articleList = articleMapper.selectList(queryWrapper);
        return Result.success(copyList(articleList, false, false));
    }

    /**
     * Most recently created articles.
     * Equivalent SQL: select id, title from article order by create_date desc limit {limit}
     *
     * @param limit maximum number of articles returned
     */
    @Override
    public Result newArticle(int limit) {
        LambdaQueryWrapper<Article> queryWrapper = new LambdaQueryWrapper<>();
        queryWrapper.orderByDesc(Article::getCreateDate);
        queryWrapper.select(Article::getId, Article::getTitle);
        queryWrapper.last("limit " + limit);
        List<Article> articleList = articleMapper.selectList(queryWrapper);
        return Result.success(copyList(articleList, false, false));
    }

    /** Article archive summary (counts grouped by year/month). */
    @Override
    public Result listArchives() {
        List<Archives> archivesList = articleMapper.listArchives();
        return Result.success(archivesList);
    }

    // ---- entity -> view-object mapping helpers ----

    /** Maps entities without body/category information (delegates to the full overload). */
    private List<ArticleVo> copyList(List<Article> records, boolean isTag, boolean isAuthor) {
        return copyList(records, isTag, isAuthor, false, false);
    }

    /** Maps entities, selectively enriching each view object. */
    private List<ArticleVo> copyList(List<Article> records, boolean isTag, boolean isAuthor, boolean isBody, boolean isCategory) {
        List<ArticleVo> articleVoList = new ArrayList<>();
        for (Article record : records) {
            articleVoList.add(copy(record, isTag, isAuthor, isBody, isCategory));
        }
        return articleVoList;
    }

    /**
     * Converts one entity into its view object. Not every endpoint needs
     * tags/author/body/category, so each enrichment is toggled by a flag.
     */
    private ArticleVo copy(Article article, boolean isTag, boolean isAuthor, boolean isBody, boolean isCategory) {
        ArticleVo articleVo = new ArticleVo();
        BeanUtils.copyProperties(article, articleVo);
        // The entity id is a Long while the VO exposes a String id, so it must be
        // set explicitly (copyProperties skips properties with incompatible types).
        articleVo.setId(String.valueOf(article.getId()));
        articleVo.setCreateDate(new DateTime(article.getCreateDate()).toString("yyyy-MM-dd HH:mm"));
        if (isTag) {
            articleVo.setTags(tagService.findTagsByArticleId(article.getId()));
        }
        if (isAuthor) {
            articleVo.setAuthor(sysUserService.findUserById(article.getAuthorId()).getNickname());
        }
        if (isBody) {
            articleVo.setBody(findArticleBodyById(article.getBodyId()));
        }
        if (isCategory) {
            try {
                articleVo.setCategory(categoryService.findCategoryById(article.getCategoryId()));
            } catch (Exception e) {
                // Best effort: a missing/broken category must not break article rendering.
                System.err.println("Failed to resolve category for article " + article.getId() + ": " + e);
            }
        }
        return articleVo;
    }

    /** Loads the article body content by its id. */
    private ArticleBodyVo findArticleBodyById(Long bodyId) {
        ArticleBody articleBody = articleBodyMapper.selectById(bodyId);
        ArticleBodyVo articleBodyVo = new ArticleBodyVo();
        articleBodyVo.setContent(articleBody.getContent());
        return articleBodyVo;
    }

    /**
     * Loads a full article (body and category included) and bumps its view
     * counter on a separate thread so the read path stays fast.
     *
     * @param articleId primary key of the article
     */
    @Override
    public Result findArticleById(Long articleId) {
        Article article = this.articleMapper.selectById(articleId);
        ArticleVo articleVo = copy(article, true, true, true, true);
        threadService.updateArticleViewCout(articleMapper, article);
        return Result.success(articleVo);
    }

    /**
     * Publishes a new article for the currently logged-in user:
     * 1. inserts the article row (this generates the article id),
     * 2. stores the article/tag relations,
     * 3. stores the body and links it back to the article.
     *
     * @param articleParam title, summary, category, tags and body of the new article
     * @return the id of the created article wrapped in a {@link Result}
     */
    @Override
    public Result publish(ArticleParam articleParam) {
        SysUser sysUser = UserThreadLocal.get();

        Article article = new Article();
        article.setAuthorId(sysUser.getId());
        article.setWeight(Article.Article_Common);
        article.setViewCounts(0);
        article.setTitle(articleParam.getTitle());
        article.setSummary(articleParam.getSummary());
        article.setCommentCounts(0);
        article.setCreateDate(System.currentTimeMillis());
        article.setCategoryId(Long.parseLong(articleParam.getCategory().getId()));
        articleMapper.insert(article); // generates article.id

        // Tag relations.
        List<TagVo> tags = articleParam.getTags();
        if (tags != null) {
            for (TagVo tag : tags) {
                ArticleTag articleTag = new ArticleTag();
                articleTag.setTagId(Long.parseLong(tag.getId()));
                articleTag.setArticleId(article.getId());
                articleTagMapper.insert(articleTag);
            }
        }

        // Body, linked back to the article.
        ArticleBody articleBody = new ArticleBody();
        articleBody.setArticleId(article.getId());
        articleBody.setContent(articleParam.getBody().getContent());
        articleBody.setContentHtml(articleParam.getBody().getContentHtml());
        articleBodyMapper.insert(articleBody);

        article.setBodyId(articleBody.getId());
        articleMapper.updateById(article);

        Map<String, String> map = new HashMap<>();
        map.put("id", article.getId().toString());
        return Result.success(map);
    }
}
|
dunarel/dunphd-thesis
|
Chapter4/Supplementary/proc_hom/db/migrate/075_add_data_to_tree_order.rb
|
require 'faster_csv'
# Migration: (re)populates the taxons.tree_order (and optionally tree_name)
# columns via raw SQL updates.
class AddDataToTreeOrder < ActiveRecord::Migration
  # tree_order position => taxon id. Only positions with a known taxon id are
  # listed; the original data contained ~85 placeholder rows of the form
  # "[n,]" (nil id) which generated invalid "where id = " SQL at migration
  # time — those have been dropped.
  TREE_ORDER_DATA = [
    [0, 228908],
    [1, 374847],
    [2, 272557],
    [3, 273057],
    [4, 768679],
    [5, 188937],
    [6, 362976],
    [7, 309800],
    [8, 348780],
    [9, 634497],
    [10, 272569],
    [11, 243090],
    [12, 190304],
    [13, 240015],
    [14, 743525],
    [15, 224324],
    [16, 484019],
    [17, 255470],
    [18, 311424],
    [19, 330214],
    [20, 379066],
    [21, 267671],
    [22, 759914],
    [23, 565034],
    [24, 167539],
    [25, 1148]
  ].freeze

  # Forward: reset every tree_order, then write the known values.
  def up
    self.class.update_data()
  end

  # Rollback: clear tree_order for all taxons.
  def down
    self.class.delete_data()
  end

  def self.update_data()
    self.delete_data()
    self.insert_data()
  end

  # Alternative loader that reads taxon tree names/orders from a TSV export.
  # Values are quoted through the connection so the generated SQL stays
  # well-formed (and safe) even when the CSV contains quotes or NULLs.
  def self.update_data_csv()
    conn = ActiveRecord::Base.connection
    csv_data = FasterCSV.open("#{AppConfig.db_imports_dir}/sp-tr-cin-taxon-names.csv", :col_sep => "\t")
    columns = csv_data.shift
    csv_data.each { |row|
      puts "row[0]: #{row[0]},row[1]: #{row[1]}, row[2]: #{row[2]}, row[3]: #{row[3]}"
      execute "update taxons
               set tree_name = #{conn.quote(row[2])},
                   tree_order = #{conn.quote(row[3])}
               where id = #{conn.quote(row[0])}"
    }
  end

  # Writes the hard-coded tree_order values. Integer() coercion rejects any
  # malformed entry instead of splicing it into SQL.
  def self.insert_data()
    TREE_ORDER_DATA.each { |order, taxon_id|
      next if taxon_id.nil? # defensive: skip rows without a known taxon id
      execute "update taxons
               set tree_order = #{Integer(order)}
               where id = #{Integer(taxon_id)}"
    }
  end

  def self.delete_data()
    execute "update taxons
             set tree_order = null"
  end
end
|
smoe/nydax
|
frontend/src/components/Drawer/Drawer.js
|
/* eslint-disable css-modules/no-undef-class */
/* eslint-disable jsx-a11y/anchor-is-valid */
import React from 'react';
import PropTypes from 'prop-types';
import withStyles from 'isomorphic-style-loader/lib/withStyles';
import { connect } from 'react-redux';
import cx from 'classnames';
import s from './Drawer.css';
import Link from '../Link';
import routes from '../../constants/routes';
import themify from '../../themify';
/**
 * Slide-in navigation drawer.
 *
 * Renders a dimming overlay plus a themed sidebar with links to the main
 * application routes (Dashboard, Trading Platform, Deposit, Withdraw,
 * Settings). The menu entry matching `currentRoute` is highlighted, and the
 * `show` prop toggles the `active` CSS classes that animate the drawer in.
 */
class Drawer extends React.Component {
  render() {
    return (
      <React.Fragment>
        {/* Page-dimming overlay shown while the drawer is open */}
        <div
          className={
            this.props.show ? cx(s.sidebarOverlay, s.active) : s.sidebarOverlay
          }
        />
        {/* The drawer itself; themify() applies the theme-specific class set */}
        <nav
          className={
            this.props.show
              ? cx(themify(s, s.sidebarDrawer, this.props.theme), s.active)
              : themify(s, s.sidebarDrawer, this.props.theme)
          }
        >
          {/* Back-arrow control that dismisses the drawer */}
          <div
            role="presentation"
            onClick={this.props.onHide}
            className={s.dismiss}
          >
            <i className="fa fa-arrow-left" />
          </div>
          <br />
          <br />
          {/*
          <div className="sidebarHeader">
            <h3>Sidebar</h3>
          </div> */}
          <ul className={s.menu}>
            {/* Dashboard */}
            <li
              className={
                this.props.currentRoute === routes.DASHBOARD
                  ? cx(s.menuItem, s.active)
                  : s.menuItem
              }
            >
              <Link to={routes.DASHBOARD}>
                <svg
                  className={s.menuIcon}
                  xmlns="http://www.w3.org/2000/svg"
                  width="30"
                  height="30"
                  viewBox="0 0 24 24"
                  fill="none"
                  stroke="currentColor"
                  strokeWidth="1"
                  strokeLinecap="round"
                  strokeLinejoin="round"
                >
                  <rect x="3" y="3" width="18" height="18" rx="2" ry="2" />
                  <line x1="3" y1="9" x2="21" y2="9" />
                  <line x1="9" y1="21" x2="9" y2="9" />
                </svg>
                <span>Dashboard</span>
              </Link>
            </li>
            {/* Trading Platform */}
            <li
              className={
                this.props.currentRoute === routes.TRADING_PLATFORM
                  ? cx(s.menuItem, s.active)
                  : s.menuItem
              }
            >
              <Link to={routes.TRADING_PLATFORM}>
                <svg
                  style={{ marginLeft: 13 }}
                  className={s.menuIcon}
                  xmlns="http://www.w3.org/2000/svg"
                  width="25"
                  height="25"
                  viewBox="0 0 15 15"
                >
                  <g fill="none" fillRule="evenodd" stroke="currentColor">
                    <path d="M13.036 9.385H1.975A.975.975 0 0 1 1 8.41V1.975C1 1.436 1.437 1 1.975 1h11.06c.54 0 .975.436.975.975V8.41a.975.975 0 0 1-.974.975z" />
                    <path
                      strokeLinecap="round"
                      strokeLinejoin="round"
                      d="M2.638 14.251l4.867-4.866v4.866M12.42 14.251L7.505 9.385M1 5.819h1.638l1.928-1.494 2.217 2.536 1.445-1.669 1.35 1.669 4.432-4.223"
                    />
                  </g>
                </svg>{' '}
                <span>Trading Platform</span>
              </Link>
            </li>
            {/* Deposit */}
            <li
              className={
                this.props.currentRoute === routes.DEPOSIT
                  ? cx(s.menuItem, s.active)
                  : s.menuItem
              }
            >
              <Link to={routes.DEPOSIT}>
                <svg
                  style={{ marginLeft: 15 }}
                  className={s.menuIcon}
                  xmlns="http://www.w3.org/2000/svg"
                  width="25"
                  height="25"
                  viewBox="0 0 18 16"
                >
                  <g
                    fill="none"
                    fillRule="evenodd"
                    stroke="currentColor"
                    strokeLinecap="round"
                    strokeLinejoin="round"
                  >
                    <path d="M4.345 8.942H1.54A.544.544 0 0 1 1 8.395V1.547C1 1.245 1.242 1 1.54 1h14.886c.299 0 .54.245.54.547v6.848a.544.544 0 0 1-.54.547h-2.774" />
                    <path d="M4.344 5.983H3V3.929h12v2.054h-1" />
                    <path d="M13.66 3.93v10.444a.544.544 0 0 1-.54.548H4.876a.544.544 0 0 1-.54-.548V3.93M8.979 12.329l.02-6.027" />
                    <path d="M6.379 9l2.617-2.698L11.636 9" />
                  </g>
                </svg>{' '}
                <span>Deposit</span>
              </Link>
            </li>
            {/* Withdraw */}
            <li
              className={
                this.props.currentRoute === routes.WITHDRAW
                  ? cx(s.menuItem, s.active)
                  : s.menuItem
              }
            >
              <Link to={routes.WITHDRAW}>
                <svg
                  style={{ marginLeft: 15 }}
                  className={s.menuIcon}
                  xmlns="http://www.w3.org/2000/svg"
                  width="25"
                  height="25"
                  viewBox="0 0 16 16"
                >
                  <g
                    fill="none"
                    fillRule="evenodd"
                    stroke="currentColor"
                    strokeLinecap="round"
                    strokeLinejoin="round"
                  >
                    <path d="M3.933 7.781h-2.46c-.261 0-.473-.24-.473-.536V1.536C1 1.24 1.212 1 1.473 1h13.054c.261 0 .473.24.473.536v5.71c0 .295-.212.535-.473.535h-2.433M8 2.651V7" />
                    <path d="M10.316 4.641L8 7 5.664 4.641M4.335 9.13h7.37c.44 0 .295.165.295.37v5.137c0 .205.144.37-.294.37h-7.37c-.44 0-.336-.165-.336-.37V9.5c0-.205-.104-.37.335-.37z" />
                  </g>
                </svg>{' '}
                <span>Withdraw</span>
              </Link>
            </li>
            {/* Settings */}
            <li
              className={
                this.props.currentRoute === routes.SETTINGS
                  ? cx(s.menuItem, s.active)
                  : s.menuItem
              }
            >
              <Link to={routes.SETTINGS}>
                <svg
                  style={{ marginLeft: 13 }}
                  className={s.menuIcon}
                  xmlns="http://www.w3.org/2000/svg"
                  width="30"
                  height="30"
                  viewBox="0 0 24 24"
                  fill="none"
                  stroke="currentColor"
                  strokeWidth="1"
                  strokeLinecap="round"
                  strokeLinejoin="round"
                >
                  <circle cx="12" cy="12" r="3" />
                  <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z" />
                </svg>{' '}
                <span>Settings</span>
              </Link>
            </li>
          </ul>
        </nav>
      </React.Fragment>
    );
  }
}
// Runtime prop contracts for <Drawer>.
Drawer.propTypes = {
  show: PropTypes.bool,
  // eslint-disable-next-line react/no-unused-prop-types
  onHide: PropTypes.func,
  currentRoute: PropTypes.string,
  theme: PropTypes.string,
};

// Fallbacks so the drawer renders sensibly when mounted without props.
Drawer.defaultProps = {
  show: false,
  onHide: () => false,
  currentRoute: '/',
  theme: 'dark',
};

// Pick only the slices of global state the drawer actually reads.
const mapState = ({ currentRoute, theme }) => ({ currentRoute, theme });

export default connect(mapState, undefined)(withStyles(s)(Drawer));
|
CNResoy/Spicy
|
src/minecraft/spicy/module/modules/player/FastUse.java
|
<reponame>CNResoy/Spicy
package spicy.module.modules.player;
import com.darkmagician6.eventapi.SubscribeEvent;
import net.minecraft.item.ItemBow;
import net.minecraft.network.play.client.C03PacketPlayer;
import net.minecraft.network.play.client.C07PacketPlayerDigging;
import net.minecraft.util.BlockPos;
import net.minecraft.util.EnumFacing;
import spicy.events.player.PlayerMotionUpdateEvent;
import spicy.main.Wrapper;
import spicy.module.Category;
import spicy.module.Module;
/**
 * FastUse module: once an item has been in use for a fixed number of ticks,
 * replays extra movement packets to fast-forward the server-side use timer and
 * then sends the "release use item" packet, making consumables near-instant.
 * Bows are excluded because their release depends on the actual charge time.
 *
 * @author Viserys
 * @since 5/06/2020
 */
public class FastUse extends Module {

    /** Use duration (in ticks) at which the release is triggered. */
    private static final int TRIGGER_USE_DURATION = 16;

    /** Number of movement packets replayed to advance the server-side use timer. */
    private static final int PACKET_BURST = 17;

    public FastUse() {
        super("FastUse", 0, Category.PLAYER, true);
    }

    @Override
    public void onEnabled() {
        System.out.println("FastUse enabled.");
        super.onEnabled();
    }

    @Override
    public void onDisabled() {
        System.out.println("FastUse disabled.");
        super.onDisabled();
    }

    /**
     * Runs on every motion update. Only acts on the PRE phase, and only when a
     * non-bow item has been in use for exactly {@link #TRIGGER_USE_DURATION} ticks.
     */
    @SubscribeEvent
    public void onUpdate(PlayerMotionUpdateEvent event) {
        if (event.getState() != PlayerMotionUpdateEvent.State.PRE) {
            return;
        }
        if (Wrapper.player().getItemInUseDuration() != TRIGGER_USE_DURATION) {
            return;
        }
        // Guard against the in-use stack disappearing between the duration check
        // and the dereference (the original code could NPE here).
        if (Wrapper.player().getItemInUse() == null
                || Wrapper.player().getItemInUse().getItem() instanceof ItemBow) {
            return;
        }
        for (int i = 0; i < PACKET_BURST; ++i) {
            Wrapper.packet(new C03PacketPlayer(true));
        }
        Wrapper.packet(new C07PacketPlayerDigging(C07PacketPlayerDigging.Action.RELEASE_USE_ITEM, BlockPos.ORIGIN, EnumFacing.DOWN));
    }

    @Override
    public String getModuleDesc() {
        return "Fast using items";
    }
}
|
lisaong/Accera
|
accera/value/src/Array.cpp
|
<filename>accera/value/src/Array.cpp
////////////////////////////////////////////////////////////////////////////////////////////////////
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
// Authors: <NAME>
////////////////////////////////////////////////////////////////////////////////////////////////////
#include "Array.h"
#include "EmitterContext.h"
#include <utilities/include/Exception.h>
#include <algorithm>
#include <cassert>
#include <functional>
#include <numeric>
#include <iostream>
namespace accera
{
using namespace utilities;
namespace value
{
namespace
{
MemoryLayout GetSliceLayout(const MemoryLayout& originalLayout, std::vector<int64_t> slicedDimensions)
{
std::sort(slicedDimensions.begin(), slicedDimensions.end(), std::greater<int64_t>());
MemoryLayout result = originalLayout;
for (auto dim : slicedDimensions)
{
result = result.GetSliceLayout(dim);
}
return result;
}
} // namespace
Array::Array() = default;
Array::Array(Value value, const std::string& name) :
_value(value)
{
if (!_value.IsDefined() || !_value.IsConstrained())
{
throw InputException(InputExceptionErrors::invalidArgument, "Value passed in must be defined and have a memory layout");
}
if (_value.GetLayout() == ScalarLayout)
{
throw InputException(InputExceptionErrors::invalidArgument, "Value passed in must not be scalar");
}
if (!name.empty())
{
SetName(name);
}
}
Array::~Array() = default;
Array::Array(const Array&) = default;
Array::Array(Array&&) noexcept = default;
Array& Array::operator=(const Array& other)
{
if (this != &other)
{
_value = other._value;
}
return *this;
}
Array& Array::operator=(Array&& other)
{
if (this != &other)
{
_value = std::move(other._value);
other._value = Value();
}
return *this;
}
Value Array::GetValue() const { return _value; }
Array Array::Copy() const
{
auto newValue = Allocate(_value.GetBaseType(), _value.GetLayout());
newValue = _value;
return newValue;
}
Scalar Array::operator()(const std::vector<Scalar>& indices)
{
if (static_cast<int64_t>(indices.size()) != GetValue().GetLayout().NumDimensions())
{
throw InputException(InputExceptionErrors::invalidSize);
}
std::vector<int64_t> dims(indices.size());
std::iota(dims.begin(), dims.end(), 0);
Value indexedValue = GetContext().Slice(_value, dims, indices);
return indexedValue;
}
Array Array::SubArray(const std::vector<Scalar>& offsets, const MemoryShape& shape, std::optional<std::vector<int64_t>> strides) const
{
assert(offsets.size() == (size_t) Rank() && shape.NumDimensions() == Rank());
if (!strides)
{
strides = std::vector<int64_t>(Rank(), 1LL);
}
assert(strides->size() == static_cast<size_t>(Rank()));
return GetContext().View(_value, offsets, shape, *strides);
}
Array Array::Slice(std::vector<int64_t> slicedDimensions, std::vector<Scalar> sliceOffsets) const
{
auto newLayout = GetSliceLayout(_value.GetLayout(), slicedDimensions);
return GetContext().Slice(_value, slicedDimensions, sliceOffsets);
}
Array Array::Reorder(const DimensionOrder& order) const
{
return GetContext().Reorder(_value, order);
}
// TODO: Enable when functionality is needed and semantics are fully cleared
#if 0
Array Array::MergeDimensions(int64_t dim1, int64_t dim2) const
{
return GetContext().MergeDimensions(_value, dim1, dim2);
}
Array Array::SplitDimension(int64_t dim, int64_t size) const
{
return GetContext().SplitDimension(_value, dim, size);
}
Array Array::Reshape(const MemoryLayout& layout) const
{
if (GetLayout().GetMemorySize() != layout.GetMemorySize())
{
throw InputException(InputExceptionErrors::invalidSize, "Total memory size of a reshape op must remain constant");
}
return GetContext().Reshape(_value, layout);
}
#endif // 0
utilities::MemoryShape Array::Shape() const { return _value.GetLayout().GetActiveSize(); }
utilities::MemoryLayout Array::GetLayout() const { return _value.GetLayout(); }
int64_t Array::Size() const { return static_cast<int64_t>(_value.GetLayout().NumElements()); }
int64_t Array::Rank() const { return static_cast<int64_t>(_value.GetLayout().NumDimensions()); }
ValueType Array::GetType() const { return _value.GetBaseType(); }
void Array::SetName(const std::string& name) { _value.SetName(name); }
std::string Array::GetName() const { return _value.GetName(); }
void For(Array array, std::function<void(const std::vector<Scalar>&)> fn)
{
auto layout = array.GetValue().GetLayout();
GetContext().For(layout, [fn = std::move(fn), &layout](std::vector<Scalar> coordinates) {
if (layout.NumDimensions() != static_cast<int>(coordinates.size()))
{
throw InputException(InputExceptionErrors::invalidSize);
}
fn(coordinates);
});
}
Array& Array::operator+=(Array m)
{
if (m.Shape() != Shape())
{
throw InputException(InputExceptionErrors::sizeMismatch);
}
if (m.GetType() != GetType())
{
throw InputException(InputExceptionErrors::typeMismatch);
}
For(*this, [this, &m](const std::vector<Scalar>& indices) {
(*this)(indices) += m(indices);
});
return *this;
}
Array& Array::operator-=(Array m)
{
if (m.Shape() != Shape())
{
throw InputException(InputExceptionErrors::sizeMismatch);
}
if (m.GetType() != GetType())
{
throw InputException(InputExceptionErrors::typeMismatch);
}
For(*this, [this, &m](const std::vector<Scalar>& indices) {
(*this)(indices) -= m(indices);
});
return *this;
}
Array& Array::operator+=(Scalar s)
{
if (s.GetType() != GetType())
{
throw InputException(InputExceptionErrors::typeMismatch);
}
For(*this, [this, &s](const std::vector<Scalar>& indices) {
(*this)(indices) += s;
});
return *this;
}
Array& Array::operator-=(Scalar s)
{
if (s.GetType() != GetType())
{
throw InputException(InputExceptionErrors::typeMismatch);
}
For(*this, [this, &s](const std::vector<Scalar>& indices) {
(*this)(indices) -= s;
});
return *this;
}
Array& Array::operator*=(Scalar s)
{
if (s.GetType() != GetType())
{
throw InputException(InputExceptionErrors::typeMismatch);
}
For(*this, [this, &s](const std::vector<Scalar>& indices) {
(*this)(indices) *= s;
});
return *this;
}
Array& Array::operator/=(Scalar s)
{
if (s.GetType() != GetType())
{
throw InputException(InputExceptionErrors::typeMismatch);
}
For(*this, [this, &s](const std::vector<Scalar>& indices) {
(*this)(indices) /= s;
});
return *this;
}
} // namespace value
} // namespace accera
|
lucmichalski/go-prestashop
|
internal/models/shop_group.go
|
<filename>internal/models/shop_group.go
package models
import (
"context"
"fmt"
"gorm.io/gorm"
)
// _ShopGroupMgr wraps the shared base manager with queries scoped to the
// ps_shop_group table. (Generated-style accessor code; logic kept as-is.)
type _ShopGroupMgr struct {
	*_BaseMgr
}

// ShopGroupMgr builds a manager bound to ps_shop_group. It panics when db is
// nil since every query needs a live handle; the manager owns a cancellable
// context used by all of its queries.
func ShopGroupMgr(db *gorm.DB) *_ShopGroupMgr {
	if db == nil {
		panic(fmt.Errorf("ShopGroupMgr need init by db"))
	}
	ctx, cancel := context.WithCancel(context.Background())
	return &_ShopGroupMgr{_BaseMgr: &_BaseMgr{DB: db.Table("ps_shop_group"), isRelated: globalIsRelated, ctx: ctx, cancel: cancel, timeout: -1}}
}

// GetTableName returns the name of the backing table.
func (obj *_ShopGroupMgr) GetTableName() string {
	return "ps_shop_group"
}

// Get fetches a single ShopGroup row (first match, no filter).
func (obj *_ShopGroupMgr) Get() (result ShopGroup, err error) {
	err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Find(&result).Error
	return
}

// Gets fetches all ShopGroup rows.
func (obj *_ShopGroupMgr) Gets() (results []*ShopGroup, err error) {
	err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Find(&results).Error
	return
}

// WithIDShopGroup filters by the id_shop_group primary key.
func (obj *_ShopGroupMgr) WithIDShopGroup(idShopGroup int) Option {
	return optionFunc(func(o *options) { o.query["id_shop_group"] = idShopGroup })
}

// WithName filters by the group name.
func (obj *_ShopGroupMgr) WithName(name string) Option {
	return optionFunc(func(o *options) { o.query["name"] = name })
}

// WithShareCustomer filters by the share_customer flag.
func (obj *_ShopGroupMgr) WithShareCustomer(shareCustomer bool) Option {
	return optionFunc(func(o *options) { o.query["share_customer"] = shareCustomer })
}

// WithShareOrder filters by the share_order flag.
func (obj *_ShopGroupMgr) WithShareOrder(shareOrder bool) Option {
	return optionFunc(func(o *options) { o.query["share_order"] = shareOrder })
}

// WithShareStock filters by the share_stock flag.
func (obj *_ShopGroupMgr) WithShareStock(shareStock bool) Option {
	return optionFunc(func(o *options) { o.query["share_stock"] = shareStock })
}

// WithActive filters by the active flag.
func (obj *_ShopGroupMgr) WithActive(active bool) Option {
	return optionFunc(func(o *options) { o.query["active"] = active })
}

// WithDeleted filters by the soft-delete flag.
func (obj *_ShopGroupMgr) WithDeleted(deleted bool) Option {
	return optionFunc(func(o *options) { o.query["deleted"] = deleted })
}

// GetByOption fetches the first row matching all supplied filter Options.
func (obj *_ShopGroupMgr) GetByOption(opts ...Option) (result ShopGroup, err error) {
	options := options{
		query: make(map[string]interface{}, len(opts)),
	}
	for _, o := range opts {
		o.apply(&options)
	}
	err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where(options.query).Find(&result).Error
	return
}

// GetByOptions fetches every row matching all supplied filter Options.
func (obj *_ShopGroupMgr) GetByOptions(opts ...Option) (results []*ShopGroup, err error) {
	options := options{
		query: make(map[string]interface{}, len(opts)),
	}
	for _, o := range opts {
		o.apply(&options)
	}
	err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where(options.query).Find(&results).Error
	return
}

// GetFromIDShopGroup fetches the row with the given primary key.
func (obj *_ShopGroupMgr) GetFromIDShopGroup(idShopGroup int) (result ShopGroup, err error) {
	err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where("id_shop_group = ?", idShopGroup).Find(&result).Error
	return
}

// GetBatchFromIDShopGroup fetches the rows whose primary keys are in the slice.
func (obj *_ShopGroupMgr) GetBatchFromIDShopGroup(idShopGroups []int) (results []*ShopGroup, err error) {
	err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where("id_shop_group IN (?)", idShopGroups).Find(&results).Error
	return
}

// GetFromName fetches the rows with the given name.
func (obj *_ShopGroupMgr) GetFromName(name string) (results []*ShopGroup, err error) {
	err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where("name = ?", name).Find(&results).Error
	return
}

// GetBatchFromName fetches the rows whose names are in the slice.
func (obj *_ShopGroupMgr) GetBatchFromName(names []string) (results []*ShopGroup, err error) {
	err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where("name IN (?)", names).Find(&results).Error
	return
}

// GetFromShareCustomer fetches the rows with the given share_customer value.
func (obj *_ShopGroupMgr) GetFromShareCustomer(shareCustomer bool) (results []*ShopGroup, err error) {
	err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where("share_customer = ?", shareCustomer).Find(&results).Error
	return
}
func (obj *_ShopGroupMgr) GetBatchFromShareCustomer(shareCustomers []bool) (results []*ShopGroup, err error) {
err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where("share_customer IN (?)", shareCustomers).Find(&results).Error
return
}
func (obj *_ShopGroupMgr) GetFromShareOrder(shareOrder bool) (results []*ShopGroup, err error) {
err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where("share_order = ?", shareOrder).Find(&results).Error
return
}
func (obj *_ShopGroupMgr) GetBatchFromShareOrder(shareOrders []bool) (results []*ShopGroup, err error) {
err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where("share_order IN (?)", shareOrders).Find(&results).Error
return
}
func (obj *_ShopGroupMgr) GetFromShareStock(shareStock bool) (results []*ShopGroup, err error) {
err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where("share_stock = ?", shareStock).Find(&results).Error
return
}
func (obj *_ShopGroupMgr) GetBatchFromShareStock(shareStocks []bool) (results []*ShopGroup, err error) {
err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where("share_stock IN (?)", shareStocks).Find(&results).Error
return
}
func (obj *_ShopGroupMgr) GetFromActive(active bool) (results []*ShopGroup, err error) {
err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where("active = ?", active).Find(&results).Error
return
}
func (obj *_ShopGroupMgr) GetBatchFromActive(actives []bool) (results []*ShopGroup, err error) {
err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where("active IN (?)", actives).Find(&results).Error
return
}
func (obj *_ShopGroupMgr) GetFromDeleted(deleted bool) (results []*ShopGroup, err error) {
err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where("deleted = ?", deleted).Find(&results).Error
return
}
func (obj *_ShopGroupMgr) GetBatchFromDeleted(deleteds []bool) (results []*ShopGroup, err error) {
err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where("deleted IN (?)", deleteds).Find(&results).Error
return
}
func (obj *_ShopGroupMgr) FetchByPrimaryKey(idShopGroup int) (result ShopGroup, err error) {
err = obj.DB.WithContext(obj.ctx).Table(obj.GetTableName()).Where("id_shop_group = ?", idShopGroup).Find(&result).Error
return
}
|
fransfilastap/pm5
|
src/main/java/id/franspratama/geol/core/dao/DailyRegionJDBCAvailabilityRepository.java
|
package id.franspratama.geol.core.dao;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.HashMap;
import java.util.List;
import java.util.stream.Collectors;
import javax.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Repository;
import id.franspratama.geol.core.pojo.DailyRegionAvailability;
import id.franspratama.geol.core.pojo.NetworkTechnology;
import id.franspratama.geol.core.pojo.Region;
import id.franspratama.geol.core.pojo.TimeSpan;
/**
 * JDBC-backed repository returning per-day availability figures from the
 * daily_availability_region table, grouped by region and technology.
 */
@Repository("dailyRegionJDBCAvailabilityRepository")
public class DailyRegionJDBCAvailabilityRepository implements ITimespanBasedAvailabilityRepository<DailyRegionAvailability>{

    @Autowired
    public JdbcTemplate jdbc;

    // Maps a TimeSpan to a WHERE clause fragment.
    // NOTE(review): populated in init() but never consulted by
    // getAvailability(), so every span other than LAST_MONTH falls back to
    // the current-month QUERY — confirm whether this is intentional.
    private HashMap<TimeSpan, String> timespanMap;

    // NOTE(review): appears unused in this class; also SimpleDateFormat is
    // not thread-safe, so do not share this instance if it gains callers.
    private SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");

    // Rows of the current calendar month.
    private static final String QUERY = "SELECT * FROM daily_availability_region WHERE MONTH(NOW()) = MONTH(TIME) AND YEAR(NOW()) = YEAR(TIME)";

    // Rows of the previous calendar month, deduplicated per (time, region,
    // technology).
    private static final String LAST_MONTH_QUERY = "SELECT * FROM daily_availability_region "+
    "WHERE YEAR(time) = YEAR(CURRENT_DATE - INTERVAL 1 MONTH)"+
    "AND MONTH(time) = MONTH(CURRENT_DATE - INTERVAL 1 MONTH)"
    + " GROUP BY TIME,region,technology";

    // Pre-builds WHERE fragments for the rolling day/week spans.
    @PostConstruct
    public void init(){
        timespanMap = new HashMap<>();
        timespanMap.put(TimeSpan.TODAY, "WHERE DATE(TIME) = CURDATE()");
        timespanMap.put(TimeSpan.LAST_2_DAY, "WHERE DATE(TIME) BETWEEN DATE_SUB(CURDATE(), INTERVAL 1 DAY) AND DATE_ADD(CURDATE(), INTERVAL 1 DAY)");
        timespanMap.put(TimeSpan.LAST_3_DAY, "WHERE DATE(TIME) BETWEEN DATE_SUB(CURDATE(), INTERVAL 2 DAY) AND DATE_ADD(CURDATE(), INTERVAL 1 DAY)");
        timespanMap.put(TimeSpan._1WEEK, "WHERE DATE(TIME) BETWEEN DATE_SUB(CURDATE(), INTERVAL 1 WEEK) AND DATE_ADD(CURDATE(), INTERVAL 1 DAY)");
        timespanMap.put(TimeSpan._2WEEK, "WHERE DATE(TIME) BETWEEN DATE_SUB(CURDATE(), INTERVAL 2 WEEK) AND DATE_ADD(CURDATE(), INTERVAL 1 DAY)");
        timespanMap.put(TimeSpan._3WEEK, "WHERE DATE(TIME) BETWEEN DATE_SUB(CURDATE(), INTERVAL 3 WEEK) AND DATE_ADD(CURDATE(), INTERVAL 1 DAY)");
        timespanMap.put(TimeSpan._4WEEK, "WHERE DATE(TIME) BETWEEN DATE_SUB(CURDATE(), INTERVAL 4 WEEK) AND DATE_ADD(CURDATE(), INTERVAL 1 DAY)");
    }

    /**
     * Returns availability rows for the given span. Only LAST_MONTH selects a
     * distinct query; every other span (including THIS_MONTH) runs QUERY.
     */
    @Override
    public List<DailyRegionAvailability> getAvailability(TimeSpan span) {
        String finalQuery = QUERY;
        if( span == TimeSpan.LAST_MONTH ){
            finalQuery = LAST_MONTH_QUERY;
        }
        else if( span == TimeSpan.THIS_MONTH ){
            finalQuery = QUERY;
        }
        List<DailyRegionAvailability> navs = jdbc.query(finalQuery, new RowMapper<DailyRegionAvailability>(){
            @Override
            public DailyRegionAvailability mapRow(ResultSet res, int i) throws SQLException {
                // Column-by-column mapping of one daily_availability_region row.
                DailyRegionAvailability nav = new DailyRegionAvailability();
                nav.setAvailability( res.getDouble("Availability") );
                nav.setRegion( res.getString("Region") );
                nav.setTechnology( res.getString("Technology") );
                nav.setTime( res.getTimestamp("Time") );
                nav.setId( res.getInt("id") );
                return nav;
            }
        });
        return navs;
    }

    /**
     * Returns availability rows for the given span filtered by technology.
     * NOTE(review): the {@code region} parameter is ignored — results are
     * filtered only by technology. Confirm and add a region predicate.
     */
    @Override
    public List<DailyRegionAvailability> getAvailability(TimeSpan span, Region region, NetworkTechnology technology) {
        return getAvailability(span).stream().filter(p->{
            return p.getTechnology().equalsIgnoreCase(technology.getTechnology());
        }).collect(Collectors.toList());
    }
}
|
gdonald/eldac
|
spec/factories/project_folders.rb
|
<gh_stars>0
# frozen_string_literal: true
FactoryBot.define do
  # Builds a ProjectFolder join record wired to freshly created, valid
  # user / project / folder associations.
  factory :project_folder do
    user { create(:user, :valid_user) }
    project { create(:project, :valid_project) }
    folder { create(:folder, :valid_folder) }
    # Placeholder trait: currently adds nothing beyond the factory defaults,
    # kept so specs can uniformly request a :valid_* trait.
    trait :valid_project_folder do
    end
  end
end
|
Pyroseza/Random
|
python_sed_example.py
|
<filename>python_sed_example.py
import os

# Shell pipeline demo: echo a sentence, pipe it through sed, and replace each
# standalone word "i" (\b word boundaries) with "I".
test = "echo i\'m sexy and i know it, who i, boi yeah | sed 's/\\bi\\b/I/g'"
print(test)
# NOTE(review): os.system runs the string through the shell; acceptable for a
# throwaway demo, but subprocess.run is preferred for anything real.
os.system(test)
|
gravitationalwavedc/gwcloud_bilby
|
src/gw_bilby/production-settings.py
|
from .base import *

# Production overrides for the gw_bilby Django settings.
DEBUG = False
SITE_URL = "https://gw-cloud.org"
STATIC_URL = "/bilby/static/"
# NOTE(review): '*' disables Host-header validation; acceptable only if a
# fronting proxy enforces the Host header itself — confirm deployment.
ALLOWED_HOSTS = ['*']
EMAIL_HOST = 'mail.swin.edu.au'
EMAIL_PORT = 25
# Cluster-internal service endpoints (container DNS names).
GWCLOUD_JOB_CONTROLLER_API_URL = "http://gwcloud-job-server:8000/job/apiv1"
GWCLOUD_AUTH_API_URL = "http://gwcloud-auth:8000/auth/graphql"
GWCLOUD_DB_SEARCH_API_URL = "http://gwcloud-db-search:8000/graphql"
try:
    from .environment import *
except ImportError:
    # Optional per-deployment overrides; absence is not an error.
    pass
|
mhradek/aurkitu
|
aurkitu-maven-plugin/src/test/java/com/michaelhradek/aurkitu/plugin/core/ComparatorsTest.java
|
package com.michaelhradek.aurkitu.plugin.core;
import com.michaelhradek.aurkitu.plugin.core.output.EnumDeclaration;
import com.michaelhradek.aurkitu.plugin.core.output.Schema;
import com.michaelhradek.aurkitu.plugin.core.output.TypeDeclaration;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
/**
 * Unit tests for the comparators in {@code Comparators}: each test inserts
 * elements out of order, verifies insertion order, sorts with the comparator
 * under test, and verifies alphabetical order.
 *
 * <p>Improvement over the previous version: {@code assertEquals} replaces
 * {@code assertTrue(a.equals(b))} so a failure reports the expected and
 * actual values instead of a bare "expected true".
 */
public class ComparatorsTest {

    @Test
    public void testStringListOrdering() {
        List<String> testStringArray = new ArrayList<>();
        testStringArray.add("beta");
        testStringArray.add("gamma");
        testStringArray.add("alpha");

        // Sanity-check insertion order before sorting.
        Assert.assertEquals("beta", testStringArray.get(0));
        Assert.assertEquals("gamma", testStringArray.get(1));
        Assert.assertEquals("alpha", testStringArray.get(2));

        testStringArray.sort(Comparators.STRING_LIST);

        // Alphabetical after sorting.
        Assert.assertEquals("alpha", testStringArray.get(0));
        Assert.assertEquals("beta", testStringArray.get(1));
        Assert.assertEquals("gamma", testStringArray.get(2));
    }

    @Test
    public void testTypeDeclarationSort() {
        List<TypeDeclaration> typeDeclarationArrayList = new ArrayList<>();
        TypeDeclaration typeAlpha = new TypeDeclaration();
        typeAlpha.setName("alpha");
        TypeDeclaration typeBeta = new TypeDeclaration();
        typeBeta.setName("beta");
        TypeDeclaration typeGamma = new TypeDeclaration();
        typeGamma.setName("gamma");

        typeDeclarationArrayList.add(typeBeta);
        typeDeclarationArrayList.add(typeGamma);
        typeDeclarationArrayList.add(typeAlpha);

        Assert.assertEquals("beta", typeDeclarationArrayList.get(0).getName());
        Assert.assertEquals("gamma", typeDeclarationArrayList.get(1).getName());
        Assert.assertEquals("alpha", typeDeclarationArrayList.get(2).getName());

        typeDeclarationArrayList.sort(Comparators.TYPE_DECLARATION);

        Assert.assertEquals("alpha", typeDeclarationArrayList.get(0).getName());
        Assert.assertEquals("beta", typeDeclarationArrayList.get(1).getName());
        Assert.assertEquals("gamma", typeDeclarationArrayList.get(2).getName());
    }

    @Test
    public void testEnumDeclarationSort() {
        List<EnumDeclaration> enumDeclarationArrayList = new ArrayList<>();
        EnumDeclaration enumAlpha = new EnumDeclaration();
        enumAlpha.setName("alpha");
        EnumDeclaration enumBeta = new EnumDeclaration();
        enumBeta.setName("beta");
        EnumDeclaration enumGamma = new EnumDeclaration();
        enumGamma.setName("gamma");

        enumDeclarationArrayList.add(enumBeta);
        enumDeclarationArrayList.add(enumGamma);
        enumDeclarationArrayList.add(enumAlpha);

        Assert.assertEquals("beta", enumDeclarationArrayList.get(0).getName());
        Assert.assertEquals("gamma", enumDeclarationArrayList.get(1).getName());
        Assert.assertEquals("alpha", enumDeclarationArrayList.get(2).getName());

        enumDeclarationArrayList.sort(Comparators.ENUM_DECLARATION);

        Assert.assertEquals("alpha", enumDeclarationArrayList.get(0).getName());
        Assert.assertEquals("beta", enumDeclarationArrayList.get(1).getName());
        Assert.assertEquals("gamma", enumDeclarationArrayList.get(2).getName());
    }

    @Test
    public void testConstantSort() {
        List<Schema.Constant<Integer>> constantArrayList = new ArrayList<>();
        Schema.Constant<Integer> constantAlpha = new Schema.Constant<>();
        constantAlpha.name = "alpha";
        Schema.Constant<Integer> constantBeta = new Schema.Constant<>();
        constantBeta.name = "beta";
        Schema.Constant<Integer> constantGamma = new Schema.Constant<>();
        constantGamma.name = "gamma";

        constantArrayList.add(constantBeta);
        constantArrayList.add(constantGamma);
        constantArrayList.add(constantAlpha);

        Assert.assertEquals("beta", constantArrayList.get(0).name);
        Assert.assertEquals("gamma", constantArrayList.get(1).name);
        Assert.assertEquals("alpha", constantArrayList.get(2).name);

        constantArrayList.sort(Comparators.CONSTANT_DECLARATION);

        Assert.assertEquals("alpha", constantArrayList.get(0).name);
        Assert.assertEquals("beta", constantArrayList.get(1).name);
        Assert.assertEquals("gamma", constantArrayList.get(2).name);
    }

    @Test
    public void testTypeDeclarationProperty() {
        List<TypeDeclaration.Property> propertyArrayList = new ArrayList<>();
        TypeDeclaration.Property propertyAlpha = new TypeDeclaration.Property();
        propertyAlpha.name = "alpha";
        TypeDeclaration.Property propertyBeta = new TypeDeclaration.Property();
        propertyBeta.name = "beta";
        TypeDeclaration.Property propertyGamma = new TypeDeclaration.Property();
        propertyGamma.name = "gamma";

        propertyArrayList.add(propertyBeta);
        propertyArrayList.add(propertyGamma);
        propertyArrayList.add(propertyAlpha);

        Assert.assertEquals("beta", propertyArrayList.get(0).name);
        Assert.assertEquals("gamma", propertyArrayList.get(1).name);
        Assert.assertEquals("alpha", propertyArrayList.get(2).name);

        propertyArrayList.sort(Comparators.TYPE_DECLARATION_PROPERTY);

        Assert.assertEquals("alpha", propertyArrayList.get(0).name);
        Assert.assertEquals("beta", propertyArrayList.get(1).name);
        Assert.assertEquals("gamma", propertyArrayList.get(2).name);
    }
}
|
1690296356/jdk
|
test/langtools/tools/doclint/anchorTests/p/package-info.java
|
/* @test /nodynamiccopyright/
 * @bug 8025246 8247957
 * @summary doclint is showing error on anchor already defined when it's not
 * @library ../..
 * @modules jdk.javadoc/jdk.javadoc.internal.doclint
 * @build DocLintTester
 * @run main DocLintTester -ref package-info.out package-info.java
 * @compile/fail/ref=package-info.javac.out -XDrawDiagnostics -Werror -Xdoclint:all package-info.java
 */
/**
 * <a id=here>here</a>
 * <a id=here>here again</a>
 * <a name=name>obsolete anchor</a>
 * <a name=name>obsolete anchor again</a>
 */
package p;
// NOTE(review): the duplicate "here" anchors and obsolete name= anchors above
// are deliberate — this is a doclint regression fixture whose diagnostics are
// compared against package-info.out / package-info.javac.out, so do not "fix"
// them (and do not insert lines above this point: the .out files reference
// line numbers).
|
yossigil/Automata
|
test/automaton/instances/compound/abMany.java
|
<filename>test/automaton/instances/compound/abMany.java
package automaton.instances.compound;
import org.junit.jupiter.api.Test;
import automaton.Lexer;
/**
 * Smoke tests for the lexer recognising (ab)*: each case only exercises DFSA
 * construction, minimisation, or TikZ rendering and passes when no exception
 * is thrown — no assertions are made on the produced automata or output.
 */
public class abMany {
    // Lexer for the language (ab)*: 'a' followed by 'b', repeated.
    final Lexer abStar = Lexer.c('a').Then('b').many();
    @Test void m1() { abStar.DFSA(); }
    @Test void m2() { abStar.DFSA().minimal(); }
    @Test void t0() { abStar.TikZ(); }
    @Test void t1() { abStar.DFSA().TikZ(); }
    @Test void t3() { abStar.DFSA().minimal().TikZ(); }
    @Test void DFSA() { abStar.DFSA(); }
}
|
SebastianTirado/Cpp-Learning-Archive
|
Deitel/Chapter17/examples/17.09/fig17_11.cpp
|
/*
* =====================================================================================
*
* Filename:
*
* Description:
*
* Version: 1.0
* Created: Thanks to github you know it
* Revision: none
* Compiler: g++
*
* Author: <NAME> <EMAIL>
*
*
* =====================================================================================
*/
#include "ClientData.hpp"
#include <cstdlib>
#include <fstream>
#include <iostream>
// Initialises credit.dat with 100 zeroed ClientData records so the file can
// later be used for random-access reads/writes (Deitel fig. 17.11).
int main(int argc, const char* argv[]) {
    std::ofstream outCredit("credit.dat", std::ios::out | std::ios::binary);

    // exit program if ofstream could not open file
    if (!outCredit) {
        std::cerr << "File could not be opened." << std::endl;
        return 1;
    }

    ClientData blankClient; // constructor zeros out each data member

    // output 100 blank records to file
    // NOTE(review): writing raw object bytes assumes ClientData is a
    // trivially-copyable fixed-size record — true for this textbook example.
    for (int i = 0; i < 100; ++i) {
        outCredit.write(reinterpret_cast<const char*>(&blankClient),
                        sizeof(ClientData));
    }
    return 0;
}
|
ibara1454/pyss
|
run_mpi_test.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Run this test with
# `mpiexec -n 1 python run_mpi_test.py`
#
# Discovers and runs every unittest under test/mpi.
import sys

from unittest import TestLoader, TextTestRunner

if __name__ == '__main__':
    path = 'test/mpi'
    loader = TestLoader()
    test = loader.discover(path)
    runner = TextTestRunner(verbosity=2)
    result = runner.run(test)
    # Fix: previously the script always exited 0, so mpiexec/CI could not
    # detect failing tests. Propagate the suite outcome as the exit status.
    sys.exit(0 if result.wasSuccessful() else 1)
|
jsatdapr/cadence
|
runtime/common/pathdomain_string.go
|
<gh_stars>100-1000
// Code generated by "stringer -type=PathDomain"; DO NOT EDIT.
package common
import "strconv"
// Generated by stringer — keep in sync by re-running the command in the file
// header rather than editing by hand.
func _() {
	// An "invalid array index" compiler error signifies that the constant values have changed.
	// Re-run the stringer command to generate them again.
	var x [1]struct{}
	_ = x[PathDomainUnknown-0]
	_ = x[PathDomainStorage-1]
	_ = x[PathDomainPrivate-2]
	_ = x[PathDomainPublic-3]
}

// _PathDomain_name concatenates every constant name; _PathDomain_index holds
// the byte offset at which each name starts (plus a final end offset).
const _PathDomain_name = "PathDomainUnknownPathDomainStoragePathDomainPrivatePathDomainPublic"

var _PathDomain_index = [...]uint8{0, 17, 34, 51, 67}

// String returns the constant's name, or "PathDomain(n)" for out-of-range
// values.
func (i PathDomain) String() string {
	if i >= PathDomain(len(_PathDomain_index)-1) {
		return "PathDomain(" + strconv.FormatInt(int64(i), 10) + ")"
	}
	return _PathDomain_name[_PathDomain_index[i]:_PathDomain_index[i+1]]
}
|
foruy/openflow-multiopenstack
|
openstack-dashboard/openstack_dashboard/dashboards/admin/instances/urls.py
|
<reponame>foruy/openflow-multiopenstack
from django.conf.urls import patterns
from django.conf.urls import url

from openstack_dashboard.dashboards.admin.instances import views

# URL routes for the admin "instances" dashboard panel. Uses the legacy
# patterns() helper, so this file targets Django versions before 1.10.
urlpatterns = patterns('openstack_dashboard.dashboards.admin.instances.views',
    url(r'^$', views.AdminIndexView.as_view(), name='index'),
)
|
jobial-io/cloudformation-template-generator
|
src/test/scala/com/monsanto/arch/cloudformation/model/resource/CodeCommit_UT.scala
|
<gh_stars>100-1000
package com.monsanto.arch.cloudformation.model.resource
import com.monsanto.arch.cloudformation.model.{ ResourceRef, Template, Token }
import org.scalatest.{ FunSpec, Matchers }
import spray.json._
/** Serialisation test for AWS::CodeCommit::Repository. */
class CodeCommit_UT extends FunSpec with Matchers {
  // Fixture: a repository carrying a single trigger that fires on both
  // reference-update and reference-delete events.
  val repo = `AWS::CodeCommit::Repository`(
    name = "RepoFoo",
    RepositoryDescription = Some(""),
    RepositoryName = "RepoBar",
    Triggers = Some(Seq(
      CodeCommitTrigger(
        Branches = Some(Seq("foo")),
        CustomData = Some("bar"),
        DestinationArn = Some("arn::::baz"),
        Events = Some(Seq(
          CodeCommitEvent.updateReference,
          CodeCommitEvent.deleteReference
        )),
        Name = "BarTrigger"
      )
    ))
  )
  // NOTE(review): the describe label reads "UsagePlan" but the fixture under
  // test is a CodeCommit repository — likely a copy/paste leftover.
  describe("UsagePlan"){
    it ("should serialize as expected") {
      val expectedJson =
        """
|{
| "Resources": {
| "RepoFoo": {
| "Properties": {
| "RepositoryDescription": "",
| "RepositoryName": "RepoBar",
| "Triggers": [
| {
| "Branches": [
| "foo"
| ],
| "CustomData": "bar",
| "DestinationArn": "arn::::baz",
| "Events": [
| "updateReference",
| "deleteReference"
| ],
| "Name": "BarTrigger"
| }
| ]
| },
| "Type": "AWS::CodeCommit::Repository"
| }
| }
|}
""".stripMargin.parseJson
      Template.fromResource(repo).toJson should be (expectedJson)
    }
  }
}
|
lusing/mljs
|
js2/esp.js
|
// Demo script walking through the JS parsing toolchain: espree (parse /
// tokenize), @babel/core (transform), @babel/parser, @babel/traverse,
// @babel/types, @babel/generator, @babel/template and @babel/code-frame.

// --- espree: parse and tokenize two snippets ---
const espree = require("espree");
const code = "for(let i=0;i<10;i+=1){console.log(i);}";
const ast = espree.parse(code, {ecmaVersion: 2020});
console.log(ast);
const tokens = espree.tokenize(code, {ecmaVersion: 2020});
console.log(tokens);
const code2 = 'function greet(input) {return input ?? "Hello world";}';
const ast2 = espree.parse(code2, {ecmaVersion: 2020});
console.log(ast2);
const tokens2 = espree.tokenize(code2, {ecmaVersion: 2020});
console.log(tokens2);

// --- @babel/core: down-level a class and the ** operator via preset-env ---
const code3 = `
//Test Class Function
class Test {
constructor() {
this.x = 2;
}
}`;
const babel = require("@babel/core");
let result = babel.transformSync(code3, {
presets: ["@babel/preset-env"]
});
console.log(result.code);
const code4 = 'let a = 2 ** 8;'
let result1 = babel.transformSync(code4, {
presets: ["@babel/preset-env"]
});
console.log(result1.code);
// console.log(result.map);
// console.log(result.ast);
// console.log("===");
// result2 = babel.parseSync(code2);
// console.log(result2);

// --- @babel/parser + @babel/traverse: walk the AST, print node types ---
const babelParser = require('@babel/parser');
let ast3 = babelParser.parse(code2, {})
console.log(ast3.program.body);
console.log('<---');
const ast4 = babelParser.parse(code4, {})
console.log(ast4.program.body);
const traverse2 = require("@babel/traverse");
traverse2.default(ast4, {
enter(path) {
console.log(path.type);
}
});
console.log(traverse2);
console.log('--->');

// --- parse errors surface as thrown SyntaxError ---
const code_error1 = 'let a = 1;)';
try {
let ast3 = babelParser.parse(code_error1, {});
} catch (e) {
console.error(e);
}

// --- @babel/types: filter nodes by type during traversal ---
const code6 = 'if (a==2) {a+=1};';
const t = require('@babel/types');
const ast6 = babelParser.parse(code6, {})
traverse2.default(ast6, {
enter(path) {
if (t.isIdentifier(path.node)) {
console.log(path.node);
} else {
//console.log(path.node);
}
}
});
//console.log(ast6);

// --- AST mutation (== -> ===) then regeneration with @babel/generator ---
// NOTE(review): code8 is declared but code6 is parsed below — harmless here
// because the two strings are identical, but likely a typo.
const code8 = 'if (a==2) {a+=1};';
const ast8 = babelParser.parse(code6, {})
traverse2.default(ast8, {
enter(path) {
if (t.isBinaryExpression(path.node, {operator: "=="})) {
path.node.operator = "===";
}
}
});
const generate = require("@babel/generator");
let c2 = generate.default(ast8, {});
console.log(c2.code);

// --- @babel/template: build ASTs from source templates with placeholders ---
const babelTemplate = require("@babel/template");
const requireTemplate = babelTemplate.default(`
const IMPORT_NAME = require(SOURCE);
`);
const ast9 = requireTemplate({
IMPORT_NAME: t.identifier("babelTemplate"),
SOURCE: t.stringLiteral("@babel/template")
});
console.log(ast9);
console.log(generate.default(ast9).code);
const forTemplate = babelTemplate.default(`
for(let i=0;i<END;i+=1){
console.log(i); // output loop variable
}
`);
const ast10 = forTemplate({
END: t.numericLiteral(10)
});
console.log(ast10);
console.log(generate.default(ast10).code);

// --- @babel/code-frame: highlight a source span for diagnostics ---
const codeFrame = require("@babel/code-frame");
const rawLines2 = 'let a = isNaN(b);';
const result2 = codeFrame.codeFrameColumns(rawLines2, {
start: {line: 1, column: 9},
end: {line: 1, column: 14},
}, {highlightCode: true});
console.log(result2);
const rawLines3 = ["class CodeAnalyzer {", " constructor()", "};"].join("\n");
const result3 = codeFrame.codeFrameColumns(rawLines3, {
start: {line: 2, column: 3},
end: {line: 2, column: 16},
}, {highlightCode: true});
console.log(result3);
|
CCLDESTY/xml3d.js
|
src/renderer/renderer/canvas-handler.js
|
var MouseEvents = require("./events/mouse.js");
var TouchEvents = require("./events/touch.js");

// Monotonic id source shared by all handler instances in this process.
var c_globalCanvasId = 0;

/**
 * Base handler that wires DOM input events on a canvas to the owning
 * <xml3d> element and re-dispatches renderer lifecycle events on it.
 *
 * @param {Element} xml3dElement
 * @param {HTMLCanvasElement} canvas
 * @constructor
 */
function AbstractCanvasHandler(xml3dElement, canvas) {
    this._xml3dElement = xml3dElement;
    this._canvas = canvas;
    this.id = ++c_globalCanvasId; // global canvas id starts at 1
    this._mouseHandler = new MouseEvents.MouseEventHandler(xml3dElement, this);
    this._registerCanvasListeners(this._mouseHandler, MouseEvents.EVENTS);
    // Touch support is optional: only attach when the browser exposes it.
    if(this.hasTouchEvents()) {
        this._touchHandler = new TouchEvents.TouchEventHandler(xml3dElement, this);
        this._registerCanvasListeners(this._touchHandler, TouchEvents.EVENTS);
    }
}

/**
 * Feature-detects touch event support in the current browser.
 * @returns {boolean}
 */
AbstractCanvasHandler.prototype.hasTouchEvents = function() {
    return 'ontouchstart' in window;
};

/** @returns {HTMLCanvasElement} the canvas this handler manages. */
AbstractCanvasHandler.prototype.getCanvas = function() {
    return this._canvas;
};

/** Re-dispatches an already-constructed event on the <xml3d> element. */
AbstractCanvasHandler.prototype.dispatchEvent = function(event) {
    this._xml3dElement.dispatchEvent(event);
};

// Forwards each named canvas event to the handler's same-named method (when
// defined) and stops it from bubbling further up the DOM.
AbstractCanvasHandler.prototype._registerCanvasListeners = function (handler, events) {
    var canvas = this._canvas;
    events.forEach(function (name) {
        canvas.addEventListener(name, function (e) {
            handler[name] && handler[name].call(handler, e);
            e.stopPropagation();
        });
    });
};

// Dispatches a bubbling, cancelable CustomEvent of the given type on the
// <xml3d> element, carrying detail (or null) as payload.
AbstractCanvasHandler.prototype.dispatchCustomEvent = function(type, detail) {
    detail = detail || null;
    var event = document.createEvent('CustomEvent');
    event.initCustomEvent(type, true, true, detail);
    this._xml3dElement.dispatchEvent(event);
};

/**
 * The update event can be used by user to sync actions
 * with rendering
 */
AbstractCanvasHandler.prototype.dispatchUpdateEvent = function () {
    this.dispatchCustomEvent("update");
};

/** Notifies listeners that the canvas was resized; detail = new dimensions. */
AbstractCanvasHandler.prototype.dispatchResizeEvent = function (dimensions) {
    this.dispatchCustomEvent("resize", dimensions);
};

/**
 * Dispatches a FrameDrawnEvent to listeners
 *
 * @param start frame start time (ms)
 * @param end frame end time (ms)
 * @param stats optional render stats; defaults to zeroed counts
 * @return
 */
AbstractCanvasHandler.prototype.dispatchFrameDrawnEvent = function (start, end, stats) {
    stats = stats || {
        count: {
            primitives: 0, objects: 0
        }
    };
    var data = {
        timeStart: start, timeEnd: end, renderTimeInMilliseconds: end - start, count: stats.count
    };
    this.dispatchCustomEvent("framedrawn", data);
};

module.exports = AbstractCanvasHandler;
|
danadi7/tp
|
src/main/java/seedu/schedar/model/task/DoneStatusCode.java
|
package seedu.schedar.model.task;
import java.util.HashMap;
import java.util.Map;
/**
 * Completion status of a task, keyed by a stable integer status code used for
 * persistence. Each status also carries a human-readable label returned by
 * {@link #toString()}.
 */
public enum DoneStatusCode {
    NOT_DONE(0, "Not done"),
    DONE(1, "Done"),
    OVERDUE(2, "Overdue");

    /** Reverse lookup table from integer status code to enum constant. */
    private static final Map<Integer, DoneStatusCode> BY_STATUS_CODE = new HashMap<>();

    static {
        // Enum constants are initialised before this block runs, so every
        // status is visible here.
        for (DoneStatusCode status : values()) {
            BY_STATUS_CODE.put(status.statusCode, status);
        }
    }

    private final int statusCode;
    private final String label;

    DoneStatusCode(int statusCode, String label) {
        this.statusCode = statusCode;
        this.label = label;
    }

    /**
     * Looks up the status for a persisted code.
     *
     * @param statusCode the stored integer code
     * @return the matching status, or {@code null} when the code is unknown
     */
    public static DoneStatusCode getDoneStatusByCode(int statusCode) {
        return BY_STATUS_CODE.get(statusCode);
    }

    /** Returns the stable integer code used when persisting this status. */
    public int getStatusCode() {
        return statusCode;
    }

    @Override
    public String toString() {
        return label;
    }
}
|
Freddy875/C
|
Archivos/ArchivosDeLectura/fgets.c
|
<reponame>Freddy875/C
/*
Lee cadenas de caracteres hasta n-1 caracteres
O hasta que lea un salto de linea "\n"
o el final del archivo EOF
char *fgets(char *buffer, int tamanio, FILE *archivo);
*/
#include <stdio.h>
#include <stdlib.h>
/*
 * Prints the contents of gente.txt line by line using fgets.
 *
 * Fix: the original loop was `while(feof(archivo) == 0)`, the classic
 * end-of-file bug — feof() only becomes true AFTER a read fails, so the last
 * line was printed twice. Testing fgets' return value directly reads each
 * line exactly once and also stops on read errors.
 */
int main(void){
    FILE *archivo;
    char sCaracteres[100];

    archivo = fopen("gente.txt","r");

    if(archivo == NULL){
        exit(1);
    }else{
        printf("\nEl contenido del archivo de prueba es: \n\n");
        /* fgets returns NULL at EOF or on error, ending the loop cleanly. */
        while(fgets(sCaracteres, sizeof sCaracteres, archivo) != NULL){
            printf("%s",sCaracteres);
        }/* fin while */
        getchar(); /* pause so the output stays visible */
    }/* fin if-else */

    fclose(archivo);
    return 0;
}/* fin int main */
|
alexkalderimis/lab
|
internal/stringset.go
|
<filename>internal/stringset.go
package stringset
type StringSet struct {
trie map[rune]*StringSet
}
func Singleton(s string) StringSet {
var set StringSet
set.Add(s)
return set
}
func (set *StringSet) Contains(item string) bool {
runes := []rune(item)
if len(runes) == 0 {
return true
}
s := set
ok := true
for i, char := range runes {
s, ok = s.trie[char]
if !ok {
return false
}
if i+1 == len(runes) {
return true
}
}
return false
}
func (set *StringSet) Add(item string) {
runes := []rune(item)
if len(runes) == 0 {
return
}
}
|
jorrellz/JetfireTool
|
jetfiretool-cron/src/main/java/com/jetfiretool/cron/pattern/parser/ValueParser.java
|
package com.jetfiretool.cron.pattern.parser;
/**
 * Value parser interface.<br>
 * A value parser constrains the range of values permitted at the
 * corresponding position of a cron expression and converts the textual value
 * to an int.
 *
 * @author Jetfire
 */
public interface ValueParser {
    /**
     * Parses a String value and converts it to an int.<br>
     * The conversion covers:
     * <ol>
     * <li>numeric strings converted to numbers</li>
     * <li>aliases converted to their numeric value (e.g. month and weekday names)</li>
     * </ol>
     *
     * @param value the String value
     * @return the parsed int
     */
    public int parse(String value);

    /**
     * Returns the minimum permitted value.
     *
     * @return the minimum value
     */
    public int getMin();

    /**
     * Returns the maximum permitted value.
     *
     * @return the maximum value
     */
    public int getMax();
}
|
stumoodie/VisualLanguageToolkit
|
lib/antlr-3.4/tool/src/main/java/org/antlr/tool/AssignTokenTypesBehavior.java
|
<reponame>stumoodie/VisualLanguageToolkit
/*
* [The "BSD license"]
* Copyright (c) 2010 <NAME>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.antlr.tool;
import org.antlr.analysis.Label;
import org.antlr.grammar.v3.AssignTokenTypesWalker;
import org.antlr.misc.Utils;
import org.antlr.runtime.tree.TreeNodeStream;
import java.util.*;
/** Move all of the functionality from assign.types.g grammar file. */
public class AssignTokenTypesBehavior extends AssignTokenTypesWalker {
    // Sentinel token-type values used while collecting names before real
    // types are assigned: UNASSIGNED marks a token seen but not yet typed;
    // UNASSIGNED_IN_PARSER_RULE marks a literal referenced from a parser rule
    // (see trackString), which is handled differently at assignment time.
    protected static final Integer UNASSIGNED = Utils.integer(-1);
    protected static final Integer UNASSIGNED_IN_PARSER_RULE = Utils.integer(-2);

    // TreeMaps keep literals/tokens/aliases in sorted order for deterministic
    // type assignment and output.
    protected Map<String,Integer> stringLiterals = new TreeMap<String, Integer>();
    protected Map<String,Integer> tokens = new TreeMap<String, Integer>();
    // aliases: token name -> literal; aliasesReverseIndex: literal -> token
    // name, used to detect conflicting re-aliasing of the same literal.
    protected Map<String,String> aliases = new TreeMap<String, String>();
    protected Map<String,String> aliasesReverseIndex = new HashMap<String,String>();

    /** Track actual lexer rule defs so we don't get repeated token defs in
     *  generated lexer.
     */
    protected Set<String> tokenRuleDefs = new HashSet();

    public AssignTokenTypesBehavior() {
        // Tree-walker superclass requires a stream; it is supplied later via
        // init()/walk, so construct with null here.
        super(null);
    }

    // Binds this walker to a grammar and resets per-walk state.
    @Override
    protected void init(Grammar g) {
        this.grammar = g;
        currentRuleName = null;
        if ( stringAlias==null ) {
            // only init once; can't statically init since we need astFactory
            initASTPatterns();
        }
    }
    /** Track string literals (could be in tokens{} section).
     *  Records each literal referenced from a parser rule (or the tokens{}
     *  section of a parser/combined grammar) so it can later receive a token
     *  type; emits grammar errors for illegal placements.
     */
    @Override
    protected void trackString(GrammarAST t) {
        // if lexer, don't allow aliasing in tokens section
        if ( currentRuleName==null && grammar.type==Grammar.LEXER ) {
            ErrorManager.grammarError(ErrorManager.MSG_CANNOT_ALIAS_TOKENS_IN_LEXER,
                                      grammar,
                                      t.token,
                                      t.getText());
            return;
        }
        // in a plain parser grammar rule, cannot reference literals
        // (unless defined previously via tokenVocab option)
        // don't warn until we hit root grammar as may be defined there.
        if ( grammar.getGrammarIsRoot() &&
             grammar.type==Grammar.PARSER &&
             grammar.getTokenType(t.getText())== Label.INVALID )
        {
            ErrorManager.grammarError(ErrorManager.MSG_LITERAL_NOT_ASSOCIATED_WITH_LEXER_RULE,
                                      grammar,
                                      t.token,
                                      t.getText());
        }
        // Don't record literals for lexers, they are things to match not tokens
        if ( grammar.type==Grammar.LEXER ) {
            return;
        }
        // otherwise add literal to token types if referenced from parser rule
        // or in the tokens{} section
        if ( (currentRuleName==null ||
              Character.isLowerCase(currentRuleName.charAt(0))) &&
             grammar.getTokenType(t.getText())==Label.INVALID )
        {
            stringLiterals.put(t.getText(), UNASSIGNED_IN_PARSER_RULE);
        }
    }

    // Records a token name reference with an UNASSIGNED type, unless a type
    // already exists (e.g. loaded from a tokenVocab import).
    @Override
    protected void trackToken(GrammarAST t) {
        // imported token names might exist, only add if new
        // Might have ';'=4 in vocab import and SEMI=';'. Avoid
        // setting to UNASSIGNED if we have loaded ';'/SEMI
        if ( grammar.getTokenType(t.getText())==Label.INVALID &&
             tokens.get(t.getText())==null )
        {
            tokens.put(t.getText(), UNASSIGNED);
        }
    }
/**
 * Records a lexer-rule definition in a lexer or combined grammar.
 * Tracks the rule name, reserves an UNASSIGNED token type for it when no
 * type exists yet, and — when the rule body is exactly a single char or
 * string literal (per the alias template trees built elsewhere in this
 * class) — registers the rule as an alias for that literal.
 *
 * @param t        AST node carrying the rule name
 * @param modifier rule modifier subtree (not consulted here)
 * @param block    the rule body, structurally matched against the alias shapes
 */
@Override
protected void trackTokenRule(GrammarAST t,
							  GrammarAST modifier,
							  GrammarAST block)
{
	// imported token names might exist, only add if new
	if ( grammar.type==Grammar.LEXER || grammar.type==Grammar.COMBINED ) {
		// lexer rules start with an uppercase letter; anything else here
		// is not a token rule and is silently ignored
		if ( !Character.isUpperCase(t.getText().charAt(0)) ) {
			return;
		}
		if ( t.getText().equals(Grammar.ARTIFICIAL_TOKENS_RULENAME) ) {
			// don't add Tokens rule
			return;
		}
		// track all lexer rules so we can look for token refs w/o
		// associated lexer rules.
		grammar.composite.lexerRules.add(t.getText());
		int existing = grammar.getTokenType(t.getText());
		if ( existing==Label.INVALID ) {
			tokens.put(t.getText(), UNASSIGNED);
		}
		// look for "<TOKEN> : <literal> ;" pattern
		// (can have optional action last)
		if ( block.hasSameTreeStructure(charAlias) ||
			 block.hasSameTreeStructure(stringAlias) ||
			 block.hasSameTreeStructure(charAlias2) ||
			 block.hasSameTreeStructure(stringAlias2) )
		{
			tokenRuleDefs.add(t.getText());
			/*
			Grammar parent = grammar.composite.getDelegator(grammar);
			boolean importedByParserOrCombined =
				parent!=null &&
				(parent.type==Grammar.LEXER||parent.type==Grammar.PARSER);
			*/
			if ( grammar.type==Grammar.COMBINED || grammar.type==Grammar.LEXER ) {
				// only call this rule an alias if combined or lexer
				// (the literal is the first grandchild of the rule block)
				alias(t, (GrammarAST)block.getChild(0).getChild(0));
			}
		}
	}
	// else error
}
/**
 * Records the pair tokenID=literal as an alias, rejecting conflicting
 * definitions.  A literal may be aliased to at most one token name and a
 * token name may be aliased to at most one literal; duplicates that are
 * byte-identical (e.g. tokens{A='a'} plus rule A : 'a';) are accepted
 * silently.  On success the pair is entered into both {@code aliases}
 * and the reverse index {@code aliasesReverseIndex}.
 *
 * @param t token-name AST node
 * @param s literal AST node being aliased to the token name
 */
@Override
protected void alias(GrammarAST t, GrammarAST s) {
	String tokenID = t.getText();
	String literal = s.getText();
	String prevAliasLiteralID = aliasesReverseIndex.get(literal);
	if ( prevAliasLiteralID!=null ) { // we've seen this literal before
		if ( tokenID.equals(prevAliasLiteralID) ) {
			// duplicate but identical alias; might be tokens {A='a'} and
			// lexer rule A : 'a' ; Is ok, just return
			return;
		}
		// give error unless both are rules (ok if one is in tokens section)
		if ( !(tokenRuleDefs.contains(tokenID) && tokenRuleDefs.contains(prevAliasLiteralID)) )
		{
			// don't allow alias if A='a' in tokens section and B : 'a'; is rule.
			// Allow if both are rules. Will get DFA nondeterminism error later.
			ErrorManager.grammarError(ErrorManager.MSG_TOKEN_ALIAS_CONFLICT,
									  grammar,
									  t.token,
									  tokenID+"="+literal,
									  prevAliasLiteralID);
		}
		return; // don't do the alias
	}
	int existingLiteralType = grammar.getTokenType(literal);
	if ( existingLiteralType !=Label.INVALID ) {
		// we've seen this before from a tokenVocab most likely
		// don't assign a new token type; use existingLiteralType.
		tokens.put(tokenID, existingLiteralType);
	}
	String prevAliasTokenID = aliases.get(tokenID);
	if ( prevAliasTokenID!=null ) {
		// same token name aliased to two different literals: report and bail
		ErrorManager.grammarError(ErrorManager.MSG_TOKEN_ALIAS_REASSIGNMENT,
								  grammar,
								  t.token,
								  tokenID+"="+literal,
								  prevAliasTokenID);
		return; // don't do the alias
	}
	aliases.put(tokenID, literal);
	aliasesReverseIndex.put(literal, tokenID);
}
/**
 * Drives final token-type assignment for {@code root}: named tokens are
 * typed first, aliased literals then inherit their token's type, any
 * remaining literals get fresh types, and finally every name and literal
 * is defined in the grammar.  The call order is significant — do not
 * reorder these steps.
 * (Removed: commented-out System.out debug dumps of the intermediate maps.)
 */
@Override
public void defineTokens(Grammar root) {
    assignTokenIDTypes(root);
    aliasTokenIDsAndLiterals(root);
    assignStringTypes(root);
    defineTokenNamesAndLiteralsInGrammar(root);
}
/*
protected void defineStringLiteralsFromDelegates() {
if ( grammar.getGrammarIsMaster() && grammar.type==Grammar.COMBINED ) {
List<Grammar> delegates = grammar.getDelegates();
System.out.println("delegates in master combined: "+delegates);
for (int i = 0; i < delegates.size(); i++) {
Grammar d = (Grammar) delegates.get(i);
Set<String> literals = d.getStringLiterals();
for (Iterator it = literals.iterator(); it.hasNext();) {
String literal = (String) it.next();
System.out.println("literal "+literal);
int ttype = grammar.getTokenType(literal);
grammar.defineLexerRuleForStringLiteral(literal, ttype);
}
}
}
}
*/
/**
 * Walks the tracked string literals and assigns a fresh token type to
 * each one still carrying a sentinel value (anything below
 * {@code Label.MIN_TOKEN_TYPE}).  A literal referenced from a combined
 * grammar's parser rule also gets an implicitly generated lexer rule.
 *
 * @param root grammar that issues new token types and hosts generated rules
 */
@Override
protected void assignStringTypes(Grammar root) {
    // Replacing the value of an existing key during keySet iteration is
    // safe (no structural modification of the map).
    for (Object litKey : stringLiterals.keySet()) {
        String lit = (String) litKey;
        int oldType = ((Integer) stringLiterals.get(lit)).intValue();
        if (oldType < Label.MIN_TOKEN_TYPE) {
            Integer typeI = Utils.integer(root.getNewTokenType());
            stringLiterals.put(lit, typeI);
            // if string referenced in combined grammar parser rule,
            // automatically define in the generated lexer
            root.defineLexerRuleForStringLiteral(lit, typeI.intValue());
        }
    }
}
/**
 * Copies each aliased token's type onto its string literal so both map
 * to the same token type.  Lexer grammars are skipped entirely because
 * strings/chars are never token types there.  When the alias did not
 * come from an explicit lexer rule, a lexer rule is generated for it.
 *
 * @param root grammar receiving any generated aliased-literal lexer rules
 */
@Override
protected void aliasTokenIDsAndLiterals(Grammar root) {
    if (root.type == Grammar.LEXER) {
        return; // strings/chars are never token types in LEXER
    }
    for (Object idKey : aliases.keySet()) {
        String tokenID = (String) idKey;
        String literal = (String) aliases.get(tokenID);
        // only quoted literals that were actually referenced matter here
        if (literal.charAt(0) == '\'' && stringLiterals.get(literal) != null) {
            stringLiterals.put(literal, tokens.get(tokenID));
            // an alias still means you need a lexer rule for it
            Integer typeI = (Integer) tokens.get(tokenID);
            if (!tokenRuleDefs.contains(tokenID)) {
                root.defineLexerRuleForAliasedStringLiteral(tokenID, literal, typeI.intValue());
            }
        }
    }
}
/**
 * Assigns a fresh token type to every tracked token name that is still
 * marked with the UNASSIGNED sentinel.
 *
 * @param root grammar that issues new token type numbers
 */
@Override
protected void assignTokenIDTypes(Grammar root) {
    for (Object idKey : tokens.keySet()) {
        String tokenID = (String) idKey;
        // NOTE: '==' is an intentional identity check against the shared
        // UNASSIGNED sentinel object, not a numeric comparison.
        if (tokens.get(tokenID) == UNASSIGNED) {
            tokens.put(tokenID, Utils.integer(root.getNewTokenType()));
        }
    }
}
/**
 * Pushes the final name→type and literal→type assignments into
 * {@code root} so the rest of the tool sees them as defined tokens.
 * Must run after all types have been assigned.
 *
 * @param root grammar in which the tokens are defined
 */
@Override
protected void defineTokenNamesAndLiteralsInGrammar(Grammar root) {
    for (Object idKey : tokens.keySet()) {
        String tokenID = (String) idKey;
        int ttype = ((Integer) tokens.get(tokenID)).intValue();
        root.defineToken(tokenID, ttype);
    }
    for (Object litKey : stringLiterals.keySet()) {
        String lit = (String) litKey;
        int ttype = ((Integer) stringLiterals.get(lit)).intValue();
        root.defineToken(lit, ttype);
    }
}
}
|
diable201/GolangDevelopment
|
HW_09/api/database/library.go
|
package database
import (
"anime-redis/api/models"
"context"
)
// Library is the top-level database handle: it manages the connection
// lifecycle and exposes one repository per stored entity type.
type Library interface {
	// Connect opens the underlying store at the given URL.
	Connect(url string) error
	// Close releases the connection and any associated resources.
	Close() error
	Anime() AnimeRepository
	Manga() MangaRepository
	Genres() GenresRepository
}
// AnimeRepository provides CRUD access to anime records.
// All methods honor cancellation via the supplied context.
type AnimeRepository interface {
	Create(ctx context.Context, anime *models.Anime) error
	// All returns every anime matching the filter (filter semantics are
	// defined by the implementation).
	All(ctx context.Context, filter *models.AnimeFilter) ([]*models.Anime, error)
	ByID(ctx context.Context, id int) (*models.Anime, error)
	Update(ctx context.Context, anime *models.Anime) error
	Delete(ctx context.Context, id int) error
}
// MangaRepository provides CRUD access to manga records.
// All methods honor cancellation via the supplied context.
type MangaRepository interface {
	Create(ctx context.Context, manga *models.Manga) error
	All(ctx context.Context, filter *models.MangaFilter) ([]*models.Manga, error)
	ByID(ctx context.Context, id int) (*models.Manga, error)
	Update(ctx context.Context, manga *models.Manga) error
	Delete(ctx context.Context, id int) error
}
// GenresRepository provides CRUD access to genre records.
// All methods honor cancellation via the supplied context.
type GenresRepository interface {
	Create(ctx context.Context, genre *models.Genre) error
	All(ctx context.Context, filter *models.GenreFilter) ([]*models.Genre, error)
	ByID(ctx context.Context, id int) (*models.Genre, error)
	Update(ctx context.Context, genre *models.Genre) error
	Delete(ctx context.Context, id int) error
}
|
Tarpelite/OJ_research
|
code/2987.c
|
#include <stdio.h>
/* Per-query helpers: range sum mod n, range product mod n, range xor,
 * and two-way min/max.  Declared up front so main can call them. */
int N(int b1,int c1,int l1,int n1);
int M(int b2,int c2,int l2,int n2);
int H(int x,int y);
int min1(int x1,int y1);
int max1(int x2,int y2);
int n;                            /* number of elements read into a[] */
int a[100000],b[10000],c[10000];  /* data array; b[i]/c[i] hold query i's range */
/*
 * Reads n values into a[] and k query ranges into b[]/c[].  For each
 * query it computes Nr = range-sum mod n and Mr = range-product mod n,
 * then prints the xor of a[min(Nr,Mr)..max(Nr,Mr)].
 * (Removed an unused local `l`; the cryptic while(m--)/while(o--) read
 * loops are now plain counted for loops.)
 */
int main()
{
    int r, k, i, min, max, Nr, Mr;
    scanf("%d %d",&n,&k);
    for (i = 0; i < n; i++) {
        scanf("%d",&a[i]);
    }
    for (i = 0; i < k; i++) {
        scanf("%d %d",&b[i],&c[i]);
    }
    for (i = 0; i <= k-1; i++) {
        Nr = N(b[i], c[i], i, n);   /* range sum mod n  */
        Mr = M(b[i], c[i], i, n);   /* range product mod n */
        min = min1(Nr, Mr);
        max = max1(Nr, Mr);
        r = H(min, max);            /* xor of a[min..max] */
        printf("%d\n",r);
    }
    return 0;
}
/* Sum of a[b1..c1], reduced modulo n1.  l1 (the query index) is unused. */
int N(int b1,int c1,int l1,int n1)
{
    int sum = 0;
    int j;
    for (j = b1; j <= c1; j++) {
        sum += a[j];
    }
    return sum % n1;
}
/* Product of a[b2..c2], reduced mod n2 after every multiplication to
 * limit growth.  l2 (the query index) is unused. */
int M(int b2,int c2,int l2,int n2)
{
    int prod = 1;
    int j;
    for (j = b2; j <= c2; j++) {
        prod = prod * a[j] % n2;
    }
    return prod;
}
/* Bitwise xor of a[x..y]; assumes x <= y (caller passes min, max). */
int H(int x,int y)
{
    int acc = a[x];
    int j;
    for (j = x + 1; j <= y; j++) {
        acc ^= a[j];
    }
    return acc;
}
/* Smaller of two ints. */
int min1(int x1,int y1)
{
    return (x1 < y1) ? x1 : y1;
}
/* Larger of two ints. */
int max1(int x2,int y2)
{
    return (x2 > y2) ? x2 : y2;
}
|
liyuanyuan11/Python
|
list/list14.py
|
# Gradebook demo kept as parallel lists: names, IDs, three subject score
# lists (Chinese, math, English) and a derived per-student total.
# Fixes in this revision:
#  - the original assigned to the name `input`, shadowing the builtin;
#    the index is now stored in `idx`
#  - the final header printed "入下", a typo for "如下" (the other two
#    headers use "如下")
nameList=["李若瑜","长子栋","王小明"]
IDList=["1","2","3"]
scoreList1=[94,100,99]
scoreList2=[93,100,95]
scoreList3=[100,94,100]
# Totals across the three subjects, kept in step with the score lists.
totalList=[scoreList1[0]+scoreList2[0]+scoreList3[0],scoreList1[1]+scoreList2[1]+scoreList3[1],scoreList1[2]+scoreList2[2]+scoreList3[2]]
print("现在已经有"+str(len(nameList))+"位同学的成绩,他们的得分如下:")
print("学号 姓名 语文 数学 英语 总分")
print(IDList[0]," ",nameList[0]," ",scoreList1[0]," ",scoreList2[0]," ",scoreList3[0]," ",totalList[0])
print(IDList[1]," ",nameList[1]," ",scoreList1[1]," ",scoreList2[1]," ",scoreList3[1]," ",totalList[1])
print(IDList[2]," ",nameList[2]," ",scoreList1[2]," ",scoreList2[2]," ",scoreList3[2]," ",totalList[2])
# Correct one mis-entered Chinese score and refresh that student's total.
print("第二位同学的语文成绩录入有误,要改为98分")
scoreList2[1]=98
totalList[1]=scoreList1[1]+scoreList2[1]+scoreList3[1]
print("第二位同学修改后的成绩如下:")
print("学号 姓名 语文 数学 英语 总分")
print(IDList[1]," ",nameList[1]," ",scoreList1[1]," ",scoreList2[1]," ",scoreList3[1]," ",totalList[1])
# Append a new student.  NOTE: the appended scores stay as the raw input
# strings (only the total is converted to float), matching the original
# behavior — the printed values look the same either way.
print("接下来要录入一位新同学的成绩")
nameList.append(input("请输入学生姓名:"))
IDList.append(input("请输入学生学号:"))
scoreList1.append(input("请输入学生语文成绩:"))
scoreList2.append(input("请输入学生数学成绩:"))
scoreList3.append(input("请输入学生英语成绩:"))
totalList.append(float(scoreList1[3])+float(scoreList2[3])+float(scoreList3[3]))
print("现在已经有"+str(len(nameList))+"位同学的成绩,他们的得分如下:")
print("学号 姓名 语文 数学 英语 总分")
print(IDList[0]," ",nameList[0]," ",scoreList1[0]," ",scoreList2[0]," ",scoreList3[0]," ",totalList[0])
print(IDList[1]," ",nameList[1]," ",scoreList1[1]," ",scoreList2[1]," ",scoreList3[1]," ",totalList[1])
print(IDList[2]," ",nameList[2]," ",scoreList1[2]," ",scoreList2[2]," ",scoreList3[2]," ",totalList[2])
print(IDList[3]," ",nameList[3]," ",scoreList1[3]," ",scoreList2[3]," ",scoreList3[3]," ",totalList[3])
# Remove 王小明's record from every parallel list.
print("现在要将王小明同学的记录从列表中删除")
idx=nameList.index("王小明")
del nameList[idx],IDList[idx],scoreList1[idx],scoreList2[idx],scoreList3[idx],totalList[idx]
print("现在已经有"+str(len(nameList))+"位同学的成绩,他们的得分如下:")
print("学号 姓名 语文 数学 英语 总分")
print(IDList[0]," ",nameList[0]," ",scoreList1[0]," ",scoreList2[0]," ",scoreList3[0]," ",totalList[0])
print(IDList[1]," ",nameList[1]," ",scoreList1[1]," ",scoreList2[1]," ",scoreList3[1]," ",totalList[1])
print(IDList[2]," ",nameList[2]," ",scoreList1[2]," ",scoreList2[2]," ",scoreList3[2]," ",totalList[2])
|
kokog78/ibello-api
|
src/main/java/hu/ibello/core/Timeout.java
|
/*
* Ark-Sys Kft. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package hu.ibello.core;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
/**
 * Marks an enum type whose constants name timeout values.
 * The constant names can be referenced in configuration files to bind
 * numeric timeout values to them, and passed to
 * {@link TimeoutRelated#withTimeout(Enum)} to select the timeout applied
 * to an action or expectation.
 * <p>
 * Retained at runtime and applicable only to types.
 * @author <NAME>
 *
 */
@Retention(RUNTIME)
@Target(ElementType.TYPE)
public @interface Timeout {
}
|
qg0/EliteQuant_Excel
|
CppCoreLibrary/QLExtension/termstructures/commodity/commoditycurveext.cpp
|
<reponame>qg0/EliteQuant_Excel
#include <termstructures/commodity/commoditycurveext.hpp>
namespace QLExtension {

    // Builds a named commodity price curve from parallel (date, price)
    // vectors.  Dates must be strictly increasing; dates[0] is taken as
    // the reference date (year fraction 0).  Prices are interpolated
    // log-linearly in time.
    CommodityCurveExt::CommodityCurveExt(const std::string name,
        const std::vector<Date>& dates,
        const std::vector<Real>& prices,
        const Calendar& calendar,
        const DayCounter& dayCounter)
    : TermStructure(0, calendar, dayCounter),
      name_(name), dates_(dates), data_(prices),
      interpolator_(LogLinear()) {
        QL_REQUIRE(dates_.size()>1, "too few dates");
        QL_REQUIRE(data_.size()==dates_.size(), "dates/prices count mismatch");
        // it assumes dates[0] has times[0] = 0
        times_.resize(dates_.size());
        times_[0]=0.0;
        for (Size i = 1; i < dates_.size(); i++) {
            QL_REQUIRE(dates_[i] > dates_[i-1],
                       "invalid date (" << dates_[i] << ", vs "
                       << dates_[i-1] << ")");
            times_[i] = dayCounter.yearFraction(dates_[0], dates_[i]);
        }
        // interpolation must be set up only after times_/data_ are final
        interpolation_ =
            interpolator_.interpolate(times_.begin(), times_.end(),
                                      data_.begin());
        interpolation_.update();
    }

    // Builds an empty named curve; prices must be supplied later via
    // setPrices() before the curve can be queried.
    CommodityCurveExt::CommodityCurveExt(const std::string name,
        const Calendar& calendar,
        const DayCounter& dayCounter)
    : TermStructure(0, calendar, dayCounter),
      name_(name), interpolator_(LogLinear()) {}

    // Replaces the curve's data with the given date->price map and
    // rebuilds the interpolation.  std::map iteration yields ascending
    // dates, so no explicit ordering check is performed here (unlike the
    // vector constructor).
    void CommodityCurveExt::setPrices(std::map<Date, Real>& prices) {
        QL_REQUIRE(prices.size()>1, "too few prices");
        dates_.clear();
        data_.clear();
        for (std::map<Date, Real>::const_iterator i = prices.begin(); i != prices.end(); ++i) {
            dates_.push_back(i->first);
            data_.push_back(i->second);
        }
        // first supplied date becomes the reference point (time 0)
        times_.resize(dates_.size());
        times_[0]=0.0;
        for (Size i = 1; i < dates_.size(); i++)
            times_[i] = dayCounter().yearFraction(dates_[0], dates_[i]);
        interpolation_ =
            interpolator_.interpolate(times_.begin(), times_.end(),
                                      data_.begin());
        interpolation_.update();
    }

    // Streams the curve as "[name]".
    std::ostream& operator<<(std::ostream& out, const CommodityCurveExt& curve) {
        out << "[" << curve.name_ << "]";
        return out;
    }
}
|
sopra-fs21-group-10/td-server
|
src/test/java/ch/uzh/ifi/hase/soprafs21/repository/UserRepositoryIntegrationTest.java
|
<filename>src/test/java/ch/uzh/ifi/hase/soprafs21/repository/UserRepositoryIntegrationTest.java
package ch.uzh.ifi.hase.soprafs21.repository;
import ch.uzh.ifi.hase.soprafs21.constant.UserStatus;
import ch.uzh.ifi.hase.soprafs21.entity.User;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
@DataJpaTest
class UserRepositoryIntegrationTest {

    @Autowired
    private TestEntityManager entityManager;

    @Autowired
    private UserRepository userRepository;

    /**
     * Creates, persists and flushes a standard user fixture.
     * Extracted because both tests previously duplicated this setup.
     *
     * @return the persisted user
     */
    private User persistTestUser() {
        User user = new User();
        user.setPassword("<PASSWORD>");
        user.setUsername("firstname@lastname");
        user.setStatus(UserStatus.OFFLINE);
        user.setToken("1");
        entityManager.persist(user);
        entityManager.flush();
        return user;
    }

    @Test
    void findByUsername_success() {
        // given
        User user = persistTestUser();

        // when
        User found = userRepository.findByUsername(user.getUsername());

        // then — assertEquals takes (expected, actual); the original test
        // had the arguments reversed here, unlike findByToken_success
        assertNotNull(found.getUserId());
        assertEquals(user.getPassword(), found.getPassword());
        assertEquals(user.getUsername(), found.getUsername());
        assertEquals(user.getToken(), found.getToken());
        assertEquals(user.getStatus(), found.getStatus());
    }

    @Test
    void findByToken_success() {
        // given
        User user = persistTestUser();

        // when
        User found = userRepository.findByToken(user.getToken());

        // then
        assertNotNull(found.getUserId());
        assertEquals(user.getPassword(), found.getPassword());
        assertEquals(user.getUsername(), found.getUsername());
        assertEquals(user.getToken(), found.getToken());
        assertEquals(user.getStatus(), found.getStatus());
    }
}
|
munkhuushmgl/java-dialogflow-cx
|
proto-google-cloud-dialogflow-cx-v3/src/main/java/com/google/cloud/dialogflow/cx/v3/WebhookProto.java
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3/webhook.proto
package com.google.cloud.dialogflow.cx.v3;
public final class WebhookProto {
private WebhookProto() {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
}
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_Webhook_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_Webhook_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_Webhook_GenericWebService_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_Webhook_GenericWebService_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_Webhook_GenericWebService_RequestHeadersEntry_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_Webhook_GenericWebService_RequestHeadersEntry_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_ListWebhooksRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_ListWebhooksRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_ListWebhooksResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_ListWebhooksResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_GetWebhookRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_GetWebhookRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_CreateWebhookRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_CreateWebhookRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_UpdateWebhookRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_UpdateWebhookRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_DeleteWebhookRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_DeleteWebhookRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_FulfillmentInfo_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_FulfillmentInfo_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_IntentParameterValue_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_IntentParameterValue_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_ParametersEntry_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_ParametersEntry_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_SentimentAnalysisResult_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_SentimentAnalysisResult_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_WebhookResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_WebhookResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_WebhookResponse_FulfillmentResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_WebhookResponse_FulfillmentResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_FormInfo_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_FormInfo_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_FormInfo_ParameterInfo_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_FormInfo_ParameterInfo_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_SessionInfo_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_SessionInfo_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_dialogflow_cx_v3_SessionInfo_ParametersEntry_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_dialogflow_cx_v3_SessionInfo_ParametersEntry_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
// Builds the FileDescriptor for webhook.proto and wires every
// descriptor / accessor-table pair declared above. This block is emitted by
// protoc (protobuf-java codegen); do not edit it by hand — regenerate from
// the .proto instead.
static {
// Serialized FileDescriptorProto for webhook.proto, split into chunks to
// stay within Java's string-constant size limits.
java.lang.String[] descriptorData = {
"\n+google/cloud/dialogflow/cx/v3/webhook."
+ "proto\022\035google.cloud.dialogflow.cx.v3\032\034go"
+ "ogle/api/annotations.proto\032\027google/api/c"
+ "lient.proto\032\037google/api/field_behavior.p"
+ "roto\032\031google/api/resource.proto\0324google/"
+ "cloud/dialogflow/cx/v3/response_message."
+ "proto\032\036google/protobuf/duration.proto\032\033g"
+ "oogle/protobuf/empty.proto\032 google/proto"
+ "buf/field_mask.proto\032\034google/protobuf/st"
+ "ruct.proto\"\271\004\n\007Webhook\022\014\n\004name\030\001 \001(\t\022\031\n\014"
+ "display_name\030\002 \001(\tB\003\340A\002\022W\n\023generic_web_s"
+ "ervice\030\004 \001(\01328.google.cloud.dialogflow.c"
+ "x.v3.Webhook.GenericWebServiceH\000\022*\n\007time"
+ "out\030\006 \001(\0132\031.google.protobuf.Duration\022\020\n\010"
+ "disabled\030\005 \001(\010\032\357\001\n\021GenericWebService\022\020\n\003"
+ "uri\030\001 \001(\tB\003\340A\002\022\024\n\010username\030\002 \001(\tB\002\030\001\022\024\n\010"
+ "password\030\003 \001(\tB\002\030\001\022e\n\017request_headers\030\004 "
+ "\003(\0132L.google.cloud.dialogflow.cx.v3.Webh"
+ "ook.GenericWebService.RequestHeadersEntr"
+ "y\0325\n\023RequestHeadersEntry\022\013\n\003key\030\001 \001(\t\022\r\n"
+ "\005value\030\002 \001(\t:\0028\001:q\352An\n!dialogflow.google"
+ "apis.com/Webhook\022Iprojects/{project}/loc"
+ "ations/{location}/agents/{agent}/webhook"
+ "s/{webhook}B\t\n\007webhook\"w\n\023ListWebhooksRe"
+ "quest\0229\n\006parent\030\001 \001(\tB)\340A\002\372A#\022!dialogflo"
+ "w.googleapis.com/Webhook\022\021\n\tpage_size\030\002 "
+ "\001(\005\022\022\n\npage_token\030\003 \001(\t\"i\n\024ListWebhooksR"
+ "esponse\0228\n\010webhooks\030\001 \003(\0132&.google.cloud"
+ ".dialogflow.cx.v3.Webhook\022\027\n\017next_page_t"
+ "oken\030\002 \001(\t\"L\n\021GetWebhookRequest\0227\n\004name\030"
+ "\001 \001(\tB)\340A\002\372A#\n!dialogflow.googleapis.com"
+ "/Webhook\"\217\001\n\024CreateWebhookRequest\0229\n\006par"
+ "ent\030\001 \001(\tB)\340A\002\372A#\022!dialogflow.googleapis"
+ ".com/Webhook\022<\n\007webhook\030\002 \001(\0132&.google.c"
+ "loud.dialogflow.cx.v3.WebhookB\003\340A\002\"\205\001\n\024U"
+ "pdateWebhookRequest\022<\n\007webhook\030\001 \001(\0132&.g"
+ "oogle.cloud.dialogflow.cx.v3.WebhookB\003\340A"
+ "\002\022/\n\013update_mask\030\002 \001(\0132\032.google.protobuf"
+ ".FieldMask\"^\n\024DeleteWebhookRequest\0227\n\004na"
+ "me\030\001 \001(\tB)\340A\002\372A#\n!dialogflow.googleapis."
+ "com/Webhook\022\r\n\005force\030\002 \001(\010\"\352\t\n\016WebhookRe"
+ "quest\022!\n\031detect_intent_response_id\030\001 \001(\t"
+ "\022\016\n\004text\030\n \001(\tH\000\022?\n\016trigger_intent\030\013 \001(\t"
+ "B%\372A\"\n dialogflow.googleapis.com/IntentH"
+ "\000\022\024\n\ntranscript\030\014 \001(\tH\000\022\027\n\rtrigger_event"
+ "\030\016 \001(\tH\000\022\025\n\rlanguage_code\030\017 \001(\t\022W\n\020fulfi"
+ "llment_info\030\006 \001(\0132=.google.cloud.dialogf"
+ "low.cx.v3.WebhookRequest.FulfillmentInfo"
+ "\022M\n\013intent_info\030\003 \001(\01328.google.cloud.dia"
+ "logflow.cx.v3.WebhookRequest.IntentInfo\022"
+ ":\n\tpage_info\030\004 \001(\0132\'.google.cloud.dialog"
+ "flow.cx.v3.PageInfo\022@\n\014session_info\030\005 \001("
+ "\0132*.google.cloud.dialogflow.cx.v3.Sessio"
+ "nInfo\022@\n\010messages\030\007 \003(\0132..google.cloud.d"
+ "ialogflow.cx.v3.ResponseMessage\022(\n\007paylo"
+ "ad\030\010 \001(\0132\027.google.protobuf.Struct\022h\n\031sen"
+ "timent_analysis_result\030\t \001(\0132E.google.cl"
+ "oud.dialogflow.cx.v3.WebhookRequest.Sent"
+ "imentAnalysisResult\032\036\n\017FulfillmentInfo\022\013"
+ "\n\003tag\030\001 \001(\t\032\273\003\n\nIntentInfo\022B\n\023last_match"
+ "ed_intent\030\001 \001(\tB%\372A\"\n dialogflow.googlea"
+ "pis.com/Intent\022\024\n\014display_name\030\003 \001(\t\022\\\n\n"
+ "parameters\030\002 \003(\0132H.google.cloud.dialogfl"
+ "ow.cx.v3.WebhookRequest.IntentInfo.Param"
+ "etersEntry\022\022\n\nconfidence\030\004 \001(\002\032^\n\024Intent"
+ "ParameterValue\022\026\n\016original_value\030\001 \001(\t\022."
+ "\n\016resolved_value\030\002 \001(\0132\026.google.protobuf"
+ ".Value\032\200\001\n\017ParametersEntry\022\013\n\003key\030\001 \001(\t\022"
+ "\\\n\005value\030\002 \001(\0132M.google.cloud.dialogflow"
+ ".cx.v3.WebhookRequest.IntentInfo.IntentP"
+ "arameterValue:\0028\001\032;\n\027SentimentAnalysisRe"
+ "sult\022\r\n\005score\030\001 \001(\002\022\021\n\tmagnitude\030\002 \001(\002B\007"
+ "\n\005query\"\257\005\n\017WebhookResponse\022`\n\024fulfillme"
+ "nt_response\030\001 \001(\0132B.google.cloud.dialogf"
+ "low.cx.v3.WebhookResponse.FulfillmentRes"
+ "ponse\022:\n\tpage_info\030\002 \001(\0132\'.google.cloud."
+ "dialogflow.cx.v3.PageInfo\022@\n\014session_inf"
+ "o\030\003 \001(\0132*.google.cloud.dialogflow.cx.v3."
+ "SessionInfo\022(\n\007payload\030\004 \001(\0132\027.google.pr"
+ "otobuf.Struct\022:\n\013target_page\030\005 \001(\tB#\372A \n"
+ "\036dialogflow.googleapis.com/PageH\000\022:\n\013tar"
+ "get_flow\030\006 \001(\tB#\372A \n\036dialogflow.googleap"
+ "is.com/FlowH\000\032\213\002\n\023FulfillmentResponse\022@\n"
+ "\010messages\030\001 \003(\0132..google.cloud.dialogflo"
+ "w.cx.v3.ResponseMessage\022h\n\016merge_behavio"
+ "r\030\002 \001(\0162P.google.cloud.dialogflow.cx.v3."
+ "WebhookResponse.FulfillmentResponse.Merg"
+ "eBehavior\"H\n\rMergeBehavior\022\036\n\032MERGE_BEHA"
+ "VIOR_UNSPECIFIED\020\000\022\n\n\006APPEND\020\001\022\013\n\007REPLAC"
+ "E\020\002B\014\n\ntransition\"\235\004\n\010PageInfo\0229\n\014curren"
+ "t_page\030\001 \001(\tB#\372A \n\036dialogflow.googleapis"
+ ".com/Page\022C\n\tform_info\030\003 \001(\01320.google.cl"
+ "oud.dialogflow.cx.v3.PageInfo.FormInfo\032\220"
+ "\003\n\010FormInfo\022V\n\016parameter_info\030\002 \003(\0132>.go"
+ "ogle.cloud.dialogflow.cx.v3.PageInfo.For"
+ "mInfo.ParameterInfo\032\253\002\n\rParameterInfo\022\024\n"
+ "\014display_name\030\001 \001(\t\022\020\n\010required\030\002 \001(\010\022\\\n"
+ "\005state\030\003 \001(\0162M.google.cloud.dialogflow.c"
+ "x.v3.PageInfo.FormInfo.ParameterInfo.Par"
+ "ameterState\022%\n\005value\030\004 \001(\0132\026.google.prot"
+ "obuf.Value\022\026\n\016just_collected\030\005 \001(\010\"U\n\016Pa"
+ "rameterState\022\037\n\033PARAMETER_STATE_UNSPECIF"
+ "IED\020\000\022\t\n\005EMPTY\020\001\022\013\n\007INVALID\020\002\022\n\n\006FILLED\020"
+ "\003\"\341\001\n\013SessionInfo\0227\n\007session\030\001 \001(\tB&\372A#\n"
+ "!dialogflow.googleapis.com/Session\022N\n\npa"
+ "rameters\030\002 \003(\0132:.google.cloud.dialogflow"
+ ".cx.v3.SessionInfo.ParametersEntry\032I\n\017Pa"
+ "rametersEntry\022\013\n\003key\030\001 \001(\t\022%\n\005value\030\002 \001("
+ "\0132\026.google.protobuf.Value:\0028\0012\267\010\n\010Webhoo"
+ "ks\022\277\001\n\014ListWebhooks\0222.google.cloud.dialo"
+ "gflow.cx.v3.ListWebhooksRequest\0323.google"
+ ".cloud.dialogflow.cx.v3.ListWebhooksResp"
+ "onse\"F\202\323\344\223\0027\0225/v3/{parent=projects/*/loc"
+ "ations/*/agents/*}/webhooks\332A\006parent\022\254\001\n"
+ "\nGetWebhook\0220.google.cloud.dialogflow.cx"
+ ".v3.GetWebhookRequest\032&.google.cloud.dia"
+ "logflow.cx.v3.Webhook\"D\202\323\344\223\0027\0225/v3/{name"
+ "=projects/*/locations/*/agents/*/webhook"
+ "s/*}\332A\004name\022\305\001\n\rCreateWebhook\0223.google.c"
+ "loud.dialogflow.cx.v3.CreateWebhookReque"
+ "st\032&.google.cloud.dialogflow.cx.v3.Webho"
+ "ok\"W\202\323\344\223\002@\"5/v3/{parent=projects/*/locat"
+ "ions/*/agents/*}/webhooks:\007webhook\332A\016par"
+ "ent,webhook\022\322\001\n\rUpdateWebhook\0223.google.c"
+ "loud.dialogflow.cx.v3.UpdateWebhookReque"
+ "st\032&.google.cloud.dialogflow.cx.v3.Webho"
+ "ok\"d\202\323\344\223\002H2=/v3/{webhook.name=projects/*"
+ "/locations/*/agents/*/webhooks/*}:\007webho"
+ "ok\332A\023webhook,update_mask\022\242\001\n\rDeleteWebho"
+ "ok\0223.google.cloud.dialogflow.cx.v3.Delet"
+ "eWebhookRequest\032\026.google.protobuf.Empty\""
+ "D\202\323\344\223\0027*5/v3/{name=projects/*/locations/"
+ "*/agents/*/webhooks/*}\332A\004name\032x\312A\031dialog"
+ "flow.googleapis.com\322AYhttps://www.google"
+ "apis.com/auth/cloud-platform,https://www"
+ ".googleapis.com/auth/dialogflowB\233\002\n!com."
+ "google.cloud.dialogflow.cx.v3B\014WebhookPr"
+ "otoP\001Z?google.golang.org/genproto/google"
+ "apis/cloud/dialogflow/cx/v3;cx\370\001\001\242\002\002DF\252\002"
+ "\035Google.Cloud.Dialogflow.Cx.V3\352A|\n\'servi"
+ "cedirectory.googleapis.com/Service\022Qproj"
+ "ects/{project}/locations/{location}/name"
+ "spaces/{namespace}/services/{service}b\006p"
+ "roto3"
};
// Parse the serialized FileDescriptorProto, resolving imports against the
// descriptors of the files webhook.proto depends on (listed in the same
// order as the import statements above).
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.AnnotationsProto.getDescriptor(),
com.google.api.ClientProto.getDescriptor(),
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
com.google.cloud.dialogflow.cx.v3.ResponseMessageProto.getDescriptor(),
com.google.protobuf.DurationProto.getDescriptor(),
com.google.protobuf.EmptyProto.getDescriptor(),
com.google.protobuf.FieldMaskProto.getDescriptor(),
com.google.protobuf.StructProto.getDescriptor(),
});
// Wire each message's Descriptor and reflection accessor table. The
// getMessageTypes()/getNestedTypes() indices follow declaration order in
// webhook.proto; the String[] lists the message's field (and oneof) names.
internal_static_google_cloud_dialogflow_cx_v3_Webhook_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_google_cloud_dialogflow_cx_v3_Webhook_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_Webhook_descriptor,
new java.lang.String[] {
"Name", "DisplayName", "GenericWebService", "Timeout", "Disabled", "Webhook",
});
internal_static_google_cloud_dialogflow_cx_v3_Webhook_GenericWebService_descriptor =
internal_static_google_cloud_dialogflow_cx_v3_Webhook_descriptor.getNestedTypes().get(0);
internal_static_google_cloud_dialogflow_cx_v3_Webhook_GenericWebService_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_Webhook_GenericWebService_descriptor,
new java.lang.String[] {
"Uri", "Username", "Password", "RequestHeaders",
});
internal_static_google_cloud_dialogflow_cx_v3_Webhook_GenericWebService_RequestHeadersEntry_descriptor =
internal_static_google_cloud_dialogflow_cx_v3_Webhook_GenericWebService_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dialogflow_cx_v3_Webhook_GenericWebService_RequestHeadersEntry_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_Webhook_GenericWebService_RequestHeadersEntry_descriptor,
new java.lang.String[] {
"Key", "Value",
});
internal_static_google_cloud_dialogflow_cx_v3_ListWebhooksRequest_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_google_cloud_dialogflow_cx_v3_ListWebhooksRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_ListWebhooksRequest_descriptor,
new java.lang.String[] {
"Parent", "PageSize", "PageToken",
});
internal_static_google_cloud_dialogflow_cx_v3_ListWebhooksResponse_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_google_cloud_dialogflow_cx_v3_ListWebhooksResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_ListWebhooksResponse_descriptor,
new java.lang.String[] {
"Webhooks", "NextPageToken",
});
internal_static_google_cloud_dialogflow_cx_v3_GetWebhookRequest_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_google_cloud_dialogflow_cx_v3_GetWebhookRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_GetWebhookRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_dialogflow_cx_v3_CreateWebhookRequest_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_google_cloud_dialogflow_cx_v3_CreateWebhookRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_CreateWebhookRequest_descriptor,
new java.lang.String[] {
"Parent", "Webhook",
});
internal_static_google_cloud_dialogflow_cx_v3_UpdateWebhookRequest_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_google_cloud_dialogflow_cx_v3_UpdateWebhookRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_UpdateWebhookRequest_descriptor,
new java.lang.String[] {
"Webhook", "UpdateMask",
});
internal_static_google_cloud_dialogflow_cx_v3_DeleteWebhookRequest_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_google_cloud_dialogflow_cx_v3_DeleteWebhookRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_DeleteWebhookRequest_descriptor,
new java.lang.String[] {
"Name", "Force",
});
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_descriptor,
new java.lang.String[] {
"DetectIntentResponseId",
"Text",
"TriggerIntent",
"Transcript",
"TriggerEvent",
"LanguageCode",
"FulfillmentInfo",
"IntentInfo",
"PageInfo",
"SessionInfo",
"Messages",
"Payload",
"SentimentAnalysisResult",
"Query",
});
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_FulfillmentInfo_descriptor =
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_FulfillmentInfo_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_FulfillmentInfo_descriptor,
new java.lang.String[] {
"Tag",
});
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_descriptor =
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_descriptor
.getNestedTypes()
.get(1);
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_descriptor,
new java.lang.String[] {
"LastMatchedIntent", "DisplayName", "Parameters", "Confidence",
});
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_IntentParameterValue_descriptor =
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_IntentParameterValue_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_IntentParameterValue_descriptor,
new java.lang.String[] {
"OriginalValue", "ResolvedValue",
});
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_ParametersEntry_descriptor =
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_descriptor
.getNestedTypes()
.get(1);
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_ParametersEntry_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_IntentInfo_ParametersEntry_descriptor,
new java.lang.String[] {
"Key", "Value",
});
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_SentimentAnalysisResult_descriptor =
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_descriptor
.getNestedTypes()
.get(2);
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_SentimentAnalysisResult_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_WebhookRequest_SentimentAnalysisResult_descriptor,
new java.lang.String[] {
"Score", "Magnitude",
});
internal_static_google_cloud_dialogflow_cx_v3_WebhookResponse_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_google_cloud_dialogflow_cx_v3_WebhookResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_WebhookResponse_descriptor,
new java.lang.String[] {
"FulfillmentResponse",
"PageInfo",
"SessionInfo",
"Payload",
"TargetPage",
"TargetFlow",
"Transition",
});
internal_static_google_cloud_dialogflow_cx_v3_WebhookResponse_FulfillmentResponse_descriptor =
internal_static_google_cloud_dialogflow_cx_v3_WebhookResponse_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dialogflow_cx_v3_WebhookResponse_FulfillmentResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_WebhookResponse_FulfillmentResponse_descriptor,
new java.lang.String[] {
"Messages", "MergeBehavior",
});
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_descriptor,
new java.lang.String[] {
"CurrentPage", "FormInfo",
});
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_FormInfo_descriptor =
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_descriptor.getNestedTypes().get(0);
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_FormInfo_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_FormInfo_descriptor,
new java.lang.String[] {
"ParameterInfo",
});
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_FormInfo_ParameterInfo_descriptor =
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_FormInfo_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_FormInfo_ParameterInfo_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_PageInfo_FormInfo_ParameterInfo_descriptor,
new java.lang.String[] {
"DisplayName", "Required", "State", "Value", "JustCollected",
});
internal_static_google_cloud_dialogflow_cx_v3_SessionInfo_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_google_cloud_dialogflow_cx_v3_SessionInfo_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_SessionInfo_descriptor,
new java.lang.String[] {
"Session", "Parameters",
});
internal_static_google_cloud_dialogflow_cx_v3_SessionInfo_ParametersEntry_descriptor =
internal_static_google_cloud_dialogflow_cx_v3_SessionInfo_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dialogflow_cx_v3_SessionInfo_ParametersEntry_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dialogflow_cx_v3_SessionInfo_ParametersEntry_descriptor,
new java.lang.String[] {
"Key", "Value",
});
// Re-parse the descriptor's custom options (resource names, HTTP bindings,
// field behavior, client options) now that their extension types are known.
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.ClientProto.defaultHost);
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
registry.add(com.google.api.AnnotationsProto.http);
registry.add(com.google.api.ClientProto.methodSignature);
registry.add(com.google.api.ClientProto.oauthScopes);
registry.add(com.google.api.ResourceProto.resource);
registry.add(com.google.api.ResourceProto.resourceDefinition);
registry.add(com.google.api.ResourceProto.resourceReference);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
// Touch each dependency's outer class to force its static initialization.
com.google.api.AnnotationsProto.getDescriptor();
com.google.api.ClientProto.getDescriptor();
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
com.google.cloud.dialogflow.cx.v3.ResponseMessageProto.getDescriptor();
com.google.protobuf.DurationProto.getDescriptor();
com.google.protobuf.EmptyProto.getDescriptor();
com.google.protobuf.FieldMaskProto.getDescriptor();
com.google.protobuf.StructProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
|
sirikata/sirikata
|
libmesh/plugins/collada/ColladaSystem.cpp
|
<gh_stars>10-100
/* Sirikata libproxyobject -- COLLADA Models System
* ColladaSystem.cpp
*
* Copyright (c) 2009, <NAME>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Sirikata nor the names of its contributors may
* be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "ColladaSystem.hpp"
#include "ColladaErrorHandler.hpp"
#include "ColladaDocumentImporter.hpp"
#include "ColladaDocumentLoader.hpp"
#include "boost/lexical_cast.hpp"
#include "boost/algorithm/string/split.hpp"
#include "boost/algorithm/string/classification.hpp"
#include <sirikata/core/options/Options.hpp>
// OpenCOLLADA headers
#include "COLLADAFWRoot.h"
#include "COLLADASaxFWLLoader.h"
#include "MeshdataToCollada.hpp"
#include <iostream>
#include <fstream>
#include <sirikata/core/util/Paths.hpp>
#include <boost/iostreams/read.hpp>
#include <boost/iostreams/write.hpp>
#include <boost/filesystem.hpp>
#include <sirikata/core/transfer/URL.hpp>
#define COLLADA_LOG(lvl,msg) SILOG(collada, lvl, msg);
using namespace std;
using namespace Sirikata;
using namespace Sirikata::Transfer;
namespace Sirikata { namespace Models {
// Constructs the COLLADA models system; the document collection starts empty.
ColladaSystem::ColladaSystem ()
: mDocuments ()
{
COLLADA_LOG(insane, "ColladaSystem::ColladaSystem() entered");
}
// Destructor. Only logs; mDocuments is released by its own destructor.
ColladaSystem::~ColladaSystem ()
{
COLLADA_LOG(insane, "ColladaSystem::~ColladaSystem() entered");
}
// Factory entry point: allocates a ColladaSystem and initializes it from the
// option string. Returns the ready-to-use system, or 0 (after freeing the
// allocation) when initialization fails.
ColladaSystem* ColladaSystem::create (String const& options)
{
    COLLADA_LOG(insane, "ColladaSystem::create( " << options << ") entered");

    ColladaSystem* system = new ColladaSystem;
    if ( !system->initialize ( options ) ) {
        delete system;
        return 0;
    }
    return system;
}
// Registers this instance with the "colladamodels" option class and parses
// the supplied option string into it. Always reports success.
bool ColladaSystem::initialize(String const& options)
{
    COLLADA_LOG(insane, "ColladaSystem::initialize() entered");

    InitializeClassOptions ( "colladamodels", this, NULL );
    OptionSet* opts = OptionSet::getOptions ( "colladamodels", this );
    opts->parse ( options );

    return true;
}
/////////////////////////////////////////////////////////////////////
// overrides from ModelsSystem
// Heuristic format check. COLLADA files have no magic number, so we sniff for
// the opening of a <COLLADA> tag instead. Only the first 1KB of the document
// is examined (keeping the check cheap), so the closing tag cannot be
// verified.
//
// Returns true when `data` is non-null and its first 1KB contains "<COLLADA".
bool ColladaSystem::canLoad(Transfer::DenseDataPtr data) {
    if (!data) return false;
    // Compare lengths in an unsigned type: the previous (int) cast would
    // truncate buffers larger than INT_MAX to a negative length.
    std::size_t sublen = std::min<std::size_t>(data->length(), 1024);
    std::string subset((const char*)data->begin(), sublen);
    return subset.find("<COLLADA") != std::string::npos;
}
namespace {
// Canonicalizes a subfile name for map lookup: "./foo" and "foo" name the
// same subfile, so a leading "./" prefix is dropped. Further normalization
// could be added here if needed.
String normalizeFilename(const String& fname) {
    if (fname.size() > 2 && fname[0] == '.' && fname[1] == '/')
        return fname.substr(2);
    return fname;
}
}
// Augments a freshly parsed mesh with metadata carried in the asset's HTTP
// headers: rewrites subfile/texture names to absolute URLs, and attaches
// progressive-stream and mipmap information when the relevant headers are
// present. Early returns below discard progData, leaving
// mesh->progressiveData unset.
void ColladaSystem::addHeaderData(const Transfer::RemoteFileMetadata& metadata, Mesh::MeshdataPtr mesh) {
Mesh::ProgressiveDataPtr progData(new Mesh::ProgressiveData());
const FileHeaders& headers = metadata.getHeaders();
// Texture "Redirects" - Sometimes textures (or subfiles in general) are
// reused from another asset, in which case we can point directly to
// it. Using a relative path for download will work, but when trying to
// reuse the file again (e.g. in aggregation) you need to refer to the
// original. This just swaps out the old filename for the real URL.
//
// First, reconstruct the subfile map from the headers.
// Headers come in numbered pairs Subfile-<i>-Name / Subfile-<i>-Path;
// the loop stops at the first index with either half missing.
Transfer::URL mesh_url(metadata.getURI());
typedef std::map<String,String> SubfileMap;
SubfileMap subfiles;
for(int32 i = 0; i < INT_MAX; i++) {
String name_str = "Subfile-" + boost::lexical_cast<String>(i) + "-Name";
String path_str = "Subfile-" + boost::lexical_cast<String>(i) + "-Path";
FileHeaders::const_iterator name_it = headers.find(name_str);
if (name_it == headers.end()) break;
FileHeaders::const_iterator path_it = headers.find(path_str);
if (path_it == headers.end()) break;
// Paths are resolved relative to the mesh's own URL context.
Transfer::URL subfile_url(mesh_url.context(), path_it->second);
String subfile_url_str = subfile_url.toString();
subfiles[name_it->second] = subfile_url_str;
}
// Then, if not empty, replace texture names with URLs. There are two places
// we have texture names. The list of textures:
for(Sirikata::Mesh::TextureList::iterator tex_it = mesh->textures.begin(); tex_it != mesh->textures.end(); tex_it++) {
String normalizedTexName = normalizeFilename(*tex_it);
SubfileMap::const_iterator subfile_it = subfiles.find(normalizedTexName);
if (subfile_it != subfiles.end())
*tex_it = subfile_it->second;
}
// And the texture names in materials
for(Sirikata::Mesh::MaterialEffectInfoList::iterator it = mesh->materials.begin(); it != mesh->materials.end(); it++) {
for(Sirikata::Mesh::MaterialEffectInfo::TextureList::iterator tex_it = it->textures.begin(); tex_it != it->textures.end(); tex_it++) {
String normalizedTexName = normalizeFilename(tex_it->uri);
SubfileMap::const_iterator subfile_it = subfiles.find(normalizedTexName);
if (subfile_it != subfiles.end())
tex_it->uri = subfile_it->second;
}
}
// Progressive Info
// NOTE(review): the header name is spelled "Progresive" (sic) — presumably
// it matches what the server actually emits; confirm before "fixing" it.
// NOTE(review): the early return for a missing hash header is disabled, so
// parsing continues even when "Progresive-Stream" is absent (consistent
// with the hash-parsing code below also being commented out).
FileHeaders::const_iterator findProgHash = headers.find("Progresive-Stream");
if (findProgHash == headers.end()) {
// return;
}
FileHeaders::const_iterator findProgNumTriangles = headers.find("Progresive-Stream-Num-Triangles");
if (findProgNumTriangles == headers.end()) {
return;
}
FileHeaders::const_iterator findMipmaps = headers.find("Mipmaps");
if (findMipmaps == headers.end()) {
return;
}
//Parse the hash of the progressive stream
/*Transfer::Fingerprint progHash;
try {
progHash = Transfer::Fingerprint::convertFromHex(findProgHash->second);
} catch (std::invalid_argument const&) {
COLLADA_LOG(warn, "Error parsing progressive hash from headers");
return;
}
COLLADA_LOG(detailed, "adding meshdata hash = " << progHash)
progData->progressiveHash = progHash;
*/
//Parse number of triangles in the progressive stream
uint32 prog_triangles;
try {
prog_triangles = uint32_lexical_cast(findProgNumTriangles->second);
} catch (boost::bad_lexical_cast const&) {
COLLADA_LOG(warn, "Error parsing progressive-stream-num-triangles from headers");
return;
}
COLLADA_LOG(detailed, "adding meshdata triangles = " << prog_triangles)
progData->numProgressiveTriangles = prog_triangles;
//Parse number of mipmaps
uint32 num_mipmaps;
try {
num_mipmaps = uint32_lexical_cast(findMipmaps->second);
} catch (boost::bad_lexical_cast const&) {
COLLADA_LOG(warn, "Error parsing number of mipmaps from headers");
return;
}
// Each mipmap i contributes Mipmap-<i>-Name, Mipmap-<i>-Hash, and a run of
// Mipmap-<i>-Level-<k> headers formatted "offset,length,width,height".
Mesh::ProgressiveMipmapMap allMipmaps;
for (uint32 i=0; i<num_mipmaps; i++) {
std::string mipmapBaseName = "Mipmap-" + boost::lexical_cast<std::string>(i);
std::string mipmapNameHeader = mipmapBaseName + "-Name";
std::string mipmapHashHeader = mipmapBaseName + "-Hash";
FileHeaders::const_iterator findMipmapName = headers.find(mipmapNameHeader);
if (findMipmapName == headers.end()) {
COLLADA_LOG(warn, "Could not find mipmap name from headers " << mipmapNameHeader);
return;
}
FileHeaders::const_iterator findMipmapHash = headers.find(mipmapHashHeader);
if (findMipmapHash == headers.end()) {
COLLADA_LOG(warn, "Could not find mipmap hash from headers " << mipmapHashHeader);
return;
}
Mesh::ProgressiveMipmapArchive mipmapArchive;
mipmapArchive.name = findMipmapName->second;
Transfer::Fingerprint mipmapHash;
try {
mipmapHash = Transfer::Fingerprint::convertFromHex(findMipmapHash->second);
} catch (std::invalid_argument const&) {
COLLADA_LOG(warn, "Error parsing mipmap hash from headers");
return;
}
mipmapArchive.archiveHash = mipmapHash;
COLLADA_LOG(detailed, "mipmap name " << mipmapArchive.name << " hash " << mipmapArchive.archiveHash);
// Collect consecutive level headers until the first missing level index.
Mesh::ProgressiveMipmaps mipmapList;
uint32 level = 0;
FileHeaders::const_iterator findMipmapLevel;
do {
std::string mipmapLevelHeader = mipmapBaseName + "-Level-" + boost::lexical_cast<std::string>(level);
findMipmapLevel = headers.find(mipmapLevelHeader);
if (findMipmapLevel != headers.end()) {
Mesh::ProgressiveMipmapLevel mipmapLevel;
std::vector<std::string> tokens;
boost::split(tokens, findMipmapLevel->second, boost::is_any_of(","));
if (tokens.size() != 4) {
COLLADA_LOG(warn, "Got wrong number of tokens when splitting mipmap level");
return;
}
try {
mipmapLevel.offset = uint32_lexical_cast(tokens[0]);
mipmapLevel.length = uint32_lexical_cast(tokens[1]);
mipmapLevel.width = uint32_lexical_cast(tokens[2]);
mipmapLevel.height = uint32_lexical_cast(tokens[3]);
} catch (boost::bad_lexical_cast const&) {
COLLADA_LOG(warn, "Error converting mipmap level tokens to integers");
return;
}
COLLADA_LOG(detailed, "mipmap level " << level << " has " << mipmapLevel.offset << "," << mipmapLevel.length << "," << mipmapLevel.width << "," << mipmapLevel.height);
mipmapList[level] = mipmapLevel;
}
level++;
} while (findMipmapLevel != headers.end());
mipmapArchive.mipmaps = mipmapList;
allMipmaps[mipmapArchive.name] = mipmapArchive;
}
progData->mipmaps = allMipmaps;
mesh->progressiveData = progData;
}
// Parses a COLLADA document fetched from a remote location. Returns an empty
// VisualPtr when the data does not pass the canLoad() sniff test; otherwise
// returns the parsed mesh, annotated with any progressive/subfile metadata
// found in the transfer headers via addHeaderData().
Mesh::VisualPtr ColladaSystem::load(const Transfer::RemoteFileMetadata& metadata, const Transfer::Fingerprint& fp,
                                    Transfer::DenseDataPtr data)
{
    if (!canLoad(data))
        return Mesh::VisualPtr();

    ColladaDocumentLoader loader(metadata.getURI(), fp);

    // Flatten the buffer into a single contiguous run for the parser.
    SparseData sparse = SparseData();
    sparse.addValidData(data);
    Transfer::DenseDataPtr flat = sparse.flatten();

    loader.load(reinterpret_cast<char const*>(flat->begin()), flat->length());

    Mesh::MeshdataPtr mesh = loader.getMeshdata();
    addHeaderData(metadata, mesh);
    return mesh;
}
// Parses a COLLADA document from a raw in-memory buffer. No remote metadata
// is available, so an empty URI and a null fingerprint are used and no
// header-derived metadata is attached.
Mesh::VisualPtr ColladaSystem::load(Transfer::DenseDataPtr data) {
    if (!canLoad(data))
        return Mesh::VisualPtr();

    ColladaDocumentLoader loader(Transfer::URI(""), Transfer::Fingerprint::null() );

    // Flatten the buffer into a single contiguous run for the parser.
    SparseData sparse = SparseData();
    sparse.addValidData(data);
    Transfer::DenseDataPtr flat = sparse.flatten();

    loader.load(reinterpret_cast<char const*>(flat->begin()), flat->length());

    return loader.getMeshdata();
}
// Converts a visual to COLLADA and copies the result into the given stream.
// Returns true only when both the conversion and the full copy succeeded.
bool ColladaSystem::convertVisual(const Mesh::VisualPtr& visual, const String& format, std::ostream& vout) {
    // Currently OpenCOLLADA only seems to support writing to files, despite
    // having a generic StreamWriter interface. To save to a stream, we save to
    // a temporary file and then read it back.
    String fname = Path::Get(Path::DIR_TEMP, Path::GetTempFilename("colladasystem.convertVisual."));
    bool converted = convertVisual(visual, format, fname);
    // Read it back and get it into the output stream if successful
    if (converted) {
        // The temporary file is occasionally not visible yet; retry the
        // conversion with a fresh name a bounded number of times.
        int attempts = 5;
        while (!boost::filesystem::exists(fname) && attempts >= 0) {
            fname = Path::Get(Path::DIR_TEMP, Path::GetTempFilename("colladasystem.convertVisual."));
            converted = convertVisual(visual, format, fname);
            if (!converted) {
                // Regardless of success, make sure we cleanup the file.
                if (boost::filesystem::exists(fname)) {
                    bool removed = boost::filesystem::remove(fname);
                    if (!removed) COLLADA_LOG(error, "Failed to remove temporary conversion file " << fname);
                }
                return false;
            }
            attempts--;
        }
        assert(boost::filesystem::exists(fname));
        // Sigh. It would be nice to use boost::iostreams::copy, but that closes
        // the output buffer, which may not be what we want.
        std::ifstream vin(fname.c_str(), ifstream::in | ifstream::binary);
#define COLLADA_CONVERT_BUF_SIZE 1024
        char buf[COLLADA_CONVERT_BUF_SIZE];
        bool copied_all = true;
        while(true) {
            std::streamsize nread =
                boost::iostreams::read(vin, buf, COLLADA_CONVERT_BUF_SIZE);
            if (nread == -1) break;
            std::streamsize write_pos = 0;
            while(write_pos < nread) {
                if (!vout) {
                    copied_all = false;
                    break;
                }
                // BUGFIX: resume from the current offset with the remaining
                // byte count. The previous code passed (buf, nread) on every
                // iteration, so after a partial write the already-written
                // prefix of the chunk was sent again, duplicating output.
                std::streamsize nwritten =
                    boost::iostreams::write(vout, buf + write_pos, nread - write_pos);
                write_pos += nwritten;
            }
            if (!copied_all) break;
        }
        converted = converted && copied_all;
    }
    // Regardless of success, make sure we cleanup the file.
    if (boost::filesystem::exists(fname)) {
        bool removed = boost::filesystem::remove(fname);
        if (!removed) COLLADA_LOG(error, "Failed to remove temporary conversion file " << fname);
    }
    return converted;
}
// Converts a visual to COLLADA and writes it to the named file.
// Only Meshdata visuals are supported; any other visual type fails.
bool ColladaSystem::convertVisual(const Mesh::VisualPtr& visual, const String& format, const String& filename) {
    Mesh::MeshdataPtr meshdata(std::tr1::dynamic_pointer_cast<Mesh::Meshdata>(visual));
    if (!meshdata) return false;
    // format is ignored, we only know one format
    return meshdataToCollada(*meshdata, filename) == 0;
}
} // namespace Models
} // namespace Sirikata
|
vishalbelsare/minifold
|
minifold/html_table.py
|
<gh_stars>0
#!/usr/bin/env python3
#
# This file is part of the minifold project.
# https://github.com/nokia/minifold
__author__ = "<NAME>"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__copyright__ = "Copyright (C) 2018, Nokia"
__license__ = "BSD-3"
from html.parser import HTMLParser
from minifold.log import Log
from minifold.entries_connector import EntriesConnector
class HtmlTableParser(HTMLParser):
    """HTML parser that turns ``<table>`` rows into minifold entries.

    Each ``<tr>`` becomes one dict appended to ``output_list``; each ``<td>``
    maps to the attribute name of the same index in ``columns``. Cells beyond
    ``len(columns)`` are accumulated under the last column name as a list.
    """

    def __init__(self, columns :list, output_list :list, keep_entry = None):
        """
        Constructor.
        Args:
            columns: List of attribute names, one per column index. When a
                row holds more cells than ``len(columns)``, the extra values
                are gathered under ``columns[-1]``, which then stores a list
                of strings instead of a single string.
            output_list: Output list filled with one dict per table row
                (one key/value per column).
            keep_entry: Optional callback deciding whether a parsed row is
                kept (truthy return) or discarded. Pass None to keep every
                row. Useful to drop header or otherwise irrelevant rows.
        """
        HTMLParser.__init__(self)
        self.fetch_data = False      # True while inside a <td> element
        self.columns = columns
        self.index = 0               # column index of the current cell
        self.entries = output_list
        self.entry = dict()          # row currently being assembled
        self.value = str()           # text accumulated for the current cell
        self.keep_entry = keep_entry

    def attributes(self, object :str) -> set:
        """Return the set of attribute names this parser may produce."""
        return set(self.columns)

    # Inherited abstract method
    def error(self, message):
        """Report a parse error through the minifold logger."""
        Log.error(message)

    def handle_starttag(self, tag, attrs):
        """Begin buffering character data when a table cell opens."""
        if tag == "td":
            self.fetch_data = True

    def handle_endtag(self, tag):
        """Commit the cell value on </td>, or the whole row on </tr>."""
        if tag == "td":
            self.fetch_data = False
            # Map the cell to its attribute; overflow cells share columns[-1].
            key = self.columns[min(self.index, len(self.columns) - 1)]
            if key not in self.entry:
                if self.value:
                    self.entry[key] = self.value
            else:
                # Key already present: promote the value to a list and append.
                previous = self.entry[key]
                if not isinstance(previous, list):
                    self.entry[key] = [previous]
                if self.value:
                    self.entry[key].append(self.value)
            # Reset the cell accumulator and move to the next column.
            self.value = str()
            self.index += 1
        elif tag == "tr":
            # Row finished: keep it unless the filter rejects it.
            if self.keep_entry is None or self.keep_entry(self.entry):
                self.entries.append(self.entry)
            self.index = 0
            self.entry = dict()

    def handle_data(self, data):
        """Accumulate stripped text while inside a table cell."""
        text = data.strip()
        if self.fetch_data and text:
            self.value += text
def html_table(filename :str, columns :list, keep_entry = None) -> list:
    """Parse an HTML file and return its table rows as a list of dicts.

    Args:
        filename: Path of the input HTML file.
        columns: Attribute names, one per column (see HtmlTableParser).
        keep_entry: Optional row filter (see HtmlTableParser).
    Returns:
        The list of entries, one dict per kept row.
    """
    entries = list()
    parser = HtmlTableParser(columns, entries, keep_entry)
    with open(filename, "r") as f:
        parser.feed(f.read())
    return entries
class HtmlTableConnector(EntriesConnector):
    # Connector exposing the rows of an HTML table file as minifold entries.
    def __init__(self, filename :str, columns :list, keep_entry = None):
        """
        Constructor.
        Args:
            filename: Input HTML filename.
            columns: List of attribute names, one per column index. If data
                is fetched for columns having a greater index than
                len(columns), columns[-1] is used, and this key may store a
                list of strings instead of a single string. This allows data
                spread among several columns to land in a single attribute.
            keep_entry: Callback which determines whether an entry must be
                kept or discarded. Pass None to filter nothing. This is the
                opportunity to discard a header or irrelevant row.
        """
        # First init creates the (empty) entry storage on the base class so
        # the parser can append into self.m_entries while feeding.
        super().__init__(list())
        self.m_parser = HtmlTableParser(columns, self.m_entries, keep_entry)
        with open(filename, "r") as f:
            s = f.read()
            self.m_parser.feed(s)
        # Re-run the base init with the now-populated list.
        # NOTE(review): calling super().__init__ twice presumably re-registers
        # self.m_entries after parsing — confirm against EntriesConnector;
        # if the base simply stores the list, the second call may be redundant.
        super().__init__(self.m_entries)
|
C-B4/unifiedpush-server
|
model/api/src/main/java/org/jboss/aerogear/unifiedpush/api/document/IDocumentList.java
|
package org.jboss.aerogear.unifiedpush.api.document;
import java.util.List;
/**
 * Result container pairing resolved documents with the identifiers that were
 * not resolved.
 *
 * @param <T> concrete document type carried by this list
 * @param <I> type of the identifiers reported as ignored
 */
public interface IDocumentList<T extends IDocument<?, ?>, I> {
    /** @return the documents that were found/loaded. */
    List<T> getDocuments();
    /** @return identifiers skipped during the lookup — presumably ids with no
     *  matching document; verify against the implementing classes. */
    List<I> getIgnoredIds();
}
|
EGAlberts/some_bandits
|
some_bandits/bandits/EwS.py
|
<filename>some_bandits/bandits/EwS.py
import numpy as np
from random import sample
from some_bandits.bandit_options import bandit_args
from some_bandits.utilities import convert_conf, save_to_pickle, calculate_utility
from some_bandits.bandits.Bandit import Bandit
from some_bandits.bandits.Expert import Expert
from statistics import mean
#ETA = 1
CUM_REWARD = 0
N_K = 1
class EwS(Bandit, Expert):
    """Exponential-weights-style bandit driven by empirical gaps.

    Tracks, per arm, the cumulative reward and play count, derives the
    empirical sub-optimality gap of each arm from them, and samples arms from
    a distribution proportional to exp(-2 * n_k * gap_k^2).
    """

    def __init__(self, formula):
        super().__init__("EwS-" + formula)
        self.weights, self.distribution = self.ews_initialize(len(self.arms))
        self.num_arms = len(self.arms)
        self.set_functions(formula)
        #np.random.seed(1337)
        # Per-arm [cumulative reward, number of plays] accumulators.
        self.arm_reward_pairs = {arm: [0.0, 0.0] for arm in self.arms}
        self.last_action = bandit_args["initial_configuration"]
        self.distr_func()

    def ews_initialize(self, num_arms):
        """Return the initial (weights, distribution): zero gaps, empty Pt."""
        return [0] * num_arms, []

    def start_strategy(self, reward):
        """Credit ``reward`` to the previous action, refresh the sampling
        distribution and return the next arm to play."""
        stats = self.arm_reward_pairs[self.last_action]
        stats[CUM_REWARD] += reward
        stats[N_K] += 1
        self.update_func()   # recompute empirical gaps
        self.distr_func()    # (re-)derive Pt from the gaps
        chosen = self.sample_action()
        self.last_action = self.arms[chosen]
        return self.last_action

    def distr_func(self):
        """Recompute Pt with P(i) proportional to exp(-2 * n_i * gap_i^2)."""
        scores = []
        for weight, arm in zip(self.weights, self.arms):
            plays = self.arm_reward_pairs[arm][N_K]
            scores.append(np.exp(-2 * plays * np.square(weight)))
        total = sum(scores)
        # Replace contents in place so any external reference to the
        # distribution list keeps observing the fresh probabilities.
        self.distribution[:] = [score / total for score in scores]

    def update_func(self, payoff=None, action=None):
        """Recompute empirical gaps (best mean minus each arm's mean) and
        store them as the new weights. ``payoff``/``action`` are unused."""
        means = []
        for arm in self.arms:
            total, plays = self.arm_reward_pairs[arm]
            means.append(0 if plays == 0 else total / plays)
        best = max(means)
        self.weights = [best - m for m in means]

    def propagate_reward(self, reward, chosen_action):
        """Credit ``reward`` to ``chosen_action`` and refresh gaps and Pt."""
        stats = self.arm_reward_pairs[chosen_action]
        stats[CUM_REWARD] += reward
        stats[N_K] += 1
        self.update_func()
        self.distr_func()
|
android-nostalgic/platform_dalvik
|
libcore/icu/src/main/java/com/ibm/icu4jni/math/BigDecimal.java
|
package com.ibm.icu4jni.math;
/**
 * Placeholder implementation of an ICU-backed BigDecimal.
 *
 * Every numeric accessor currently returns 0 and {@link #toString()} returns
 * the empty string — this class is an unimplemented stub (see the TODO
 * markers). Do not rely on it for arithmetic.
 */
public class BigDecimal extends Number {
    @Override
    public double doubleValue() {
        // TODO Auto-generated method stub
        return 0;
    }
    @Override
    public float floatValue() {
        // TODO Auto-generated method stub
        return 0;
    }
    @Override
    public int intValue() {
        // TODO Auto-generated method stub
        return 0;
    }
    @Override
    public long longValue() {
        // TODO Auto-generated method stub
        return 0;
    }
    @Override
    public String toString() {
        // Stub: intentionally returns an empty string, not a numeric form.
        return "";
    }
}
|
NikolaVodenicharov/WeatherJavaApp
|
app/src/main/java/com/example/weath/ui/activities/WeatherFragment.java
|
<reponame>NikolaVodenicharov/WeatherJavaApp<filename>app/src/main/java/com/example/weath/ui/activities/WeatherFragment.java
package com.example.weath.ui.activities;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import androidx.fragment.app.Fragment;
import androidx.lifecycle.Observer;
import androidx.lifecycle.ViewModelProvider;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.example.weath.R;
import com.example.weath.databinding.FragmentWeatherBinding;
import com.example.weath.domain.models.SkyCondition;
import com.example.weath.ui.models.ForecastDayUi;
import com.example.weath.ui.models.WeatherUi;
import com.example.weath.ui.utils.ForecastAdapter;
import com.example.weath.ui.viewModels.StartViewModel;
import java.util.List;
/**
 * Fragment rendering the current weather for the selected city.
 *
 * Binds {@link FragmentWeatherBinding} to the activity-scoped
 * {@link StartViewModel} and, on resume, subscribes to the weather LiveData
 * to update the sky icon, the forecast list and the error banner.
 */
public class WeatherFragment extends Fragment {
    private FragmentWeatherBinding binding;
    StartViewModel viewModel;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        binding = FragmentWeatherBinding.inflate(inflater, container, false);
        binding.setLifecycleOwner(this);
        // Activity scope so the view model is shared with sibling fragments.
        viewModel = new ViewModelProvider(getActivity()).get(StartViewModel.class);
        binding.setViewModel(viewModel);
        View view = binding.getRoot();
        return view;
    }

    @Override
    public void onResume() {
        super.onResume();
        viewModel.fillCityWeather();
        // NOTE(review): a new observer is registered on every resume without
        // removing the previous one; consider observing once with
        // getViewLifecycleOwner() in onCreateView — TODO confirm intent.
        observeSkyCondition();
        observeForecast();
        observeErrorMessage();
    }

    /** Shows or hides the error banner depending on the latest weather state. */
    private void observeErrorMessage() {
        viewModel.getWeatherUiLiveData().observe(this, new Observer<WeatherUi>() {
            @Override
            public void onChanged(WeatherUi weatherUi) {
                TextView errorMessageTextView = getView().findViewById(R.id.error_message);
                boolean noErrorMessage =
                        viewModel.getWeatherUiLiveData().getValue() == null ||
                        viewModel.getWeatherUiLiveData().getValue().getErrorMessage() == null ||
                        viewModel.getWeatherUiLiveData().getValue().getErrorMessage().isEmpty();
                if (noErrorMessage){
                    errorMessageTextView.setVisibility(View.GONE);
                }
                else{
                    errorMessageTextView.setVisibility(View.VISIBLE);
                }
            }
        });
    }

    /** Swaps the sky-condition icon whenever new weather data arrives. */
    private void observeSkyCondition() {
        viewModel.getWeatherUiLiveData().observe(this, new Observer<WeatherUi>() {
            @Override
            public void onChanged(WeatherUi weatherUi) {
                if (weatherUi == null){
                    return;
                }
                SkyCondition skyCondition = weatherUi.getSkyCondition();
                int drawableId = findSkyConditionDrawableId(skyCondition);
                ImageView imageView = getView().findViewById(R.id.imageView);
                imageView.setImageResource(drawableId);
            }
        });
    }

    /** Populates the forecast RecyclerView when forecast data is present. */
    private void observeForecast() {
        viewModel.getWeatherUiLiveData().observe(this, new Observer<WeatherUi>() {
            @Override
            public void onChanged(WeatherUi weatherUi) {
                // BUGFIX: guard against null updates, mirroring
                // observeSkyCondition(); the unguarded getForecast() call
                // previously risked a NullPointerException.
                if (weatherUi == null){
                    return;
                }
                boolean shouldDisplayForecast = weatherUi.getForecast() != null &&
                        weatherUi.getForecast().size() > 0;
                if (!shouldDisplayForecast){
                    return;
                }
                List<ForecastDayUi> forecast = weatherUi.getForecast();
                ForecastAdapter adapter = new ForecastAdapter(forecast);
                RecyclerView recyclerView = getView().findViewById(R.id.recyclerViewForecast);
                recyclerView.setAdapter(adapter);
                recyclerView.setLayoutManager(new LinearLayoutManager(getContext()));
            }
        });
    }

    /**
     * Maps a {@link SkyCondition} to its drawable resource id.
     *
     * @param skyCondition the condition reported by the weather source
     * @return the matching drawable resource id
     * @throws IllegalArgumentException when the condition has no mapping
     */
    public static int findSkyConditionDrawableId(SkyCondition skyCondition){
        switch (skyCondition){
            case CLEAR:
                return R.drawable.sun;
            case CLOUDS:
                return R.drawable.clouds;
            case RAIN:
                return R.drawable.heavy_rain;
            case THUNDERSTORM:
                return R.drawable.cloud_lightning;
            case SNOW:
                return R.drawable.snow;
            default:
                throw new IllegalArgumentException("Input parameter is not found.");
        }
    }
}
|
tlalexander/stitchEm
|
lib/include/libvideostitch/profile.hpp
|
<gh_stars>100-1000
// Copyright (c) 2012-2017 VideoStitch SAS
// Copyright (c) 2018 stitchEm
#ifndef PROFILE_HPP_
#define PROFILE_HPP_
#include "config.hpp"
#include <iosfwd>
#ifdef _MSC_VER
#include <Windows.h>
#else
#include <sys/time.h>
#endif
// macro-based profiling, enabled only when -DSIMPLEPROFILEON
#ifdef SIMPLEPROFILEON
#define SIMPLEPROFILE_MS(title) Util::SimpleProfiler prof((title), false, Logger::get(Logger::Debug))
#else
#define SIMPLEPROFILE_MS(title) (void)0
#endif
// GPU/CPU timeline analysis, enabled only with -DUSE_NVTX
#ifdef USE_NVTX
#include "nvToolsExt.h"
#else
typedef int nvtxRangeId_t;
#define nvtxRangeStartA(a) 0
#define nvtxRangeEnd(a) ((a)++)
#define nvtxMarkA(a)
#endif
namespace VideoStitch {
class ThreadSafeOstream;
namespace Util {
/**
 * @brief A simple scoped time-based profiler.
 *
 * Records the wall-clock time at construction and, on destruction, writes the
 * elapsed time together with the title to the supplied stream. Intended to be
 * used through the SIMPLEPROFILE_MS macro defined above.
 */
class VS_EXPORT SimpleProfiler {
 public:
  /**
   * Create a profiler.
   * @param title Message to display.
   * @param usecs If true, display microseconds. Else, display milliseconds.
   * @param out Where to write the message.
   */
  SimpleProfiler(const char* title, bool usecs, ThreadSafeOstream& out);
  ~SimpleProfiler();
 protected:
  /**
   * Get the initial time.
   */
  void resetTimer();
  /**
   * Compute duration between the initial time and now.
   */
  uint64_t computeDuration();
 private:
  // Platform-specific timestamp captured by resetTimer().
#ifdef _MSC_VER
  LARGE_INTEGER _tv;
#else
  struct timeval _tv;
#endif
  ThreadSafeOstream& _out;
  const char* const _title;
  // NOTE(review): unlike the other members this field lacks the '_' prefix;
  // renaming would require touching the implementation file, so left as-is.
  bool usecs;
};
/**
 * @brief A simple timer.
 *
 * Reuses SimpleProfiler's timestamp machinery but never prints on
 * destruction; callers query elapsed() explicitly.
 */
class VS_EXPORT SimpleTimer : protected SimpleProfiler {
 public:
  SimpleTimer();
  /**
   * Reset the timer.
   */
  void reset();
  /**
   * Return the elapsed time (in microseconds) since the timer was created (or reset).
   */
  uint64_t elapsed();
};
} // namespace Util
} // namespace VideoStitch
#endif
|
tizenorg/platform.core.uifw.dali-toolkit
|
automated-tests/src/dali-toolkit/utc-Dali-CubeTransitionEffect.cpp
|
<reponame>tizenorg/platform.core.uifw.dali-toolkit
/*
* Copyright (c) 2014 Samsung Electronics Co., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include <iostream>
#include <stdlib.h>
#include <dali-toolkit-test-suite-utils.h>
#include <dali-toolkit/dali-toolkit.h>
#include <dali-toolkit/devel-api/transition-effects/cube-transition-effect.h>
#include <dali-toolkit/devel-api/transition-effects/cube-transition-cross-effect.h>
#include <dali-toolkit/devel-api/transition-effects/cube-transition-fold-effect.h>
#include <dali-toolkit/devel-api/transition-effects/cube-transition-wave-effect.h>
#include <dali/devel-api/adaptor-framework/bitmap-loader.h>
#include <dali/public-api/images/buffer-image.h>
using namespace Dali;
using namespace Dali::Toolkit;
namespace
{
// Grid resolution of the cube transition effect used throughout the tests.
const unsigned int NUM_ROWS = 16;
const unsigned int NUM_COLUMNS = 10;
// Transition view area and default animation parameters.
const Vector2 VIEW_AREA_SIZE( 480.0f, 800.0f );
const float TRANSITION_DURATION = 0.5f;
const float CUBE_DISPLACEMENT = 55.f;
// Sample pan gestures: positions inside the view area plus displacements.
const Vector2 PAN_POSITION1( VIEW_AREA_SIZE.x * 0.75f, VIEW_AREA_SIZE.y * 0.25f );
const Vector2 PAN_DISPLACEMENT1( -5.f, 5.f );
const Vector2 PAN_POSITION2( VIEW_AREA_SIZE.x * 0.25f, VIEW_AREA_SIZE.y * 0.75f );
const Vector2 PAN_DISPLACEMENT2( 5.f, 5.f );
// Expected tile colors for the lit / dimmed faces during a transition.
const Vector4 FULL_BRIGHTNESS(1.f,1.f,1.f,1.f);
const Vector4 HALF_BRIGHTNESS(0.5f, 0.5f, 0.5f, 1.f);
// Milliseconds simulated per rendered frame.
const int RENDER_FRAME_INTERVAL = 16;
// NOTE(review): "EPISILON" is a long-standing misspelling of "epsilon"; kept
// because renaming would touch every test case below.
static const float FLT_EPISILON = 0.0001f;
static const float EPISILON = 0.05f;
// A moment just before the transition ends, used to sample mid-animation state.
const float TRANSITION_BEFORE_END_DURATION = TRANSITION_DURATION - 0.05f;
// Flag raised by TestCallback to prove the ObjectRegistry creation signal fired.
static bool gObjectCreatedCallBackCalled;
static void TestCallback(BaseHandle handle)
{
  gObjectCreatedCallBackCalled = true;
}
/**
 * Advance the test application by repeatedly rendering frames until the
 * requested duration has been simulated.
 * @param[in] application Test application instance.
 * @param[in] durationToPass Time to simulate, in seconds (converted to
 *            milliseconds internally).
 */
void Wait(ToolkitTestApplication& application, float durationToPass)
{
  int durationMs = static_cast<int>(durationToPass * 1000.f);
  // Render two extra frames to compensate for the frames consumed while the
  // image waits for its load-succeeded signal.
  int frames = durationMs / RENDER_FRAME_INTERVAL + 2;
  for (int frame = 0; frame <= frames; frame++)
  {
    application.SendNotification();
    application.Render(RENDER_FRAME_INTERVAL);
  }
}
//Callback class to test whether transition completed signal is emitted when the transition animation is finished
class TransitionCompletedCallback : public Dali::ConnectionTracker
{
public:
  // Holds references to the effect/texture pair expected in the signal, plus
  // an external flag flipped when a matching signal arrives.
  TransitionCompletedCallback( bool& signalReceived, CubeTransitionEffect& effect, Texture& image )
  : mSignalVerified( signalReceived ),
    mCurrentEffect( effect ),
    mActorTransitTo( image )
  {
  }
  // Slot connected to TransitionCompletedSignal; verifies the payload matches
  // the expected effect and target texture before flagging success.
  void Callback( CubeTransitionEffect effect, Texture image )
  {
    tet_infoline( "Verifying TransitionCompletedSignal" );
    if( mCurrentEffect == effect && mActorTransitTo == image )
    {
      mSignalVerified = true;
    }
  }
  // Clear the flag so the callback can be reused for another transition.
  void Reset()
  {
    mSignalVerified = false;
  }
  bool& mSignalVerified;
  CubeTransitionEffect& mCurrentEffect;
  Texture& mActorTransitTo;
};
} // namespace
// Test-suite hook: mark the result undefined until a test case runs.
void cube_transition_effect_startup(void)
{
  test_return_value = TET_UNDEF;
}
// Test-suite hook: runs after each case; reaching it marks the case passed.
void cube_transition_effect_cleanup(void)
{
  test_return_value = TET_PASS;
}
// Verifies CubeTransitionWaveEffect::New creates a valid handle and that the
// creation is observable through the ObjectRegistry creation signal.
int UtcDaliCubeTransitionWaveEffectNew(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionWaveEffectNew ");
  CubeTransitionEffect waveEffect;
  DALI_TEST_CHECK( !waveEffect );  // default-constructed handle is empty
  waveEffect = CubeTransitionWaveEffect::New( NUM_ROWS, NUM_COLUMNS );
  waveEffect.SetSize( VIEW_AREA_SIZE );
  DALI_TEST_CHECK( waveEffect );
  waveEffect.Reset();
  //Additional check to ensure object is created by checking if it's registered
  ObjectRegistry registry = Stage::GetCurrent().GetObjectRegistry();
  DALI_TEST_CHECK( registry );
  gObjectCreatedCallBackCalled = false;
  registry.ObjectCreatedSignal().Connect( &TestCallback );
  {
    CubeTransitionEffect waveEffect = CubeTransitionWaveEffect::New( NUM_ROWS, NUM_COLUMNS );
    waveEffect.SetSize( VIEW_AREA_SIZE );
  }
  DALI_TEST_CHECK( gObjectCreatedCallBackCalled );
  END_TEST;
}
// Verifies CubeTransitionCrossEffect::New creates a valid handle and that the
// creation is observable through the ObjectRegistry creation signal.
int UtcDaliCubeTransitionCrossEffectNew(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionCrossEffectNew ");
  CubeTransitionEffect crossEffect;
  DALI_TEST_CHECK( !crossEffect );  // default-constructed handle is empty
  crossEffect = CubeTransitionCrossEffect::New( NUM_ROWS, NUM_COLUMNS );
  crossEffect.SetSize( VIEW_AREA_SIZE );
  DALI_TEST_CHECK( crossEffect );
  crossEffect.Reset();
  //Additional check to ensure object is created by checking if it's registered
  ObjectRegistry registry = Stage::GetCurrent().GetObjectRegistry();
  DALI_TEST_CHECK( registry );
  gObjectCreatedCallBackCalled = false;
  registry.ObjectCreatedSignal().Connect( &TestCallback );
  {
    CubeTransitionEffect crossEffect = CubeTransitionCrossEffect::New( NUM_ROWS, NUM_COLUMNS );
    crossEffect.SetSize( VIEW_AREA_SIZE );
  }
  DALI_TEST_CHECK( gObjectCreatedCallBackCalled );
  END_TEST;
}
// Verifies CubeTransitionFoldEffect::New creates a valid handle and that the
// creation is observable through the ObjectRegistry creation signal.
int UtcDaliCubeTransitionFoldEffectNew(void)
{
  ToolkitTestApplication application;
  tet_infoline( " UtcDaliCubeTransitionFoldEffectNew " );
  CubeTransitionEffect foldEffect;
  DALI_TEST_CHECK( !foldEffect );  // default-constructed handle is empty
  foldEffect = CubeTransitionFoldEffect::New( NUM_ROWS, NUM_COLUMNS );
  foldEffect.SetSize( VIEW_AREA_SIZE );
  DALI_TEST_CHECK( foldEffect );
  foldEffect.Reset();
  //Additional check to ensure object is created by checking if it is registered
  ObjectRegistry registry = Stage::GetCurrent().GetObjectRegistry();
  DALI_TEST_CHECK( registry );
  gObjectCreatedCallBackCalled = false;
  registry.ObjectCreatedSignal().Connect( &TestCallback );
  {
    CubeTransitionEffect foldEffect = CubeTransitionFoldEffect::New( NUM_ROWS, NUM_COLUMNS );
    foldEffect.SetSize( VIEW_AREA_SIZE );
  }
  DALI_TEST_CHECK( gObjectCreatedCallBackCalled );
  END_TEST;
}
// Checks that the transition duration round-trips through Set/Get on all
// three effect variants.
int UtcDaliCubeTransitionEffectSetGetTransitionDuration(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionEffectSetGetTransitionDuration ");
  CubeTransitionEffect waveEffect = CubeTransitionWaveEffect::New( NUM_ROWS, NUM_COLUMNS );
  waveEffect.SetTransitionDuration( TRANSITION_DURATION );
  waveEffect.SetSize( VIEW_AREA_SIZE );
  DALI_TEST_EQUALS( TRANSITION_DURATION, waveEffect.GetTransitionDuration(), TEST_LOCATION );
  CubeTransitionEffect crossEffect = CubeTransitionCrossEffect::New( NUM_ROWS, NUM_COLUMNS );
  crossEffect.SetTransitionDuration( TRANSITION_DURATION );
  crossEffect.SetSize( VIEW_AREA_SIZE );
  DALI_TEST_EQUALS( TRANSITION_DURATION, crossEffect.GetTransitionDuration(), TEST_LOCATION );
  CubeTransitionEffect foldEffect = CubeTransitionFoldEffect::New( NUM_ROWS, NUM_COLUMNS );
  foldEffect.SetSize( VIEW_AREA_SIZE );
  foldEffect.SetTransitionDuration( TRANSITION_DURATION );
  DALI_TEST_EQUALS( TRANSITION_DURATION, foldEffect.GetTransitionDuration(), TEST_LOCATION );
  END_TEST;
}
// Checks that the cube displacement round-trips through Set/Get on the wave
// and cross variants (the fold effect does not use displacement).
int UtcDaliCubeTransitionEffectSetGetCubeDisplacement(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionEffectSetGetTransitionDuration ");
  CubeTransitionEffect waveEffect = CubeTransitionWaveEffect::New( NUM_ROWS, NUM_COLUMNS);
  waveEffect.SetSize( VIEW_AREA_SIZE );
  waveEffect.SetCubeDisplacement( CUBE_DISPLACEMENT );
  DALI_TEST_EQUALS( CUBE_DISPLACEMENT, waveEffect.GetCubeDisplacement(), TEST_LOCATION );
  CubeTransitionEffect crossEffect = CubeTransitionCrossEffect::New( NUM_ROWS, NUM_COLUMNS );
  crossEffect.SetSize( VIEW_AREA_SIZE );
  crossEffect.SetCubeDisplacement( CUBE_DISPLACEMENT );
  DALI_TEST_EQUALS( CUBE_DISPLACEMENT, crossEffect.GetCubeDisplacement(), TEST_LOCATION );
  //Cube displacement is not used in CubeTransitionFoldEffect
  END_TEST;
}
//Test common codes in base class
// Verifies the effect builds a NUM_ROWS x NUM_COLUMNS grid of cubes and that
// each cube carries exactly two tiles (current + target face).
int UtcDaliCubeTransitionEffectGetRoot(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionEffectGetRoot ");
  unsigned int totalNum = NUM_ROWS*NUM_COLUMNS;
  Texture texture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 40, 40 );
  CubeTransitionEffect waveEffect = CubeTransitionWaveEffect::New( NUM_ROWS, NUM_COLUMNS );
  waveEffect.SetSize( VIEW_AREA_SIZE );
  Stage::GetCurrent().Add( waveEffect );
  waveEffect.SetCurrentTexture( texture );
  waveEffect.SetTargetTexture( texture );
  application.SendNotification();
  application.Render();
  waveEffect.StartTransition();
  // Sample mid-transition so the cube hierarchy is fully constructed.
  Wait( application, TRANSITION_DURATION * 0.5f );
  // check that we have a total of NUM_ROWS*NUM_COLUMNS cubes;
  Actor boxesRoot = waveEffect.GetChildAt(0);
  DALI_TEST_CHECK( totalNum == boxesRoot.GetChildCount() );
  // check that every cube has two children
  DALI_TEST_CHECK( 2 == boxesRoot.GetChildAt(0).GetChildCount() );
  DALI_TEST_CHECK( 2 == boxesRoot.GetChildAt(totalNum/2).GetChildCount() );
  DALI_TEST_CHECK( 2 == boxesRoot.GetChildAt(totalNum-1).GetChildCount() );
  END_TEST;
}
// Verifies IsTransitioning reports true only between StartTransition and the
// end of the animation, for all three effect variants.
int UtcDaliCubeTransitionEffectIsTransitioning(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionEffectIsTransiting ");
  application.GetGlAbstraction().SetCheckFramebufferStatusResult(GL_FRAMEBUFFER_COMPLETE );
  Texture texture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 40, 40 );
  CubeTransitionEffect waveEffect = CubeTransitionWaveEffect::New( NUM_ROWS, NUM_COLUMNS );
  waveEffect.SetSize( VIEW_AREA_SIZE );
  Stage::GetCurrent().Add( waveEffect );
  waveEffect.SetTransitionDuration( TRANSITION_DURATION );
  waveEffect.SetCubeDisplacement( CUBE_DISPLACEMENT );
  DALI_TEST_CHECK( !waveEffect.IsTransitioning() );
  waveEffect.SetCurrentTexture( texture );
  waveEffect.SetTargetTexture( texture );
  //transition is started
  waveEffect.StartTransition();
  DALI_TEST_CHECK( waveEffect.IsTransitioning() );
  //transition is finished
  Wait( application, TRANSITION_DURATION );
  DALI_TEST_CHECK( !waveEffect.IsTransitioning() );
  CubeTransitionEffect crossEffect = CubeTransitionCrossEffect::New( NUM_ROWS, NUM_COLUMNS );
  crossEffect.SetSize( VIEW_AREA_SIZE );
  Stage::GetCurrent().Add( crossEffect );
  crossEffect.SetTransitionDuration( TRANSITION_DURATION );
  crossEffect.SetCubeDisplacement( CUBE_DISPLACEMENT );
  DALI_TEST_CHECK( !crossEffect.IsTransitioning() );
  crossEffect.SetCurrentTexture( texture );
  crossEffect.SetTargetTexture( texture );
  //transition is started (panning right-to-left variant)
  crossEffect.StartTransition(false);
  DALI_TEST_CHECK( crossEffect.IsTransitioning() );
  //transition is finished
  Wait( application, TRANSITION_DURATION );
  DALI_TEST_CHECK( !crossEffect.IsTransitioning() );
  CubeTransitionEffect foldEffect = CubeTransitionFoldEffect::New( NUM_ROWS, NUM_COLUMNS );
  foldEffect.SetSize( VIEW_AREA_SIZE );
  Stage::GetCurrent().Add( foldEffect );
  foldEffect.SetTransitionDuration( TRANSITION_DURATION );
  DALI_TEST_CHECK( !foldEffect.IsTransitioning() );
  foldEffect.SetCurrentTexture( texture );
  foldEffect.SetTargetTexture( texture );
  //transition is started
  foldEffect.StartTransition(true);
  DALI_TEST_CHECK(foldEffect.IsTransitioning() );
  //transition is finished
  Wait( application, TRANSITION_DURATION );
  DALI_TEST_CHECK( !foldEffect.IsTransitioning() );
  END_TEST;
}
//Test common codes in base class
// Verifies SetCurrentTexture applies the expected per-tile texture rectangle
// (1/NUM_COLUMNS x 1/NUM_ROWS) to the tile facing the camera.
int UtcDaliCubeTransitionEffectSetCurrentTexture(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionEffectSetCurrentTexture ");
  application.GetGlAbstraction().SetCheckFramebufferStatusResult(GL_FRAMEBUFFER_COMPLETE );
  Texture texture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 40, 40 );
  CubeTransitionEffect waveEffect = CubeTransitionWaveEffect::New( NUM_ROWS, NUM_COLUMNS );
  waveEffect.SetSize( VIEW_AREA_SIZE );
  waveEffect.SetCurrentTexture( texture );
  Stage::GetCurrent().Add( waveEffect );
  application.SendNotification();
  application.Render();
  waveEffect.StartTransition();
  // the current image content is set to the tiles facing the camera
  Actor currentTile = waveEffect.GetChildAt(0).GetChildAt(0).GetChildAt(0);
  Actor targetTile = waveEffect.GetChildAt(0).GetChildAt(0).GetChildAt(1);
  //check the pixel area set to the cube
  Vector4 pixelAreaDef( 0.f, 0.f, 1.f / NUM_COLUMNS, 1.f / NUM_ROWS);
  Property::Index textureRectIndex = currentTile.GetPropertyIndex( "uTextureRect" );
  DALI_TEST_CHECK( textureRectIndex != Property::INVALID_INDEX );
  Property::Value textureRectValue = currentTile.GetProperty( textureRectIndex );
  DALI_TEST_CHECK( textureRectValue.GetType() == Property::VECTOR4 );
  Vector4 pixelArea;
  DALI_TEST_CHECK( textureRectValue.Get( pixelArea ) );
  DALI_TEST_EQUALS( pixelAreaDef, pixelArea, FLT_EPISILON, TEST_LOCATION );
  END_TEST;
}
//Test common codes in base class
// Verifies SetTargetTexture applies the expected per-tile texture rectangle
// (1/NUM_COLUMNS x 1/NUM_ROWS) to the tile hidden from the camera.
int UtcDaliCubeTransitionEffectSetTargetTexture(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionEffectSetTargetTexture ");
  application.GetGlAbstraction().SetCheckFramebufferStatusResult(GL_FRAMEBUFFER_COMPLETE );
  Texture texture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 40, 40 );
  CubeTransitionEffect waveEffect = CubeTransitionWaveEffect::New( NUM_ROWS, NUM_COLUMNS );
  waveEffect.SetSize( VIEW_AREA_SIZE );
  // FIX: the effect was added to the stage twice (copy-paste duplication);
  // a single Add is sufficient.
  Stage::GetCurrent().Add( waveEffect );
  waveEffect.SetCurrentTexture( texture );
  waveEffect.SetTargetTexture( texture );
  application.SendNotification();
  application.Render();
  waveEffect.StartTransition();
  // the target image content is set to the tiles currently invisible to the camera
  Actor tile = waveEffect.GetChildAt(0).GetChildAt(0).GetChildAt(1);
  //check the pixel area set to the cube
  Vector4 pixelAreaDef( 0.f, 0.f, 1.f / NUM_COLUMNS, 1.f / NUM_ROWS);
  Property::Index textureRectIndex = tile.GetPropertyIndex( "uTextureRect" );
  // Consistency fix: compare against Property::INVALID_INDEX like the
  // SetCurrentTexture test, instead of the magic value -1.
  DALI_TEST_CHECK( textureRectIndex != Property::INVALID_INDEX );
  Property::Value textureRectValue = tile.GetProperty( textureRectIndex );
  DALI_TEST_CHECK( textureRectValue.GetType() == Property::VECTOR4 );
  Vector4 pixelArea;
  DALI_TEST_CHECK( textureRectValue.Get( pixelArea ) );
  DALI_TEST_EQUALS( pixelAreaDef, pixelArea, FLT_EPISILON, TEST_LOCATION );
  END_TEST;
}
// Exercises the four StartTransition overload/direction combinations of the
// wave effect and samples cube orientation and face brightness just before
// each transition ends.
int UtcDaliCubeTransitionWaveEffectStartTransition(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionWaveEffectStartTransition ");
  application.GetGlAbstraction().SetCheckFramebufferStatusResult(GL_FRAMEBUFFER_COMPLETE );
  BitmapLoader loader = BitmapLoader::New( "Image.jpg" );
  loader.Load();
  PixelData pixelData = loader.GetPixelData();
  Texture texture = Texture::New( TextureType::TEXTURE_2D, pixelData.GetPixelFormat(), pixelData.GetWidth(), pixelData.GetHeight() );
  texture.Upload( pixelData );
  CubeTransitionEffect waveEffect = CubeTransitionWaveEffect::New( NUM_ROWS, NUM_COLUMNS );
  waveEffect.SetSize( VIEW_AREA_SIZE );
  waveEffect.SetTransitionDuration( TRANSITION_DURATION );
  waveEffect.SetCubeDisplacement( CUBE_DISPLACEMENT );
  waveEffect.SetCurrentTexture( texture );
  Stage::GetCurrent().Add( waveEffect );
  application.SendNotification();
  application.Render();
  waveEffect.StartTransition( true );
  Actor cube = waveEffect.GetChildAt(0).GetChildAt(0);
  //check the cube rotation value and color values just before the end of different transitions
  waveEffect.SetTargetTexture( texture );
  Wait( application, TRANSITION_BEFORE_END_DURATION );
  // Forward transition: cube rotates -90deg around Y; front face dims.
  DALI_TEST_EQUALS( cube.GetCurrentOrientation(), Quaternion( -Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(0).GetCurrentColor(), HALF_BRIGHTNESS, EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(1).GetCurrentColor(), FULL_BRIGHTNESS, EPISILON, TEST_LOCATION );
  waveEffect.SetTargetTexture( texture );
  waveEffect.StartTransition(PAN_POSITION1, PAN_DISPLACEMENT1);
  Wait( application, TRANSITION_BEFORE_END_DURATION );
  DALI_TEST_EQUALS( cube.GetCurrentOrientation(), Quaternion( -Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(0).GetCurrentColor(), HALF_BRIGHTNESS, EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(1).GetCurrentColor(), FULL_BRIGHTNESS, EPISILON, TEST_LOCATION );
  waveEffect.SetTargetTexture( texture );
  waveEffect.StartTransition(false);
  Wait( application, TRANSITION_BEFORE_END_DURATION );
  // Reverse transition: rotation flips to +90deg around Y.
  DALI_TEST_EQUALS( cube.GetCurrentOrientation(), Quaternion( Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(0).GetCurrentColor(), HALF_BRIGHTNESS, EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(1).GetCurrentColor(), FULL_BRIGHTNESS, EPISILON, TEST_LOCATION );
  waveEffect.SetTargetTexture( texture );
  waveEffect.StartTransition(PAN_POSITION2, PAN_DISPLACEMENT2);
  Wait( application, TRANSITION_BEFORE_END_DURATION );
  DALI_TEST_EQUALS( cube.GetCurrentOrientation(), Quaternion( Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(0).GetCurrentColor(), HALF_BRIGHTNESS, EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(1).GetCurrentColor(), FULL_BRIGHTNESS, EPISILON, TEST_LOCATION );
  END_TEST;
}
// Checks CubeTransitionCrossEffect for all four StartTransition overload
// combinations (forward/backward boolean, and the two pan variants).
// Just before each transition ends, the front cube pair must have rotated a
// quarter turn (opposite axes for the two halves of the cross) and the two
// faces must show the expected half/full brightness.
int UtcDaliCubeTransitionCrossEffectStartTransition(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionCrossEffectStartTransition ");

  // Pretend the off-screen framebuffer is usable in the stubbed GL.
  application.GetGlAbstraction().SetCheckFramebufferStatusResult(GL_FRAMEBUFFER_COMPLETE );

  // Build a texture from a (stubbed) bitmap load to drive the effect.
  BitmapLoader loader = BitmapLoader::New( "Image.jpg" );
  loader.Load();
  PixelData pixelData = loader.GetPixelData();
  Texture texture = Texture::New( TextureType::TEXTURE_2D, pixelData.GetPixelFormat(), pixelData.GetWidth(), pixelData.GetHeight() );
  texture.Upload( pixelData );

  CubeTransitionEffect crossEffect = CubeTransitionCrossEffect::New( NUM_ROWS, NUM_COLUMNS );
  crossEffect.SetSize( VIEW_AREA_SIZE );
  crossEffect.SetTransitionDuration( TRANSITION_DURATION );
  crossEffect.SetCubeDisplacement( CUBE_DISPLACEMENT );
  crossEffect.SetCurrentTexture( texture );
  crossEffect.SetTargetTexture( texture );

  Stage::GetCurrent().Add( crossEffect );
  application.SendNotification();
  application.Render();

  // Transition 1: forward (boolean overload).
  crossEffect.StartTransition(true);

  // The first two cubes of the first row; the cross effect rotates them
  // about different axes (Y for cube1, X for cube0).
  Actor cube0 = crossEffect.GetChildAt(0).GetChildAt(0);
  Actor cube1 = crossEffect.GetChildAt(0).GetChildAt(1);

  //check the cube rotation value and color values just before the end of different transitions
  Wait( application, TRANSITION_BEFORE_END_DURATION );
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( -Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( Dali::ANGLE_90, Vector3::XAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), HALF_BRIGHTNESS, EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), FULL_BRIGHTNESS, EPISILON, TEST_LOCATION );

  // Transition 2: pan-driven, same direction as transition 1.
  crossEffect.SetTargetTexture( texture );
  crossEffect.StartTransition(PAN_POSITION1, PAN_DISPLACEMENT1);
  Wait( application, TRANSITION_BEFORE_END_DURATION );
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( -Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( Dali::ANGLE_90, Vector3::XAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), HALF_BRIGHTNESS, EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), FULL_BRIGHTNESS, EPISILON, TEST_LOCATION );

  // Transition 3: backward (boolean overload) — rotation signs flip.
  crossEffect.SetTargetTexture( texture );
  crossEffect.StartTransition(false);
  Wait( application, TRANSITION_BEFORE_END_DURATION );
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( -Dali::ANGLE_90, Vector3::XAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), HALF_BRIGHTNESS, EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), FULL_BRIGHTNESS, EPISILON, TEST_LOCATION );

  // Transition 4: pan-driven in the opposite direction.
  crossEffect.SetTargetTexture( texture );
  crossEffect.StartTransition(PAN_POSITION2, PAN_DISPLACEMENT2);
  Wait( application, TRANSITION_BEFORE_END_DURATION );
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( -Dali::ANGLE_90, Vector3::XAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), HALF_BRIGHTNESS, EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), FULL_BRIGHTNESS, EPISILON, TEST_LOCATION );
  END_TEST;
}
// Checks CubeTransitionFoldEffect for all four StartTransition overload
// combinations. Unlike the cross effect, both cubes of a pair fold about the
// Y axis with opposite angle signs; face brightness expectations are the same.
int UtcDaliCubeTransitionFoldEffectStartTransition(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionFoldEffectStartTransition ");

  // Pretend the off-screen framebuffer is usable in the stubbed GL.
  application.GetGlAbstraction().SetCheckFramebufferStatusResult(GL_FRAMEBUFFER_COMPLETE );

  Texture texture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 40, 40 );

  // Note: the fold effect has no cube-displacement parameter.
  CubeTransitionEffect foldEffect = CubeTransitionFoldEffect::New( NUM_ROWS, NUM_COLUMNS );
  foldEffect.SetSize( VIEW_AREA_SIZE );
  foldEffect.SetTransitionDuration( TRANSITION_DURATION );
  foldEffect.SetCurrentTexture( texture );
  foldEffect.SetTargetTexture( texture );

  Stage::GetCurrent().Add( foldEffect );
  application.SendNotification();
  application.Render();

  // Transition 1: forward (boolean overload).
  foldEffect.StartTransition(true);

  Actor cube0 = foldEffect.GetChildAt(0).GetChildAt(0);
  Actor cube1 = foldEffect.GetChildAt(0).GetChildAt(1);

  //check the cube rotation value and color values just before the end of different transitions
  Wait( application, TRANSITION_BEFORE_END_DURATION );
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( -Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), HALF_BRIGHTNESS, EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), FULL_BRIGHTNESS, EPISILON, TEST_LOCATION );

  // Transition 2: pan-driven, same direction as transition 1.
  foldEffect.SetTargetTexture( texture );
  foldEffect.StartTransition(PAN_POSITION1, PAN_DISPLACEMENT1);
  Wait( application, TRANSITION_BEFORE_END_DURATION );
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( -Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), HALF_BRIGHTNESS, EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), FULL_BRIGHTNESS, EPISILON, TEST_LOCATION );

  // Transition 3: backward (boolean overload) — rotation signs flip.
  foldEffect.SetTargetTexture( texture );
  foldEffect.StartTransition(false);
  Wait( application, TRANSITION_BEFORE_END_DURATION );
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( -Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), HALF_BRIGHTNESS, EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), FULL_BRIGHTNESS, EPISILON, TEST_LOCATION );

  // Transition 4: pan-driven in the opposite direction.
  foldEffect.SetTargetTexture( texture );
  foldEffect.StartTransition(PAN_POSITION2, PAN_DISPLACEMENT2);
  Wait( application, TRANSITION_BEFORE_END_DURATION );
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( -Dali::ANGLE_90, Vector3::YAXIS), EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), HALF_BRIGHTNESS, EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), FULL_BRIGHTNESS, EPISILON, TEST_LOCATION );
  END_TEST;
}
// Verifies that TransitionCompletedSignal fires with the expected effect and
// target texture for the wave, cross and fold effects, covering both the
// pan-gesture and the boolean StartTransition overloads.
int UtcDaliCubeTransitionEffectSignalTransitionCompleted(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionEffectSignalTransitionCompleted ");

  // Pretend the off-screen framebuffer is usable in the stubbed GL.
  application.GetGlAbstraction().SetCheckFramebufferStatusResult(GL_FRAMEBUFFER_COMPLETE );

  Texture firstTexture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 30, 30 );
  Texture secondTexture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 20, 20 );
  Texture thirdTexture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 40, 40 );

  CubeTransitionEffect waveEffect = CubeTransitionWaveEffect::New( NUM_ROWS, NUM_COLUMNS );
  waveEffect.SetSize( VIEW_AREA_SIZE );
  waveEffect.SetTransitionDuration( TRANSITION_DURATION );
  waveEffect.SetCubeDisplacement( CUBE_DISPLACEMENT );
  Stage::GetCurrent().Add( waveEffect );

  CubeTransitionEffect crossEffect = CubeTransitionCrossEffect::New( NUM_ROWS, NUM_COLUMNS );
  crossEffect.SetSize( VIEW_AREA_SIZE );
  crossEffect.SetTransitionDuration( TRANSITION_DURATION );
  crossEffect.SetCubeDisplacement( CUBE_DISPLACEMENT );
  Stage::GetCurrent().Add( crossEffect );

  // BUG FIX: this effect is exercised below as the fold effect, but was
  // constructed with CubeTransitionCrossEffect::New. Use the fold factory
  // (note the fold effect takes no cube displacement, matching this setup).
  CubeTransitionEffect foldEffect = CubeTransitionFoldEffect::New( NUM_ROWS, NUM_COLUMNS );
  foldEffect.SetSize( VIEW_AREA_SIZE );
  foldEffect.SetTransitionDuration( TRANSITION_DURATION );
  Stage::GetCurrent().Add( foldEffect );

  // The callback compares the emitting effect / texture against these two
  // variables, so reassigning them below re-targets the expectation.
  bool signalVerified = false;
  CubeTransitionEffect currentEffect;
  Texture actorTransitTo;
  TransitionCompletedCallback callback(signalVerified, currentEffect, actorTransitTo);
  waveEffect.TransitionCompletedSignal().Connect( &callback, &TransitionCompletedCallback::Callback );
  crossEffect.TransitionCompletedSignal().Connect( &callback, &TransitionCompletedCallback::Callback );
  foldEffect.TransitionCompletedSignal().Connect( &callback, &TransitionCompletedCallback::Callback );

  //check that the wave effect is used to transit to secondTexture
  currentEffect = waveEffect;
  actorTransitTo = secondTexture;
  waveEffect.SetCurrentTexture( firstTexture );
  waveEffect.SetTargetTexture( secondTexture );
  waveEffect.StartTransition(PAN_POSITION1, PAN_DISPLACEMENT1);
  Wait( application, TRANSITION_DURATION );
  DALI_TEST_CHECK(callback.mSignalVerified);
  callback.Reset();

  //check that the wave effect is used to transit to thirdTexture
  actorTransitTo = thirdTexture;
  waveEffect.SetTargetTexture( thirdTexture );
  waveEffect.StartTransition(PAN_POSITION2, PAN_DISPLACEMENT2);
  Wait( application, TRANSITION_DURATION );
  DALI_TEST_CHECK(callback.mSignalVerified);
  callback.Reset();

  //check that the cross effect is used to transit to secondTexture
  currentEffect = crossEffect;
  actorTransitTo = secondTexture;
  crossEffect.SetCurrentTexture( thirdTexture );
  crossEffect.SetTargetTexture( secondTexture );
  crossEffect.StartTransition(true);
  Wait( application, TRANSITION_DURATION );
  DALI_TEST_CHECK(callback.mSignalVerified);
  callback.Reset();

  //check that the cross effect is used to transit to firstTexture
  actorTransitTo = firstTexture;
  crossEffect.SetTargetTexture( firstTexture );
  crossEffect.StartTransition(false);
  Wait( application, TRANSITION_DURATION );
  DALI_TEST_CHECK(callback.mSignalVerified);
  callback.Reset();

  //check that the fold effect is used to transit to secondTexture
  currentEffect = foldEffect;
  actorTransitTo = secondTexture;
  foldEffect.SetCurrentTexture( firstTexture );
  foldEffect.SetTargetTexture( secondTexture );
  foldEffect.StartTransition();
  Wait( application, TRANSITION_DURATION );
  DALI_TEST_CHECK( callback.mSignalVerified );
  callback.Reset();

  //check that the fold effect is used to transit to thirdTexture
  actorTransitTo = thirdTexture;
  foldEffect.SetTargetTexture( thirdTexture );
  foldEffect.StartTransition( false );
  Wait( application, TRANSITION_DURATION );
  DALI_TEST_CHECK( callback.mSignalVerified );
  END_TEST;
}
// Verifies that PauseTransition/ResumeTransition stretch the wall-clock time
// to completion: the TransitionCompletedSignal must only arrive once the
// resumed animation has accumulated a full TRANSITION_DURATION of play time,
// for each of the wave, cross and fold effects.
int UtcDaliCubeTransitionEffectPauseResumeTransition(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionEffectPauseResumeTransition ");

  // Pretend the off-screen framebuffer is usable in the stubbed GL.
  application.GetGlAbstraction().SetCheckFramebufferStatusResult(GL_FRAMEBUFFER_COMPLETE );

  Texture firstTexture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 30, 30 );
  Texture secondTexture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 20, 20 );

  CubeTransitionEffect waveEffect = CubeTransitionWaveEffect::New( NUM_ROWS, NUM_COLUMNS );
  waveEffect.SetSize( VIEW_AREA_SIZE );
  waveEffect.SetTransitionDuration( TRANSITION_DURATION );
  waveEffect.SetCubeDisplacement( CUBE_DISPLACEMENT );
  Stage::GetCurrent().Add( waveEffect );

  CubeTransitionEffect crossEffect = CubeTransitionCrossEffect::New( NUM_ROWS, NUM_COLUMNS );
  crossEffect.SetSize( VIEW_AREA_SIZE );
  crossEffect.SetTransitionDuration( TRANSITION_DURATION );
  crossEffect.SetCubeDisplacement( CUBE_DISPLACEMENT );
  Stage::GetCurrent().Add( crossEffect );

  // Fold effect has no cube-displacement parameter.
  CubeTransitionEffect foldEffect = CubeTransitionFoldEffect::New( NUM_ROWS, NUM_COLUMNS );
  foldEffect.SetSize( VIEW_AREA_SIZE );
  foldEffect.SetTransitionDuration( TRANSITION_DURATION );
  Stage::GetCurrent().Add( foldEffect );

  // Shared completion callback; currentEffect/actorTransitTo are the
  // expected emitter and target texture for the next transition.
  bool signalVerified = false;
  CubeTransitionEffect currentEffect;
  Texture actorTransitTo;
  TransitionCompletedCallback callback(signalVerified, currentEffect, actorTransitTo);
  waveEffect.TransitionCompletedSignal().Connect( &callback, &TransitionCompletedCallback::Callback );
  crossEffect.TransitionCompletedSignal().Connect( &callback, &TransitionCompletedCallback::Callback );
  foldEffect.TransitionCompletedSignal().Connect( &callback, &TransitionCompletedCallback::Callback );

  currentEffect = waveEffect;
  actorTransitTo = secondTexture;
  waveEffect.SetCurrentTexture( firstTexture );
  waveEffect.SetTargetTexture( secondTexture );
  // start transition; transit for 0.5*duration; pause for 0.5*duration;
  // resume for 0.25*duration; pause for 0.25*duration; resume for another 0.25*duration;
  // only until now the transition finished signal can be received
  waveEffect.StartTransition(PAN_POSITION1, PAN_DISPLACEMENT1);
  Wait( application, TRANSITION_DURATION*0.5f );
  DALI_TEST_CHECK(!callback.mSignalVerified);
  waveEffect.PauseTransition();
  Wait( application, TRANSITION_DURATION*0.5f );
  DALI_TEST_CHECK(!callback.mSignalVerified);
  waveEffect.ResumeTransition();
  Wait( application, TRANSITION_DURATION*0.25f );
  DALI_TEST_CHECK(!callback.mSignalVerified);
  waveEffect.PauseTransition();
  Wait( application, TRANSITION_DURATION*0.25f );
  DALI_TEST_CHECK(!callback.mSignalVerified);
  waveEffect.ResumeTransition();
  Wait( application, TRANSITION_DURATION*0.25f );
  DALI_TEST_CHECK(callback.mSignalVerified);
  callback.Reset();

  currentEffect = crossEffect;
  actorTransitTo = firstTexture;
  crossEffect.SetCurrentTexture( secondTexture );
  crossEffect.SetTargetTexture( firstTexture );
  // start transition; transit for 0.25*duration; pause for 0.2*duration;
  // resume for 0.5*duration; pause for 0.2*duration; resume for another 0.25*duration;
  // only until now the transition finished signal can be received
  crossEffect.StartTransition(false);
  Wait( application, TRANSITION_DURATION*0.25f );
  DALI_TEST_CHECK(!callback.mSignalVerified);
  crossEffect.PauseTransition();
  Wait( application, TRANSITION_DURATION*0.2f );
  DALI_TEST_CHECK(!callback.mSignalVerified);
  crossEffect.ResumeTransition();
  Wait( application, TRANSITION_DURATION*0.5f );
  DALI_TEST_CHECK(!callback.mSignalVerified);
  crossEffect.PauseTransition();
  Wait( application, TRANSITION_DURATION*0.2f );
  DALI_TEST_CHECK(!callback.mSignalVerified);
  crossEffect.ResumeTransition();
  Wait( application, TRANSITION_DURATION*0.25f );
  DALI_TEST_CHECK(callback.mSignalVerified);
  callback.Reset();

  currentEffect = foldEffect;
  actorTransitTo = secondTexture;
  foldEffect.SetCurrentTexture( firstTexture );
  foldEffect.SetTargetTexture( secondTexture );
  // start transition; transit for 0.5*duration; pause for 0.5*duration;
  // resume for 0.25*duration; pause for 0.25*duration; resume for another 0.25*duration;
  // only until now the transition finished signal can be received
  foldEffect.StartTransition(PAN_POSITION1, PAN_DISPLACEMENT1);
  Wait( application, TRANSITION_DURATION*0.5f );
  DALI_TEST_CHECK(!callback.mSignalVerified);
  foldEffect.PauseTransition();
  Wait( application, TRANSITION_DURATION*0.5f );
  DALI_TEST_CHECK(!callback.mSignalVerified);
  foldEffect.ResumeTransition();
  Wait( application, TRANSITION_DURATION*0.25f );
  DALI_TEST_CHECK(!callback.mSignalVerified);
  foldEffect.PauseTransition();
  Wait( application, TRANSITION_DURATION*0.25f );
  DALI_TEST_CHECK(!callback.mSignalVerified);
  foldEffect.ResumeTransition();
  Wait( application, TRANSITION_DURATION*0.25f );
  DALI_TEST_CHECK(callback.mSignalVerified);
  END_TEST;
}
// Verifies that stopping a wave transition part-way through (at 20%, 40%,
// 60% and 80% of the duration, one per StartTransition overload) snaps the
// cube back to identity orientation and restores the front/back face colors.
int UtcDaliCubeTransitionWaveEffectStopTransition(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionWaveEffectStopTransition ");

  // Pretend the off-screen framebuffer is usable in the stubbed GL.
  application.GetGlAbstraction().SetCheckFramebufferStatusResult(GL_FRAMEBUFFER_COMPLETE );

  Texture firstTexture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 30, 30 );
  Texture secondTexture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 20, 20 );

  CubeTransitionEffect waveEffect = CubeTransitionWaveEffect::New( NUM_ROWS, NUM_COLUMNS );
  waveEffect.SetSize( VIEW_AREA_SIZE );
  waveEffect.SetTransitionDuration( TRANSITION_DURATION );
  waveEffect.SetCubeDisplacement( CUBE_DISPLACEMENT );
  waveEffect.SetCurrentTexture( firstTexture );
  waveEffect.SetTargetTexture( secondTexture );

  Stage::GetCurrent().Add( waveEffect );
  application.SendNotification();
  application.Render();

  // Stop 1: boolean overload, stopped at 20% of the duration.
  waveEffect.StartTransition(true);

  Actor cube = waveEffect.GetChildAt(0).GetChildAt(0);

  //check the cube rotation value and color values reset after stopping different transitions in the middle
  Wait( application, TRANSITION_DURATION*0.2f );
  waveEffect.StopTransition();
  application.SendNotification();
  application.Render(RENDER_FRAME_INTERVAL);
  DALI_TEST_EQUALS( cube.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::ZERO), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(0).GetCurrentColor(), FULL_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(1).GetCurrentColor(), HALF_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );

  // Stop 2: pan overload, stopped at 40%.
  waveEffect.SetTargetTexture( firstTexture );
  waveEffect.StartTransition(PAN_POSITION1, PAN_DISPLACEMENT1);
  Wait( application, TRANSITION_DURATION*0.4f );
  waveEffect.StopTransition();
  application.SendNotification();
  application.Render(RENDER_FRAME_INTERVAL);
  DALI_TEST_EQUALS( cube.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::ZERO), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(0).GetCurrentColor(), FULL_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(1).GetCurrentColor(), HALF_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );

  // Stop 3: backward boolean overload, stopped at 60%.
  waveEffect.SetTargetTexture( secondTexture );
  waveEffect.StartTransition(false);
  Wait( application, TRANSITION_DURATION*0.6f );
  waveEffect.StopTransition();
  application.SendNotification();
  application.Render(RENDER_FRAME_INTERVAL);
  DALI_TEST_EQUALS( cube.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::ZERO), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(0).GetCurrentColor(), FULL_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(1).GetCurrentColor(), HALF_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );

  // Stop 4: opposite pan overload, stopped at 80%.
  waveEffect.SetTargetTexture( firstTexture );
  waveEffect.StartTransition(PAN_POSITION2, PAN_DISPLACEMENT2);
  Wait( application, TRANSITION_DURATION*0.8f );
  waveEffect.StopTransition();
  application.SendNotification();
  application.Render(RENDER_FRAME_INTERVAL);
  DALI_TEST_EQUALS( cube.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::ZERO), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(0).GetCurrentColor(), FULL_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube.GetChildAt(1).GetCurrentColor(), HALF_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );
  END_TEST;
}
// Verifies that stopping a cross transition part-way through (at 20%, 40%,
// 60% and 80% of the duration, one per StartTransition overload) resets both
// cubes of the pair to identity orientation and the original face colors.
int UtcDaliCubeTransitionCrossEffectStopTransition(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionCrossEffectStopTransition ");

  // Pretend the off-screen framebuffer is usable in the stubbed GL.
  application.GetGlAbstraction().SetCheckFramebufferStatusResult(GL_FRAMEBUFFER_COMPLETE );

  Texture firstTexture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 30, 30 );
  Texture secondTexture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 20, 20 );

  CubeTransitionEffect crossEffect = CubeTransitionCrossEffect::New( NUM_ROWS, NUM_COLUMNS );
  crossEffect.SetSize( VIEW_AREA_SIZE );
  crossEffect.SetTransitionDuration( TRANSITION_DURATION );
  crossEffect.SetCubeDisplacement( CUBE_DISPLACEMENT );
  crossEffect.SetCurrentTexture( firstTexture );
  crossEffect.SetTargetTexture( secondTexture );

  Stage::GetCurrent().Add( crossEffect );
  application.SendNotification();
  application.Render();

  // Stop 1: boolean overload, stopped at 20% of the duration.
  crossEffect.StartTransition(true);

  Actor cube0 = crossEffect.GetChildAt(0).GetChildAt(0);
  Actor cube1 = crossEffect.GetChildAt(0).GetChildAt(1);

  //check the cube rotation values and color values reset after stop the different transitions in the middle
  Wait( application, TRANSITION_DURATION*0.2f );
  crossEffect.StopTransition();
  application.SendNotification();
  application.Render(RENDER_FRAME_INTERVAL);
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::ZERO), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::ZERO), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), FULL_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), HALF_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );

  // Stop 2: pan overload, stopped at 40%.
  crossEffect.SetTargetTexture( firstTexture );
  crossEffect.StartTransition(PAN_POSITION1, PAN_DISPLACEMENT1);
  Wait( application, TRANSITION_DURATION*0.4f );
  crossEffect.StopTransition();
  application.SendNotification();
  application.Render(RENDER_FRAME_INTERVAL);
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::ZERO), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::ZERO), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), FULL_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), HALF_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );

  // Stop 3: backward boolean overload, stopped at 60%.
  crossEffect.SetTargetTexture( secondTexture );
  crossEffect.StartTransition(false);
  Wait( application, TRANSITION_DURATION*0.6f );
  crossEffect.StopTransition();
  application.SendNotification();
  application.Render(RENDER_FRAME_INTERVAL);
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::ZERO), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::ZERO), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), FULL_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), HALF_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );

  // Stop 4: opposite pan overload, stopped at 80%. Note the expected
  // quaternions use ANGLE_0 about YAXIS/XAXIS here instead of ZERO — with a
  // zero angle these all compare equal to the identity rotation.
  crossEffect.SetTargetTexture( firstTexture );
  crossEffect.StartTransition(PAN_POSITION2, PAN_DISPLACEMENT2);
  Wait( application, TRANSITION_DURATION*0.8f );
  crossEffect.StopTransition();
  application.SendNotification();
  application.Render(RENDER_FRAME_INTERVAL);
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::YAXIS), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::XAXIS), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), FULL_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), HALF_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );
  END_TEST;
}
// Verifies that stopping a fold transition part-way through (at 20%, 40%,
// 60% and 80% of the duration, one per StartTransition overload) resets both
// cubes of the pair to identity orientation and the original face colors.
// All expected quaternions use a zero angle, so the axis choice is inert.
int UtcDaliCubeTransitionFoldEffectStopTransition(void)
{
  ToolkitTestApplication application;
  tet_infoline(" UtcDaliCubeTransitionFoldEffectStopTransition ");

  // Pretend the off-screen framebuffer is usable in the stubbed GL.
  application.GetGlAbstraction().SetCheckFramebufferStatusResult(GL_FRAMEBUFFER_COMPLETE );

  Texture firstTexture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 30, 30 );
  Texture secondTexture = Texture::New( TextureType::TEXTURE_2D, Pixel::RGBA8888, 20, 20 );

  // Fold effect has no cube-displacement parameter.
  CubeTransitionEffect foldEffect = CubeTransitionFoldEffect::New( NUM_ROWS, NUM_COLUMNS );
  foldEffect.SetSize( VIEW_AREA_SIZE );
  foldEffect.SetTransitionDuration( TRANSITION_DURATION );
  foldEffect.SetCurrentTexture( firstTexture );
  foldEffect.SetTargetTexture( secondTexture );

  Stage::GetCurrent().Add( foldEffect );
  application.SendNotification();
  application.Render();

  // Stop 1: boolean overload, stopped at 20% of the duration.
  foldEffect.StartTransition(true);

  Actor cube0 = foldEffect.GetChildAt(0).GetChildAt(0);
  Actor cube1 = foldEffect.GetChildAt(0).GetChildAt(1);

  //check the cube rotation values and color values after stop the different transitions in the middle
  Wait( application, TRANSITION_DURATION*0.2f );
  foldEffect.StopTransition();
  application.SendNotification();
  application.Render(RENDER_FRAME_INTERVAL);
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::YAXIS), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::XAXIS), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), FULL_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), HALF_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );

  // Stop 2: pan overload, stopped at 40%.
  foldEffect.SetTargetTexture( firstTexture );
  foldEffect.StartTransition(PAN_POSITION1, PAN_DISPLACEMENT1);
  Wait( application, TRANSITION_DURATION*0.4f );
  foldEffect.StopTransition();
  application.SendNotification();
  application.Render(RENDER_FRAME_INTERVAL);
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::YAXIS), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::XAXIS), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), FULL_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), HALF_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );

  // Stop 3: backward boolean overload, stopped at 60%.
  foldEffect.SetTargetTexture( secondTexture );
  foldEffect.StartTransition(false);
  Wait( application, TRANSITION_DURATION*0.6f );
  foldEffect.StopTransition();
  application.SendNotification();
  application.Render(RENDER_FRAME_INTERVAL);
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::YAXIS), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::XAXIS), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), FULL_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), HALF_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );

  // Stop 4: opposite pan overload, stopped at 80%.
  foldEffect.SetTargetTexture( firstTexture );
  foldEffect.StartTransition(PAN_POSITION2, PAN_DISPLACEMENT2);
  Wait( application, TRANSITION_DURATION*0.8f );
  foldEffect.StopTransition();
  application.SendNotification();
  application.Render(RENDER_FRAME_INTERVAL);
  DALI_TEST_EQUALS( cube1.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::YAXIS), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetCurrentOrientation(), Quaternion( Dali::ANGLE_0, Vector3::YAXIS), FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(0).GetCurrentColor(), FULL_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );
  DALI_TEST_EQUALS( cube0.GetChildAt(1).GetCurrentColor(), HALF_BRIGHTNESS, FLT_EPISILON, TEST_LOCATION );
  END_TEST;
}
|
WarNote/StarsOfEternityBot
|
src/embeds/warframe/itemSearch/weapons/index.js
|
<reponame>WarNote/StarsOfEternityBot<gh_stars>0
const primeWeapon = require('./primeWeapon');
const weapon = require('./weapon');
module.exports = { weapon, primeWeapon };
|
DPlamenov/Homework-tasks
|
ProgrammingBasics02/NestedConditionalStatementsLab/TradeCommissions.java
|
<reponame>DPlamenov/Homework-tasks<gh_stars>0
package NestedConditionalStatementsLab;
import java.util.Scanner;
/**
 * SoftUni "Trade Commissions" exercise: reads a town name and a sales amount
 * from stdin and prints the commission for that town's rate table (two
 * decimals, no newline), or "error" for an unknown town or a negative amount.
 *
 * Rate tables per bracket [0..500], (500..1000], (1000..10000], (10000..):
 *   Sofia   5%    7%    8%   12%
 *   Varna   4.5%  7.5%  10%  13%
 *   Plovdiv 5.5%  8%    12%  14.5%
 */
public class TradeCommissions {

    /**
     * Computes the commission owed for {@code sold} in {@code town}.
     *
     * @param town one of "Sofia", "Varna" or "Plovdiv" (case sensitive)
     * @param sold amount sold; must be non-negative
     * @return the commission, or -1 when the town is unknown or sold is negative
     */
    public static double commission(String town, double sold) {
        if (sold < 0) {
            return -1; // negative sales are invalid in every town
        }
        switch (town) {
            case "Sofia":
                if (sold <= 500) return 0.05 * sold;
                if (sold <= 1000) return 0.07 * sold;
                if (sold <= 10000) return 0.08 * sold;
                return 0.12 * sold;
            case "Varna":
                if (sold <= 500) return 0.045 * sold;
                if (sold <= 1000) return 0.075 * sold;
                if (sold <= 10000) return 0.10 * sold;
                return 0.13 * sold;
            case "Plovdiv":
                if (sold <= 500) return 0.055 * sold;
                if (sold <= 1000) return 0.08 * sold;
                if (sold <= 10000) return 0.12 * sold;
                return 0.145 * sold;
            default:
                return -1; // unknown town
        }
    }

    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);
        String town = scanner.nextLine();
        double sold = Double.parseDouble(scanner.nextLine());

        double result = commission(town, sold);
        if (result < 0) {
            System.out.println("error");
        } else {
            // BUG FIX: the original guarded the output with `result != 0`, so a
            // valid sale of 0 printed nothing at all; "0.00" is now printed.
            System.out.printf("%.2f", result);
        }
    }
}
|
iu-uits-es/ojb
|
src/java/org/odmg/DMap.java
|
<reponame>iu-uits-es/ojb
package org.odmg;
/**
 * The ODMG Map collection interface.
 * <p>
 * All of the operations defined by the JavaSoft <code>Map</code>
 * interface are supported by an ODMG implementation of <code>DMap</code>;
 * the exception <code>UnsupportedOperationException</code> is not thrown when a
 * call is made to any of the <code>Map</code> methods.
 * <p>
 * This interface adds no methods of its own; it exists as the ODMG-typed
 * marker for map collections. The raw (non-generic) extends clause is kept
 * as-is — the ODMG 3.0 specification predates Java generics.
 *
 * @author <NAME> (as Java Editor of the Object Data Management Group)
 * @version ODMG 3.0
 */
// * @see com.sun.java.util.collections.UnsupportedOperationException
public interface DMap extends java.util.Map
{
}
|
GO-Eratosthenes/dhdt
|
dhdt/generic/handler_www.py
|
<reponame>GO-Eratosthenes/dhdt
# generic libraries
import os
import tarfile
import zipfile
import urllib.request
import ftps # for Copernicus FTP-download
# geospatial libaries
from osgeo import gdal
def get_file_from_ftps(url, user, password,
                       file_path, file_name, dump_dir=os.getcwd()):
    """ Downloads a file from a ftps-server (Copernicus FTP download).

    Uses the third-party ``ftps`` package imported at module level.

    Parameters
    ----------
    url : string
        server address
    user : string
        username
    password : string
        password for access
    file_path : string
        location on the server
    file_name : string
        name of the file
    dump_dir : string
        path to place the content
        NOTE(review): the default is evaluated once at import time, so it is
        the cwd of the importing process, not of the caller — confirm intended
    """
    # os.path.join below would already handle the separator; this append only
    # normalises dump_dir to end with '/'
    if dump_dir[-1]!='/':
        dump_dir += '/'
    # credentials are embedded in the connection URI
    client = ftps.FTPS('ftps://' +user+ ':' +password+ '@' +url)
    # NOTE(review): list() appears to be issued before download —
    # presumably required to initialise the session; confirm against the
    # ftps package documentation
    client.list()
    client.download( os.path.join(file_path, file_name),
                     os.path.join(dump_dir, file_name))
    return
def url_exist(file_url):
    """ Check if an url exists and is reachable.

    Parameters
    ----------
    file_url : string
        url of www location

    Returns
    -------
    verdict : dtype=boolean
        verdict if present
    """
    # BUG FIX: the original computed `urlopen(...).code == 200` but discarded
    # the result and returned True unconditionally on any successful open.
    try:
        with urllib.request.urlopen(file_url) as response:
            # non-HTTP schemes (e.g. file://) carry no status code; treat a
            # successful open as existing, matching the previous behaviour
            code = getattr(response, 'code', None)
            return code is None or code == 200
    except Exception:
        # URLError/HTTPError for unreachable targets, ValueError for
        # malformed urls — all mean "not present"
        return False
def get_tar_file(tar_url, dump_dir=None):
    """ Downloads and unpacks a gzipped tar archive.

    Parameters
    ----------
    tar_url : string
        url of world wide web location
    dump_dir : string, optional
        path to place the content; defaults to the current working directory

    Returns
    -------
    tar_names : list
        list of strings of file names within the compressed folder
    """
    if dump_dir is None:
        # evaluate the default at call time, not at import time
        dump_dir = os.getcwd()
    # BUG FIX: neither the network stream nor the tarfile were closed before;
    # stream mode "r|gz" unpacks while downloading, without a temp file
    with urllib.request.urlopen(tar_url) as ftp_stream, \
            tarfile.open(fileobj=ftp_stream, mode="r|gz") as tar_file:
        tar_file.extractall(path=dump_dir)
        tar_names = tar_file.getnames()
    return tar_names
def get_zip_file(zip_url, dump_dir=None):
    """ Downloads and unpacks a zip archive.

    Parameters
    ----------
    zip_url : string
        url of world wide web location
    dump_dir : string, optional
        path to place the content; defaults to the current working directory
    """
    if dump_dir is None:
        # evaluate the default at call time, not at import time
        dump_dir = os.getcwd()
    # BUG FIX: paths were built with `dump_dir + 'tempfile.zip'`, which lands
    # outside dump_dir whenever it lacks a trailing separator; file handles
    # were also never closed and the temp file leaked on extraction errors.
    temp_zip_path = os.path.join(dump_dir, 'tempfile.zip')
    # zipfile needs random access, so buffer the download on disk first
    with urllib.request.urlopen(zip_url) as zip_resp, \
            open(temp_zip_path, 'wb') as temp_zip:
        temp_zip.write(zip_resp.read())
    try:
        with zipfile.ZipFile(temp_zip_path) as zf:
            zf.extractall(path=dump_dir)
    finally:
        os.remove(temp_zip_path)
def bulk_download_and_mosaic(url_list, dem_path, sat_tile, bbox, crs, new_res=10):
    """ Download each DEM granule in ``url_list``, warp it onto a common grid
    and mosaic everything into ``<sat_tile>_DEM.tif`` inside ``dem_path``.

    Parameters
    ----------
    url_list : list
        urls of the DEM granules (tar archives) to fetch
    dem_path : string
        directory where downloads and the mosaic are written
    sat_tile : string
        satellite tile id used to name the output mosaic
    bbox : sequence
        output bounds; indexed as bbox[0], bbox[2], bbox[1], bbox[3] below,
        # NOTE(review): so presumably ordered (xmin, xmax, ymin, ymax) — confirm
    crs : string
        target spatial reference passed to gdal.Warp as dstSRS
    new_res : integer
        target resolution for the mosaic (default 10)
    """
    for i in range(len(url_list)):
        gran_url = url_list[i]
        # prefer a granule already published at the requested resolution
        gran_url_new = change_url_resolution(gran_url,new_res)

        # download and integrate DEM data into tile
        print('starting download of DEM tile')
        if url_exist(gran_url_new):
            tar_names = get_tar_file(gran_url_new, dem_path)
        else:
            # fall back to the original-resolution granule
            tar_names = get_tar_file(gran_url, dem_path)
        print('finished download of DEM tile')

        # load data, interpolate into grid
        dem_name = [s for s in tar_names if 'dem.tif' in s]
        if i ==0:
            # the first granule becomes the mosaic file directly
            dem_new_name = sat_tile + '_DEM.tif'
        else:
            # later granules are warped to a temporary *_utm.tif first
            dem_new_name = dem_name[0][:-4]+'_utm.tif'
        ds = gdal.Warp(os.path.join(dem_path, dem_new_name),
                       os.path.join(dem_path, dem_name[0]),
                       dstSRS=crs,
                       outputBounds=(bbox[0], bbox[2], bbox[1], bbox[3]),
                       xRes=new_res, yRes=new_res,
                       outputType=gdal.GDT_Float64)
        # releasing the reference flushes and closes the GDAL dataset
        ds = None

        if i>0: # mosaic tiles together
            # gdal_merge.py is invoked through the active conda environment;
            # the merge writes in place onto the running mosaic
            merge_command = ['python', 'gdal_merge.py',
                             '-o', os.path.join(dem_path, sat_tile + '_DEM.tif'),
                             os.path.join(dem_path, sat_tile + '_DEM.tif'),
                             os.path.join(dem_path, dem_new_name)]
            my_env = os.environ['CONDA_DEFAULT_ENV']
            os.system('conda run -n ' + my_env + ' '+
                      ' '.join(merge_command[1:]))
            os.remove(os.path.join(dem_path,dem_new_name))
        # clean up the unpacked archive contents
        for fn in tar_names:
            os.remove(os.path.join(dem_path,fn))
def change_url_resolution(url_string, new_res):
    """ The file name can have the spatial resolution within; this function
    replaces that resolution string.

    Parameters
    ----------
    url_string : string
        url of world wide web location
    new_res : integer
        new resolution (10, 32, ...)

    Returns
    -------
    url_string : string
        url of the new world wide web location; returned unchanged when no
        resolution token (e.g. '2m') is present
    """
    # resolution tokens look like '2m', '10m', '32m' between underscores
    props = url_string.split('_')
    old_res = None
    for i in range(1, len(props)):
        if props[i] and props[i][-1] == 'm':
            old_res = props[i]
            props[i] = str(new_res) + 'm'
    if old_res is None:
        # nothing to rewrite; the original code crashed here with an
        # unbound 'old_res'
        return url_string
    if (old_res == '2m') and (new_res == 10):
        # the 2 meter files are subdivided in quads: drop the quad indices
        props = props[:-4] + props[-2:]
    url_string_2 = '_'.join(props)
    # the resolution also appears as a folder name in the path
    folders = url_string_2.split('/')
    for i in range(len(folders)):
        if folders[i] == old_res:
            folders[i] = str(new_res) + 'm'
    return '/'.join(folders)
def reduce_duplicate_urls(url_list):
    """ Because the shapefiles are in 2 meter, the tiles are 4 fold; therefore
    make a selection, to bypass duplicates.

    Keeps the first url seen for every tile, where the tile id is the
    second-to-last path element of the url.

    Parameters
    ----------
    url_list : list
        list of strings with url's of www locations

    Returns
    -------
    url_list : list
        reduced list of strings with url's of www locations, in first-seen
        order (the previous set-based version returned an arbitrary,
        hash-randomized order and did an O(n^2) index scan)
    """
    first_per_tile = {}
    for url in url_list:
        tile = url.split('/')[-2]
        # dicts preserve insertion order, so output order is deterministic
        if tile not in first_per_tile:
            first_per_tile[tile] = url
    # print('reduced to '+str(len(first_per_tile))+ ' elevation chips')
    return list(first_per_tile.values())
|
AZANIR/adwancer-selenium-webdriwer-test
|
src/test/java/com/herokuapp/theinternet/draganddroptests/DragAndDropTests.java
|
package com.herokuapp.theinternet.draganddroptests;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.herokuapp.theinternet.base.TestUtilities;
import com.herokuapp.theinternet.pages.DragAndDropPage;
/**
 * Verifies that dragging box A onto box B swaps the two column headers on
 * the drag-and-drop demo page.
 */
public class DragAndDropTests extends TestUtilities {

    @Test
    public void dragAToBTest() {
        log.info("Starting dragAToBTest");

        // Open DragAndDropPage
        DragAndDropPage dragAndDropPage = new DragAndDropPage(driver, log);
        dragAndDropPage.openPage();

        // Drag box A and drop it on box B
        dragAndDropPage.dragAtoB();

        // Verify correct headers in correct boxes after the swap
        String columnAText = dragAndDropPage.getColumnAText();
        Assert.assertTrue(columnAText.equals("B"), "Column A header should be B, but it is: " + columnAText);

        // Fixed copy-paste bug: this failure message previously claimed to
        // be about column A while asserting on column B.
        String columnBText = dragAndDropPage.getColumnBText();
        Assert.assertTrue(columnBText.equals("A"), "Column B header should be A, but it is: " + columnBText);
    }
}
|
wasong/cmpt433
|
as2_final/display.c
|
#include "fileHandler.h"
#include "display.h"
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
/*
 * Export the two GPIO pins used by the 2-digit display and configure them
 * as outputs.  Pins 61 and 44 gate which digit is powered — presumably
 * 61 = left and 44 = right, matching LEFT_/RIGHT_DIGIT_DIR; confirm
 * against the pin paths in display.h.
 */
void initDisplay() {
	// writeToFile("/sys/devices/platform/bone_capemgr/slots", "BB-I2C1");
	printf("Initiating display\n");
	exportGPIOPin(61);
	exportGPIOPin(44);
	writeToFile(LEFT_DIGIT_DIR, "out");
	writeToFile(RIGHT_DIGIT_DIR, "out");
}
/* Turn the left digit on and the right digit off (left is enabled first,
 * so both digits are briefly lit between the two sysfs writes). */
void showLeft() {
	writeToFile(LEFT_DIGIT_VAL, "1");
	writeToFile(RIGHT_DIGIT_VAL, "0");
}
/* Turn the right digit on and the left digit off (mirror of showLeft). */
void showRight() {
	writeToFile(RIGHT_DIGIT_VAL, "1");
	writeToFile(LEFT_DIGIT_VAL, "0");
}
|
maomanqi/azkaban
|
azkaban-db/src/main/java/azkaban/db/DatabaseOperatorImpl.java
|
<reponame>maomanqi/azkaban
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.db;
import static java.util.Objects.*;
import com.google.inject.Inject;
import java.sql.Connection;
import java.sql.SQLException;
import org.apache.commons.dbutils.DbUtils;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.ResultSetHandler;
import org.apache.log4j.Logger;
/**
 * Implement AZ DB related operations. This class is thread safe.
 */
public class DatabaseOperatorImpl implements DatabaseOperator {

  private static final Logger logger = Logger.getLogger(DatabaseOperatorImpl.class);

  private final QueryRunner queryRunner;

  /**
   * Note: this queryRunner should include a concrete {@link AzkabanDataSource} inside.
   */
  @Inject
  public DatabaseOperatorImpl(final QueryRunner queryRunner) {
    requireNonNull(queryRunner.getDataSource(), "data source must not be null.");
    this.queryRunner = queryRunner;
  }

  /**
   * query method Implementation. it will call {@link AzkabanDataSource#getConnection()} inside
   * queryrunner.query.
   */
  @Override
  public <T> T query(final String baseQuery, final ResultSetHandler<T> resultHandler,
      final Object... params)
      throws SQLException {
    try {
      return this.queryRunner.query(baseQuery, resultHandler, params);
    } catch (final SQLException ex) {
      // todo kunkun-tang: Retry logics should be implemented here.
      logger.error("query failed", ex);
      throw ex;
    }
  }

  /**
   * transaction method Implementation. Auto-commit is disabled for the duration of
   * {@code operations}; the transaction is committed on success and explicitly rolled
   * back on failure before the connection is closed.
   */
  @Override
  public <T> T transaction(final SQLTransaction<T> operations) throws SQLException {
    Connection conn = null;
    try {
      conn = this.queryRunner.getDataSource().getConnection();
      conn.setAutoCommit(false);
      final DatabaseTransOperator transOperator = new DatabaseTransOperatorImpl(this.queryRunner,
          conn);
      final T res = operations.execute(transOperator);
      conn.commit();
      return res;
    } catch (final SQLException ex) {
      // Roll back explicitly instead of relying on driver-specific
      // close-without-commit behavior (previously the failed transaction
      // was only closed, never rolled back).
      rollbackQuietly(conn);
      // todo kunkun-tang: Retry logics should be implemented here.
      logger.error("transaction failed", ex);
      throw ex;
    } finally {
      DbUtils.closeQuietly(conn);
    }
  }

  /** Rolls back the given connection, logging (not propagating) any failure. */
  private static void rollbackQuietly(final Connection conn) {
    if (conn == null) {
      return;
    }
    try {
      conn.rollback();
    } catch (final SQLException rollbackEx) {
      logger.error("rollback failed", rollbackEx);
    }
  }

  /**
   * update implementation. it will call {@link AzkabanDataSource#getConnection()} inside
   * queryrunner.update.
   *
   * @param updateClause sql statements to execute
   * @param params Initialize the PreparedStatement's IN parameters
   * @return the number of rows being affected by update
   */
  @Override
  public int update(final String updateClause, final Object... params) throws SQLException {
    try {
      return this.queryRunner.update(updateClause, params);
    } catch (final SQLException ex) {
      // todo kunkun-tang: Retry logics should be implemented here.
      logger.error("update failed", ex);
      throw ex;
    }
  }
}
|
EdgardOliveira/MedAlertApp
|
app/src/main/java/br/com/technologies/venom/medalertapp/models/Usuario.java
|
package br.com.technologies.venom.medalertapp.models;
import android.os.Parcel;
import android.os.Parcelable;
import androidx.room.Ignore;
/**
 * Holds a user's credentials and profile data (name, email, password,
 * backend token).  Implements {@link Parcelable} so instances can be passed
 * between Android components; the {@code @Ignore}-annotated constructors are
 * hidden from Room's entity processing.
 * <p>
 * NOTE(review): {@code senha} (password) is stored as a plain String field
 * here — confirm it is never persisted or logged unencrypted.
 */
public class Usuario implements Parcelable {
    private String nome;   // display name
    private String email;  // login identifier
    private String senha;  // password
    private String token;  // auth token issued by the backend

    public String getNome() {
        return nome;
    }

    public void setNome(String nome) {
        this.nome = nome;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getSenha() {
        return senha;
    }

    public void setSenha(String senha) {
        this.senha = senha;
    }

    public String getToken() {
        return token;
    }

    public void setToken(String token) {
        this.token = token;
    }

    /** No-arg constructor used by Room / serialization frameworks. */
    public Usuario(){
    }

    /** Login-request constructor (credentials only). */
    @Ignore
    public Usuario(String email, String senha) {
        this.email = email;
        this.senha = senha;
    }

    /** Fully-populated constructor, e.g. for a backend response. */
    @Ignore
    public Usuario(String nome, String email, String senha, String token) {
        this.nome = nome;
        this.email = email;
        this.senha = senha;
        this.token = token;
    }

    @Override
    public int describeContents() {
        return 0;
    }

    // Parcel write order must stay in sync with the Parcel constructor below.
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeString(this.nome);
        dest.writeString(this.email);
        dest.writeString(this.senha);
        dest.writeString(this.token);
    }

    protected Usuario(Parcel in) {
        this.nome = in.readString();
        this.email = in.readString();
        this.senha = in.readString();
        this.token = in.readString();
    }

    public static final Creator<Usuario> CREATOR = new Creator<Usuario>() {
        @Override
        public Usuario createFromParcel(Parcel source) {
            return new Usuario(source);
        }

        @Override
        public Usuario[] newArray(int size) {
            return new Usuario[size];
        }
    };
}
|
BigSully/Spring-Security-Third-Edition
|
Chapter03/chapter03.03-calendar/src/main/java/com/packtpub/springsecurity/userdetails/CalendarUserDetailsService.java
|
package com.packtpub.springsecurity.userdetails;
import java.util.Collection;
import com.packtpub.springsecurity.core.authority.CalendarUserAuthorityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Component;
import com.packtpub.springsecurity.dataaccess.CalendarUserDao;
import com.packtpub.springsecurity.domain.CalendarUser;
/**
 * Bridges Spring Security to the application's own user store: the username
 * (an email address) is resolved through {@link CalendarUserDao} to a
 * {@link CalendarUser}, which is then adapted into a {@link UserDetails}
 * so Spring Security can perform the username/password comparison.
 *
 * @author <NAME>
 *
 */
@Component
public class CalendarUserDetailsService implements UserDetailsService {

    private static final Logger logger = LoggerFactory
            .getLogger(CalendarUserDetailsService.class);

    private final CalendarUserDao calendarUserDao;

    @Autowired
    public CalendarUserDetailsService(final CalendarUserDao calendarUserDao) {
        if (calendarUserDao == null) {
            throw new IllegalArgumentException("calendarUserDao cannot be null");
        }
        this.calendarUserDao = calendarUserDao;
    }

    /**
     * Looks up the {@link CalendarUser} whose email matches {@code username}
     * and converts it to a Spring Security {@link User}.
     *
     * @throws UsernameNotFoundException when no such user exists
     */
    @Override
    public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
        final CalendarUser calendarUser = calendarUserDao.findUserByEmail(username);
        if (calendarUser == null) {
            throw new UsernameNotFoundException("Invalid username/password.");
        }
        return new User(
                calendarUser.getEmail(),
                calendarUser.getPassword(),
                CalendarUserAuthorityUtils.createAuthorities(calendarUser));
    }
}
|
seasuresh1/cov-backend
|
src/main/java/com/covid/dto/PatientProviderRelationshipDto.java
|
package com.covid.dto;
import java.util.Date;
/**
 * Data-transfer object describing a relationship between a patient and a
 * care provider at a given location, with an optional validity period
 * (start/end dates).  IDs are presumably database keys into the patient,
 * provider and location tables — confirm against the persistence layer.
 */
public class PatientProviderRelationshipDto {
    private int patientId;
    private int providerId;
    private String relationshipType;   // e.g. free-form type label; semantics defined by callers
    private int locationId;
    private Date relationshipStartDate;
    private Date relationshipEndDate;  // may be null while the relationship is ongoing — TODO confirm

    public int getPatientId() {
        return patientId;
    }

    public void setPatientId(int patientId) {
        this.patientId = patientId;
    }

    public int getProviderId() {
        return providerId;
    }

    public void setProviderId(int providerId) {
        this.providerId = providerId;
    }

    public String getRelationshipType() {
        return relationshipType;
    }

    public void setRelationshipType(String relationshipType) {
        this.relationshipType = relationshipType;
    }

    public int getLocationId() {
        return locationId;
    }

    public void setLocationId(int locationId) {
        this.locationId = locationId;
    }

    public Date getRelationshipStartDate() {
        return relationshipStartDate;
    }

    public void setRelationshipStartDate(Date relationshipStartDate) {
        this.relationshipStartDate = relationshipStartDate;
    }

    public Date getRelationshipEndDate() {
        return relationshipEndDate;
    }

    public void setRelationshipEndDate(Date relationshipEndDate) {
        this.relationshipEndDate = relationshipEndDate;
    }
}
|
shisheng-1/guice-persist-orient
|
src/main/java/ru/vyarus/guice/persist/orient/db/scheme/initializer/core/spi/SchemeDescriptor.java
|
package ru.vyarus.guice.persist.orient.db.scheme.initializer.core.spi;
import com.google.common.collect.Maps;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import java.util.List;
import java.util.Map;
/**
 * Model initialization descriptor.
 * <p>
 * NOTE(review): all fields are intentionally public and mutable (extensions
 * fill them in during scheme initialization); this class is not thread-safe —
 * presumably it is only touched from a single initialization pass, confirm
 * before sharing across threads.
 *
 * @author <NAME>
 * @since 04.03.2015
 */
@SuppressWarnings("checkstyle:VisibilityModifier")
@SuppressFBWarnings("URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
public class SchemeDescriptor {

    /**
     * Model class.
     */
    public Class<?> modelClass;

    /**
     * String representation of orient scheme class name (modelClass.simpleName).
     */
    public String schemeClass;

    /**
     * Model class hierarchy types (ignoring interfaces, only types that will be mapped to orient schema).
     */
    public List<Class<?>> modelHierarchy;

    /**
     * Model hierarchy root class.
     */
    public Class<?> modelRootClass;

    /**
     * True if model doesn't exist in scheme.
     * May be modified by extension, e.g. in case of rename extension.
     */
    public boolean initialRegistration;

    /**
     * True when orient registration performed for model.
     */
    public boolean registered;

    /**
     * Shared storage for extensions. May be used to exchange between extensions or to store some
     * data between before/after extension methods (if singleton extension used).
     */
    public Map<String, Object> ext = Maps.newHashMap();
}
|
ZFGCCP/ZFGC3
|
src/main/java/com/zfgc/mappers/AvatarStagingDbObjMapper.java
|
<filename>src/main/java/com/zfgc/mappers/AvatarStagingDbObjMapper.java
package com.zfgc.mappers;
import com.zfgc.dbobj.AvatarStagingDbObj;
import com.zfgc.dbobj.AvatarStagingDbObjExample;
import java.util.List;
import org.apache.ibatis.annotations.Param;
public interface AvatarStagingDbObjMapper {
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table AVATAR_STAGING
*
* @mbg.generated Thu Oct 31 01:30:06 EDT 2019
*/
long countByExample(AvatarStagingDbObjExample example);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table AVATAR_STAGING
*
* @mbg.generated Thu Oct 31 01:30:06 EDT 2019
*/
int deleteByExample(AvatarStagingDbObjExample example);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table AVATAR_STAGING
*
* @mbg.generated Thu Oct 31 01:30:06 EDT 2019
*/
int deleteByPrimaryKey(Integer avatarStagingId);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table AVATAR_STAGING
*
* @mbg.generated Thu Oct 31 01:30:06 EDT 2019
*/
int insert(AvatarStagingDbObj record);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table AVATAR_STAGING
*
* @mbg.generated Thu Oct 31 01:30:06 EDT 2019
*/
int insertSelective(AvatarStagingDbObj record);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table AVATAR_STAGING
*
* @mbg.generated Thu Oct 31 01:30:06 EDT 2019
*/
List<AvatarStagingDbObj> selectByExample(AvatarStagingDbObjExample example);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table AVATAR_STAGING
*
* @mbg.generated Thu Oct 31 01:30:06 EDT 2019
*/
AvatarStagingDbObj selectByPrimaryKey(Integer avatarStagingId);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table AVATAR_STAGING
*
* @mbg.generated Thu Oct 31 01:30:06 EDT 2019
*/
int updateByExampleSelective(@Param("record") AvatarStagingDbObj record, @Param("example") AvatarStagingDbObjExample example);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table AVATAR_STAGING
*
* @mbg.generated Thu Oct 31 01:30:06 EDT 2019
*/
int updateByExample(@Param("record") AvatarStagingDbObj record, @Param("example") AvatarStagingDbObjExample example);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table AVATAR_STAGING
*
* @mbg.generated Thu Oct 31 01:30:06 EDT 2019
*/
int updateByPrimaryKeySelective(AvatarStagingDbObj record);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table AVATAR_STAGING
*
* @mbg.generated Thu Oct 31 01:30:06 EDT 2019
*/
int updateByPrimaryKey(AvatarStagingDbObj record);
}
|
instill-ai/pipeline-backend
|
pkg/datamodel/error.go
|
package datamodel
// Error is a JSON-serializable API error payload with a
// status/title/detail shape (RFC 7807-like — confirm against the handlers
// that emit it).
type Error struct {
	Status int32  `json:"status,omitempty"` // HTTP-style status code
	Title  string `json:"title,omitempty"`  // short, human-readable summary
	Detail string `json:"detail,omitempty"` // longer explanation of this occurrence
}
|
faetools/go-hubspot
|
vendor/github.com/faetools/devtool/format/writers/state-writer.go
|
<filename>vendor/github.com/faetools/devtool/format/writers/state-writer.go
package writers
type (
// StateFunc is a function that writes and returns another state for the state writer.
StateFunc func([]byte) (StateFunc, []byte, int, error)
stateWriter struct{ state StateFunc }
)
var _ Writer = (*stateWriter)(nil)
// NewStateWriter returns a new writer that changes its state based on the given state function.
func NewStateWriter(start StateFunc) Writer {
return &stateWriter{state: start}
}
// Write feeds p through the current state function, accumulating the byte
// count and advancing w.state, until the input is fully consumed or a
// state reports an error.
func (w *stateWriter) Write(p []byte) (size int, err error) {
	var written int
	for {
		if len(p) == 0 {
			return
		}
		// each call may consume part of p and hand back the remainder
		// together with the next state
		w.state, p, written, err = w.state(p)
		size += written
		if err != nil {
			return
		}
	}
}
// WriteByte runs a single byte through the state function; if the state
// hands the byte back unconsumed (a state change), it is retried against
// the new state.
func (w *stateWriter) WriteByte(b byte) (err error) {
	var p []byte
	w.state, p, _, err = w.state([]byte{b})
	switch {
	case err != nil:
		return err
	case len(p) > 0:
		// state has changed, do again
		return w.WriteByte(p[0])
	default:
		return nil
	}
}
// WriteString writes s by delegating to Write.
func (w *stateWriter) WriteString(s string) (int, error) {
	return w.Write([]byte(s))
}
|
yelircaasi/cltk
|
src/cltk/alphabet/arb.py
|
"""The Arabic alphabet. Sources:
- ``pyarabic`` `<https://github.com/linuxscout/pyarabic>`_
- ``arabicstemmer`` `<https://github.com/assem-ch/arabicstemmer/blob/master/algorithm/stemmer.sbl>`_
>>> from cltk.alphabet import arb
>>> arb.LETTERS[:5]
('ا', 'ب', 'ت', 'ة', 'ث')
>>> arb.PUNCTUATION_MARKS
['،', '؛', '؟']
>>> arb.ALEF
'ا'
>>> arb.WEAK
('ا', 'و', 'ي', 'ى')
"""
__author__ = "<NAME> <lakhdar[.]benzahia[at]gmail[.]com>"
# Hamza letter
HAMZA = "\u0621"
HAMZA_ABOVE_ALEF = "\u0623"
HAMZA_BELOW_ALEF = "\u0625"
ALEF_MADDA = "\u0622"
HAMZA_ABOVE_WAW = "\u0624"
HAMZA_ABOVE_YEH = "\u0626"
ALEF = "\u0627"
ALEF_MAKSURA = "\u0649"
BEH = "\u0628"
TEH_MARBUTA = "\u0629"
TEH = "\u062a"
THEH = "\u062b"
JEEM = "\u062c"
HAH = "\u062d"
KHAH = "\u062e"
DAL = "\u062f"
THEL = "\u0630"
REH = "\u0631"
ZAIN = "\u0632"
SEEN = "\u0633"
SHEEN = "\u0634"
SAD = "\u0635"
DAD = "\u0636"
TAH = "\u0637"
ZAH = "\u0638"
AIN = "\u0639"
GHAIN = "\u063a"
FEH = "\u0641"
QAF = "\u0642"
KAF = "\u0643"
LAM = "\u0644"
MEEM = "\u0645"
NOON = "\u0646"
HEH = "\u0647"
WAW = "\u0648"
YEH = "\u064a"
MINI_ALEF = "\u0670"
ALEF_WASLA = "\u0671"
MADDA_ABOVE = "\u0653"
HAMZA_ABOVE = "\u0654"
HAMZA_BELOW = "\u0655"
# Small Letters
SMALL_ALEF = "\u0670"
SMALL_WAW = "\u06E5"
SMALL_YEH = "\u06E6"
# Ligatures Lam-Alef
LAM_ALEF = "\ufefb"
LAM_ALEF_HAMZA_ABOVE = "\ufef7"
LAM_ALEF_HAMZA_BELOW = "\ufef9"
LAM_ALEF_MADDA_ABOVE = "\ufef5"
SIMPLE_LAM_ALEF = "\u0644\u0627"
SIMPLE_LAM_ALEF_HAMZA_ABOVE = "\u0644\u0623"
SIMPLE_LAM_ALEF_HAMZA_BELOW = "\u0644\u0625"
SIMPLE_LAM_ALEF_MADDA_ABOVE = "\u0644\u0622"
# shaped forms
LAM_ALEF_ISOLATED = "\ufefb"
LAM_ALEF_FINAL = "\ufefc"
LAM_ALEF_HAMZA_ABOVE_ISOLATED = "\ufef7"
LAM_ALEF_HAMZA_ABOVE_FINAL = "\ufef8"
LAM_ALEF_HAMZA_BELOW_ISOLATED = "\ufef9"
LAM_ALEF_HAMZA_BELOW_FINAL = "\ufefa"
LAM_ALEF_MADDA_ABOVE_ISOLATED = "\ufef5"
LAM_ALEF_MADDA_ABOVE_FINAL = "\ufef6"
HAMZA_ISOLATED = "\ufe80"
ALEF_HAMZA_ABOVE_ISOLATED = "\ufe83"
ALEF_HAMZA_ABOVE_FINAL = "\ufe84"
ALEF_HAMZA_BELOW_ISOLATED = "\ufe87"
ALEF_HAMZA_BELOW_FINAL = "\ufe88"
YEH_HAMZA_INITIAL = "\ufe8b"
YEH_HAMZA_MEDIAL = "\ufe8c"
YEH_HAMZA_ISOLATED = "\ufe89"
YEH_HAMZA_FINAL = "\ufe8a"
ALEF_MADDA_ISOLATED = "\ufe81"
ALEF_MADDA_FINAL = "\ufe82"
WAW_HAMZA_ISOLATED = "\ufe85"
WAW_HAMZA_FINAL = "\ufe86"
ALEF_ISOLATED = "\ufe8d"
ALEF_FINAL = "\ufe8e"
BEH_ISOLATED = "\ufe8f"
BEH_FINAL = "\ufe90"
BEH_INITIAL = "\ufe91"
BEH_MEDIAL = "\ufe92"
TEH_MARBUTA_ISOLATED = "\ufe93"
TEH_MARBUTA_FINAL = "\ufe94"
TEH_INITIAL = "\ufe97"
TEH_MEDIAL = "\ufe98"
TEH_ISOLATED = "\ufe95"
TEH_FINAL = "\ufe96"
THEH_INITIAL = "\ufe9b"
THEH_MEDIAL = "\ufe9c"
THEH_FINAL = "\ufe9a"
THEH_ISOLATED = "\ufe99"
JEEM_INITIAL = "\ufe9f"
JEEM_MEDIAL = "\ufea0"
JEEM_ISOLATED = "\ufe9d"
JEEM_FINAL = "\ufe9e"
HAH_INITIAL = "\ufea3"
HAH_MEDIAL = "\ufea4"
HAH_ISOLATED = "\ufea1"
HAH_FINAL = "\ufea2"
KHAH_INITIAL = "\ufea7"
KHAH_MEDIAL = "\ufea8"
KHAH_ISOLATED = "\ufea5"
KHAH_FINAL = "\ufea6"
DAL_ISOLATED = "\ufea9"
DAL_FINAL = "\ufeaa"
THEL_ISOLATED = "\ufeab"
THEL_FINAL = "\ufeac"
REH_ISOLATED = "\ufead"
REH_FINAL = "\ufeae"
ZAIN_ISOLATED = "\ufeaf"
ZAIN_FINAL = "\ufeb0"
SEEN_INITIAL = "\ufeb3"
SEEN_MEDIAL = "\ufeb4"
SEEN_ISOLATED = "\ufeb1"
SEEN_FINAL = "\ufeb2"
SHEEN_INITIAL = "\ufeb7"
SHEEN_MEDIAL = "\ufeb8"
SHEEN_ISOLATED = "\ufeb5"
SHEEN_FINAL = "\ufeb6"
SAD_INITIAL = "\ufebb"
SAD_MEDIAL = "\ufebc"
SAD_ISOLATED = "\ufeb9"
SAD_FINAL = "\ufeba"
DAD_INITIAL = "\ufebf"
DAD_MEDIAL = "\ufec0"
DAD_ISOLATED = "\ufebd"
DAD_FINAL = "\ufebe"
TAH_INITIAL = "\ufec3"
TAH_MEDIAL = "\ufec4"
TAH_ISOLATED = "\ufec1"
TAH_FINAL = "\ufec2"
ZAH_INITIAL = "\ufec7"
ZAH_MEDIAL = "\ufec8"
ZAH_ISOLATED = "\ufec5"
ZAH_FINAL = "\ufec6"
AIN_INITIAL = "\ufecb"
AIN_MEDIAL = "\ufecc"
AIN_ISOLATED = "\ufec9"
AIN_FINAL = "\ufeca"
GHAIN_INITIAL = "\ufecf"
GHAIN_MEDIAL = "\ufed0"
GHAIN_ISOLATED = "\ufecd"
GHAIN_FINAL = "\ufece"
FEH_INITIAL = "\ufed3"
FEH_MEDIAL = "\ufed4"
FEH_ISOLATED = "\ufed1"
FEH_FINAL = "\ufed2"
QAF_INITIAL = "\ufed7"
QAF_MEDIAL = "\ufed8"
QAF_ISOLATED = "\ufed5"
QAF_FINAL = "\ufed6"
KAF_INITIAL = "\ufedb"
KAF_MEDIAL = "\ufedC"
KAF_ISOLATED = "\ufed9"
KAF_FINAL = "\ufeda"
LAM_INITIAL = "\ufedf"
# U+FEE0 is ARABIC LETTER LAM MEDIAL FORM; the previous value "\ufed0" is
# actually ARABIC LETTER GHAIN MEDIAL FORM (copy-paste off-by-one into the
# FED0 row).  SHAPED_FORMS[LAM] below picks up the corrected value.
LAM_MEDIAL = "\ufee0"
LAM_ISOLATED = "\ufedd"
LAM_FINAL = "\ufede"
MEEM_INITIAL = "\ufee3"
MEEM_MEDIAL = "\ufee4"
MEEM_ISOLATED = "\ufee1"
MEEM_FINAL = "\ufee2"
NOON_INITIAL = "\ufee7"
NOON_MEDIAL = "\ufee8"
NOON_ISOLATED = "\ufee5"
NOON_FINAL = "\ufee6"
HEH_INITIAL = "\ufeeb"
HEH_MEDIAL = "\ufeec"
HEH_ISOLATED = "\ufee9"
HEH_FINAL = "\ufeea"
WAW_ISOLATED = "\ufeed"
WAW_FINAL = "\ufeee"
ALEF_MAKSURA_ISOLATED = "\ufeef"
ALEF_MAKSURA_FINAL = "\ufef0"
YEH_INITIAL = "\ufef3"
YEH_MEDIAL = "\ufef4"
YEH_ISOLATED = "\ufef1"
YEH_FINAL = "\ufef2"
# Punctuation marks
COMMA = "\u060C"
SEMICOLON = "\u061B"
QUESTION = "\u061F"
# Kasheeda, Tatweel
KASHEEDA = "\u0640"
# Other symbols
PERCENT = "\u066a"
DECIMAL = "\u066b"
THOUSANDS = "\u066c"
STAR = "\u066d"
FULL_STOP = "\u06d4"
BYTE_ORDER_MARK = "\ufeff"
# Diacritics
FATHATAN = "\u064b"
DAMMATAN = "\u064c"
KASRATAN = "\u064d"
FATHA = "\u064e"
DAMMA = "\u064f"
KASRA = "\u0650"
SHADDA = "\u0651"
SUKUN = "\u0652"
# groups
HAMZAT = (
HAMZA,
HAMZA_ABOVE_ALEF,
HAMZA_BELOW_ALEF,
ALEF_MADDA,
HAMZA_ABOVE_WAW,
HAMZA_ABOVE_YEH,
HAMZA_ABOVE,
HAMZA_BELOW,
)
ALEFAT = (
ALEF,
ALEF_MADDA,
HAMZA_BELOW_ALEF,
HAMZA_ABOVE_ALEF,
ALEF_WASLA,
ALEF_MAKSURA,
SMALL_ALEF,
)
WEAK = (ALEF, WAW, YEH, ALEF_MAKSURA)
YEHLIKE = (YEH, HAMZA_ABOVE_YEH, ALEF_MAKSURA, SMALL_YEH)
WAWLIKE = (WAW, HAMZA_ABOVE_WAW, SMALL_WAW)
TEHLIKE = (TEH, TEH_MARBUTA)
SMALL = (SMALL_ALEF, SMALL_WAW, SMALL_YEH)
LETTERS = (
ALEF,
BEH,
TEH,
TEH_MARBUTA,
THEH,
JEEM,
HAH,
KHAH,
DAL,
THEL,
REH,
ZAIN,
SEEN,
SHEEN,
SAD,
DAD,
TAH,
ZAH,
AIN,
GHAIN,
FEH,
QAF,
KAF,
LAM,
MEEM,
NOON,
HEH,
WAW,
YEH,
HAMZA,
ALEF_MADDA,
HAMZA_ABOVE_ALEF,
HAMZA_ABOVE_WAW,
HAMZA_BELOW_ALEF,
HAMZA_ABOVE_YEH,
)
TASHKEEL = (FATHATAN, DAMMATAN, KASRATAN, FATHA, DAMMA, KASRA, SUKUN, SHADDA)
HARAKAT = (FATHATAN, DAMMATAN, KASRATAN, FATHA, DAMMA, KASRA, SUKUN)
SHORTHARAKAT = (FATHA, DAMMA, KASRA, SUKUN)
TANWEEN = (FATHATAN, DAMMATAN, KASRATAN)
NOT_DEF_HARAKA = KASHEEDA
LIGATURES_LAM_ALEF = (
LAM_ALEF,
LAM_ALEF_HAMZA_ABOVE,
LAM_ALEF_HAMZA_BELOW,
LAM_ALEF_MADDA_ABOVE,
)
ALPHABETIC_ORDER = {
ALEF: 1,
BEH: 2,
TEH: 3,
TEH_MARBUTA: 3,
THEH: 4,
JEEM: 5,
HAH: 6,
KHAH: 7,
DAL: 8,
THEL: 9,
REH: 10,
ZAIN: 11,
SEEN: 12,
SHEEN: 13,
SAD: 14,
DAD: 15,
TAH: 16,
ZAH: 17,
AIN: 18,
GHAIN: 19,
FEH: 20,
QAF: 21,
KAF: 22,
LAM: 23,
MEEM: 24,
NOON: 25,
HEH: 26,
WAW: 27,
YEH: 28,
HAMZA: 29,
ALEF_MADDA: 29,
HAMZA_ABOVE_ALEF: 29,
HAMZA_ABOVE_WAW: 29,
HAMZA_BELOW_ALEF: 29,
HAMZA_ABOVE_YEH: 29,
}
NAMES = {
ALEF: "ألف",
BEH: "باء",
TEH: "تاء",
TEH_MARBUTA: "تاء مربوطة",
THEH: "ثاء",
JEEM: "جيم",
HAH: "حاء",
KHAH: "خاء",
DAL: "دال",
THEL: "ذال",
REH: "راء",
ZAIN: "زاي",
SEEN: "سين",
SHEEN: "شين",
SAD: "صاد",
DAD: "ضاد",
TAH: "طاء",
ZAH: "ظاء",
AIN: "عين",
GHAIN: "غين",
FEH: "فاء",
QAF: "قاف",
KAF: "كاف",
LAM: "لام",
MEEM: "ميم",
NOON: "نون",
HEH: "هاء",
WAW: "واو",
YEH: "ياء",
HAMZA: "همزة",
KASHEEDA: "تطويل",
ALEF_MADDA: "ألف ممدودة",
ALEF_MAKSURA: "ألف مقصورة",
HAMZA_ABOVE_ALEF: "همزة على الألف",
HAMZA_ABOVE_WAW: "همزة على الواو",
HAMZA_BELOW_ALEF: "همزة تحت الألف",
HAMZA_ABOVE_YEH: "همزة على الياء",
FATHATAN: "فتحتان",
DAMMATAN: "ضمتان",
KASRATAN: "كسرتان",
FATHA: "فتحة",
DAMMA: "ضمة",
KASRA: "كسرة",
SHADDA: "شدة",
SUKUN: "سكون",
}
SHAPED_FORMS = {
HAMZA: HAMZA_ISOLATED,
HAMZA_ABOVE_ALEF: (ALEF_HAMZA_ABOVE_ISOLATED, ALEF_HAMZA_ABOVE_FINAL),
HAMZA_BELOW_ALEF: (ALEF_HAMZA_BELOW_ISOLATED, ALEF_HAMZA_BELOW_FINAL),
HAMZA_ABOVE_YEH: (
YEH_HAMZA_ISOLATED,
YEH_HAMZA_INITIAL,
YEH_HAMZA_MEDIAL,
YEH_HAMZA_FINAL,
),
ALEF_MADDA: (ALEF_MADDA_ISOLATED, ALEF_MADDA_FINAL),
HAMZA_ABOVE_WAW: (WAW_HAMZA_ISOLATED, WAW_HAMZA_FINAL),
ALEF: (ALEF_ISOLATED, ALEF_FINAL),
BEH: (BEH_ISOLATED, BEH_FINAL, BEH_INITIAL, BEH_MEDIAL),
TEH_MARBUTA: (TEH_MARBUTA_ISOLATED, TEH_MARBUTA_FINAL),
TEH: (TEH_ISOLATED, TEH_INITIAL, TEH_MEDIAL, TEH_FINAL),
THEH: (THEH_ISOLATED, THEH_INITIAL, THEH_MEDIAL, THEH_FINAL),
JEEM: (JEEM_ISOLATED, JEEM_INITIAL, JEEM_MEDIAL, JEEM_FINAL),
HAH: (HAH_ISOLATED, HAH_INITIAL, HAH_MEDIAL, HAH_FINAL),
KHAH: (KHAH_ISOLATED, KHAH_INITIAL, KHAH_MEDIAL, KHAH_FINAL),
DAL: (DAL_ISOLATED, DAL_FINAL),
THEL: (THEL_ISOLATED, THEL_FINAL),
REH: (REH_ISOLATED, REH_FINAL),
ZAIN: (ZAIN_ISOLATED, ZAIN_FINAL),
SEEN: (SEEN_ISOLATED, SEEN_INITIAL, SEEN_MEDIAL, SEEN_FINAL),
SHEEN: (SHEEN_ISOLATED, SHEEN_INITIAL, SHEEN_MEDIAL, SHEEN_FINAL),
SAD: (SAD_ISOLATED, SAD_INITIAL, SAD_MEDIAL, SAD_FINAL),
DAD: (DAD_ISOLATED, DAD_INITIAL, DAD_MEDIAL, DAD_FINAL),
TAH: (TAH_ISOLATED, TAH_INITIAL, TAH_MEDIAL, TAH_FINAL),
ZAH: (ZAH_ISOLATED, ZAH_INITIAL, ZAH_MEDIAL, ZAH_FINAL),
AIN: (AIN_ISOLATED, AIN_INITIAL, AIN_MEDIAL, AIN_FINAL),
GHAIN: (GHAIN_ISOLATED, GHAIN_INITIAL, GHAIN_MEDIAL, GHAIN_FINAL),
FEH: (FEH_ISOLATED, FEH_INITIAL, FEH_MEDIAL, FEH_FINAL),
QAF: (QAF_ISOLATED, QAF_INITIAL, QAF_MEDIAL, QAF_FINAL),
KAF: (KAF_ISOLATED, KAF_INITIAL, KAF_MEDIAL, KAF_FINAL),
LAM: (LAM_ISOLATED, LAM_INITIAL, LAM_MEDIAL, LAM_FINAL),
MEEM: (MEEM_ISOLATED, MEEM_INITIAL, MEEM_MEDIAL, MEEM_FINAL),
NOON: (NOON_ISOLATED, NOON_INITIAL, NOON_MEDIAL, NOON_FINAL),
HEH: (HEH_ISOLATED, HEH_INITIAL, HEH_MEDIAL, HEH_FINAL),
WAW: (WAW_ISOLATED, WAW_FINAL),
ALEF_MAKSURA: (ALEF_MAKSURA_ISOLATED, ALEF_MAKSURA_FINAL),
YEH: (YEH_ISOLATED, YEH_INITIAL, YEH_MEDIAL, YEH_FINAL),
LAM_ALEF: (LAM_ALEF_ISOLATED, LAM_ALEF_FINAL),
LAM_ALEF_HAMZA_ABOVE: (LAM_ALEF_HAMZA_ABOVE_ISOLATED, LAM_ALEF_HAMZA_ABOVE_FINAL),
LAM_ALEF_HAMZA_BELOW: (LAM_ALEF_HAMZA_BELOW_ISOLATED, LAM_ALEF_HAMZA_BELOW_FINAL),
LAM_ALEF_MADDA_ABOVE: (LAM_ALEF_MADDA_ABOVE_ISOLATED, LAM_ALEF_MADDA_ABOVE_FINAL),
}
PUNCTUATION_MARKS = [COMMA, SEMICOLON, QUESTION]
WESTERN_ARABIC_NUMERALS = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
# NOTE(review): this list mixes Eastern Arabic digits (U+0660-0669, e.g. "٤",
# "٦") with Extended/Persian digits (U+06F0-06F9, e.g. "۰", "۵") — confirm
# whether a single digit script was intended before relying on it.
EASTERN_ARABIC_NUMERALS = ["۰", "۱", "۲", "۳", "٤", "۵", "٦", "۷", "۸", "۹"]
|
ParradoxSVK/elite_buffer_vtrace
|
tests/stats_test.py
|
<reponame>ParradoxSVK/elite_buffer_vtrace<gh_stars>1-10
import shutil
import unittest
import os
from option_flags import change_args
from stats.stats import Statistics
from threading import Event
class TestStats(unittest.TestCase):
    """End-to-end checks that Statistics writes rollout data to its stat files."""

    def setUp(self):
        # fresh scratch directory per test; Statistics writes its files here
        if not os.path.exists("temp"):
            os.makedirs("temp")
        self.flags = change_args(max_avg_reward=10000000, max_episodes=10000000)
        self.stats = Statistics(Event(), "temp", self.flags, "", "", False, True)

    def tearDown(self) -> None:
        # shutil is already imported by this module; rmtree is portable and
        # does not silently do nothing when no `rm` binary exists (which is
        # what os.system('rm -rf temp') did on Windows)
        shutil.rmtree("temp", ignore_errors=True)

    def test_rollout_data_processing(self):
        """Rewards/steps fed to process_worker_rollout must land intact in the files."""
        start_rew_1 = [10.1, 5, 7, 6.3, 7, 6, 6, 8]
        start_rew_2 = [15, 5, 4, 7, 34, 6, 2, 0.5, 3, 4]
        start_ep_steps_1 = [0, 5, 4, 8, 21, 33, 47, 53]
        start_ep_steps_2 = [5, 5, 4, 7, 34, 60, 28, 71, 38, 29]
        self.stats.process_worker_rollout(start_rew_1, start_ep_steps_1)
        self.stats.process_worker_rollout(start_rew_2, start_ep_steps_2)
        total_start_rew = sum(start_rew_1) + sum(start_rew_2)
        total_start_steps = sum(start_ep_steps_1) + sum(start_ep_steps_2)
        total_end_rew = 0
        total_end_steps = 0
        # flush buffered stats to disk before reading the files back
        self.stats.file_writer.close()
        with open("temp/Scores.txt") as file:
            for line in file:
                total_end_rew += float(line.split(',')[0])
        with open("temp/Episode_steps.txt") as file:
            for line in file:
                total_end_steps += int(line.split(',')[0])
        self.assertEqual(total_start_rew, total_end_rew, 'total sum of rewards is not the same')
        self.assertEqual(total_start_steps, total_end_steps, 'total sum of episode_steps is not the same')

    def test_processing_unequal_data_length(self):
        """Mismatched reward/step list lengths must raise ValueError."""
        start_rew = [10.1, 5, 7, 6.3, 7, 6, 6, 8]
        start_ep_steps_good = [4, 5, 6, 6.3, 7, 7, 6, 11]
        start_ep_steps = [0, 5, 4.5]
        self.stats.process_worker_rollout(start_rew, start_ep_steps_good)
        with self.assertRaises(ValueError):
            self.stats.process_worker_rollout(start_rew, start_ep_steps)
        self.stats.file_writer.close()
|
dweng0/game
|
node_modules/@babylonjs/core/Audio/Interfaces/ISoundOptions.js
|
export {};
//# sourceMappingURL=ISoundOptions.js.map
|
danielogen/msc_research
|
selected projects/desktop/mars-sim-v3.1.0/mars-sim-core/org/mars_sim/msp/core/robot/ai/job/Makerbot.java
|
/**
* Mars Simulation Project
* Makerbot.java
* @version 3.1.0 2019-09-20
* @author <NAME>
*/
package org.mars_sim.msp.core.robot.ai.job;
import java.io.Serializable;
import java.util.Iterator;
import java.util.List;
import org.mars_sim.msp.core.robot.RoboticAttributeType;
import org.mars_sim.msp.core.robot.RoboticAttributeManager;
import org.mars_sim.msp.core.person.ai.SkillType;
import org.mars_sim.msp.core.person.ai.task.ManufactureGood;
import org.mars_sim.msp.core.person.ai.task.ProduceFood;
import org.mars_sim.msp.core.person.ai.task.SalvageGood;
import org.mars_sim.msp.core.robot.Robot;
import org.mars_sim.msp.core.structure.Settlement;
import org.mars_sim.msp.core.structure.building.Building;
import org.mars_sim.msp.core.structure.building.function.FunctionType;
import org.mars_sim.msp.core.structure.building.function.Manufacture;
/**
 * The Makerbot class represents an engineer job focusing on manufacturing goods
 */
public class Makerbot
extends RobotJob
implements Serializable {

	/** default serial id. */
	private static final long serialVersionUID = 1L;

	// private static Logger logger = Logger.getLogger(Engineer.class.getName());

	/** Constructor. */
	public Makerbot() {
		// Use Job constructor
		super(Makerbot.class);

		jobTasks.add(ManufactureGood.class);
		jobTasks.add(SalvageGood.class);
		jobTasks.add(ProduceFood.class);
	}

	/**
	 * Gets a robot's capability to perform this job.
	 * @param robot the robot to check.
	 * @return capability (min 0.0).
	 */
	public double getCapability(Robot robot) {
		// base capability is the materials-science skill level
		// (the previous `result = 0D` initialization was immediately overwritten)
		double result = robot.getSkillManager().getSkillLevel(SkillType.MATERIALS_SCIENCE);

		// scale by experience aptitude: +/-50% around the 50-point midpoint
		RoboticAttributeManager attributes = robot.getRoboticAttributeManager();
		int experienceAptitude = attributes.getAttribute(RoboticAttributeType.EXPERIENCE_APTITUDE);
		result += result * ((experienceAptitude - 50D) / 100D);

		return result;
	}

	/**
	 * Gets the base settlement need for this job.
	 * @param settlement the settlement in need.
	 * @return the base need >= 0
	 */
	public double getSettlementNeed(Settlement settlement) {
		double result = 0D;

		// Add (tech level * process number / 2) for all manufacture buildings.
		// Enhanced-for replaces the manual Iterator loop.
		for (Building building : settlement.getBuildingManager().getBuildings(FunctionType.MANUFACTURE)) {
			Manufacture workshop = (Manufacture) building.getFunction(FunctionType.MANUFACTURE);
			result += workshop.getTechLevel() * workshop.getSupportingProcesses() / 2D;
		}

		return result;
	}
}
|
guokaia/Mekatok-V3
|
foundation/mekatok-foundation-dic/mekatok-dic-declare/src/main/java/icu/guokai/mekatok/framework/dic/service/IDictionaryService.java
|
package icu.guokai.mekatok.framework.dic.service;
import icu.guokai.mekatok.framework.core.service.IService;
/**
 * Service interface that exposes dictionary (lookup-table) functionality
 * to other modules. Marker interface for now; concrete dictionary
 * operations are inherited from or added on top of {@link IService}.
 *
 * @author GuoKai
 * @date 2021/8/19
 */
public interface IDictionaryService extends IService {
}
|
mountainash/web-scrobbler
|
src/core/i18n.js
|
<gh_stars>100-1000
'use strict';
/**
* Node attributes that define how node content will be localized.
*
* There're following supported attributes:
* - i18n: replace value of `textContent` property by localized text;
* - i18n-title: replace value of `title` attribute by localized text;
* - i18n-placeholder: replace value of `placeholder` attribute by localized text.
*
* @type {Array}
*/
const I18N_ATTRS = ['i18n', 'i18n-title', 'i18n-placeholder'];
const domParser = new DOMParser();
/**
 * Localize the whole document: translates all nodes currently present,
 * then installs a MutationObserver so nodes added later (dynamic UI)
 * are localized as they appear.
 */
function localizeDocument() {
	// Localize static nodes already in the DOM.
	localizeElementChilds(document);

	// Localize dynamic nodes: watch the entire body subtree for additions.
	new MutationObserver((mutations) => {
		for (const mutation of mutations) {
			for (const node of mutation.addedNodes) {
				// Localize the added node itself and any localizable descendants.
				localizeElement(node);
				localizeElementChilds(node);
			}
		}
	}).observe(document.body, {
		childList: true,
		subtree: true,
	});
}
/**
 * Localize a single element according to its i18n attributes.
 *
 * For each supported attribute the attribute value is used as a message
 * key for `chrome.i18n.getMessage`; if no translation exists the raw key
 * is used as the display text.
 *
 * @param {Object} element Element to localize
 */
function localizeElement(element) {
	// Text nodes etc. can arrive from the MutationObserver; skip them.
	if (!(element instanceof Element)) {
		return;
	}
	for (const attr of I18N_ATTRS) {
		if (!element.hasAttribute(attr)) {
			continue;
		}
		const tag = element.getAttribute(attr);
		// Fall back to the key itself when no translation is available.
		const text = chrome.i18n.getMessage(tag) || tag;
		switch (attr) {
			case 'i18n':
				if (hasHtmlTags(text)) {
					// Translation contains markup: insert sanitized nodes.
					// NOTE(review): nodes are appended, not replacing any
					// existing children — confirm localized elements start empty.
					const nodes = makeNodes(text);
					if (nodes) {
						nodes.forEach((n) => {
							element.appendChild(n);
						});
					} else {
						// Fallback
						// NOTE(review): makeNodes always returns an array
						// (possibly empty), so this branch looks unreachable.
						element.textContent = text;
					}
				} else {
					element.textContent = text;
				}
				break;
			case 'i18n-title':
				element.setAttribute('title', text);
				break;
			case 'i18n-placeholder':
				element.setAttribute('placeholder', text);
				break;
		}
	}
}
/**
 * Localize every localizable descendant of the given element.
 *
 * Only element and document nodes are searchable; anything else is
 * ignored.
 *
 * @param {Object} element Element whose children should be localized
 */
function localizeElementChilds(element) {
	const isSearchable =
		element.nodeType === Node.ELEMENT_NODE ||
		element.nodeType === Node.DOCUMENT_NODE;
	if (!isSearchable) {
		return;
	}

	for (const attr of I18N_ATTRS) {
		element.querySelectorAll(`[${attr}]`).forEach(localizeElement);
	}
}
/**
 * Parse a translated string containing HTML into DOM nodes that can be
 * appended to the element being localized.
 *
 * Acts as a sanitizer: of the parsed content, only plain text nodes and
 * anchor (`<a>`) elements are kept; any other markup is dropped.
 *
 * @param {String} rawHtml String contains HTML code
 * @return {Array} Array of nodes from given text (possibly empty)
 */
function makeNodes(rawHtml) {
	const body = domParser.parseFromString(rawHtml, 'text/html').body;
	return [...body.childNodes].filter((a) => {
		return a.nodeType === a.TEXT_NODE || a.tagName === 'A';
	});
}
/**
 * Determine whether the given string contains HTML-like markup.
 *
 * @param {String} text String to inspect
 * @return {Boolean} True if at least one `<...>` tag-like sequence is found
 */
function hasHtmlTags(text) {
	const tagLikePattern = /<.+?>/;
	return tagLikePattern.test(text);
}
document.addEventListener('DOMContentLoaded', localizeDocument);
|
Neusoft-Technology-Solutions/aws-sdk-cpp
|
aws-cpp-sdk-medialive/source/model/CreateMultiplexProgramRequest.cpp
|
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/medialive/model/CreateMultiplexProgramRequest.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::MediaLive::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
// Default constructor. All optional fields start unset; a fresh random
// request id is generated eagerly so retries of the same request object
// are idempotent on the service side.
CreateMultiplexProgramRequest::CreateMultiplexProgramRequest() :
    m_multiplexIdHasBeenSet(false),
    m_multiplexProgramSettingsHasBeenSet(false),
    m_programNameHasBeenSet(false),
    m_requestId(Aws::Utils::UUID::RandomUUID()),
    m_requestIdHasBeenSet(true)
{
}

// Serializes only the fields that were explicitly set into the JSON
// request body. The multiplex id is tracked but never written here —
// presumably it travels in the request URI rather than the payload;
// verify against the MediaLive service model.
Aws::String CreateMultiplexProgramRequest::SerializePayload() const
{
  JsonValue payload;

  if(m_multiplexProgramSettingsHasBeenSet)
  {
   payload.WithObject("multiplexProgramSettings", m_multiplexProgramSettings.Jsonize());
  }

  if(m_programNameHasBeenSet)
  {
   payload.WithString("programName", m_programName);
  }

  if(m_requestIdHasBeenSet)
  {
   payload.WithString("requestId", m_requestId);
  }

  return payload.View().WriteReadable();
}
|
dev10/lachesis
|
src/posposet/wire/frame.pb.go
|
<reponame>dev10/lachesis
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: frame.proto
package wire
import (
fmt "fmt"
proto "github.com/golang/protobuf/proto"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
type EventDescr struct {
Creator []byte `protobuf:"bytes,1,opt,name=Creator,proto3" json:"Creator,omitempty"`
Hash []byte `protobuf:"bytes,2,opt,name=Hash,proto3" json:"Hash,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *EventDescr) Reset() { *m = EventDescr{} }
func (m *EventDescr) String() string { return proto.CompactTextString(m) }
func (*EventDescr) ProtoMessage() {}
func (*EventDescr) Descriptor() ([]byte, []int) {
return fileDescriptor_5379e2b825e15002, []int{0}
}
func (m *EventDescr) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_EventDescr.Unmarshal(m, b)
}
func (m *EventDescr) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_EventDescr.Marshal(b, m, deterministic)
}
func (m *EventDescr) XXX_Merge(src proto.Message) {
xxx_messageInfo_EventDescr.Merge(m, src)
}
func (m *EventDescr) XXX_Size() int {
return xxx_messageInfo_EventDescr.Size(m)
}
func (m *EventDescr) XXX_DiscardUnknown() {
xxx_messageInfo_EventDescr.DiscardUnknown(m)
}
var xxx_messageInfo_EventDescr proto.InternalMessageInfo
func (m *EventDescr) GetCreator() []byte {
if m != nil {
return m.Creator
}
return nil
}
func (m *EventDescr) GetHash() []byte {
if m != nil {
return m.Hash
}
return nil
}
type Flag struct {
Event []byte `protobuf:"bytes,1,opt,name=Event,proto3" json:"Event,omitempty"`
Roots []*EventDescr `protobuf:"bytes,2,rep,name=Roots,proto3" json:"Roots,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Flag) Reset() { *m = Flag{} }
func (m *Flag) String() string { return proto.CompactTextString(m) }
func (*Flag) ProtoMessage() {}
func (*Flag) Descriptor() ([]byte, []int) {
return fileDescriptor_5379e2b825e15002, []int{1}
}
func (m *Flag) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Flag.Unmarshal(m, b)
}
func (m *Flag) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Flag.Marshal(b, m, deterministic)
}
func (m *Flag) XXX_Merge(src proto.Message) {
xxx_messageInfo_Flag.Merge(m, src)
}
func (m *Flag) XXX_Size() int {
return xxx_messageInfo_Flag.Size(m)
}
func (m *Flag) XXX_DiscardUnknown() {
xxx_messageInfo_Flag.DiscardUnknown(m)
}
var xxx_messageInfo_Flag proto.InternalMessageInfo
func (m *Flag) GetEvent() []byte {
if m != nil {
return m.Event
}
return nil
}
func (m *Flag) GetRoots() []*EventDescr {
if m != nil {
return m.Roots
}
return nil
}
type Frame struct {
Index uint64 `protobuf:"varint,1,opt,name=Index,proto3" json:"Index,omitempty"`
FlagTable []*Flag `protobuf:"bytes,2,rep,name=FlagTable,proto3" json:"FlagTable,omitempty"`
ClothoCandidates []*EventDescr `protobuf:"bytes,3,rep,name=ClothoCandidates,proto3" json:"ClothoCandidates,omitempty"`
Atroposes map[string]uint64 `protobuf:"bytes,4,rep,name=Atroposes,proto3" json:"Atroposes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
Balances []byte `protobuf:"bytes,5,opt,name=Balances,proto3" json:"Balances,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Frame) Reset() { *m = Frame{} }
func (m *Frame) String() string { return proto.CompactTextString(m) }
func (*Frame) ProtoMessage() {}
func (*Frame) Descriptor() ([]byte, []int) {
return fileDescriptor_5379e2b825e15002, []int{2}
}
func (m *Frame) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Frame.Unmarshal(m, b)
}
func (m *Frame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Frame.Marshal(b, m, deterministic)
}
func (m *Frame) XXX_Merge(src proto.Message) {
xxx_messageInfo_Frame.Merge(m, src)
}
func (m *Frame) XXX_Size() int {
return xxx_messageInfo_Frame.Size(m)
}
func (m *Frame) XXX_DiscardUnknown() {
xxx_messageInfo_Frame.DiscardUnknown(m)
}
var xxx_messageInfo_Frame proto.InternalMessageInfo
// NOTE(review): this file is generated by protoc-gen-go ("DO NOT EDIT");
// the comments below are review annotations only and will be lost on the
// next regeneration. The getters follow the standard proto3 contract:
// they are nil-receiver-safe and return the field's zero value when the
// message pointer is nil.

func (m *Frame) GetIndex() uint64 {
	if m != nil {
		return m.Index
	}
	return 0
}

func (m *Frame) GetFlagTable() []*Flag {
	if m != nil {
		return m.FlagTable
	}
	return nil
}

func (m *Frame) GetClothoCandidates() []*EventDescr {
	if m != nil {
		return m.ClothoCandidates
	}
	return nil
}

func (m *Frame) GetAtroposes() map[string]uint64 {
	if m != nil {
		return m.Atroposes
	}
	return nil
}

func (m *Frame) GetBalances() []byte {
	if m != nil {
		return m.Balances
	}
	return nil
}
func init() {
proto.RegisterType((*EventDescr)(nil), "wire.EventDescr")
proto.RegisterType((*Flag)(nil), "wire.Flag")
proto.RegisterType((*Frame)(nil), "wire.Frame")
proto.RegisterMapType((map[string]uint64)(nil), "wire.Frame.AtroposesEntry")
}
func init() { proto.RegisterFile("frame.proto", fileDescriptor_5379e2b825e15002) }
var fileDescriptor_5379e2b825e15002 = []byte{
// 281 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x51, 0xb1, 0x4e, 0xc3, 0x30,
0x10, 0x55, 0x12, 0x07, 0xc8, 0x15, 0xa1, 0xe8, 0xc4, 0x60, 0x65, 0xaa, 0x32, 0xa0, 0x4c, 0x19,
0x60, 0xa9, 0xaa, 0x2e, 0x90, 0xb6, 0x82, 0xd5, 0xe2, 0x07, 0xdc, 0xe6, 0xa0, 0x15, 0x21, 0xae,
0x6c, 0x53, 0xe8, 0xa7, 0xf0, 0xb7, 0xc8, 0x4e, 0x68, 0x84, 0x50, 0xb7, 0x7b, 0x77, 0xf7, 0xde,
0xf3, 0x3d, 0xc3, 0xe8, 0x45, 0xcb, 0x77, 0x2a, 0x77, 0x5a, 0x59, 0x85, 0xec, 0x73, 0xab, 0x29,
0x9f, 0x02, 0x2c, 0xf6, 0xd4, 0xda, 0x39, 0x99, 0xb5, 0x46, 0x0e, 0xe7, 0x95, 0x26, 0x69, 0x95,
0xe6, 0xc1, 0x38, 0x28, 0x2e, 0xc5, 0x2f, 0x44, 0x04, 0xf6, 0x28, 0xcd, 0x86, 0x87, 0xbe, 0xed,
0xeb, 0x7c, 0x0e, 0x6c, 0xd9, 0xc8, 0x57, 0xbc, 0x86, 0xd8, 0x6b, 0xf4, 0x9c, 0x0e, 0xe0, 0x0d,
0xc4, 0x42, 0x29, 0x6b, 0x78, 0x38, 0x8e, 0x8a, 0xd1, 0x6d, 0x5a, 0x3a, 0xbf, 0x72, 0x30, 0x13,
0xdd, 0x38, 0xff, 0x0e, 0x21, 0x5e, 0xba, 0x77, 0x39, 0x9d, 0xa7, 0xb6, 0xa6, 0x2f, 0xaf, 0xc3,
0x44, 0x07, 0xb0, 0x80, 0xc4, 0xb9, 0x3c, 0xcb, 0x55, 0x43, 0xbd, 0x16, 0x74, 0x5a, 0xae, 0x2d,
0x86, 0x21, 0xce, 0x20, 0xad, 0x1a, 0x65, 0x37, 0xaa, 0x92, 0x6d, 0xbd, 0xad, 0xa5, 0x25, 0xc3,
0xa3, 0x13, 0xe6, 0xff, 0x36, 0x71, 0x02, 0xc9, 0xbd, 0xd5, 0x6a, 0xa7, 0x0c, 0x19, 0xce, 0x3c,
0x2d, 0xeb, 0x7d, 0x7c, 0x6a, 0xc7, 0xe1, 0xa2, 0xb5, 0xfa, 0x20, 0x86, 0x65, 0xcc, 0xe0, 0xe2,
0x41, 0x36, 0xb2, 0x5d, 0x93, 0xe1, 0xb1, 0x8f, 0xe0, 0x88, 0xb3, 0x19, 0x5c, 0xfd, 0x25, 0x62,
0x0a, 0xd1, 0x1b, 0x1d, 0xfc, 0x8d, 0x89, 0x70, 0xa5, 0xbb, 0x7b, 0x2f, 0x9b, 0x0f, 0xf2, 0xe1,
0x32, 0xd1, 0x81, 0x69, 0x38, 0x09, 0x56, 0x67, 0xfe, 0xab, 0xee, 0x7e, 0x02, 0x00, 0x00, 0xff,
0xff, 0x59, 0xc4, 0xf6, 0xec, 0xb9, 0x01, 0x00, 0x00,
}
|
carlosProgrammer/sundaymart-admin-bo
|
resources/js/pages/Dashboard/Chart3.js
|
<filename>resources/js/pages/Dashboard/Chart3.js
import React from "react";
import { Card } from "antd";
import { Column } from "@antv/g2plot";
import { withTranslation } from "react-i18next";
class Chart3 extends React.Component {
data = [
{ type: "1-3秒", value: 0.16 },
{ type: "4-10秒", value: 0.125 },
{ type: "11-30秒", value: 0.24 },
{ type: "31-60秒", value: 0.19 },
{ type: "1-3分", value: 0.22 },
{ type: "3-10分", value: 0.05 },
{ type: "10-30分", value: 0.01 },
{ type: "30+分", value: 0.015 },
];
paletteSemanticRed = "#F4664A";
brandColor = "#5B8FF9";
componentDidMount() {
this.columnPlot = new Column("chart3", {
data: this.data,
xField: "type",
yField: "value",
seriesField: "",
color: ({ type }) => {
if (type === "10-30分" || type === "30+分") {
return this.paletteSemanticRed;
}
return this.brandColor;
},
label: {
content: (originData) => {
const val = parseFloat(originData.value);
if (val < 0.05) {
return (val * 100).toFixed(1) + "%";
}
},
offset: 10,
},
legend: false,
xAxis: {
label: {
autoHide: true,
autoRotate: false,
},
},
});
this.columnPlot.render();
}
render() {
return (
<Card style={{ marginTop: "20px" }}>
<div id="chart3" />
</Card>
);
}
}
export default withTranslation()(Chart3);
|
wilebeast/FireFox-OS
|
B2G/gecko/xpcom/tests/TestID.cpp
|
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <stdio.h>
#include "plstr.h"
#include "nsID.h"
static const char* const ids[] = {
"5C347B10-D55C-11D1-89B7-006008911B81",
"{5C347B10-D55C-11D1-89B7-006008911B81}",
"5c347b10-d55c-11d1-89b7-006008911b81",
"{5c347b10-d55c-11d1-89b7-006008911b81}",
"FC347B10-D55C-F1D1-F9B7-006008911B81",
"{FC347B10-D55C-F1D1-F9B7-006008911B81}",
"fc347b10-d55c-f1d1-f9b7-006008911b81",
"{fc347b10-d55c-f1d1-f9b7-006008911b81}",
};
#define NUM_IDS ((int) (sizeof(ids) / sizeof(ids[0])))
// Round-trip test for nsID: every spelling in ids[] must parse, and
// ToString() must reproduce the canonical form.
int main(int argc, char** argv)
{
  nsID id;
  for (int i = 0; i < NUM_IDS; i++) {
    const char* idstr = ids[i];
    // Every variant (braced/unbraced, upper/lower case) must parse.
    if (!id.Parse(idstr)) {
      fprintf(stderr, "TestID: Parse failed on test #%d\n", i);
      return -1;
    }
    char* cp = id.ToString();
    if (NULL == cp) {
      fprintf(stderr, "TestID: ToString failed on test #%d\n", i);
      return -1;
    }
    // ids[] is laid out in groups of four spellings of the same UUID;
    // the canonical output is the fourth entry of each group (lowercase,
    // braced), hence index 4*(i/4) + 3.
    // NOTE(review): cp is never freed; ToString presumably allocates.
    // Acceptable for a short-lived test binary, but confirm ownership.
    if (0 != PL_strcmp(cp, ids[4*(i/4) + 3])) {
      fprintf(stderr, "TestID: compare of ToString failed on test #%d\n", i);
      return -1;
    }
  }
  return 0;
}
|
grayledw/375RefactoringProject
|
src/DatabaseQueries/DatabaseQuery.java
|
package DatabaseQueries;
import java.sql.CallableStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import Domain.DatabaseConnectionService;
import Logging.DatabaseQueryLogger;
import Logging.Logger;
/**
 * Base class for stored-procedure-backed database queries.
 *
 * Subclasses prepare a {@link CallableStatement} and format its
 * {@link ResultSet} rows into display strings; this class runs the query
 * and logs every attempt (success or failure) with a timestamp.
 */
public abstract class DatabaseQuery {

	protected DatabaseConnectionService dbService;
	protected CallableStatement callableStatement;
	protected Logger logger;

	public DatabaseQuery(DatabaseConnectionService dbService) {
		this.dbService = dbService;
		this.logger = new DatabaseQueryLogger();
	}

	/**
	 * Runs the query and returns its rows formatted as strings.
	 *
	 * @return formatted result rows
	 * @throws SQLException if preparing, executing or reading the query fails
	 */
	public final List<String> getResults() throws SQLException {
		// Fix: try-with-resources closes the ResultSet, which was
		// previously never closed (resource leak).
		// NOTE(review): callableStatement is a reusable protected field and
		// is intentionally left open here — confirm subclasses manage its
		// lifecycle, otherwise it leaks as well.
		try (ResultSet resultSet = runQuery()) {
			List<String> results = getFormattedResultStrings(resultSet);
			logger.log("SUCCESS: " + queryToString() + " " + Instant.now().toString());
			return results;
		} catch (SQLException sqlException) {
			logger.log("ERROR: " + queryToString() + " " + Instant.now().toString());
			throw sqlException;
		}
	}

	/**
	 * Prepares and executes the callable statement.
	 *
	 * @return the raw result set; the caller is responsible for closing it
	 * @throws SQLException if preparation or execution fails
	 */
	public final ResultSet runQuery() throws SQLException {
		prepareCallableStatement();
		return callableStatement.executeQuery();
	}

	/** Binds parameters and assigns {@code callableStatement}. */
	protected abstract void prepareCallableStatement() throws SQLException;

	/** Converts result rows into human-readable strings. */
	protected abstract List<String> getFormattedResultStrings(ResultSet resultSet) throws SQLException;

	/** Short description of the query, used in log lines. */
	protected abstract String queryToString();
}
|
KoachieHealthSystemsNPO/bsis-1
|
src/test/java/org/jembi/bsis/helpers/persisters/DivisionPersister.java
|
<reponame>KoachieHealthSystemsNPO/bsis-1<filename>src/test/java/org/jembi/bsis/helpers/persisters/DivisionPersister.java<gh_stars>1-10
package org.jembi.bsis.helpers.persisters;
import javax.persistence.EntityManager;
import org.jembi.bsis.model.location.Division;
public class DivisionPersister extends AbstractEntityPersister<Division> {

  /**
   * Persists the division together with its ancestor chain: parents are
   * persisted first (recursively) so their rows exist before the child
   * references them.
   *
   * NOTE(review): assumes the parent chain is acyclic — a cyclic parent
   * reference would recurse forever. TODO confirm the model forbids cycles.
   */
  @Override
  public Division deepPersist(Division division, EntityManager entityManager) {
    if (division.getParent() != null) {
      deepPersist(division.getParent(), entityManager);
    }
    return persist(division, entityManager);
  }
}
|
sleepxdq/BUCT
|
design-patterns/multithreading/observer/src/main/java/com/xdq/observer/Task.java
|
package com.xdq.observer;
/**
 * A unit of work that produces a result and may be interrupted while
 * executing.
 *
 * @author Seven.Xu
 * @version 2021/4/30
 **/
@FunctionalInterface
public interface Task<R> {

	/**
	 * Executes the task.
	 *
	 * @return the result produced by this task
	 * @throws InterruptedException if the executing thread is interrupted
	 */
	R call() throws InterruptedException;
}
|
bryamrrr/universyty
|
app/models/enrollment.rb
|
# Links a user to a course and tracks their progress, grades and
# certificate code.
class Enrollment < ApplicationRecord
  belongs_to :user
  belongs_to :course
  has_many :grades, :dependent => :destroy

  before_save :default_values

  # Averages all passing grades (score >= 14) and stores the result in
  # first_score.
  #
  # Fix: previously raised ZeroDivisionError when no grade passed; now the
  # method simply leaves first_score untouched in that case.
  def calcGrade
    passing_scores = grades.map { |grade| grade[:score] }.select { |score| score >= 14 }
    return if passing_scores.empty?

    # Integer division, matching the original behaviour.
    total = passing_scores.sum / passing_scores.size
    update_column(:first_score, total)
  end

  # Builds the next certificate code from the "code" Information row
  # (prefix + running counter, zero-padded to 7 characters) and bumps the
  # counter.
  def generate_certificate_code
    info = Information.find_by(title: "code")
    code = info[:content] + info[:value].to_s
    # Pad with zeros after the 3-character prefix until the code is 7 long.
    zeros = 7 - code.length
    zeros.times do
      code.insert(3, "0")
    end
    update_column(:code, code)
    info.update_column(:value, info[:value] + 1)
  end

  private

  # Initializes progress tracking for new enrollments.
  def default_values
    self.current_module ||= 1
    self.current_video ||= 1
  end
end
|
gordon-n-stevenson/fetalReconstruction
|
source/IRTKSimple2/packages/registration/src/irtkConjugateGradientDescentOptimizer.cc
|
/*=========================================================================
Library : Image Registration Toolkit (IRTK)
Module : $Id$
Copyright : Imperial College, Department of Computing
Visual Information Processing (VIP), 2008 onwards
Date : $Date$
Version : $Revision$
Changes : $Author$
Copyright (c) IXICO LIMITED
All rights reserved.
See COPYRIGHT for details
=========================================================================*/
#include <irtkRegistration.h>
#include <gsl/gsl_multimin.h>
#define MAXITS 200
/* The function f that we want to minimize */
extern double evaluate_f (const gsl_vector *v, void *params);
/* The gradient of f */
extern void evaluate_df (const gsl_vector *v, void *params, gsl_vector *df);
/* Compute both f and df together */
extern void evaluate_fdf (const gsl_vector *x, void *params,
double *f, gsl_vector *df);
// Optimizes the transformation's DOFs using GSL's Polak-Ribiere
// conjugate-gradient minimizer (up to MAXITS iterations), then writes the
// optimized parameters back. Returns the similarity improvement, or 0 if
// no improvement was found.
double irtkConjugateGradientDescentOptimizer::Run()
{
  int i, n, status;
  size_t iter = 0;
  float new_similarity, old_similarity;

  // Number of variables we have to optimize
  n = _Transformation->NumberOfDOFs();

  // Current similarity
  old_similarity = _Registration->Evaluate();

  // Convert some stuff to GSL
  gsl_vector *x = gsl_vector_alloc(n);
  for (i = 0; i < n; i++) {
    gsl_vector_set(x, i, _Transformation->Get(i));
  }

  const gsl_multimin_fdfminimizer_type *T;
  gsl_multimin_fdfminimizer *s;
  gsl_multimin_function_fdf func;

  // NOTE(review): func.params is never assigned — the extern evaluate_*
  // callbacks presumably reach the registration through other state;
  // confirm they do not dereference params.
  func.n = n;
  func.f = &evaluate_f;
  func.df = &evaluate_df;
  func.fdf = &evaluate_fdf;

  T = gsl_multimin_fdfminimizer_conjugate_pr;
  s = gsl_multimin_fdfminimizer_alloc(T, n);

  gsl_multimin_fdfminimizer_set(s, &func, x, _StepSize, _Epsilon);

  // Call GSL Polak-Ribiere routine for conjugate gradient; stop when the
  // gradient norm falls below _Epsilon or MAXITS iterations are reached.
  do {
    iter++;
    status = gsl_multimin_fdfminimizer_iterate(s);
    if (status)
      break;
    status = gsl_multimin_test_gradient (s->gradient, _Epsilon);
  } while (status == GSL_CONTINUE && iter < MAXITS);

  new_similarity = s->f;

  // Convert some stuff back from GSL: write optimized DOFs into the
  // transformation regardless of whether similarity improved.
  for (i = 0; i < n; i++) {
    _Transformation->Put(i, gsl_vector_get(s->x, i));
  }

  // Delete GSL memory
  gsl_multimin_fdfminimizer_free(s);
  gsl_vector_free(x);

  // Return. The minimizer minimizes f, so f apparently returns the
  // negated similarity; -new_similarity recovers the similarity value.
  if (-new_similarity > old_similarity) {
    cout << new_similarity << endl;
    return -new_similarity - old_similarity;
  } else {
    return 0;
  }
}
|
mondoo/Clove
|
clove/components/systems/ecs/include/Clove/ECS/ComponentContainer.inl
|
<gh_stars>10-100
#include "Clove/ECS/ECSEvents.hpp"
#include <Clove/Definitions.hpp>
#include <Clove/Event/EventDispatcher.hpp>
#include <Clove/Log/Log.hpp>
#include <type_traits>
CLOVE_DECLARE_LOG_CATEGORY(CloveEcs)
namespace clove {
bool ComponentContainerInterface::hasComponent(Entity entity) {
return entity != NullEntity && entityToIndex.size() > entity && entityToIndex[entity] != nullIndex;
}
ComponentContainerInterface::IndexType ComponentContainerInterface::size() const {
return entities.size();
}
ComponentContainerInterface::Iterator ComponentContainerInterface::begin() noexcept {
return entities.begin();
}
ComponentContainerInterface::Iterator ComponentContainerInterface::end() noexcept {
return entities.end();
}
template<typename ComponentType>
ComponentContainer<ComponentType>::ComponentContainer(EventDispatcher *dispatcher)
: ecsEventDispatcher{ dispatcher } {
}
template<typename ComponentType>
ComponentContainer<ComponentType>::ComponentContainer(ComponentContainer &&other) noexcept = default;
template<typename ComponentType>
ComponentContainer<ComponentType> &ComponentContainer<ComponentType>::operator=(ComponentContainer &&other) noexcept = default;
template<typename ComponentType>
ComponentContainer<ComponentType>::~ComponentContainer() = default;
// Adds a component to the entity, or replaces the existing one in place
// if the entity already has a component of this type. Broadcasts a
// ComponentAddedEvent in both cases and returns a reference to the
// stored component.
template<typename ComponentType>
template<typename... ConstructArgs>
ComponentType &ComponentContainer<ComponentType>::addComponent(Entity entity, ConstructArgs &&... args) {
    if(!hasComponent(entity)) {
        // Grow the sparse entity->index map on demand; nullIndex marks
        // slots without a component.
        if(entityToIndex.size() <= entity) {
            entityToIndex.resize(entity + 1, nullIndex);
        }

        // Dense arrays: entity id and component live at the same index.
        entities.push_back(entity);
        components.emplace_back(ComponentType{ std::forward<ConstructArgs>(args)... });

        entityToIndex[entity] = entities.size() - 1;
    } else {
        components[entityToIndex[entity]] = ComponentType{ std::forward<ConstructArgs>(args)... };
    }

    ecsEventDispatcher->broadCastEvent(ComponentAddedEvent<ComponentType>{ entity, components[entityToIndex[entity]] });

    return components[entityToIndex[entity]];
}
template<typename ComponentType>
ComponentType &ComponentContainer<ComponentType>::getComponent(Entity entity) {
CLOVE_ASSERT_MSG(hasComponent(entity), "{0}: Entity does not have component", CLOVE_FUNCTION_NAME_PRETTY);
return components[entityToIndex[entity]];
}
template<typename ComponentType>
void ComponentContainer<ComponentType>::cloneComponent(Entity from, Entity to) {
if constexpr(std::is_copy_constructible_v<ComponentType>) {
addComponent(to, getComponent(from));
} else {
CLOVE_LOG(CloveEcs, LogLevel::Warning, "{0}: Component is not copy constructable. Entity {1} will be incomplete.", CLOVE_FUNCTION_NAME_PRETTY, to);
}
}
// Removes the entity's component (no-op if it has none). Broadcasts a
// ComponentRemovedEvent before destruction so listeners can still read
// the component.
template<typename ComponentType>
void ComponentContainer<ComponentType>::removeComponent(Entity entity) {
    if(hasComponent(entity)) {
        ecsEventDispatcher->broadCastEvent(ComponentRemovedEvent<ComponentType>{ entity, components[entityToIndex[entity]] });

        IndexType const index{ entityToIndex[entity] };
        IndexType const lastIndex{ entities.size() - 1 };

        // Swap-and-pop: move the last entry into the freed slot to keep
        // the dense arrays contiguous, then fix the moved entity's index.
        if(index < lastIndex) {
            Entity const movedEntity{ entities[lastIndex] };

            entities[index]   = std::move(entities.back());
            components[index] = std::move(components.back());

            entityToIndex[movedEntity] = index;
        }

        entities.pop_back();
        components.pop_back();
        entityToIndex[entity] = nullIndex;
    }
}
}
|
jarocho105/pre2
|
erp_desktop_all/src_puntoventa/com/bydan/erp/puntoventa/presentation/web/jsf/sessionbean/report/CajaDiariaUsuarioSessionBean.java
|
/*
*AVISO LEGAL
© Copyright
*Este programa esta protegido por la ley de derechos de autor.
*La reproduccion o distribucion ilicita de este programa o de cualquiera de
*sus partes esta penado por la ley con severas sanciones civiles y penales,
*y seran objeto de todas las sanciones legales que correspondan.
*Su contenido no puede copiarse para fines comerciales o de otras,
*ni puede mostrarse, incluso en una version modificada, en otros sitios Web.
Solo esta permitido colocar hipervinculos al sitio web.
*/
package com.bydan.erp.puntoventa.presentation.web.jsf.sessionbean.report;
import java.util.Set;
import java.util.HashSet;
import java.util.ArrayList;
import java.util.Date;
import java.io.Serializable;
import com.bydan.framework.erp.util.Constantes;
import com.bydan.erp.puntoventa.business.entity.*;
@SuppressWarnings("unused")
public class CajaDiariaUsuarioSessionBean extends CajaDiariaUsuarioSessionBeanAdditional {
private static final long serialVersionUID = 1L;
protected Boolean isPermiteNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario;
protected Boolean isPermiteRecargarInformacion;
protected String sNombrePaginaNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario;
protected Boolean isBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuario;
protected Long lIdCajaDiariaUsuarioActualForeignKey;
protected Long lIdCajaDiariaUsuarioActualForeignKeyParaPosibleAtras;
protected Boolean isBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuarioParaPosibleAtras;
protected String sUltimaBusquedaCajaDiariaUsuario;
protected String sServletGenerarHtmlReporte;
protected Integer iNumeroPaginacion;
protected Integer iNumeroPaginacionPagina;
protected String sPathNavegacionActual="";
protected Boolean isPaginaPopup=false;
protected String sStyleDivArbol="";
protected String sStyleDivContent="";
protected String sStyleDivOpcionesBanner="";
protected String sStyleDivExpandirColapsar="";
protected String sFuncionBusquedaRapida="";
Boolean isBusquedaDesdeForeignKeySesionEmpresa;
Long lidEmpresaActual;
Boolean isBusquedaDesdeForeignKeySesionCliente;
Long lidClienteActual;
Boolean isBusquedaDesdeForeignKeySesionCaja;
Long lidCajaActual;
Boolean isBusquedaDesdeForeignKeySesionUsuario;
Long lidUsuarioActual;
private Long id;
private Long id_empresa;
private Long id_cliente;
private Long id_caja;
private Long id_usuario;
private Date fecha_inicio;
private Date fecha_fin;
protected Boolean conGuardarRelaciones=false;
protected Boolean estaModoGuardarRelaciones=false;
protected Boolean esGuardarRelacionado=false;
protected Boolean estaModoBusqueda=false;
protected Boolean noMantenimiento=false;
protected CajaDiariaUsuarioSessionBeanAdditional cajadiariausuarioSessionBeanAdditional=null;
public CajaDiariaUsuarioSessionBeanAdditional getCajaDiariaUsuarioSessionBeanAdditional() {
return this.cajadiariausuarioSessionBeanAdditional;
}
public void setCajaDiariaUsuarioSessionBeanAdditional(CajaDiariaUsuarioSessionBeanAdditional cajadiariausuarioSessionBeanAdditional) {
try {
this.cajadiariausuarioSessionBeanAdditional=cajadiariausuarioSessionBeanAdditional;
} catch(Exception e) {
;
}
}
public CajaDiariaUsuarioSessionBean () {
this.inicializarCajaDiariaUsuarioSessionBean();
}
public void inicializarCajaDiariaUsuarioSessionBean () {
this.isPermiteNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario=false;
this.isPermiteRecargarInformacion=false;
this.sNombrePaginaNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario="";
this.isBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuario=false;
this.lIdCajaDiariaUsuarioActualForeignKey=0L;
this.lIdCajaDiariaUsuarioActualForeignKeyParaPosibleAtras=0L;
this.isBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuarioParaPosibleAtras=false;
this.sUltimaBusquedaCajaDiariaUsuario ="";
this.sServletGenerarHtmlReporte="";
this.iNumeroPaginacion=10;
this.iNumeroPaginacionPagina=0;
this.sPathNavegacionActual="";
this.sFuncionBusquedaRapida="";
this.sStyleDivArbol="display:table-row;width:20%;height:800px;visibility:visible";
this.sStyleDivContent="height:600px;width:80%";
this.sStyleDivOpcionesBanner="display:table-row";
this.sStyleDivExpandirColapsar="display:table-row";
this.isPaginaPopup=false;
this.estaModoGuardarRelaciones=true;
this.conGuardarRelaciones=false;
this.esGuardarRelacionado=false;
this.estaModoBusqueda=false;
this.noMantenimiento=false;
isBusquedaDesdeForeignKeySesionEmpresa=false;
lidEmpresaActual=0L;
isBusquedaDesdeForeignKeySesionCliente=false;
lidClienteActual=0L;
isBusquedaDesdeForeignKeySesionCaja=false;
lidCajaActual=0L;
isBusquedaDesdeForeignKeySesionUsuario=false;
lidUsuarioActual=0L;
this.id_empresa=-1L;
this.id_cliente=-1L;
this.id_caja=-1L;
this.id_usuario=-1L;
this.fecha_inicio=new Date();
this.fecha_fin=new Date();
}
	/**
	 * Toggles the page between popup mode (navigation tree and banner
	 * hidden, content at full width) and normal mode. Styles are only
	 * rewritten when the mode actually flips, so repeated calls with the
	 * same flag are no-ops.
	 */
	public void setPaginaPopupVariables(Boolean isPopupVariables) {
		if(isPopupVariables) {
			if(!this.isPaginaPopup) {
				this.sStyleDivArbol="display:none;width:0px;height:0px;visibility:hidden";
				this.sStyleDivContent="height:800px;width:100%";; // NOTE(review): stray second ';' — harmless empty statement
				this.sStyleDivOpcionesBanner="display:none";
				this.sStyleDivExpandirColapsar="display:none";
				this.isPaginaPopup=true;
			}
		} else {
			if(this.isPaginaPopup) {
				// NOTE(review): restored tree style differs from the
				// constructor default (15% + overflow:auto vs 20%) —
				// confirm this asymmetry is intentional.
				this.sStyleDivArbol="display:table-row;width:15%;height:600px;visibility:visible;overflow:auto;";
				this.sStyleDivContent="height:600px;width:80%";
				this.sStyleDivOpcionesBanner="display:table-row";
				this.sStyleDivExpandirColapsar="display:table-row";
				this.isPaginaPopup=false;
			}
		}
	}
public Boolean getisPermiteNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario() {
return this.isPermiteNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario;
}
public void setisPermiteNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario(
Boolean isPermiteNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario) {
this.isPermiteNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario= isPermiteNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario;
}
public Boolean getisPermiteRecargarInformacion() {
return this.isPermiteRecargarInformacion;
}
public void setisPermiteRecargarInformacion(
Boolean isPermiteRecargarInformacion) {
this.isPermiteRecargarInformacion=isPermiteRecargarInformacion;
}
public String getsNombrePaginaNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario() {
return this.sNombrePaginaNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario;
}
public void setsNombrePaginaNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario(String sNombrePaginaNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario) {
this.sNombrePaginaNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario = sNombrePaginaNavegacionHaciaForeignKeyDesdeCajaDiariaUsuario;
}
public Boolean getisBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuario() {
return isBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuario;
}
public void setisBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuario(
Boolean isBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuario) {
this.isBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuario= isBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuario;
}
public Long getlIdCajaDiariaUsuarioActualForeignKey() {
return lIdCajaDiariaUsuarioActualForeignKey;
}
public void setlIdCajaDiariaUsuarioActualForeignKey(
Long lIdCajaDiariaUsuarioActualForeignKey) {
this.lIdCajaDiariaUsuarioActualForeignKey = lIdCajaDiariaUsuarioActualForeignKey;
}
public Long getlIdCajaDiariaUsuarioActualForeignKeyParaPosibleAtras() {
return lIdCajaDiariaUsuarioActualForeignKeyParaPosibleAtras;
}
// --- Plain JSF backing-bean accessors (lower-camel property names kept as-is). ---

// Foreign-key id used to restore the previous CajaDiariaUsuario selection ("posible atras").
public void setlIdCajaDiariaUsuarioActualForeignKeyParaPosibleAtras(
Long lIdCajaDiariaUsuarioActualForeignKeyParaPosibleAtras) {
this.lIdCajaDiariaUsuarioActualForeignKeyParaPosibleAtras = lIdCajaDiariaUsuarioActualForeignKeyParaPosibleAtras;
}
public Boolean getisBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuarioParaPosibleAtras() {
return isBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuarioParaPosibleAtras;
}
public void setisBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuarioParaPosibleAtras(
Boolean isBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuarioParaPosibleAtras) {
this.isBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuarioParaPosibleAtras = isBusquedaDesdeForeignKeySesionForeignKeyCajaDiariaUsuarioParaPosibleAtras;
}
// Last quick-search text entered for this entity.
public String getsUltimaBusquedaCajaDiariaUsuario() {
return sUltimaBusquedaCajaDiariaUsuario;
}
public void setsUltimaBusquedaCajaDiariaUsuario(String sUltimaBusquedaCajaDiariaUsuario) {
this.sUltimaBusquedaCajaDiariaUsuario = sUltimaBusquedaCajaDiariaUsuario;
}
// Servlet endpoint used to render HTML reports.
public String getsServletGenerarHtmlReporte() {
return sServletGenerarHtmlReporte;
}
public void setsServletGenerarHtmlReporte(String sServletGenerarHtmlReporte) {
this.sServletGenerarHtmlReporte = sServletGenerarHtmlReporte;
}
// Pagination: page size and current page number.
public Integer getiNumeroPaginacion() {
return iNumeroPaginacion;
}
public void setiNumeroPaginacion(Integer iNumeroPaginacion) {
this.iNumeroPaginacion= iNumeroPaginacion;
}
public Integer getiNumeroPaginacionPagina() {
return iNumeroPaginacionPagina;
}
public void setiNumeroPaginacionPagina(Integer iNumeroPaginacionPagina) {
this.iNumeroPaginacionPagina= iNumeroPaginacionPagina;
}
// Current navigation breadcrumb path.
public String getsPathNavegacionActual() {
return this.sPathNavegacionActual;
}
public void setsPathNavegacionActual(String sPathNavegacionActual) {
this.sPathNavegacionActual = sPathNavegacionActual;
}
// True when the page is shown as a popup window.
public Boolean getisPaginaPopup() {
return this.isPaginaPopup;
}
public void setisPaginaPopup(Boolean isPaginaPopup) {
this.isPaginaPopup = isPaginaPopup;
}
// Inline CSS styles for the main layout regions of the page.
public String getsStyleDivArbol() {
return this.sStyleDivArbol;
}
public void setsStyleDivArbol(String sStyleDivArbol) {
this.sStyleDivArbol = sStyleDivArbol;
}
public String getsStyleDivContent() {
return this.sStyleDivContent;
}
public void setsStyleDivContent(String sStyleDivContent) {
this.sStyleDivContent = sStyleDivContent;
}
public String getsStyleDivOpcionesBanner() {
return this.sStyleDivOpcionesBanner;
}
public void setsStyleDivOpcionesBanner(String sStyleDivOpcionesBanner) {
this.sStyleDivOpcionesBanner = sStyleDivOpcionesBanner;
}
public String getsStyleDivExpandirColapsar() {
return this.sStyleDivExpandirColapsar;
}
public void setsStyleDivExpandirColapsar(String sStyleDivExpandirColapsar) {
this.sStyleDivExpandirColapsar = sStyleDivExpandirColapsar;
}
// Name of the JavaScript quick-search function used by the UI.
public String getsFuncionBusquedaRapida() {
return this.sFuncionBusquedaRapida;
}
public void setsFuncionBusquedaRapida(String sFuncionBusquedaRapida) {
this.sFuncionBusquedaRapida = sFuncionBusquedaRapida;
}
// Mode flags controlling save-with-relations / search / maintenance behaviour.
public Boolean getConGuardarRelaciones() {
return this.conGuardarRelaciones;
}
public void setConGuardarRelaciones(Boolean conGuardarRelaciones) {
this.conGuardarRelaciones = conGuardarRelaciones;
}
public Boolean getEstaModoGuardarRelaciones() {
return this.estaModoGuardarRelaciones;
}
public void setEstaModoGuardarRelaciones(Boolean estaModoGuardarRelaciones) {
this.estaModoGuardarRelaciones = estaModoGuardarRelaciones;
}
public Boolean getEsGuardarRelacionado() {
return this.esGuardarRelacionado;
}
public void setEsGuardarRelacionado(Boolean esGuardarRelacionado) {
this.esGuardarRelacionado = esGuardarRelacionado;
}
public Boolean getEstaModoBusqueda() {
return this.estaModoBusqueda;
}
public void setEstaModoBusqueda(Boolean estaModoBusqueda) {
this.estaModoBusqueda = estaModoBusqueda;
}
public Boolean getNoMantenimiento() {
return this.noMantenimiento;
}
public void setNoMantenimiento(Boolean noMantenimiento) {
this.noMantenimiento = noMantenimiento;
}
// --- Column getters (setters with null-logging live further below). ---
public Long getid() {
return this.id;
}
public Long getid_empresa() {
return this.id_empresa;
}
public Long getid_cliente() {
return this.id_cliente;
}
public Long getid_caja() {
return this.id_caja;
}
public Long getid_usuario() {
return this.id_usuario;
}
public Date getfecha_inicio() {
return this.fecha_inicio;
}
public Date getfecha_fin() {
return this.fecha_fin;
}
/**
 * Null-safe value inequality used by the column setters below.
 * The original setters compared boxed {@code Long}/{@code Date} values with
 * {@code !=}, i.e. by reference, so logically equal values were still treated
 * as a change. Comparing by value preserves the observable state while
 * honouring the "only act on change" intent.
 */
private static boolean valueChanged(Object oldValue, Object newValue) {
    return oldValue == null ? newValue != null : !oldValue.equals(newValue);
}

/**
 * Sets the primary-key column {@code id}.
 * Null is not a permitted column value; in development mode a warning is
 * printed, but the value is still assigned (original permissive behaviour).
 */
public void setid(Long newid) throws Exception {
    if (valueChanged(this.id, newid)) {
        if (newid == null && Constantes.ISDEVELOPING) {
            System.out.println("CajaDiariaUsuario:Valor nulo no permitido en columna id");
        }
        this.id = newid;
    }
}

/** Sets the foreign-key column {@code id_empresa}; null logged in development mode. */
public void setid_empresa(Long newid_empresa) throws Exception {
    if (valueChanged(this.id_empresa, newid_empresa)) {
        if (newid_empresa == null && Constantes.ISDEVELOPING) {
            System.out.println("CajaDiariaUsuario:Valor nulo no permitido en columna id_empresa");
        }
        this.id_empresa = newid_empresa;
    }
}

/** Sets the foreign-key column {@code id_cliente}; null logged in development mode. */
public void setid_cliente(Long newid_cliente) throws Exception {
    if (valueChanged(this.id_cliente, newid_cliente)) {
        if (newid_cliente == null && Constantes.ISDEVELOPING) {
            System.out.println("CajaDiariaUsuario:Valor nulo no permitido en columna id_cliente");
        }
        this.id_cliente = newid_cliente;
    }
}

/** Sets the foreign-key column {@code id_caja}; null logged in development mode. */
public void setid_caja(Long newid_caja) throws Exception {
    if (valueChanged(this.id_caja, newid_caja)) {
        if (newid_caja == null && Constantes.ISDEVELOPING) {
            System.out.println("CajaDiariaUsuario:Valor nulo no permitido en columna id_caja");
        }
        this.id_caja = newid_caja;
    }
}

/** Sets the foreign-key column {@code id_usuario}; null logged in development mode. */
public void setid_usuario(Long newid_usuario) throws Exception {
    if (valueChanged(this.id_usuario, newid_usuario)) {
        if (newid_usuario == null && Constantes.ISDEVELOPING) {
            System.out.println("CajaDiariaUsuario:Valor nulo no permitido en columna id_usuario");
        }
        this.id_usuario = newid_usuario;
    }
}

/** Sets the column {@code fecha_inicio}; null logged in development mode. */
public void setfecha_inicio(Date newfecha_inicio) throws Exception {
    if (valueChanged(this.fecha_inicio, newfecha_inicio)) {
        if (newfecha_inicio == null && Constantes.ISDEVELOPING) {
            System.out.println("CajaDiariaUsuario:Valor nulo no permitido en columna fecha_inicio");
        }
        this.fecha_inicio = newfecha_inicio;
    }
}

/** Sets the column {@code fecha_fin}; null logged in development mode. */
public void setfecha_fin(Date newfecha_fin) throws Exception {
    if (valueChanged(this.fecha_fin, newfecha_fin)) {
        if (newfecha_fin == null && Constantes.ISDEVELOPING) {
            System.out.println("CajaDiariaUsuario:Valor nulo no permitido en columna fecha_fin");
        }
        this.fecha_fin = newfecha_fin;
    }
}
// --- Session-scoped foreign-key filter flags and their current-id values. ---
// Each pair: "is the search driven by the session FK?" plus the FK id to use.
public Boolean getisBusquedaDesdeForeignKeySesionEmpresa() {
return isBusquedaDesdeForeignKeySesionEmpresa;
}
public void setisBusquedaDesdeForeignKeySesionEmpresa(
Boolean isBusquedaDesdeForeignKeySesionEmpresa) {
this.isBusquedaDesdeForeignKeySesionEmpresa = isBusquedaDesdeForeignKeySesionEmpresa;
}
public Long getlidEmpresaActual() {
return lidEmpresaActual;
}
public void setlidEmpresaActual(Long lidEmpresaActual) {
this.lidEmpresaActual = lidEmpresaActual;
}
public Boolean getisBusquedaDesdeForeignKeySesionCliente() {
return isBusquedaDesdeForeignKeySesionCliente;
}
public void setisBusquedaDesdeForeignKeySesionCliente(
Boolean isBusquedaDesdeForeignKeySesionCliente) {
this.isBusquedaDesdeForeignKeySesionCliente = isBusquedaDesdeForeignKeySesionCliente;
}
public Long getlidClienteActual() {
return lidClienteActual;
}
public void setlidClienteActual(Long lidClienteActual) {
this.lidClienteActual = lidClienteActual;
}
public Boolean getisBusquedaDesdeForeignKeySesionCaja() {
return isBusquedaDesdeForeignKeySesionCaja;
}
public void setisBusquedaDesdeForeignKeySesionCaja(
Boolean isBusquedaDesdeForeignKeySesionCaja) {
this.isBusquedaDesdeForeignKeySesionCaja = isBusquedaDesdeForeignKeySesionCaja;
}
public Long getlidCajaActual() {
return lidCajaActual;
}
public void setlidCajaActual(Long lidCajaActual) {
this.lidCajaActual = lidCajaActual;
}
public Boolean getisBusquedaDesdeForeignKeySesionUsuario() {
return isBusquedaDesdeForeignKeySesionUsuario;
}
public void setisBusquedaDesdeForeignKeySesionUsuario(
Boolean isBusquedaDesdeForeignKeySesionUsuario) {
this.isBusquedaDesdeForeignKeySesionUsuario = isBusquedaDesdeForeignKeySesionUsuario;
}
public Long getlidUsuarioActual() {
return lidUsuarioActual;
}
public void setlidUsuarioActual(Long lidUsuarioActual) {
this.lidUsuarioActual = lidUsuarioActual;
}
}
|
vusec/firestarter
|
apps/mysql-5.1.65/storage/ndb/src/kernel/blocks/dbtup/DbtupCommit.cpp
|
<reponame>vusec/firestarter
/* Copyright (C) 2003 MySQL AB
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; version 2 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */
#define DBTUP_C
#define DBTUP_COMMIT_CPP
#include "Dbtup.hpp"
#include <RefConvert.hpp>
#include <ndb_limits.h>
#include <pc.hpp>
#include <signaldata/TupCommit.hpp>
#include "../dblqh/Dblqh.hpp"
/**
 * TUP_DEALLOCREQ: release the in-memory storage of one tuple.
 * Signal layout: theData[0]=fragment id, theData[1]=table record i-value,
 * theData[2]=fragment page id, theData[3]=page index.
 */
void Dbtup::execTUP_DEALLOCREQ(Signal* signal)
{
  TablerecPtr regTabPtr;
  FragrecordPtr regFragPtr;
  Uint32 frag_page_id, frag_id;
  jamEntry();
  frag_id= signal->theData[0];
  regTabPtr.i= signal->theData[1];
  frag_page_id= signal->theData[2];
  Uint32 page_index= signal->theData[3];
  ptrCheckGuard(regTabPtr, cnoOfTablerec, tablerec);
  getFragmentrec(regFragPtr, frag_id, regTabPtr.p);
  ndbassert(regFragPtr.p != NULL);
  /* An all-ones (page id, page index) combination is treated as a null
   * reference: nothing to free. */
  if (! (((frag_page_id << MAX_TUPLES_BITS) + page_index) == ~ (Uint32) 0))
  {
    Local_key tmp;
    tmp.m_page_no= getRealpid(regFragPtr.p, frag_page_id);
    tmp.m_page_idx= page_index;
    PagePtr pagePtr;
    Tuple_header* ptr= (Tuple_header*)get_ptr(&pagePtr, &tmp, regTabPtr.p);
    ndbassert(ptr->m_header_bits & Tuple_header::FREE);
    /* Rows still referenced from the LCP keep list are only marked FREED
     * here; the actual release is deferred until the keep list is drained. */
    if (ptr->m_header_bits & Tuple_header::LCP_KEEP)
    {
      ndbassert(! (ptr->m_header_bits & Tuple_header::FREED));
      ptr->m_header_bits |= Tuple_header::FREED;
      return;
    }
    /* Free via the variable-size or fixed-size allocator depending on the
     * table's in-memory layout. */
    if (regTabPtr.p->m_attributes[MM].m_no_of_varsize)
    {
      jam();
      free_var_rec(regFragPtr.p, regTabPtr.p, &tmp, pagePtr);
    } else {
      free_fix_rec(regFragPtr.p, regTabPtr.p, &tmp, (Fix_page*)pagePtr.p);
    }
  }
}
/**
 * TUP_WRITELOG_REQ: issue LQH_WRITELOG_REQ for every operation in the
 * tuple's active-operation chain, oldest first.
 * Signal layout: theData[0]=operation i-value, theData[1]=gci.
 */
void Dbtup::execTUP_WRITELOG_REQ(Signal* signal)
{
  jamEntry();
  OperationrecPtr loopOpPtr;
  loopOpPtr.i= signal->theData[0];
  Uint32 gci= signal->theData[1];
  c_operation_pool.getPtr(loopOpPtr);
  /* Rewind to the first (oldest) operation on this tuple. */
  while (loopOpPtr.p->prevActiveOp != RNIL) {
    jam();
    loopOpPtr.i= loopOpPtr.p->prevActiveOp;
    c_operation_pool.getPtr(loopOpPtr);
  }
  /* Walk forward, forwarding one write-log request per operation; the
   * last one returns directly (no jamEntry() after the final call). */
  do {
    ndbrequire(get_trans_state(loopOpPtr.p) == TRANS_STARTED);
    signal->theData[0]= loopOpPtr.p->userpointer;
    signal->theData[1]= gci;
    if (loopOpPtr.p->nextActiveOp == RNIL) {
      jam();
      EXECUTE_DIRECT(DBLQH, GSN_LQH_WRITELOG_REQ, signal, 2);
      return;
    }
    jam();
    EXECUTE_DIRECT(DBLQH, GSN_LQH_WRITELOG_REQ, signal, 2);
    jamEntry();
    loopOpPtr.i= loopOpPtr.p->nextActiveOp;
    c_operation_pool.getPtr(loopOpPtr);
  } while (true);
}
/* ---------------------------------------------------------------- */
/* INITIALIZATION OF ONE CONNECTION RECORD TO PREPARE FOR NEXT OP.  */
/* ---------------------------------------------------------------- */
/** Reset an operation record to idle defaults so it can be reused. */
void Dbtup::initOpConnection(Operationrec* regOperPtr)
{
  set_tuple_state(regOperPtr, TUPLE_ALREADY_ABORTED);
  set_trans_state(regOperPtr, TRANS_IDLE);
  regOperPtr->currentAttrinbufLen= 0;
  regOperPtr->op_struct.op_type= ZREAD;
  regOperPtr->op_struct.m_disk_preallocated= 0;
  regOperPtr->op_struct.m_load_diskpage_on_commit= 0;
  regOperPtr->op_struct.m_wait_log_buffer= 0;
  regOperPtr->op_struct.in_active_list = false;
  regOperPtr->m_undo_buffer_space= 0;
}
/**
 * Lexicographic "greater than" for Local_key: compare page number first,
 * with the page index as tie-breaker.  Used to compare a rowid against an
 * LCP scan position in dealloc_tuple / commit_operation.
 */
static inline bool
operator>(const Local_key& key1, const Local_key& key2)
{
  if (key1.m_page_no != key2.m_page_no)
    return key1.m_page_no > key2.m_page_no;
  return key1.m_page_idx > key2.m_page_idx;
}
/**
 * Final commit step for a DELETE: free the disk part (if any), put the row
 * on the LCP keep list when a running LCP scan has not passed it yet, mark
 * the header FREED and stamp the row GCI.
 */
void
Dbtup::dealloc_tuple(Signal* signal,
Uint32 gci,
Page* page,
Tuple_header* ptr,
Operationrec* regOperPtr,
Fragrecord* regFragPtr,
Tablerec* regTabPtr)
{
Uint32 lcpScan_ptr_i= regFragPtr->m_lcp_scan_op;
Uint32 lcp_keep_list = regFragPtr->m_lcp_keep_list;
Uint32 bits = ptr->m_header_bits;
Uint32 extra_bits = Tuple_header::FREED;
if (bits & Tuple_header::DISK_PART)
{
jam();
Local_key disk;
memcpy(&disk, ptr->get_disk_ref_ptr(regTabPtr), sizeof(disk));
PagePtr tmpptr;
tmpptr.i = m_pgman.m_ptr.i;
tmpptr.p = reinterpret_cast<Page*>(m_pgman.m_ptr.p);
disk_page_free(signal, regTabPtr, regFragPtr,
&disk, tmpptr, gci);
}
/* If an LCP scan is active and has not yet reached this rowid, keep the
 * row alive for the scan (LCP_KEEP) instead of marking it FREED now. */
if (! (bits & (Tuple_header::LCP_SKIP | Tuple_header::ALLOC)) &&
lcpScan_ptr_i != RNIL)
{
jam();
ScanOpPtr scanOp;
c_scanOpPool.getPtr(scanOp, lcpScan_ptr_i);
Local_key rowid = regOperPtr->m_tuple_location;
Local_key scanpos = scanOp.p->m_scanPos.m_key;
rowid.m_page_no = page->frag_page_id;
if (rowid > scanpos)
{
jam();
extra_bits = Tuple_header::LCP_KEEP; // Note REMOVE FREE
ptr->m_operation_ptr_i = lcp_keep_list;
regFragPtr->m_lcp_keep_list = rowid.ref();
}
}
ptr->m_header_bits = bits | extra_bits;
/* Stamp the row GCI when the table keeps per-row GCI information. */
if (regTabPtr->m_bits & Tablerec::TR_RowGCI)
{
jam();
* ptr->get_mm_gci(regTabPtr) = gci;
}
}
/**
 * Final commit step for an INSERT/UPDATE: copy the committed copy-tuple
 * over the live row (fixed part, optional var part, optional disk part),
 * set LCP_SKIP for a running LCP scan where needed, clear the transient
 * header bits and stamp row GCI / checksum.
 */
void
Dbtup::commit_operation(Signal* signal,
Uint32 gci,
Tuple_header* tuple_ptr,
PagePtr pagePtr,
Operationrec* regOperPtr,
Fragrecord* regFragPtr,
Tablerec* regTabPtr)
{
ndbassert(regOperPtr->op_struct.op_type != ZDELETE);
Uint32 lcpScan_ptr_i= regFragPtr->m_lcp_scan_op;
Uint32 save= tuple_ptr->m_operation_ptr_i;
Uint32 bits= tuple_ptr->m_header_bits;
Tuple_header *disk_ptr= 0;
Tuple_header *copy= (Tuple_header*)
c_undo_buffer.get_ptr(&regOperPtr->m_copy_tuple_location);
Uint32 copy_bits= copy->m_header_bits;
Uint32 fixsize= regTabPtr->m_offsets[MM].m_fix_header_size;
Uint32 mm_vars= regTabPtr->m_attributes[MM].m_no_of_varsize;
if(mm_vars == 0)
{
jam();
/* Fixed-size-only layout: one memcpy installs the new version; the disk
 * image follows directly after the fixed part in the copy tuple. */
memcpy(tuple_ptr, copy, 4*fixsize);
disk_ptr= (Tuple_header*)(((Uint32*)copy)+fixsize);
}
else
{
jam();
/**
 * Var_part_ref is only stored in *allocated* tuple
 * so memcpy from copy, will over write it...
 * hence subtle copyout/assign...
 */
Local_key tmp;
Var_part_ref *ref= tuple_ptr->get_var_part_ref_ptr(regTabPtr);
ref->copyout(&tmp);
memcpy(tuple_ptr, copy, 4*fixsize);
ref->assign(&tmp);
PagePtr vpagePtr;
Uint32 *dst= get_ptr(&vpagePtr, *ref);
Var_page* vpagePtrP = (Var_page*)vpagePtr.p;
Uint32 *src= copy->get_end_of_fix_part_ptr(regTabPtr);
/* sz = (mm_vars+1) 16-bit offsets plus the var data length itself. */
Uint32 sz= ((mm_vars + 1) << 1) + (((Uint16*)src)[mm_vars]);
ndbassert(4*vpagePtrP->get_entry_len(tmp.m_page_idx) >= sz);
memcpy(dst, src, sz);
copy_bits |= Tuple_header::CHAINED_ROW;
/* If the new version is smaller, shrink the var-part entry and refresh
 * the free-space page list. */
if(copy_bits & Tuple_header::MM_SHRINK)
{
jam();
vpagePtrP->shrink_entry(tmp.m_page_idx, (sz + 3) >> 2);
update_free_page_list(regFragPtr, vpagePtr);
}
disk_ptr = (Tuple_header*)(((Uint32*)copy)+fixsize+((sz + 3) >> 2));
}
/* Install the disk part, writing an undo record first for updates of
 * already-allocated disk tuples. */
if (regTabPtr->m_no_of_disk_attributes &&
(copy_bits & Tuple_header::DISK_INLINE))
{
jam();
Local_key key;
memcpy(&key, copy->get_disk_ref_ptr(regTabPtr), sizeof(Local_key));
Uint32 logfile_group_id= regFragPtr->m_logfile_group_id;
PagePtr diskPagePtr = *(PagePtr*)&m_pgman.m_ptr;
ndbassert(diskPagePtr.p->m_page_no == key.m_page_no);
ndbassert(diskPagePtr.p->m_file_no == key.m_file_no);
Uint32 sz, *dst;
if(copy_bits & Tuple_header::DISK_ALLOC)
{
jam();
disk_page_alloc(signal, regTabPtr, regFragPtr, &key, diskPagePtr, gci);
}
if(regTabPtr->m_attributes[DD].m_no_of_varsize == 0)
{
jam();
sz= regTabPtr->m_offsets[DD].m_fix_header_size;
dst= ((Fix_page*)diskPagePtr.p)->get_ptr(key.m_page_idx, sz);
}
else
{
jam();
dst= ((Var_page*)diskPagePtr.p)->get_ptr(key.m_page_idx);
sz= ((Var_page*)diskPagePtr.p)->get_entry_len(key.m_page_idx);
}
if(! (copy_bits & Tuple_header::DISK_ALLOC))
{
jam();
disk_page_undo_update(diskPagePtr.p,
&key, dst, sz, gci, logfile_group_id);
}
memcpy(dst, disk_ptr, 4*sz);
memcpy(tuple_ptr->get_disk_ref_ptr(regTabPtr), &key, sizeof(Local_key));
ndbassert(! (disk_ptr->m_header_bits & Tuple_header::FREE));
copy_bits |= Tuple_header::DISK_PART;
}
/* If an LCP scan is running and has not yet reached this rowid, mark the
 * freshly committed version so the scan skips it. */
if(lcpScan_ptr_i != RNIL && (bits & Tuple_header::ALLOC))
{
jam();
ScanOpPtr scanOp;
c_scanOpPool.getPtr(scanOp, lcpScan_ptr_i);
Local_key rowid = regOperPtr->m_tuple_location;
Local_key scanpos = scanOp.p->m_scanPos.m_key;
rowid.m_page_no = pagePtr.p->frag_page_id;
if(rowid > scanpos)
{
jam();
copy_bits |= Tuple_header::LCP_SKIP;
}
}
/* Clear the transient per-transaction bits before publishing the header. */
Uint32 clear=
Tuple_header::ALLOC | Tuple_header::FREE |
Tuple_header::DISK_ALLOC | Tuple_header::DISK_INLINE |
Tuple_header::MM_SHRINK | Tuple_header::MM_GROWN;
copy_bits &= ~(Uint32)clear;
tuple_ptr->m_header_bits= copy_bits;
tuple_ptr->m_operation_ptr_i= save;
if (regTabPtr->m_bits & Tablerec::TR_RowGCI)
{
jam();
* tuple_ptr->get_mm_gci(regTabPtr) = gci;
}
if (regTabPtr->m_bits & Tablerec::TR_Checksum) {
jam();
setChecksum(tuple_ptr, regTabPtr);
}
}
/**
 * Page-cache callback: the disk page needed by a pending commit is now in
 * memory.  Rebuilds the TUP_COMMITREQ signal, marks the page dirty and
 * re-runs execTUP_COMMITREQ; confirms to LQH if the commit completes.
 */
void
Dbtup::disk_page_commit_callback(Signal* signal,
Uint32 opPtrI, Uint32 page_id)
{
Uint32 hash_value;
Uint32 gci;
OperationrecPtr regOperPtr;
jamEntry();
c_operation_pool.getPtr(regOperPtr, opPtrI);
c_lqh->get_op_info(regOperPtr.p->userpointer, &hash_value, &gci);
TupCommitReq * const tupCommitReq= (TupCommitReq *)signal->getDataPtr();
tupCommitReq->opPtr= opPtrI;
tupCommitReq->hashValue= hash_value;
tupCommitReq->gci= gci;
tupCommitReq->diskpage = page_id;
regOperPtr.p->op_struct.m_load_diskpage_on_commit= 0;
regOperPtr.p->m_commit_disk_callback_page= page_id;
m_global_page_pool.getPtr(m_pgman.m_ptr, page_id);
{
PagePtr tmp;
tmp.i = m_pgman.m_ptr.i;
tmp.p = reinterpret_cast<Page*>(m_pgman.m_ptr.p);
disk_page_set_dirty(tmp);
}
execTUP_COMMITREQ(signal);
/* theData[0] == 0 means the retried commit ran to completion. */
if(signal->theData[0] == 0)
{
jam();
c_lqh->tupcommit_conf_callback(signal, regOperPtr.p->userpointer);
}
}
/**
 * Log-buffer callback: undo-log buffer space is now available.  Rebuilds
 * the TUP_COMMITREQ signal and re-runs the commit; at this point it must
 * complete (asserted), so LQH is confirmed unconditionally.
 */
void
Dbtup::disk_page_log_buffer_callback(Signal* signal,
Uint32 opPtrI,
Uint32 unused)
{
Uint32 hash_value;
Uint32 gci;
OperationrecPtr regOperPtr;
jamEntry();
c_operation_pool.getPtr(regOperPtr, opPtrI);
c_lqh->get_op_info(regOperPtr.p->userpointer, &hash_value, &gci);
Uint32 page= regOperPtr.p->m_commit_disk_callback_page;
TupCommitReq * const tupCommitReq= (TupCommitReq *)signal->getDataPtr();
tupCommitReq->opPtr= opPtrI;
tupCommitReq->hashValue= hash_value;
tupCommitReq->gci= gci;
tupCommitReq->diskpage = page;
ndbassert(regOperPtr.p->op_struct.m_load_diskpage_on_commit == 0);
regOperPtr.p->op_struct.m_wait_log_buffer= 0;
m_global_page_pool.getPtr(m_pgman.m_ptr, page);
execTUP_COMMITREQ(signal);
ndbassert(signal->theData[0] == 0);
c_lqh->tupcommit_conf_callback(signal, regOperPtr.p->userpointer);
}
/**
 * Move to the first operation performed on this tuple by rewinding the
 * prevActiveOp chain.  Only called for out-of-order commits (the caller
 * checks !is_first_operation()); the printf/ndbout_c pair traces them.
 */
void
Dbtup::findFirstOp(OperationrecPtr & firstPtr)
{
jam();
printf("Detect out-of-order commit(%u) -> ", firstPtr.i);
ndbassert(!firstPtr.p->is_first_operation());
while(firstPtr.p->prevActiveOp != RNIL)
{
firstPtr.i = firstPtr.p->prevActiveOp;
c_operation_pool.getPtr(firstPtr);
}
ndbout_c("%u", firstPtr.i);
}
/* ----------------------------------------------------------------- */
/* --------------- COMMIT THIS PART OF A TRANSACTION --------------- */
/* ----------------------------------------------------------------- */
/**
 * TUP_COMMITREQ: commit one operation on a tuple.
 *
 * May suspend itself (returning with theData[0] == 1) while waiting for
 * the disk page or for undo-log buffer space; the page-cache/log-buffer
 * callbacks re-enter this function when the resource is available.
 * theData[0] == 0 on return means the commit completed.
 */
void Dbtup::execTUP_COMMITREQ(Signal* signal)
{
  FragrecordPtr regFragPtr;
  OperationrecPtr regOperPtr;
  TablerecPtr regTabPtr;
  KeyReqStruct req_struct;
  TransState trans_state;
  Uint32 no_of_fragrec, no_of_tablerec;
  TupCommitReq * const tupCommitReq= (TupCommitReq *)signal->getDataPtr();
  regOperPtr.i= tupCommitReq->opPtr;
  Uint32 hash_value= tupCommitReq->hashValue;
  Uint32 gci = tupCommitReq->gci;
  jamEntry();
  c_operation_pool.getPtr(regOperPtr);
  regFragPtr.i= regOperPtr.p->fragmentPtr;
  trans_state= get_trans_state(regOperPtr.p);
  no_of_fragrec= cnoOfFragrec;
  ndbrequire(trans_state == TRANS_STARTED);
  ptrCheckGuard(regFragPtr, no_of_fragrec, fragrecord);
  no_of_tablerec= cnoOfTablerec;
  regTabPtr.i= regFragPtr.p->fragTableId;
  /* (review) The original re-read hashValue/gci from the request here a
   * second time; nothing modifies the signal in between, so the redundant
   * loads were removed. */
  req_struct.signal= signal;
  req_struct.hash_value= hash_value;
  req_struct.gci= gci;
  regOperPtr.p->m_commit_disk_callback_page = tupCommitReq->diskpage;
#ifdef VM_TRACE
  if (tupCommitReq->diskpage == RNIL)
  {
    m_pgman.m_ptr.i = RNIL;
    m_pgman.m_ptr.p = 0;
    req_struct.m_disk_page_ptr.i = RNIL;
    req_struct.m_disk_page_ptr.p = 0;
  }
#endif
  ptrCheckGuard(regTabPtr, no_of_tablerec, tablerec);
  PagePtr page;
  Tuple_header* tuple_ptr= (Tuple_header*)
    get_ptr(&page, &regOperPtr.p->m_tuple_location, regTabPtr.p);
  /**
   * NOTE: This has to be run before potential time-slice when
   * waiting for disk, as otherwise the "other-ops" in a multi-op
   * commit might run while we're waiting for disk
   *
   */
  if (!regTabPtr.p->tuxCustomTriggers.isEmpty())
  {
    if(get_tuple_state(regOperPtr.p) == TUPLE_PREPARED)
    {
      jam();
      OperationrecPtr loopPtr = regOperPtr;
      if (unlikely(!regOperPtr.p->is_first_operation()))
      {
        findFirstOp(loopPtr);
      }
      /**
       * Execute all tux triggers at first commit
       * since previous tuple is otherwise removed...
       * (loopPtr already points at the first op, hence the initial goto.)
       */
      jam();
      goto first;
      while(loopPtr.i != RNIL)
      {
        c_operation_pool.getPtr(loopPtr);
    first:
        executeTuxCommitTriggers(signal,
                                 loopPtr.p,
                                 regFragPtr.p,
                                 regTabPtr.p);
        set_tuple_state(loopPtr.p, TUPLE_TO_BE_COMMITTED);
        loopPtr.i = loopPtr.p->nextActiveOp;
      }
    }
  }
  bool get_page = false;
  if(regOperPtr.p->op_struct.m_load_diskpage_on_commit)
  {
    jam();
    Page_cache_client::Request req;
    /**
     * Only last op on tuple needs "real" commit,
     * hence only this one should have m_load_diskpage_on_commit
     */
    ndbassert(tuple_ptr->m_operation_ptr_i == regOperPtr.i);
    /**
     * Check for page
     */
    if(!regOperPtr.p->m_copy_tuple_location.isNull())
    {
      jam();
      Tuple_header* tmp= (Tuple_header*)
        c_undo_buffer.get_ptr(&regOperPtr.p->m_copy_tuple_location);
      memcpy(&req.m_page,
             tmp->get_disk_ref_ptr(regTabPtr.p), sizeof(Local_key));
      if (unlikely(regOperPtr.p->op_struct.op_type == ZDELETE &&
                   tmp->m_header_bits & Tuple_header::DISK_ALLOC))
      {
        jam();
        /**
         * Insert+Delete within one transaction: the preallocated disk
         * page was never used, so return it and the reserved undo-log
         * space, then skip all disk handling.
         */
        regOperPtr.p->op_struct.m_load_diskpage_on_commit = 0;
        regOperPtr.p->op_struct.m_wait_log_buffer = 0;
        disk_page_abort_prealloc(signal, regFragPtr.p,
                                 &req.m_page, req.m_page.m_page_idx);
        c_lgman->free_log_space(regFragPtr.p->m_logfile_group_id,
                                regOperPtr.p->m_undo_buffer_space);
        goto skip_disk;
        /* (review) Unreachable statements that followed the goto above
         * (`if (0) ndbout_c(...)`, a jamEntry() and a duplicate
         * `goto skip_disk;`) were removed. */
      }
    }
    else
    {
      jam();
      // initial delete
      ndbassert(regOperPtr.p->op_struct.op_type == ZDELETE);
      memcpy(&req.m_page,
             tuple_ptr->get_disk_ref_ptr(regTabPtr.p), sizeof(Local_key));
      ndbassert(tuple_ptr->m_header_bits & Tuple_header::DISK_PART);
    }
    req.m_callback.m_callbackData= regOperPtr.i;
    req.m_callback.m_callbackFunction =
      safe_cast(&Dbtup::disk_page_commit_callback);
    /*
     * Consider commit to be correlated. Otherwise pk op + commit makes
     * the page hot. XXX move to TUP which knows better.
     */
    int flags= regOperPtr.p->op_struct.op_type |
      Page_cache_client::COMMIT_REQ | Page_cache_client::CORR_REQ;
    int res= m_pgman.get_page(signal, req, flags);
    switch(res){
    case 0:
      /**
       * Timeslice: page not yet in memory; disk_page_commit_callback
       * will re-enter this function when it is.
       */
      jam();
      signal->theData[0] = 1;
      return;
    case -1:
      ndbrequire("NOT YET IMPLEMENTED" == 0);
      break;
    default:
      jam();
    }
    get_page = true;
    {
      PagePtr tmpptr;
      tmpptr.i = m_pgman.m_ptr.i;
      tmpptr.p = reinterpret_cast<Page*>(m_pgman.m_ptr.p);
      disk_page_set_dirty(tmpptr);
    }
    regOperPtr.p->m_commit_disk_callback_page= res;
    regOperPtr.p->op_struct.m_load_diskpage_on_commit= 0;
  }
  if(regOperPtr.p->op_struct.m_wait_log_buffer)
  {
    jam();
    /**
     * Only last op on tuple needs "real" commit,
     * hence only this one should have m_wait_log_buffer
     */
    ndbassert(tuple_ptr->m_operation_ptr_i == regOperPtr.i);
    Callback cb;
    cb.m_callbackData= regOperPtr.i;
    cb.m_callbackFunction =
      safe_cast(&Dbtup::disk_page_log_buffer_callback);
    Uint32 sz= regOperPtr.p->m_undo_buffer_space;
    Logfile_client lgman(this, c_lgman, regFragPtr.p->m_logfile_group_id);
    int res= lgman.get_log_buffer(signal, sz, &cb);
    jamEntry();
    switch(res){
    case 0:
      /* Timeslice: disk_page_log_buffer_callback re-enters later. */
      jam();
      signal->theData[0] = 1;
      return;
    case -1:
      ndbrequire("NOT YET IMPLEMENTED" == 0);
      break;
    default:
      jam();
    }
  }
  assert(tuple_ptr);
skip_disk:
  req_struct.m_tuple_ptr = tuple_ptr;
  Uint32 nextOp = regOperPtr.p->nextActiveOp;
  Uint32 prevOp = regOperPtr.p->prevActiveOp;
  /**
   * The trigger code (which is shared between detached/imediate)
   * check op-list to check were to read before values from
   * detached triggers should always read from original tuple value
   * from before transaction start, not from any intermediate update
   *
   * Setting the op-list has this effect
   */
  regOperPtr.p->nextActiveOp = RNIL;
  regOperPtr.p->prevActiveOp = RNIL;
  if(tuple_ptr->m_operation_ptr_i == regOperPtr.i)
  {
    jam();
    /**
     * Perform "real" commit
     */
    Uint32 disk = regOperPtr.p->m_commit_disk_callback_page;
    set_change_mask_info(&req_struct, regOperPtr.p);
    checkDetachedTriggers(&req_struct, regOperPtr.p, regTabPtr.p,
                          disk != RNIL);
    tuple_ptr->m_operation_ptr_i = RNIL;
    if(regOperPtr.p->op_struct.op_type != ZDELETE)
    {
      jam();
      commit_operation(signal, gci, tuple_ptr, page,
                       regOperPtr.p, regFragPtr.p, regTabPtr.p);
    }
    else
    {
      jam();
      if (get_page)
        ndbassert(tuple_ptr->m_header_bits & Tuple_header::DISK_PART);
      dealloc_tuple(signal, gci, page.p, tuple_ptr,
                    regOperPtr.p, regFragPtr.p, regTabPtr.p);
    }
  }
  /* Unlink this operation from the tuple's active-operation chain. */
  if (nextOp != RNIL)
  {
    c_operation_pool.getPtr(nextOp)->prevActiveOp = prevOp;
  }
  if (prevOp != RNIL)
  {
    c_operation_pool.getPtr(prevOp)->nextActiveOp = nextOp;
  }
  if(!regOperPtr.p->m_copy_tuple_location.isNull())
  {
    jam();
    c_undo_buffer.free_copy_tuple(&regOperPtr.p->m_copy_tuple_location);
  }
  initOpConnection(regOperPtr.p);
  signal->theData[0] = 0;
}
/**
 * Fill req_struct->changeMask for detached triggers.  Only the
 * USE_SAVED_CHANGE_MASK state restores a precise per-column mask; the
 * remaining states currently all fall back to "all columns changed".
 */
void
Dbtup::set_change_mask_info(KeyReqStruct * const req_struct,
Operationrec * const regOperPtr)
{
ChangeMaskState state = get_change_mask_state(regOperPtr);
if (state == USE_SAVED_CHANGE_MASK) {
jam();
req_struct->changeMask.setWord(0, regOperPtr->saved_change_mask[0]);
req_struct->changeMask.setWord(1, regOperPtr->saved_change_mask[1]);
} else if (state == RECALCULATE_CHANGE_MASK) {
jam();
// Recompute change mask, for now set all bits
req_struct->changeMask.set();
} else if (state == SET_ALL_MASK) {
jam();
req_struct->changeMask.set();
} else {
jam();
ndbrequire(state == DELETE_CHANGES);
req_struct->changeMask.set();
}
}
/**
 * Build the combined change mask for a chain of UPDATE operations by
 * OR-ing their saved masks (walking prevActiveOp back to the oldest op).
 * If any operation requires recalculation or an all-columns mask, fall
 * back to setting every bit and return early.
 */
void
Dbtup::calculateChangeMask(Page* const pagePtr,
Tablerec* const regTabPtr,
KeyReqStruct * const req_struct)
{
OperationrecPtr loopOpPtr;
Uint32 saved_word1= 0;
Uint32 saved_word2= 0;
loopOpPtr.i= req_struct->m_tuple_ptr->m_operation_ptr_i;
do {
c_operation_pool.getPtr(loopOpPtr);
ndbrequire(loopOpPtr.p->op_struct.op_type == ZUPDATE);
ChangeMaskState change_mask= get_change_mask_state(loopOpPtr.p);
if (change_mask == USE_SAVED_CHANGE_MASK) {
jam();
saved_word1|= loopOpPtr.p->saved_change_mask[0];
saved_word2|= loopOpPtr.p->saved_change_mask[1];
} else if (change_mask == RECALCULATE_CHANGE_MASK) {
jam();
//Recompute change mask, for now set all bits
req_struct->changeMask.set();
return;
} else {
ndbrequire(change_mask == SET_ALL_MASK);
jam();
req_struct->changeMask.set();
return;
}
loopOpPtr.i= loopOpPtr.p->prevActiveOp;
} while (loopOpPtr.i != RNIL);
req_struct->changeMask.setWord(0, saved_word1);
req_struct->changeMask.setWord(1, saved_word2);
}
|
zoek1/unlock
|
smart-contracts/test/Lock/withdrawByAddress.js
|
<filename>smart-contracts/test/Lock/withdrawByAddress.js<gh_stars>0
// Truffle tests for Lock.withdraw(tokenAddress, amount) using an ERC-20 token.
const BigNumber = require('bignumber.js')
const deployLocks = require('../helpers/deployLocks')
const shouldFail = require('../helpers/shouldFail')
const unlockContract = artifacts.require('../Unlock.sol')
const TestErc20Token = artifacts.require('TestErc20Token.sol')
const getProxy = require('../helpers/proxy')
let unlock, lock
let token
contract('Lock / withdrawByAddress', accounts => {
  let owner = accounts[0]
  before(async () => {
    unlock = await getProxy(unlockContract)
    const locks = await deployLocks(unlock, owner)
    lock = locks['OWNED']
    // Put some ERC-20 tokens into the contract
    token = await TestErc20Token.new({ from: owner })
    // TODO: mint or transfer to a contract throws an error in Truffle
    // however the tx does mine. Not sure what's causing this yet..
    //await token.mint(lock.address, 42000)
  })
  // NOTE: suite is disabled (describe.skip) until the minting issue above is
  // resolved — without the mint the lock holds no tokens to withdraw.
  describe.skip('when the owner withdraws funds for a specific token', () => {
    let ownerBalance
    let contractBalance
    before(async () => {
      // Snapshot balances, then withdraw everything (amount 0 = full balance).
      ownerBalance = new BigNumber(await token.balanceOf(owner))
      contractBalance = new BigNumber(await token.balanceOf(lock.address))
      await lock.withdraw(token.address, 0, {
        from: owner,
      })
    })
    it("should set the lock's balance to 0", async () => {
      assert.equal(await token.balanceOf(lock.address), 0)
    })
    it("should increase the owner's balance with the funds from the lock", async () => {
      const balance = new BigNumber(await token.balanceOf(owner))
      assert.equal(
        balance.toString(),
        ownerBalance.plus(contractBalance).toString()
      )
    })
    it('should fail if there is nothing left to withdraw', async () => {
      await shouldFail(
        lock.withdraw(token.address, 0, {
          from: owner,
        }),
        'NOT_ENOUGH_FUNDS'
      )
    })
  })
})
|
angelopassaro/Hacktoberfest-1
|
python/text_classification.py
|
# Keyword-based sentence browser: extracts text from a Word document,
# pulls keywords with RAKE, and lets the user browse matching sentences
# in a Tk dropdown GUI.
import re
import pandas as pd
import docx2txt
import RAKE
import datetime
from tkinter import *

# Extract raw text and split into one candidate sentence per line,
# dropping empty lines.
text = docx2txt.process("sample.docx")
sentences = re.split('\n',text)
dataset_sentences=pd.DataFrame(sentences,columns=["sentences"])
null_sentences=dataset_sentences["sentences"]!=''
dataset_sentences=dataset_sentences[null_sentences]
# Strip leading list numbering ("1. ", "23. ", ...) from each sentence.
final_sentence = []
for sent in dataset_sentences["sentences"]:
    final_sentence.append(sent.lstrip('0123456789. '))
# RAKE keyword extraction; keep only keywords scoring above 2.
stopwords="stopwords.txt"
rake_object = RAKE.Rake(stopwords)
keywords_freq=rake_object.run(text)
keywords = [i[0] for i in keywords_freq if i[1]>2]
keywords.sort()
# Timestamp string used to name this session's output file.
date=datetime.datetime.now()
date_name=str(date.year)+str(date.strftime("%m"))+str(date.strftime("%d"))+str(date.strftime("%H"))+str(date.strftime("%M"))+str(date.strftime("%S"))+str(date.strftime("%f"))
# --- Tk GUI: a dropdown of keywords plus two labels for the results. ---
root = Tk()
root.title("Search and Display")
mainframe = Frame(root)
mainframe.grid(column=0,row=0, sticky=(N,W,E,S) )
mainframe.columnconfigure(0, weight = 1)
mainframe.rowconfigure(0, weight = 1)
mainframe.pack(pady = 100, padx = 100)
tkvar = StringVar(root)
tkvar.set('Select a Keyword') # set the default option
popupMenu = OptionMenu(mainframe, tkvar, *keywords)
Label(mainframe, text="Choose a Keyword",justify=LEFT).grid(row = 1, column = 1)
popupMenu.grid(row = 2, column =1)
# xxx shows the selected keyword; yyy shows the matching sentences.
xxx=Label(text="")
xxx.pack(side='top')
yyy=Label(text="")
yyy.pack(side='top')
def change_dropdown(*args):
    """Dropdown callback: show sentences containing the selected keyword.

    Updates the two result labels and appends every match to this
    session's output file.  The keyword is regex-escaped before the
    search so keywords containing regex metacharacters (e.g. ``c++``)
    can no longer crash or distort the match (bug fix: the original
    interpolated the raw keyword into the pattern).
    """
    keyword = tkvar.get()
    xxx.configure(text=keyword, justify=LEFT, wraplength=1000)
    # Compile once; search() reproduces the original ``(.*)kw(.*)`` match.
    pattern = re.compile(re.escape(keyword))
    matches = [line for line in final_sentence if pattern.search(line.lower())]
    temp = "".join("\n\n" + line for line in matches)
    if matches:
        # Open the log file once per callback instead of once per line.
        with open("output_" + date_name + ".txt", 'a') as text_file:
            for line in matches:
                text_file.write(keyword)
                text_file.write(" : \n")
                text_file.write(line)
                text_file.write("\n\n")
    yyy.configure(text=temp, justify=LEFT, wraplength=1000)
# Fire the callback whenever the dropdown selection changes.
tkvar.trace('w', change_dropdown)
# Open the window at full screen size and enter the Tk event loop.
width, height = root.winfo_screenwidth(), root.winfo_screenheight()
root.geometry('%dx%d+0+0' % (width,height))
root.mainloop()
|
Waiviogit/waivio
|
src/client/object/ObjectGallery/GalleryAlbum.js
|
import { get, max, maxBy } from 'lodash';
import React from 'react';
import PropTypes from 'prop-types';
import { Card } from 'antd';
import { getImagePath } from '../../helpers/image';
import DEFAULTS from '../../object/const/defaultValues';
import './GalleryAlbum.less';
const GalleryAlbum = ({ album }) => {
const filterItems = album.items;
const albumItem = max(filterItems, item => item.weight);
const getRelatedAlbumCount = item => get(item, 'count');
const getAlbumCount = filterItems ? filterItems.length : 0;
const albumCount = getRelatedAlbumCount(album) ? getRelatedAlbumCount(album) : getAlbumCount;
const imagePath = albumItem
? getImagePath(album, albumItem.body, 'preview')
: DEFAULTS.ALBUM_COVER;
return (
<div className="GalleryAlbum">
<Card hoverable cover={<img alt="example" src={imagePath} className="GalleryAlbum__image" />}>
<Card.Meta title={`${album.body} (${albumCount})`} />
</Card>
</div>
);
};
GalleryAlbum.propTypes = {
album: PropTypes.shape().isRequired,
};
export default GalleryAlbum;
|
CarbonStack/carbon-stack
|
components/issues/edit/EditIssueForm.js
|
<reponame>CarbonStack/carbon-stack
import React from 'react'
import {
sansSerifFontFamily,
errorColor
} from '../../../lib/styles/variables'
import MarkdownEditor from '../../shared/MarkdownEditor'
import Spinner from '../../shared/Spinner'
import DiffEditor from '../../shared/DiffEditor'
// Controlled form for editing an existing issue, with an optional
// side-by-side diff against the issue's latest commit.
class EditIssueForm extends React.PureComponent {
  // Push the current title/content input values into the form state.
  onChange = () => {
    const {
      actions
    } = this.props
    actions.updateForm({
      title: this.title.value,
      content: this.content.value
    })
  }
  // Like onChange, but the content comes from the diff editor pane.
  onDiffViewChange = () => {
    const {
      actions
    } = this.props
    actions.updateForm({
      title: this.title.value,
      content: this.diffView.value
    })
  }
  // Cancel: navigate back in browser history without saving.
  onBackButtonClick = () => {
    window.history.back()
  }
  render () {
    const {
      actions,
      issue,
      form,
      isSubmitting,
      error,
      isDiffEditorOpen
    } = this.props
    return (
      <div className='root'>
        <div className='title'>
          <input
            className='title-input'
            ref={title => (this.title = title)}
            type='text'
            value={form.title}
            placeholder={'What\'s up? (title)'}
            onChange={this.onChange}
          />
        </div>
        <div className='content'>
          <MarkdownEditor
            ref={content => (this.content = content)}
            value={form.content}
            placeholder='Describe your issue! (Markdown supported)'
            onChange={this.onChange}
          />
        </div>
        <div className='control'>
          {/* "primary" styling indicates the diff pane is open. */}
          <button
            className={isDiffEditorOpen
              ? 'primary'
              : ''
            }
            onClick={actions.toggleDiffEditor}
          >
            Diff
          </button>
          <p className='error'>
            {error != null && error.message}
          </p>
          <button
            onClick={this.onBackButtonClick}
          >Cancel</button>
          {/*
          FIXME: Uncomment after PR implemented
          <button
            className='primary'
            disabled={isSubmitting}
            onClick={actions.requestCreateIssue}
          >
            {isSubmitting && <Spinner />} Submit as a PR
          </button> */}
          <button
            className='primary'
            disabled={isSubmitting}
            onClick={actions.requestUpdateIssue}
          >
            {isSubmitting && <Spinner />} Update Now
          </button>
        </div>
        {isDiffEditorOpen &&
          <div className='diff'>
            <DiffEditor
              ref={diffView => (this.diffView = diffView)}
              originalValue={issue.latestCommit.content}
              value={form.content}
              onChange={this.onDiffViewChange}
            />
          </div>
        }
        <style jsx>{`
          .root {
            margin-top: 25px;
          }
          .title {
            display: flex;
            margin-bottom: 15px;
          }
          .title .title-input {
            font-size: 36px;
            height: 45px;
            flex: 1;
            min-width: 0;
            border-radius: 0;
            border-width: 0 0 1px;
          }
          .control {
            display: flex;
            margin: 0.25em 0;
          }
          .control .error {
            flex: 1;
            text-align: right;
            color: ${errorColor};
          }
          .diff {
            margin-top: 15px;
          }
        `}</style>
      </div>
    )
  }
}
export default EditIssueForm
|
mihai-constantin/ACS
|
PAJ/effective-java/src/com/luxoft/effectivejava/module01/item02/javabeans/Customer.java
|
<gh_stars>0
package com.luxoft.effectivejava.module01.item02.javabeans;
import java.util.Date;
/**
 * Example of the JavaBeans pattern (Effective Java, Item 2): required
 * fields are supplied through the constructor while optional ones are set
 * afterwards through setters, so an instance may be observed in a
 * partially-initialized state.
 */
public class Customer {

    private Gender gender;
    private String surname;
    private String firstName;
    private String middleName;
    private Date birthday;
    private Date becomeCustomer;

    /**
     * Creates a customer with the required fields.
     *
     * @param gender    gender used for the greeting in {@link #toString()}
     * @param surname   the customer's family name
     * @param firstName the customer's given name
     */
    public Customer(Gender gender, String surname, String firstName) {
        this.gender = gender;
        this.surname = surname;
        this.firstName = firstName;
    }

    public Gender getGender() {
        return gender;
    }

    public void setGender(Gender gender) {
        this.gender = gender;
    }

    public String getSurname() {
        return surname;
    }

    public void setSurname(String surname) {
        this.surname = surname;
    }

    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    public String getMiddleName() {
        return middleName;
    }

    public void setMiddleName(String middleName) {
        this.middleName = middleName;
    }

    public Date getBirthday() {
        return birthday;
    }

    public void setBirthday(Date birthday) {
        this.birthday = birthday;
    }

    public Date getBecomeCustomer() {
        return becomeCustomer;
    }

    /** Sets the date on which this person became a customer. */
    public void setBecomeCustomer(Date becomeCustomer) {
        this.becomeCustomer = becomeCustomer;
    }

    /**
     * @deprecated misnamed setter kept for backward compatibility with
     *             existing callers; use {@link #setBecomeCustomer(Date)}.
     */
    @Deprecated
    public void setBecomeClient(Date becomeCustomer) {
        setBecomeCustomer(becomeCustomer);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(gender.getGreeting())
                .append(" ")
                .append(surname)
                .append(", ")
                .append(firstName);
        if (null != becomeCustomer) {
            sb.append(" has become a customer on: ")
                    .append(becomeCustomer);
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        Customer firstClient = new Customer(Gender.MALE, "Smith", "John");
        firstClient.setBecomeClient(new Date());
        System.out.println(firstClient);
    }
}
|
karan/keys
|
user/store_test.go
|
package user_test
import (
"bytes"
"context"
"encoding/json"
"testing"
"github.com/keys-pub/keys"
"github.com/keys-pub/keys/ds"
"github.com/keys-pub/keys/user"
"github.com/keys-pub/keys/util"
"github.com/pkg/errors"
"github.com/stretchr/testify/require"
)
// testStore builds a user.Store backed by the given document store,
// sigchain store and mock requestor, failing the test immediately on error.
func testStore(t *testing.T, dst ds.DocumentStore, scs keys.SigchainStore, req *util.MockRequestor, clock *clock) *user.Store {
	store, err := user.NewStore(dst, scs, req, clock.Now)
	require.NoError(t, err)
	return store
}
// TestNewUserForTwitterSigning verifies that a signed twitter user
// statement has a stable serialization that fits in a tweet (280-char
// limit) and round-trips through Verify.
func TestNewUserForTwitterSigning(t *testing.T) {
	// Deterministic key from a fixed seed so the signed message is stable.
	sk := keys.NewEdX25519KeyFromSeed(keys.Bytes32(bytes.Repeat([]byte{0x01}, 32)))
	req := util.NewMockRequestor()
	clock := newClock()
	dst := ds.NewMem()
	scs := keys.NewSigchainStore(dst)
	ust := testStore(t, dst, scs, req, clock)
	usr, err := user.NewUserForSigning(ust, sk.ID(), "twitter", "123456789012345")
	require.NoError(t, err)
	msg, err := usr.Sign(sk)
	require.NoError(t, err)
	expected := `BEGIN MESSAGE.
GaZybOsIjCQ9nU5 QoXI1pS28UWypBb HHSXegeFk1M6huT W5rwWMtO4Gcx4u3
Gjbya7YnsVfnAVz xvTtqmINcMmTCKq 6Xr2MZHgg4UNRDb Zy2loGoGN3Mvxd4
r7FIwpZOJPE1JEq D2gGjkgLByR9CFG 2aCgRgZZwl5UAa4 6bmBzjEOhmsiW0K
TDXulMojfPebRMl JBdGc81U8wUvF0I 1LUOo5fLogY3MDW UqhLx.
END MESSAGE.`
	require.Equal(t, expected, msg)
	// Twitter's hard limit on tweet length.
	require.False(t, len(msg) > 280)
	require.Equal(t, 274, len(msg))
	out, err := user.Verify(msg, sk.ID(), usr)
	require.NoError(t, err)
	require.Equal(t, usr.Service, out.Service)
	require.Equal(t, usr.Name, out.Name)
}
// TestNewUserMarshal checks the compact JSON wire format of a user
// statement and that it round-trips through Unmarshal. A signing-only
// user (no URL/seq) must serialize without the "u"/"sq" fields.
// NOTE(review): the `<KEY>` token below looks like a dataset redaction
// placeholder for a kex... key string — confirm against the upstream repo.
func TestNewUserMarshal(t *testing.T) {
	sk := keys.NewEdX25519KeyFromSeed(keys.Bytes32(bytes.Repeat([]byte{0x01}, 32)))
	req := util.NewMockRequestor()
	clock := newClock()
	dst := ds.NewMem()
	scs := keys.NewSigchainStore(dst)
	ust := testStore(t, dst, scs, req, clock)
	usr, err := user.New(ust, sk.ID(), "twitter", "123456789012345", "https://twitter.com/123456789012345/status/1234567890", 1)
	require.NoError(t, err)
	b, err := json.Marshal(usr)
	require.NoError(t, err)
	require.Equal(t, `{"k":"<KEY>","n":"123456789012345","sq":1,"sr":"twitter","u":"https://twitter.com/123456789012345/status/1234567890"}`, string(b))
	var usrOut user.User
	err = json.Unmarshal(b, &usrOut)
	require.NoError(t, err)
	require.Equal(t, usr.Name, usrOut.Name)
	require.Equal(t, usr.Seq, usrOut.Seq)
	require.Equal(t, usr.KID, usrOut.KID)
	require.Equal(t, usr.Service, usrOut.Service)
	require.Equal(t, usr.URL, usrOut.URL)
	// Signing-only user: no URL or sequence number in the output.
	usr, err = user.NewUserForSigning(ust, sk.ID(), "twitter", "123456789012345")
	require.NoError(t, err)
	b, err = json.Marshal(usr)
	require.NoError(t, err)
	require.Equal(t, `{"k":"kex132yw8ht5p8cetl2jmvknewjawt9xwzdlrk2pyxlnwjyqrdq0dawqqph077","n":"123456789012345","sr":"twitter"}`, string(b))
}
// TestResultGithub runs the full happy path for a github user claim:
// sign a statement, publish it into a sigchain, serve the gist via the
// mock requestor, then check Update/Get/User/KIDs all report the user
// as verified (StatusOK) with the mock clock's timestamps.
func TestResultGithub(t *testing.T) {
	// SetLogger(NewLogger(DebugLevel))
	sk := keys.NewEdX25519KeyFromSeed(keys.Bytes32(bytes.Repeat([]byte{0x01}, 32)))
	clock := newClock()
	req := util.NewMockRequestor()
	dst := ds.NewMem()
	scs := keys.NewSigchainStore(dst)
	ust := testStore(t, dst, scs, req, clock)
	// Canned gist content stands in for the real github fetch.
	req.SetResponse("https://gist.github.com/alice/70281cc427850c272a8574af4d8564d9", testdataBytes(t, "testdata/github/70281cc427850c272a8574af4d8564d9"))
	usr, err := user.NewUserForSigning(ust, sk.ID(), "github", "alice")
	require.NoError(t, err)
	msg, err := usr.Sign(sk)
	require.NoError(t, err)
	t.Logf(msg)
	_, err = user.Verify(msg, sk.ID(), usr)
	require.NoError(t, err)
	sc := keys.NewSigchain(sk.ID())
	stu, err := user.New(ust, sk.ID(), "github", "alice", "https://gist.github.com/alice/70281cc427850c272a8574af4d8564d9", sc.LastSeq()+1)
	require.NoError(t, err)
	st, err := user.NewUserSigchainStatement(sc, stu, sk, clock.Now())
	require.NoError(t, err)
	err = sc.Add(st)
	require.NoError(t, err)
	err = scs.SaveSigchain(sc)
	require.NoError(t, err)
	// A second user statement on the same sigchain must be rejected.
	_, err = user.NewUserSigchainStatement(sc, stu, sk, clock.Now())
	require.EqualError(t, err, "user set in sigchain already")
	result, err := ust.Update(context.TODO(), sk.ID())
	require.NoError(t, err)
	require.NotNil(t, result)
	require.Equal(t, user.StatusOK, result.Status)
	require.Equal(t, "github", result.User.Service)
	require.Equal(t, "alice", result.User.Name)
	require.Equal(t, int64(1234567890004), result.VerifiedAt)
	require.Equal(t, int64(1234567890003), result.Timestamp)
	result, err = ust.Get(context.TODO(), sk.ID())
	require.NoError(t, err)
	require.Equal(t, "github", result.User.Service)
	require.Equal(t, "alice", result.User.Name)
	// Lookup by user label ("<EMAIL>@github" appears to be a dataset
	// redaction of "alice@github" — TODO confirm against upstream).
	result, err = ust.User(context.TODO(), "<EMAIL>@github")
	require.NoError(t, err)
	require.Equal(t, "github", result.User.Service)
	require.Equal(t, "alice", result.User.Name)
	kids, err := ust.KIDs(context.TODO())
	require.NoError(t, err)
	require.Equal(t, 1, len(kids))
	require.Equal(t, keys.ID("<KEY>"), kids[0])
}
// TestResultGithubWrongName signs a statement for "alice2" but publishes
// a gist claiming "alice"; the sigchain check must flag the statement as
// invalid with a name-mismatch error.
func TestResultGithubWrongName(t *testing.T) {
	sk := keys.NewEdX25519KeyFromSeed(keys.Bytes32(bytes.Repeat([]byte{0x01}, 32)))
	clock := newClock()
	req := util.NewMockRequestor()
	dst := ds.NewMem()
	scs := keys.NewSigchainStore(dst)
	ust := testStore(t, dst, scs, req, clock)
	usr, err := user.NewUserForSigning(ust, sk.ID(), "github", "alice2")
	require.NoError(t, err)
	msg, err := usr.Sign(sk)
	require.NoError(t, err)
	require.NotEqual(t, "", msg)
	t.Logf(msg)
	sc := keys.NewSigchain(sk.ID())
	req.SetResponse("https://gist.github.com/alice/a7b1370270e2672d4ae88fa5d0c6ade7", testdataBytes(t, "testdata/github/a7b1370270e2672d4ae88fa5d0c6ade7"))
	// The published statement claims "alice", mismatching the signed name.
	user2, err := user.New(ust, sk.ID(), "github", "alice", "https://gist.github.com/alice/a7b1370270e2672d4ae88fa5d0c6ade7", 1)
	require.NoError(t, err)
	b2, err := json.Marshal(user2)
	require.NoError(t, err)
	st2, err := keys.NewSigchainStatement(sc, b2, sk, "user", clock.Now())
	require.NoError(t, err)
	err = sc.Add(st2)
	require.NoError(t, err)
	result, err := ust.CheckSigchain(context.TODO(), sc)
	require.NoError(t, err)
	require.NotNil(t, result)
	require.Equal(t, user.StatusStatementInvalid, result.Status)
	require.Equal(t, result.Err, "name mismatch alice != alice2")
}
// TestResultGithubWrongService signs a statement for service "github2"
// but publishes a "github" claim; the sigchain check must report a
// service mismatch. (The `<PASSWORD>` tokens in the URLs below look like
// dataset redaction artifacts — confirm against the upstream repo.)
func TestResultGithubWrongService(t *testing.T) {
	sk := keys.NewEdX25519KeyFromSeed(keys.Bytes32(bytes.Repeat([]byte{0x01}, 32)))
	clock := newClock()
	req := util.NewMockRequestor()
	dst := ds.NewMem()
	scs := keys.NewSigchainStore(dst)
	ust := testStore(t, dst, scs, req, clock)
	sc := keys.NewSigchain(sk.ID())
	// Signed with a bogus service name to trigger the mismatch.
	muser := &user.User{KID: sk.ID(), Service: "github2", Name: "gabriel"}
	msg, err := muser.Sign(sk)
	require.NoError(t, err)
	t.Logf(msg)
	req.SetResponse("https://gist.github.com/alice/bd<PASSWORD>688cbcc0a65fa0890d76", testdataBytes(t, "testdata/github/bd679134acba688cbcc0a65fa0890d76"))
	usr, err := user.New(ust, sk.ID(), "github", "alice", "https://gist.github.com/alice/bd679<PASSWORD>ac<PASSWORD>88cbcc0a65fa0890d76", 1)
	require.NoError(t, err)
	b, err := json.Marshal(usr)
	require.NoError(t, err)
	st, err := keys.NewSigchainStatement(sc, b, sk, "user", clock.Now())
	require.NoError(t, err)
	err = sc.Add(st)
	require.NoError(t, err)
	result, err := ust.CheckSigchain(context.TODO(), sc)
	require.NoError(t, err)
	require.NotNil(t, result)
	require.Equal(t, user.StatusStatementInvalid, result.Status)
	require.Equal(t, result.Err, "service mismatch github != github2")
}
// TestResultTwitter runs the happy path for a twitter user claim:
// sign, add to a sigchain, serve the tweet through the mock requestor,
// and verify Update reports StatusOK with the mock clock's timestamps.
func TestResultTwitter(t *testing.T) {
	sk := keys.NewEdX25519KeyFromSeed(keys.Bytes32(bytes.Repeat([]byte{0x01}, 32)))
	clock := newClock()
	req := util.NewMockRequestor()
	dst := ds.NewMem()
	scs := keys.NewSigchainStore(dst)
	ust := testStore(t, dst, scs, req, clock)
	usr, err := user.NewUserForSigning(ust, sk.ID(), "twitter", "bob")
	require.NoError(t, err)
	msg, err := usr.Sign(sk)
	require.NoError(t, err)
	t.Logf(msg)
	sc := keys.NewSigchain(sk.ID())
	stu, err := user.New(ust, sk.ID(), "twitter", "bob", "https://twitter.com/bob/status/1205589994380783616", sc.LastSeq()+1)
	require.NoError(t, err)
	st, err := user.NewUserSigchainStatement(sc, stu, sk, clock.Now())
	require.NoError(t, err)
	err = sc.Add(st)
	require.NoError(t, err)
	err = scs.SaveSigchain(sc)
	require.NoError(t, err)
	// Only one user statement is allowed per sigchain.
	_, err = user.NewUserSigchainStatement(sc, stu, sk, clock.Now())
	require.EqualError(t, err, "user set in sigchain already")
	req.SetResponse("https://twitter.com/bob/status/1205589994380783616", testdataBytes(t, "testdata/twitter/1205589994380783616"))
	result, err := ust.Update(context.TODO(), sk.ID())
	require.NoError(t, err)
	require.NotNil(t, result)
	require.NotNil(t, result.User)
	require.Equal(t, user.StatusOK, result.Status)
	require.Equal(t, "twitter", result.User.Service)
	require.Equal(t, "bob", result.User.Name)
	require.Equal(t, int64(1234567890004), result.VerifiedAt)
	require.Equal(t, int64(1234567890003), result.Timestamp)
}
// TestResultReddit runs the happy path for a reddit user claim.  Note the
// statement URL uses www.reddit.com/... while the fetched (mocked) URL is
// the reddit.com/....json API form.
func TestResultReddit(t *testing.T) {
	// keys.SetLogger(keys.NewLogger(keys.DebugLevel))
	// services.SetLogger(keys.NewLogger(keys.DebugLevel))
	sk := keys.NewEdX25519KeyFromSeed(keys.Bytes32(bytes.Repeat([]byte{0x01}, 32)))
	clock := newClock()
	req := util.NewMockRequestor()
	dst := ds.NewMem()
	scs := keys.NewSigchainStore(dst)
	ust := testStore(t, dst, scs, req, clock)
	usr, err := user.NewUserForSigning(ust, sk.ID(), "reddit", "charlie")
	require.NoError(t, err)
	msg, err := usr.Sign(sk)
	require.NoError(t, err)
	t.Logf(msg)
	sc := keys.NewSigchain(sk.ID())
	stu, err := user.New(ust, sk.ID(), "reddit", "charlie", "https://www.reddit.com/r/keyspubmsgs/comments/f8g9vd/charlie/", sc.LastSeq()+1)
	require.NoError(t, err)
	st, err := user.NewUserSigchainStatement(sc, stu, sk, clock.Now())
	require.NoError(t, err)
	err = sc.Add(st)
	require.NoError(t, err)
	err = scs.SaveSigchain(sc)
	require.NoError(t, err)
	// Only one user statement is allowed per sigchain.
	_, err = user.NewUserSigchainStatement(sc, stu, sk, clock.Now())
	require.EqualError(t, err, "user set in sigchain already")
	req.SetResponse("https://reddit.com/r/keyspubmsgs/comments/f8g9vd/charlie.json", testdataBytes(t, "testdata/reddit/charlie.json"))
	result, err := ust.Update(context.TODO(), sk.ID())
	require.NoError(t, err)
	require.NotNil(t, result)
	require.NotNil(t, result.User)
	require.Equal(t, user.StatusOK, result.Status)
	require.Equal(t, "reddit", result.User.Service)
	require.Equal(t, "charlie", result.User.Name)
	require.Equal(t, int64(1234567890004), result.VerifiedAt)
	require.Equal(t, int64(1234567890003), result.Timestamp)
}
// TestUserUnverified sets up a user statement whose URL fetch errors out
// (via req.SetError), intended to exercise the unverified path.  The
// actual Update assertions are still commented out — see the TODO below.
func TestUserUnverified(t *testing.T) {
	sk := keys.NewEdX25519KeyFromSeed(keys.Bytes32(bytes.Repeat([]byte{0x01}, 32)))
	clock := newClock()
	req := util.NewMockRequestor()
	dst := ds.NewMem()
	scs := keys.NewSigchainStore(dst)
	ust := testStore(t, dst, scs, req, clock)
	sc := keys.NewSigchain(sk.ID())
	stu, err := user.New(ust, sk.ID(), "twitter", "bob", "https://twitter.com/bob/status/1", sc.LastSeq()+1)
	require.NoError(t, err)
	st, err := user.NewUserSigchainStatement(sc, stu, sk, clock.Now())
	require.NoError(t, err)
	err = sc.Add(st)
	require.NoError(t, err)
	// Make the claim URL fail to load, so verification cannot succeed.
	req.SetError("https://twitter.com/bob/status/1", errors.Errorf("testing"))
	require.NoError(t, err)
	// users, err := ust.Update(context.TODO(), sk.ID())
	// require.NoError(t, err)
	// t.Logf("users: %+v", users)
	// TODO: Finish test
}
// TestCheckNoUsers verifies that checking a sigchain with no user
// statements — and updating a key that has no sigchain at all — both
// yield a nil result without error.
func TestCheckNoUsers(t *testing.T) {
	req := util.NewMockRequestor()
	clock := newClock()
	dst := ds.NewMem()
	scs := keys.NewSigchainStore(dst)
	ust := testStore(t, dst, scs, req, clock)

	sk := keys.NewEdX25519KeyFromSeed(keys.Bytes32(bytes.Repeat([]byte{0x01}, 32)))
	sc := keys.NewSigchain(sk.ID())
	res, err := ust.CheckSigchain(context.TODO(), sc)
	require.NoError(t, err)
	require.Nil(t, res)

	// A freshly generated key has no sigchain, so Update finds nothing.
	rk := keys.GenerateEdX25519Key()
	res, err = ust.Update(context.TODO(), rk.ID())
	require.NoError(t, err)
	require.Nil(t, res)
}
// TestVerify signs a user statement and checks that Verify recovers the
// same name, service and key id — both without and with an expected user.
func TestVerify(t *testing.T) {
	sk := keys.NewEdX25519KeyFromSeed(keys.Bytes32(bytes.Repeat([]byte{0x01}, 32)))
	req := util.NewMockRequestor()
	clock := newClock()
	dst := ds.NewMem()
	scs := keys.NewSigchainStore(dst)
	ust := testStore(t, dst, scs, req, clock)

	usr, err := user.NewUserForSigning(ust, sk.ID(), "github", "gabriel")
	require.NoError(t, err)
	require.NotNil(t, usr)

	msg, err := usr.Sign(sk)
	require.NoError(t, err)

	// Verify without an expected user: fields come from the message itself.
	out, err := user.Verify(msg, sk.ID(), nil)
	require.NoError(t, err)
	require.Equal(t, "gabriel", out.Name)
	require.Equal(t, "github", out.Service)
	require.Equal(t, sk.ID(), out.KID)

	// Verify again against the recovered user.
	_, err = user.Verify(msg, sk.ID(), out)
	require.NoError(t, err)
}
// TestNewUser exercises user.New's validation: host, scheme, path/name
// agreement, known service names, and lowercase-alphanumeric name rules.
func TestNewUser(t *testing.T) {
	sk := keys.NewEdX25519KeyFromSeed(keys.Bytes32(bytes.Repeat([]byte{0x01}, 32)))
	clock := newClock()
	dst := ds.NewMem()
	scs := keys.NewSigchainStore(dst)
	req := util.NewMockRequestor()
	ust := testStore(t, dst, scs, req, clock)
	// Valid github gist URL.
	u, uerr := user.New(ust, sk.ID(), "github", "gabriel", "https://gist.github.com/gabriel/deadbeef", 1)
	require.NoError(t, uerr)
	require.NotNil(t, u)
	// Misspelled host.
	u2, uerr := user.New(ust, sk.ID(), "github", "gabriel", "https://gist.githb.com/gabriel/deadbeef", 1)
	require.EqualError(t, uerr, "invalid host for url https://gist.githb.com/gabriel/deadbeef")
	require.Nil(t, u2)
	// Plain http is rejected.
	u3, uerr := user.New(ust, sk.ID(), "github", "gabriel", "http://gist.github.com/gabriel/deadbeef", 1)
	require.EqualError(t, uerr, "invalid scheme for url http://gist.github.com/gabriel/deadbeef")
	require.Nil(t, u3)
	// URL path user doesn't match the claimed name.
	u4, uerr := user.New(ust, sk.ID(), "github", "gabriel", "https://gist.github.com/gabril/deadbeef", 1)
	require.EqualError(t, uerr, "path invalid (name mismatch) gabril != gabriel")
	require.Nil(t, u4)
	// Gist URL missing the gist id component.
	u5, uerr := user.New(ust, sk.ID(), "github", "gabriel", "https://gist.github.com/gabriel", 1)
	require.EqualError(t, uerr, "path invalid [gabriel] for url https://gist.github.com/gabriel")
	require.Nil(t, u5)
	u6, uerr := user.New(ust, sk.ID(), "github", "gab", "https://gist.github.com/gabriel/deadbeef", 1)
	require.EqualError(t, uerr, "path invalid (name mismatch) gabriel != gab")
	require.Nil(t, u6)
	// Unknown service name.
	u7, uerr := user.New(ust, sk.ID(), "git", "gabriel", "https://gist.github.com/gabriel/deadbeef", 1)
	require.EqualError(t, uerr, "invalid service git")
	require.Nil(t, u7)
	u8, uerr := user.New(ust, sk.ID(), "github", "", "https://gist.github.com/gabriel/deadbeef", 1)
	require.EqualError(t, uerr, "name is empty")
	require.Nil(t, u8)
	// Names must be lowercase alphanumeric.
	u10, uerr := user.New(ust, sk.ID(), "twitter", "Gbrltest", "https://twitter.com/gbrltest/status/1234", 1)
	require.EqualError(t, uerr, "name is not lowercase alphanumeric (a-z0-9)")
	require.Nil(t, u10)
	u11, uerr := user.New(ust, sk.ID(), "twitter", "gbrltest🤓", "https://twitter.com/gbrltest/status/1234", 1)
	require.EqualError(t, uerr, "name is not lowercase alphanumeric (a-z0-9)")
	require.Nil(t, u11)
	u12, uerr := user.New(ust, sk.ID(), "twitter", "gbrltest", "twitter.com/gbrltest/status/1234", 1)
	require.EqualError(t, uerr, "invalid scheme for url twitter.com/gbrltest/status/1234")
	require.Nil(t, u12)
}
|
peombwa/Sample-Graph-Python-Client
|
src/users/models/microsoftgraphtime_off_reason.py
|
<gh_stars>0
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class MicrosoftgraphtimeOffReason(Model):
    """MicrosoftgraphtimeOffReason.

    msrest model for the Microsoft Graph ``timeOffReason`` resource.
    Generated by AutoRest; manual edits will be lost on regeneration.

    :param id:
    :type id: str
    :param created_date_time:
    :type created_date_time: datetime
    :param last_modified_date_time:
    :type last_modified_date_time: datetime
    :param last_modified_by:
    :type last_modified_by: ~users.models.MicrosoftgraphidentitySet
    :param display_name:
    :type display_name: str
    :param icon_type: Possible values include: 'none', 'car', 'calendar',
     'running', 'plane', 'firstAid', 'doctor', 'notWorking', 'clock',
     'juryDuty', 'globe', 'cup', 'phone', 'weather', 'umbrella', 'piggyBank',
     'dog', 'cake', 'trafficCone', 'pin', 'sunny', 'unknownFutureValue'
    :type icon_type: str or ~users.models.enum
    :param is_active:
    :type is_active: bool
    """

    # Maps each Python attribute to its JSON wire key and msrest
    # (de)serialization type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'last_modified_date_time': {'key': 'lastModifiedDateTime', 'type': 'iso-8601'},
        'last_modified_by': {'key': 'lastModifiedBy', 'type': 'MicrosoftgraphidentitySet'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'icon_type': {'key': 'iconType', 'type': 'str'},
        'is_active': {'key': 'isActive', 'type': 'bool'},
    }

    def __init__(self, id=None, created_date_time=None, last_modified_date_time=None, last_modified_by=None, display_name=None, icon_type=None, is_active=None):
        # All fields are optional keyword arguments, mirroring the wire model.
        super(MicrosoftgraphtimeOffReason, self).__init__()
        self.id = id
        self.created_date_time = created_date_time
        self.last_modified_date_time = last_modified_date_time
        self.last_modified_by = last_modified_by
        self.display_name = display_name
        self.icon_type = icon_type
        self.is_active = is_active
|
fincd-aws/aws-alb-ingress-controller
|
internal/alb/generator/tag_test.go
|
<gh_stars>1-10
package generator
import (
"testing"
"github.com/stretchr/testify/assert"
)
// Test_TagLB verifies load-balancer tags include the cluster ownership
// tag, the ingress identity tags, stack/resource markers, and any
// user-supplied default tags.
func Test_TagLB(t *testing.T) {
	gen := TagGenerator{
		ClusterName: "cluster",
		DefaultTags: map[string]string{
			"key": "value",
		},
	}
	expected := map[string]string{
		"kubernetes.io/cluster/cluster": "owned",
		TagKeyIngressName:               "ingress",
		TagKeyNamespace:                 "namespace",
		"ingress.k8s.aws/cluster":       "cluster",
		"ingress.k8s.aws/stack":         "namespace/ingress",
		"ingress.k8s.aws/resource":      "LoadBalancer",
		"key":                           "value",
	}
	// testify's convention is assert.Equal(t, expected, actual); the
	// arguments were previously swapped, producing misleading failure output.
	assert.Equal(t, expected, gen.TagLB("namespace", "ingress"))
}
// Test_TagTGGroup verifies target-group-group tags: like the LB tags but
// without the per-resource "ingress.k8s.aws/resource" marker.
func Test_TagTGGroup(t *testing.T) {
	gen := TagGenerator{
		ClusterName: "cluster",
		DefaultTags: map[string]string{
			"key": "value",
		},
	}
	expected := map[string]string{
		"kubernetes.io/cluster/cluster": "owned",
		TagKeyIngressName:               "ingress",
		TagKeyNamespace:                 "namespace",
		"ingress.k8s.aws/cluster":       "cluster",
		"ingress.k8s.aws/stack":         "namespace/ingress",
		"key":                           "value",
	}
	// Fixed argument order: testify expects (t, expected, actual).
	assert.Equal(t, expected, gen.TagTGGroup("namespace", "ingress"))
}
// Test_TagTG verifies per-target-group tags: service name/port plus the
// resource marker; no cluster/default tags from an empty generator.
func Test_TagTG(t *testing.T) {
	gen := TagGenerator{}
	expected := map[string]string{
		TagKeyServiceName:          "service",
		TagKeyServicePort:          "port",
		"ingress.k8s.aws/resource": "namespace/ingress-service:port",
	}
	// Fixed argument order: testify expects (t, expected, actual).
	assert.Equal(t, expected, gen.TagTG("namespace", "ingress", "service", "port"))
}
|
johsteffens/beth
|
lib/bcore/bcore_signal.c
|
/** Author and Copyright 2017 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "bcore_signal.h"
#include "bcore_arr.h"
#include "bcore_bin_ml.h"
#include "bcore_btree.h"
#include "bcore_const_manager.h"
#include "bcore_control.h"
#include "bcore_file.h"
#include "bcore_flect.h"
#include "bcore_folder.h"
#include "bcore_function_manager.h"
#include "bcore_hmap.h"
#include "bcore_hmap_tp_sr.h"
#include "bcore_img.h"
#include "bcore_leaf.h"
#include "bcore_leaf_obj.h"
#include "bcore_life.h"
#include "bcore_name.h"
#include "bcore_name_manager.h"
#include "bcore_sr.h"
#include "bcore_sinks.h"
#include "bcore_sources.h"
#include "bcore_spect.h"
#include "bcore_spect_array.h"
#include "bcore_spect_compare.h"
#include "bcore_spect_hash.h"
#include "bcore_spect_inst.h"
#include "bcore_spect_interpreter.h"
#include "bcore_spect_sink.h"
#include "bcore_spect_source.h"
#include "bcore_spect_translator.h"
#include "bcore_spect_via.h"
#include "bcore_spect_matrix.h"
#include "bcore_sc.h"
#include "bcore_st.h"
#include "bcore_tbman.h"
#include "bcore_tp.h"
#include "bcore_tp_fastmap.h"
#include "bcore_threads.h"
#include "bcore_x_threads.h"
#include "bcore_trait.h"
#include "bcore_txt_ml.h"
#include "bcore_xoila.h"
#include "bcore.xo.h"
#include "bcore_spect_inst_call.h"
#include "bcore_spect_via_call.h"
#include "bcore_hmap_name.h"
#include "bcore_cday.h"
#include "bcore_error_manager.h"
#include "bcore_prsg.h"
BCORE_DEFINE_FUNCTIONS_OBJ_FLAT( bcore_signal_s )
/** Returns the signal's type when this signal is addressed to 'target' —
 *  either directly or via the broadcast targets TYPEOF_all / TYPEOF_local;
 *  otherwise returns TYPEOF_none.
 */
tp_t bcore_signal_s_handle_type( const bcore_signal_s* o, tp_t target )
{
    if( o->target != TYPEOF_all && o->target != TYPEOF_local && o->target != target )
    {
        return TYPEOF_none;
    }
    return o->type;
}
/**********************************************************************************************************************/
/** Dispatches the signal to each handler in 'arr', stopping at and
 *  returning the first non-NULL handler result (NULL if none).
 *  Shutdown signals (down0/down1) traverse the handlers in reverse
 *  order so teardown mirrors initialization order.
 */
vd_t bcore_signal_s_broadcast( const bcore_signal_s* o, bcore_fp_signal_handler* arr, uz_t size )
{
    int reverse = ( o->type == TYPEOF_down0 ) || ( o->type == TYPEOF_down1 );
    for( uz_t k = 0; k < size; k++ )
    {
        vd_t ret = arr[ reverse ? size - 1 - k : k ]( o );
        if( ret ) return ret;
    }
    return NULL;
}
/** Value-constructs a signal addressed to 'target', carrying 'type' and an
 *  optional payload 'object'; remaining fields (if any) are zero-initialized.
 */
bcore_signal_s bcore_signal_init( tp_t target, tp_t type, vd_t object )
{
    bcore_signal_s signal = { .target = target, .type = type, .object = object };
    return signal;
}
/** Routes a signal through every bcore subsystem's signal handler via
 *  bcore_signal_s_broadcast. The array order is load-bearing: the
 *  system-critical entries must initialize first (and, because broadcast
 *  reverses for down0/down1, shut down last) — do not reorder.
 */
vd_t bcore_general_signal_handler( const bcore_signal_s* o )
{
    bcore_fp_signal_handler arr[] =
    {
        /// system critical items (keep order)
        bcore_tbman_signal_handler,
        bcore_tp_signal_handler,
        bcore_name_manager_signal_handler,
        bcore_function_manager_signal_handler,
        bcore_flect_signal_handler,
        bcore_feature_signal_handler,
        bcore_trait_signal_handler,
        bcore_spect_signal_handler,
        bcore_sc_signal_handler,
        bcore_st_signal_handler,
        bcore_spect_source_signal_handler,
        bcore_const_manager_signal_handler,

        /// other items
        bcore_control_signal_handler,
        bcore_name_signal_handler,
        bcore_tp_fastmap_signal_handler,
        bcore_spect_inst_signal_handler,
        bcore_spect_via_signal_handler,
        bcore_btree_signal_handler,
        bcore_hmap_signal_handler,
        bcore_hmap_tp_sr_signal_handler,
        bcore_life_signal_handler,
        bcore_ref_signal_handler,
        bcore_spect_array_signal_handler,
        bcore_spect_sink_signal_handler,
        bcore_sinks_signal_handler,
        bcore_sources_signal_handler,
        bcore_spect_compare_signal_handler,
        bcore_spect_interpreter_signal_handler,
        bcore_spect_translator_signal_handler,
        bcore_spect_matrix_signal_handler,
        bcore_threads_signal_handler,
        bcore_x_threads_signal_handler,
        bcore_txt_ml_signal_handler,
        bcore_bin_ml_signal_handler,
        bcore_arr_signal_handler,
        bcore_img_signal_handler,
        bcore_leaf_signal_handler,
        bcore_leaf_obj_signal_handler,
        bcore_file_signal_handler,
        bcore_folder_signal_handler,
        bcore_spect_hash_signal_handler,
        bcore_xo_signal_handler,
        bcore_spect_inst_call_signal_handler,
        bcore_spect_via_call_signal_handler,
        bcore_hmap_name_signal_handler,
        bcore_cday_signal_handler,
        bcore_error_manager_signal_handler,
        bcore_prsg_signal_handler,
    };
    return bcore_signal_s_broadcast( o, arr, sizeof( arr ) / sizeof( bcore_fp_signal_handler ) );
}
|
lechium/tvOS10Headers
|
System/Library/PrivateFrameworks/GeoServices.framework/GEOSimpleTileRequester.h
|
<gh_stars>1-10
/*
* This header is generated by classdump-dyld 1.0
* on Wednesday, March 22, 2017 at 9:03:11 AM Mountain Standard Time
* Operating System: Version 10.1 (Build 14U593)
* Image Source: /System/Library/PrivateFrameworks/GeoServices.framework/GeoServices
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*/
#import <GeoServices/GeoServices-Structs.h>
#import <GeoServices/GEOTileRequester.h>
#import <libobjc.A.dylib/NSURLSessionDataDelegate.h>
@class NSMutableArray, NSMutableSet, GEOTileKeyMap, NSOperationQueue, NSURLSession, NSString;
// Class-dumped private interface (no implementation visible).
// From the ivar names: a tile requester that fetches tiles over
// NSURLSession, holding pending operations in _waiting, in-flight ones
// in _running, and mapping tile keys to their base operations in
// _keysToBaseOps. Member semantics are inferred from names only —
// verify against runtime behavior before relying on them.
@interface GEOSimpleTileRequester : GEOTileRequester <NSURLSessionDataDelegate> {

	NSMutableArray* _waiting;
	NSMutableSet* _running;
	GEOTileKeyMap* _keysToBaseOps;
	BOOL _cancelled;
	BOOL _subclassImplementsTileEdition;
	NSMutableArray* _errors;
	NSOperationQueue* _delegateQueue;
	NSURLSession* _session;

}

@property (readonly) unsigned long long hash;
@property (readonly) Class superclass;
@property (copy,readonly) NSString * description;
@property (copy,readonly) NSString * debugDescription;
+(long long)eTagType;
-(void)cancel;
-(void)dealloc;
-(BOOL)isRunning;
-(void)start;
-(void)_cleanup;
-(void)_cancel;
-(id)_nextPendingOperation;
-(void)_startNextPendingOperation:(id)arg1 ;
-(id)_runningOperationForTask:(id)arg1 ;
-(id)initWithKeyList:(id)arg1 manifestConfiguration:(id)arg2 locale:(id)arg3 cachedEtags:(id)arg4 cachedData:(id)arg5 priorities:(id)arg6 ;
-(id)_delegateGCDQueue;
-(BOOL)allowsCookies;
-(id)urlForTileKey:(GEOTileKey*)arg1 ;
-(id)localizationURLForTileKey:(GEOTileKey*)arg1 ;
-(int)checksumMethodForIncomingTileDataWithKey:(GEOTileKey*)arg1 ;
-(id)mergeBaseTileEtag:(id)arg1 withLocalizationTileEtag:(id)arg2 ;
-(id)mergeBaseTile:(id)arg1 withLocalizationTile:(id)arg2 ;
-(id)editionHeader;
-(void)_operationFailed:(id)arg1 error:(id)arg2 ;
-(void)_operationFinished:(id)arg1 ;
-(void)_doWorkOrFinish;
-(unsigned)tileEditionForKey:(GEOTileKey*)arg1 ;
-(void)_releaseEverything;
-(void)cancelKey:(const GEOTileKey*)arg1 ;
-(void)_cancelKey:(GEOTileKey)arg1 ;
-(void)reprioritizeKey:(const GEOTileKey*)arg1 newPriority:(unsigned)arg2 ;
-(void)_reprioritizeKey:(GEOTileKey)arg1 newPriority:(unsigned)arg2 ;
-(BOOL)_verifyDataIntegrity:(id)arg1 checksumMethod:(int)arg2 ;
@end
|
olafurpg/algebird
|
algebird-test/src/test/scala/com/twitter/algebird/FirstLaws.scala
|
<reponame>olafurpg/algebird
package com.twitter.algebird
import com.twitter.algebird.BaseProperties._
import com.twitter.algebird.scalacheck.arbitrary._
import com.twitter.algebird.scalacheck.NonEmptyVector
import org.scalacheck.Prop.forAll
/**
 * ScalaCheck laws for `First`: summing any non-empty collection keeps
 * the earliest element, `+` is left-biased, the aggregator returns the
 * head, and the semigroup laws hold.
 */
class FirstLaws extends CheckProperties {
  property("First should sum properly") {
    forAll { v: NonEmptyVector[First[Int]] =>
      Semigroup.sumOption[First[Int]](v.items).get == v.items.head
    }
  }

  property("First.+ should work") {
    forAll { (l: First[Int], r: First[Int]) =>
      l + r == l
    }
  }

  property("First.aggregator returns the first item") {
    forAll { v: NonEmptyVector[Int] =>
      First.aggregator(v.items) == v.items.head
    }
  }

  property("First[Int] is a semigroup")(semigroupLaws[First[Int]])
}
|
gina-alaska/ASSIST
|
db/migrate/20120307020458_change_obs_datetime_to_datetime_in_observations.rb
|
<gh_stars>0
# Widens observations.obs_datetime from a date column to a full datetime
# so observation times (not just calendar days) can be stored.
class ChangeObsDatetimeToDatetimeInObservations < ActiveRecord::Migration
  def up
    change_column :observations, :obs_datetime, :datetime
  end

  # Rolling back truncates any stored time-of-day back to a date.
  def down
    change_column :observations, :obs_datetime, :date
  end
end
|
xswz8015/infra
|
go/src/infra/cros/internal/util/collections_test.go
|
<filename>go/src/infra/cros/internal/util/collections_test.go
// Copyright 2021 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package util
import (
"testing"
)
// TestUnorderedEqual checks that slice equality ignores element order
// but respects element multiplicity.
func TestUnorderedEqual(t *testing.T) {
	base := []string{"a", "b", "c", "a"}
	permuted := []string{"b", "c", "a", "a"}
	differentCounts := []string{"a", "b", "b", "c"}

	if !UnorderedEqual(base, permuted) {
		t.Fatalf("UnorderedEqual: got false, expected true")
	}
	// Same element set but different multiplicities must not be equal.
	if UnorderedEqual(base, differentCounts) {
		t.Fatalf("UnorderedEqual: got true, expected false")
	}
}
// TestUnorderedContains checks multiset containment: every element of
// the second slice must appear in the first, order-insensitively.
func TestUnorderedContains(t *testing.T) {
	a := []string{"a", "b", "c", "a"}
	b := []string{"b", "c"}
	c := []string{"b", "d"}
	// Failure messages previously said "UnorderedEqual" (copy-paste bug);
	// they now name the function actually under test.
	if !UnorderedContains(a, b) {
		t.Fatalf("UnorderedContains: got false, expected true")
	}
	if UnorderedContains(a, c) {
		t.Fatalf("UnorderedContains: got true, expected false")
	}
}
|
hakanardo/pyvx
|
pyvx/backend/mock_backend.py
|
<reponame>hakanardo/pyvx
import pyvx
class Lib(object):
    """Stand-in for the OpenVX C library used by the mock backend.

    Known entry points return fixed values; any unknown attribute lookup
    yields -1, mimicking a failed call/symbol.
    """

    def vxCreateContext(self):
        # Arbitrary non-zero handle standing in for a real context pointer.
        return 42

    def __getattr__(self, name):
        # Any vx* function not explicitly mocked "returns" an error code.
        return -1

    def _get_backend_version(self):
        return pyvx.__backend_version__

    def _get_backend_name(self):
        return b"mock"

    def _get_backend_install_path(self):
        return b"nowhere"

    def _get_FMT_REF(self):
        return b""

    def _get_FMT_SIZE(self):
        return b""
class Ffi(object):
    """Minimal mock of the cffi FFI object used by real backends."""

    def string(self, obj):
        # cffi's ffi.string decodes a C char*; here we just coerce to bytes.
        return bytes(obj)

    def typeof(self, obj):
        # Type introspection is meaningless for the mock backend.
        return None
lib, ffi = Lib(), Ffi()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.