text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
# Test decoding with backward task.
# Requirements:
# 1. pretrained attention model, as Decode/ would.
# 2. pretrained soothsayer model predicting backward probability. (Backward/Model)
# 3. input file to decode.
set -euo pipefail

SEQ2SEQ_PARAMS=save/test-atten/params
SEQ2SEQ_MODEL=save/test-atten/model8
TASK=backward
INPUT_FILE=data/t_given_s_test.txt
OUTPUT_FILE=save/test-future-$TASK-decode/decode.txt
SOOTHSAYER_MODEL=save/test-future-$TASK/model10
PREDICTOR_WEIGHT=1

# All expansions quoted so the invocation survives paths containing spaces.
th Future/Decode/decode.lua \
  -params_file "$SEQ2SEQ_PARAMS" \
  -model_file "$SEQ2SEQ_MODEL" \
  -InputFile "$INPUT_FILE" \
  -OutputFile "$OUTPUT_FILE" \
  -FuturePredictorModelFile "$SOOTHSAYER_MODEL" \
  -PredictorWeight "$PREDICTOR_WEIGHT" \
  -Task "$TASK" \
  -gpu_index 2
|
# Install yarn via Homebrew if it is not already on PATH.
# 'command -v' is the portable, recommended way to probe for a tool;
# the original 'test ! $(which yarn)' relied on unquoted command substitution.
if ! command -v yarn >/dev/null 2>&1
then
  brew install yarn
fi
#!/bin/bash
# Enqueue a DeepSpeech training run: must be invoked from the repo root.
if [ ! -f DeepSpeech.py ]; then
  echo "Please make sure you run this from DeepSpeech's top level directory."
  exit 1
fi

GLOBAL_LOG="\\/dev\\/null" # by default there is no global logging (value is pre-escaped for the sed template below)
NODES=2                    # to be adjusted
GPUS=8                     # to be adjusted
HASH=$(git rev-parse HEAD) # exact source revision being enqueued
# default run name is based on the current date/time
TO_RUN=run-$(date +%Y-%m-%d-%H-%M)
runs_dir="/data/runs"
# parsing parameters
while [[ $# -gt 0 ]]; do
  key="$1"
  case "$key" in
    -h|--help)
      # NOTE: usage previously advertised '-l|-log'; the parser accepts '--log'.
      echo "Usage: enqueue.sh [--help] [-g|--gpus N] [-n|--nodes N] [-l|--log file] [-c|--continue run] [run]"
      echo ""
      echo "--help print this help message"
      echo "--gpus N allocates N gpus per node"
      echo "--nodes N allocates N nodes"
      echo "--log file if set, global logs will get appended to the provided file"
      echo "--continue run continue a former run by copying its transitional content into the new run directory"
      echo "run name of or path to a run directory - if just a name, it will be created at $runs_dir/<run>"
      echo " defaults to 'run-' plus current date/time"
      echo
      exit 0
      ;;
    -g|--gpus)
      GPUS="$2"
      shift # past the option's value
      ;;
    -n|--nodes)
      NODES="$2"
      shift
      ;;
    -l|--log)
      # path-slashes are escaped because the value is later substituted into a
      # sed replacement string
      GLOBAL_LOG=$(sed 's/[\/&]/\\&/g' <<< "$2")
      shift
      ;;
    -c|--continue)
      TO_CONTINUE="$2"
      shift
      ;;
    *)
      TO_RUN="$1"
      ;;
  esac
  shift # past argument or value
done
# creates a run directory path from a run name given by $1
# if the run name is an absolute path, it is used as-is
# otherwise (plain name or relative path) it is treated relative to $runs_dir
function to_run_dir {
  local run_dir
  # BUG FIX: the original tested '$1 == .*\/.*' and '$1 == \/.*', which are
  # regex patterns, not globs — inside [[ == ]] they only match strings that
  # literally start with '.' or '/.', so absolute paths were never recognized
  # and got prefixed with $runs_dir. Use real glob patterns instead.
  if [[ $1 == /* ]]; then
    run_dir=$1
  else
    run_dir=$runs_dir/$1
  fi
  # collapse any accidental '//' in the result
  echo "${run_dir//\/\//\/}"
}
# run directory from which "keep" data is to be copied over into the new run directory
if [ "$TO_CONTINUE" ]; then
  CONTINUE_DIR=$(to_run_dir "$TO_CONTINUE")
  if [ ! -d "$CONTINUE_DIR/results/keep" ]; then
    echo "Cannot continue former run. Directory $CONTINUE_DIR/results/keep doesn't exist."
    exit 1
  fi
fi

# preparing run directory; refuse to clobber an existing path
RUN_DIR=$(to_run_dir "$TO_RUN")
echo "Creating run directory $RUN_DIR..."
if [ -d "$RUN_DIR" ]; then
  echo "Run directory $RUN_DIR already exists."
  exit 1
elif [ -f "$RUN_DIR" ]; then
  echo "Run directory path $RUN_DIR is already a file."
  exit 1
fi
mkdir -p "$RUN_DIR/src"
mkdir -p "$RUN_DIR/results"

# copying local tree into "src" sub-directory of run directory, excluding .git
rsync -av . "$RUN_DIR/src" --exclude=/.git

# copying over "keep" data from continue run directory
if [ "$CONTINUE_DIR" ]; then
  cp -rf "$CONTINUE_DIR/results/keep" "$RUN_DIR/results/"
fi
mkdir -p "$RUN_DIR/results/keep"

# patch-creating job.sbatch file from job-template with our parameters
# (all expansions quoted so run directories with spaces do not break the calls)
sed \
  -e "s/__ID__/$HASH/g" \
  -e "s/__NAME__/$TO_RUN/g" \
  -e "s/__GLOBAL_LOG__/$GLOBAL_LOG/g" \
  -e "s/__NODES__/$NODES/g" \
  -e "s/__GPUS__/$GPUS/g" \
  bin/job-template.sbatch > "$RUN_DIR/job.sbatch"

# enqueuing the new job description and run directory
sbatch -D "$RUN_DIR" "$RUN_DIR/job.sbatch"
|
#!/bin/bash
install_saw() {
  # Tap TylerBrock's repository and install saw via the project's brew helpers.
  local formula="saw"
  brew_tap "$formula" "TylerBrock/saw"
  brew_install "$formula" "$formula"
}
|
<reponame>cxh1378/vue-cli-config<filename>node-test/app.js
const http = require('http')

// Minimal plain-text HTTP server for local testing.
const server = http.createServer(function (req, res) {
  console.log('req:', req)
  // Send the HTTP header: status 200 (OK), content type text/plain.
  res.writeHead(200, {'Content-Type': 'text/plain'});
  // Send the response body "Hello World".
  res.end('Hello World\n');
})
server.listen(8888)
console.log('Server running at http://127.0.0.1:8888')
require 'rails_helper'
# Specs for DeliveryOrganisationsImporter#import covering the four outcomes
# observable here: new, retired, changed, and unchanged organisations.
RSpec.describe DeliveryOrganisationsImporter, type: :importer do
  subject(:importer) { DeliveryOrganisationsImporter.new }

  describe '#import' do
    # In-memory IO stand-ins for the importer's log output and acronyms input.
    let(:output) { StringIO.new }
    let(:acronyms) { StringIO.new }
    # One stubbed register organisation per example; not retired by default.
    let(:organisation) { instance_double(GovernmentOrganisationRegister::Organisation, retired?: false) }
    let(:organisations) { instance_double(GovernmentOrganisationRegister::Organisations) }

    before do
      # The register yields exactly one organisation in every example.
      allow(GovernmentOrganisationRegister::Organisations).to receive(:new) { organisations }
      allow(organisations).to receive(:each).and_yield(organisation)
    end

    describe 'new organisation' do
      it 'imports the organisation' do
        allow(organisation).to receive_messages(key: 'D1234', name: 'Org', website: 'http://example.com')

        expect {
          importer.import(acronyms, output)
        }.to change(DeliveryOrganisation, :count).by(1)

        # The created record mirrors the register entry's key/name/website.
        delivery_organisation = DeliveryOrganisation.last
        expect(delivery_organisation.natural_key).to eq('D1234')
        expect(delivery_organisation.name).to eq('Org')
        expect(delivery_organisation.website).to eq('http://example.com')

        output.rewind
        expect(output.read).to eq("new organisation: key=D1234, name=Org\n")
      end
    end

    describe 'retired organisation' do
      it 'ignores organisations which are past their end date' do
        allow(organisation).to receive_messages(key: 'D7890', name: 'Org', retired?: true)

        expect {
          importer.import(acronyms, output)
        }.to_not change(DeliveryOrganisation, :count)

        output.rewind
        expect(output.read).to eq("ignoring organisation, retired: key=D7890, name=Org\n")
      end
    end

    describe 'existing organisation, with changes' do
      it 'updates the organisation' do
        # Pre-existing record with the same natural key but older attributes.
        FactoryGirl.create(:delivery_organisation, natural_key: 'D5678', name: 'Org', website: 'http://example.com')
        allow(organisation).to receive_messages(key: 'D5678', name: 'New Org', website: 'http://example.org')

        expect {
          importer.import(acronyms, output)
        }.to_not change(DeliveryOrganisation, :count)

        output.rewind
        expect(output.read).to eq("updating organisation: key=D5678, name=New Org\n")
      end
    end

    describe 'existing organisation, without changes' do
      it 'ignores the organisation' do
        FactoryGirl.create(:delivery_organisation, natural_key: 'D5678', name: 'Org', website: 'http://example.com')
        allow(organisation).to receive_messages(key: 'D5678', name: 'Org', website: 'http://example.com')

        expect {
          importer.import(acronyms, output)
        }.to_not change(DeliveryOrganisation, :count)

        output.rewind
        expect(output.read).to eq("ignoring organisation, no changes: key=D5678, name=Org\n")
      end
    end
  end
end
|
#!/bin/sh
# Execute the following command to allow the script to be executed on MacOS:
# chmod a+x download_wsdl.sh && xattr -d com.apple.quarantine download_wsdl.sh
# -f makes curl exit non-zero on HTTP errors instead of silently saving the
# server's error page into CodesService.wsdl; -sS stays quiet but still
# reports real failures.
curl -fsS "http://localhost:8080/service/CodesService?wsdl" -o CodesService.wsdl
import cloneDeep from 'lodash/cloneDeep';
import React, { FC, useState, useEffect, useCallback, useMemo } from 'react';
import { useHistory, Link } from 'react-router-dom';
import { Container, Row, Col, Button, Form } from 'react-bootstrap';
import classNames from 'classnames';
import useInCrisisModal from '@/hooks/use-in-crisis-modal';
import useHeaderTitle from '@/hooks/use-header-title';
import useQuery from '@/hooks/use-query';
import useAccount from '@/hooks/use-account';
import AsyncPage from '@/components/async-page';
import SurveyQuestion from '@/components/survey-question';
import CollectPhoneModal from '@/components/collect-phone-modal';
import { assessmentService, accountService } from '@/lib/services';
import { Assessment, QUESTION_TYPE, SelectedQuestionAnswer } from '@/lib/models';
import ProgressBar from '@/components/progress-bar';
import useHandleError from '@/hooks/use-handle-error';
/**
 * Weekly assessment page. Fetches one assessment question at a time (driven
 * by the questionId/sessionId query params), collects answers, and routes the
 * user either to the next question or on to the connect-with-support page.
 */
const WeeklyAssessment: FC = () => {
  useHeaderTitle('assessment');
  const handleError = useHandleError();
  const history = useHistory<any>();
  const query = useQuery();
  const questionId = query.get('questionId');
  const sessionId = query.get('sessionId');
  const { account, setAccount } = useAccount();
  const { openInCrisisModal } = useInCrisisModal();

  const [assessment, setAssessment] = useState<Assessment | undefined>();
  const [answerChangedByUser, setAnswerChangedByUser] = useState<boolean>(false);
  const [showPhoneModal, setShowPhoneModal] = useState<boolean>(false);

  // Destination after the assessment; carries any routed clinic/provider/
  // support-role ids from the navigation state through as query params.
  const SUPPORT_ROUTE = useMemo(() => {
    const params = new URLSearchParams({});
    const clinicIds = history.location.state?.routedClinicIds ?? [];
    const providerId = history.location.state?.routedProviderId;
    const supportRoleIds = history.location.state?.routedSupportRoleIds;

    if (Array.isArray(clinicIds)) {
      for (const clinicId of clinicIds) {
        params.append('clinicId', clinicId);
      }
    }

    if (providerId) {
      params.append('providerId', providerId);
    }

    if (Array.isArray(supportRoleIds)) {
      for (const supportRoleId of supportRoleIds) {
        params.append('supportRoleId', supportRoleId);
      }
    }

    return `/connect-with-support?${params.toString()}`;
  }, [history.location.state]);

  // Loads the current question. Also clears the "user changed an answer" flag
  // so the QUAD auto-submit effect below does not fire on freshly loaded data.
  const fetchData = useCallback(async () => {
    const response = await assessmentService.getEvidenceAssessmentQuestion(questionId, sessionId).fetch();
    setAnswerChangedByUser(false);
    setAssessment(response.assessment);
  }, [questionId, sessionId]);

  // Prompt for a phone number when the account does not have one yet.
  useEffect(() => {
    if (!account) return;
    if (!account.phoneNumber) {
      setShowPhoneModal(true);
    }
  }, [account]);

  const navigateForwards = useCallback(
    (submissionResponse) => {
      if (submissionResponse.assessment) {
        history.push(
          `/weekly-assessment?questionId=${submissionResponse.assessment.question.questionId}&sessionId=${submissionResponse.assessment.sessionId}`
        );
      } else {
        // No further questions: the assessment is complete.
        history.push(SUPPORT_ROUTE);
      }
    },
    [SUPPORT_ROUTE, history]
  );

  const navigateBackwards = useCallback(() => {
    if (!assessment) return;
    if (assessment.previousQuestionId) {
      history.push(`/weekly-assessment?questionId=${assessment.previousQuestionId}&sessionId=${assessment.previousSessionId}`);
    }
  }, [assessment, history]);

  // Submits the currently selected answers for the current question.
  // (The original wrapped this in a try/catch that only re-threw; every
  // caller already catches, so the redundant wrapper was removed.)
  const submitAnswer = useCallback(async () => {
    if (!assessment) return;

    const result = await assessmentService
      .submitEvidenceAssessmentQuestion({
        assessmentAnswers: assessment.question.selectedAssessmentAnswers,
        questionId: assessment.question.questionId,
        sessionId: assessment.sessionId,
      })
      .fetch();

    return result;
  }, [assessment]);

  // Need to use this as a sort of pseudo click handler for QUAD question types
  useEffect(() => {
    if (!assessment) return;
    if (assessment.question.questionType !== QUESTION_TYPE.QUAD) return;
    if (!assessment.question.selectedAssessmentAnswers) return;
    if (!assessment.question.selectedAssessmentAnswers.length) return;
    // Only auto-submit for a change the user made, not for fetched state.
    if (!answerChangedByUser) return;

    async function submitAnswerAndNavigateForwards() {
      try {
        const submissionResponse = await submitAnswer();
        navigateForwards(submissionResponse);
      } catch (error) {
        handleError(error);
      }
    }

    submitAnswerAndNavigateForwards();
  }, [answerChangedByUser, assessment, handleError, navigateForwards, submitAnswer]);

  // Records the user's selection locally and opens the in-crisis modal when a
  // selected answer is flagged as a crisis answer.
  function handleSurveyQuestionChange(_questionId: string, selectedAssessmentAnswers: SelectedQuestionAnswer[]) {
    if (!assessment) return;

    const assessmentClone = cloneDeep(assessment);
    assessmentClone.question.selectedAssessmentAnswers = selectedAssessmentAnswers;
    setAnswerChangedByUser(true);
    setAssessment(assessmentClone);

    const selectedAnswers = assessment.question.answers.filter((answer) =>
      selectedAssessmentAnswers.find((selectedAnswer) => selectedAnswer.answerId === answer.answerId)
    );

    const isCrisis = selectedAnswers.some((answer) => answer.isCrisis);
    const isCall = selectedAnswers.some((answer) => answer.isCall);
    isCrisis && openInCrisisModal(isCall);
  }

  // Save any selected answers before navigating to the previous question.
  async function handleBackButtonClick() {
    if (!assessment) return;

    if (assessment.question.selectedAssessmentAnswers && assessment.question.selectedAssessmentAnswers.length) {
      try {
        await submitAnswer();
        navigateBackwards();
      } catch (error) {
        handleError(error);
      }
    } else {
      navigateBackwards();
    }
  }

  async function handleNextButtonClick() {
    try {
      const submissionResponse = await submitAnswer();
      navigateForwards(submissionResponse);
    } catch (error) {
      handleError(error);
    }
  }

  return (
    <>
      <CollectPhoneModal
        show={showPhoneModal}
        onHide={() => {
          // Without a phone number the assessment is skipped entirely.
          if (account?.phoneNumber) {
            setShowPhoneModal(false);
          } else {
            history.push(SUPPORT_ROUTE, { skipAssessment: true });
          }
        }}
        onSubmit={async (phoneNumber) => {
          if (!account) return;

          try {
            const accountResponse = await accountService
              .updatePhoneNumberForAccountId(account?.accountId, {
                phoneNumber,
              })
              .fetch();

            setAccount(accountResponse.account);
            setShowPhoneModal(false);
          } catch (error) {
            handleError(error);
          }
        }}
      />
      <AsyncPage fetchData={fetchData}>
        <ProgressBar current={assessment?.assessmentProgress || 0} max={assessment?.assessmentProgressTotal || 0} />
        <Container className="pt-5 pb-5">
          <Row>
            <Col md={{ span: 10, offset: 1 }} lg={{ span: 8, offset: 2 }} xl={{ span: 6, offset: 3 }}>
              {assessment?.assessmentPrompt && <p className="mb-3">{assessment?.assessmentPrompt}</p>}
              <Form>
                {assessment?.question && (
                  <SurveyQuestion key={assessment.question.questionId} question={assessment.question} onChange={handleSurveyQuestionChange} />
                )}
                <div
                  className={classNames({
                    'd-flex': true,
                    'justify-content-end': !assessment?.previousQuestionId,
                    'justify-content-between': assessment?.previousQuestionId && assessment?.nextQuestionId,
                  })}
                >
                  {assessment?.previousQuestionId && (
                    <Button variant="outline-primary" onClick={handleBackButtonClick}>
                      back
                    </Button>
                  )}
                  {/* QUAD questions auto-submit on selection, so no next button. */}
                  {assessment?.question.questionType !== QUESTION_TYPE.QUAD && (
                    <Button variant="primary" onClick={handleNextButtonClick}>
                      {assessment?.nextQuestionId ? 'next' : 'done'}
                    </Button>
                  )}
                </div>
              </Form>
            </Col>
          </Row>
          {!assessment?.previousQuestionId && (
            <p className="text-center">
              <Link
                to={{
                  pathname: SUPPORT_ROUTE,
                  state: { skipAssessment: true },
                }}
              >
                skip for now
              </Link>
            </p>
          )}
        </Container>
      </AsyncPage>
    </>
  );
};

export default WeeklyAssessment;
|
/*
* MIT License
*
* Copyright (c) 2021 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.jamsimulator.jams.gui.project.bottombar;
import javafx.animation.AnimationTimer;
import javafx.concurrent.Task;
import javafx.scene.Node;
import javafx.scene.control.Label;
import javafx.scene.control.ProgressBar;
import javafx.scene.layout.HBox;
import net.jamsimulator.jams.event.Listener;
import net.jamsimulator.jams.gui.JamsApplication;
import net.jamsimulator.jams.gui.project.event.ProjectCloseEvent;
import net.jamsimulator.jams.project.Project;
/**
 * Bottom-bar element that displays the project's currently running background
 * task: a label bound to the task's title and a progress bar bound to its
 * progress. An internal {@link AnimationTimer} polls the project's task
 * executor for new tasks.
 */
public class ProjectTaskBarElement extends HBox implements ProjectBottomBarElement {

    public static final String NAME = "project_task_bar_element";
    public static final String STYLE_CLASS = "project-task-bar-element";

    private final Project project;
    private final Label label;
    private final ProgressBar bar;
    private final UpdateTimer timer;

    // Task currently shown; null when idle. Only written by UpdateTimer.
    private Task<?> currentTask;

    /**
     * Creates the element for the given project, registers it as a listener on
     * the projects tab pane, and starts the polling timer.
     *
     * @param project the project whose task executor is polled for tasks
     */
    public ProjectTaskBarElement(Project project) {
        getStyleClass().add(STYLE_CLASS);
        this.project = project;
        label = new Label();
        bar = new ProgressBar(-1);
        timer = new UpdateTimer();
        // -1 puts the bar into indeterminate mode; hidden until a task exists.
        bar.setProgress(-1);
        bar.setVisible(false);
        getChildren().addAll(label, bar);
        JamsApplication.getProjectsTabPane().registerListeners(this, true);
        timer.start();
    }

    /**
     * Stops the polling timer when this element's project is closed.
     * Invoked through the event system via the {@link Listener} annotation.
     */
    @Listener
    private void onProjectClose(ProjectCloseEvent event) {
        if (project.equals(event.getProject())) {
            timer.stop();
        }
    }

    @Override
    public ProjectBottomBarPosition getPosition() {
        return ProjectBottomBarPosition.RIGHT;
    }

    @Override
    public int getPriority() {
        // Near-maximal priority; the -100 leaves headroom for elements that
        // must sort after this one. NOTE(review): confirm ordering convention.
        return Integer.MAX_VALUE - 100;
    }

    @Override
    public Node asNode() {
        return this;
    }

    @Override
    public String getName() {
        return NAME;
    }

    /**
     * Timer that throttles itself to one poll every 100,000,000 ns (~100 ms)
     * and rebinds the label/bar whenever the displayed task finishes.
     */
    private class UpdateTimer extends AnimationTimer {

        // Timestamp (nanoseconds) before which handle() calls are skipped.
        private long nextFrame = 0L;

        @Override
        public void handle(long now) {
            if (nextFrame > now) return;
            nextFrame = now + 100000000L;
            if (currentTask == null || currentTask.isDone()) {
                findNewTask();
            }
        }

        // Picks the executor's first task (if any) and binds/unbinds the UI.
        private void findNewTask() {
            currentTask = project.getTaskExecutor().getFirstTask().orElse(null);
            if (currentTask == null) {
                label.textProperty().unbind();
                label.setText(null);
                bar.progressProperty().unbind();
                bar.setProgress(-1);
                bar.setVisible(false);
            } else {
                label.textProperty().bind(currentTask.titleProperty());
                bar.setVisible(true);
                bar.progressProperty().bind(currentTask.progressProperty());
            }
        }
    }
}
|
const get = require('lodash/get');
const has = require('lodash/has');
const isEmpty = require('lodash/isEmpty');
const {
CIVILITY_LIST,
HELPER,
AUXILIARY_ROLES,
CARE_DAYS_INDEX,
FUNDING_FREQUENCIES,
CUSTOMER_SITUATIONS,
FUNDING_NATURES,
SERVICE_NATURES,
STOPPED,
ARCHIVED,
ACTIVATED,
EVENT_TRANSPORT_MODE_LIST,
} = require('./constants');
const { CompaniDate } = require('./dates/companiDates');
const UtilsHelper = require('./utils');
const Customer = require('../models/Customer');
const Role = require('../models/Role');
const User = require('../models/User');
const UserCompany = require('../models/UserCompany');
const SectorHistory = require('../models/SectorHistory');
const ReferentHistory = require('../models/ReferentHistory');
const Service = require('../models/Service');
const ContractRepository = require('../repositories/ContractRepository');
const CustomerRepository = require('../repositories/CustomerRepository');
const { nationalities } = require('../data/nationalities');
const { countries } = require('../data/countries');
// Builds the display list of a customer's subscribed service names, one per
// line ("\r\n " separated), using the latest version of each service.
// Assumes a non-empty subscriptions array, like the original.
const getServicesNameList = (subscriptions) => {
  const nameOf = sub => `${UtilsHelper.getLastVersion(sub.service.versions, 'startDate').name}`;
  let list = nameOf(subscriptions[0]);
  for (const sub of subscriptions.slice(1)) list += `\r\n ${nameOf(sub)}`;
  return list;
};
// Column headers for the customers export; order must match the cells pushed
// in exportCustomers.
const customerExportHeader = [
  'Id Bénéficiaire',
  'Titre',
  'Nom',
  'Prenom',
  'Date de naissance',
  'Adresse',
  'Ville',
  '1ère intervention',
  'Id Auxiliaire référent(e)',
  'Auxiliaire référent(e)',
  'Situation',
  'Environnement',
  'Objectifs',
  'Autres',
  'Nom associé au compte bancaire',
  'IBAN',
  'BIC',
  'RUM',
  'Date de signature du mandat',
  'Nombre de souscriptions',
  'Souscriptions',
  'Nombre de financements',
  'Date de création',
  'Statut',
];
// Formats a person's identity as "firstname lastname".
const formatIdentity = (person) => {
  return person.firstname + ' ' + person.lastname;
};
// Maps a customer document to its lifecycle status label:
// empty doc -> '', archived takes precedence over stopped, else active.
const getStatus = customer => (isEmpty(customer) ? ''
  : customer.archivedAt ? ARCHIVED
  : customer.stoppedAt ? STOPPED
  : ACTIVATED);
// Exports every customer of the caller's company as an array of rows
// (header row first), with dates formatted dd/LL/yyyy.
exports.exportCustomers = async (credentials) => {
  // All queries and populates below are scoped to the caller's company.
  const companyId = get(credentials, 'company._id', null);
  const customers = await Customer.find({ company: companyId })
    .populate({ path: 'subscriptions.service' })
    .populate({ path: 'firstIntervention', select: 'startDate', match: { company: companyId } })
    .populate({ path: 'referent', match: { company: companyId } })
    .lean({ autopopulate: true });

  const rows = [customerExportHeader];
  for (const cus of customers) {
    const birthDate = get(cus, 'identity.birthDate');
    const lastname = get(cus, 'identity.lastname');
    const mandates = get(cus, 'payment.mandates') || [];
    // Most recently created mandate drives the RUM/signature columns.
    const lastMandate = UtilsHelper.getLastVersion(mandates, 'createdAt') || {};
    const signedAt = lastMandate.signedAt ? CompaniDate(lastMandate.signedAt).format('dd/LL/yyyy') : '';
    const subscriptionsCount = get(cus, 'subscriptions.length') || 0;
    const firstIntervention = get(cus, 'firstIntervention.startDate');
    const situation = CUSTOMER_SITUATIONS.find(sit => sit.value === get(cus, 'followUp.situation'));
    // Cell order must match customerExportHeader.
    const cells = [
      get(cus, '_id') || '',
      CIVILITY_LIST[get(cus, 'identity.title')] || '',
      lastname ? lastname.toUpperCase() : '',
      get(cus, 'identity.firstname') || '',
      birthDate ? CompaniDate(birthDate).format('dd/LL/yyyy') : '',
      get(cus, 'contact.primaryAddress.fullAddress') || '',
      get(cus, 'contact.primaryAddress.city') || '',
      firstIntervention ? CompaniDate(firstIntervention).format('dd/LL/yyyy') : '',
      get(cus, 'referent._id') || '',
      has(cus, 'referent.identity') ? formatIdentity(get(cus, 'referent.identity')) : '',
      situation ? situation.label : '',
      get(cus, 'followUp.environment') || '',
      get(cus, 'followUp.objectives') || '',
      get(cus, 'followUp.misc') || '',
      get(cus, 'payment.bankAccountOwner') || '',
      get(cus, 'payment.iban') || '',
      get(cus, 'payment.bic') || '',
      lastMandate.rum || '',
      signedAt,
      subscriptionsCount,
      subscriptionsCount ? getServicesNameList(cus.subscriptions) : '',
      get(cus, 'fundings.length') || 0,
      cus.createdAt ? CompaniDate(cus.createdAt).format('dd/LL/yyyy') : '',
      getStatus(cus),
    ];

    rows.push(cells);
  }

  return rows;
};
// Builds one export row (an object keyed by the French column labels) for an
// auxiliary and one of their contracts. `contract` may be undefined when the
// auxiliary has no contracts — the get(contract, ..., null) defaults cover it.
const getDataForAuxiliariesExport = (aux, contractsLength, contract) => {
  const nationality = get(aux, 'identity.nationality');
  const lastname = get(aux, 'identity.lastname');
  const birthDate = get(aux, 'identity.birthDate');
  const address = get(aux, 'contact.address.fullAddress');
  const birthCountry = get(aux, 'identity.birthCountry');
  const { inactivityDate, createdAt } = aux;
  const transport = get(aux, 'administrative.transportInvoice.transportType');

  // Keys double as the export header (see exportAuxiliaries' Object.keys call).
  return {
    Email: get(aux, 'local.email') || '',
    Équipe: get(aux, 'sector.name') || '',
    'Id Auxiliaire': aux._id || '',
    Titre: CIVILITY_LIST[get(aux, 'identity.title')] || '',
    Nom: lastname ? lastname.toUpperCase() : '',
    Prénom: get(aux, 'identity.firstname') || '',
    'Date de naissance': birthDate ? CompaniDate(birthDate).format('dd/LL/yyyy') : '',
    'Pays de naissance': countries[birthCountry] || '',
    'Departement de naissance': get(aux, 'identity.birthState') || '',
    'Ville de naissance': get(aux, 'identity.birthCity') || '',
    Nationalité: nationality ? nationalities[nationality] : '',
    'N° de sécurité sociale': get(aux, 'identity.socialSecurityNumber') || '',
    Addresse: address || '',
    Téléphone: get(aux, 'contact.phone') || '',
    'Nombre de contrats': contractsLength,
    Établissement: get(aux, 'establishment.name') || '',
    'Date de début de contrat prestataire': get(contract, 'startDate', null)
      ? CompaniDate(contract.startDate).format('dd/LL/yyyy')
      : '',
    'Date de fin de contrat prestataire': get(contract, 'endDate', null)
      ? CompaniDate(contract.endDate).format('dd/LL/yyyy')
      : '',
    'Date d\'inactivité': inactivityDate ? CompaniDate(inactivityDate).format('dd/LL/yyyy') : '',
    'Date de création': createdAt ? CompaniDate(createdAt).format('dd/LL/yyyy') : '',
    'Mode de transport par défaut': EVENT_TRANSPORT_MODE_LIST[transport] || '',
  };
};
// Exports the company's auxiliaries, one row per contract (or one row with
// empty contract columns when an auxiliary has no contract).
exports.exportAuxiliaries = async (credentials) => {
  const rows = [];
  const companyId = get(credentials, 'company._id');
  const userCompanies = await UserCompany.find({ company: companyId }, { user: 1 }).lean();
  if (!userCompanies.length) return rows;

  const roles = await Role.find({ name: { $in: AUXILIARY_ROLES } }).lean();
  const auxiliaries = await User
    .find({ 'role.client': { $in: roles.map(role => role._id) }, _id: { $in: userCompanies.map(u => u.user) } })
    .populate({ path: 'sector', populate: { path: 'sector', select: 'name' }, match: { company: companyId } })
    .populate({ path: 'contracts', select: '_id startDate endDate' })
    .populate({ path: 'establishment', select: 'name', match: { company: companyId } })
    .lean();

  // With no auxiliaries, push a single empty row so Object.keys(rows[0])
  // below can still produce the header line.
  if (!auxiliaries.length) rows.push(getDataForAuxiliariesExport({}, 0));
  else {
    for (const aux of auxiliaries) {
      const { contracts } = aux;
      if (contracts && contracts.length) {
        for (const contract of contracts) {
          rows.push(getDataForAuxiliariesExport(aux, contracts.length, contract));
        }
      } else {
        rows.push(getDataForAuxiliariesExport(aux, 0));
      }
    }
  }

  // First line is the header (row-object keys), then one line of values per row.
  return [Object.keys(rows[0]), ...rows.map(d => Object.values(d))];
};
// Column headers for the helpers export; order must match the cells pushed
// in exportHelpers.
const helperExportHeader = [
  'Email',
  'Téléphone',
  'Id Aidant(e)',
  'Aidant(e) - Nom',
  'Aidant(e) - Prénom',
  'Id Bénéficiaire',
  'Bénéficiaire - Titre',
  'Bénéficiaire - Nom',
  'Bénéficiaire - Prénom',
  'Bénéficiaire - Rue',
  'Bénéficiaire - Code postal',
  'Bénéficiaire - Ville',
  'Date de création',
];
// Exports the company's helpers with their associated customer's identity
// and primary address.
exports.exportHelpers = async (credentials) => {
  const rows = [helperExportHeader];
  const companyId = get(credentials, 'company._id');
  const userCompanies = await UserCompany.find({ company: companyId }, { user: 1 }).lean();
  if (!userCompanies.length) return rows;

  const role = await Role.findOne({ name: HELPER }).lean();
  const helpers = await User
    .find({ 'role.client': role._id, _id: { $in: userCompanies.map(u => u.user) } })
    .populate({
      path: 'customers',
      populate: { path: 'customer', select: 'identity contact' },
      match: { company: companyId },
    })
    .lean();

  for (const hel of helpers) {
    // NOTE(review): 'customers' is accessed as a single object with a
    // '.customer' field, not an array — presumably the populate yields one
    // link per helper; confirm against the Helper/UserCompany schema.
    const customer = hel.customers && hel.customers.customer;
    rows.push([
      get(hel, 'local.email') || '',
      // Replace the first digit of the stored number with the +33 prefix.
      get(hel, 'contact.phone', '') !== '' ? `+33${hel.contact.phone.substring(1)}` : '',
      get(hel, '_id') || '',
      get(hel, 'identity.lastname', '').toUpperCase(),
      get(hel, 'identity.firstname') || '',
      get(customer, '_id') || '',
      CIVILITY_LIST[get(customer, 'identity.title')] || '',
      get(customer, 'identity.lastname', '').toUpperCase(),
      get(customer, 'identity.firstname') || '',
      get(customer, 'contact.primaryAddress.street') || '',
      get(customer, 'contact.primaryAddress.zipCode') || '',
      get(customer, 'contact.primaryAddress.city') || '',
      hel.createdAt ? CompaniDate(hel.createdAt).format('dd/LL/yyyy') : '',
    ]);
  }

  return rows;
};
// Column headers for the sectors export; order must match exportSectors.
const sectorExportHeader = [
  'Equipe',
  'Id Auxiliaire',
  'Nom',
  'Prénom',
  'Date d\'arrivée dans l\'équipe',
  'Date de départ de l\'équipe',
];
// Exports the company's sector (team) membership history: one row per
// sector-history entry that has a start date.
exports.exportSectors = async (credentials) => {
  const companyId = get(credentials, 'company._id', null);
  const sectorHistories = await SectorHistory
    .find({ company: companyId, startDate: { $exists: true } })
    .populate({ path: 'sector', select: '_id name' })
    .populate({ path: 'auxiliary', select: '_id identity.firstname identity.lastname' })
    .lean();

  const data = [sectorExportHeader];
  for (const sectorHistory of sectorHistories) {
    data.push([
      get(sectorHistory, 'sector.name', null) || '',
      get(sectorHistory, 'auxiliary._id', null) || '',
      get(sectorHistory, 'auxiliary.identity.lastname', null) || '',
      get(sectorHistory, 'auxiliary.identity.firstname', null) || '',
      // startDate is guaranteed by the $exists filter above.
      CompaniDate(sectorHistory.startDate).format('dd/LL/yyyy'),
      sectorHistory.endDate ? CompaniDate(sectorHistory.endDate).format('dd/LL/yyyy') : '',
    ]);
  }

  return data;
};
// Column headers for the staff register export; order must match
// exportStaffRegister.
const staffRegisterHeader = [
  'Id Auxiliaire',
  'Nom',
  'Prénom',
  'Civilité',
  'Date de naissance',
  'Nationalité',
  'Emploi',
  'Type de contrat',
  'Date de début',
  'Date de fin',
];
// Exports the company's staff register: one row per contract.
exports.exportStaffRegister = async (credentials) => {
  const staffRegister = await ContractRepository.getStaffRegister(credentials.company._id);

  const rows = [staffRegisterHeader];
  for (const contract of staffRegister) {
    const birthDate = get(contract, 'user.identity.birthDate');
    rows.push([
      get(contract, 'user._id') || '',
      get(contract, 'user.identity.lastname', '').toUpperCase(),
      get(contract, 'user.identity.firstname') || '',
      CIVILITY_LIST[get(contract, 'user.identity.title')] || '',
      birthDate ? CompaniDate(birthDate).format('dd/LL/yyyy') : '',
      nationalities[get(contract, 'user.identity.nationality')] || '',
      // Job title and contract type are fixed values for this register.
      'Auxiliaire de vie',
      'CDI',
      CompaniDate(contract.startDate).format('dd/LL/yyyy'),
      contract.endDate ? CompaniDate(contract.endDate).format('dd/LL/yyyy') : '',
    ]);
  }

  return rows;
};
// Column headers for the referents export; order must match exportReferents.
const referentsHeader = [
  'Id Bénéficiaire',
  'Bénéficiaire - Titre',
  'Bénéficiaire - Nom',
  'Bénéficiaire - Prénom',
  'Id Auxiliaire',
  'Auxiliaire - Titre',
  'Auxiliaire - Nom',
  'Auxiliaire - Prénom',
  'Date de début',
  'Date de fin',
];
// Exports the referent-auxiliary assignment history of the company's
// customers: one row per referent-history entry.
exports.exportReferents = async (credentials) => {
  const referentsHistories = await ReferentHistory.find({ company: get(credentials, 'company._id', '') })
    .populate({ path: 'auxiliary' })
    .populate({ path: 'customer' })
    .lean();

  const rows = [referentsHeader];
  for (const referentHistory of referentsHistories) {
    rows.push([
      get(referentHistory, 'customer._id') || '',
      CIVILITY_LIST[get(referentHistory, 'customer.identity.title')] || '',
      get(referentHistory, 'customer.identity.lastname', '').toUpperCase(),
      get(referentHistory, 'customer.identity.firstname') || '',
      get(referentHistory, 'auxiliary._id') || '',
      CIVILITY_LIST[get(referentHistory, 'auxiliary.identity.title')] || '',
      get(referentHistory, 'auxiliary.identity.lastname', '').toUpperCase(),
      get(referentHistory, 'auxiliary.identity.firstname') || '',
      CompaniDate(referentHistory.startDate).format('dd/LL/yyyy'),
      referentHistory.endDate ? CompaniDate(referentHistory.endDate).format('dd/LL/yyyy') : '',
    ]);
  }

  return rows;
};
// Column headers for the services export; order must match exportServices.
const serviceHeader = [
  'Nature',
  'Entreprise',
  'Nom',
  'Montant unitaire par défaut',
  'TVA (%)',
  'Plan de majoration',
  'Date de début',
  'Date de création',
  'Date de mise a jour',
];
exports.exportServices = async (credentials) => {
const companyId = get(credentials, 'company._id', null);
const services = await Service.find({ company: companyId })
.populate({ path: 'company' })
.populate({ path: 'versions.surcharge', match: { company: companyId } })
.lean();
const data = [serviceHeader];
for (const service of services) {
const lastVersion = UtilsHelper.getLastVersion(service.versions, 'startDate');
data.push([
SERVICE_NATURES.find(nat => nat.value === service.nature).label,
service.company.name,
lastVersion.name,
UtilsHelper.formatFloatForExport(lastVersion.defaultUnitAmount),
UtilsHelper.formatFloatForExport(lastVersion.vat),
lastVersion.surcharge ? lastVersion.surcharge.name : '',
CompaniDate(lastVersion.startDate).format('dd/LL/yyyy'),
CompaniDate(service.createdAt).format('dd/LL/yyyy'),
CompaniDate(service.updatedAt).format('dd/LL/yyyy')]);
}
return data;
};
// Exports the subscriptions of every customer that has at least one,
// one row per subscription (latest subscription + service versions).
exports.exportSubscriptions = async (credentials) => {
  const customers = await Customer
    .find({ subscriptions: { $exists: true, $not: { $size: 0 } }, company: get(credentials, 'company._id') })
    .populate({ path: 'subscriptions.service' })
    .lean();

  const data = [];
  // NOTE(review): with no matching customer this returns [] (no header row),
  // unlike the header-first exports above — confirm callers expect that.
  if (!customers.length) return data;

  for (const cus of customers) {
    for (const sub of cus.subscriptions) {
      const lastServiceVersion = UtilsHelper.getLastVersion(sub.service.versions, 'startDate');
      const lastVersion = UtilsHelper.getLastVersion(sub.versions, 'createdAt');

      // Keys double as the export header (Object.keys below).
      data.push({
        'Id Bénéficiaire': get(cus, '_id') || '',
        Titre: CIVILITY_LIST[get(cus, 'identity.title')] || '',
        Nom: get(cus, 'identity.lastname', '').toUpperCase() || '',
        Prénom: get(cus, 'identity.firstname', '') || '',
        Service: lastServiceVersion ? lastServiceVersion.name : '',
        'Prix unitaire TTC': lastVersion ? UtilsHelper.formatFloatForExport(lastVersion.unitTTCRate) : '',
        'Volume horaire hebdomadaire estimatif': has(lastVersion, 'weeklyHours')
          ? UtilsHelper.formatFloatForExport(lastVersion.weeklyHours)
          : '',
        'Nombre d\'interventions hebdomadaire estimatif': has(lastVersion, 'weeklyCount')
          ? UtilsHelper.formatFloatForExport(lastVersion.weeklyCount)
          : '',
        'Dont soirées': lastVersion ? UtilsHelper.formatFloatForExport(get(lastVersion, 'evenings')) : '',
        'Dont samedis': lastVersion ? UtilsHelper.formatFloatForExport(get(lastVersion, 'saturdays')) : '',
        'Dont dimanches': lastVersion ? UtilsHelper.formatFloatForExport(get(lastVersion, 'sundays')) : '',
      });
    }
  }

  // First line is the header (row-object keys), then one line per row.
  return [Object.keys(data[0]), ...data.map(d => Object.values(d))];
};
// Column headers for the fundings export. The order must match the order of
// the values pushed per funding in exportFundings below.
const fundingExportHeader = [
  'Id Bénéficiaire',
  'Titre',
  'Nom',
  'Prénom',
  'Id tiers payeur',
  'Tiers payeur',
  'Code EPA',
  'Nature',
  'Service',
  'Date de début',
  'Date de fin',
  'Numéro de dossier',
  'Fréquence',
  'Montant TTC',
  'Montant unitaire TTC',
  'Nombre d\'heures',
  'Jours',
  'Participation du/de la bénéficiaire',
];
exports.exportFundings = async (credentials) => {
const customerFundings = await CustomerRepository.getCustomerFundings(get(credentials, 'company._id', null));
const data = [fundingExportHeader];
for (const cus of customerFundings) {
const funding = UtilsHelper.mergeLastVersionWithBaseObject(cus.funding, 'createdAt');
const nature = FUNDING_NATURES.find(nat => nat.value === funding.nature);
const lastServiceVersion = has(funding, 'subscription.service.versions')
? UtilsHelper.getLastVersion(funding.subscription.service.versions, 'startDate')
: null;
const frequency = FUNDING_FREQUENCIES.find(freq => freq.value === funding.frequency);
let careDays = '';
if (funding.careDays) {
careDays = funding.careDays.map(dayIndex => CARE_DAYS_INDEX[dayIndex]).join(' ');
}
data.push([
cus._id || '',
CIVILITY_LIST[get(cus, 'identity.title')] || '',
get(cus, 'identity.lastname', '').toUpperCase() || '',
get(cus, 'identity.firstname', '') || '',
get(funding, 'thirdPartyPayer._id') || '',
get(funding, 'thirdPartyPayer.name') || '',
get(funding, 'fundingPlanId') || '',
nature ? nature.label : '',
lastServiceVersion ? lastServiceVersion.name : '',
funding.startDate ? CompaniDate(funding.startDate).format('dd/LL/yyyy') : '',
funding.endDate ? CompaniDate(funding.endDate).format('dd/LL/yyyy') : '',
funding.folderNumber || '',
frequency ? frequency.label : '',
UtilsHelper.formatFloatForExport(funding.amountTTC),
UtilsHelper.formatFloatForExport(funding.unitTTCRate),
UtilsHelper.formatFloatForExport(funding.careHours),
careDays || '',
UtilsHelper.formatFloatForExport(funding.customerParticipationRate),
]);
}
return data;
};
|
<filename>cell_tools/_readwrite/_funcs/_make_peak_bed_from_multiome_features.py
# _make_peak_bed_from_multiome_features.py
__module_name__ = "_make_peak_bed_from_multiome_features.py"
__author__ = ", ".join(["<NAME>"])
__email__ = ", ".join(["<EMAIL>",])
# package imports #
# --------------- #
import os
import pandas as pd
import scipy.io as io
from scipy import sparse
def _make_peak_bed_from_multiome_features(multiome_dir, return_data=False):
"""
Convert multiome feature.tsv to a file that only contains scATAC-seq peaks.
Parameters:
-----------
path
path to feature.tsv
type: str
return_df
defaut: False
type: bool
Returns:
--------
[ optional ] peaks
pandas DataFrame of peaks
"""
feature_df = pd.read_csv(
os.path.join(multiome_dir, "features.tsv"),
sep="\t",
header=None,
names=["ensg", "name", "feature_type", "chr", "start", "stop"],
)
peak_bed_filepath = os.path.join(multiome_dir, "peaks.bed")
peak_df = feature_df.loc[feature_df.feature_type == "Peaks"][
["chr", "start", "stop"]
]
peak_idx = peak_df.index.astype(int)
peak_df = peak_df.reset_index(drop=True)
peak_df.to_csv(peak_bed_filepath, sep="\t", header=None, index=False)
mat = io.mmread(os.path.join(multiome_dir, "matrix.mtx")).toarray()[peak_idx]
io.mmwrite(
target=os.path.join(multiome_dir, "peak.matrix.mtx"), a=sparse.csr_matrix(mat)
)
if return_data:
return feature_df, peak_df, mat |
// +build !ignore_autogenerated
// Generated code
// run `make generate` to update
// Code generated by deepcopy-gen. DO NOT EDIT.
package v1
import (
conditionv1 "github.com/atlassian/ctrl/apis/condition/v1"
runtime "k8s.io/apimachinery/pkg/runtime"
)
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *LocationDescriptor) DeepCopyInto(out *LocationDescriptor) {
	// Value fields are copied wholesale; fields holding pointers/slices get
	// their own DeepCopyInto calls below.
	*out = *in
	out.TypeMeta = in.TypeMeta
	in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
	in.Spec.DeepCopyInto(&out.Spec)
	in.Status.DeepCopyInto(&out.Status)
	return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LocationDescriptor.
func (in *LocationDescriptor) DeepCopy() *LocationDescriptor {
	// nil receiver yields nil so callers can copy unconditionally.
	if in == nil {
		return nil
	}
	out := new(LocationDescriptor)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *LocationDescriptor) DeepCopyObject() runtime.Object {
	// Returns an untyped nil (not a typed-nil interface) for a nil receiver.
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *LocationDescriptorConfigMapNames) DeepCopyInto(out *LocationDescriptorConfigMapNames) {
	// Struct contains only value fields, so plain assignment is a deep copy.
	*out = *in
	return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LocationDescriptorConfigMapNames.
func (in *LocationDescriptorConfigMapNames) DeepCopy() *LocationDescriptorConfigMapNames {
	// nil receiver yields nil so callers can copy unconditionally.
	if in == nil {
		return nil
	}
	out := new(LocationDescriptorConfigMapNames)
	in.DeepCopyInto(out)
	return out
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LocationDescriptorDependency.
func (in *LocationDescriptorDependency) DeepCopy() *LocationDescriptorDependency {
	// nil receiver yields nil; the matching DeepCopyInto is generated elsewhere.
	if in == nil {
		return nil
	}
	out := new(LocationDescriptorDependency)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *LocationDescriptorList) DeepCopyInto(out *LocationDescriptorList) {
	*out = *in
	out.TypeMeta = in.TypeMeta
	out.ListMeta = in.ListMeta
	// Items is a slice; allocate a new backing array and deep-copy each element.
	if in.Items != nil {
		in, out := &in.Items, &out.Items
		*out = make([]LocationDescriptor, len(*in))
		for i := range *in {
			(*in)[i].DeepCopyInto(&(*out)[i])
		}
	}
	return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LocationDescriptorList.
func (in *LocationDescriptorList) DeepCopy() *LocationDescriptorList {
	// nil receiver yields nil so callers can copy unconditionally.
	if in == nil {
		return nil
	}
	out := new(LocationDescriptorList)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *LocationDescriptorList) DeepCopyObject() runtime.Object {
	// Returns an untyped nil (not a typed-nil interface) for a nil receiver.
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *LocationDescriptorResource) DeepCopyInto(out *LocationDescriptorResource) {
	*out = *in
	// DependsOn slice: new backing array, element-wise deep copy.
	if in.DependsOn != nil {
		in, out := &in.DependsOn, &out.DependsOn
		*out = make([]LocationDescriptorDependency, len(*in))
		for i := range *in {
			(*in)[i].DeepCopyInto(&(*out)[i])
		}
	}
	// Spec pointer: allocate a fresh RawExtension and deep-copy into it.
	if in.Spec != nil {
		in, out := &in.Spec, &out.Spec
		*out = new(runtime.RawExtension)
		(*in).DeepCopyInto(*out)
	}
	return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LocationDescriptorResource.
func (in *LocationDescriptorResource) DeepCopy() *LocationDescriptorResource {
	// nil receiver yields nil so callers can copy unconditionally.
	if in == nil {
		return nil
	}
	out := new(LocationDescriptorResource)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *LocationDescriptorSpec) DeepCopyInto(out *LocationDescriptorSpec) {
	*out = *in
	out.ConfigMapNames = in.ConfigMapNames
	// Resources slice: new backing array, element-wise deep copy.
	if in.Resources != nil {
		in, out := &in.Resources, &out.Resources
		*out = make([]LocationDescriptorResource, len(*in))
		for i := range *in {
			(*in)[i].DeepCopyInto(&(*out)[i])
		}
	}
	return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LocationDescriptorSpec.
func (in *LocationDescriptorSpec) DeepCopy() *LocationDescriptorSpec {
	// nil receiver yields nil so callers can copy unconditionally.
	if in == nil {
		return nil
	}
	out := new(LocationDescriptorSpec)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *LocationDescriptorStatus) DeepCopyInto(out *LocationDescriptorStatus) {
	*out = *in
	// Conditions slice: new backing array, element-wise deep copy.
	if in.Conditions != nil {
		in, out := &in.Conditions, &out.Conditions
		*out = make([]conditionv1.Condition, len(*in))
		for i := range *in {
			(*in)[i].DeepCopyInto(&(*out)[i])
		}
	}
	// ResourceStatuses slice: new backing array, element-wise deep copy.
	if in.ResourceStatuses != nil {
		in, out := &in.ResourceStatuses, &out.ResourceStatuses
		*out = make([]ResourceStatus, len(*in))
		for i := range *in {
			(*in)[i].DeepCopyInto(&(*out)[i])
		}
	}
	return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LocationDescriptorStatus.
func (in *LocationDescriptorStatus) DeepCopy() *LocationDescriptorStatus {
	// nil receiver yields nil so callers can copy unconditionally.
	if in == nil {
		return nil
	}
	out := new(LocationDescriptorStatus)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ResourceStatus) DeepCopyInto(out *ResourceStatus) {
	*out = *in
	// Conditions slice: new backing array, element-wise deep copy.
	if in.Conditions != nil {
		in, out := &in.Conditions, &out.Conditions
		*out = make([]conditionv1.Condition, len(*in))
		for i := range *in {
			(*in)[i].DeepCopyInto(&(*out)[i])
		}
	}
	return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ResourceStatus.
func (in *ResourceStatus) DeepCopy() *ResourceStatus {
	// nil receiver yields nil so callers can copy unconditionally.
	if in == nil {
		return nil
	}
	out := new(ResourceStatus)
	in.DeepCopyInto(out)
	return out
}
|
package com.yoga.tenant.tenant.mapper;
import com.yoga.core.mybatis.MyMapper;
import com.yoga.tenant.tenant.model.Tenant;
import org.apache.ibatis.annotations.Mapper;
@Mapper
public interface TenantMapper extends MyMapper<Tenant> {
    // Presumably refreshes the system tenant's id; the SQL statement lives in
    // the MyBatis XML mapper — verify the exact semantics there.
    void updateSystemTenantId();
}
|
/*
* (C) Copyright 2015-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributors:
* <EMAIL> (夜色)
*/
package com.mpush.common.message;
import com.mpush.api.Constants;
import com.mpush.api.connection.Connection;
import com.mpush.api.protocol.Packet;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
/**
 * Base class for messages whose body is (de)serialized through a Netty
 * {@link ByteBuf}. Byte arrays are written length-prefixed: a short length,
 * and for payloads of {@code Short.MAX_VALUE} bytes or more, an extra int
 * carrying the remainder of the length.
 * <p>
 * Created by ohun on 2015/12/28.
 *
 * @author <EMAIL>
 */
public abstract class ByteBufMessage extends BaseMessage {

    public ByteBufMessage(Packet message, Connection connection) {
        super(message, connection);
    }

    /** Wraps the raw body in a buffer and delegates to {@link #decode(ByteBuf)}. */
    @Override
    public void decode(byte[] body) {
        decode(Unpooled.wrappedBuffer(body));
    }

    /** Serializes this message into a fresh buffer and returns its readable bytes. */
    @Override
    public byte[] encode() {
        ByteBuf buffer = Unpooled.buffer();
        encode(buffer);
        byte[] out = new byte[buffer.readableBytes()];
        buffer.readBytes(out);
        return out;
    }

    public abstract void decode(ByteBuf body);

    public abstract void encode(ByteBuf body);

    /** Writes a string as length-prefixed UTF-8 bytes; null encodes as length 0. */
    public void encodeString(ByteBuf body, String field) {
        encodeBytes(body, field == null ? null : field.getBytes(Constants.UTF_8));
    }

    public void encodeByte(ByteBuf body, byte field) {
        body.writeByte(field);
    }

    public void encodeInt(ByteBuf body, int field) {
        body.writeInt(field);
    }

    public void encodeLong(ByteBuf body, long field) {
        body.writeLong(field);
    }

    /**
     * Length-prefixed write: a short length followed by the bytes. Arrays of
     * {@code Short.MAX_VALUE} bytes or longer store {@code Short.MAX_VALUE} in
     * the short, then an int with the remaining length, then the bytes.
     * Null and empty arrays both encode as a single zero short.
     */
    public void encodeBytes(ByteBuf body, byte[] field) {
        int length = field == null ? 0 : field.length;
        if (length == 0) {
            body.writeShort(0);
        } else if (length < Short.MAX_VALUE) {
            body.writeShort(length).writeBytes(field);
        } else {
            body.writeShort(Short.MAX_VALUE).writeInt(length - Short.MAX_VALUE).writeBytes(field);
        }
    }

    /** Reads a length-prefixed UTF-8 string; returns null for a zero length. */
    public String decodeString(ByteBuf body) {
        byte[] raw = decodeBytes(body);
        return raw == null ? null : new String(raw, Constants.UTF_8);
    }

    /** Mirror of {@link #encodeBytes(ByteBuf, byte[])}; returns null when the stored length is 0. */
    public byte[] decodeBytes(ByteBuf body) {
        int length = body.readShort();
        if (length == 0) return null;
        if (length == Short.MAX_VALUE) {
            length += body.readInt();
        }
        byte[] raw = new byte[length];
        body.readBytes(raw);
        return raw;
    }

    public byte decodeByte(ByteBuf body) {
        return body.readByte();
    }

    public int decodeInt(ByteBuf body) {
        return body.readInt();
    }

    public long decodeLong(ByteBuf body) {
        return body.readLong();
    }
}
|
import { L10N } from '.'
// Italian localisation table; keys must stay in sync with the L10N interface.
const italian: L10N = {
  // Home
  new_game: 'Nuova Partita',
  continue_game: 'Continua',
  credits: 'Crediti',
  // Game
  map: 'Mappa',
  inventory: 'Inventario',
  help: 'Aiuto',
  menu: 'Menu',
  continue_hint_keyboard: 'Premi spazio per continuare',
  continue_hint_mobile: 'Tocca per continuare',
  controls_game_controls: 'Controlli di gioco',
  controls_move: 'Muovi',
  controls_interact: 'Interagisci',
  controls_space: '\u2423 (spazio)',
  controls_shortcuts: 'Scorciatoie Mappa/Inventario',
  notification_automatic_door_message:
    'Il computer ha automaticamente aperto la porta usando la tua precedente soluzione',
  close_game: 'Menu principale',
  // Characters
  mesopotamia_jones: '<NAME>',
  von_talin: '<NAME>',
  hammurtosh: '<NAME>',
  von_dogen: 'W<NAME>',
  conscious_rock: 'Roccia Senziente',
  // Riddle
  back: 'Indietro',
  knock: 'Bussa alla porta',
  riddle_solved: 'Indovinello risolto!',
  riddle_open_door: 'Apri la porta',
  riddle_solved_hint: 'Clicca il pulsante o premi spazio',
  hint_execute: 'Esegui',
  hint_result: 'Risultato',
  hint_clear: 'Ripristina',
  // Riddle questions
  riddle_return_question: ([a]: number[]) => `Inserisci il numero ${a}`,
  riddle_sum_question: ([a, b]: number[]) =>
    `Quanto fa la somma di ${a} e ${b}?`,
  riddle_word_question: ([a, b, c, d]: string[]) =>
    `Se la porta aprire vorrai, queste lettere inserire dovrai: ${a}, ${b}, ${c}, ${d}`,
  riddle_if_question: ([a, b, c]: number[]) =>
    `Se il numero magico è pari, la porta si apre con la somma di ${a} e ${b}, altrimenti con il prodotto.\n Il numero magico è ${c}`,
  riddle_loop_question: ([a]: number[]) =>
    `Se dalla piramide uscire vorrai, ${a} volte bussare sulla porta dovrai`,
  // Blocks
  block_number: 'numero',
  block_letter: 'lettera',
  block_magic_number: 'numero_magico',
  block_result: 'soluzione',
  block_knock_door: 'bussaPorta',
  block_join: 'unisci',
  block_if: 'se',
  block_then: 'allora',
  block_else: 'altrimenti',
  block_loop: 'ripeti',
  block_times: 'volte',
  block_is_even: 'è pari',
  block_riddle_return_given_number: 'Dato il numero',
  block_riddle_return_given_numbers: 'Dati i numeri',
  block_riddle_return_given_letters: 'Date le lettere',
  block_riddle_return_open_with: 'apri la porta con',
  block_riddle_return_numbers_tooltip:
    'I dati sono numeri, quindi il risultato deve essere un numero',
  // Items
  computer_name: 'HAL 1337',
  computer_description:
    // FIX: grammar — "macchina" is feminine ("Questa macchina", not "Questo macchina").
    'La macchina che Von Talin in passato ha costruito. Questa macchina si collega alle porte e permette di risolvere gli enigmi automaticamente, se programmato correttamente.',
  old_key_name: 'vecchia chiave',
  old_key_description:
    'questa chiave serve ad aprire una cassa contenente il computer di Von Talin.',
  conscious_rock_name: 'roccia senziente',
  conscious_rock_description:
    'questo oggetto traduce automaticamente i testi degli indovinelli. Sembra contenere il fantasma di un maestro di cuneiforme.',
  cuneiform_legend_name: 'legenda di cuneiforme',
  cuneiform_legend_description:
    "una legenda che permette di tradurre l'alfabeto cuneiforme nel nostro alfabeto.",
  map_name: 'mappa della piramide',
  map_description: 'una mappa che mostra le stanze visitate della piramide.',
  // Tutorials
  tut_riddle_title: "L'indovinello",
  tut_riddle_text:
    'Per aprire la porta devi risolvere questo indovinello. Fai attenzione, i dati cambiano ogni volta.',
  tut_translate_riddle_title: "Tradurre l'indovinello",
  tut_translate_riddle_text:
    "Selezionando una lettera puoi scoprire a quale carattere dell'alfabeto corrisponde nella legenda in basso.",
  tut_open_door_title: 'Il lucchetto',
  tut_open_door_text:
    "Per aprire la porta e passare alla stanza successiva devi inserire nel lucchetto la soluzione dell'indovinello. Puoi usare anche le frecce ▲ e ▼ della tastiera.",
  tut_computer_title: 'Il Computer',
  tut_computer_text:
    'Il computer ti permette di creare una soluzione e si collega alla porta cercando di aprirla usando la soluzione che hai inventato.',
  tut_blocks_title: 'I blocchi',
  tut_blocks_text:
    "Per creare una soluzione devi trascinare questi blocchi nell'area bianca ed unirli come i pezzi di un puzzle.",
  tut_solution_title: 'Provare la soluzione',
  tut_solution_text:
    // FIX: accent — Italian future tense is "aprirà" (grave accent), not "aprirá".
    'Questo pulsante esegue la tua soluzione, se è corretta si aprirà la porta.',
  // Dialogues
  dialogue_1_0:
    "Accidenti, mentre esploravo delle antiche rovine si è rotto il pavimento e sono finito in questa caverna... devo cercare di uscire da qui. Più avanti c'è una porta, vediamo cosa c'è oltre.",
  dialogue_2_0:
    "Com'è possibile che ci sia una persona qui, queste rovine dovrebbero essere state chiuse da migliaia di anni... Chi sei tu?",
  dialogue_2_1:
    'Chi osa risvegliarmi dal mio eterno sonno! Vabbè sto dormendo da 104 anni, forse era anche ora che mi svegliassi... Mi chiamo <NAME> ed anche io ero un esploratore come te ma poi...',
  dialogue_2_2: '... Poi ti sei fatto male al ginocchio?',
  dialogue_2_3:
    'Cosa? E perché mai!? No, come te sono rimasto intrappolato in queste rovine, forse lo avrai già notato ma le porte in questo posto sono maledette!',
  dialogue_2_4:
    'Ogni volta che ci passi i numeri degli indovinelli cambiano e ti costringono a risolvere sempre lo stesso enigma! Per questo motivo avevo iniziato a costruire una macchina in grado di risolvere gli indovinelli automaticamente.',
  dialogue_2_5:
    "Sfortunatamente in queste caverne si aggira anche <NAME>, la mummia malvagia che ha maledetto questo posto e che mi ha trasformato in un fantasma, condannandomi a vivere qui per l'eternità.",
  dialogue_2_6:
    'Per proteggerlo dalle grinfie di Hammurtosh, ho nascosto la mia macchina in una cassa nella stanza in cui sei caduto inizialmente, tieni la chiave per aprirla.',
  dialogue_2_7:
    'Una volta recuperata la macchina, torna da me e ti farò un regalo!',
  dialogue_3_0:
    'Ora che hai il computer ti sarà più facile uscire dalla caverna, ma lascia che ti aiuti ancora... ecco fatto, ti ho installato le versioni pre-alphabeto di Minecraft e Angry Birds proprio come mi avevi chiesto.',
  dialogue_3_1:
    'Veramente non ricordo di avertelo chiesto, inoltre non credo che mi saranno di aiuto. Non hai qualcosa di più utile?',
  dialogue_3_2:
    'Vabbè come vuoi, sembra che ti serva compagnia ed inoltre è pericoloso andare da solo! Prendi questo.',
  dialogue_4_0:
    'Per caso hai visto il mio cane? Gli avevo insegnato ad abbaiare in cuneiforme...',
  dialogue_5_0:
    'Guarda guarda un esploratore sconosciuto, complimenti per essere arrivato fin qui! Sei libero di uscire...',
  dialogue_5_1:
    "...basta solo che risolvi l'indovinello sull'ultima porta. È una sfortuna non ci sia mai riuscito nessuno...",
  // NOTE: the misspellings below ("diaboliho", "infernoh") look intentional
  // (Hammurtosh's speech quirk) — left untouched.
  dialogue_5_2: 'Sei proprio diaboliho.',
  dialogue_5_3: "Benvenuto all'infernoh muhauhau.",
  dialogue_6_0: 'Wow.',
  dialogue_7_0:
    'Traduzione: "Attento esploratore, oltre quella porta troverai solo pericolo".',
  dialogue_8_0: 'Grazie per averci liberati! Il mio cane vuole parlarti',
  dialogue_9_0: 'WOW! AH-ROO',
  dialogue_9_1:
    'Traduzione: Sono il tuo capo. Test passato. Such Developer. Gioco finito. Titoli di coda... wow!',
  dialogue_need_key: "E' chiuso... servirebbe una chiave.",
  dialogue_need_rock:
    "Questa porta non si apre... l'incisione dice che serve una roccia con un numero di Bacon."
}

export default italian
|
<gh_stars>1-10
# Generated by the protocol buffer compiler. DO NOT EDIT!
# Source: google/cloud/aiplatform/v1beta1/tensorboard_service.proto for package 'Google.Cloud.AIPlatform.V1beta1'
# Original file comments:
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'grpc'
require 'google/cloud/aiplatform/v1beta1/tensorboard_service_pb'
module Google
  module Cloud
    module AIPlatform
      module V1beta1
        module TensorboardService
          # TensorboardService
          #
          # Generated gRPC service definition for managing Vertex AI
          # Tensorboard resources (tensorboards, experiments, runs and
          # time series). Do not edit by hand; regenerate from the proto.
          class Service

            include ::GRPC::GenericService

            # Protobuf (de)serialization hooks used by the gRPC runtime.
            self.marshal_class_method = :encode
            self.unmarshal_class_method = :decode
            self.service_name = 'google.cloud.aiplatform.v1beta1.TensorboardService'

            # Creates a Tensorboard.
            rpc :CreateTensorboard, ::Google::Cloud::AIPlatform::V1beta1::CreateTensorboardRequest, ::Google::Longrunning::Operation
            # Gets a Tensorboard.
            rpc :GetTensorboard, ::Google::Cloud::AIPlatform::V1beta1::GetTensorboardRequest, ::Google::Cloud::AIPlatform::V1beta1::Tensorboard
            # Updates a Tensorboard.
            rpc :UpdateTensorboard, ::Google::Cloud::AIPlatform::V1beta1::UpdateTensorboardRequest, ::Google::Longrunning::Operation
            # Lists Tensorboards in a Location.
            rpc :ListTensorboards, ::Google::Cloud::AIPlatform::V1beta1::ListTensorboardsRequest, ::Google::Cloud::AIPlatform::V1beta1::ListTensorboardsResponse
            # Deletes a Tensorboard.
            rpc :DeleteTensorboard, ::Google::Cloud::AIPlatform::V1beta1::DeleteTensorboardRequest, ::Google::Longrunning::Operation
            # Creates a TensorboardExperiment.
            rpc :CreateTensorboardExperiment, ::Google::Cloud::AIPlatform::V1beta1::CreateTensorboardExperimentRequest, ::Google::Cloud::AIPlatform::V1beta1::TensorboardExperiment
            # Gets a TensorboardExperiment.
            rpc :GetTensorboardExperiment, ::Google::Cloud::AIPlatform::V1beta1::GetTensorboardExperimentRequest, ::Google::Cloud::AIPlatform::V1beta1::TensorboardExperiment
            # Updates a TensorboardExperiment.
            rpc :UpdateTensorboardExperiment, ::Google::Cloud::AIPlatform::V1beta1::UpdateTensorboardExperimentRequest, ::Google::Cloud::AIPlatform::V1beta1::TensorboardExperiment
            # Lists TensorboardExperiments in a Location.
            rpc :ListTensorboardExperiments, ::Google::Cloud::AIPlatform::V1beta1::ListTensorboardExperimentsRequest, ::Google::Cloud::AIPlatform::V1beta1::ListTensorboardExperimentsResponse
            # Deletes a TensorboardExperiment.
            rpc :DeleteTensorboardExperiment, ::Google::Cloud::AIPlatform::V1beta1::DeleteTensorboardExperimentRequest, ::Google::Longrunning::Operation
            # Creates a TensorboardRun.
            rpc :CreateTensorboardRun, ::Google::Cloud::AIPlatform::V1beta1::CreateTensorboardRunRequest, ::Google::Cloud::AIPlatform::V1beta1::TensorboardRun
            # Batch create TensorboardRuns.
            rpc :BatchCreateTensorboardRuns, ::Google::Cloud::AIPlatform::V1beta1::BatchCreateTensorboardRunsRequest, ::Google::Cloud::AIPlatform::V1beta1::BatchCreateTensorboardRunsResponse
            # Gets a TensorboardRun.
            rpc :GetTensorboardRun, ::Google::Cloud::AIPlatform::V1beta1::GetTensorboardRunRequest, ::Google::Cloud::AIPlatform::V1beta1::TensorboardRun
            # Updates a TensorboardRun.
            rpc :UpdateTensorboardRun, ::Google::Cloud::AIPlatform::V1beta1::UpdateTensorboardRunRequest, ::Google::Cloud::AIPlatform::V1beta1::TensorboardRun
            # Lists TensorboardRuns in a Location.
            rpc :ListTensorboardRuns, ::Google::Cloud::AIPlatform::V1beta1::ListTensorboardRunsRequest, ::Google::Cloud::AIPlatform::V1beta1::ListTensorboardRunsResponse
            # Deletes a TensorboardRun.
            rpc :DeleteTensorboardRun, ::Google::Cloud::AIPlatform::V1beta1::DeleteTensorboardRunRequest, ::Google::Longrunning::Operation
            # Batch create TensorboardTimeSeries that belong to a TensorboardExperiment.
            rpc :BatchCreateTensorboardTimeSeries, ::Google::Cloud::AIPlatform::V1beta1::BatchCreateTensorboardTimeSeriesRequest, ::Google::Cloud::AIPlatform::V1beta1::BatchCreateTensorboardTimeSeriesResponse
            # Creates a TensorboardTimeSeries.
            rpc :CreateTensorboardTimeSeries, ::Google::Cloud::AIPlatform::V1beta1::CreateTensorboardTimeSeriesRequest, ::Google::Cloud::AIPlatform::V1beta1::TensorboardTimeSeries
            # Gets a TensorboardTimeSeries.
            rpc :GetTensorboardTimeSeries, ::Google::Cloud::AIPlatform::V1beta1::GetTensorboardTimeSeriesRequest, ::Google::Cloud::AIPlatform::V1beta1::TensorboardTimeSeries
            # Updates a TensorboardTimeSeries.
            rpc :UpdateTensorboardTimeSeries, ::Google::Cloud::AIPlatform::V1beta1::UpdateTensorboardTimeSeriesRequest, ::Google::Cloud::AIPlatform::V1beta1::TensorboardTimeSeries
            # Lists TensorboardTimeSeries in a Location.
            rpc :ListTensorboardTimeSeries, ::Google::Cloud::AIPlatform::V1beta1::ListTensorboardTimeSeriesRequest, ::Google::Cloud::AIPlatform::V1beta1::ListTensorboardTimeSeriesResponse
            # Deletes a TensorboardTimeSeries.
            rpc :DeleteTensorboardTimeSeries, ::Google::Cloud::AIPlatform::V1beta1::DeleteTensorboardTimeSeriesRequest, ::Google::Longrunning::Operation
            # Reads multiple TensorboardTimeSeries' data. The data point number limit is
            # 1000 for scalars, 100 for tensors and blob references. If the number of
            # data points stored is less than the limit, all data will be returned.
            # Otherwise, that limit number of data points will be randomly selected from
            # this time series and returned.
            rpc :BatchReadTensorboardTimeSeriesData, ::Google::Cloud::AIPlatform::V1beta1::BatchReadTensorboardTimeSeriesDataRequest, ::Google::Cloud::AIPlatform::V1beta1::BatchReadTensorboardTimeSeriesDataResponse
            # Reads a TensorboardTimeSeries' data. By default, if the number of data
            # points stored is less than 1000, all data will be returned. Otherwise, 1000
            # data points will be randomly selected from this time series and returned.
            # This value can be changed by changing max_data_points, which can't be
            # greater than 10k.
            rpc :ReadTensorboardTimeSeriesData, ::Google::Cloud::AIPlatform::V1beta1::ReadTensorboardTimeSeriesDataRequest, ::Google::Cloud::AIPlatform::V1beta1::ReadTensorboardTimeSeriesDataResponse
            # Gets bytes of TensorboardBlobs.
            # This is to allow reading blob data stored in consumer project's Cloud
            # Storage bucket without users having to obtain Cloud Storage access
            # permission.
            rpc :ReadTensorboardBlobData, ::Google::Cloud::AIPlatform::V1beta1::ReadTensorboardBlobDataRequest, stream(::Google::Cloud::AIPlatform::V1beta1::ReadTensorboardBlobDataResponse)
            # Write time series data points of multiple TensorboardTimeSeries in multiple
            # TensorboardRun's. If any data fail to be ingested, an error will be
            # returned.
            rpc :WriteTensorboardExperimentData, ::Google::Cloud::AIPlatform::V1beta1::WriteTensorboardExperimentDataRequest, ::Google::Cloud::AIPlatform::V1beta1::WriteTensorboardExperimentDataResponse
            # Write time series data points into multiple TensorboardTimeSeries under
            # a TensorboardRun. If any data fail to be ingested, an error will be
            # returned.
            rpc :WriteTensorboardRunData, ::Google::Cloud::AIPlatform::V1beta1::WriteTensorboardRunDataRequest, ::Google::Cloud::AIPlatform::V1beta1::WriteTensorboardRunDataResponse
            # Exports a TensorboardTimeSeries' data. Data is returned in paginated
            # responses.
            rpc :ExportTensorboardTimeSeriesData, ::Google::Cloud::AIPlatform::V1beta1::ExportTensorboardTimeSeriesDataRequest, ::Google::Cloud::AIPlatform::V1beta1::ExportTensorboardTimeSeriesDataResponse
          end

          # Client stub class generated from the service definition.
          Stub = Service.rpc_stub_class
        end
      end
    end
  end
end
|
<reponame>OwenKelvin/mini-netflix<filename>src/app/view/view-movie/view-movie.component.ts
import { Component, OnInit } from '@angular/core';
import { Router, ActivatedRoute } from '@angular/router';
import { MovieService } from 'src/app/core/services/movie.service';
import { imageUrl } from 'src/app/config/app.config';
@Component({
  selector: 'app-view-movie',
  templateUrl: './view-movie.component.html',
  styleUrls: ['./view-movie.component.css']
})
export class ViewMovieComponent implements OnInit {
  // Movie details as returned by the API, with image paths expanded to full URLs.
  movie: any;
  imageUrl = imageUrl;

  constructor(private router: Router, private movieService: MovieService) {}

  // Reads the movie id from the active child route and loads the movie.
  ngOnInit() {
    this.movie = {};
    const activatedRoute: ActivatedRoute = this.router.routerState.root;
    const routerChild = activatedRoute.children[0];
    if (routerChild) {
      routerChild.params.subscribe(params => {
        if (params && params.id) {
          this.movieService.getMovie(params.id).subscribe(results => {
            this.movie = {
              ...results,
              backdrop_path: `${imageUrl}${results.backdrop_path}`,
              // BUG FIX: the poster was previously built from backdrop_path,
              // so the poster always showed the backdrop image.
              poster_path: `${imageUrl}${results.poster_path}`
            };
          });
        }
      });
    }
  }
}
|
#!/bin/bash
# Conda post-link script: download the rgug4105a.db annotation tarball from one
# of several mirrors, verify its md5sum, and install it into the R library.
# Expects conda to provide PREFIX, PKG_NAME, PKG_VERSION and PKG_BUILDNUM.
FN="rgug4105a.db_3.2.3.tar.gz"
URLS=(
  "https://bioconductor.org/packages/3.13/data/annotation/src/contrib/rgug4105a.db_3.2.3.tar.gz"
  "https://bioarchive.galaxyproject.org/rgug4105a.db_3.2.3.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-rgug4105a.db/bioconductor-rgug4105a.db_3.2.3_src_all.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-rgug4105a.db/bioconductor-rgug4105a.db_3.2.3_src_all.tar.gz"
)
MD5="3ccf354083ae36a7ae687fb8209c4e5b"

# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING="$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM"
mkdir -p "$STAGING"
TARBALL="$STAGING/$FN"

SUCCESS=0
for URL in "${URLS[@]}"; do
  # -f: fail on HTTP errors instead of saving the error page; -L: follow redirects.
  curl -fL "$URL" > "$TARBALL" || continue
  # Platform-specific md5sum checks.
  if [[ $(uname -s) == "Linux" ]]; then
    # md5sum -c expects "<md5><two spaces><file>"; a single space is rejected.
    if md5sum -c <<<"$MD5  $TARBALL"; then
      SUCCESS=1
      break
    fi
  elif [[ $(uname -s) == "Darwin" ]]; then
    if [[ $(md5 "$TARBALL" | cut -f4 -d " ") == "$MD5" ]]; then
      SUCCESS=1
      break
    fi
  fi
done

if [[ $SUCCESS != 1 ]]; then
  echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:" >&2
  printf '%s\n' "${URLS[@]}" >&2
  exit 1
fi

# Install and clean up
R CMD INSTALL --library="$PREFIX/lib/R/library" "$TARBALL"
rm "$TARBALL"
rmdir "$STAGING"
|
package org.jooby.internal.mongodb;
import com.mongodb.DBObject;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import org.jooby.mongodb.GeneratedValue;
import org.jooby.mongodb.IdGen;
import org.jooby.test.MockUnit;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mongodb.morphia.Datastore;
import org.mongodb.morphia.annotations.Id;
import org.mongodb.morphia.mapping.MappedClass;
import org.mongodb.morphia.mapping.Mapper;
import org.mongodb.morphia.query.Query;
import org.mongodb.morphia.query.UpdateOperations;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import java.lang.reflect.Field;
@RunWith(PowerMockRunner.class)
@PrepareForTest({AutoIncID.class, Field.class, StoredId.class})
public class AutoIncIDTest {
// Fixture: entity whose @Id field carries @GeneratedValue, so AutoIncID
// should allocate an id for it on prePersist.
public static class MonDoc {
    @Id
    @GeneratedValue
    public Long id;
}
// Fixture: entity with a plain @Id (no @GeneratedValue); AutoIncID should
// leave it untouched.
public static class IdIgnored {
    @Id
    public Long id;
}
@SuppressWarnings({"rawtypes", "unchecked"})
@Test
public void prePersitInit() throws Exception {
    // First-use path with IdGen.LOCAL: findAndModify returns null, so a new
    // per-class StoredId counter (keyed by the entity class name) must be
    // constructed and saved.
    Field idField = MonDoc.class.getDeclaredField("id");
    Object entity = new MonDoc();
    new MockUnit(Datastore.class, DBObject.class, Mapper.class, MappedClass.class)
        .expect(unit -> {
          MappedClass mclass = unit.get(MappedClass.class);
          expect(mclass.getIdField()).andReturn(idField);
          Class clazz = MonDoc.class;
          expect(mclass.getClazz()).andReturn(clazz);
          Mapper mapper = unit.get(Mapper.class);
          expect(mapper.getMappedClass(entity)).andReturn(mclass);
          Query<StoredId> query = unit.mock(Query.class);
          UpdateOperations<StoredId> uops = unit.mock(UpdateOperations.class);
          expect(uops.inc("value")).andReturn(uops);
          // Expect AutoIncID to construct StoredId with the class-name key.
          StoredId storedId = unit.mockConstructor(StoredId.class, MonDoc.class.getName());
          Datastore ds = unit.get(Datastore.class);
          expect(ds.find(StoredId.class, "_id", MonDoc.class.getName())).andReturn(query);
          expect(ds.createUpdateOperations(StoredId.class)).andReturn(uops);
          expect(ds.findAndModify(query, uops)).andReturn(null);
          expect(ds.save(storedId)).andReturn(null);
        })
        .run(unit -> {
          new AutoIncID(unit.get(Datastore.class), IdGen.LOCAL)
              .prePersist(entity, unit.get(DBObject.class), unit.get(Mapper.class));
        });
}
@SuppressWarnings({"rawtypes", "unchecked"})
@Test
public void prePersit() throws Exception {
    // Steady-state path with IdGen.GLOBAL: an existing "Global" StoredId
    // counter is found and atomically incremented; no save of a new counter.
    Field idField = MonDoc.class.getDeclaredField("id");
    Object entity = new MonDoc();
    new MockUnit(Datastore.class, DBObject.class, Mapper.class, MappedClass.class)
        .expect(unit -> {
          MappedClass mclass = unit.get(MappedClass.class);
          expect(mclass.getIdField()).andReturn(idField);
          Class clazz = MonDoc.class;
          expect(mclass.getClazz()).andReturn(clazz);
          Mapper mapper = unit.get(Mapper.class);
          expect(mapper.getMappedClass(entity)).andReturn(mclass);
          Query<StoredId> query = unit.mock(Query.class);
          UpdateOperations<StoredId> uops = unit.mock(UpdateOperations.class);
          expect(uops.inc("value")).andReturn(uops);
          StoredId storedId = new StoredId();
          Datastore ds = unit.get(Datastore.class);
          expect(ds.find(StoredId.class, "_id", "Global")).andReturn(query);
          expect(ds.createUpdateOperations(StoredId.class)).andReturn(uops);
          expect(ds.findAndModify(query, uops)).andReturn(storedId);
        })
        .run(unit -> {
          new AutoIncID(unit.get(Datastore.class), IdGen.GLOBAL)
              .prePersist(entity, unit.get(DBObject.class), unit.get(Mapper.class));
        });
}
@Test
public void prePersitIgnored() throws Exception {
Field idField = IdIgnored.class.getDeclaredField("id");
Object entity = new IdIgnored();
new MockUnit(Datastore.class, DBObject.class, Mapper.class, MappedClass.class)
.expect(unit -> {
MappedClass mclass = unit.get(MappedClass.class);
expect(mclass.getIdField()).andReturn(idField);
Mapper mapper = unit.get(Mapper.class);
expect(mapper.getMappedClass(entity)).andReturn(mclass);
})
.run(unit -> {
new AutoIncID(unit.get(Datastore.class), IdGen.GLOBAL)
.prePersist(entity, unit.get(DBObject.class), unit.get(Mapper.class));
});
}
@SuppressWarnings({"rawtypes", "unchecked"})
@Test(expected = IllegalStateException.class)
public void prePersitErr() throws Exception {
Object entity = new Object();
new MockUnit(Datastore.class, DBObject.class, Mapper.class, MappedClass.class, Field.class,
GeneratedValue.class)
.expect(unit -> {
GeneratedValue genval = unit.get(GeneratedValue.class);
Field idField = unit.get(Field.class);
expect(idField.getAnnotation(GeneratedValue.class)).andReturn(genval);
})
.expect(unit -> {
Field idField = unit.get(Field.class);
idField.setAccessible(true);
idField.set(entity, 1L);
expectLastCall().andThrow(new IllegalAccessException("intentional error"));
MappedClass mclass = unit.get(MappedClass.class);
expect(mclass.getIdField()).andReturn(idField);
Class clazz = MonDoc.class;
expect(mclass.getClazz()).andReturn(clazz);
Mapper mapper = unit.get(Mapper.class);
expect(mapper.getMappedClass(entity)).andReturn(mclass);
Query<StoredId> query = unit.mock(Query.class);
UpdateOperations<StoredId> uops = unit.mock(UpdateOperations.class);
expect(uops.inc("value")).andReturn(uops);
StoredId storedId = new StoredId();
Datastore ds = unit.get(Datastore.class);
expect(ds.find(StoredId.class, "_id", "Global")).andReturn(query);
expect(ds.createUpdateOperations(StoredId.class)).andReturn(uops);
expect(ds.findAndModify(query, uops)).andReturn(storedId);
})
.run(unit -> {
new AutoIncID(unit.get(Datastore.class), IdGen.GLOBAL)
.prePersist(entity, unit.get(DBObject.class), unit.get(Mapper.class));
});
}
}
|
const { globalShortcut } = require('electron').remote
const chai = require('chai')
const dirtyChai = require('dirty-chai')
const isCI = require('electron').remote.getGlobal('isCi')
const { expect } = chai
chai.use(dirtyChai)
describe('globalShortcut module', () => {
  before(function () {
    // Global shortcut registration is unreliable on Windows CI workers.
    if (isCI && process.platform === 'win32') {
      this.skip()
    }
  })

  beforeEach(() => {
    // Start every test from a clean slate.
    globalShortcut.unregisterAll()
  })

  it('can register and unregister single accelerators', () => {
    const accelerator = 'CmdOrCtrl+A+B+C'

    expect(globalShortcut.isRegistered(accelerator)).to.be.false()
    globalShortcut.register(accelerator, () => {})
    expect(globalShortcut.isRegistered(accelerator)).to.be.true()
    globalShortcut.unregister(accelerator)
    // Fixed: this assertion was accidentally duplicated on two consecutive
    // lines; one copy removed.
    expect(globalShortcut.isRegistered(accelerator)).to.be.false()

    // Re-register and verify unregisterAll() clears it as well.
    globalShortcut.register(accelerator, () => {})
    expect(globalShortcut.isRegistered(accelerator)).to.be.true()
    globalShortcut.unregisterAll()
    expect(globalShortcut.isRegistered(accelerator)).to.be.false()
  })

  it('can register and unregister multiple accelerators', () => {
    const accelerators = ['CmdOrCtrl+X', 'CmdOrCtrl+Y']

    expect(globalShortcut.isRegistered(accelerators[0])).to.be.false()
    expect(globalShortcut.isRegistered(accelerators[1])).to.be.false()
    globalShortcut.registerAll(accelerators, () => {})
    expect(globalShortcut.isRegistered(accelerators[0])).to.be.true()
    expect(globalShortcut.isRegistered(accelerators[1])).to.be.true()
    globalShortcut.unregisterAll()
    expect(globalShortcut.isRegistered(accelerators[0])).to.be.false()
    expect(globalShortcut.isRegistered(accelerators[1])).to.be.false()
  })
})
|
<filename>lang/py/cookbook/v2/source/cb2_4_12_sol_2.py
def pairwise(iterable):
    """Yield consecutive, non-overlapping (item, item) pairs from *iterable*.

    A trailing unpaired element is silently dropped, matching the original
    behaviour. The original called ``iter(iterable).next``, which is
    Python 2-only; it also let StopIteration escape a generator body, which
    PEP 479 turns into a RuntimeError on Python 3.7+. ``zip`` over the same
    iterator twice is the portable equivalent.
    """
    it = iter(iterable)
    return zip(it, it)


def dictFromSequence(seq):
    """Build a dict from a flat ``key, value, key, value, ...`` sequence."""
    return dict(pairwise(seq))
|
#!/usr/bin/env bash
# Create and configure a new virtual env to run WOfS within EASI
venv=unhas                     # virtualenv / Jupyter kernel name
venv_name="UNHAS Environment"  # display name shown in the Jupyter launcher
venv_root=~/.venv              # directory under which virtualenvs are created
# Absolute path of the directory containing this script.
script_dir=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
# Additional repos are cloned two levels above the script directory.
repos_dir=${script_dir}/../..
function create_venv() {
    # Create the virtual env and register its Jupyter kernel, unless a
    # kernelspec with this name already exists.
    # Fixed: the original wrapped the pipeline in $(...), which runs the
    # (empty) output of `grep -q` as a command instead of testing the
    # pipeline's exit status directly.
    if ! jupyter kernelspec list | grep -q "${venv}"; then
        echo "Creating virtual env. Please wait..."
        mkdir -p "${venv_root}"
        python -m venv "${venv_root}/${venv}"
        # Expose the base image's site-packages to the new env via a .pth file.
        realpath /env/lib/python3.8/site-packages > "${venv_root}/${venv}/lib/python3.8/site-packages/base_venv.pth"
        source "${venv_root}/${venv}/bin/activate"
        python -m ipykernel install --user --name="${venv}" --display-name "${venv_name}"
        deactivate
    fi
}
function install_dependencies() {
    # Install WOfS python dependencies.
    # Activates the env so pip installs into it, then deactivates again.
    echo "Creating/updating python dependencies. Please wait..."
    source ${venv_root}/${venv}/bin/activate
    pip install -U ephem
    # wofs is published on the DEA package index, not on PyPI.
    pip install -U --index-url https://packages.dea.ga.gov.au/ wofs
    deactivate
}
function clone_repos() {
    # Clone required repos (shallow clone, skipped if already present).
    echo "Cloning additional repos. Please wait..."
    if [ ! -d ${repos_dir}/hub-notebooks ] ; then
        git clone --depth=1 https://csiro-easi@dev.azure.com/csiro-easi/easi-hub-public/_git/hub-notebooks ${repos_dir}/hub-notebooks
    fi
}
# Main sequence: create the env, install dependencies into it, fetch repos.
create_venv
install_dependencies
clone_repos
|
<reponame>idcodeoverflow/TravelApplication
package cbedoy.cblibrary.services;
import android.app.Service;
import android.content.Intent;
import android.os.IBinder;
/**
* Created by <NAME> on 28/12/2014.
* <p/>
* Mobile App Developer
* CBLibrary
* <p/>
* E-mail: <EMAIL>
* Facebook: https://www.facebook.com/carlos.bedoy
* Github: https://github.com/cbedoy
*/
/**
 * Stub {@link Service}. It does not support binding: {@link #onBind}
 * returns {@code null}, so clients must interact with it as a started
 * service (if at all). No other lifecycle callbacks are overridden.
 */
public class NotificationService extends Service {
    @Override
    public IBinder onBind(Intent intent) {
        // Binding is intentionally unsupported.
        return null;
    }
}
|
<reponame>minuk8932/Algorithm_BaekJoon<gh_stars>1-10
package implementation;
import java.io.BufferedReader;
import java.io.InputStreamReader;
/**
*
* @author minchoba
* 백준 14626번: ISBN
*
* @see https://www.acmicpc.net/problem/14626/
*
*/
public class Boj14626 {
    // Marker character for the single smudged (unknown) digit in the ISBN.
    private static final char ERASED = '*';

    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        char[] ISBN = br.readLine().toCharArray();
        System.out.println(getCode(ISBN));
    }

    /**
     * Recovers the erased digit of an ISBN-13 string in which exactly one
     * position (other than the final check digit) is replaced by '*'.
     * Weighted sum: digits at even 0-based indices count once, odd indices
     * count three times; the digit that makes the sum consistent with the
     * check digit is returned.
     */
    private static int getCode(char[] arr) {
        int convert = 1;                         // weight of the erased position (1 or 3)
        int checkSign = arr[arr.length - 1] - '0'; // last character is the check digit
        int total = 0;
        for(int i = 0; i < arr.length - 1; i++) {
            if(ERASED == arr[i]) {
                if(i % 2 != 0) convert = 3; // erased digit sits at an odd 0-based index, so it is weighted by 3
                continue;
            }
            if(i % 2 == 0) total += (arr[i] - '0');
            else total += (arr[i] - '0') * 3;
        }
        int diff = 10 - checkSign;
        checkSign = diff == 10 ? 0 : diff; // a computed check value of 10 wraps to 0
        // Try each candidate digit 0..9 for the erased position.
        for(int i = 0; i < 10; i++) {
            int value = (total + i * convert) % 10;
            if(checkSign == value) return i;
        }
        return 0;
    }
}
|
package org.rs2server.rs2.model.npc.pc;
import org.rs2server.rs2.model.Location;
/**
 * The four colour-coded portals, each with its compass direction label,
 * two interface child ids (presumably the health and shield widget
 * children — TODO confirm against the interface definition), the portal's
 * own tile, and the set of tiles where its NPCs spawn.
 *
 * NOTE(review): BLUE's portal is at (2680, 2588) but its spawn tiles
 * cluster around (2644-2648, 2571-2572), while RED's portal at (2645, 2569)
 * has spawn tiles around (2679-2680, 2587-2591). The spawn arrays for BLUE
 * and RED look swapped relative to their portal locations — verify in game.
 *
 * @author twelve
 */
public enum PortalCardinality {
    BLUE("eastern", 15, 20, Location.create(2680, 2588, 0), new Location[] {
        Location.create(2644, 2571, 0),
        Location.create(2644, 2572, 0),
        Location.create(2645, 2572, 0),
        Location.create(2646, 2572, 0),
        Location.create(2647, 2572, 0),
        Location.create(2648, 2572, 0),
        Location.create(2648, 2571, 0)
    }),
    RED("south-western", 17, 24, Location.create(2645, 2569, 0), new Location[] {
        Location.create(2680, 2587, 0),
        Location.create(2679, 2587, 0),
        Location.create(2679, 2588, 0),
        Location.create(2679, 2589, 0),
        Location.create(2679, 2590, 0),
        Location.create(2679, 2591, 0),
        Location.create(2680, 2591, 0)
    }),
    YELLOW("south-eastern", 16, 22, Location.create(2669, 2570, 0), new Location[] {
        Location.create(2668, 2572, 0),
        Location.create(2668, 2573, 0),
        Location.create(2669, 2573, 0),
        Location.create(2670, 2573, 0),
        Location.create(2671, 2573, 0),
        Location.create(2672, 2573, 0),
        Location.create(2672, 2572, 0)
    }),
    PURPLE("western", 14, 18, Location.create(2628, 2591, 0), new Location[]{
        Location.create(2630, 2594, 0),
        Location.create(2631, 2594, 0),
        Location.create(2631, 2593, 0),
        Location.create(2631, 2592, 0),
        Location.create(2631, 2591, 0),
        Location.create(2631, 2590, 0),
        Location.create(2630, 2590, 0)});

    private final String direction;      // human-readable compass label
    private final int healthChild;       // interface child id (see class note)
    private final int shieldChild;       // interface child id (see class note)
    private final Location location;     // the portal's own tile
    private final Location[] spawnLocations; // tiles where NPCs appear

    PortalCardinality(String direction, int healthChild, int shieldChild, Location location, Location[] spawnLocations) {
        this.direction = direction;
        this.healthChild = healthChild;
        this.shieldChild = shieldChild;
        this.location = location;
        this.spawnLocations = spawnLocations;
    }

    public final String getDirection() {
        return this.direction;
    }

    /** Lower-cased enum constant name, e.g. "blue". */
    public String getName() {
        return this.name().toLowerCase();
    }

    public int getHealthChild() {
        return healthChild;
    }

    public int getShieldChild() {
        return shieldChild;
    }

    public Location getLocation() {
        return location;
    }

    public Location[] getSpawnLocations() {
        return spawnLocations;
    }
}
|
/**
* @module react
*/
import React from 'react'
/**
* @module HorseCard
*/
import HorseCard from 'components/horse/HorseCard'
/**
* @module expect
*/
import chai, { expect } from 'chai'
/**
* @module shallow
*/
import { shallow } from 'enzyme'
/**
* @module ChaiEnzyme
*/
import chaiEnzyme from 'chai-enzyme'
chai.use(chaiEnzyme())
describe('Components - cards - HorseCard', () => {
let wrapper
beforeEach(() => {
wrapper = shallow(<HorseCard src='' isPending={false} />)
})
it('should exist', () => {
expect(wrapper).to.exist
})
})
|
/**
 * Holds presentation/branding state; currently just an inline style
 * value applied to the page body.
 */
class Branding
{
    /** @var mixed Inline style for the body element (null until set). */
    private $bodyStyle;

    /**
     * @param mixed $style Style value to apply to the body.
     */
    public function setBodyStyle($style)
    {
        $this->bodyStyle = $style;
    }

    /**
     * @return mixed The previously set body style, or null if unset.
     */
    public function getBodyStyle()
    {
        return $this->bodyStyle;
    }
}
package com.amala.demo.exception;
/**
 * Thrown when no institution matching the given legal name exists.
 */
public class InstitutionNotFoundException extends RuntimeException{
    /**
     * @param legalName the legal name that failed to resolve to an institution
     */
    public InstitutionNotFoundException(String legalName) {
        // Fixed message: it previously read "Holiday with identifier<name>does
        // not exist" — wrong entity name (copy-paste from a Holiday exception)
        // and missing spacing around the identifier.
        super("Institution with identifier '" + legalName + "' does not exist");
    }
}
|
#!/usr/bin/env bash
# Copyright (c) 2021, ARM Limited and Contributors. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of ARM nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# This script uses the following environment variables from the variant
#
# VARIANT - build variant name
# TOP_DIR - workspace root directory
# CROSS_COMPILE - PATH to GCC including CROSS-COMPILE prefix
# PARALLELISM - number of cores to build across
# Workspace layout and toolchain configuration.
TOP_DIR=$(pwd)   # was backticks; $() is the modern, nestable form
FWTS_PATH=fwts
FWTS_BINARY=fwts_output
RAMDISK_PATH=ramdisk
FWTS_DEP=$RAMDISK_PATH/fwts_build_dep
GCC=tools/gcc-linaro-7.5.0-2019.12-x86_64_aarch64-linux-gnu/bin/aarch64-linux-gnu-
CROSS_COMPILE=$TOP_DIR/$GCC
BUILD_PLAT=$1
BUILD_TYPE=$2

# Require an explicit build type: S (standalone BBR) or F (full SystemReady).
if ! [[ $BUILD_TYPE = S ]] && ! [[ $BUILD_TYPE = F ]] ; then
    echo "Please provide a Build type."
    echo "Usage build-sct.sh <target> <S/F>"
    echo "S->Standalone BBR,F->Full systemready"
    # Fixed: bad usage previously exited with status 0, hiding the failure
    # from callers.
    exit 1
fi

# Locate the BBR ACS sources; the workspace layout differs per build type.
if [[ $BUILD_TYPE = S ]]; then
    BBR_DIR=$TOP_DIR/../..
else
    BBR_DIR=$TOP_DIR/bbr-acs
fi
# Stage the fwts build dependencies into the ramdisk directory.
init()
{
    if [[ $BUILD_TYPE = S ]]; then
        # Standalone builds run outside the full workspace; create ramdisk dir.
        mkdir -p $TOP_DIR/$RAMDISK_PATH
    fi
    cp -r $BBR_DIR/common/fwts_build_dep $RAMDISK_PATH
}
# Cross-compile fwts for aarch64: apply the BBR patch set (each patch is
# applied only if a reverse dry-run shows it is not already in the tree),
# then configure and install into $FWTS_BINARY.
do_build()
{
    pushd $TOP_DIR/$FWTS_PATH
    CROSS_COMPILE_DIR=$(dirname $CROSS_COMPILE)
    DEF_PATH=$PATH
    PATH=$(getconf PATH) #Reset path to avoid cross compiler mismatch
    PATH="$PATH:$CROSS_COMPILE_DIR"
    echo $BBR_DIR
    # If the patch can be reverse-applied it is already in; otherwise apply it.
    if ! patch -R -p0 -s -f --dry-run -p1 < $BBR_DIR/common/patches/bbr-fwts.patch ; then
        echo "Applying FWTS Patch ..."
        patch -p1 -s -f < $BBR_DIR/common/patches/bbr-fwts.patch
    fi
    if ! patch -R -p0 -s -f --dry-run -p1 < $BBR_DIR/common/patches/fwts_additional_patches/fwts_spcr_baur_rate.patch ; then
        echo "Applying FWTS additional Patches ..."
        patch -p1 -s -f < $BBR_DIR/common/patches/fwts_additional_patches/fwts_spcr_baur_rate.patch
    fi
    if ! patch -R -p0 -s -f --dry-run -p1 < $BBR_DIR/common/patches/fwts_additional_patches/0001-Fix-for-dmicheck-test-crashes-with-Sig7.patch ; then
        echo "Applying FWTS additional Patches for dmicheck..."
        patch -p1 -s -f < $BBR_DIR/common/patches/fwts_additional_patches/0001-Fix-for-dmicheck-test-crashes-with-Sig7.patch
    fi
    mkdir -p $FWTS_BINARY
    mkdir -p $FWTS_BINARY/bash
    autoreconf -ivf
    # Pre-seed autoconf cache: these runtime checks cannot execute target
    # binaries when cross-compiling.
    export ac_cv_func_malloc_0_nonnull=yes
    export ac_cv_func_realloc_0_nonnull=yes
    ./configure --host=aarch64-linux-gnu \
--enable-static=yes CFLAGS="-g -O2 -I$TOP_DIR/$FWTS_DEP/include" \
LDFLAGS="-L$TOP_DIR/$FWTS_DEP -Wl,-rpath-link,$TOP_DIR/$FWTS_DEP \
-Wl,-rpath-link,$TOP_DIR/$FWTS_PATH/src/libfwtsiasl/.libs/" \
--prefix=$TOP_DIR/$FWTS_PATH/$FWTS_BINARY \
--exec-prefix=$TOP_DIR/$FWTS_PATH/$FWTS_BINARY --datarootdir=$TOP_DIR/$FWTS_PATH/$FWTS_BINARY \
--with-bashcompletiondir=$TOP_DIR/$FWTS_PATH/$FWTS_BINARY/bash
    make install
    PATH=$DEF_PATH #Restore def path
    popd
}
# Remove fwts build artifacts and any previously installed output.
do_clean()
{
    pushd $TOP_DIR/$FWTS_PATH
    CROSS_COMPILE_DIR=$(dirname $CROSS_COMPILE)
    PATH="$PATH:$CROSS_COMPILE_DIR"
    # Only invoke make targets if a prior configure/install actually ran.
    if [ -f "$TOP_DIR/$FWTS_PATH/Makefile" ]; then
        make clean
    fi
    if [ -f "$TOP_DIR/$FWTS_PATH/$FWTS_BINARY/bin/fwts" ]; then
        make uninstall
    fi
    rm -rf $TOP_DIR/$RAMDISK_PATH/$FWTS_BINARY
    popd
}
# Copy the fwts install tree into the ramdisk. For full (F) builds targeting
# IR, also register the IR-specific fwts test list in the ramdisk manifest.
do_package ()
{
    echo "Packaging FWTS... $VARIANT";
    if [[ $BUILD_TYPE = F ]]; then
        # Drop any stale manifest entry before (re-)adding it below.
        sed -i '/ir_bbr_fwts_tests.ini/d' $TOP_DIR/ramdisk/files.txt
        if [ "$BUILD_PLAT" = "IR" ]; then
            #Add the entry in file.txt of ramdisk
            echo "file /bin/ir_bbr_fwts_tests.ini ./fwts_output/bin/ir_bbr_fwts_tests.ini 766 0 0" >> $TOP_DIR/ramdisk/files.txt
            cp $BBR_DIR/ebbr/config/ir_bbr_fwts_tests.ini $TOP_DIR/$FWTS_PATH/$FWTS_BINARY/bin
        fi
    fi
    cp -R $TOP_DIR/$FWTS_PATH/$FWTS_BINARY ramdisk
    chmod 777 -R $TOP_DIR/$RAMDISK_PATH/$FWTS_BINARY
}
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
init
# Fixed quoting: "$@" preserves each argument as one word even when it
# contains spaces, and "$DIR" survives paths with spaces.
source "$DIR/framework.sh" "$@"
|
package net.sansa_stack.query.spark.sparqlify
import net.sansa_stack.rdf.common.partition.core.{RdfPartitionStateDefault, RdfPartitioner}
import net.sansa_stack.rdf.common.partition.model.sparqlify.SparqlifyUtils2
import org.aksw.obda.domain.impl.LogicalTableTableName
import org.aksw.sparqlify.backend.postgres.DatatypeToStringCast
import org.aksw.sparqlify.config.syntax.Config
import org.aksw.sparqlify.core.algorithms.{CandidateViewSelectorSparqlify, ViewDefinitionNormalizerImpl}
import org.aksw.sparqlify.core.interfaces.SparqlSqlStringRewriter
import org.aksw.sparqlify.core.sql.common.serialization.SqlEscaperBase
import org.aksw.sparqlify.util.{SparqlifyCoreInit, SparqlifyUtils, SqlBackendConfig}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.ScalaReflection
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{Row, SparkSession}
object SparqlifyUtils3 // extends StrictLogging
{
  /**
   * Builds a Sparqlify SPARQL-to-SQL string rewriter over a set of RDF
   * partitions. For each partition it creates a Sparqlify view definition,
   * registers the partition's RDD as a persisted Spark temp view under the
   * view's table name, and finally assembles the default rewriter against
   * the collected view definitions.
   *
   * @param sparkSession active session used to create/register temp views
   * @param partitioner  supplies the per-partition layout (Scala schema)
   * @param partitions   mapping from partition state to the RDD of its rows
   * @return a rewriter that translates SPARQL into SQL over the temp views
   */
  def createSparqlSqlRewriter(sparkSession: SparkSession, partitioner: RdfPartitioner[RdfPartitionStateDefault], partitions: Map[RdfPartitionStateDefault, RDD[Row]]): SparqlSqlStringRewriter = {
    val config = new Config()
    // val loggerCount = new LoggerCount(logger.underlying)
    // Backtick escaping matches Spark SQL's identifier quoting convention.
    val backendConfig = new SqlBackendConfig(new DatatypeToStringCast(), new SqlEscaperBase("`", "`")) // new SqlEscaperBacktick())
    val sqlEscaper = backendConfig.getSqlEscaper()
    val typeSerializer = backendConfig.getTypeSerializer()
    val sqlFunctionMapping = SparqlifyCoreInit.loadSqlFunctionDefinitions("functions-spark.xml")
    val ers = SparqlifyUtils.createDefaultExprRewriteSystem()
    val mappingOps = SparqlifyUtils.createDefaultMappingOps(ers)
    // NOTE(review): candidateViewSelector is constructed but never used below.
    val candidateViewSelector = new CandidateViewSelectorSparqlify(mappingOps, new ViewDefinitionNormalizerImpl());
    // NOTE(review): 'views' only carries the side effects of the map body
    // (temp-view registration and config mutation) and is never read again.
    val views = partitions.map {
      case (p, rdd) =>
        //
        // logger.debug("Processing RdfPartition: " + p)
        val vd = SparqlifyUtils2.createViewDefinition(partitioner, p)
        // logger.debug("Created view definition: " + vd)
        val tableName = vd.getLogicalTable match {
          case o: LogicalTableTableName => o.getTableName
          case _ => throw new RuntimeException("Table name required - instead got: " + vd)
        }
        // val scalaSchema = p.layout.schema
        // Derive the Spark schema from the partition's Scala type layout.
        val scalaSchema = partitioner.determineLayout(p).schema
        val sparkSchema = ScalaReflection.schemaFor(scalaSchema).dataType.asInstanceOf[StructType]
        val df = sparkSession.createDataFrame(rdd, sparkSchema).persist()
        df.createOrReplaceTempView(sqlEscaper.escapeTableName(tableName))
        config.getViewDefinitions.add(vd)
    }
    val basicTableInfoProvider = new BasicTableInfoProviderSpark(sparkSession)
    val rewriter = SparqlifyUtils.createDefaultSparqlSqlStringRewriter(basicTableInfoProvider, null, config, typeSerializer, sqlEscaper, sqlFunctionMapping)
    // val rewrite = rewriter.rewrite(QueryFactory.create("Select * { <http://dbpedia.org/resource/Guy_de_Maupassant> ?p ?o }"))
    // val rewrite = rewriter.rewrite(QueryFactory.create("Select * { ?s <http://xmlns.com/foaf/0.1/givenName> ?o ; <http://dbpedia.org/ontology/deathPlace> ?d }"))
    rewriter
  }
}
|
import numpy as np
from typing import List, Dict
from collections import defaultdict
def random_samples_per_class(timeseries: List[np.ndarray], labels: List[int], num_samples: int) -> Dict[int, List[np.ndarray]]:
    """Draw ``num_samples`` distinct random series for each class.

    Args:
        timeseries: Series, aligned element-wise with ``labels``.
        labels: Class label of each series.
        num_samples: Number of series to draw per class, without replacement.

    Returns:
        Mapping from class label to ``num_samples`` randomly chosen series
        of that class. Uses the global numpy RNG; seed it for reproducibility.

    Raises:
        ValueError: If any class has fewer than ``num_samples`` members
            (previously this surfaced as an opaque numpy error).
    """
    timeseries_per_class = defaultdict(list)
    for ts, label in zip(timeseries, labels):
        timeseries_per_class[label].append(ts)

    random_samples = {}
    # Iterating the dict directly replaces the redundant set(labels) pass.
    for c, members in timeseries_per_class.items():
        if len(members) < num_samples:
            raise ValueError(
                f"class {c!r} has only {len(members)} series; "
                f"cannot draw {num_samples} without replacement"
            )
        chosen = np.random.choice(len(members), num_samples, replace=False)
        random_samples[c] = [members[i] for i in chosen]
    return random_samples
#!/bin/bash
# Run a single dieharder PRNG test with a fixed seed so the run is
# reproducible:
#   -d 204  test number to run (see `dieharder -l` for the list)
#   -g 45   generator number (see dieharder's generator listing)
#   -S ...  explicit seed
dieharder -d 204 -g 45 -S 2062763844
|
#!/usr/bin/env bash

# Debug this script if in debug mode.
# Default DEBUG to 0: with DEBUG unset the original `(( $DEBUG == 1 ))`
# expanded to `((  == 1 ))`, an arithmetic syntax error.
(( ${DEBUG:-0} == 1 )) && set -x

# Import dsip_lib utility / shared functions if not already
if [[ "$DSIP_LIB_IMPORTED" != "1" ]]; then
    . ${DSIP_PROJECT_DIR}/dsiprouter/dsip_lib.sh
fi
# Installs kamailio and its system dependencies on CentOS/RHEL, creates the
# kamailio DB schema, configures firewalld/rsyslog/logrotate, and builds the
# dSIPRouter kamailio module. Returns 0 on success, 1 if the module build or
# install fails.
function install() {
    # Install Dependencies
    yum groupinstall -y 'core'
    yum groupinstall -y 'base'
    yum groupinstall -y 'Development Tools'
    yum install -y psmisc curl wget sed gawk vim epel-release perl firewalld uuid-devel openssl-devel
    yum install -y logrotate rsyslog certbot

    # TODO: we should detect if SELINUX is enabled and if so add proper permissions for kamailio, dsip, etc..
    # Disable SELinux
    # Fixed: the original pattern 's/(^SELINUX=).*/...' used ERE groups without
    # -r, so the parentheses matched literally and the config was never changed.
    sed -i -e 's/^SELINUX=.*/SELINUX=disabled/' /etc/selinux/config

    # create kamailio user and group
    mkdir -p /var/run/kamailio
    # sometimes locks aren't properly removed (this seems to happen often on VM's)
    rm -f /etc/passwd.lock /etc/shadow.lock /etc/group.lock /etc/gshadow.lock
    useradd --system --user-group --shell /bin/false --comment "Kamailio SIP Proxy" kamailio
    chown -R kamailio:kamailio /var/run/kamailio

    # e.g. KAM_VERSION=55 -> repo name "kamailio-5.5"
    KAM_REPO="kamailio-$(perl -pe 's%([0-9])(?=[0-9])%\1\.%g' <<<${KAM_VERSION})"
    yum install -y yum-utils
    yum-config-manager --add-repo https://rpm.kamailio.org/centos/kamailio.repo
    rpm --import $(grep 'gpgkey' /etc/yum.repos.d/kamailio.repo | cut -d '=' -f 2 | sort -u)
    # TODO: get the kamailio guys to fix their rpm signing
    yum install -y --disablerepo=kamailio --enablerepo=${KAM_REPO} --nogpgcheck kamailio kamailio-ldap kamailio-mysql \
        kamailio-postgresql kamailio-debuginfo kamailio-xmpp kamailio-unixodbc kamailio-utils kamailio-tls \
        kamailio-presence kamailio-outbound kamailio-gzcompress kamailio-http_async_client kamailio-dmq_userloc \
        kamailio-sipdump kamailio-websocket

    # get info about the kamailio install for later use in script
    KAM_VERSION_FULL=$(kamailio -v 2>/dev/null | grep '^version:' | awk '{print $3}')
    KAM_MODULES_DIR=$(find /usr/lib{32,64,}/{i386*/*,i386*/kamailio/*,x86_64*/*,x86_64*/kamailio/*,*} -name drouting.so -printf '%h' -quit 2>/dev/null)

    touch /etc/tmpfiles.d/kamailio.conf
    echo "d /run/kamailio 0750 kamailio users" > /etc/tmpfiles.d/kamailio.conf

    # create kamailio defaults config
    (cat << 'EOF'
RUN_KAMAILIO=yes
USER=kamailio
GROUP=kamailio
SHM_MEMORY=64
PKG_MEMORY=8
PIDFILE=/var/run/kamailio/kamailio.pid
CFGFILE=/etc/kamailio/kamailio.cfg
#DUMP_CORE=yes
EOF
    ) > /etc/default/kamailio

    # Configure Kamailio and Required Database Modules
    mkdir -p ${SYSTEM_KAMAILIO_CONFIG_DIR}
    mv -f ${SYSTEM_KAMAILIO_CONFIG_DIR}/kamctlrc ${SYSTEM_KAMAILIO_CONFIG_DIR}/kamctlrc.$(date +%Y%m%d_%H%M%S)
    if [[ -z "${ROOT_DB_PASS-unset}" ]]; then
        local ROOTPW_SETTING="DBROOTPWSKIP=yes"
    else
        local ROOTPW_SETTING="DBROOTPW=\"${ROOT_DB_PASS}\""
    fi

    # TODO: we should set STORE_PLAINTEXT_PW to 0, this is not default but would need tested
    (cat << EOF
DBENGINE=MYSQL
DBHOST="${KAM_DB_HOST}"
DBPORT="${KAM_DB_PORT}"
DBNAME="${KAM_DB_NAME}"
DBROUSER="${KAM_DB_USER}"
DBROPW="${KAM_DB_PASS}"
DBRWUSER="${KAM_DB_USER}"
DBRWPW="${KAM_DB_PASS}"
DBROOTUSER="${ROOT_DB_USER}"
${ROOTPW_SETTING}
CHARSET=utf8
INSTALL_EXTRA_TABLES=yes
INSTALL_PRESENCE_TABLES=yes
INSTALL_DBUID_TABLES=yes
# STORE_PLAINTEXT_PW=0
EOF
    ) > ${SYSTEM_KAMAILIO_CONFIG_DIR}/kamctlrc

    # Execute 'kamdbctl create' to create the Kamailio database schema
    kamdbctl create

    # Setup firewall rules (offline variant: firewalld is not running yet)
    firewall-offline-cmd --zone=public --add-port=${KAM_SIP_PORT}/udp
    firewall-offline-cmd --zone=public --add-port=${KAM_SIP_PORT}/tcp
    firewall-offline-cmd --zone=public --add-port=${KAM_SIPS_PORT}/tcp
    firewall-offline-cmd --zone=public --add-port=${KAM_WSS_PORT}/tcp
    firewall-offline-cmd --zone=public --add-port=${KAM_DMQ_PORT}/udp
    firewall-offline-cmd --zone=public --add-port=${RTP_PORT_MIN}-${RTP_PORT_MAX}/udp

    # Start firewalld
    systemctl start firewalld
    systemctl enable firewalld

    # Fix for bug: https://bugzilla.redhat.com/show_bug.cgi?id=1575845
    if (( $? != 0 )); then
        systemctl restart dbus
        systemctl restart firewalld
    fi

    # Setup firewall rules
    firewall-cmd --zone=public --add-port=${KAM_SIP_PORT}/udp --permanent
    firewall-cmd --zone=public --add-port=${KAM_SIP_PORT}/tcp --permanent
    firewall-cmd --zone=public --add-port=${KAM_SIPS_PORT}/tcp --permanent
    firewall-cmd --zone=public --add-port=${KAM_WSS_PORT}/tcp --permanent
    firewall-cmd --zone=public --add-port=${KAM_DMQ_PORT}/udp --permanent
    # Fixed: --permanent was missing on the RTP range, so the rule was dropped
    # by the --reload below and lost on every firewalld restart.
    firewall-cmd --zone=public --add-port=${RTP_PORT_MIN}-${RTP_PORT_MAX}/udp --permanent
    firewall-cmd --reload

    # Make sure MariaDB and Local DNS start before Kamailio
    # Fixed: a stray 'v' after -q made grep search for the pattern "v" and
    # treat the intended pattern as a (nonexistent) input file.
    if ! grep -q 'mysql.service dnsmasq.service' /lib/systemd/system/kamailio.service 2>/dev/null; then
        sed -i -r -e 's/(After=.*)/\1 mysql.service dnsmasq.service/' /lib/systemd/system/kamailio.service
    fi
    if ! grep -q "${DSIP_PROJECT_DIR}/dsiprouter.sh updatednsconfig" /lib/systemd/system/kamailio.service 2>/dev/null; then
        sed -i -r -e "0,\|^ExecStart.*|{s||ExecStartPre=-${DSIP_PROJECT_DIR}/dsiprouter.sh updatednsconfig\n&|}" /lib/systemd/system/kamailio.service
    fi
    systemctl daemon-reload

    # Enable Kamailio for system startup
    systemctl enable kamailio

    # Configure rsyslog defaults
    if ! grep -q 'dSIPRouter rsyslog.conf' /etc/rsyslog.conf 2>/dev/null; then
        cp -f ${DSIP_PROJECT_DIR}/resources/syslog/rsyslog.conf /etc/rsyslog.conf
    fi

    # Setup kamailio Logging
    cp -f ${DSIP_PROJECT_DIR}/resources/syslog/kamailio.conf /etc/rsyslog.d/kamailio.conf
    touch /var/log/kamailio.log
    systemctl restart rsyslog

    # Setup logrotate
    cp -f ${DSIP_PROJECT_DIR}/resources/logrotate/kamailio /etc/logrotate.d/kamailio

    # Setup Kamailio to use the CA cert's that are shipped with the OS
    mkdir -p ${DSIP_SYSTEM_CONFIG_DIR}/certs
    cp ${DSIP_PROJECT_DIR}/kamailio/cacert_dsiprouter.pem ${DSIP_SYSTEM_CONFIG_DIR}/certs/cacert.pem

    # Setup dSIPRouter Module: build against the matching kamailio sources
    rm -rf /tmp/kamailio 2>/dev/null
    git clone --depth 1 -b ${KAM_VERSION_FULL} https://github.com/kamailio/kamailio.git /tmp/kamailio 2>/dev/null &&
        cp -rf ${DSIP_PROJECT_DIR}/kamailio/modules/dsiprouter/ /tmp/kamailio/src/modules/ &&
        ( cd /tmp/kamailio/src/modules/dsiprouter; make; exit $?; ) &&
        cp -f /tmp/kamailio/src/modules/dsiprouter/dsiprouter.so ${KAM_MODULES_DIR} ||
        return 1

    return 0
}
# Stops and removes kamailio, backs up its configuration directory, and
# undoes the firewall/logging configuration created by install().
function uninstall {
    # Stop servers
    systemctl stop kamailio
    systemctl disable kamailio

    # Backup kamailio configuration directory
    mv -f ${SYSTEM_KAMAILIO_CONFIG_DIR} ${SYSTEM_KAMAILIO_CONFIG_DIR}.bak.$(date +%Y%m%d_%H%M%S)

    # Uninstall Kamailio modules
    yum remove -y kamailio\*

    # Remove firewall rules that was created by us:
    firewall-cmd --zone=public --remove-port=${KAM_SIP_PORT}/udp --permanent
    firewall-cmd --zone=public --remove-port=${KAM_SIP_PORT}/tcp --permanent
    firewall-cmd --zone=public --remove-port=${KAM_SIPS_PORT}/tcp --permanent
    firewall-cmd --zone=public --remove-port=${KAM_WSS_PORT}/tcp --permanent
    firewall-cmd --zone=public --remove-port=${KAM_DMQ_PORT}/udp --permanent
    # Fixed: --permanent was missing here (unlike the sibling rules), so any
    # persistent RTP rule would have survived uninstall.
    firewall-cmd --zone=public --remove-port=${RTP_PORT_MIN}-${RTP_PORT_MAX}/udp --permanent
    firewall-cmd --reload

    # Remove kamailio Logging
    rm -f /etc/rsyslog.d/kamailio.conf

    # Remove logrotate settings
    rm -f /etc/logrotate.d/kamailio
}
# Entry point: dispatch on the requested action.
case "$1" in
    uninstall|remove)
        uninstall
        ;;
    install)
        install
        ;;
    *)
        # printerr is provided by the sourced dsip_lib.sh helpers
        printerr "usage $0 [install | uninstall]"
        ;;
esac
|
<filename>node_modules/react-icons-kit/md/ic_pause_circle_filled_twotone.js
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_pause_circle_filled_twotone = void 0;
var ic_pause_circle_filled_twotone = {
"viewBox": "0 0 24 24",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": []
}]
}, {
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": []
}]
}, {
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": []
}]
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": []
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M12,4c-4.41,0-8,3.59-8,8c0,4.41,3.59,8,8,8s8-3.59,8-8C20,7.59,16.41,4,12,4z M11,16H9V8h2V16z M15,16h-2 V8h2V16z",
"opacity": ".3"
},
"children": [{
"name": "path",
"attribs": {
"d": "M12,4c-4.41,0-8,3.59-8,8c0,4.41,3.59,8,8,8s8-3.59,8-8C20,7.59,16.41,4,12,4z M11,16H9V8h2V16z M15,16h-2 V8h2V16z",
"opacity": ".3"
},
"children": []
}]
}, {
"name": "rect",
"attribs": {
"height": "8",
"width": "2",
"x": "13",
"y": "8"
},
"children": [{
"name": "rect",
"attribs": {
"height": "8",
"width": "2",
"x": "13",
"y": "8"
},
"children": []
}]
}, {
"name": "rect",
"attribs": {
"height": "8",
"width": "2",
"x": "9",
"y": "8"
},
"children": [{
"name": "rect",
"attribs": {
"height": "8",
"width": "2",
"x": "9",
"y": "8"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M12,2C6.48,2,2,6.48,2,12c0,5.52,4.48,10,10,10s10-4.48,10-10C22,6.48,17.52,2,12,2z M12,20c-4.41,0-8-3.59-8-8 c0-4.41,3.59-8,8-8s8,3.59,8,8C20,16.41,16.41,20,12,20z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M12,2C6.48,2,2,6.48,2,12c0,5.52,4.48,10,10,10s10-4.48,10-10C22,6.48,17.52,2,12,2z M12,20c-4.41,0-8-3.59-8-8 c0-4.41,3.59-8,8-8s8,3.59,8,8C20,16.41,16.41,20,12,20z"
},
"children": []
}]
}]
}]
}]
}]
};
exports.ic_pause_circle_filled_twotone = ic_pause_circle_filled_twotone; |
package com.ibm.opl.customdatasource;
import java.io.File;
import java.io.IOException;
import ilog.concert.IloException;
/**
 * Shared helpers for OPL model tests: running .mod/.dat models with a JDBC
 * configuration, and locating the project root from the test classpath.
 */
public class TestUtils {
    /**
     * Runs a model as a .mod file, using a set of .dat files, and a given jdbc configuration.
     *
     * If connectionString is specified, it will be used instead of the url in the jdbc configuration,
     * allowing for tests with database which url is not static (ex: temporary test databases).
     * @param modFilename The .mod file
     * @param datFilenames An array of .dat files
     * @param jdbcConfigurationFile The jdbc configuration file
     * @param connectionString An override url (may be null to use the configured url)
     * @throws IOException if the model or data files cannot be read
     * @throws IloException if the OPL engine reports an error
     */
    public final static void runMod(String modFilename, String[] datFilenames, String jdbcConfigurationFile,
            String connectionString) throws IOException, IloException {
        ModRunner runner = new ModRunner();
        runner.run(modFilename, datFilenames, jdbcConfigurationFile, connectionString);
    }

    /**
     * Returns the project root. Only works if the tests are run with maven,
     * i.e. when test classes live under ${project}/target/test-classes; the
     * root is everything before that segment in the resource path.
     * @return the project root directory, or null if the maven layout is not detected
     */
    public static File GetProjectRoot() {
        String f = new File(TestUtils.class.getResource("models").getFile()).getAbsolutePath();
        // The standard run time for us is to have test classes in ${project}\target\test-classes
        String p = f.replace("\\", "/"); // normalize separators to '/'
        int index = p.indexOf("target/test-classes");
        if (index != -1) {
            String loc = p.substring(0, index);
            return new File(loc);
        }
        else
            return null;
    }
}
|
// Copyright 2011 Foursquare Labs Inc. All Rights Reserved.
package tech.scoundrel.rogue.lift.test
import java.util.regex.Pattern
import com.mongodb.ReadPreference
import tech.scoundrel.rogue._
import tech.scoundrel.rogue.lift.LiftRogue._
import org.bson.types.ObjectId
import org.scalatest.concurrent.ScalaFutures
import org.scalatest._
import tech.scoundrel.rogue.LatLong
/**
* Contains tests that test the interaction of Rogue with a real mongo.
*/
class EndToEndAsyncSpec extends FlatSpec
    with Matchers with ScalaFutures
    with BeforeAndAfterEach {

  // Canonical Venue fixture: every field is populated with fixed values so the
  // select/subfield tests below have deterministic data to assert against.
  def baseTestVenue(): Venue = {
    Venue.createRecord
      .legacyid(123)
      .userid(456)
      .venuename("test venue")
      .mayor(789)
      .mayor_count(3)
      .closed(false)
      .popularity(List(1L, 2L, 3L))
      .categories(List(new ObjectId()))
      .geolatlng(LatLong(40.73, -73.98))
      .status(VenueStatus.open)
      .claims(List(
        VenueClaimBson.createRecord.userid(1234).status(ClaimStatus.pending),
        VenueClaimBson.createRecord.userid(5678).status(ClaimStatus.approved)
      ))
      .lastClaim(VenueClaimBson.createRecord.userid(5678).status(ClaimStatus.approved))
      .tags(List("test tag1", "some tag"))
  }

  // VenueClaim fixture tied to the given venue id.
  def baseTestVenueClaim(vid: ObjectId): VenueClaim = {
    VenueClaim.createRecord
      .venueid(vid)
      .userid(123)
      .status(ClaimStatus.approved)
  }

  // Tip fixture with a small counts map (exercised by the sub-field tests).
  def baseTestTip(): Tip = {
    Tip.createRecord
      .legacyid(234)
      .counts(Map(
        "foo" -> 1L,
        "bar" -> 2L
      ))
  }

  // Open the async mongo connection before each test...
  override protected def beforeEach(): Unit = {
    RogueTestMongo.connectToMongoAsync
  }

  // ...and wipe every collection afterwards (verifying the wipe) so tests
  // stay independent of each other.
  override protected def afterEach(): Unit = {
    Venue.bulkDeleteAsync_!!!().futureValue
    Venue.countAsync().futureValue shouldBe 0
    VenueClaim.bulkDeleteAsync_!!!().futureValue
    VenueClaim.countAsync().futureValue shouldBe 0
    Like.allShards.bulkDeleteAsync_!!!().futureValue
    RogueTestMongo.disconnectFromMongoAsync
  }

  // Equality queries on ids, longs, strings, booleans and enums.
  "eqs" should "work as expected" in {
    val v = baseTestVenue()
    v.insertAsync().futureValue
    val vc = baseTestVenueClaim(v.id)
    vc.insertAsync().futureValue
    // eqs
    metaRecordToQueryBuilder(Venue).where(_._id eqs v.id).fetchAsync().futureValue.map(_.id) shouldBe Seq(v.id)
    Venue.where(_.mayor eqs v.mayor.value).fetchAsync().futureValue.map(_.id) shouldBe List(v.id)
    Venue.where(_.mayor eqs v.mayor.value).fetchAsync().futureValue.map(_.id) shouldBe List(v.id)
    Venue.where(_.venuename eqs v.venuename.value).fetchAsync().futureValue.map(_.id) shouldBe List(v.id)
    Venue.where(_.closed eqs false).fetchAsync().futureValue.map(_.id) shouldBe List(v.id)
    Venue.where(_.mayor eqs 432432).fetchAsync().futureValue.map(_.id) shouldBe Nil
    Venue.where(_.closed eqs true).fetchAsync().futureValue.map(_.id) shouldBe Nil
    VenueClaim.where(_.status eqs ClaimStatus.approved).fetchAsync().futureValue.map(_.id) shouldBe List(vc.id)
    VenueClaim.where(_.venueid eqs v.id).fetchAsync().futureValue.map(_.id) shouldBe List(vc.id)
    VenueClaim.where(_.venueid eqs v).fetchAsync().futureValue.map(_.id) shouldBe List(vc.id)
  }

  "inequality queries" should "work as expected" in {
    val v = baseTestVenue()
    v.insertAsync().futureValue
    val vc = baseTestVenueClaim(v.id)
    vc.insertAsync().futureValue
    // neq,lt,gt, where the lone Venue has mayor_count=3, and the only
    // VenueClaim has status approved.
    Venue.where(_.mayor_count neqs 5).fetchAsync().futureValue.map(_.id) shouldBe List(v.id)
    Venue.where(_.mayor_count < 5).fetchAsync().futureValue.map(_.id) shouldBe List(v.id)
    Venue.where(_.mayor_count lt 5).fetchAsync().futureValue.map(_.id) shouldBe List(v.id)
    Venue.where(_.mayor_count <= 5).fetchAsync().futureValue.map(_.id) shouldBe List(v.id)
    Venue.where(_.mayor_count lte 5).fetchAsync().futureValue.map(_.id) shouldBe List(v.id)
    Venue.where(_.mayor_count > 5).fetchAsync().futureValue.map(_.id) shouldBe Nil
    Venue.where(_.mayor_count gt 5).fetchAsync().futureValue.map(_.id) shouldBe Nil
    Venue.where(_.mayor_count >= 5).fetchAsync().futureValue.map(_.id) shouldBe Nil
    Venue.where(_.mayor_count gte 5).fetchAsync().futureValue.map(_.id) shouldBe Nil
    Venue.where(_.mayor_count between (3, 5)).fetchAsync().futureValue.map(_.id) shouldBe List(v.id)
    VenueClaim.where(_.status neqs ClaimStatus.approved).fetchAsync().futureValue.map(_.id) shouldBe Nil
    VenueClaim.where(_.status neqs ClaimStatus.pending).fetchAsync().futureValue.map(_.id) shouldBe List(vc.id)
  }

  // Projections of 1..6 fields come back as bare values / tuples.
  "select queries" should "work as expected" in {
    val v = baseTestVenue()
    v.insertAsync().futureValue
    val base = Venue.where(_._id eqs v.id)
    base.select(_.legacyid).fetchAsync().futureValue shouldBe List(v.legacyid.value)
    base.select(_.legacyid, _.userid).fetchAsync().futureValue shouldBe List((v.legacyid.value, v.userid.value))
    base.select(_.legacyid, _.userid, _.mayor).fetchAsync().futureValue shouldBe List((v.legacyid.value, v.userid.value, v.mayor.value))
    base.select(_.legacyid, _.userid, _.mayor, _.mayor_count).fetchAsync().futureValue shouldBe List((v.legacyid.value, v.userid.value, v.mayor.value, v.mayor_count.value))
    base.select(_.legacyid, _.userid, _.mayor, _.mayor_count, _.closed).fetchAsync().futureValue shouldBe List((v.legacyid.value, v.userid.value, v.mayor.value, v.mayor_count.value, v.closed.value))
    base.select(_.legacyid, _.userid, _.mayor, _.mayor_count, _.closed, _.tags).fetchAsync().futureValue shouldBe List((v.legacyid.value, v.userid.value, v.mayor.value, v.mayor_count.value, v.closed.value, v.tags.value))
  }

  "selecting enum" should "work as expected" in {
    val v = baseTestVenue()
    v.insertAsync().futureValue
    Venue.where(_._id eqs v.id).select(_.status).fetchAsync().futureValue shouldBe List(VenueStatus.open)
  }

  // Projections into the V1..V6 case classes via selectCase.
  "selecting case class" should "work as expected" in {
    val v = baseTestVenue()
    v.insertAsync().futureValue
    val base = Venue.where(_._id eqs v.id)
    base.selectCase(_.legacyid, V1).fetchAsync().futureValue shouldBe List(V1(v.legacyid.value))
    base.selectCase(_.legacyid, _.userid, V2).fetchAsync().futureValue shouldBe List(V2(v.legacyid.value, v.userid.value))
    base.selectCase(_.legacyid, _.userid, _.mayor, V3).fetchAsync().futureValue shouldBe List(V3(v.legacyid.value, v.userid.value, v.mayor.value))
    base.selectCase(_.legacyid, _.userid, _.mayor, _.mayor_count, V4).fetchAsync().futureValue shouldBe List(V4(v.legacyid.value, v.userid.value, v.mayor.value, v.mayor_count.value))
    base.selectCase(_.legacyid, _.userid, _.mayor, _.mayor_count, _.closed, V5).fetchAsync().futureValue shouldBe List(V5(v.legacyid.value, v.userid.value, v.mayor.value, v.mayor_count.value, v.closed.value))
    base.selectCase(_.legacyid, _.userid, _.mayor, _.mayor_count, _.closed, _.tags, V6).fetchAsync().futureValue shouldBe List(V6(v.legacyid.value, v.userid.value, v.mayor.value, v.mayor_count.value, v.closed.value, v.tags.value))
  }

  // Dotted/positional sub-field selection on maps, geo points and BSON lists.
  "sub-field queries" should "work as expected" in {
    val v = baseTestVenue()
    v.insertAsync().futureValue
    val t = baseTestTip()
    t.insertAsync().futureValue
    // select subfields
    Tip.where(_._id eqs t.id).select(_.counts at "foo").fetchAsync().futureValue shouldBe Seq(Some(1L))
    Venue.where(_._id eqs v.id).select(_.geolatlng.unsafeField[Double]("lat")).fetchAsync().futureValue shouldBe List(Some(40.73))
    val subuserids: Seq[Option[List[Long]]] = {
      Venue.where(_._id eqs v.id)
        .select(_.claims.subselect(_.userid))
        .fetchAsync().futureValue
    }
    subuserids shouldBe List(Some(List(1234, 5678)))
    val q = Venue.where(_.claims.subfield(_.userid) eqs 1234).select(_.claims.$$)
    val subclaims: Seq[List[VenueClaimBson]] = q.fetchAsync().futureValue
    subclaims.size shouldBe 1
    subclaims.head.size shouldBe 1
    subclaims.head.head.userid.value shouldBe 1234
    subclaims.head.head.status.value shouldBe ClaimStatus.pending
    // selecting a claims.userid when there is no top-level claims list should
    // have one element in the List for the one Venue, but an Empty for that
    // Venue since there's no list of claims there.
    // NOTE(review): the returned Future below is not awaited with .futureValue —
    // the following reads may race the update; confirm this is intentional.
    Venue.where(_._id eqs v.id).modify(_.claims unset).and(_.lastClaim unset).updateOneAsync()
    Venue.where(_._id eqs v.id).select(_.lastClaim.subselect(_.userid)).fetchAsync().futureValue shouldBe List(None)
    val q1 = Venue.where(_._id eqs v.id).select(_.lastClaim.subselect(_.userid))
    val q2 = queryToLiftQuery(Venue.select(_.lastClaim.subselect(_.userid)))
    //println(s"QQ ${q2}")
    Venue.where(_._id eqs v.id).select(_.claims.subselect(_.userid)).fetchAsync().futureValue shouldBe List(None)
  }

  // "These tests are broken because DummyField doesn't know how to convert a String to an Enum"
  "selecting enum sub-field" should "work as expected" ignore {
    val v = baseTestVenue()
    v.insertAsync().futureValue
    // This behavior is broken because we get a String back from mongo, and at
    // that point we only have a DummyField for the subfield, and that doesn't
    // know how to convert the String to an Enum.
    val statuses: Seq[Option[VenueClaimBson.status.MyType]] =
      Venue.where(_._id eqs v.id).select(_.lastClaim.subselect(_.status)).fetchAsync().futureValue
    // This assertion works.
    statuses shouldBe List(Some("Approved"))
    // This assertion is what we want, and it fails.
    // statuses shouldBe List(Some(ClaimStatus.approved))
    val subuseridsAndStatuses: Seq[(Option[List[Long]], Option[List[VenueClaimBson.status.MyType]])] =
      Venue.where(_._id eqs v.id)
        .select(_.claims.subselect(_.userid), _.claims.subselect(_.status))
        .fetchAsync().futureValue
    // This assertion works.
    subuseridsAndStatuses shouldBe Seq((Some(List(1234, 5678)), Some(List("Pending approval", "Approved"))))
    // This assertion is what we want, and it fails.
    // subuseridsAndStatuses shouldBe List((Some(List(1234, 5678)), Some(List(ClaimStatus.pending, ClaimStatus.approved))))
  }

  "read preferences" should "work as expected" in {
    // Note: this isn't a real test of readpreference because the test mongo setup
    // doesn't have replicas. This basically just makes sure that readpreference
    // doesn't break everything.
    val v = baseTestVenue()
    v.insertAsync().futureValue
    // eqs
    Venue.where(_._id eqs v.id).fetchAsync().futureValue.map(_.id) shouldBe Seq(v.id)
    Venue.where(_._id eqs v.id).setReadPreference(ReadPreference.secondary).fetchAsync().futureValue.map(_.id) shouldBe Seq(v.id)
    Venue.where(_._id eqs v.id).setReadPreference(ReadPreference.primary).fetchAsync().futureValue.map(_.id) shouldBe Seq(v.id)
  }

  // Upsert semantics of findAndModify with returnNew = true/false.
  "find and modify" should "work as expected" in {
    val v1 = Venue.where(_.venuename eqs "v1")
      .findAndModify(_.userid setTo 5)
      .upsertOneAsync(returnNew = false)
      .futureValue
    v1 shouldBe None
    val v2 = Venue.where(_.venuename eqs "v2")
      .findAndModify(_.userid setTo 5)
      .upsertOneAsync(returnNew = true)
      .futureValue
    v2.map(_.userid.value) shouldBe Some(5)
    val v3 = Venue.where(_.venuename eqs "v2")
      .findAndModify(_.userid setTo 6)
      .upsertOneAsync(returnNew = false)
      .futureValue
    v3.map(_.userid.value) shouldBe Some(5)
    val v4 = Venue.where(_.venuename eqs "v2")
      .findAndModify(_.userid setTo 7)
      .upsertOneAsync(returnNew = true)
      .futureValue
    v4.map(_.userid.value) shouldBe Some(7)
  }

  // startsWith / matches (scala Regex and java Pattern, with flags) / in / nin.
  "Regex query" should "work as expected" in {
    val v = baseTestVenue()
    v.insertAsync().futureValue
    Venue.where(_._id eqs v.id).and(_.venuename startsWith "test v").countAsync().futureValue shouldBe 1
    Venue.where(_._id eqs v.id).and(_.venuename matches ".es. v".r).countAsync().futureValue shouldBe 1
    Venue.where(_._id eqs v.id).and(_.venuename matches "Tes. v".r).countAsync().futureValue shouldBe 0
    Venue.where(_._id eqs v.id).and(_.venuename matches Pattern.compile("Tes. v", Pattern.CASE_INSENSITIVE)).countAsync().futureValue shouldBe 1
    Venue.where(_._id eqs v.id).and(_.venuename matches "test .*".r).and(_.legacyid in List(v.legacyid.value)).countAsync().futureValue shouldBe 1
    Venue.where(_._id eqs v.id).and(_.venuename matches "test .*".r).and(_.legacyid nin List(v.legacyid.value)).countAsync().futureValue shouldBe 0
    Venue.where(_.tags matches """some\s.*""".r).countAsync().futureValue shouldBe 1
  }

  "Batch and limit" should "work as expected" in {
    (1 to 50).foreach(_ => baseTestVenue().insertAsync().futureValue)
    val q = Venue.select(_._id)
    q.limit(10).fetchAsync().futureValue.length shouldBe 10
    q.limit(-10).fetchAsync().futureValue.length shouldBe 10
  }

  // count respects skip/limit the same way mongo's count-with-options does.
  "count" should "work as expected" in {
    (1 to 10).foreach(_ => baseTestVenue().insertAsync().futureValue)
    val q = Venue.select(_._id)
    q.countAsync().futureValue shouldBe 10
    q.limit(3).countAsync().futureValue shouldBe 3
    q.limit(15).countAsync().futureValue shouldBe 10
    q.skip(5).countAsync().futureValue shouldBe 5
    q.skip(12).countAsync().futureValue shouldBe 0
    q.skip(3).limit(5).countAsync().futureValue shouldBe 5
    q.skip(8).limit(4).countAsync().futureValue shouldBe 2
  }

  // NOTE(review): the inserts below are fired without .futureValue, so the
  // distinct counts rely on the writes completing in time — confirm.
  "distinct" should "work as expected" in {
    (1 to 5).foreach(_ => baseTestVenue().userid(1).insertAsync())
    (1 to 5).foreach(_ => baseTestVenue().userid(2).insertAsync())
    (1 to 5).foreach(_ => baseTestVenue().userid(3).insertAsync())
    Venue.where(_.mayor eqs 789).distinctAsync(_.userid).futureValue.length shouldBe 3
    Venue.where(_.mayor eqs 789).countDistinctAsync(_.userid).futureValue shouldBe 3
  }

  // $slice projections: positive = from head, negative = from tail, (skip, limit).
  "slice" should "work as expected" in {
    baseTestVenue().tags(List("1", "2", "3", "4")).insertAsync().futureValue
    Venue.select(_.tags.slice(2)).getAsync().futureValue shouldBe Some(List("1", "2"))
    Venue.select(_.tags.slice(-2)).getAsync().futureValue shouldBe Some(List("3", "4"))
    Venue.select(_.tags.slice(1, 2)).getAsync().futureValue shouldBe Some(List("2", "3"))
  }

  "Map field" should "work as expected" in {
    val emojiCount = Map(":smiley:" -> 2L)
    baseTestVenue().emojiCounts(emojiCount).insertAsync().futureValue
    Venue.select(_.emojiCounts).getAsync.futureValue shouldBe Some(emojiCount)
    Venue.where(_.mayor eqs 789).getAsync.futureValue.map(_.emojiCounts.get) shouldBe Some(emojiCount)
  }
}
|
package com.atjl.util.common;
import com.atjl.util.file.PathUtil;
import org.junit.*;
import org.junit.rules.ExpectedException;
import static org.junit.Assert.assertEquals;
/**
 * Unit tests for {@code PathUtil} path parsing and joining.
 */
public class PathUtilTest {

    @Rule
    public final ExpectedException expectedException = ExpectedException.none();

    @Before
    public void before() throws Exception {
    }

    @After
    public void after() throws Exception {
    }

    @BeforeClass
    public static void beforeClass() throws Exception{
    }

    @Test
    public void testParse(){
        final String separator = "/";
        // PathUtil.parseInner(separator, separator + "a" + separator + "b" + separator + "c");
        PathUtil.parseInner(separator,separator);
    }

    @Test
    public void testJoinPath() throws Exception {
        // String res = PathUtil.joinInner("/","a","b");
        final String separator = "/";

        // Degenerate inputs collapse to the bare root.
        assertEquals("/", PathUtil.joinInner(true,separator,""));
        assertEquals("/", PathUtil.joinInner(true,separator,"/"));
        assertEquals("/", PathUtil.joinInner(true,separator,""));
        assertEquals("/", PathUtil.joinInner(true,separator,"","/",""));

        // Leading separators on segments are normalized away.
        assertEquals("/a", PathUtil.joinInner(true,separator,"/","/a"));
        assertEquals("/a", PathUtil.joinInner(true,separator,"/","a"));
        assertEquals("/a/b", PathUtil.joinInner(true,separator,"","a","b"));
        assertEquals("/bb", PathUtil.joinInner(true,separator,"","/bb"));

        // Trailing empty segments are dropped.
        assertEquals("/abc", PathUtil.joinInner(true,separator,"/abc","/",""));
        assertEquals("/abc/bb/cc", PathUtil.joinInner(true,separator,"abc","bb","cc"));
    }

    @Test
    public void testJoin() throws Exception {
        final String separator = "\\";

        // join() defaults to the backslash separator.
        assertEquals("\\", PathUtil.join(""));
        assertEquals("\\a", PathUtil.join("\\","\\a"));
        assertEquals(separator+"a", PathUtil.join(separator,"a"));
        assertEquals(separator+"a"+separator+"b", PathUtil.join("","a","b"));
        assertEquals(separator+"bb", PathUtil.join("",separator+"bb"));
        assertEquals(separator, PathUtil.join("",separator,""));
        assertEquals(separator+"abc", PathUtil.join(separator+"abc",separator,""));
        assertEquals(separator+"abc"+separator+"bb"+separator+"cc", PathUtil.join("abc","bb","cc"));
    }

    @Test
    public void testFilterPathForSepPaths() throws Exception {
    }

    @Test
    public void testFilterPathForSepS() throws Exception {
    }
}
|
#!/bin/bash
# CocoaPods "Copy Pods Resources" build phase script.
#
# NOTE: the shebang is bash (not sh) on purpose — this script uses bash-only
# features: the ERR trap, 'set -o pipefail', arrays, [[ ]], and ${!var}
# indirect expansion further below. Running it under a POSIX /bin/sh (e.g.
# dash) would fail.
set -e
set -u
set -o pipefail

# Report the failing script line in Xcode's "file:line: error:" format.
on_error() {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z "${UNLOCALIZED_RESOURCES_FOLDER_PATH+x}" ]; then
  # If UNLOCALIZED_RESOURCES_FOLDER_PATH is not set, then there's nowhere for us to copy
  # resources to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"

# Manifest of plain resources to rsync at the end; truncate any stale copy.
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
XCASSET_FILES=()

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Map Xcode's TARGETED_DEVICE_FAMILY onto the --target-device flags that
# ibtool/actool expect. TARGET_DEVICE_ARGS is intentionally left unquoted at
# its use sites so it word-splits into separate flags.
case "${TARGETED_DEVICE_FAMILY:-}" in
  1,2)
    TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
    ;;
  1)
    TARGET_DEVICE_ARGS="--target-device iphone"
    ;;
  2)
    TARGET_DEVICE_ARGS="--target-device ipad"
    ;;
  3)
    TARGET_DEVICE_ARGS="--target-device tv"
    ;;
  4)
    TARGET_DEVICE_ARGS="--target-device watch"
    ;;
  *)
    TARGET_DEVICE_ARGS="--target-device mac"
    ;;
esac
# Stage a single pod resource for copying into the product bundle.
# $1 - resource path, absolute or relative to ${PODS_ROOT}.
# Compilable resources (storyboards, xibs, Core Data models, mapping models)
# are compiled directly into the bundle; frameworks are rsynced; .xcassets are
# queued in XCASSET_FILES for one actool run at the end; everything else is
# appended to the $RESOURCES_TO_COPY manifest.
# Each action is echoed first (|| true keeps the echo from tripping set -e)
# so the build log shows the exact command.
install_resource()
{
  # Resolve relative paths against the Pods directory.
  if [[ "$1" = /* ]] ; then
    RESOURCE_PATH="$1"
  else
    RESOURCE_PATH="${PODS_ROOT}/$1"
  fi
  # A missing resource means the Pods state is stale — fail the build loudly.
  if [[ ! -e "$RESOURCE_PATH" ]] ; then
    cat << EOM
error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
EOM
    exit 1
  fi
  case $RESOURCE_PATH in
    *.storyboard)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.xib)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.framework)
      echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
      mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      # RSYNC_PROTECT_TMP_FILES keeps concurrent targets from deleting each
      # other's in-flight rsync temp files (see comment at its definition).
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      ;;
    *.xcdatamodel)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\"" || true
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
      ;;
    *.xcdatamodeld)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\"" || true
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
      ;;
    *.xcmappingmodel)
      echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\"" || true
      xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
      ;;
    *.xcassets)
      # Defer asset catalogs: actool is run once, over all of them, at the end.
      ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
      XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
      ;;
    *)
      # Plain resource: record it in the manifest for the final rsync.
      echo "$RESOURCE_PATH" || true
      echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
      ;;
  esac
}
# Stage every pod resource for the active build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_resource "${PODS_ROOT}/AVOSCloud/AVOS/AVOSCloud/AVOSCloud_Art.inc"
  install_resource "${PODS_ROOT}/IQKeyboardManager/IQKeyboardManager/Resources/IQKeyboardManager.bundle"
  install_resource "${PODS_ROOT}/MJRefresh/MJRefresh/MJRefresh.bundle"
  install_resource "${PODS_ROOT}/SVProgressHUD/SVProgressHUD/SVProgressHUD.bundle"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_resource "${PODS_ROOT}/AVOSCloud/AVOS/AVOSCloud/AVOSCloud_Art.inc"
  install_resource "${PODS_ROOT}/IQKeyboardManager/IQKeyboardManager/Resources/IQKeyboardManager.bundle"
  install_resource "${PODS_ROOT}/MJRefresh/MJRefresh/MJRefresh.bundle"
  install_resource "${PODS_ROOT}/SVProgressHUD/SVProgressHUD/SVProgressHUD.bundle"
fi

# Copy the manifested plain resources into the product (and, when archiving,
# into the install location as well).
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
  mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"

# Compile all queued asset catalogs in a single actool invocation.
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "$(xcrun --find actool)" ] && [ -n "${XCASSET_FILES:-}" ]
then
  # Find all other xcassets (this unfortunately includes those of path pods and other targets).
  OTHER_XCASSETS=$(find "$PWD" -iname "*.xcassets" -type d)
  while IFS= read -r line; do
    # BUGFIX: the original test was `$line != "${PODS_ROOT}*"` — the quoted '*'
    # made it a literal-string comparison, so pod-owned catalogs were never
    # filtered out and got compiled twice. The '*' must stay unquoted to act
    # as a glob in [[ ]].
    if [[ "$line" != "${PODS_ROOT}"* ]]; then
      XCASSET_FILES+=("$line")
    fi
  done <<<"$OTHER_XCASSETS"
  # NUL-delimit the catalog paths so names with spaces survive xargs.
  if [ -z "${ASSETCATALOG_COMPILER_APPICON_NAME+x}" ]; then
    printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  else
    printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" --app-icon "${ASSETCATALOG_COMPILER_APPICON_NAME}" --output-partial-info-plist "${TARGET_TEMP_DIR}/assetcatalog_generated_info_cocoapods.plist"
  fi
fi
|
<reponame>AliFrank608-TMW/RacingReact
/**
* @module React
*/
import React from 'react'
/**
* @module classNames
*/
import classNames from 'utils/classnames'
/**
* @module PropTypes
*/
import PropTypes from 'prop-types'
/**
* @name VideoButton
* @param {Object} props
* @return {React.Component}
*/
const VideoButton = props => {
const {
className,
modifier,
show,
onClick
} = props
const modifiedClassNames = classNames('video-button', className, modifier, {
show
})
return (
<div className={modifiedClassNames} onClick={onClick}>
<div className='video-button__container'>
<div className='video-button__play'></div>
</div>
</div>
)
}
/**
 * Shared validator: a single class name or an array of class names.
 * @type {Function}
 */
const classNameType = PropTypes.oneOfType([
  PropTypes.string,
  PropTypes.arrayOf(PropTypes.string)
])

/**
 * defaultProps
 * @type {Object}
 */
VideoButton.defaultProps = {
  className: '',
  modifier: '',
  show: false
}

/**
 * propTypes
 * @type {Object}
 */
VideoButton.propTypes = {
  className: classNameType,
  modifier: classNameType,
  show: PropTypes.bool,
  onClick: PropTypes.func
}

/**
 * @module VideoButton
 */
export default VideoButton
|
package eg.edu.alexu.csd.oop.draw;
import eg.edu.alexu.csd.oop.shapes.Triangle;
import javafx.application.Application;
import javafx.scene.canvas.Canvas;
import javafx.scene.control.*;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.input.KeyCode;
import javafx.scene.Scene;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Region;
import javafx.scene.layout.VBox;
import javafx.scene.paint.Color;
import javafx.scene.text.Font;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import org.jfree.fx.FXGraphics2D;
import java.awt.*;
import java.io.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.channels.FileChannel;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.jar.JarEntry;
public class Paint extends Application{
// Name of the currently active tool/shape ("select", "move", "resize",
// a built-in shape name, or a loaded custom-shape class name).
private String current = "select";
// Toolbar buttons: file and history actions.
private Button save = new Button();
private Button load = new Button();
private Button undo = new Button();
private Button redo = new Button();
// Toolbar buttons: built-in shape tools.
private Button line = new Button();
private Button Circle = new Button();
private Button Ellipse = new Button();
private Button select = new Button();
private Button Rectangle = new Button();
private Button Square = new Button();
private Button Triangle = new Button();
// Toolbar buttons: custom-shape creation, editing tools, and class loading.
private Button customShape = new Button();
private Button resize = new Button();
private Button loadClass = new Button();
private Button delete = new Button();
private Button move = new Button();
// Drop-down listing the available shape class names (built-in + loaded jars).
private ChoiceBox addedShapes = new ChoiceBox();
// Border (stroke) color picker, defaulting to black.
private ColorPicker colorPicker = new ColorPicker(Color.BLACK);
// Fill color picker, defaulting to white.
private ColorPicker colorPicker2 = new ColorPicker(Color.WHITE);
// Labels shown next to the two color pickers in the toolbar.
private Label Border = new Label(" Color: ");
private Label Fill = new Label(" Fill: ");
/**
 * JavaFX entry point; hands control to {@code Application.launch}, which
 * constructs this class and calls {@link #start}.
 */
public static void main(String[] args){
    launch(args);
}
public void start(Stage primaryStage){
FileChooser ChooseFile = new FileChooser();
ChooseFile.getExtensionFilters().addAll(new FileChooser.ExtensionFilter("Save", "*.json", "*.xml"));
FileChooser JarChooser = new FileChooser();
JarChooser.getExtensionFilters().addAll(new FileChooser.ExtensionFilter("ClassLoader", "*.jar"));
AtomicReference<Point> p = new AtomicReference<>(new Point());
AtomicReference<Point> t2 = new AtomicReference<>(new Point());
AtomicInteger ct1 = new AtomicInteger();
Shape[] newShape = new Shape[3];
AtomicInteger ct2 = new AtomicInteger();
Engine engine = new Engine();
VBox root = new VBox();
root.setSpacing(5);
HBox menu = new HBox();
menu.setSpacing(3);
menu.getChildren().addAll(save, load, undo, redo, addedShapes, loadClass);
HBox shapes = new HBox();
shapes.setSpacing(3);
shapes.getChildren().addAll(select,move , line, Circle, Ellipse, Rectangle, Square, Triangle, customShape, Border, colorPicker, Fill, colorPicker2, delete, resize);
HBox can = new HBox();
Canvas canvas = new Canvas(1000, 600);
FXGraphics2D graphics = new FXGraphics2D(canvas.getGraphicsContext2D());
can.setStyle("-fx-background-color: WHITE");
can.getChildren().add(canvas);
root.getChildren().addAll(menu, shapes, can);
final AtomicReference<ArrayList<String>>[] ClassNames = new AtomicReference[]{new AtomicReference<>(engine.getClassNames())};
for (String className : ClassNames[0].get()) {
if("TwoPointShapes".equals(className)) continue;
addedShapes.getItems().add(className);
addedShapes.setValue(className);
}
Image image = null;
try {
image = new Image(new FileInputStream("Resources/btn1.png"));
line.setGraphic(new ImageView(image));
image = new Image(new FileInputStream("Resources/btn2.png"));
Circle.setGraphic(new ImageView(image));
image = new Image(new FileInputStream("Resources/btn3.png"));
Ellipse.setGraphic(new ImageView(image));
image = new Image(new FileInputStream("Resources/btn4.png"));
Rectangle.setGraphic(new ImageView(image));
image = new Image(new FileInputStream("Resources/btn5.png"));
Square.setGraphic(new ImageView(image));
image = new Image(new FileInputStream("Resources/btn6.png"));
Triangle.setGraphic(new ImageView(image));
image = new Image(new FileInputStream("Resources/btn8.png"));
undo.setGraphic(new ImageView(image));
image = new Image(new FileInputStream("Resources/btn9.png"));
redo.setGraphic(new ImageView(image));
image = new Image(new FileInputStream("Resources/btn13.png"));
save.setGraphic(new ImageView(image));
image = new Image(new FileInputStream("Resources/btn14.png"));
load.setGraphic(new ImageView(image));
image = new Image(new FileInputStream("Resources/btn12.png"));
delete.setGraphic(new ImageView(image));
image = new Image(new FileInputStream("Resources/mouse.png"));
select.setGraphic(new ImageView(image));
image = new Image(new FileInputStream("Resources/move.png"));
move.setGraphic(new ImageView(image));
image = new Image(new FileInputStream("Resources/resize.png"));
resize.setGraphic(new ImageView(image));
image = new Image(new FileInputStream("Resources/customShape.png"));
customShape.setGraphic(new ImageView(image));
image = new Image(new FileInputStream("Resources/loadShape.png"));
loadClass.setGraphic(new ImageView(image));
} catch (FileNotFoundException e) {
e.printStackTrace();
}
customShape.setPrefHeight(29);
customShape.setMinHeight(29);
loadClass.setPrefHeight(29);
loadClass.setMinHeight(29);
select.setPrefHeight(29);
select.setMinHeight(29);
move.setPrefHeight(29);
move.setMinHeight(29);
resize.setPrefHeight(29);
resize.setMinHeight(29);
customShape.setMinHeight(29);
colorPicker.setMinHeight(29);
colorPicker2.setMinHeight(29);
undo.setMinHeight(31);
redo.setMinHeight(31);
addedShapes.setMinHeight(31);
loadClass.setMinHeight(31);
addedShapes.setMinWidth(200);
Fill.setFont(new Font("Arial", 20));
Border.setFont(new Font("Arial", 20));
Border.setCenterShape(true);
primaryStage.setScene(new Scene(root, Region.USE_PREF_SIZE, Region.USE_PREF_SIZE));
primaryStage.setTitle("Paint");
primaryStage.setResizable(false);
try {
image = new Image(new FileInputStream("Resources/paint.png"));
} catch (FileNotFoundException e) {
e.printStackTrace();
}
primaryStage.getIcons().add(image);
primaryStage.show();
Triangle.setOnAction(e -> {
current = "Triangle";
disable(Triangle);
});
line.setOnAction(e -> {
current = "Line";
disable(line);
});
Rectangle.setOnAction(e -> {
current = "Rectangle";
disable(Rectangle);
});
Circle.setOnAction(e -> {
current = "Circle";
disable(Circle);
});
Ellipse.setOnAction(e -> {
current = "Ellipse";
disable(Ellipse);
});
Square.setOnAction(e -> {
current = "Square";
disable(Square);
});
select.setOnAction(e -> {
current = "select";
disable(select);
});
move.setOnAction(e -> {
current = "move";
disable(move);
});
resize.setOnAction(e -> {
current = "resize";
disable(resize);
});
loadClass.setOnAction(e -> {
File selectedFile = JarChooser.showOpenDialog(primaryStage);
if(selectedFile!=null)
{
java.util.jar.JarFile jarfile = null; //jar file path(here sqljdbc4.jar)
try {
jarfile = new java.util.jar.JarFile(selectedFile);
} catch (IOException ex) {
ex.printStackTrace();
}
assert jarfile != null;
java.util.Enumeration<JarEntry> enu= jarfile.entries();
while(enu.hasMoreElements())
{
current = selectedFile.getName();
current = current.substring(0, current.length() - 4);
String destdir = "loaded_data/"+current; //abc is my destination directory
JarEntry je = enu.nextElement();
File fl = new File(destdir, je.getName());
if(!fl.exists())
{
fl.getParentFile().mkdirs();
fl = new java.io.File(destdir, je.getName());
}
if(je.isDirectory())
{
continue;
}
InputStream is = null;
try {
is = jarfile.getInputStream(je);
} catch (IOException ex) {
ex.printStackTrace();
}
FileOutputStream fo = null;
try {
fo = new FileOutputStream(fl);
} catch (FileNotFoundException ex) {
ex.printStackTrace();
}
while(true)
{
try {
assert is != null;
if (is.available()<=0) break;
} catch (IOException ex) {
ex.printStackTrace();
}
try {
assert fo != null;
fo.write(is.read());
} catch (IOException ex) {
ex.printStackTrace();
}
}
try {
assert fo != null;
fo.close();
} catch (IOException ex) {
ex.printStackTrace();
}
try {
is.close();
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
if (selectedFile != null) {
current = selectedFile.getName();
current = current.substring(0, current.length() - 4);
selectedFile=new File("loaded_data/"+current+"/eg/edu/alexu/csd/oop/draw/"+current+".class");
Class d = null;
try {
current = selectedFile.getName();
current = current.substring(0, current.length() - 6);
try {
String path = System.getProperty("user.dir");
copyFileUsingChannel(selectedFile, new File(path + "/target/classes/Hybrid/eg/edu/alexu/csd/oop/draw/"+current+".class"));
} catch (IOException ex) {
ex.printStackTrace();
}
String path = System.getProperty("user.dir");
String pack = "eg.edu.alexu.csd.oop.draw";
String Hybridpack = path+"/target/classes/Hybrid";
URL[] a = new URL[]{new File(Hybridpack).toURI().toURL()};
URLClassLoader c = new URLClassLoader(a);
String yahia = pack+"."+current;
d = c.loadClass(yahia);
} catch (ClassNotFoundException | NoClassDefFoundError ex) {
ex.printStackTrace();
} catch (MalformedURLException ex) {
ex.printStackTrace();
}
assert d != null;
if(Shape.class.isAssignableFrom(d)){
addedShapes.getItems().add(current);
addedShapes.setValue(current);
current="select";
}
else {
Alert alert = new Alert(Alert.AlertType.CONFIRMATION);
alert.setTitle("Wrong Class");
alert.setHeaderText("The class you have chosen doesn't implement the required Interface");
alert.showAndWait();
}
}
});
addedShapes.setOnAction(e->{
select.fire();
current="select";
});
customShape.setOnAction(e -> {
current= (String) addedShapes.getValue();
String pack = "eg.edu.alexu.csd.oop.shapes";
try {
Shape shape;
if("Circle".equals(current)|| "Triangle".equals(current) || "Line".equals(current)
|| "Square".equals(current)|| "Ellipse".equals(current)|| "Rectangle".equals(current)) {
Class cl = Class.forName(pack + "." + current);
shape = (Shape) cl.newInstance();
} else {
String path1 = System.getProperty("user.dir");
path1 = path1.replace('\\', '/');
URLClassLoader x = new URLClassLoader(new URL[]{new File(path1+"/target/classes/Hybrid/").toURI().toURL()});
pack = "eg.edu.alexu.csd.oop.draw";
Class cl = x.loadClass(pack + "." + current);
pack = "eg.edu.alexu.csd.oop.shapes";
shape = (Shape) cl.newInstance();
}
ShapeDialogBox shapeDiaglogBox = new ShapeDialogBox(shape,engine,graphics);
} catch (ClassNotFoundException | IllegalAccessException | InstantiationException | MalformedURLException ex) {
ex.printStackTrace();
}
disable(customShape);
customShape.setDisable(false);
select.fire();
});
undo.setOnAction(e -> {
engine.undo();
engine.refresh(graphics);
});
redo.setOnAction(e -> {
engine.redo();
engine.refresh(graphics);
});
save.setOnAction(e -> {
File selectedFile = ChooseFile.showOpenDialog(primaryStage);
if (selectedFile != null) {
engine.save(selectedFile.getPath());
}
});
load.setOnAction(e -> {
File selectedFile = ChooseFile.showOpenDialog(primaryStage);
if (selectedFile != null) {
engine.load(selectedFile.getPath());
engine.refresh(graphics);
}
});
delete.setOnAction(e -> {
if (current.equals("select") && newShape[2] != null) {
engine.removeShape(newShape[2]);
engine.updateUndo();
engine.refresh(graphics);
newShape[2] = null;
ct2.set(0);
}
});
canvas.setOnMousePressed(e -> {
switch (current) {
case "select":
case "resize":
case "move": {
newShape[0] = engine.checkOnShapes((int) e.getX(), (int) e.getY());
if (newShape[0] != null) {
if (newShape[0].getProperties().get("type") == 0d && current.equals("resize")) {
ShapeDialogBox shapeDiaglogBox = new ShapeDialogBox(newShape[0], engine, graphics);
engine.refresh(graphics);
disable(customShape);
customShape.setDisable(false);
break;
}
try {
newShape[1] = (Shape) newShape[0].clone();
} catch (CloneNotSupportedException ex) {
ex.printStackTrace();
}
p.set(new Point((int) e.getX(), (int) e.getY()));
if(!"select".equals(current)) {
Map<String, Double> secondPoint = new HashMap<>(newShape[1].getProperties());
java.awt.Color temp = newShape[1].getColor();
newShape[1].setColor(newShape[1].getFillColor());
newShape[1].setFillColor(temp);
newShape[1].setProperties(secondPoint);
}
engine.removeShape(newShape[0]);
ct2.getAndIncrement();
}
break;
}
case "Triangle": {
if (ct1.get() == 1) {
t2.set(new Point((int) e.getX(), (int) e.getY()));
} else {
p.set(new Point((int) e.getX(), (int) e.getY()));
}
ct1.getAndIncrement();
break;
}
default: {
p.set(new Point((int) e.getX(), (int) e.getY()));
break;
}
}
});
canvas.setOnMouseDragged(e -> {
switch (current) {
case "resize": {
if (newShape[1] != null) {
Shape l = null;
if (newShape[1].getProperties().get("type") == 6d) {
l = new Triangle();
try {
l = (Shape) newShape[1].clone();
} catch (CloneNotSupportedException ex) {
ex.printStackTrace();
}
int diffX = (int) e.getX() - p.get().x;
int diffY = (int) e.getY() - p.get().y;
Map<String, Double> secondPoint = new HashMap<>(l.getProperties());
secondPoint.put("x3", l.getProperties().get("x3") + diffX);
secondPoint.put("y3", l.getProperties().get("y3") + diffY);
l.setProperties(secondPoint);
engine.addTempShape(l);
engine.refresh(graphics);
engine.removeLastShape();
try {
newShape[1] = (Shape) l.clone();
} catch (CloneNotSupportedException ex) {
ex.printStackTrace();
}
p.get().x = (int) e.getX();
p.get().y = (int) e.getY();
break;
} else if (newShape[1].getProperties().get("type") == 0d) {
break;
} else {
try {
l = newShape[1].getClass().newInstance();
} catch (InstantiationException | IllegalAccessException ex) {
ex.printStackTrace();
}
}
try {
l = (Shape) newShape[1].clone();
} catch (CloneNotSupportedException ex) {
ex.printStackTrace();
}
int diffX = (int) e.getX() - p.get().x;
int diffY = (int) e.getY() - p.get().y;
assert l != null;
Map<String, Double> secondPoint = new HashMap<>(l.getProperties());
secondPoint.put("x2", l.getProperties().get("x2") + diffX);
secondPoint.put("y2", l.getProperties().get("y2") + diffY);
secondPoint.put("released", 0d);
l.setProperties(secondPoint);
engine.addTempShape(l);
engine.refresh(graphics);
engine.removeLastShape();
try {
newShape[1] = (Shape) l.clone();
} catch (CloneNotSupportedException ex) {
ex.printStackTrace();
}
p.get().x = (int) e.getX();
p.get().y = (int) e.getY();
}
break;
}
case "move": {
if (newShape[1] != null) {
Shape l = null;
if (newShape[1].getProperties().get("type") == 6d) {
l = new Triangle();
try {
l = (Shape) newShape[1].clone();
} catch (CloneNotSupportedException ex) {
ex.printStackTrace();
}
int diffX = (int) e.getX() - p.get().x;
int diffY = (int) e.getY() - p.get().y;
l.setPosition(new Point(l.getPosition().x + diffX, l.getPosition().y + diffY));
Map<String, Double> secondPoint = new HashMap<>(l.getProperties());
secondPoint.put("x2", l.getProperties().get("x2") + diffX);
secondPoint.put("y2", l.getProperties().get("y2") + diffY);
secondPoint.put("x3", l.getProperties().get("x3") + diffX);
secondPoint.put("y3", l.getProperties().get("y3") + diffY);
l.setProperties(secondPoint);
engine.addTempShape(l);
engine.refresh(graphics);
engine.removeLastShape();
try {
newShape[1] = (Shape) l.clone();
} catch (CloneNotSupportedException ex) {
ex.printStackTrace();
}
p.get().x = (int) e.getX();
p.get().y = (int) e.getY();
break;
}
else if (newShape[1].getProperties().get("type") == 0d) {
try {
l = newShape[1].getClass().newInstance();
} catch (InstantiationException | IllegalAccessException ex) {
ex.printStackTrace();
}
try {
l = (Shape) newShape[1].clone();
} catch (CloneNotSupportedException ex) {
ex.printStackTrace();
}
int diffX = (int) e.getX() - p.get().x;
int diffY = (int) e.getY() - p.get().y;
assert l != null;
l.setPosition(new Point(l.getPosition().x + diffX, l.getPosition().y + diffY));
l.setProperties(newShape[1].getProperties());
l.setFillColor(newShape[1].getColor());
l.setFillColor(newShape[1].getFillColor());
engine.addTempShape(l);
engine.refresh(graphics);
engine.removeLastShape();
try {
newShape[1] = (Shape) l.clone();
} catch (CloneNotSupportedException ex) {
ex.printStackTrace();
}
p.get().x = (int) e.getX();
p.get().y = (int) e.getY();
break;
} else {
try {
l = newShape[1].getClass().newInstance();
} catch (InstantiationException | IllegalAccessException ex) {
ex.printStackTrace();
}
}
try {
l = (Shape) newShape[1].clone();
} catch (CloneNotSupportedException ex) {
ex.printStackTrace();
}
int diffX = (int) e.getX() - p.get().x;
int diffY = (int) e.getY() - p.get().y;
assert l != null;
l.setPosition(new Point(l.getPosition().x + diffX, l.getPosition().y + diffY));
Map<String, Double> secondPoint = new HashMap<>(l.getProperties());
secondPoint.put("x2", l.getProperties().get("x2") + diffX);
secondPoint.put("y2", l.getProperties().get("y2") + diffY);
l.setProperties(secondPoint);
engine.addTempShape(l);
engine.refresh(graphics);
engine.removeLastShape();
try {
newShape[1] = (Shape) l.clone();
} catch (CloneNotSupportedException ex) {
ex.printStackTrace();
}
p.get().x = (int) e.getX();
p.get().y = (int) e.getY();
}
break;
}
case "Triangle": {
if (ct1.get() != 2) break;
Triangle r = new Triangle();
r.setPosition(p.get());
Map<String, Double> length = new HashMap<>();
length.put("x2", (double) t2.get().x);
length.put("y2", (double) t2.get().y);
length.put("x3", e.getX());
length.put("y3", e.getY());
length.put("released", 0d);
r.setFillColor(getColor(colorPicker2.getValue()));
r.setColor(getColor(colorPicker.getValue()));
r.setProperties(length);
engine.addTempShape(r);
engine.refresh(graphics);
engine.removeShape(r);
engine.updateUndo();
break;
}
default: {
Shape l;
String pack = "eg.edu.alexu.csd.oop.shapes";
try {
Class cl = Class.forName(pack + "." + current);
l = (Shape) cl.newInstance();
l.setPosition(p.get());
Map<String, Double> length = new HashMap<>();
length.put("x2", e.getX());
length.put("y2", e.getY());
length.put("released", 0d);
l.setFillColor(getColor(colorPicker2.getValue()));
l.setColor(getColor(colorPicker.getValue()));
l.setProperties(length);
engine.addTempShape(l);
engine.refresh(graphics);
engine.removeLastShape();
newShape[1] = (Shape) l.clone();
} catch (ClassNotFoundException | IllegalAccessException | InstantiationException | CloneNotSupportedException ignored) {
}
break;
}
}
});
canvas.setOnMouseReleased(e -> {
switch (current) {
case "resize":
case "move": {
if (newShape[1] != null) {
if(newShape[1].getProperties().get("type") == 0d && "resize".equals(current)) break;
Map<String, Double> secondPoint = new HashMap<>(newShape[1].getProperties());
java.awt.Color temp=newShape[1].getColor();
newShape[1].setColor(newShape[1].getFillColor());
newShape[1].setFillColor(temp);
secondPoint.put("released", 1d);
newShape[1].setProperties(secondPoint);
engine.addShape(newShape[1]);
engine.refresh(graphics);
newShape[2] = newShape[1];
newShape[1] = null;
}
break;
}
case "Triangle": {
if (ct1.get() != 2) break;
Triangle r = new Triangle();
r.setPosition(p.get());
Map<String, Double> length = new HashMap<>();
length.put("x2", (double) t2.get().x);
length.put("y2", (double) t2.get().y);
length.put("x3", e.getX());
length.put("y3", e.getY());
r.setFillColor(getColor(colorPicker2.getValue()));
r.setColor(getColor(colorPicker.getValue()));
r.setProperties(length);
engine.addShape(r);
engine.refresh(graphics);
ct1.set(0);
break;
}
default: {
if (newShape[1] != null) {
Map <String,Double> m = new HashMap<>(newShape[1].getProperties());
m.put("released",1d);
newShape[1].setProperties(m);
engine.addShape(newShape[1]);
engine.refresh(graphics);
newShape[2] = newShape[1];
newShape[1] = null;
break;
}
}
}
});
root.setOnKeyPressed(ke -> {
KeyCode kc = ke.getCode();
if(kc.equals(KeyCode.DELETE))
{
delete.fire();
}
});
}
private void disable(Button x) {
    // Re-enable every tool button, then disable only the requested one so the
    // active tool is visually marked and cannot be clicked again.
    Triangle.setDisable(false);
    line.setDisable(false);
    Ellipse.setDisable(false);
    Square.setDisable(false);
    Rectangle.setDisable(false);
    Circle.setDisable(false);
    select.setDisable(false);
    customShape.setDisable(false);
    resize.setDisable(false);
    move.setDisable(false);
    x.setDisable(true);
}
/**
 * Converts a JavaFX {@code Color} (double components in [0, 1]) into the
 * equivalent {@code java.awt.Color}, preserving the alpha channel.
 */
private java.awt.Color getColor(Color v) {
    return new java.awt.Color(
            (float) v.getRed(),
            (float) v.getGreen(),
            (float) v.getBlue(),
            (float) v.getOpacity());
}
/**
 * Copies {@code src} to {@code dest} using NIO channel transfer.
 *
 * <p>Uses try-with-resources so both channels are always closed, even when
 * one of the constructors or the transfer fails. The previous version used
 * {@code assert channel != null; channel.close();} in a finally block, which
 * raised an AssertionError (assertions on) or NullPointerException
 * (assertions off) whenever opening the first stream failed — masking the
 * original IOException.
 *
 * @param src  existing source file to read from
 * @param dest destination file (created or truncated)
 * @throws IOException if either file cannot be opened or the copy fails
 */
private void copyFileUsingChannel(File src, File dest) throws IOException {
    try (FileChannel sourceChannel = new FileInputStream(src).getChannel();
         FileChannel destinationChannel = new FileOutputStream(dest).getChannel()) {
        destinationChannel.transferFrom(sourceChannel, 0, sourceChannel.size());
    }
}
} |
-- Replace the old lookup index with a plain index on gh_login.
-- NOTE(review): the dropped index name suggests it covered lower(gh_login);
-- queries that still filter on lower(gh_login) will not use the new plain
-- index — confirm callers now match on the exact-case column.
DROP INDEX IF EXISTS lower_gh_login;
CREATE INDEX index_users_gh_login ON users (gh_login);
# Import the required module
from pyembroidery.EmbEncoder import Transcoder
def get_shift_pattern():
    """Return the shift pattern to encode.

    Placeholder implementation; swap in the real source of the pattern here.
    """
    return [1, 2, 3, 1, 2, 3, 1]
# Get the shift pattern
pattern = get_shift_pattern()
# Initialize the transcoder
transcoder = Transcoder()
# Encode the shift pattern using the transcoder.
# NOTE(review): assumes pyembroidery's Transcoder exposes an `encode` method
# that accepts a plain list — confirm against the installed version.
encoded_pattern = transcoder.encode(pattern)
# Print the encoded shift pattern
print(encoded_pattern)
# pmt.py -n 10 -c 3 -s 3 -r 3 WebModel -o WebModelTest_n10_c3_s3_r3
# actions here are just labels, but must be symbols with __name__ attribute
# Action labels: no-op functions whose __name__ attribute the pmt test
# runner records; they carry no behavior of their own.
def Initialize(): pass
def ReadInt(): pass
def Login(): pass
def Logout(): pass
def UpdateInt(): pass
# action symbols
actions = (Initialize, ReadInt, Login, Logout, UpdateInt)
# Three recorded traces; each entry is an (action, argument-tuple) pair that
# the runner replays in order.
testSuite = [
    [
        (Initialize, ()),
        (Logout, ('OleBrumm',)),
        (UpdateInt, ('OleBrumm', 1)),
        (UpdateInt, ('OleBrumm', 1)),
        (UpdateInt, ('OleBrumm', 2)),
        (UpdateInt, ('OleBrumm', 1)),
        (Logout, ('OleBrumm',)),
        (Logout, ('VinniPuhh',)),
    ],
    #
    [
        (Initialize, ()),
        (UpdateInt, ('OleBrumm', 1)),
        (UpdateInt, ('OleBrumm', 2)),
        (Logout, ('OleBrumm',)),
        (UpdateInt, ('OleBrumm', 1)),
        (Logout, ('OleBrumm',)),
    ],
    #
    [
        (Initialize, ()),
        (UpdateInt, ('VinniPuhh', 1)),
        (UpdateInt, ('VinniPuhh', 1)),
        (UpdateInt, ('VinniPuhh', 2)),
        (Logout, ('VinniPuhh',)),
    ],
]
#!/bin/bash
# Written by Nanbo Sun and CBIG under MIT license: https://github.com/ThomasYeoLab/CBIG/blob/master/LICENSE.md
###########################################
# Usage and Reading in Parameters
###########################################
# Usage
# Print the usage/help text to stderr and exit with status 1.
usage() { echo "
Usage: $0 -a <docs1> -b <docs2> -t <setting> -k <no_topics> -s <start_init> -e <end_init> -m <MMLDA_dir> -o <out_dir> [-q <queue>]
- docs1 Text file with each line summarizing the document of first
modality; e.g., brain atrophy doc
- docs2 Text file with each line summarizing the document of second
modality; e.g., cognitive scores doc (generated by xxx)
- setting Setting file for MMLDA model; e.g.,
$CBIG_CODE_DIR/external_packages/mmlda-c-dist/setting-100iter.txt
- no_topics Number of topics / factors; e.g., 3
- start_init Start index of random initializations; e.g., 1
- end_init End index of random initializations; e.g., 20; then the MMLDA
model would run 1, 2, ..., 20 random initializations. And,
the index corresponds to the random seed, so it is easy to
replicate results by specifying same random seed.
- MMLDA_dir Directory of binary executable file for MMLDA
- out_dir Output directory; e.g., ~/outputs/MMLDA/
- queue (Optional) if you have a cluster, use it to specify the
queue to which you want to qsub these jobs; if not provided,
jobs will run serially (potentially very slow!)
" 1>&2; exit 1; }
# Reading in parameters
# Reading in parameters
while getopts ":a:b:t:k:s:e:m:o:q:" opt; do
  case "${opt}" in
    a) docs1=${OPTARG} ;;
    b) docs2=${OPTARG} ;;
    t) setting=${OPTARG} ;;
    k) no_topics=${OPTARG} ;;
    s) start_init=${OPTARG} ;;
    e) end_init=${OPTARG} ;;
    m) MMLDA_dir=${OPTARG} ;;
    o) out_dir=${OPTARG} ;;
    q) queue=${OPTARG} ;;
    *) usage ;;
  esac
done
shift $((OPTIND - 1))

# Every parameter except -q is mandatory; bail out if any is missing.
missing=0
for var in docs1 docs2 setting no_topics start_init end_init MMLDA_dir out_dir; do
  if [ -z "${!var:-}" ]; then
    missing=1
  fi
done
if [ "${missing}" -eq 1 ]; then
  echo Missing Parameters!
  usage
fi
###########################################
# Main
###########################################
echo '---MMLDA estimation.'
mkdir -p ${out_dir}/k${no_topics}
progress_file=${out_dir}/k${no_topics}/progress.txt
# Truncate the progress file; each finished run appends its index to it.
> ${progress_file}
# One MMLDA estimation per random initialization r (r doubles as the seed).
for (( r=${start_init}; r<=${end_init}; r++ )); do
    run_dir=${out_dir}/k${no_topics}/r${r}
    mkdir -p ${run_dir}
    log_file=${run_dir}/mmlda.log
    > ${log_file}
    # initialize alpha to be 1/no_topics
    alpha=$(echo 1/${no_topics} | bc -l)
    if [ -z "${queue}" ]; then
        # No queue specified: run this initialization serially, right here.
        # converting relative paths to absolute for qsub
        setting=$(readlink -f ${setting})
        docs1=$(readlink -f ${docs1})
        docs2=$(readlink -f ${docs2})
        run_dir=$(readlink -f ${run_dir})
        date >> ${log_file}
        echo "Docs: ${docs1} ${docs2}" >> ${log_file}
        echo "Number of topics: ${no_topics}" >> ${log_file}
        echo "Settings:" >> ${log_file}
        cat ${setting} >> ${log_file}
        ${MMLDA_dir}/MMLDA est ${alpha} ${no_topics} ${setting} ${docs1} ${docs2} random ${run_dir} ${r} >> ${log_file}
        echo "${r}" >> ${progress_file}
    else
        # Submit one job per initialization to the given queue.
        # NOTE(review): the unquoted EOJ delimiter means every $(...) and
        # ${...} below is expanded *now*, at submission time, by this shell —
        # the job script receives the literal resolved values, not code that
        # runs on the compute node. Confirm this is intended.
        qsub -V -q ${queue} << EOJ
#!/bin/bash
#PBS -N 'MMLDA_est'
#PBS -l walltime=150:00:0
#PBS -l mem=3gb
#PBS -e ${run_dir}/mmlda.err
#PBS -o ${run_dir}/mmlda.out
# converting relative paths to absolute for qsub
setting=$(readlink -f ${setting})
docs1=$(readlink -f ${docs1})
docs2=$(readlink -f ${docs2})
run_dir=$(readlink -f ${run_dir})
date >> ${log_file}
echo "Docs: ${docs1} ${docs2}" >> ${log_file}
echo "Number of topics: ${no_topics}" >> ${log_file}
echo "Settings:" >> ${log_file}
cat ${setting} >> ${log_file}
${MMLDA_dir}/MMLDA est ${alpha} ${no_topics} ${setting} ${docs1} ${docs2} random ${run_dir} ${r} >> ${log_file}
echo "${r}" >> ${progress_file}
EOJ
    fi
done
# total_num_job=$((${end_init}-${start_init}+1))
# ./CBIG_MMLDA_wait_until_finished.sh ${progress_file} ${total_num_job}
# echo "---MMLDA estimation. -- Finished." |
/*
* Copyright (c) 2021 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package secondapproach;
public abstract class AbstractApplication {

    /** Document currently managed by this application; null until newDocument(). */
    private IDocumentation currentDoc;

    /** Factory hook: concrete applications supply their documentation type. */
    public abstract IDocumentation createDocumentation();

    /** Creates a fresh document via the factory and opens it. */
    public void newDocument() {
        currentDoc = createDocumentation();
        currentDoc.open();
    }

    /** Closes the current document, if one exists. */
    public void close() {
        if (currentDoc == null) {
            return;
        }
        currentDoc.close();
    }

    /** Saves the current document, if one exists. */
    public void save() {
        if (currentDoc == null) {
            return;
        }
        currentDoc.save();
    }
}
|
<filename>lab2/src/main/java/hr/fer/tel/rassus/lab2/network/SimulatedDatagramSocket.java<gh_stars>0
package hr.fer.tel.rassus.lab2.network;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.SocketException;
import java.util.Random;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* This class represents a socket for sending datagram packets over a
* simulated network.
*
* <p>A datagram socket is the sending point for a packet delivery
* service. Each packet sent on a datagram socket is individually
* addressed and routed. Multiple packets sent from one machine
* to another may be routed differently, and may arrive in any order
* depending on parameters of the simulated network.
*
* @author <NAME> <<EMAIL>>
* @see DatagramSocket
* @see DatagramPacket
* @see java.nio.channels.DatagramChannel
*/
public class SimulatedDatagramSocket extends DatagramSocket {
    /**
     * Packet loss ratio of the simulated network, in [0, 1].
     */
    private double lossRate;
    /**
     * Average total delay of a packet in milliseconds (includes the sending
     * delay), and uniformly distributed delay variation in milliseconds.
     */
    private int averageDelay;
    private int jitter;
    /**
     * Time needed to put a single packet on the simulated wire, in milliseconds.
     */
    private int sendingDelay;
    /**
     * Random source for simulating loss and jitter (java.util.Random is
     * thread-safe for concurrent senders).
     */
    private Random random;
    /**
     * Sending-delay backlog of packets still queued for transmission.
     * Updated from the caller's thread in {@link #send} and decremented from
     * worker threads in {@link OutgoingDatagramPacket#run}, so every access
     * is guarded by {@link #delayLock} (the unsynchronized read-modify-write
     * in the original was a data race).
     */
    private int cumulatedSendingDelay;
    /**
     * Guards {@link #cumulatedSendingDelay}; a dedicated lock object avoids
     * interfering with any internal locking of DatagramSocket itself.
     */
    private final Object delayLock = new Object();
    /**
     * Constructs a datagram socket for sending datagram packets over a
     * simulated network and binds it to any available port on the local host
     * machine. A network is not simulated for receiving of packets; use an
     * additional <code>SimulatedDatagramSocket</code> on the sender side to
     * simulate the network while receiving.
     *
     * @param lossRate Packet loss ratio of the simulated network.
     * @param sendingDelay Sending delay of packets in milliseconds.
     * @param averageDelay Average delay of packets in milliseconds, including sending delay.
     * @param jitter Average delay variation of packets in milliseconds.
     *
     * @throws SocketException if the socket could not be opened or bound.
     * @throws IllegalArgumentException if <code>sendingDelay</code> or
     *         <code>averageDelay</code> is less or equal to zero.
     * @see DatagramSocket#DatagramSocket()
     */
    public SimulatedDatagramSocket(double lossRate, int sendingDelay, int averageDelay, int jitter) throws SocketException, IllegalArgumentException {
        if (sendingDelay <= 0 || averageDelay <= 0) {
            // The implicit super() call has already bound a port; release it
            // before rejecting the arguments so the socket is not leaked.
            super.close();
            throw new IllegalArgumentException("Delays should be greater than zero");
        }
        random = new Random();
        this.lossRate = lossRate;
        this.sendingDelay = sendingDelay;
        this.cumulatedSendingDelay = -sendingDelay;
        this.averageDelay = averageDelay;
        this.jitter = jitter;
        // set time to wait for answer: long enough for a maximally delayed reply
        super.setSoTimeout(2 * (jitter + averageDelay));
    }
    /**
     * Sends a datagram packet from this socket over the simulated network.
     * The packet is dropped with probability {@code lossRate}; otherwise it
     * is delivered asynchronously on a worker thread after the configured
     * sending delay, backlog and jitter.
     *
     * @param packet the <code>DatagramPacket</code> to be sent.
     *
     * @throws IOException if an I/O error occurs.
     * @see DatagramSocket#send
     * @see DatagramPacket
     */
    @Override
    public void send(DatagramPacket packet) throws IOException {
        if (random.nextDouble() >= lossRate) {
            long delay;
            synchronized (delayLock) {
                cumulatedSendingDelay += sendingDelay;
                // jitter is uniformly distributed in [-jitter, +jitter]
                delay = averageDelay - sendingDelay + cumulatedSendingDelay
                        + (long) (2 * (random.nextDouble() - 0.5) * jitter);
            }
            new Thread(new OutgoingDatagramPacket(packet, delay)).start();
        }
    }
    /**
     * Worker that delivers a single packet after the simulated delays.
     */
    private class OutgoingDatagramPacket implements Runnable {
        private final DatagramPacket packet;
        private final long time;
        private OutgoingDatagramPacket(DatagramPacket packet, long time) {
            this.packet = packet;
            this.time = time;
        }
        @Override
        public void run() {
            try {
                // simulate sending delay
                Thread.sleep(sendingDelay);
                synchronized (delayLock) {
                    cumulatedSendingDelay -= sendingDelay;
                }
                // simulate network delay; clamp so a large negative jitter
                // draw cannot make Thread.sleep throw IllegalArgumentException
                Thread.sleep(Math.max(0L, time));
                SimulatedDatagramSocket.super.send(packet);
            } catch (InterruptedException e) {
                // restore the interrupt status (Thread.interrupted() would
                // clear it and swallow the interruption)
                Thread.currentThread().interrupt();
            } catch (IOException ex) {
                Logger.getLogger(SimulatedDatagramSocket.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }
}
|
# Benchmark knobs (sizes/ratios consumed by the db_bench invocations below).
NKEYS=90000
NREADS=8000
NTHREADS=1
NOPEN_FILES=5000
SKEW=0.9
WARM_RATIO=0.45
COLLECT_STATS=1
VALUE_SIZE=1024
READ_PCT=0.5
LL_DB=/disks/data/logstore
DB_DIR=/disks/ssd/logstore
BLOOM_BITS_SIZE=16
STATS_INT=10000
WRITE_BUFF_SIZE=4000
# Clean previous run artifacts. Reuse LL_DB/DB_DIR instead of repeating the
# literal paths (they duplicated the variables above and could drift), and
# guard with ${VAR:?} so an empty variable can never expand to a bare "/*".
# -f keeps the cleanup quiet when the directories are already empty.
rm -f -- "${LL_DB:?}"/* "${DB_DIR:?}"/*
#make clean; make -j4 all db_bench
# Run RO Benchmark
#/usr/bin/time -v ./db_bench --db=$DB_DIR --ll_db=$LL_DB --benchmarks=fillbatch,stats,warmlogstore,pause,stats,warmcache,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats --num=$NKEYS --reads=$NREADS --threads=$NTHREADS --compression_ratio=0.99 --value_size=$VALUE_SIZE --bloom_bits=$BLOOM_BITS_SIZE --open_files=$NOPEN_FILES --stats_interval=$STATS_INT --warm_ratio=$WARM_RATIO --advise_random_on_open=1 --zipf_skew=$SKEW --use_statistics=$COLLECT_STATS
#/usr/bin/time -v ./db_bench --db=$DB_DIR --ll_db=$LL_DB --benchmarks=fillbatch,stats,warmlogstore,stats,warmcache,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats --num=$NKEYS --reads=$NREADS --threads=$NTHREADS --compression_ratio=0.99 --value_size=$VALUE_SIZE --bloom_bits=$BLOOM_BITS_SIZE --open_files=$NOPEN_FILES --stats_interval=$STATS_INT --warm_ratio=$WARM_RATIO --advise_random_on_open=1 --zipf_skew=$SKEW --use_statistics=$COLLECT_STATS
# Run WO Benchmark
#/usr/bin/time -v ./db_bench --db=$DB_DIR --ll_db=$LL_DB --benchmarks=fillbatch,stats,pause,warmcache,stats,genzipfinput,genzipfinput,genzipfinput,genzipfinput,genzipfinput,genzipfinput,genzipfinput,genzipfinput,genzipfinput,genzipfinput,genzipfinput,genzipfinput,genzipfinput,genzipfinput,genzipfinput,genzipfinput,writezipf,stats,writezipf,stats,writezipf,stats,writezipf,stats,writezipf,stats,writezipf,stats,writezipf,stats,writezipf,stats,writezipf,stats,writezipf,stats,writezipf,stats,writezipf,stats,writezipf,stats,writezipf,stats,writezipf,stats --num=$NKEYS --reads=$NREADS --threads=$NTHREADS --compression_ratio=0.99 --value_size=$VALUE_SIZE --bloom_bits=$BLOOM_BITS_SIZE --open_files=$NOPEN_FILES --stats_interval=100000 --warm_ratio=$WARM_RATIO --advise_random_on_open=1 --zipf_skew=$SKEW --use_statistics=$COLLECT_STATS
#/usr/bin/time -v ./db_bench --db=$DB_DIR --ll_db=$LL_DB --benchmarks=fillbatch,stats,pause,warmcache,stats,genzipfinput,genzipfinput,genzipfinput,writezipf,stats,writezipf,stats,writezipf,stats --num=$NKEYS --reads=$NREADS --threads=$NTHREADS --compression_ratio=0.99 --value_size=$VALUE_SIZE --bloom_bits=$BLOOM_BITS_SIZE --open_files=$NOPEN_FILES --stats_interval=100000 --warm_ratio=$WARM_RATIO --advise_random_on_open=1 --zipf_skew=$SKEW --use_statistics=$COLLECT_STATS
# Run RW Benchmark
#/usr/bin/time -v ./db_bench --db=$DB_DIR --ll_db=$LL_DB --benchmarks=fillbatch,stats,warmlogstore,pause,stats,warmcache,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readzipfinput,stats,genzipfinput,readwritezipf,stats,genzipfinput,readwritezipf,stats,genzipfinput,readwritezipf,stats,genzipfinput,readwritezipf,stats,genzipfinput,readwritezipf,stats,genzipfinput,readwritezipf,stats,genzipfinput,readwritezipf,stats,genzipfinput,readwritezipf,stats,genzipfinput,readwritezipf,stats,genzipfinput,readwritezipf,stats,genzipfinput,readwritezipf,stats,genzipfinput,readwritezipf,stats,genzipfinput,readwritezipf,stats,genzipfinput,readwritezipf,stats,genzipfinput,readwritezipf,stats,genzipfinput,readwritezipf,stats --num=$NKEYS --reads=$NREADS --threads=$NTHREADS --compression_ratio=0.99 --value_size=$VALUE_SIZE --bloom_bits=$BLOOM_BITS_SIZE --open_files=$NOPEN_FILES --stats_interval=100000 --warm_ratio=$WARM_RATIO --advise_random_on_open=1 --zipf_skew=$SKEW --use_statistics=$COLLECT_STATS --read_pct=$READ_PCT
#YCSB Workloads
# Set READ_PCT to 0.5 for Workload-A, and to 0.95 for Workload-B
#/usr/bin/time -v ./db_bench --db=$DB_DIR --ll_db=$LL_DB --benchmarks=fillbatch,stats,pause,warmcache,stats,genzipfinput,ycsb,stats,genzipfinput,ycsb,stats,genzipfinput,ycsb,stats,genzipfinput,ycsb,stats,genzipfinput,ycsb,stats,genzipfinput,ycsb,stats,genzipfinput,ycsb,stats,genzipfinput,ycsb,stats,genzipfinput,ycsb,stats,genzipfinput,ycsb,stats,genzipfinput,ycsb,stats,genzipfinput,ycsb,stats,genzipfinput,ycsb,stats,genzipfinput,ycsb,stats,genzipfinput,ycsb,stats,genzipfinput,ycsb,stats --num=$NKEYS --reads=$NREADS --threads=$NTHREADS --compression_ratio=0.99 --value_size=1024 --bloom_bits=16 --open_files=$NOPEN_FILES --stats_interval=100000 --warm_ratio=$WARM_RATIO --zipf_skew=$SKEW --use_statistics=$COLLECT_STATS --read_pct=$READ_PCT
# Workload-E
# Active run: YCSB Workload-E style scan benchmark — repeated
# genzipfinput/ycsbscan rounds over a Zipfian key distribution, with stats
# dumped after each round; READ_PCT (set above) controls the read fraction.
/usr/bin/time -v ./db_bench --db=$DB_DIR --ll_db=$LL_DB --benchmarks=fillbatch,stats,pause,warmcache,stats,genzipfinput,ycsbscan,stats,genzipfinput,ycsbscan,stats,genzipfinput,ycsbscan,stats,genzipfinput,ycsbscan,stats,genzipfinput,ycsbscan,stats,genzipfinput,ycsbscan,stats,genzipfinput,ycsbscan,stats,genzipfinput,ycsbscan,stats,genzipfinput,ycsbscan,stats,genzipfinput,ycsbscan,stats,genzipfinput,ycsbscan,stats,genzipfinput,ycsbscan,stats,genzipfinput,ycsbscan,stats,genzipfinput,ycsbscan,stats,genzipfinput,ycsbscan,stats,genzipfinput,ycsbscan,stats --num=$NKEYS --reads=$NREADS --threads=$NTHREADS --compression_ratio=0.99 --value_size=1024 --bloom_bits=16 --open_files=$NOPEN_FILES --stats_interval=100000 --warm_ratio=$WARM_RATIO --zipf_skew=$SKEW --use_statistics=$COLLECT_STATS --read_pct=$READ_PCT
# Run RANDOM Benchmark
#/usr/bin/time -v ./db_bench --db=$DB_DIR --ll_db=$LL_DB --benchmarks=fillbatch,stats,warmcache,stats,readrandom,stats,readrandom,stats,readrandom,stats,readrandom,stats,readrandom,stats,readrandom,stats --num=$NKEYS --reads=$NREADS --threads=$NTHREADS --compression_ratio=0.99 --value_size=$VALUE_SIZE --bloom_bits=$BLOOM_BITS_SIZE --open_files=$NOPEN_FILES --stats_interval=100000 --warm_ratio=$WARM_RATIO --advise_random_on_open=1 --zipf_skew=$SKEW --use_statistics=$COLLECT_STATS
|
import nltk

# Tokenize the sentence and tag each token with its part of speech.
sentence = 'This is an example sentence.'
words = nltk.word_tokenize(sentence)
tagged_words = nltk.pos_tag(words)

# Total number of characters across all tokens.
letter_count = sum(len(word) for word in words)

print('POS Tags: ', tagged_words)
print('Letter Count: ', letter_count)
/** Express-style route handler for `POST /carts` (see the OAS annotation below). */
declare const _default: (req: any, res: any) => Promise<void>;
/**
* @oas [post] /carts
* summary: "Create a Cart"
* operationId: "PostCart"
* description: "Creates a Cart within the given region and with the initial items. If no
* `region_id` is provided the cart will be associated with the first Region
* available. If no items are provided the cart will be empty after creation.
* If a user is logged in the cart's customer id and email will be set."
* requestBody:
* content:
* application/json:
* schema:
* properties:
* region_id:
* type: string
* description: The id of the Region to create the Cart in.
* country_code:
* type: string
* description: "The 2 character ISO country code to create the Cart in."
* items:
* description: "An optional array of `variant_id`, `quantity` pairs to generate Line Items from."
* type: array
* items:
* properties:
* variant_id:
* description: The id of the Product Variant to generate a Line Item from.
* type: string
* quantity:
* description: The quantity of the Product Variant to add
* type: integer
* context:
* description: "An optional object to provide context to the Cart. The `context` field is automatically populated with `ip` and `user_agent`"
* type: object
* tags:
* - Cart
* responses:
* 200:
* description: "Successfully created a new Cart"
* content:
* application/json:
* schema:
* properties:
* cart:
* $ref: "#/components/schemas/cart"
*/
/** Default export: the `POST /carts` route handler declared above. */
export default _default;
export declare class Item {
    /** The id of the Product Variant to generate a Line Item from. */
    variant_id: string;
    /** The quantity of the Product Variant to add. */
    quantity: number;
}
/** Request body shape for `POST /carts`. */
export declare class StorePostCartReq {
    /** The id of the Region to create the Cart in. */
    region_id?: string;
    /** The 2 character ISO country code to create the Cart in. */
    country_code?: string;
    /** Optional (variant_id, quantity) pairs to generate Line Items from. */
    items?: Item[];
    /** Optional context object; `ip` and `user_agent` are auto-populated. */
    context?: object;
}
|
#!/usr/bin/env bash
# Replace each line's first ", " in TARGET with a literal newline, in place.
# cf. http://qiita.com/richmikan@github/items/3c74212b0d8dec9bd00f
#     ("Replacing a string with a newline using sed, smartly")

# -e lives in `set` rather than the shebang so it also applies when the
# script is invoked as `bash script.sh`.
set -e

TARGET="${1:-}"
if [ -z "$TARGET" ]; then
  echo TARGET required
  exit 1
fi

# Build a replacement string that holds a backslash-escaped newline, since a
# raw newline cannot be written directly in the s/// replacement portably.
LF=$(printf '\\\012_')
LF=${LF%_}

# Portability fix: GNU sed expects `-i` with no argument, while BSD/macOS sed
# expects `-i ''`. The original hard-coded the BSD form, which fails on GNU
# sed ("-i ''" is parsed as an extra operand). Detect the flavor and use the
# matching syntax.
if sed --version >/dev/null 2>&1; then
  sed -e "s/, /$LF/" -i "$TARGET"
else
  sed -e "s/, /$LF/" -i "" "$TARGET"
fi
<filename>INFO/Books Codes/Oracle PLSQL Tips and Techniques/OutputChapter13/13_12.sql
-- ***************************************************************************
-- File: 13_12.sql
--
-- Developed By TUSC
--
-- Disclaimer: Neither Osborne/McGraw-Hill, TUSC, nor the author warrant
-- that this source code is error-free. If any errors are
-- found in this source code, please report them to TUSC at
-- (630)960-2909 ext 1011 or <EMAIL>.
-- ***************************************************************************
REM Regenerate runnable CREATE OR REPLACE source for all of an owner's
REM packages, procedures and functions into 13_12.lis.
SPOOL 13_12.lis
SET TERMOUT ON
PROMPT Generating script to create:
PROMPT packages, procedures and functions...
PROMPT
DEFINE tablespace = &&tablespace
DEFINE owner = &&owner
PROMPT
SET TERMOUT OFF
SPOOL 13_12.log
REM Work table holding the rebuilt source, one line per row, in emit order.
CREATE TABLE migrate_procedures
(statement_sequence NUMBER NOT NULL,
statement_text VARCHAR2(2000))
TABLESPACE &&tablespace
STORAGE (INITIAL 1M NEXT 1M PCTINCREASE 0);
-- Walk dba_source for the owner and rebuild each program unit as a
-- CREATE OR REPLACE statement, terminating every unit with a "/" line.
DECLARE
  lv_sequence_num PLS_INTEGER := 0;
  CURSOR cur_source IS
    SELECT name, type, line, text
    FROM dba_source
    WHERE owner = UPPER('&&owner')
    AND type IN ('PACKAGE', 'PACKAGE BODY',
    'PROCEDURE', 'FUNCTION')
    -- Emit package specs first, then bodies, then functions, then procedures.
    ORDER BY DECODE(type, 'FUNCTION', '2', 'PROCEDURE', '3',
    'PACKAGE', '1' || name || 'PA',
    '1' || name || 'PB'), name, line;
BEGIN
  FOR cur_source_rec IN cur_source LOOP
    -- line = 1 marks the first source line of a new program unit.
    IF cur_source_rec.line = 1 THEN
      IF lv_sequence_num != 0 THEN
        -- Terminate the previous unit with a "/" before starting a new one.
        lv_sequence_num := lv_sequence_num + 1;
        INSERT INTO migrate_procedures
        (statement_sequence, statement_text)
        VALUES
        (lv_sequence_num, '/');
      END IF;
      lv_sequence_num := lv_sequence_num + 1;
      INSERT INTO migrate_procedures
      (statement_sequence, statement_text)
      VALUES
      (lv_sequence_num, 'CREATE OR REPLACE ' ||
      cur_source_rec.text);
    ELSE
      lv_sequence_num := lv_sequence_num + 1;
      INSERT INTO migrate_procedures
      (statement_sequence, statement_text)
      VALUES
      (lv_sequence_num, cur_source_rec.text);
    END IF;
  END LOOP;
  -- Terminate the final unit.
  lv_sequence_num := lv_sequence_num + 1;
  INSERT INTO migrate_procedures
  (statement_sequence, statement_text)
  VALUES
  (lv_sequence_num, '/');
EXCEPTION
  WHEN OTHERS THEN
    DBMS_OUTPUT.PUT_LINE('Program Error-Begin Error Message.');
    DBMS_OUTPUT.PUT_LINE(SQLERRM);
    RAISE_APPLICATION_ERROR(-20000, 'End of error message');
END;
/
SPOOL OFF
SET HEADING OFF
SET PAGESIZE 0
SET FEEDBACK OFF
REM Emit the rebuilt source, in order, into the .lis file, then clean up.
SPOOL 13_12.lis
SELECT statement_text
FROM migrate_procedures
ORDER BY statement_sequence;
SPOOL OFF
DROP TABLE migrate_procedures;
UNDEFINE tablespace
UNDEFINE owner
SPOOL OFF
|
<filename>src/main/java/com/example/demo/TrieDemo/Trie.java
package com.example.demo.TrieDemo;
import com.google.common.base.CharMatcher;
import org.apache.commons.lang.StringUtils;
/**
* @author lei.X
* @date 2018/8/23
*/
public class Trie {
    // Alphabet size: children per node (lower-case ASCII letters only).
    private final int SIZE = 26;
    // Number of nodes in the tree, excluding the root.
    private int numNode;
    // Depth of the tree, i.e. the length of the longest inserted string.
    private int depth;
    // Root of the trie; carries no character value.
    private TrieNode root;
    public Trie() {
        this.numNode = 0;
        this.depth = 0;
        this.root = new TrieNode();
    }
    /**
     * Trie node; private inner class.
     */
    private class TrieNode {
        // Child nodes, one slot per letter 'a'..'z'.
        private TrieNode[] son;
        // How many inserted strings pass through or end at this node.
        private int numPass;
        // How many inserted strings end exactly at this node.
        private int numEnd;
        // Whether at least one string ends at this node.
        private boolean isEnd;
        // Character stored at this node.
        private char value;
        /**
         * Initializes an empty node.
         */
        public TrieNode() {
            this.numPass = 0;
            this.numEnd = 0;
            this.son = new TrieNode[SIZE];
            this.isEnd = false;
        }
    }
    /**
     * Validates that {@code str} is non-blank and consists solely of ASCII
     * letters ('a'-'z' / 'A'-'Z').
     *
     * <p>Bug fix: the previous check (Guava {@code javaLetter()}) accepted
     * any Unicode letter, so a string such as "héllo" passed validation and
     * then crashed {@link #insertStr} with an ArrayIndexOutOfBoundsException
     * when {@code c - 'a'} indexed the 26-slot child array. Restricting the
     * check to ASCII letters fixes the crash (and removes the Guava and
     * commons-lang dependencies).
     */
    private boolean isStrOfLetter(String str) {
        if (str == null || str.trim().isEmpty()) {
            return false;
        }
        for (int i = 0; i < str.length(); i++) {
            char c = str.charAt(i);
            boolean asciiLetter = (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z');
            if (!asciiLetter) {
                return false;
            }
        }
        return true;
    }
    /**
     * Inserts a string into the trie, case-insensitively.
     *
     * @param str string to insert; must be non-blank ASCII letters
     * @return true if inserted, false if the string was rejected
     */
    public boolean insertStr(String str) {
        if (!isStrOfLetter(str)) {
            return false;
        }
        str = str.toLowerCase();
        char[] letters = str.toCharArray();
        TrieNode node = this.root;
        for (char c : letters) {
            int pos = c - 'a';
            if (node.son[pos] == null) {
                node.son[pos] = new TrieNode();
                node.son[pos].value = c;
                node.son[pos].numPass = 1;
                this.numNode++;
            } else {
                node.son[pos].numPass++;
            }
            node = node.son[pos];
        }
        // Mark the terminal node and keep the bookkeeping counters current.
        node.isEnd = true;
        node.numEnd++;
        if (letters.length > this.depth) {
            this.depth = letters.length;
        }
        return true;
    }
    /**
     * Returns whether any inserted string starts with the given prefix,
     * case-insensitively.
     *
     * @param str candidate prefix; must be non-blank ASCII letters
     * @return true if some inserted string has this prefix
     */
    public boolean isContainPrefix(String str) {
        if (!isStrOfLetter(str)) {
            return false;
        }
        str = str.toLowerCase();
        TrieNode node = this.root;
        for (char c : str.toCharArray()) {
            int pos = c - 'a';
            if (node.son[pos] != null) {
                node = node.son[pos];
            } else {
                return false;
            }
        }
        return true;
    }
}
|
/**
* Orthanc - A Lightweight, RESTful DICOM Store
* Copyright (C) 2012-2016 <NAME>, Medical Physics
* Department, University Hospital of Liege, Belgium
* Copyright (C) 2017-2020 <NAME>., Belgium
*
* This program is free software: you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/>.
**/
#include "PrecompiledHeaders.h"
#include "Logging.h"
#include "OrthancException.h"
namespace Orthanc
{
namespace Logging
{
const char* EnumerationToString(LogLevel level)
{
switch (level)
{
case LogLevel_ERROR:
return "ERROR";
case LogLevel_WARNING:
return "WARNING";
case LogLevel_INFO:
return "INFO";
case LogLevel_TRACE:
return "TRACE";
default:
throw OrthancException(ErrorCode_ParameterOutOfRange);
}
}
LogLevel StringToLogLevel(const char *level)
{
if (strcmp(level, "ERROR") == 0)
{
return LogLevel_ERROR;
}
else if (strcmp(level, "WARNING") == 0)
{
return LogLevel_WARNING;
}
else if (strcmp(level, "INFO") == 0)
{
return LogLevel_INFO;
}
else if (strcmp(level, "TRACE") == 0)
{
return LogLevel_TRACE;
}
else
{
throw OrthancException(ErrorCode_InternalError);
}
}
}
}
#if ORTHANC_ENABLE_LOGGING != 1
namespace Orthanc
{
  namespace Logging
  {
    // Logging is compiled out (ORTHANC_ENABLE_LOGGING != 1): every entry
    // point below is a no-op so that callers can link unchanged.

    void InitializePluginContext(void* pluginContext)
    {
    }

    void Initialize()
    {
    }

    void Finalize()
    {
    }

    void Reset()
    {
    }

    void Flush()
    {
    }

    void EnableInfoLevel(bool enabled)
    {
    }

    void EnableTraceLevel(bool enabled)
    {
    }

    // Both verbose levels always report as disabled in this configuration
    bool IsTraceLevelEnabled()
    {
      return false;
    }

    bool IsInfoLevelEnabled()
    {
      return false;
    }

    void SetTargetFile(const std::string& path)
    {
    }

    void SetTargetFolder(const std::string& path)
    {
    }
  }
}
#elif ORTHANC_ENABLE_LOGGING_STDIO == 1
/*********************************************************
* Logger compatible with <stdio.h> OR logger that sends its
* output to the emscripten html5 api (depending on the
* definition of __EMSCRIPTEN__)
*********************************************************/
#include <stdio.h>
#ifdef __EMSCRIPTEN__
# include <emscripten/html5.h>
#endif
namespace Orthanc
{
namespace Logging
{
    // Run-time verbosity toggles shared by both stdio and emscripten paths
    static bool infoEnabled_ = false;
    static bool traceEnabled_ = false;

#ifdef __EMSCRIPTEN__
    // Under emscripten, route each severity to the browser console API
    static void ErrorLogFunc(const char* msg)
    {
      emscripten_console_error(msg);
    }

    static void WarningLogFunc(const char* msg)
    {
      emscripten_console_warn(msg);
    }

    static void InfoLogFunc(const char* msg)
    {
      emscripten_console_log(msg);
    }

    // No dedicated trace channel in the html5 API: reuse the plain log
    static void TraceLogFunc(const char* msg)
    {
      emscripten_console_log(msg);
    }
#else /* __EMSCRIPTEN__ not #defined */
static void ErrorLogFunc(const char* msg)
{
fprintf(stderr, "E: %s\n", msg);
}
static void WarningLogFunc(const char*)
{
fprintf(stdout, "W: %s\n", msg);
}
static void InfoLogFunc(const char*)
{
fprintf(stdout, "I: %s\n", msg);
}
static void TraceLogFunc(const char*)
{
fprintf(stdout, "T: %s\n", msg);
}
#endif /* __EMSCRIPTEN__ */
    // Flushes the accumulated message to the appropriate log function,
    // depending on the message severity and the run-time toggles.
    InternalLogger::~InternalLogger()
    {
      std::string message = messageStream_.str();

      switch (level_)
      {
        case LogLevel_ERROR:
          ErrorLogFunc(message.c_str());
          break;

        case LogLevel_WARNING:
          WarningLogFunc(message.c_str());
          break;

        case LogLevel_INFO:
          if (infoEnabled_)
          {
            InfoLogFunc(message.c_str());
            // TODO: stone_console_info(message_.c_str());
          }
          break;

        case LogLevel_TRACE:
          if (traceEnabled_)
          {
            TraceLogFunc(message.c_str());
          }
          break;

        default:
        {
          // Unknown severity: report the problem on the error channel
          std::stringstream ss;
          ss << "Unknown log level (" << level_ << ") for message: " << message;
          std::string s = ss.str();
          ErrorLogFunc(s.c_str());
        }
      }
    }

    // The functions below are no-ops in this back-end: there is no plugin
    // context and no target file or folder for a stdio/emscripten logger.
    void InitializePluginContext(void* pluginContext)
    {
    }

    void Initialize()
    {
    }

    void Finalize()
    {
    }

    void Reset()
    {
    }

    void Flush()
    {
    }

    void EnableInfoLevel(bool enabled)
    {
      infoEnabled_ = enabled;

      if (!enabled)
      {
        // Also disable the "TRACE" level when info-level debugging is disabled
        traceEnabled_ = false;
      }
    }

    bool IsInfoLevelEnabled()
    {
      return infoEnabled_;
    }

    void EnableTraceLevel(bool enabled)
    {
      traceEnabled_ = enabled;
    }

    bool IsTraceLevelEnabled()
    {
      return traceEnabled_;
    }

    void SetTargetFile(const std::string& path)
    {
    }

    void SetTargetFolder(const std::string& path)
    {
    }
}
}
#else
/*********************************************************
* Logger compatible with the Orthanc plugin SDK, or that
* mimics behavior from Google Log.
*********************************************************/
#include <cassert>
namespace
{
  /**
   * This is minimal implementation of the context for an Orthanc
   * plugin, limited to the logging facilities, and that is binary
   * compatible with the definitions of "OrthancCPlugin.h"
   **/
  typedef enum
  {
    _OrthancPluginService_LogInfo = 1,
    _OrthancPluginService_LogWarning = 2,
    _OrthancPluginService_LogError = 3,
    _OrthancPluginService_INTERNAL = 0x7fffffff
  } _OrthancPluginService;

  typedef struct _OrthancPluginContext_t
  {
    void* pluginsManager;
    const char* orthancVersion;
    void (*Free) (void* buffer);
    int32_t (*InvokeService) (struct _OrthancPluginContext_t* context,
                              _OrthancPluginService service,
                              const void* params);
  } OrthancPluginContext;
}

#include "Enumerations.h"
#include "SystemToolbox.h"

#include <fstream>
#include <boost/filesystem.hpp>
#include <boost/thread.hpp>
#include <boost/date_time/posix_time/posix_time.hpp>

namespace
{
  // Holds the active output streams plus the optional log file they may
  // point to; replaced wholesale by Reset() and the SetTarget*() functions
  struct LoggingStreamsContext
  {
    std::string targetFile_;
    std::string targetFolder_;

    std::ostream* error_;
    std::ostream* warning_;
    std::ostream* info_;

    std::unique_ptr<std::ofstream> file_;

    // By default, every severity goes to stderr
    LoggingStreamsContext() :
      error_(&std::cerr),
      warning_(&std::cerr),
      info_(&std::cerr)
    {
    }
  };
}

// Global state of the logging engine, protected by loggingStreamsMutex_.
// pluginContext_ being non-NULL means "plugin mode" (messages are routed
// to the Orthanc plugin SDK instead of the streams above).
static std::unique_ptr<LoggingStreamsContext> loggingStreamsContext_;
static boost::mutex loggingStreamsMutex_;
static Orthanc::Logging::NullStream nullStream_;
static OrthancPluginContext* pluginContext_ = NULL;
static bool infoEnabled_ = false;
static bool traceEnabled_ = false;
namespace Orthanc
{
namespace Logging
{
static void GetLogPath(boost::filesystem::path& log,
boost::filesystem::path& link,
const std::string& suffix,
const std::string& directory)
{
/**
From Google Log documentation:
Unless otherwise specified, logs will be written to the filename
"<program name>.<hostname>.<user name>.log<suffix>.",
followed by the date, time, and pid (you can't prevent the date,
time, and pid from being in the filename).
In this implementation : "hostname" and "username" are not used
**/
boost::posix_time::ptime now = boost::posix_time::second_clock::local_time();
boost::filesystem::path root(directory);
boost::filesystem::path exe(SystemToolbox::GetPathToExecutable());
if (!boost::filesystem::exists(root) ||
!boost::filesystem::is_directory(root))
{
throw OrthancException(ErrorCode_CannotWriteFile);
}
char date[64];
sprintf(date, "%04d%02d%02d-%02d%02d%02d.%d",
static_cast<int>(now.date().year()),
now.date().month().as_number(),
now.date().day().as_number(),
static_cast<int>(now.time_of_day().hours()),
static_cast<int>(now.time_of_day().minutes()),
static_cast<int>(now.time_of_day().seconds()),
SystemToolbox::GetProcessId());
std::string programName = exe.filename().replace_extension("").string();
log = (root / (programName + ".log" + suffix + "." + std::string(date)));
link = (root / (programName + ".log" + suffix));
}
    // Creates a new timestamped log file inside "directory", opens it into
    // "file", and (on UNIX-like systems only) refreshes the stable symlink
    // that points to the current log file.
    static void PrepareLogFolder(std::unique_ptr<std::ofstream>& file,
                                 const std::string& suffix,
                                 const std::string& directory)
    {
      boost::filesystem::path log, link;
      GetLogPath(log, link, suffix, directory);

#if !defined(_WIN32) && (defined(__unix__) || defined(__unix) || (defined(__APPLE__) && defined(__MACH__)))
      // Symbolic links are only created on UNIX-like platforms
      boost::filesystem::remove(link);
      boost::filesystem::create_symlink(log.filename(), link);
#endif

      file.reset(new std::ofstream(log.string().c_str()));
    }
// "loggingStreamsMutex_" must be locked
static void CheckFile(std::unique_ptr<std::ofstream>& f)
{
if (loggingStreamsContext_->file_.get() == NULL ||
!loggingStreamsContext_->file_->is_open())
{
throw OrthancException(ErrorCode_CannotWriteFile);
}
}
static void GetLinePrefix(std::string& prefix,
LogLevel level,
const char* file,
int line)
{
boost::filesystem::path path(file);
boost::posix_time::ptime now = boost::posix_time::microsec_clock::local_time();
boost::posix_time::time_duration duration = now.time_of_day();
/**
From Google Log documentation:
"Log lines have this form:
Lmmdd hh:mm:ss.uuuuuu threadid file:line] msg...
where the fields are defined as follows:
L A single character, representing the log level (eg 'I' for INFO)
mm The month (zero padded; ie May is '05')
dd The day (zero padded)
hh:mm:ss.uuuuuu Time in hours, minutes and fractional seconds
threadid The space-padded thread ID as returned by GetTID() (this matches the PID on Linux)
file The file name
line The line number
msg The user-supplied message"
In this implementation, "threadid" is not printed.
**/
char c;
switch (level)
{
case LogLevel_ERROR:
c = 'E';
break;
case LogLevel_WARNING:
c = 'W';
break;
case LogLevel_INFO:
c = 'I';
break;
case LogLevel_TRACE:
c = 'T';
break;
default:
throw OrthancException(ErrorCode_InternalError);
}
char date[64];
sprintf(date, "%c%02d%02d %02d:%02d:%02d.%06d ",
c,
now.date().month().as_number(),
now.date().day().as_number(),
static_cast<int>(duration.hours()),
static_cast<int>(duration.minutes()),
static_cast<int>(duration.seconds()),
static_cast<int>(duration.fractional_seconds()));
prefix = (std::string(date) + path.filename().string() + ":" +
boost::lexical_cast<std::string>(line) + "] ");
}
    // Switches the engine to "plugin mode": subsequent messages are routed
    // to the Orthanc plugin SDK instead of the logging streams.
    void InitializePluginContext(void* pluginContext)
    {
      assert(sizeof(_OrthancPluginService) == sizeof(int32_t));

      boost::mutex::scoped_lock lock(loggingStreamsMutex_);
      loggingStreamsContext_.reset(NULL);
      pluginContext_ = reinterpret_cast<OrthancPluginContext*>(pluginContext);
    }

    // Creates the default logging context (all output to stderr), if none
    void Initialize()
    {
      boost::mutex::scoped_lock lock(loggingStreamsMutex_);

      if (loggingStreamsContext_.get() == NULL)
      {
        loggingStreamsContext_.reset(new LoggingStreamsContext);
      }
    }

    // Destroys the logging context; logging after this point is an error
    void Finalize()
    {
      boost::mutex::scoped_lock lock(loggingStreamsMutex_);
      loggingStreamsContext_.reset(NULL);
    }

    // Replaces the logging context with a fresh one, re-applying the
    // previously configured target file or folder (if any)
    void Reset()
    {
      // Recover the old logging context
      std::unique_ptr<LoggingStreamsContext> old;

      {
        boost::mutex::scoped_lock lock(loggingStreamsMutex_);
        if (loggingStreamsContext_.get() == NULL)
        {
          return;
        }
        else
        {
#if __cplusplus < 201103L
          old.reset(loggingStreamsContext_.release());
#else
          old = std::move(loggingStreamsContext_);
#endif

          // Create a new logging context,
          loggingStreamsContext_.reset(new LoggingStreamsContext);
        }
      }

      // Re-apply the old target outside of the block above, because
      // SetTargetFolder() and SetTargetFile() take the mutex themselves
      if (!old->targetFolder_.empty())
      {
        SetTargetFolder(old->targetFolder_);
      }
      else if (!old->targetFile_.empty())
      {
        SetTargetFile(old->targetFile_);
      }
    }
    // Run-time verbosity toggles: TRACE implies INFO, and disabling INFO
    // disables TRACE, so the two flags stay consistent.
    void EnableInfoLevel(bool enabled)
    {
      infoEnabled_ = enabled;

      if (!enabled)
      {
        // Also disable the "TRACE" level when info-level debugging is disabled
        traceEnabled_ = false;
      }
    }

    bool IsInfoLevelEnabled()
    {
      return infoEnabled_;
    }

    void EnableTraceLevel(bool enabled)
    {
      traceEnabled_ = enabled;

      if (enabled)
      {
        // Also enable the "INFO" level when trace-level debugging is enabled
        infoEnabled_ = true;
      }
    }

    bool IsTraceLevelEnabled()
    {
      return traceEnabled_;
    }
    // Redirects warning/error/info output to a fresh timestamped file
    // inside "path" (Google Log-style folder logging)
    void SetTargetFolder(const std::string& path)
    {
      boost::mutex::scoped_lock lock(loggingStreamsMutex_);
      if (loggingStreamsContext_.get() != NULL)
      {
        PrepareLogFolder(loggingStreamsContext_->file_, "" /* no suffix */, path);
        CheckFile(loggingStreamsContext_->file_);

        loggingStreamsContext_->targetFile_.clear();
        loggingStreamsContext_->targetFolder_ = path;
        loggingStreamsContext_->warning_ = loggingStreamsContext_->file_.get();
        loggingStreamsContext_->error_ = loggingStreamsContext_->file_.get();
        loggingStreamsContext_->info_ = loggingStreamsContext_->file_.get();
      }
    }

    // Redirects warning/error/info output to the single file "path",
    // opened in append mode
    void SetTargetFile(const std::string& path)
    {
      boost::mutex::scoped_lock lock(loggingStreamsMutex_);

      if (loggingStreamsContext_.get() != NULL)
      {
        loggingStreamsContext_->file_.reset(new std::ofstream(path.c_str(), std::fstream::app));
        CheckFile(loggingStreamsContext_->file_);

        loggingStreamsContext_->targetFile_ = path;
        loggingStreamsContext_->targetFolder_.clear();
        loggingStreamsContext_->warning_ = loggingStreamsContext_->file_.get();
        loggingStreamsContext_->error_ = loggingStreamsContext_->file_.get();
        loggingStreamsContext_->info_ = loggingStreamsContext_->file_.get();
      }
    }
    // Selects the output stream for one message and writes the Google
    // Log-style line prefix. In standalone mode the global mutex stays
    // locked until the destructor terminates the line, so that no change
    // to the output streams can interleave with the message.
    InternalLogger::InternalLogger(LogLevel level,
                                   const char* file,
                                   int line) :
      lock_(loggingStreamsMutex_, boost::defer_lock_t()),
      level_(level),
      stream_(&nullStream_)  // By default, logging to "/dev/null" is simulated
    {
      if (pluginContext_ != NULL)
      {
        // We are logging using the Orthanc plugin SDK

        if (level == LogLevel_TRACE)
        {
          // No trace level in plugins, directly exit as the stream is
          // set to "/dev/null"
          return;
        }
        else
        {
          // Accumulate the message into a string that the destructor
          // will forward to the plugin SDK
          pluginStream_.reset(new std::stringstream);
          stream_ = pluginStream_.get();
        }
      }
      else
      {
        // We are logging in a standalone application, not inside an Orthanc plugin

        if ((level == LogLevel_INFO && !infoEnabled_) ||
            (level == LogLevel_TRACE && !traceEnabled_))
        {
          // This logging level is disabled, directly exit as the
          // stream is set to "/dev/null"
          return;
        }

        std::string prefix;
        GetLinePrefix(prefix, level, file, line);

        {
          // We lock the global mutex. The mutex is locked until the
          // destructor is called: No change in the output can be done.
          lock_.lock();

          if (loggingStreamsContext_.get() == NULL)
          {
            fprintf(stderr, "ERROR: Trying to log a message after the finalization of the logging engine\n");
            lock_.unlock();
            return;
          }

          switch (level)
          {
            case LogLevel_ERROR:
              stream_ = loggingStreamsContext_->error_;
              break;

            case LogLevel_WARNING:
              stream_ = loggingStreamsContext_->warning_;
              break;

            case LogLevel_INFO:
            case LogLevel_TRACE:
              stream_ = loggingStreamsContext_->info_;
              break;

            default:
              throw OrthancException(ErrorCode_InternalError);
          }

          if (stream_ == &nullStream_)
          {
            // The logging is disabled for this level, we can release
            // the global mutex.
            lock_.unlock();
          }
          else
          {
            try
            {
              (*stream_) << prefix;
            }
            catch (...)
            {
              // Something is going really wrong, probably running out of
              // memory. Fallback to a degraded mode.
              stream_ = loggingStreamsContext_->error_;
              (*stream_) << "E???? ??:??:??.?????? ] ";
            }
          }
        }
      }
    }
    // Terminates the log line: either forwards the accumulated message to
    // the plugin SDK, or writes the newline and flushes the stream. The
    // global mutex taken by the constructor is released automatically
    // when "lock_" is destroyed.
    InternalLogger::~InternalLogger()
    {
      if (pluginStream_.get() != NULL)
      {
        // We are logging through the Orthanc SDK

        std::string message = pluginStream_->str();

        if (pluginContext_ != NULL)
        {
          switch (level_)
          {
            case LogLevel_ERROR:
              pluginContext_->InvokeService(pluginContext_, _OrthancPluginService_LogError, message.c_str());
              break;

            case LogLevel_WARNING:
              pluginContext_->InvokeService(pluginContext_, _OrthancPluginService_LogWarning, message.c_str());
              break;

            case LogLevel_INFO:
              pluginContext_->InvokeService(pluginContext_, _OrthancPluginService_LogInfo, message.c_str());
              break;

            default:
              // TRACE is never sent to plugins (see the constructor)
              break;
          }
        }
      }
      else if (stream_ != &nullStream_)
      {
        *stream_ << "\n";
        stream_->flush();
      }
    }
void Flush()
{
if (pluginContext_ != NULL)
{
boost::mutex::scoped_lock lock(loggingStreamsMutex_);
if (loggingStreamsContext_.get() != NULL &&
loggingStreamsContext_->file_.get() != NULL)
{
loggingStreamsContext_->file_->flush();
}
}
}
    // Replaces the current logging context with a new one whose three
    // severity channels point to the given caller-owned streams.
    void SetErrorWarnInfoLoggingStreams(std::ostream& errorStream,
                                        std::ostream& warningStream,
                                        std::ostream& infoStream)
    {
      boost::mutex::scoped_lock lock(loggingStreamsMutex_);

      loggingStreamsContext_.reset(new LoggingStreamsContext);
      loggingStreamsContext_->error_ = &errorStream;
      loggingStreamsContext_->warning_ = &warningStream;
      loggingStreamsContext_->info_ = &infoStream;
    }
}
}
#endif // ORTHANC_ENABLE_LOGGING
|
package seedu.planner.logic.commands;
import static java.util.Objects.requireNonNull;
import static seedu.planner.commons.core.Messages.MESSAGE_NOT_OFFERED_MODULES;
import static seedu.planner.commons.util.CollectionUtil.getAnyOne;
import static seedu.planner.logic.parser.CliSyntax.PREFIX_CODE;
import java.util.Set;
import java.util.logging.Logger;
import seedu.planner.commons.core.EventsCenter;
import seedu.planner.commons.core.LogsCenter;
import seedu.planner.commons.events.ui.FindEvent;
import seedu.planner.logic.CommandHistory;
import seedu.planner.logic.commands.exceptions.CommandException;
import seedu.planner.model.Model;
import seedu.planner.model.module.Module;
/**
* A class representing the {@code find} command.
*/
/**
 * Implements the {@code find} command, which looks up and displays
 * information about a single module.
 */
public class FindCommand extends Command {

    public static final String COMMAND_WORD = "find";

    public static final String MESSAGE_USAGE = COMMAND_WORD
            + ": Retrieves information about the specified module. "
            + "Parameters: "
            + PREFIX_CODE + "MODULE CODE "
            + "Example: " + COMMAND_WORD + " "
            + PREFIX_CODE + "CS1010 ";

    public static final String MESSAGE_SUCCESS = "Retrieved module information for %1$s";

    private static final Logger logger = LogsCenter.getLogger(FindCommand.class);

    private Module moduleToFind;

    public FindCommand(Module module) {
        moduleToFind = module;
    }

    @Override
    public CommandResult execute(Model model, CommandHistory commandHistory) throws CommandException {
        requireNonNull(model);

        // Reject modules that are not offered at all.
        if (!model.isModuleOffered(moduleToFind)) {
            logger.fine("In find command: " + moduleToFind + " not offered");
            throw new CommandException(String.format(MESSAGE_NOT_OFFERED_MODULES, moduleToFind));
        }

        // Finalize the single requested module and broadcast it to the UI.
        Module finalized = getAnyOne(model.finalizeModules(Set.of(moduleToFind))).get();
        EventsCenter.getInstance().post(new FindEvent(finalized));

        return new CommandResult(String.format(MESSAGE_SUCCESS, moduleToFind));
    }

    @Override
    public boolean equals(Object other) {
        // Same object, or another FindCommand targeting the same module.
        return other == this
                || (other instanceof FindCommand
                        && moduleToFind.equals(((FindCommand) other).moduleToFind));
    }
}
|
-- Personal-data table.
-- BUG FIX: the original had a trailing comma after `no_telp`, which is a
-- syntax error, and the AUTO_INCREMENT column lacked the key that MySQL
-- requires; both are fixed by adding PRIMARY KEY (`id`).
CREATE TABLE IF NOT EXISTS `biodata`(
  `id` INT(11) NOT NULL AUTO_INCREMENT,
  `nama_lengkap` VARCHAR(30) NOT NULL,
  `tempat_lahir` VARCHAR(20) NOT NULL,
  `tanggal_lahir` DATE NOT NULL,
  `jenis_kelamin` VARCHAR(10) NOT NULL,
  `alamat` VARCHAR(30),
  `no_telp` VARCHAR(15),
  PRIMARY KEY (`id`)
)
|
#!/bin/sh
# Run a local Halite match on a 32x32 map between the aggressive and the
# passive SVM bots; -vvv enables verbose engine output, and the replay is
# written into replays/.
./halite --replay-directory replays/ -vvv --width 32 --height 32 "python3 SVMBotAggressive.py" "python3 SVMBotPassive.py"
|
/// Simple error type that borrows its message.
///
/// BUG FIX: `msg: &str` inside a struct requires an explicit lifetime
/// parameter (`error[E0106]: missing lifetime specifier`); the struct is
/// now generic over `'a`, tying the error to the borrowed message.
struct Error<'a> {
    msg: &'a str,
}

/// Wraps the given message in an `Error` (the returned error borrows
/// `error_msg`, via lifetime elision).
fn create_error(error_msg: &str) -> Error<'_> {
    Error { msg: error_msg }
}

fn main() {
    let custom_error = create_error("File not found");
    println!("Custom Error: Error {{msg: \"{}\"}}", custom_error.msg);
}
package io.opensphere.core.orwell;
/**
* A container in which the statistics describing the operating system are stored.
*/
public class SystemStatistics
{
    /** The processor architecture of the system. */
    private String myArchitecture;

    /** The number of available processors reported by the system. */
    private long myAvailableProcessors;

    /** The name of the operating system. */
    private String myOperatingSystemName;

    /** The version of the operating system. */
    private String myOperatingSystemVersion;

    /** The average load on the system captured at the time of metrics gathering. */
    private double mySystemLoadAverage;

    /** The version of the Java Management Specification implemented by the underlying virtual machine. */
    private String myManagementSpecVersion;

    /** The name of the Java specification implemented by the underlying virtual machine. */
    private String mySpecName;

    /** The vendor providing the implementation of the Java specification in the underlying virtual machine. */
    private String mySpecVendor;

    /** The version of the Java specification implemented by the underlying virtual machine. */
    private String mySpecVersion;

    /** The name of the underlying virtual machine. */
    private String myJavaVmName;

    /** The vendor that provided the implementation of the underlying virtual machine. */
    private String myJavaVmVendor;

    /** The version of the underlying virtual machine. */
    private String myJavaVmVersion;

    /** @return the value stored in the {@link #myArchitecture} field. */
    public String getArchitecture()
    {
        return myArchitecture;
    }

    /** @param pArchitecture the value to store in the {@link #myArchitecture} field. */
    public void setArchitecture(String pArchitecture)
    {
        myArchitecture = pArchitecture;
    }

    /** @return the value stored in the {@link #myAvailableProcessors} field. */
    public long getAvailableProcessors()
    {
        return myAvailableProcessors;
    }

    /** @param pAvailableProcessors the value to store in the {@link #myAvailableProcessors} field. */
    public void setAvailableProcessors(long pAvailableProcessors)
    {
        myAvailableProcessors = pAvailableProcessors;
    }

    /** @return the value stored in the {@link #myOperatingSystemName} field. */
    public String getOperatingSystemName()
    {
        return myOperatingSystemName;
    }

    /** @param pOperatingSystemName the value to store in the {@link #myOperatingSystemName} field. */
    public void setOperatingSystemName(String pOperatingSystemName)
    {
        myOperatingSystemName = pOperatingSystemName;
    }

    /** @return the value stored in the {@link #myOperatingSystemVersion} field. */
    public String getOperatingSystemVersion()
    {
        return myOperatingSystemVersion;
    }

    /** @param pOperatingSystemVersion the value to store in the {@link #myOperatingSystemVersion} field. */
    public void setOperatingSystemVersion(String pOperatingSystemVersion)
    {
        myOperatingSystemVersion = pOperatingSystemVersion;
    }

    /** @return the value stored in the {@link #mySystemLoadAverage} field. */
    public double getSystemLoadAverage()
    {
        return mySystemLoadAverage;
    }

    /** @param pSystemLoadAverage the value to store in the {@link #mySystemLoadAverage} field. */
    public void setSystemLoadAverage(double pSystemLoadAverage)
    {
        mySystemLoadAverage = pSystemLoadAverage;
    }

    /** @return the value stored in the {@link #myManagementSpecVersion} field. */
    public String getManagementSpecVersion()
    {
        return myManagementSpecVersion;
    }

    /** @param pManagementSpecVersion the value to store in the {@link #myManagementSpecVersion} field. */
    public void setManagementSpecVersion(String pManagementSpecVersion)
    {
        myManagementSpecVersion = pManagementSpecVersion;
    }

    /** @return the value stored in the {@link #mySpecName} field. */
    public String getSpecName()
    {
        return mySpecName;
    }

    /** @param pSpecName the value to store in the {@link #mySpecName} field. */
    public void setSpecName(String pSpecName)
    {
        mySpecName = pSpecName;
    }

    /** @return the value stored in the {@link #mySpecVendor} field. */
    public String getSpecVendor()
    {
        return mySpecVendor;
    }

    /** @param pSpecVendor the value to store in the {@link #mySpecVendor} field. */
    public void setSpecVendor(String pSpecVendor)
    {
        mySpecVendor = pSpecVendor;
    }

    /** @return the value stored in the {@link #mySpecVersion} field. */
    public String getSpecVersion()
    {
        return mySpecVersion;
    }

    /** @param pSpecVersion the value to store in the {@link #mySpecVersion} field. */
    public void setSpecVersion(String pSpecVersion)
    {
        mySpecVersion = pSpecVersion;
    }

    /** @return the value stored in the {@link #myJavaVmName} field. */
    public String getJavaVmName()
    {
        return myJavaVmName;
    }

    /** @param pJavaVmName the value to store in the {@link #myJavaVmName} field. */
    public void setJavaVmName(String pJavaVmName)
    {
        myJavaVmName = pJavaVmName;
    }

    /** @return the value stored in the {@link #myJavaVmVendor} field. */
    public String getJavaVmVendor()
    {
        return myJavaVmVendor;
    }

    /** @param pJavaVmVendor the value to store in the {@link #myJavaVmVendor} field. */
    public void setJavaVmVendor(String pJavaVmVendor)
    {
        myJavaVmVendor = pJavaVmVendor;
    }

    /** @return the value stored in the {@link #myJavaVmVersion} field. */
    public String getJavaVmVersion()
    {
        return myJavaVmVersion;
    }

    /** @param pJavaVmVersion the value to store in the {@link #myJavaVmVersion} field. */
    public void setJavaVmVersion(String pJavaVmVersion)
    {
        myJavaVmVersion = pJavaVmVersion;
    }
}
|
from typing import List
def check_palindromes(strings: List[str]) -> List[bool]:
    """Return, for each input string, whether it is a palindrome.

    Non-alphanumeric characters are ignored and the comparison is
    case-insensitive, so "A man a plan a canal Panama" counts as a
    palindrome.
    """
    def is_palindrome(s: str) -> bool:
        # Normalize: keep only alphanumerics, fold case, then compare
        # the cleaned string against its reverse.
        cleaned = ''.join(ch for ch in s if ch.isalnum()).lower()
        return cleaned == cleaned[::-1]

    return [is_palindrome(s) for s in strings]
# Demo: exercise the function on a few sample strings
strings = ["racecar", "hello", "A man a plan a canal Panama", "12321"]
print(check_palindromes(strings)) # Output: [True, False, True, True]
"""
Calculate the total sum of a given list of numbers using Python
"""
# Function to calculate the total sum of a given list of numbers
def get_total_sum(num_list):
    """Return the sum of all numbers in ``num_list``.

    Args:
        num_list: An iterable of numbers; an empty input yields 0.

    Returns:
        The total of the numbers (int or float, following the inputs).
    """
    # The built-in sum() replaces the manual accumulation loop.
    return sum(num_list)
if __name__ == '__main__':
    # Demo: compute and print the sum of a small sample list (prints 15)
    num_list = [1,2,3,4,5]
    total_sum = get_total_sum(num_list)
    print(total_sum)
<gh_stars>1-10
# 2010, 2011 <NAME>, <EMAIL>
#
# Main file of Slackware package viewer.
#
# A good reference, is the qtsamuri example_02.rb
# Load our QtRuby main window class:
require 'slackware/gui/dobbage_window'
require 'optparse'
module Slackware::Gui
class Args
def self.parse(*args)
options = {}
opts = OptionParser.new(args) {|opts|
opts.banner = "Usage: dobbage [options]"
opts.separator("")
opts.separator("Optional Flags:")
opts.on_tail("-v","--version","show version information") {|o|
puts <<-EOF
Slackware Linux Version: #{SLACKWARE_VERSION}
slack-utils version: #{UTILS_VERSION}
dobbage version: #{DOBBAGE_VERSION}
EOF
exit
}
opts.on_tail("-h","--help","show this help message") {|o|
puts opts
exit
}
}
opts.parse!
return options
end
end
# Launch the application:
def self::launch(*args)
$PROGRAM_NAME = "dobbage"
args.flatten!.compact!
options = Slackware::Gui::Args.parse(args)
app = Qt::Application.new(args)
window = DobbageWindow.new(args)
window.show
app.exec
end
end
|
<reponame>openspending/subsidystories.eu
'use strict';
var subsidyStories = require('../services/subsidy-stories');
module.exports = {
entirePeriod: function(state) {
return subsidyStories.mergePeriods(state.period);
}
};
|
#!/bin/bash
#
# Download, build and install protobuf-3.6.1, including the python3
# module with the C++ backend, from the official release archives.
# Requires: wget, unzip, a build toolchain, sudo rights, network access.
#
# FIX: all parameter and command expansions are now quoted (ShellCheck
# SC2086) so the script survives a $HOME containing spaces.
set -e

folder=${HOME}/src
mkdir -p "$folder"

echo "** Download protobuf-3.6.1 sources"
cd "$folder"
if [ ! -f protobuf-python-3.6.1.zip ]; then
  wget https://github.com/protocolbuffers/protobuf/releases/download/v3.6.1/protobuf-python-3.6.1.zip
fi
if [ ! -f protoc-3.6.1-linux-aarch_64.zip ]; then
  wget https://github.com/protocolbuffers/protobuf/releases/download/v3.6.1/protoc-3.6.1-linux-aarch_64.zip
fi

echo "** Install protoc"
unzip protobuf-python-3.6.1.zip
unzip protoc-3.6.1-linux-aarch_64.zip -d protoc-3.6.1
sudo cp protoc-3.6.1/bin/protoc /usr/local/bin/protoc

echo "** Build and install protobuf-3.6.1 libraries"
export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=cpp
cd protobuf-3.6.1/
./autogen.sh
./configure --prefix=/usr/local
make -j"$(nproc)"
make check
sudo make install
sudo ldconfig

echo "** Update python3 protobuf module"
# remove previous installation of python3 protobuf module
sudo pip3 uninstall -y protobuf
sudo pip3 install Cython
cd python/
# force compilation with c++11 standard
# NOTE(review): this patches setup.py by line number, which is brittle
# across protobuf versions, but the version is pinned to 3.6.1 here.
sed -i '205s/if v:/if True:/' setup.py
python3 setup.py build --cpp_implementation
python3 setup.py test --cpp_implementation
sudo python3 setup.py install --cpp_implementation

echo "** Build protobuf-3.6.1 successfully"
|
# Make sure at least one lowercase letter, one digit and one special character is present
import string
import random
def generate_password(length):
    """Generate a random password of exactly ``length`` characters.

    The password is guaranteed to contain at least one lowercase letter,
    one digit and one punctuation character; the remaining characters are
    drawn from letters, digits and punctuation, and the result is
    shuffled so the guaranteed characters are not in fixed positions.

    Args:
        length: Desired password length; must be at least 3.

    Returns:
        The generated password string.

    Raises:
        ValueError: If ``length`` is smaller than 3, since the three
            guaranteed character classes cannot all fit.
    """
    if length < 3:
        raise ValueError("length must be >= 3 to fit all required character classes")
    # BUG FIX: the original added `length` lowercase letters here (a loop
    # instead of a single random.choice), so the generated password had
    # 2*length - 1 characters instead of `length`.
    chars = [
        random.choice(string.ascii_lowercase),   # guaranteed lowercase letter
        random.choice(string.digits),            # guaranteed digit
        random.choice(string.punctuation),       # guaranteed special character
    ]
    # Fill the remaining slots with arbitrary allowed characters.
    for _ in range(length - 3):
        chars.append(random.choice(string.ascii_letters + string.digits + string.punctuation))
    # Shuffle so the mandatory characters are not always at the front.
    random.shuffle(chars)
    return ''.join(chars)
/**
 * Design-token scale for font sizes, in pixels. Each property is typed
 * with its literal value so the tokens stay usable as compile-time
 * constants.
 */
export type FontSize = {
  xxxSmall: 2
  xxSmall: 4
  xSmall: 8
  small: 12
  regular: 14
  medium: 16
  large: 24
  xLarge: 32
  xxLarge: 40
  xxxLarge: 48
}

/** Runtime counterpart of the {@link FontSize} type. */
export const FontSize: FontSize = {
  xxxSmall: 2,
  xxSmall: 4,
  xSmall: 8,
  small: 12,
  regular: 14,
  medium: 16,
  large: 24,
  xLarge: 32,
  xxLarge: 40,
  xxxLarge: 48,
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = void 0;
var _isType = require("./isType.js");
// Babel helper: enforces that a transpiled class constructor is invoked with `new`.
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
// Babel helper: installs the given property descriptors on a target object.
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
// Babel helper: attaches prototype members and static members to a constructor.
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
/**
* Represents an Item of an Enum.
* @param {String} key The Enum key.
* @param {Number} value The Enum value.
*/
var EnumItem = /*#__PURE__*/function () {
/* constructor reference so that, this.constructor===EnumItem//=>true */
function EnumItem(key, value) {
var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
_classCallCheck(this, EnumItem);
this.key = key;
this.value = value;
this._options = options;
this._options.ignoreCase = this._options.ignoreCase || false;
}
/**
* Checks if the flagged EnumItem has the passing object.
* @param {EnumItem || String || Number} value The object to check with.
* @return {Boolean} The check result.
*/
_createClass(EnumItem, [{
key: "has",
value: function has(value) {
if (EnumItem.isEnumItem(value)) {
return (this.value & value.value) !== 0;
} else if ((0, _isType.isString)(value)) {
if (this._options.ignoreCase) {
return this.key.toLowerCase().indexOf(value.toLowerCase()) >= 0;
}
return this.key.indexOf(value) >= 0;
} else {
return (this.value & value) !== 0;
}
}
/**
* Checks if the EnumItem is the same as the passing object.
* @param {EnumItem || String || Number} key The object to check with.
* @return {Boolean} The check result.
*/
}, {
key: "is",
value: function is(key) {
if (EnumItem.isEnumItem(key)) {
return this.key === key.key;
} else if ((0, _isType.isString)(key)) {
if (this._options.ignoreCase) {
return this.key.toLowerCase() === key.toLowerCase();
}
return this.key === key;
} else {
return this.value === key;
}
}
/**
* Returns String representation of this EnumItem.
* @return {String} String representation of this EnumItem.
*/
}, {
key: "toString",
value: function toString() {
return this.key;
}
/**
* Returns JSON object representation of this EnumItem.
* @return {String} JSON object representation of this EnumItem.
*/
}, {
key: "toJSON",
value: function toJSON() {
return this.key;
}
/**
* Returns the value to compare with.
* @return {String} The value to compare with.
*/
}, {
key: "valueOf",
value: function valueOf() {
return this.value;
}
}], [{
key: "isEnumItem",
value: function isEnumItem(value) {
return value instanceof EnumItem || (0, _isType.isObject)(value) && value.key !== undefined && value.value !== undefined;
}
}]);
return EnumItem;
}();
exports["default"] = EnumItem;
;
module.exports = exports.default; |
#!/bin/bash
# @DEPRECATED in favor of grantAllPermissionsToDatabases.sh
#
# Grant permissions, to sepgroup and sepgroupreadonly, on a given database using the superuser role.
#
# Usage: grantAllPermissionsToDatabase.sh ${DATABASE_NAME}
#
# @author Daniel Zhang (張道博)

if [ $# -eq 0 ]; then
    echo "Usage: grantAllPermissionsToDatabase.sh \$DATABASE_NAME"
    exit 1
fi

DB_NAME="$1"

# Run a single GRANT statement against the target database as postgres.
run_grant() {
    sudo -u postgres psql -U postgres -c "$1" "$DB_NAME"
}

echo "Granting all permissions on database ${DB_NAME}..."

# The identical six grants are applied to every application schema, in the
# same order as the original hand-unrolled version (36 lines collapsed).
for schema in public ep dz cd dw az; do
    run_grant "GRANT ALL ON ALL TABLES IN SCHEMA ${schema} TO sepgroup;"
    run_grant "GRANT ALL ON ALL FUNCTIONS IN SCHEMA ${schema} TO sepgroup;"
    run_grant "GRANT ALL ON ALL SEQUENCES IN SCHEMA ${schema} TO sepgroup;"
    run_grant "GRANT SELECT ON ALL TABLES IN SCHEMA ${schema} TO sepgroupreadonly;"
    run_grant "GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA ${schema} TO sepgroupreadonly;"
    run_grant "GRANT SELECT ON ALL SEQUENCES IN SCHEMA ${schema} TO sepgroupreadonly;"
done
|
<gh_stars>0
const { compile } = require('./utils')
const crypto = require('crypto')
const babel = require('babel-core')
const fs = require('fs')
module.exports = {
process (src, path) {
let code = src
if (path.endsWith('.vue')) code = compile(path, src)
else if (path.endsWith('.png')) code = `module.exports = "data:image/png;base64,${fs.readFileSync(path, 'base64')}"`
return babel.transform(code, { presets: [
['env', { targets: { node: 'current' }}]
] }).code
},
getCacheKey (fileData, filename, configString) {
return crypto.createHash('md5')
.update(fileData + filename + configString, 'utf8')
.digest('hex')
}
}
|
import hashlib
import hmac

def encryption_oracle(input_string, key):
    """Encrypt ``input_string`` with key-based XOR and append a MAC.

    The MAC is computed with HMAC-SHA256 instead of the original bare
    ``sha256(key + message)`` digest: the naive keyed-hash construction is
    vulnerable to length-extension forgery, HMAC is not.

    NOTE(review): XOR with a reused key is not semantically secure; this
    looks like a cryptanalysis-exercise oracle, not production code.
    ``xor_str`` is assumed to be defined elsewhere in this module.
    """
    mac = hmac.new(key, input_string, hashlib.sha256).digest()
    ciphertext = xor_str(input_string, key)  # Encrypt the input using XOR
    return ciphertext + mac  # Append the MAC to the ciphertext
<filename>src/fee/index.ts
export * from './FeeAPI';
|
def compare_dicts(dict1, dict2):
    """Return True if the two dicts are equal (same keys and values).

    ``dict.__eq__`` already performs a deep, order-insensitive comparison
    and yields a bool, so it can be returned directly instead of the
    original ``if ...: return True / return False`` pattern.
    """
    return dict1 == dict2
# NOTE(review): d1 and d2 must be defined earlier in the full script; as
# excerpted here these two lines would raise NameError — confirm upstream.
result = compare_dicts(d1, d2)
print(result)
<gh_stars>1-10
/*
* If not stated otherwise in this file or this component's LICENSE file the
* following copyright and licenses apply:
*
* Copyright 2021 Metrological
*
* Licensed under the Apache License, Version 2.0 (the License);
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import Lightning from "@lightningjs/core";
/**
 * Text-input cursor component. Either blinks via a looping alpha animation
 * (default) or is shown/hidden statically when `blink` is disabled.
 */
export default class Cursor extends Lightning.Component {
    static _template() {
        // Invisible until show() is called.
        return {
            alpha: 0
        }
    }

    _construct() {
        // Blinking is the default behaviour.
        this._blink = true;
    }

    _init() {
        // Endless 1-second fade loop: transparent -> opaque -> transparent.
        const actions = [
            {p: 'alpha', v: {0: 0, 0.5: 1, 1: 0}}
        ];
        this._blinkAnimation = this.animation({duration: 1, repeat: -1, actions});
    }

    show() {
        if (!this._blink) {
            this.alpha = 1;
            return;
        }
        this._blinkAnimation.start();
    }

    hide() {
        if (!this._blink) {
            this.alpha = 0;
            return;
        }
        this._blinkAnimation.stop();
    }

    set blink(bool) {
        this._blink = bool;
        // Re-apply the current visibility mode immediately while active.
        if (this.active) {
            bool ? this.show() : this.hide();
        }
    }

    get blink() {
        return this._blink;
    }
}
//#####################################################################
// Copyright 2005-2006, <NAME>, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
#include <PhysBAM_Tools/Grids_Uniform/UNIFORM_GRID_ITERATOR_CELL.h>
#include <PhysBAM_Tools/Grids_Uniform/UNIFORM_GRID_ITERATOR_NODE.h>
#include <PhysBAM_Tools/Math_Tools/cube.h>
#include <PhysBAM_Geometry/Implicit_Objects_Uniform/LEVELSET_IMPLICIT_OBJECT.h>
#include <PhysBAM_Dynamics/Level_Sets/REMOVED_PARTICLES_BLENDER_3D.h>
#include <PhysBAM_Dynamics/Level_Sets/UNIFORM_REMOVED_PARTICLES_PROCESSING.h>
#include <PhysBAM_Dynamics/Particles/PARTICLE_LEVELSET_REMOVED_PARTICLES.h>
using namespace PhysBAM;
//#####################################################################
// Function Refine_Grid
//#####################################################################
template<class T> void UNIFORM_REMOVED_PARTICLES_PROCESSING<T>::
Refine_Grid_To_Particle_Size(const LEVELSET_IMPLICIT_OBJECT<VECTOR<T,3> >* water_levelset)
{
    // Preserve the original simulation grid, then rebuild 'grid' at
    // scale_factor times the resolution and resample the water level set
    // onto it (3 ghost cells throughout).
    sim_grid=new GRID<TV>(grid);
    GRID<TV> temp_grid(TV_INT(scale_factor*TV(grid.counts)),grid.domain,true);
    ARRAY<T,VECTOR<int,3> > temp_phi(temp_grid.Domain_Indices(3));
    for(CELL_ITERATOR iterator(temp_grid,3);iterator.Valid();iterator.Next()) temp_phi(iterator.Cell_Index())=(*water_levelset)(iterator.Location());
    grid=temp_grid;water_phi.Resize(grid.Domain_Indices(3),false,false);ARRAY<T,VECTOR<int,3> >::Copy(temp_phi,water_phi);
    // Particle contribution field starts at zero on the refined grid.
    particle_phi.Resize(grid.Domain_Indices(3),false,false);particle_phi.Fill(0);
}
//#####################################################################
// Function Get_Ellipsoid
//#####################################################################
// Compute the blending ellipsoid for particle p: radius along the velocity
// direction (radius_x), radius perpendicular to it (radius_yz), and the
// major axis (unit velocity direction, or an arbitrary axis when nearly
// static, in which case the shape degenerates to a sphere).
template<class T> void UNIFORM_REMOVED_PARTICLES_PROCESSING<T>::
Get_Ellipsoid(PARTICLE_LEVELSET_REMOVED_PARTICLES<VECTOR<T,3> >& particles,int p,T& radius_x,T& radius_yz,VECTOR<T,3>& major_axis) const
{
    T radius=scale*particles.radius(p);
    T velocity_magnitude_squared=particles.V(p).Magnitude_Squared();
    if(velocity_magnitude_squared>1e-8){ // ellipsoid
        T speed=sqrt(velocity_magnitude_squared);
        major_axis=particles.V(p)/speed;
        if(use_velocity_scaling){
            // Stretch along velocity by half a timestep of travel; optionally
            // shrink the perpendicular radii so the ellipsoid keeps the
            // sphere's volume (radius_x*radius_yz^2 == radius^3).
            radius_x=radius+(T).5*dt*speed;
            if(preserve_volume){radius_yz=sqrt(cube(radius)/radius_x);}
            else{radius_yz=radius;}}
        else{radius_x=3*radius;radius_yz=radius;}} // fixed 3:1 aspect ratio
    else{ // sphere
        major_axis=VECTOR<T,3>(1,0,0); // arbitrary axis
        radius_x=radius;radius_yz=radius;}
}
//#####################################################################
// Function Incorporate_Removed_Negative_Particles
//#####################################################################
// Accumulate each removed negative particle's ellipsoidal blending kernel
// into particle_phi (negative contributions, i.e. adding water). Particles
// are bucketed per node of the coarse sim_grid; kernels are rasterized onto
// the refined 'grid' over each ellipsoid's bounding box only.
template<class T> void UNIFORM_REMOVED_PARTICLES_PROCESSING<T>::
Incorporate_Removed_Negative_Particles()
{
    REMOVED_PARTICLES_BLENDER_3D<T> particle_blender(blending_parameter);
    T max_dX_times_particle_power=grid.dX.Max()*particle_power;
    for(NODE_ITERATOR it(*sim_grid);it.Valid();it.Next())if(particle_array(it.Node_Index())){
        PARTICLE_LEVELSET_REMOVED_PARTICLES<VECTOR<T,3> >& particles=*particle_array(it.Node_Index());
        for(int p=1;p<=particles.array_collection->Size();p++){ // PhysBAM arrays are 1-based
            T radius_x,radius_yz;VECTOR<T,3> major_axis;Get_Ellipsoid(particles,p,radius_x,radius_yz,major_axis);
            T one_over_radius_x_squared=(T)1/sqr(radius_x),one_over_radius_yz_squared=(T)1/sqr(radius_yz);
            RANGE<TV> box=particle_blender.Get_Bounding_Box(radius_x,radius_yz,particles.X(p),major_axis);
            VECTOR<int,3> min_index=grid.Clamped_Index_End_Minus_One(box.Minimum_Corner())+VECTOR<int,3>(1,1,1),max_index=grid.Clamped_Index(box.Maximum_Corner());
            for(CELL_ITERATOR jt(grid,RANGE<VECTOR<int,3> >(min_index,max_index));jt.Valid();jt.Next()){
                T distance=particle_blender.Get_Distance(one_over_radius_x_squared,one_over_radius_yz_squared,particles.X(p),major_axis,grid.X(jt.Cell_Index()));
                // C(distance) is the blender's falloff kernel; subtracting deepens phi.
                particle_phi(jt.Cell_Index())-=max_dX_times_particle_power*particle_blender.C(distance);}}}
}
//#####################################################################
// Function Merge_Phi
//#####################################################################
// Combine water and particle level sets by plain addition (particle_phi is
// <= 0 where particles contribute, so this only deepens the water surface).
template<class T> void UNIFORM_REMOVED_PARTICLES_PROCESSING<T>::
Merge_Phi(ARRAY<T,VECTOR<int,3> >& result) const
{
    assert((ARRAY<T,VECTOR<int,3> >::Equal_Dimensions(water_phi,particle_phi)));
    result.Resize(grid.Domain_Indices(3),false);
    result.array=water_phi.array+particle_phi.array;
}
//#####################################################################
// Function Union_Phi
//#####################################################################
// Combine water and particle level sets by CSG union (pointwise minimum).
// The particle field is shifted by a positive offset so that untouched
// cells (particle_phi == 0) read as "outside" and defer to water_phi.
template<class T> void UNIFORM_REMOVED_PARTICLES_PROCESSING<T>::
Union_Phi(ARRAY<T,VECTOR<int,3> >& result) const
{
    assert((ARRAY<T,VECTOR<int,3> >::Equal_Dimensions(water_phi,particle_phi)));
    result.Resize(grid.Domain_Indices(3),false);
    T offset=grid.dX.Max()*blending_parameter*particle_power;
    for(int i=1;i<=water_phi.array.Size();i++) result.array(i)=min(particle_phi.array(i)+offset,water_phi.array(i));
}
//#####################################################################
// Function Blend_Phi
//#####################################################################
// Blend between the Merge_Phi result (near the water surface) and the
// offset particle field (far from it), interpolating over a band of
// 'blend_cells' cells measured by the water phi value.
template<class T> void UNIFORM_REMOVED_PARTICLES_PROCESSING<T>::
Blend_Phi(ARRAY<T,VECTOR<int,3> >& result,const T blend_cells) const
{
    assert((ARRAY<T,VECTOR<int,3> >::Equal_Dimensions(water_phi,particle_phi)));
    result.Resize(grid.Domain_Indices(3),false);
    T offset=grid.dX.Max()*blending_parameter*particle_power;
    T scale=1/(blend_cells*grid.dX.Max());
    for(int i=1;i<=water_phi.array.Size();i++){
        // alpha==0 inside/near water surface (use merged phi), alpha==1
        // beyond the blend band (use particle phi + offset).
        T alpha=clamp(scale*water_phi.array(i),(T)0,(T)1);
        result.array(i)=(1-alpha)*(water_phi.array(i)+particle_phi.array(i))+alpha*(particle_phi.array(i)+offset);}
}
//#####################################################################
template class UNIFORM_REMOVED_PARTICLES_PROCESSING<float>;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template class UNIFORM_REMOVED_PARTICLES_PROCESSING<double>;
#endif
|
<filename>apps/playground/components/Pressable/Pressable.tsx
/**
* This is primarily a fork of React Native's Touchable Mixin.
* It has been repurposed as it's own standalone control for win32,
* as it needs to support a richer set of functionality on the desktop.
* The touchable variants can be rewritten as wrappers around TouchableWin32
* by passing the correct set of props down and managing state correctly.
*
* React Native's Touchable.js file (https://github.com/facebook/react-native/blob/master/Libraries/Components/Touchable/Touchable.js)
* provides an overview over how touchables work and interact with the gesture responder system.
*/
'use strict';
import { atomic, IDefineUseStyling } from '@uifabricshared/foundation-composable';
import { IPressableProps } from './Pressable.props';
import { mergeSettings, ISlotProps } from '@uifabricshared/foundation-settings';
import { useAsPressable } from './useAsPressable';
import { View } from 'react-native';
// Composable pressable control: wraps a plain View with pressability state
// (via useAsPressable) and the theme's styling pipeline (via useStyling).
export const Pressable = atomic<IPressableProps>(
  View,
  (userProps: IPressableProps, useStyling: IDefineUseStyling<IPressableProps, ISlotProps<IPressableProps>>) => {
    // Split off renderStyle so it is not forwarded into the pressability hook.
    const { renderStyle, ...rest } = userProps;
    const { props, state } = useAsPressable(rest);
    const styleProps = useStyling(props);
    // If provided, renderStyle derives the style from the current pressable
    // state, overriding the style that came through props.
    renderStyle && (props.style = renderStyle(state));
    return {
      // Theme-derived settings first; caller props merged onto the root slot.
      slotProps: mergeSettings<ISlotProps<IPressableProps>>(styleProps, { root: props }),
      state: { state }
    };
  }
);
export default Pressable;
import os
import yaml
from django.conf import settings
class FileProcessor:
    """Resolves template file paths and loads page-specific YAML config."""

    def __init__(self, abspath, context):
        # abspath: object exposing .root, the template root dir — TODO confirm
        # context: mutable template-context dict shared with the caller
        self.abspath = abspath
        self.context = context

    def process_file(self, filename, sef_path):
        """Return ``(filepath, page_specific_config)`` for a request.

        ``filename`` without any dot gets the ``.django.html`` default
        extension; the page config is read from
        ``<settings.content_path><sef_path>.yaml``.
        """
        if "." not in filename:
            # if the filename has no dots, it has no extension; therefore, append a default
            filename = "{fn}{ext}".format(fn=filename, ext=".django.html")
        filepath = os.path.join(self.abspath.root, filename)
        self.context["article"] = None  # Clear the "article" context variable
        with open("{}{}.yaml".format(settings.content_path, sef_path), "r") as f:
            # safe_load: the original bare yaml.load(f) allows arbitrary
            # object construction (unsafe) and requires a Loader in PyYAML>=6.
            page_specific_config = yaml.safe_load(f)
        return filepath, page_specific_config
#include "OGG.h"
#include "../../File/File.h"
// Load an OGG container from disk.
// TODO: not implemented — the page-header parsing below is commented out and
// the function unconditionally reports success.
BF::FileActionResult BF::OGG::Load(const wchar_t* filePath)
{
    /*
    File file(filePath);
    file.ReadFromDisk();

    ByteStreamHusk byteStream(file.Data, file.DataSize);

    const char syncPattern[4] = { 'O','g','g','S' };
    char buffer[4];
    char segmentTable[255];

    byteStream.CopyBytesAndMove(buffer, 4);

    unsigned char versionFormat = byteStream.ExtractByteAndMove();
    unsigned long long granulePosition = byteStream.ExtractLongLongAndMove(Endian::Little);
    unsigned int serialNumber = byteStream.ExtractIntegerAndMove(Endian::Little);
    unsigned int pageSequenceNumber = byteStream.ExtractIntegerAndMove(Endian::Little);
    unsigned int checkSum = byteStream.ExtractIntegerAndMove(Endian::Little);
    unsigned char pageSegments = byteStream.ExtractByteAndMove();

    byteStream.CopyBytesAndMove(segmentTable, pageSegments);
    */

    return FileActionResult::Successful;
}
// Save an OGG container to disk.
// TODO: not implemented — reports success unconditionally.
BF::FileActionResult BF::OGG::Save(const wchar_t* filePath)
{
    return FileActionResult::Successful;
}
// Decode this OGG into a Sound object.
// TODO: not implemented — reports success unconditionally.
BF::FileActionResult BF::OGG::ConvertTo(Sound& sound)
{
    return FileActionResult::Successful;
}
// Encode a Sound object into this OGG.
// TODO: not implemented — reports success unconditionally.
BF::FileActionResult BF::OGG::ConvertFrom(Sound& sound)
{
    return FileActionResult::Successful;
}
#!/bin/bash
# Mirror a source tree and take an rsnapshot snapshot of the differences.
#
# Environment:
#   SNAPSHOT_ONLY - when set to "false", first rsync-mirror /backup/source
#                   into /backup/destination/latest/ before snapshotting.

# Timestamped status line, e.g. "2024-01-01 12:00:00 : message".
log() {
    echo "$(date '+%Y-%m-%d %H:%M:%S') : $*"
}

# Quoting + default fix: the original unquoted ${SNAPSHOT_ONLY} made the
# test a syntax error whenever the variable was unset or empty.
if [ "${SNAPSHOT_ONLY:-}" = "false" ]; then
    log Starting to mirror contents of source
    echo
    rsync -avzu --delete --stats /backup/source /backup/destination/latest/
    echo
    log Finish mirroring
fi

log Starting snapshot saving differences
rsnapshot -c /backup/rsnapshot.conf daily
log Finished snapshot

log Fetch snapshot report
echo
rsnapshot -c /backup/rsnapshot.conf du
echo
log Finished backup
#!/bin/bash -eu
# Build, tag and run the durdn/confluence Docker image.
cd "$(dirname "$0")"
. ../common.sh
initialise_basebox

$SUDO docker build -t durdn/confluence .
CONFLUENCE_VERSION="$($SUDO docker run --rm durdn/confluence sh -c 'echo $CONFLUENCE_VERSION')"

# Tag the image with its Confluence version if not already tagged.
# (wc -l rather than grep -c: grep exits non-zero on no match, which would
# abort the script under -e.)
CONF_TAGGED=$($SUDO docker images | grep durdn/confluence | grep "$CONFLUENCE_VERSION" | wc -l)
if [ "$CONF_TAGGED" -eq 0 ]; then
    $SUDO docker tag durdn/confluence "durdn/confluence:$CONFLUENCE_VERSION"
else
    echo "Confluence tagged already with version $CONFLUENCE_VERSION"
fi

CONFLUENCE_RUNNING=$($SUDO docker ps | grep durdn/confluence | wc -l)
if [ "$CONFLUENCE_RUNNING" -eq 0 ]; then
    # Remove a stopped leftover container before starting a fresh one.
    # Fix: these two docker calls previously ran without $SUDO, unlike
    # every other docker invocation in this script.
    if [ "$($SUDO docker ps -a | grep durdn/confluence | wc -l)" -ne 0 ]; then
        $SUDO docker rm confluence
    fi
    $SUDO docker run -d --name confluence --link postgres:db -p 8090:8090 durdn/confluence
fi
#!/bin/sh
# Provision a Habitat build host: install Azure CLI + JDK, create the 'hab'
# user, install Habitat, and drop a helper that uploads .hart files to
# Azure blob storage.
#
# Args: appID password tenantID storageAcc subscriptionID
appID=$1
password=$2
tenantID=$3
storageAcc=$4
subscriptionID=$5

echo "---Configure Repos for Azure Cli 2.0---"
echo "deb [arch=amd64] https://packages.microsoft.com/repos/azure-cli/ wheezy main" | sudo tee /etc/apt/sources.list.d/azure-cli.list
sudo apt-key adv --keyserver packages.microsoft.com --recv-keys 417A0893
sudo apt-get update
sudo apt-get install apt-transport-https azure-cli openjdk-8-jdk -y

# User/group for Habitat. Consistency fix: the original ran addgroup and
# usermod without sudo while useradd used it.
sudo addgroup hab
sudo useradd -g hab hab
sudo usermod -aG sudo hab
sleep 30
curl https://raw.githubusercontent.com/habitat-sh/habitat/master/components/hab/install.sh | sudo bash

# Write the upload helper. A heredoc with '>' replaces the original chain of
# 'echo >>' appends, which duplicated every line on re-provisioning.
# Shell expansions for the *generated* script are escaped with \.
mkdir -p /scripts
cat > /scripts/uploadhart.sh <<EOF
#!/bin/sh
HARTFILE=\$1
storageAccount='$storageAcc'
export AZURE_STORAGE_ACCOUNT='$storageAcc'
az login --service-principal -u '$appID' --password '$password' --tenant '$tenantID' > /dev/null
az account set --subscription $subscriptionID
az storage container create --name apphart --output table > /dev/null
az storage blob upload --container-name apphart -f \$HARTFILE -n \$HARTFILE > /dev/null
EOF
chmod +x /scripts/uploadhart.sh
#!/usr/bin/env bash
# Ref: Mac Tips, Tricks & Shortcuts in easy steps, 2nd Edition

# Ask for the administrator password upfront
sudo -v

# Keep-alive: update existing `sudo` time stamp until script has finished
while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &

# printf is used for banner spacing: unlike echo, its escape handling is
# identical across sh/bash/zsh (the original echo "\\n\\n" printed literal
# backslashes under bash).
printf '\n\n'
echo "===================="
echo "   macOS Defaults   "
echo "===================="

echo "Upping bluetooth audio quality (try 40 or 80)"
# Ref: https://www.reddit.com/r/apple/comments/5rfdj6/pro_tip_significantly_improve_bluetooth_audio/
defaults write com.apple.BluetoothAudioAgent "Apple Bitpool Max (editable)" 80
defaults write com.apple.BluetoothAudioAgent "Apple Bitpool Min (editable)" 80
defaults write com.apple.BluetoothAudioAgent "Apple Initial Bitpool (editable)" 80
defaults write com.apple.BluetoothAudioAgent "Apple Initial Bitpool Min (editable)" 80
defaults write com.apple.BluetoothAudioAgent "Negotiated Bitpool" 80
defaults write com.apple.BluetoothAudioAgent "Negotiated Bitpool Max" 80
defaults write com.apple.BluetoothAudioAgent "Negotiated Bitpool Min" 80

echo "Require password immediately after sleep"
defaults write com.apple.screensaver askForPassword -int 1
defaults write com.apple.screensaver askForPasswordDelay -int 0

echo "Activity monitor showing stats in dock"
defaults write com.apple.ActivityMonitor IconType -int 5

echo "Sorting Activity Monitor results by CPU usage"
defaults write com.apple.ActivityMonitor SortColumn -string "CPUUsage"
defaults write com.apple.ActivityMonitor SortDirection -int 0

echo "Expand print panel by default"
defaults write NSGlobalDomain PMPrintingExpandedStateForPrint -bool true
defaults write NSGlobalDomain PMPrintingExpandedStateForPrint2 -bool true

printf '\n\n=== Typing options ===\n'
echo "Set a blazingly fast keyboard repeat rate (lower equals faster)"
defaults write -g InitialKeyRepeat -int 20
defaults write -g KeyRepeat -int 1
defaults write NSGlobalDomain InitialKeyRepeat -int 20
defaults write NSGlobalDomain KeyRepeat -int 1

echo "Disable press-and-hold for keys in favor of key repeat"
defaults write NSGlobalDomain ApplePressAndHoldEnabled -bool false
# (duplicate VSCode line removed)
defaults write com.microsoft.VSCode ApplePressAndHoldEnabled -bool false

echo "Enable full keyboard access for all controls"
# (e.g. enable Tab in modal dialogs)
defaults write NSGlobalDomain AppleKeyboardUIMode -int 3

echo "Disable automatic capitalization as it’s annoying when typing code"
defaults write NSGlobalDomain NSAutomaticCapitalizationEnabled -bool false

echo "Disable smart dashes as they’re annoying when typing code"
defaults write NSGlobalDomain NSAutomaticDashSubstitutionEnabled -bool false

echo "Disable automatic period substitution as it’s annoying when typing code"
defaults write NSGlobalDomain NSAutomaticPeriodSubstitutionEnabled -bool false

echo "Disable smart quotes as they’re annoying when typing code"
defaults write NSGlobalDomain NSAutomaticQuoteSubstitutionEnabled -bool false

echo "Disable auto-correct"
defaults write NSGlobalDomain NSAutomaticSpellingCorrectionEnabled -bool false

printf '\n\n=== Safari options ===\n'
echo "Enable Safari’s debug menu"
defaults write com.apple.Safari IncludeInternalDebugMenu -bool true

echo "Enabling Safari developer options"
defaults write com.apple.Safari IncludeDevelopMenu -bool true && \
defaults write com.apple.Safari WebKitDeveloperExtrasEnabledPreferenceKey -bool true && \
defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2DeveloperExtrasEnabled -bool true && \
defaults write -g WebKitDeveloperExtras -bool true

echo "Enabling \"Do Not Track\" on Safari"
defaults write com.apple.Safari SendDoNotTrackHTTPHeader -bool true

# Backticks must be escaped here: unescaped, the original line executed
# `about:blank` as a command substitution inside the echo.
echo "Set Safari’s home page to \`about:blank\` for faster loading"
defaults write com.apple.Safari HomePage -string "about:blank"

printf '\n\n=== Monitor options ===\n'
echo "Enable subpixel font rendering on non-Apple LCDs"
# Ref: https://github.com/kevinSuttle/macOS-Defaults/issues/17#issuecomment-266633501
defaults write NSGlobalDomain AppleFontSmoothing -int 1

echo "Enable HiDPI display modes (requires restart)"
sudo defaults write /Library/Preferences/com.apple.windowserver DisplayResolutionEnabled -bool true

printf '\n\n=== Finder options ===\n'
echo "Show all filename extensions"
defaults write NSGlobalDomain AppleShowAllExtensions -bool true

echo "Show status bar"
defaults write com.apple.finder ShowStatusBar -bool true

echo "Show path bar"
defaults write com.apple.finder ShowPathbar -bool true

echo "Disabling file extension editing warning"
defaults write com.apple.finder FXEnableExtensionChangeWarning -bool false

echo "List view by default"
# Four-letter codes for the other view modes: `icnv`, `clmv`, `glyv`, `Nlsv`
defaults write com.apple.Finder FXPreferredViewStyle Nlsv

echo "Keeping folders on top of file views"
defaults write com.apple.finder _FXSortFoldersFirst -bool true

echo "Allowing text-selection in Quick Look"
defaults write com.apple.finder QLEnableTextSelection -bool true

echo "Searching current dir by default"
defaults write com.apple.finder FXDefaultSearchScope -string "SCcf"

echo "Avoid creating .DS_Store files on network or USB volumes"
defaults write com.apple.desktopservices DSDontWriteNetworkStores -bool true
defaults write com.apple.desktopservices DSDontWriteUSBStores -bool true

echo "Show item info near icons on the desktop and in other icon views"
/usr/libexec/PlistBuddy -c "Set :DesktopViewSettings:IconViewSettings:showItemInfo true" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :FK_StandardViewSettings:IconViewSettings:showItemInfo true" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :StandardViewSettings:IconViewSettings:showItemInfo true" ~/Library/Preferences/com.apple.finder.plist

echo "Show item info to the right of the icons on the desktop"
/usr/libexec/PlistBuddy -c "Set DesktopViewSettings:IconViewSettings:labelOnBottom false" ~/Library/Preferences/com.apple.finder.plist

echo "Enable snap-to-grid for icons on the desktop and in other icon views"
/usr/libexec/PlistBuddy -c "Set :DesktopViewSettings:IconViewSettings:arrangeBy name" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :FK_StandardViewSettings:IconViewSettings:arrangeBy name" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :StandardViewSettings:IconViewSettings:arrangeBy name" ~/Library/Preferences/com.apple.finder.plist

printf '\n\n=== Dock options ===\n'
echo "Dimming hidden Dock icons"
defaults write com.apple.dock showhidden -bool true

echo "Don’t show recent applications in Dock"
defaults write com.apple.dock show-recents -bool false

echo "Autohiding dock"
defaults write com.apple.dock autohide -bool true

echo "Disabling automatically rearranging spaces"
defaults write com.apple.dock mru-spaces -bool false

echo "Remove the animation when hiding/showing the Dock"
defaults write com.apple.dock autohide-time-modifier -float 0.25

echo "Restarting Dock"
killall Dock

printf '\n\nDefaults are set, please reboot after apps install.\n'
/*
This package includes some basics for the Steam protocol. It defines basic interfaces that are used throughout go-steam:
There is IMsg, which is extended by IClientMsg (sent after logging in) and abstracts over
the outgoing message types. Both interfaces are implemented by ClientMsgProtobuf and ClientMsg.
Msg is like ClientMsg, but it is used for sending messages before logging in.
There is also the concept of a Packet: This is a type for incoming messages where only
the header is deserialized. It therefore only contains EMsg data, job information and the remaining data.
Its contents can then be read via the Read* methods which read data into a MessageBody - a type which is Serializable and
has an EMsg.
In addition, there are extra types for communication with the Game Coordinator (GC) included in the gamecoordinator sub-package.
For outgoing messages the IGCMsg interface is used which is implemented by GCMsgProtobuf and GCMsg.
Incoming messages are of the GCPacket type and are read like regular Packets.
The actual messages and enums are in the sub-packages steamlang and protobuf, generated from the SteamKit data.
*/
package protocol
|
// tslint:disable
import { Maybe } from '@musical-patterns/utilities'

const env: Maybe<string> = process.env.NODE_ENV

// Environment-gated HTML fragments. Strict comparison throughout — the
// original mixed `!==` with a loose `!=` for 'material'; for string |
// undefined operands the two are equivalent, so behavior is unchanged.
const post: string = env !== 'test' && env !== 'material' ? require('./post.html') : ''
const nonPost: string = env === 'development' || env === 'production' ? require('./nonPost.html') : ''

export {
    post,
    nonPost,
}
|
python ../src/train.py -visible_gpus 1 -train_steps 80000 -train_from "../models/nyt_model_1612055512_step_50000.pt" -use_date True -encoder transformer -bert_data_path ../bert_data/t173 |
# Compile sumDouble.c to WebAssembly with Emscripten: -O3 + -ffast-math for
# speed, growable linear memory, and only _sumDouble exported. The .wasm
# output target skips the usual JS glue file.
emcc sumDouble.c \
  -O3 \
  -ffast-math \
  -s ALLOW_MEMORY_GROWTH=1 \
  -s "EXPORTED_FUNCTIONS=['_sumDouble']" \
  -o sumDouble.wasm
using System;
using System.Threading.Tasks;
using N3O.Umbraco.Payments.PayPal.Controllers;
using N3O.Umbraco.Payments.PayPal.Models;
/// <summary>
/// Thin wrapper around <see cref="PayPalController"/> that processes a
/// PayPal payment and converts exceptions into a failed response object.
/// </summary>
public class PaymentManager
{
    /// <summary>
    /// Processes <paramref name="paymentRequest"/> through PayPal.
    /// Never throws: failures are logged to the console and returned as a
    /// <see cref="PayPalPaymentResponse"/> with Success = false.
    /// </summary>
    public async Task<PayPalPaymentResponse> ProcessPayPalPaymentAsync(PayPalPaymentRequest paymentRequest)
    {
        try
        {
            // Instantiate the PayPal controller
            // NOTE(review): a new controller per call — presumably stateless;
            // confirm whether it should be injected instead.
            var payPalController = new PayPalController();
            // Call the PayPal API to process the payment
            var paymentResponse = await payPalController.ProcessPaymentAsync(paymentRequest);
            return paymentResponse;
        }
        catch (Exception ex)
        {
            // Handle any exceptions and log the error
            Console.WriteLine($"Error processing PayPal payment: {ex.Message}");
            return new PayPalPaymentResponse { Success = false, ErrorMessage = "An error occurred while processing the payment." };
        }
    }
}
<reponame>pradeep-gr/mbed-os5-onsemi
/*
* Copyright (c) 2015-2016, Freescale Semiconductor, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* o Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* o Neither the name of Freescale Semiconductor, Inc. nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef _FSL_SMARTCARD_PHY_EMVSIM_H_
#define _FSL_SMARTCARD_PHY_EMVSIM_H_
#include "fsl_smartcard.h"
/*!
* @addtogroup smartcard_phy_emvsim_driver
* @{
*/
/*******************************************************************************
* Definitions
******************************************************************************/
/*! @brief Smart card define which specifies the adjustment number of clock cycles during which an ATR string has to be received.
*/
#define SMARTCARD_ATR_DURATION_ADJUSTMENT (360u)
/*! @brief Smart card define which specifies the adjustment number of clock cycles until an initial 'TS' character has to be
* received. */
#define SMARTCARD_INIT_DELAY_CLOCK_CYCLES_ADJUSTMENT (4200u)
/*******************************************************************************
* API
******************************************************************************/
#if defined(__cplusplus)
extern "C" {
#endif
/*!
* @brief Fills in the smartcardInterfaceConfig structure with default values.
*
* @param config The user configuration structure of type smartcard_interface_config_t.
* Function fill in members:
* clockToResetDelay = 42000,
* vcc = kSmartcardVoltageClassB3_3V,
* with default values.
*/
void SMARTCARD_PHY_EMVSIM_GetDefaultConfig(smartcard_interface_config_t *config);
/*!
 * @brief Configures a Smart card interface.
 *
 * @param base The Smart card peripheral module base address.
 * @param config The user configuration structure of type smartcard_interface_config_t. The user
 * is responsible to fill out the members of this structure and to pass the pointer of this structure
 * into this function or call SMARTCARD_PHY_EMVSIM_GetDefaultConfig to fill out structure with default values.
 * @param srcClock_Hz Smart card clock generation module source clock.
 *
 * @retval kStatus_SMARTCARD_Success or kStatus_SMARTCARD_OtherError for an error.
 */
status_t SMARTCARD_PHY_EMVSIM_Init(EMVSIM_Type *base, const smartcard_interface_config_t *config, uint32_t srcClock_Hz);
/*!
* @brief De-initializes a Smart card interface, stops the Smart card clock, and disables the VCC.
*
* @param base Smart card peripheral module base address.
* @param config Smart card configuration structure.
*/
void SMARTCARD_PHY_EMVSIM_Deinit(EMVSIM_Type *base, const smartcard_interface_config_t *config);
/*!
* @brief Activates the Smart card IC.
*
* @param base The EMVSIM peripheral base address.
* @param context A pointer to a Smart card driver context structure.
* @param resetType type of reset to be performed, possible values
* = kSmartcardColdReset, kSmartcardWarmReset
*
* @retval kStatus_SMARTCARD_Success or kStatus_SMARTCARD_OtherError for an error.
*/
status_t SMARTCARD_PHY_EMVSIM_Activate(EMVSIM_Type *base,
smartcard_context_t *context,
smartcard_reset_type_t resetType);
/*!
* @brief De-activates the Smart card IC.
*
* @param base The EMVSIM peripheral base address.
* @param context A pointer to a Smart card driver context structure.
*
* @retval kStatus_SMARTCARD_Success or kStatus_SMARTCARD_OtherError for an error.
*/
status_t SMARTCARD_PHY_EMVSIM_Deactivate(EMVSIM_Type *base, smartcard_context_t *context);
/*!
* @brief Controls the Smart card interface IC.
*
* @param base The EMVSIM peripheral base address.
* @param context A pointer to a Smart card driver context structure.
* @param control A interface command type.
* @param param Integer value specific to control type
*
* @retval kStatus_SMARTCARD_Success or kStatus_SMARTCARD_OtherError for an error.
*/
status_t SMARTCARD_PHY_EMVSIM_Control(EMVSIM_Type *base,
smartcard_context_t *context,
smartcard_interface_control_t control,
uint32_t param);
/*@}*/
#if defined(__cplusplus)
}
#endif
/*! @}*/
#endif /* _FSL_SMARTCARD_PHY_EMVSIM_H_*/
|
<reponame>hristodobrev/Crafty-Arts
import { NgModule } from "@angular/core";
import { CommonModule } from "@angular/common";
import { RouterModule } from "@angular/router";
import { FormsModule } from '@angular/forms';
import { ToastModule } from 'ng2-toastr/ng2-toastr';
import { projectComponents } from "./index";
import { projectRoutes } from './projects.routing';
@NgModule({
imports: [
CommonModule,
FormsModule,
RouterModule.forChild(projectRoutes),
ToastModule.forRoot()
],
declarations: [
...projectComponents
]
})
export class ProjectsModule { } |
<?hh
namespace Waffle\Tests\Container\Inflector;
use namespace HH\Lib\C;
use type Waffle\Container\Inflector\Inflector;
use type Waffle\Container\Container;
use type Facebook\HackTest\HackTest;
use type Waffle\Tests\Container\Asset\Bar;
use type Waffle\Tests\Container\Asset\Baz;
use type Waffle\Tests\Container\Asset\Foo;
use function Facebook\FBExpect\expect;
/**
 * Tests for the container Inflector: verifies that queued method calls and
 * property assignments are recorded, and that inflect() applies them to a
 * target object (resolving values through the container).
 */
class InflectorTest extends HackTest
{
    /**
     * Asserts that the inflector sets expected method calls.
     */
    public function testInflectorSetsExpectedMethodCalls()
    {
        $container = new Container();
        $inflector = (new Inflector('Type'))->setContainer($container);
        $inflector->invokeMethod('method1', vec[ 'arg1' ]);
        $inflector->invokeMethods(dict[
            'method2' => vec[ 'arg1' ] ,
            'method3' => vec[ 'arg1' ]
        ]);
        // Reflection is used to peek at the private queue of recorded calls.
        $methods = (new \ReflectionClass($inflector))->getProperty('methods');
        $methods->setAccessible(true);
        $methods = $methods->getValue($inflector);
        expect(C\contains_key($methods, 'method1'))->toBeTrue();
        expect(C\contains_key($methods, 'method2'))->toBeTrue();
        expect(C\contains_key($methods, 'method3'))->toBeTrue();
        expect($methods['method1'])->toBeSame(vec['arg1']);
        expect($methods['method2'])->toBeSame(vec['arg1']);
        expect($methods['method3'])->toBeSame(vec['arg1']);
    }
    /**
     * Asserts that the inflector sets expected properties.
     */
    public function testInflectorSetsExpectedProperties()
    {
        $container = new Container();
        $inflector = (new Inflector('Type'))->setContainer($container);
        $inflector->setProperty('property1', 'value');
        $inflector->setProperties(dict[
            'property2' => 'value',
            'property3' => 'value'
        ]);
        // Same reflection trick as above, for the private property queue.
        $properties = (new \ReflectionClass($inflector))->getProperty('properties');
        $properties->setAccessible(true);
        $dict = $properties->getValue($inflector);
        expect(C\contains_key($dict, 'property1'))->toBeTrue();
        expect(C\contains_key($dict, 'property2'))->toBeTrue();
        expect(C\contains_key($dict, 'property3'))->toBeTrue();
        expect($dict['property1'])->toBeSame('value');
        expect($dict['property2'])->toBeSame('value');
        expect($dict['property3'])->toBeSame('value');
    }
    /**
     * Asserts that the inflector will inflect on an object with properties.
     * The property value (Bar::class) is resolved through the container, so
     * the injected instance must be the exact one the container serves.
     */
    public function testInflectorInflectsWithProperties()
    {
        $bar = new Bar();
        $container = new Container();
        $container->add(Bar::class, (): Bar ==> $bar);
        $inflector = (new Inflector('Type'))
            ->setContainer($container)
            ->setProperty('bar', Bar::class)
        ;
        $baz = new Baz();
        $inflector->inflect($baz);
        expect($baz->bar)->toBeInstanceOf(Bar::class);
        expect($baz->bar)->toBeSame($bar);
    }
    /**
     * Asserts that the inflector will inflect on an object with method call.
     * The method argument (Bar::class) is likewise resolved via the container.
     */
    public function testInflectorInflectsWithMethodCall()
    {
        $container = new Container();
        $bar = new Bar();
        $container->add(Bar::class, (): Bar ==> $bar);
        $inflector = (new Inflector('Type'))
            ->setContainer($container)
            ->invokeMethod('setBar', vec[
                Bar::class
            ])
        ;
        $foo = new Foo(null);
        $inflector->inflect($foo);
        expect($foo->bar)->toBeInstanceOf(Bar::class);
        expect($foo->bar)->toBeSame($bar);
    }
    /**
     * Asserts that the inflector will inflect on an object with a callback.
     *
     * NOTE(review): the 'x' prefix keeps HackTest from discovering this test,
     * i.e. it is currently disabled — presumably intentionally; confirm why
     * before renaming it back to test*.
     */
    public function xtestInflectorInflectsWithCallback()
    {
        $foo = new Foo(null);
        $bar = new Bar();
        $inflector = new Inflector('Type', (mixed $object): void ==> {
            /* HH_IGNORE_ERROR[4064] x */
            $object->setBar($bar);
        });
        $inflector->inflect($foo);
        expect($foo->bar)->toBeSame($bar);
    }
}
|
#!/bin/bash
# Install textlint plus a set of Japanese proofreading rule packages, fetch
# the prh rule dictionaries, and generate a .textlintrc wiring them together.
set -euo pipefail

# Packages as an array so names never go through word-splitting/globbing.
packages=(
  textlint
  textlint-rule-preset-japanese
  textlint-rule-prh
  textlint-rule-preset-ja-technical-writing
  textlint-rule-common-misspellings
  textlint-rule-spellcheck-tech-word
)

# npm is required to install textlint; pull it in via apt if missing.
if ! command -v npm > /dev/null 2>&1; then
  sudo apt-get install -y npm
fi

sudo npm i -g "${packages[@]}"

# Debian/Ubuntu install the binary as "nodejs"; textlint expects "node".
if ! command -v node > /dev/null 2>&1; then
  sudo ln -s /usr/bin/nodejs /usr/bin/node
fi

# Refresh the prh dictionary checkout.
rm -rf prh.yml
git clone https://github.com/azu/prh.yml.git

# Quoted delimiter: the JSON below is written literally, with no expansion.
cat << 'EOS' > .textlintrc
{
  "rules": {
    "common-misspellings": true,
    "preset-japanese": true,
    "spellcheck-tech-word": true,
    "preset-ja-technical-writing": true,
    "prh": {
      "rulePaths": [
        "./prh.yml/ja/jser-info.yml",
        "./prh.yml/ja/spoken.yml",
        "./prh.yml/ja/typo.yml",
        "./prh.yml/ja/web+db.yml",
        "./prh.yml/ja/kanji-open.yml"
      ]
    }
  }
}
EOS
exit 0
|
<filename>Practica12/Practica12Lib/src/edu/upv/poo/gamesapi/Score.java
package edu.upv.poo.gamesapi;
import java.util.Date;
/**
*
* @author luisroberto
*/
/**
 * A single score entry recorded for a player in a given game.
 *
 * @author luisroberto
 */
public class Score {
    // Database identifier of this entry.
    private int id;
    // Numeric score value achieved.
    private int score;
    // Name of the player who achieved the score.
    private String player;
    // Name of the game the score belongs to.
    private String game;
    // Moment the score was recorded; stored copy is never exposed directly.
    private Date date;
    /**
     * @return the id
     */
    public int getId() {
        return id;
    }
    /**
     * @param id the id to set
     */
    public void setId(int id) {
        this.id = id;
    }
    /**
     * @return the score
     */
    public int getScore() {
        return score;
    }
    /**
     * @param score the score to set
     */
    public void setScore(int score) {
        this.score = score;
    }
    /**
     * @return the player
     */
    public String getPlayer() {
        return player;
    }
    /**
     * @param player the player to set
     */
    public void setPlayer(String player) {
        this.player = player;
    }
    /**
     * @return the game
     */
    public String getGame() {
        return game;
    }
    /**
     * @param game the game to set
     */
    public void setGame(String game) {
        this.game = game;
    }
    /**
     * Returns the moment the score was recorded.
     *
     * <p>{@link Date} is mutable, so a defensive copy is returned to prevent
     * callers from mutating this entry through the returned reference.
     *
     * @return a copy of the date, or {@code null} if none was set
     */
    public Date getDate() {
        return date == null ? null : new Date(date.getTime());
    }
    /**
     * Sets the moment the score was recorded.
     *
     * <p>The argument is copied defensively so later mutation of the caller's
     * {@link Date} instance cannot change this entry.
     *
     * @param date the date to set (may be {@code null})
     */
    public void setDate(Date date) {
        this.date = date == null ? null : new Date(date.getTime());
    }
}
|
<filename>frontend/src/Component/TabContainer/styles.js<gh_stars>10-100
/**
 * Builds the TabContainer style object from the theme's spacing values.
 * Pure factory: same spacing in, same styles out.
 */
const makeStyles = (theme) => {
  const { spacing } = theme;
  return {
    container: {
      paddingTop: spacing.doubleBaseMargin,
      paddingBottom: spacing.doubleBaseMargin,
      width: spacing.fullWidth,
    },
  };
};

export default makeStyles;
|
<reponame>mjburling/beneficiary-fhir-data
package gov.cms.bfd.server.war.r4.providers.preadj;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.OptionalParam;
import ca.uhn.fhir.rest.annotation.Read;
import ca.uhn.fhir.rest.annotation.RequiredParam;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenAndListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.IResourceProvider;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import com.codahale.metrics.MetricRegistry;
import com.google.common.annotations.VisibleForTesting;
import com.newrelic.api.agent.Trace;
import gov.cms.bfd.model.rda.PreAdjFissClaim;
import gov.cms.bfd.model.rda.PreAdjMcsClaim;
import gov.cms.bfd.server.war.SpringConfiguration;
import gov.cms.bfd.server.war.r4.providers.TransformerUtilsV2;
import gov.cms.bfd.server.war.r4.providers.preadj.common.ClaimDao;
import gov.cms.bfd.server.war.r4.providers.preadj.common.ResourceTypeV2;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.PersistenceContext;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Claim;
import org.hl7.fhir.r4.model.ClaimResponse;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Resource;
/**
* Allows for generic processing of resource using common logic. Claims and ClaimResponses have the
* exact same logic for looking up, transforming, and returning data.
*
* @param <T> The specific fhir resource the concrete provider will serve.
*/
public abstract class AbstractR4ResourceProvider<T extends IBaseResource>
    implements IResourceProvider {

  /**
   * A {@link Pattern} that will match the {@link ClaimResponse#getId()}s used in this application,
   * e.g. <code>f-1234</code> or <code>m--1234</code> (for negative IDs). Group 1 is the claim type
   * prefix ("f" or "m"), group 2 is the claim id itself.
   */
  private static final Pattern CLAIM_ID_PATTERN = Pattern.compile("([fm])-(-?\\p{Alnum}+)");

  private EntityManager entityManager;
  private MetricRegistry metricRegistry;
  private R4ClaimSamhsaMatcher samhsaMatcher;
  private ClaimDao claimDao;
  // Concrete resource class for T, resolved reflectively in setResourceType().
  private Class<T> resourceType;

  /** @param entityManager a JPA {@link EntityManager} connected to the application's database */
  @PersistenceContext
  public void setEntityManager(EntityManager entityManager) {
    this.entityManager = entityManager;
  }

  /** @param metricRegistry the {@link MetricRegistry} to use */
  @Inject
  public void setMetricRegistry(MetricRegistry metricRegistry) {
    this.metricRegistry = metricRegistry;
  }

  /** @param samhsaMatcher the {@link R4ClaimSamhsaMatcher} to use */
  @Inject
  public void setSamhsaFilterer(R4ClaimSamhsaMatcher samhsaMatcher) {
    this.samhsaMatcher = samhsaMatcher;
  }

  /**
   * Builds the {@link ClaimDao} and resolves the concrete resource type once all
   * dependencies have been injected.
   */
  @PostConstruct
  public void init() {
    claimDao =
        new ClaimDao(
            entityManager, metricRegistry, SpringConfiguration.isPreAdjOldMbiHashEnabled());
    setResourceType();
  }

  /** @see IResourceProvider#getResourceType() */
  public Class<T> getResourceType() {
    return resourceType;
  }

  /**
   * Resolves {@code T} at runtime from the concrete subclass's generic superclass
   * declaration; fails fast if the subclass does not parameterize this class.
   */
  @VisibleForTesting
  void setResourceType() {
    Type superClass = this.getClass().getGenericSuperclass();
    if (superClass instanceof ParameterizedType) {
      Type[] params = ((ParameterizedType) superClass).getActualTypeArguments();
      if (params[0] instanceof Class) {
        // unchecked - By principal, it shouldn't be possible for the parameter to not be of type T
        //noinspection unchecked
        resourceType = (Class<T>) params[0];
      } else {
        throw new IllegalStateException("Invalid parameterized type declaration");
      }
    } else {
      throw new IllegalStateException("Missing parameterized type declaration");
    }
  }

  /**
   * Adds support for the FHIR "read" operation, for {@link ClaimResponse}s. The {@link Read}
   * annotation indicates that this method supports the read operation.
   *
   * <p>Read operations take a single parameter annotated with {@link IdParam}, and should return a
   * single resource instance.
   *
   * @param claimId The read operation takes one parameter, which must be of type {@link IdType} and
   *     must be annotated with the {@link IdParam} annotation. Must match {@link #CLAIM_ID_PATTERN}
   *     and must not carry a version.
   * @param requestDetails the request details for the read
   * @return Returns a resource matching the specified {@link IdDt}, or <code>null</code> if none
   *     exists.
   */
  @Read
  @Trace
  public T read(@IdParam IdType claimId, RequestDetails requestDetails) {
    if (claimId == null) throw new IllegalArgumentException("Resource ID can not be null");
    if (claimId.getVersionIdPartAsLong() != null)
      throw new IllegalArgumentException("Resource ID must not define a version.");
    String claimIdText = claimId.getIdPart();
    if (claimIdText == null || claimIdText.trim().isEmpty())
      throw new IllegalArgumentException("Resource ID can not be null/blank");
    Matcher claimIdMatcher = CLAIM_ID_PATTERN.matcher(claimIdText);
    if (!claimIdMatcher.matches())
      throw new IllegalArgumentException("Unsupported ID pattern: " + claimIdText);
    // Group 1 ("f"/"m") selects the claim type; group 2 is the entity id.
    String claimIdTypeText = claimIdMatcher.group(1);
    Optional<ResourceTypeV2<T>> optional = parseClaimType(claimIdTypeText);
    if (optional.isEmpty()) throw new ResourceNotFoundException(claimId);
    ResourceTypeV2<T> claimIdType = optional.get();
    String claimIdString = claimIdMatcher.group(2);
    Object claimEntity;
    try {
      claimEntity = claimDao.getEntityById(claimIdType, claimIdString);
    } catch (NoResultException e) {
      // Map "not in the database" onto the FHIR 404 semantics.
      throw new ResourceNotFoundException(claimId);
    }
    return claimIdType.getTransformer().transform(metricRegistry, claimEntity);
  }

  /**
   * Implementation specific claim type parsing
   *
   * @param typeText String to parse representing the claim type.
   * @return The parsed {@link ResourceTypeV2} type.
   */
  @VisibleForTesting
  abstract Optional<ResourceTypeV2<T>> parseClaimType(String typeText);

  /**
   * Creates a Set of {@link ResourceTypeV2} for the given claim types. Unrecognized
   * type tokens are silently dropped (the map lookup yields null and is filtered).
   *
   * @param types The types of claims to include
   * @return A Set of {@link ResourceTypeV2} claim types.
   */
  @VisibleForTesting
  @Nonnull
  Set<ResourceTypeV2<T>> parseClaimTypes(@Nonnull TokenAndListParam types) {
    return types.getValuesAsQueryTokens().get(0).getValuesAsQueryTokens().stream()
        .map(TokenParam::getValue)
        .map(String::toLowerCase)
        .map(getResourceTypeMap()::get)
        .filter(Objects::nonNull)
        .collect(Collectors.toSet());
  }

  /**
   * Returns a set of all supported resource types.
   *
   * @return Set of all supported resource types.
   */
  @VisibleForTesting
  abstract Set<ResourceTypeV2<T>> getResourceTypes();

  /**
   * Returns implementation specific {@link ResourceTypeV2} map.
   *
   * @return The implementation specific {@link ResourceTypeV2} map.
   */
  @VisibleForTesting
  abstract Map<String, ResourceTypeV2<T>> getResourceTypeMap();

  /**
   * Adds support for the FHIR "search" operation: finds all claims associated with the given
   * patient MBI, optionally filtered by claim type, SAMHSA exclusion, and date ranges.
   *
   * @param mbi the patient identifier to search for; required, must be non-blank
   * @param types optional list of claim types to include; all supported types when absent
   * @param hashed "false" (case-insensitive) means the MBI is plain text; any other value —
   *     including absent — is treated as hashed
   * @param samhsa "true" (case-insensitive) excludes all SAMHSA-related resources
   * @param lastUpdated optional range filter on resource lastUpdated
   * @param serviceDate optional range filter on claim service completion date
   * @param requestDetails the request details for the search
   * @return a {@link Bundle} containing the matching claims
   */
  @Search
  @Trace
  public Bundle findByPatient(
      @RequiredParam(name = "mbi")
          @Description(shortDefinition = "The patient identifier to search for")
          ReferenceParam mbi,
      @OptionalParam(name = "type")
          @Description(shortDefinition = "A list of claim types to include")
          TokenAndListParam types,
      @OptionalParam(name = "isHashed")
          @Description(shortDefinition = "A boolean indicating whether or not the MBI is hashed")
          String hashed,
      @OptionalParam(name = "excludeSAMHSA")
          @Description(shortDefinition = "If true, exclude all SAMHSA-related resources")
          String samhsa,
      @OptionalParam(name = "_lastUpdated")
          @Description(shortDefinition = "Include resources last updated in the given range")
          DateRangeParam lastUpdated,
      @OptionalParam(name = "service-date")
          @Description(shortDefinition = "Include resources that completed in the given range")
          DateRangeParam serviceDate,
      RequestDetails requestDetails) {
    if (mbi != null && !StringUtils.isBlank(mbi.getIdPart())) {
      String mbiString = mbi.getIdPart();
      Bundle bundleResource;
      // Hashed unless explicitly "false"; SAMHSA excluded only when explicitly "true".
      boolean isHashed = !Boolean.FALSE.toString().equalsIgnoreCase(hashed);
      boolean excludeSamhsa = Boolean.TRUE.toString().equalsIgnoreCase(samhsa);
      if (isHashed) {
        TransformerUtilsV2.logMbiHashToMdc(mbiString);
      }
      if (types != null) {
        bundleResource =
            createBundleFor(
                parseClaimTypes(types),
                mbiString,
                isHashed,
                excludeSamhsa,
                lastUpdated,
                serviceDate);
      } else {
        bundleResource =
            createBundleFor(
                getResourceTypes(), mbiString, isHashed, excludeSamhsa, lastUpdated, serviceDate);
      }
      return bundleResource;
    } else {
      throw new IllegalArgumentException("mbi can't be null/blank");
    }
  }

  /**
   * Creates a Bundle of resources for the given data using the given {@link ResourceTypeV2}.
   *
   * @param resourceTypes The {@link ResourceTypeV2} data to retrieve.
   * @param mbi The mbi to look up associated data for.
   * @param isHashed Denotes if the given mbi is hashed.
   * @param excludeSamhsa if true, claims with SAMHSA-related data are filtered out of the result.
   * @param lastUpdated Date range of desired lastUpdate values to retrieve data for.
   * @param serviceDate Date range of the desired service date to retrieve data for.
   * @return A Bundle with data found using the provided parameters.
   */
  @VisibleForTesting
  Bundle createBundleFor(
      Set<ResourceTypeV2<T>> resourceTypes,
      String mbi,
      boolean isHashed,
      boolean excludeSamhsa,
      DateRangeParam lastUpdated,
      DateRangeParam serviceDate) {
    List<T> resources = new ArrayList<>();
    for (ResourceTypeV2<T> type : resourceTypes) {
      List<?> entities;
      entities =
          claimDao.findAllByMbiAttribute(
              type.getEntityClass(),
              type.getEntityMbiRecordAttribute(),
              mbi,
              isHashed,
              lastUpdated,
              serviceDate,
              type.getEntityEndDateAttribute());
      resources.addAll(
          entities.stream()
              .filter(e -> !excludeSamhsa || hasNoSamhsaData(metricRegistry, e))
              .map(e -> type.getTransformer().transform(metricRegistry, e))
              .collect(Collectors.toList()));
    }
    Bundle bundle = new Bundle();
    resources.forEach(
        c -> {
          Bundle.BundleEntryComponent entry = bundle.addEntry();
          entry.setResource((Resource) c);
        });
    return bundle;
  }

  /**
   * Checks the given claim entity for SAMHSA-related data by transforming it into a FHIR
   * {@link Claim} and applying the SAMHSA matcher.
   *
   * @param metricRegistry the {@link MetricRegistry} passed to the transformer
   * @param entity a {@link PreAdjFissClaim} or {@link PreAdjMcsClaim} instance
   * @return true if the claim contains no SAMHSA-related data
   * @throws IllegalArgumentException for any other entity type
   */
  @VisibleForTesting
  boolean hasNoSamhsaData(MetricRegistry metricRegistry, Object entity) {
    Claim claim;
    if (entity instanceof PreAdjFissClaim) {
      claim = FissClaimTransformerV2.transform(metricRegistry, entity);
    } else if (entity instanceof PreAdjMcsClaim) {
      claim = McsClaimTransformerV2.transform(metricRegistry, entity);
    } else {
      throw new IllegalArgumentException(
          "Unsupported entity " + entity.getClass().getCanonicalName() + " for samhsa filtering");
    }
    return !samhsaMatcher.test(claim);
  }
}
|
def remove_chars(string, remove_list):
    """Return ``string`` with every occurrence of each entry in ``remove_list`` deleted.

    Entries are removed with ``str.replace``, so multi-character entries are
    stripped as whole substrings, processed in list order.
    """
    cleaned = string
    for unwanted in remove_list:
        cleaned = cleaned.replace(unwanted, '')
    return cleaned
# Example: remove_chars("Hello World", ["l", "d"]) -> "Heo Wor"
#!/bin/bash
# This script parses the command line parameters from runCust,
# maps them to the correct command line parameters for the DispNet training
# script, and launches that task.
# The last line of runCust should be: bash $CONFIG_FILE --data-dir $DATA_DIR --log-dir $LOG_DIR

# Defaults for the parameters runCust may pass in.
DATA_DIR=NONE
LOG_DIR=NONE
CONFIG_DIR=NONE
MODEL_DIR=NONE

# Parsing command line arguments.
# BUG FIX: the original used [[ $# > 0 ]], which is a *lexicographic string*
# comparison inside [[ ]]; (( $# > 0 )) compares numerically.
while (( $# > 0 )); do
  key="$1"
  case $key in
    -h|--help)
      echo "Usage: run_dispnet_training_philly.sh [run_options]"
      echo "Options:"
      echo "  -d|--data-dir <path> - directory path to input data (default NONE)"
      echo "  -l|--log-dir <path> - directory path to save the log files (default NONE)"
      echo "  -p|--config-file-dir <path> - directory path to config file directory (default NONE)"
      echo "  -m|--model-dir <path> - directory path to output model file (default NONE)"
      exit 1
      ;;
    -d|--data-dir)
      DATA_DIR="$2"
      shift # pass argument
      ;;
    -p|--config-file-dir)
      CONFIG_DIR=$(dirname "$2")
      shift # pass argument
      ;;
    -m|--model-dir)
      MODEL_DIR="$2"
      shift # pass argument
      ;;
    -l|--log-dir)
      LOG_DIR="$2"
      shift
      ;;
    *)
      echo "Unknown option $key" >&2
      ;;
  esac
  shift # past argument or value
done

# Print out the arguments that were passed into the script.
echo "DATA_DIR=$DATA_DIR"
echo "LOG_DIR=$LOG_DIR"
echo "CONFIG_DIR=$CONFIG_DIR"
echo "MODEL_DIR=$MODEL_DIR"

# Run training on philly.
# Add the root folder of the code to the PYTHONPATH.
export PYTHONPATH=$PYTHONPATH:$CONFIG_DIR

# Run the actual job.
python "$CONFIG_DIR/anytime_models/examples/imagenet-dense-ann.py" \
  --data_dir="$DATA_DIR" \
  --log_dir="$LOG_DIR" \
  --model_dir="$MODEL_DIR" \
  --load="${MODEL_DIR}/checkpoint" \
  --adaloss_gamma=0.07 --adaloss_update_per=1 --adaloss_momentum=0.9 --adaloss_final_extra=0.5 --sum_rand_ratio=0 --is_select_arr -f=5 --samloss=100 --densenet_depth=201 -s=17 --batch_size=128 --nr_gpu=4 --densenet_version=dense --min_predict_unit=10 --reduction_ratio=0.5 --dropout_kp=0.9 --opt_at=-1 -g=32 --num_classes=1000
|
#!/bin/bash
# Package a frozen-moment checkout for npm publishing: clones the local repo
# into a scratch directory and copies only the distributable files into $dest.
set -e

if [ "$#" != 1 ]; then
  echo "Please provide tag to checkout" >&2
  exit 1
fi
tag="$1"
# NOTE(review): $tag is accepted but never used below — the requested tag is
# apparently expected to be checked out elsewhere; confirm the intent.

# Walk up until we find the repo root (marked by moment.js) or hit /.
# (Original used the deprecated/ambiguous `[ ... -a ... ]` form.)
while [[ "$PWD" != '/' && ! -f moment.js ]]; do
  cd ..
done
if [[ ! -f moment.js ]]; then
  echo "Run me from the moment repo" >&2
  exit 1
fi

basename=$(basename "$PWD")
src=moment-npm-git
dest=moment-npm

cd ..
rm -rf -- "$src" "$dest"
git clone "$basename" "$src"
mkdir -- "$dest"
cp "$src/frozen-moment.js" "$dest"
cp "$src/package.json" "$dest"
cp "$src/README.md" "$dest"
cp "$src/LICENSE" "$dest"
cp -r "$src/locale" "$dest"
cp -r "$src/min" "$dest"
cp "$src/ender.js" "$dest"
cp "$src/package.js" "$dest"
rm -rf -- "$src"
echo "Check out $dest"
<reponame>hartt28/YoutubeAnalyzer
package Actors;
import akka.actor.AbstractActorWithTimers;
import akka.actor.ActorRef;
import akka.actor.Props;
import models.SearchingResults;
import scala.concurrent.duration.Duration;
import services.AsynProcessor;
import javax.inject.Inject;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import static akka.pattern.Patterns.ask;
/**
* @author <NAME>
*/
/**
 * Actor that performs YouTube keyword searches: the first search runs on
 * request, then a timer re-runs the query every 30 seconds and pushes only the
 * newly discovered results to the registered user actor. Sentiment for each
 * result is obtained synchronously from a child {@link CommentsActor}.
 *
 * @author <NAME>
 */
public class SearchActor extends AbstractActorWithTimers {
    @Inject
    AsynProcessor asynProcessor;
    /** ActorRef of the user actor; set when a RegisterMsg arrives. */
    private ActorRef userActor;
    /** ActorRef of the child comments actor, created in preStart(). */
    private ActorRef commentsActor;
    /** Search key; null until the first SearchRequest is received. */
    private String query;
    /** Accumulated searching results across timer ticks (used to diff out new ones). */
    private Set<SearchingResults> output;
    // Record of all search keys seen so far; written but not read in this class.
    private Set<String> history = new HashSet<>();
    /**
     * Create an instance of the class using.
     */
    public static Props getProps() {
        System.out.println("SearchActor Start");
        return Props.create(SearchActor.class);
    }
    /**
     * Dummy inner class used for the timer
     */
    public static final class Tick {
    }
    /**
     * Constructor
     */
    public SearchActor() {
        this.userActor = null;
        this.output = new HashSet<>();
        // NOTE(review): overwrites the injected asynProcessor field — confirm
        // whether injection is actually expected here.
        this.asynProcessor = new AsynProcessor();
    }
    /**
     * Start the timer: fire a Tick every 30 seconds, and create the child
     * comments actor.
     */
    @Override
    public void preStart() {
        //getTimers().startPeriodicTimer("Timer", new Tick(), Duration.create(2, TimeUnit.SECONDS));
        getTimers().startTimerWithFixedDelay("Timer", new Tick(), Duration.create(30, TimeUnit.SECONDS));
        this.commentsActor = getContext().actorOf(CommentsActor.getProps());
    }
    /**
     * Handle the incoming messages: RegisterMsg records the sender as the user
     * actor, SearchRequest triggers the first search, and Tick re-runs the
     * current query (if any).
     * @return Receive receive
     */
    @Override
    public Receive createReceive() {
        // (translated from Chinese) parse JSON, record which user the message
        // belongs to — original author's note; TODO confirm intent
        return receiveBuilder()
            .match(RegisterMsg.class, msg -> {
                userActor = sender();
            })
            .match(SearchRequest.class, firstSearchMsg ->{
                query = firstSearchMsg.searchKey;
                history.add(query);
                firstSearch(firstSearchMsg.searchKey);}
            )
            .match(Tick.class, msg -> {
                if(query !=null){
                    TickMessage();}
            }).build();
    }
    /**
     * Registration message: the sender becomes the target for search updates.
     */
    static public class RegisterMsg {
    }
    /**
     * Message carrying a video id whose comments should be analyzed.
     */
    static public class commentMessage{
        private String videoId;
        public commentMessage(String videoId) {
            this.videoId = videoId;
        }
        public String getVideoId() {
            return videoId;
        }
    }
    /**
     * Message carrying the search key for the first search.
     */
    static public class SearchRequest{
        private String searchKey;
        public SearchRequest(String searchKey) {
            this.searchKey = searchKey;
        }
    }
//    private void firstSearch(String key) throws GeneralSecurityException, IOException {
//        this.output = new HashSet<>();
//        List<SearchingResults> searchingResults = asynProcessor.webSocketSearch(key);
//        output.addAll(searchingResults);
//        UserActor.SearchMessage searchMessage = new UserActor.SearchMessage(output, Key);
//        userActors.forEach(actorRef -> actorRef.tell(searchMessage, self()));
//    }
    /**
     * Runs the initial search for the given key asynchronously, attaches
     * sentiment to each result, and sends the full result set to the user actor.
     * @param key search key to handle
     */
    private void firstSearch(String key) throws GeneralSecurityException, IOException {
        asynProcessor.processSearchAsync(key).thenAcceptAsync(searchResults -> {
            System.out.println("first search :" + key);
            // Copy the current state of results in a temporary variable
            Set<SearchingResults> Results = new HashSet<>(searchResults);
            SendWithCommentActor(searchResults);
            UserActor.SearchMessage searchMessage = new UserActor.SearchMessage(Results, key);
            userActor.tell(searchMessage, self());
        });
    }
    /**
     * Adds sentiment to each search result by asking the comments actor and
     * blocking (join, up to 10s per video) on the reply.
     * @param searchResults from model.SearchingResults
     */
    private void SendWithCommentActor(List<SearchingResults> searchResults){
        for(SearchingResults i : searchResults){
            // NOTE(review): this tell() sends an extra message in addition to
            // the ask() below — possibly redundant; confirm.
            commentMessage commentMessage = new commentMessage(i.getVideoId());
            commentsActor.tell(commentMessage,self());
            CompletableFuture<Object> sentiment = ask(commentsActor,new commentMessage(i.getVideoId()),java.time.Duration.ofMillis(10000)).toCompletableFuture();
            String s = (String)sentiment.join();
            i.setSentiment(s);
        }
    }
    /**
     * Timer handler: re-runs the stored query and sends only the results not
     * seen in any previous tick to the user actor.
     */
    public void TickMessage() {
        System.out.println("Key = " + query);
        asynProcessor.processSearchAsync(query).thenAcceptAsync(searchResults -> {
            SendWithCommentActor(searchResults);
            // Copy the current state of results in a temporary variable
            Set<SearchingResults> oldResults = new HashSet<>(output);
            // Add all the results to the list, now filtered to only add the new ones
            output.addAll(searchResults);
            // Copy the current state of results after addition in a temporary variable
            Set<SearchingResults> newResults = new HashSet<>(output);
            // Get the new results only by doing new - old = what we have to display
            newResults.removeAll(oldResults);
            UserActor.SearchMessage searchMessage = new UserActor.SearchMessage(newResults, query);
            userActor.tell(searchMessage, self());
        });
    }
}
|
#!/bin/bash
# Reset the working directory: remove generated *pizza* artifacts (here and in
# in/) and restore the default configuration as main.cfg.
# nullglob: an unmatched pattern expands to nothing instead of being passed
# literally to rm (which would then fail with "No such file or directory").
shopt -s nullglob
# -f: no error when nothing matched; --: never treat a matched name as an option.
rm -f -- *pizza* in/*pizza*
cp -- default.cfg main.cfg
|
<reponame>pkowalcze/pitgull
package org.polyvariant
import cats.implicits.*
import cats.effect.*
import sttp.model.Uri
import sttp.client3.*
import org.polyvariant.Gitlab.MergeRequestInfo
import cats.Applicative
import sttp.monad.MonadError
import cats.MonadThrow
import org.polyvariant.Config.ArgumentsParsingException
import cats.effect.std.Console
/**
 * Bootstrap CLI for pitgull: lists a project's merge requests, deletes the
 * ones authored by the configured bot user (after interactive confirmation),
 * and installs the pitgull webhook on the project.
 */
object Main extends IOApp {

  /** Logs the IID and author of every merge request in the list. */
  private def printMergeRequests[F[_]: Logger: Applicative](mergeRequests: List[MergeRequestInfo]): F[Unit] =
    mergeRequests.traverse { mr =>
      Logger[F].info(s"ID: ${mr.mergeRequestIid} by: ${mr.authorUsername}")
    }.void

  /** Reads one line from the console; true only for a (case-insensitive) "y". */
  private def readConsent[F[_]: Console: Applicative]: F[Boolean] =
    Console[F].readLine.map(_.toLowerCase == "y")

  /** Keeps only the merge requests authored by the bot user. */
  private def qualifyMergeRequestsForDeletion(botUserName: String, mergeRequests: List[MergeRequestInfo]): List[MergeRequestInfo] =
    mergeRequests.filter(_.authorUsername == botUserName)

  /**
   * Main effectful flow: parse config, list and delete bot MRs (aborting with
   * an error if the user declines), then create the pitgull webhook.
   */
  private def program[F[_]: Logger: Console: Async: MonadThrow](args: List[String]): F[Unit] = {
    // Synchronous sttp backend used by the Gitlab client.
    given SttpBackend[Identity, Any] = HttpURLConnectionBackend()
    val parsedArgs = Args.parse(args)
    for {
      config <- Config.fromArgs(parsedArgs)
      _ <- Logger[F].info("Starting pitgull bootstrap!")
      gitlab = Gitlab.sttpInstance[F](config.gitlabUri, config.token)
      mrs <- gitlab.mergeRequests(config.project)
      _ <- Logger[F].info(s"Merge requests found: ${mrs.length}")
      _ <- printMergeRequests(mrs)
      botMrs = qualifyMergeRequestsForDeletion(config.botUser, mrs)
      _ <- Logger[F].info(s"Will delete merge requests: ${botMrs.map(_.mergeRequestIid).mkString(", ")}")
      _ <- Logger[F].info("Do you want to proceed? y/Y")
      // Abort the whole program (via raiseError) unless the user consents.
      _ <- MonadThrow[F]
        .ifM(readConsent)(
          ifTrue = MonadThrow[F].pure(()),
          ifFalse = MonadThrow[F].raiseError(new Exception("User rejected deletion"))
        )
      _ <- botMrs.traverse(mr => gitlab.deleteMergeRequest(config.project, mr.mergeRequestIid))
      _ <- Logger[F].info("Done processing merge requests")
      _ <- Logger[F].info("Creating webhook")
      _ <- gitlab.createWebhook(config.project, config.pitgullWebhookUrl)
      _ <- Logger[F].info("Webhook created")
      _ <- Logger[F].success("Bootstrap finished")
    } yield ()
  }

  /**
   * Entry point: runs the program in IO, downgrading known failures to log
   * messages. Always exits with success status.
   */
  override def run(args: List[String]): IO[ExitCode] = {
    given Logger[IO] = Logger.wrappedPrint[IO]
    program[IO](args).recoverWith {
      case Config.ArgumentsParsingException =>
        Logger[IO].info(Config.usage)
      case e: Exception =>
        Logger[IO].error(s"Unexpected error ocurred: $e")
    } *>
      IO.pure(ExitCode.Success)
  }
}
|
<filename>src/main/java/com/github/masiuchi/mtdataapi/ListCallback.java
package com.github.masiuchi.mtdataapi;
import org.json.JSONArray;
import org.json.JSONObject;
/**
 * Callback adapter for Data API list responses: unwraps the "items" array and
 * "totalResults" count from the raw JSON and hands them to the list-specific
 * hook.
 */
public abstract class ListCallback implements Callback {
    /** Unpacks the response and delegates to {@link #onSuccess(JSONArray, int)}. */
    public final void onSuccess(JSONObject response) {
        onSuccess(response.getJSONArray("items"), response.getInt("totalResults"));
    }

    /**
     * Invoked with the parsed list payload.
     *
     * @param items the "items" array from the response
     * @param totalResults the "totalResults" count from the response
     */
    public abstract void onSuccess(JSONArray items, int totalResults);
}
|
package com.example.basicbeans.counter;
import org.springframework.stereotype.Component;
import org.springframework.web.context.annotation.SessionScope;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Session-scoped {@link Counter}: each HTTP session gets its own bean
 * instance, identified by a random UUID, with a thread-safe counter.
 */
@Component
@SessionScope
public class SessionScopedCounter implements Counter {

    /** Identifies this particular bean instance. */
    private final String uuid = UUID.randomUUID().toString();

    /** Thread-safe counter state. */
    private final AtomicInteger safeCount = new AtomicInteger();

    @Override
    public int incrementAndGet() {
        return safeCount.incrementAndGet();
    }

    @Override
    public String getUUID() {
        return uuid;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.